From e1f188a25eabfe444c4f4e9cef93b2df647d3618 Mon Sep 17 00:00:00 2001 From: ChipCE Date: Wed, 11 Jan 2023 16:28:32 +0900 Subject: [PATCH 01/31] Add profile for Gutenberg G-Zero By : Nguyen (minhtu.nguyen@gutenberg.co.jp) --- resources/definitions/gutenberg_base.def.json | 95 +++++++++++++++++++ .../definitions/gutenberg_gzero.def.json | 17 ++++ .../extruders/gutenberg_extruder_0.def.json | 16 ++++ .../gutenberg_global_fast_quality.inst.cfg | 57 +++++++++++ .../gutenberg_global_fine_quality.inst.cfg | 57 +++++++++++ .../gutenberg_global_normal_quality.inst.cfg | 57 +++++++++++ .../gutenberg_global_strong_quality.inst.cfg | 56 +++++++++++ 7 files changed, 355 insertions(+) create mode 100644 resources/definitions/gutenberg_base.def.json create mode 100644 resources/definitions/gutenberg_gzero.def.json create mode 100644 resources/extruders/gutenberg_extruder_0.def.json create mode 100644 resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg create mode 100644 resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg create mode 100644 resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg create mode 100644 resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg diff --git a/resources/definitions/gutenberg_base.def.json b/resources/definitions/gutenberg_base.def.json new file mode 100644 index 0000000000..9394d841bb --- /dev/null +++ b/resources/definitions/gutenberg_base.def.json @@ -0,0 +1,95 @@ +{ + "name": "Gutenberg Base", + "version": 2, + "inherits": "fdmprinter", + "metadata": + { + "visible": false, + "author": "Gutenberg Dev", + "manufacturer": "Gutenberg", + "file_formats": "text/x-gcode", + "first_start_actions": ["MachineSettingsAction"], + "preferred_quality_type": "normal", + "has_machine_quality": true, + "machine_extruder_trains": { "0": "gutenberg_extruder_0" }, + "preferred_material": "generic_abs" + }, + "overrides": + { + "machine_name": { "default_value": "GUTENBERG 3DP" }, + "machine_width": { "default_value": 165 }, + "machine_depth": { "default_value": 165 }, + "machine_height": { "default_value": 165 }, + "material_diameter": { "default_value": 1.75 }, + "gantry_height": { "value": 30 }, + "machine_use_extruder_offset_to_offset_coords": { "value": false }, + "machine_heated_bed": { "default_value": true }, + "machine_max_acceleration_x": { "default_value": 15000 }, + "machine_max_acceleration_y": { "default_value": 15000 }, + "machine_max_acceleration_z": { "default_value": 250 }, + "machine_acceleration": { "default_value": 1500 }, + "machine_max_jerk_e": { "default_value": 60 }, + "machine_steps_per_mm_x": { "default_value": 160 }, + "machine_steps_per_mm_y": { "default_value": 160 }, + "machine_steps_per_mm_z": { "default_value": 800 }, + "machine_endstop_positive_direction_x": { "default_value": true }, + "machine_endstop_positive_direction_y": { "default_value": true }, + "machine_endstop_positive_direction_z": { "default_value": false }, + "machine_feeder_wheel_diameter": { "default_value": 7.5 }, + "machine_head_with_fans_polygon": { "default_value": [ [-35, 65], [-35, -50], [35, -50], [35, 65] ] }, + "machine_max_feedrate_z": { "default_value": 40 }, + "machine_max_feedrate_e": { "default_value": 120 }, + "machine_start_gcode": { "default_value": ";Simple\nSTART_PRINT EXTRUDER_TEMP={material_print_temperature_layer_0} BED_TEMP={material_bed_temperature_layer_0}\n;Or with custom bed mesh area\n;START_PRINT EXTRUDER_TEMP={material_print_temperature_layer_0} BED_TEMP={material_bed_temperature_layer_0} 
AREA_START=%MINX%,%MINY% AREA_END=%MAXX%,%MAXY% FILAMENT_TYPE={material_type}" }, + "machine_end_gcode": { "default_value": "END_PRINT" }, + "adhesion_type": { "default_value": "skirt" }, + "retraction_amount": { "default_value": 0.80 }, + "skirt_brim_minimal_length": { "default_value": 550 }, + "retraction_speed": { "default_value": 35, "maximum_value_warning": 130 }, + "retraction_retract_speed": { "maximum_value_warning": 130 }, + "retraction_prime_speed": { "value": "math.ceil(retraction_speed * 0.4)", "maximum_value_warning": 130 }, + "retraction_hop_enabled": { "default_value": true }, + "retraction_hop": { "default_value": 0.2 }, + "retraction_combing": { "value": "'noskin'" }, + "retraction_combing_max_distance": { "default_value": 10 }, + "travel_avoid_other_parts": { "default_value": false }, + "speed_travel": { "value": 300, "maximum_value_warning": 501 }, + "speed_travel_layer_0": { "value": "math.ceil(speed_travel * 0.4)" }, + "speed_layer_0": { "value": "math.ceil(speed_print * 0.25)" }, + "speed_wall": { "value": "math.ceil(speed_print * 0.33)" }, + "speed_wall_0": { "value": "math.ceil(speed_print * 0.33)" }, + "speed_wall_x": { "value": "math.ceil(speed_print * 0.66)" }, + "speed_topbottom": { "value": "math.ceil(speed_print * 0.33)" }, + "speed_roofing": { "value": "math.ceil(speed_print * 0.33)" }, + "speed_slowdown_layers": { "default_value": 4 }, + "roofing_layer_count": { "value": 1 }, + "optimize_wall_printing_order": { "default_value": true }, + "infill_enable_travel_optimization": { "default_value": true }, + "minimum_polygon_circumference": { "default_value": 0.2 }, + "wall_overhang_angle": { "default_value": 75 }, + "wall_overhang_speed_factor": { "default_value": 50 }, + "bridge_settings_enabled": { "default_value": true }, + "bridge_wall_coast": { "default_value": 10 }, + "bridge_fan_speed_2": { "resolve": "max(cool_fan_speed, 50)" }, + "bridge_fan_speed_3": { "resolve": "max(cool_fan_speed, 20)" }, + "alternate_extra_perimeter": { "default_value": true }, + "cool_min_layer_time_fan_speed_max": { "default_value": 20 }, + "cool_min_layer_time": { "default_value": 15 }, + "cool_fan_full_at_height": { "value": "resolveOrValue('layer_height_0') + resolveOrValue('layer_height') * max(1, cool_fan_full_layer - 1)" }, + "cool_fan_full_layer": { "value": 4 }, + "layer_height_0": { "resolve": "max(0.2, min(extruderValues('layer_height')))" }, + "line_width": { "value": "machine_nozzle_size * 1.125" }, + "wall_line_width": { "value": "machine_nozzle_size" }, + "meshfix_maximum_resolution": { "default_value": 0.01 }, + "infill_before_walls": { "default_value": false }, + "zig_zaggify_infill": { "value": true }, + "min_infill_area": { "default_value": 5.0 }, + "acceleration_enabled": { "default_value": true }, + "acceleration_print": { "default_value": 15000 }, + "acceleration_wall_0": { "value": 7500 }, + "acceleration_layer_0": { "value": 10000 }, + "acceleration_travel_layer_0": { "value": 10000 }, + "acceleration_roofing": { "value": 10000 }, + "jerk_wall_0": { "value": 10 }, + "jerk_roofing": { "value": 10 } + } +} diff --git a/resources/definitions/gutenberg_gzero.def.json b/resources/definitions/gutenberg_gzero.def.json new file mode 100644 index 0000000000..f03d5e3d9f --- /dev/null +++ b/resources/definitions/gutenberg_gzero.def.json @@ -0,0 +1,17 @@ +{ + "name": "G-ZERO", + "version": 2, + "inherits": "gutenberg_base", + "metadata": + { + "visible": true, + "quality_definition": "gutenberg_base" + }, + "overrides": + { + "machine_name": { "default_value": "G-ZERO" 
}, + "machine_width": { "default_value": 250 }, + "machine_depth": { "default_value": 200 }, + "machine_height": { "default_value": 201 } + } +} diff --git a/resources/extruders/gutenberg_extruder_0.def.json b/resources/extruders/gutenberg_extruder_0.def.json new file mode 100644 index 0000000000..58b41d62c6 --- /dev/null +++ b/resources/extruders/gutenberg_extruder_0.def.json @@ -0,0 +1,16 @@ +{ + "version": 2, + "name": "Toolhead", + "inherits": "fdmextruder", + "metadata": + { + "machine": "gutenberg_base", + "position": "0" + }, + + "overrides": + { + "extruder_nr": { "default_value": 0, "maximum_value": 1 }, + "material_diameter": { "default_value": 1.75 } + } +} diff --git a/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg new file mode 100644 index 0000000000..0a50bc02cf --- /dev/null +++ b/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg @@ -0,0 +1,57 @@ + +[general] +version = 4 +name = Fast +definition = gutenberg_base + +[metadata] +setting_version = 20 +type = quality +quality_type = fast +global_quality = True + +[values] +acceleration_enabled = True +adhesion_type = skirt +layer_height = 0.2 +layer_height_0 = 0.2 +retraction_combing = noskin +support_enable = False +acceleration_print = 15000 +acceleration_roofing = 10000 +acceleration_topbottom = 10000 +acceleration_travel = 15000 +acceleration_wall = 15000 +acceleration_wall_0 = 10000 +alternate_extra_perimeter = False +infill_line_width = 0.5 +infill_pattern = lines +infill_sparse_density = 10 +optimize_wall_printing_order = True +retraction_amount = 0.8 +retraction_hop = 0.2 +retraction_hop_enabled = True +retraction_hop_only_when_collides = True +retraction_speed = 35 +roofing_layer_count = 0 +skin_monotonic = True +skirt_brim_speed = 45 +speed_infill = 200 +speed_layer_0 = 45 +speed_print = 300 +speed_print_layer_0 = 45 +speed_roofing = 100 +speed_support = 200 +speed_topbottom = 150 +speed_travel = 300 +speed_travel_layer_0 = 150 +speed_wall = 100 +speed_wall_0 = 100 +speed_wall_x = 150 +support_angle = 46 +top_bottom_thickness = 0.6 +travel_avoid_supports = True +wall_line_width = 0.45 +wall_line_width_0 = 0.4 +wall_thickness = 1.25 +zig_zaggify_infill = True \ No newline at end of file diff --git a/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg new file mode 100644 index 0000000000..ca97c99090 --- /dev/null +++ b/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg @@ -0,0 +1,57 @@ + +[general] +version = 4 +name = Fine +definition = gutenberg_base + +[metadata] +setting_version = 20 +type = quality +quality_type = fine +global_quality = True + +[values] +acceleration_enabled = True +adhesion_type = skirt +layer_height = 0.2 +layer_height_0 = 0.2 +retraction_combing = noskin +support_enable = False +acceleration_print = 15000 +acceleration_roofing = 10000 +acceleration_topbottom = 10000 +acceleration_travel = 15000 +acceleration_wall = 7500 +acceleration_wall_0 = 7500 +alternate_extra_perimeter = False +cool_min_layer_time = 10 +infill_line_width = 0.5 +infill_pattern = gyroid +infill_sparse_density = 15 +inset_direction = outside_in +optimize_wall_printing_order = True +retraction_amount = 0.8 +retraction_hop = 0.2 +retraction_hop_enabled = True +retraction_hop_only_when_collides = True +retraction_speed = 35 +roofing_layer_count = 1 +skin_monotonic = True +skirt_brim_speed = 45 +speed_infill = 150 
+speed_layer_0 = 45 +speed_print = 100 +speed_roofing = 100 +speed_topbottom = 100 +speed_travel = 300 +speed_travel_layer_0 = 60 +speed_wall = 100 +speed_wall_0 = 75 +speed_wall_x = 100 +support_angle = 46 +top_bottom_thickness = 0.8 +travel_avoid_supports = True +wall_line_width = 0.45 +wall_line_width_0 = 0.4 +wall_thickness = 1.65 +zig_zaggify_infill = True \ No newline at end of file diff --git a/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg new file mode 100644 index 0000000000..0cc2559993 --- /dev/null +++ b/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg @@ -0,0 +1,57 @@ + +[general] +version = 4 +name = Normal +definition = gutenberg_base + +[metadata] +setting_version = 20 +type = quality +quality_type = normal +global_quality = True + +[values] +acceleration_enabled = True +adhesion_type = skirt +layer_height = 0.2 +layer_height_0 = 0.2 +retraction_combing = noskin +support_enable = False +acceleration_print = 15000 +acceleration_roofing = 10000 +acceleration_topbottom = 10000 +acceleration_travel = 15000 +acceleration_wall = 10000 +acceleration_wall_0 = 7500 +alternate_extra_perimeter = True +bottom_layers = 3 +infill_line_width = 0.5 +infill_pattern = gyroid +infill_sparse_density = 15 +optimize_wall_printing_order = True +retraction_amount = 0.8 +retraction_hop = 0.2 +retraction_hop_enabled = True +retraction_hop_only_when_collides = True +retraction_speed = 35 +roofing_layer_count = 1 +skin_monotonic = True +skirt_brim_speed = 45 +speed_infill = 150 +speed_layer_0 = 45 +speed_print = 150 +speed_roofing = 100 +speed_topbottom = 100 +speed_travel = 300 +speed_travel_layer_0 = 100 +speed_wall = 100 +speed_wall_0 = 100 +speed_wall_x = 150 +support_angle = 46 +top_bottom_thickness = 0.6 +top_layers = 3 +travel_avoid_supports = True +wall_line_width = 0.45 +wall_line_width_0 = 0.4 +wall_thickness = 1.25 +zig_zaggify_infill = True \ No newline at end of file diff --git a/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg new file mode 100644 index 0000000000..71ca4f3d1c --- /dev/null +++ b/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg @@ -0,0 +1,56 @@ + +[general] +version = 4 +name = Strong +definition = gutenberg_base + +[metadata] +setting_version = 20 +type = quality +quality_type = strong +global_quality = True + +[values] +acceleration_enabled = True +adhesion_type = skirt +layer_height = 0.2 +layer_height_0 = 0.2 +retraction_combing = noskin +support_enable = False +acceleration_print = 15000 +acceleration_roofing = 10000 +acceleration_topbottom = 10000 +acceleration_travel = 15000 +acceleration_wall = 10000 +acceleration_wall_0 = 7500 +alternate_extra_perimeter = True +infill_line_width = 0.5 +infill_pattern = gyroid +infill_sparse_density = 30 +optimize_wall_printing_order = True +retraction_amount = 0.8 +retraction_hop = 0.2 +retraction_hop_enabled = True +retraction_hop_only_when_collides = True +retraction_speed = 35 +roofing_layer_count = 1 +skin_monotonic = True +skirt_brim_speed = 45 +speed_infill = 150 +speed_layer_0 = 45 +speed_print = 100 +speed_print_layer_0 = 60 +speed_roofing = 100 +speed_topbottom = 100 +speed_travel = 300 +speed_travel_layer_0 = 100 +speed_wall = 100 +speed_wall_0 = 75 +speed_wall_x = 100 +support_angle = 46 +top_bottom_thickness = 1 +travel_avoid_supports = True +wall_line_width = 0.45 +wall_line_width_0 = 0.4 
+wall_thickness = 2.05 +zig_zaggify_infill = True \ No newline at end of file From 20778f3d10f16888c836e644b52a299cd7467cef Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Mon, 16 Jan 2023 15:46:25 +0100 Subject: [PATCH 02/31] Move translation scripts to Cura from Uranium CURA-9814 --- scripts/translations/createjsoncontext.py | 103 +++++++++++++++++++ scripts/translations/createkeypair.py | 47 +++++++++ scripts/translations/createplugincontext.py | 72 +++++++++++++ scripts/translations/extract-all | 13 +++ scripts/translations/extract-json | 42 ++++++++ scripts/translations/extract-messages | 42 ++++++++ scripts/translations/extract-plugins | 14 +++ scripts/translations/extract-python | 12 +++ scripts/translations/extract-tr-strings | 73 +++++++++++++ scripts/translations/pirate.py | 108 ++++++++++++++++++++ scripts/translations/pirateofdoom.py | 77 ++++++++++++++ 11 files changed, 603 insertions(+) create mode 100644 scripts/translations/createjsoncontext.py create mode 100644 scripts/translations/createkeypair.py create mode 100644 scripts/translations/createplugincontext.py create mode 100755 scripts/translations/extract-all create mode 100755 scripts/translations/extract-json create mode 100755 scripts/translations/extract-messages create mode 100755 scripts/translations/extract-plugins create mode 100755 scripts/translations/extract-python create mode 100755 scripts/translations/extract-tr-strings create mode 100644 scripts/translations/pirate.py create mode 100644 scripts/translations/pirateofdoom.py diff --git a/scripts/translations/createjsoncontext.py b/scripts/translations/createjsoncontext.py new file mode 100644 index 0000000000..a9f6019005 --- /dev/null +++ b/scripts/translations/createjsoncontext.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Copyright 2014 Burkhard Lück + +Permission to use, copy, modify, and distribute this software +and its documentation for any purpose and without fee is hereby +granted, provided that the above copyright notice appear in all +copies and that both that the copyright notice and this +permission notice and warranty disclaimer appear in supporting +documentation, and that the name of the author not be used in +advertising or publicity pertaining to distribution of the +software without specific, written prior permission. + +The author disclaim all warranties with regard to this +software, including all implied warranties of merchantability +and fitness. In no event shall the author be liable for any +special, indirect or consequential damages or any damages +whatsoever resulting from loss of use, data or profits, whether +in an action of contract, negligence or other tortious action, +arising out of or in connection with the use or performance of +this software. +""" + +# This script generates a POT file from a JSON settings file. It +# has been adapted from createjsoncontext.py of KDE's translation +# scripts. It extracts the "label" and "description" values of +# the JSON file using the structure as used by Uranium settings files. 
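# A minimal illustration of the mapping performed below, assuming a typical
# Uranium-style settings entry (the setting name and file path are examples
# only, not taken from this patch):
#
#   "layer_height": { "label": "Layer Height", "description": "The height of each layer." }
#
# is turned by appendMessage() into POT entries of the form:
#
#   #: fdmprinter.def.json
#   msgctxt "layer_height label"
#   msgid "Layer Height"
#   msgstr ""
#
#   #: fdmprinter.def.json
#   msgctxt "layer_height description"
#   msgid "The height of each layer."
#   msgstr ""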
+ +import sys +import os +import json +import time +import os.path +import collections + +debugoutput = False #set True to print debug output in scripty's logs + +basedir = sys.argv[-1] +pottxt = "" + +def appendMessage(file, setting, field, value): + global pottxt + pottxt += "#: {0}\nmsgctxt \"{1} {2}\"\nmsgid \"{3}\"\nmsgstr \"\"\n\n".format(file, setting, field, value.replace("\n", "\\n").replace("\"", "\\\"")) + +def processSettings(file, settings): + for name, value in settings.items(): + appendMessage(file, name, "label", value["label"]) + if "description" in value: + appendMessage(file, name, "description", value["description"]) + + if "warning_description" in value: + appendMessage(file, name, "warning_description", value["warning_description"]) + + if "error_description" in value: + appendMessage(file, name, "error_description", value["error_description"]) + + if "options" in value: + for item, description in value["options"].items(): + appendMessage(file, name, "option {0}".format(item), description) + + if "children" in value: + processSettings(file, value["children"]) + +def potheader(): + headertxt = "#, fuzzy\n" + headertxt += "msgid \"\"\n" + headertxt += "msgstr \"\"\n" + headertxt += "\"Project-Id-Version: Uranium json setting files\\n\"\n" + headertxt += "\"Report-Msgid-Bugs-To: plugins@ultimaker.com\\n\"\n" + headertxt += "\"POT-Creation-Date: %s+0000\\n\"\n" %time.strftime("%Y-%m-%d %H:%M") + headertxt += "\"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n\"\n" + headertxt += "\"Last-Translator: FULL NAME \\n\"\n" + headertxt += "\"Language-Team: LANGUAGE\\n\"\n" + headertxt += "\"MIME-Version: 1.0\\n\"\n" + headertxt += "\"Content-Type: text/plain; charset=UTF-8\\n\"\n" + headertxt += "\"Content-Transfer-Encoding: 8bit\\n\"\n" + headertxt += "\n" + return headertxt + +if len(sys.argv) < 3: + print("wrong number of args: %s" % sys.argv) + print("\nUsage: python %s jsonfilenamelist basedir" % os.path.basename(sys.argv[0])) +else: + jsonfilename = sys.argv[1] + basedir = sys.argv[2] + outputfilename = sys.argv[3] + + with open(jsonfilename, "r", encoding = "utf-8") as data_file: + error = False + + jsondatadict = json.load(data_file, object_pairs_hook=collections.OrderedDict) + if "settings" not in jsondatadict: + print("No settings item found, nothing to translate") + exit(1) + + processSettings(jsonfilename.replace(basedir, ""), jsondatadict["settings"]) + + if pottxt != "": + with open(outputfilename, "w", encoding = "utf-8") as output_file: + output_file.write(potheader()) + output_file.write(pottxt) diff --git a/scripts/translations/createkeypair.py b/scripts/translations/createkeypair.py new file mode 100644 index 0000000000..e01c9c2a0b --- /dev/null +++ b/scripts/translations/createkeypair.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +import argparse +from typing import Optional +import sys + +from UM.Trust import TrustBasics + +# Default arguments, if arguments to the script are omitted, these values are used: +DEFAULT_PRIVATE_KEY_PATH = "./private_key.pem" +DEFAULT_PUBLIC_KEY_PATH = "./public_key.pem" +DEFAULT_PASSWORD = "" + + +def createAndStoreNewKeyPair(private_filename: str, public_filename: str, optional_password: Optional[str]) -> None: + """Creates a new public and private key, and saves them to the provided filenames. + + See also 'Trust.py' in the main library and the related scripts; 'signfile.py', 'signfolder.py' in this folder. + + :param private_filename: Filename to save the private key to. + :param public_filename: Filename to save the public key to. 
+ :param optional_password: Private keys can have a password (or not). + """ + + password = None if optional_password == "" else optional_password + private_key, public_key = TrustBasics.generateNewKeyPair() + TrustBasics.saveKeyPair(private_key, private_filename, public_filename, password) + + +def mainfunc(): + """Arguments: + + `-k ` or `--private ` will store the generated private key to + `-p ` or `--public ` will store the generated public key to + `-w ` or `--password ` will give the private key a password (none if omitted, which is default) + """ + + parser = argparse.ArgumentParser() + parser.add_argument("-k", "--private", type = str, default = DEFAULT_PRIVATE_KEY_PATH) + parser.add_argument("-p", "--public", type = str, default = DEFAULT_PUBLIC_KEY_PATH) + parser.add_argument("-w", "--password", type = str, default = DEFAULT_PASSWORD) + args = parser.parse_args() + createAndStoreNewKeyPair(args.private, args.public, args.password) + + +if __name__ == "__main__": + sys.exit(mainfunc()) diff --git a/scripts/translations/createplugincontext.py b/scripts/translations/createplugincontext.py new file mode 100644 index 0000000000..25a086357e --- /dev/null +++ b/scripts/translations/createplugincontext.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Copyright 2014 Burkhard Lück + +Permission to use, copy, modify, and distribute this software +and its documentation for any purpose and without fee is hereby +granted, provided that the above copyright notice appear in all +copies and that both that the copyright notice and this +permission notice and warranty disclaimer appear in supporting +documentation, and that the name of the author not be used in +advertising or publicity pertaining to distribution of the +software without specific, written prior permission. + +The author disclaim all warranties with regard to this +software, including all implied warranties of merchantability +and fitness. In no event shall the author be liable for any +special, indirect or consequential damages or any damages +whatsoever resulting from loss of use, data or profits, whether +in an action of contract, negligence or other tortious action, +arising out of or in connection with the use or performance of +this software. +""" + +# This script generates a POT file from a JSON settings file. It +# has been adapted from createjsoncontext.py of KDE's translation +# scripts. It extracts the "label" and "description" values of +# the JSON file using the structure as used by Uranium settings files. 
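# Note that this variant reads a plugin.json rather than a settings file; as a
# minimal illustration, with an invented plugin name and description, an input
# such as
#
#   {"name": "Example Plugin", "description": "Does something useful.", "version": "1.0.0", "api": 8}
#
# is turned by appendMessage() into POT entries of the form:
#
#   #: ExamplePlugin/plugin.json
#   msgctxt "description"
#   msgid "Does something useful."
#   msgstr ""
#
#   #: ExamplePlugin/plugin.json
#   msgctxt "name"
#   msgid "Example Plugin"
#   msgstr ""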
+ +import sys +import os.path +import collections +import json + +debugoutput = False #set True to print debug output in scripty's logs + +basedir = sys.argv[-1] +pottxt = "" + + +def appendMessage(file, field, value): + global pottxt + pottxt += "#: {0}\nmsgctxt \"{1}\"\nmsgid \"{2}\"\nmsgstr \"\"\n\n".format(file, field, value.replace("\n", "\\n").replace("\"", "\\\"")) + + +if len(sys.argv) < 3: + print("wrong number of args: %s" % sys.argv) + print("\nUsage: python %s jsonfilenamelist basedir" % os.path.basename(sys.argv[0])) +else: + json_filename = sys.argv[1] + basedir = sys.argv[2] + output_filename = sys.argv[3] + + with open(json_filename, "r", encoding = "utf-8") as data_file: + error = False + + jsondatadict = json.load(data_file, object_pairs_hook=collections.OrderedDict) + if "name" not in jsondatadict or ("api" not in jsondatadict and "supported_sdk_versions" not in jsondatadict) or "version" not in jsondatadict: + print("The plugin.json file found on %s is invalid, ignoring it" % json_filename) + exit(1) + + file = json_filename.replace(basedir, "") + + if "description" in jsondatadict: + appendMessage(file, "description", jsondatadict["description"]) + if "name" in jsondatadict: + appendMessage(file, "name", jsondatadict["name"]) + + if pottxt != "": + with open(output_filename, "a", encoding = "utf-8") as output_file: + output_file.write(pottxt) diff --git a/scripts/translations/extract-all b/scripts/translations/extract-all new file mode 100755 index 0000000000..ea0ac63f58 --- /dev/null +++ b/scripts/translations/extract-all @@ -0,0 +1,13 @@ +#!/bin/bash +# +# Use xgettext to extract all strings from a set of python files. +# Argument 1 is the directory to search for python files, argument 2 +# is the destination file. +# +# This script will extract strings marked using i18n or i18nc methods. +# See UM/i18n.py for the relevant methods. +# +dir=$1 +dest=$2 +xgettext --from-code=UTF-8 --language=python -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $(find -L "$dir" -name \*.py) +xgettext --from-code=UTF-8 --join-existing --language=javascript -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $(find -L "$dir" -name \*.qml) diff --git a/scripts/translations/extract-json b/scripts/translations/extract-json new file mode 100755 index 0000000000..01048bb5cb --- /dev/null +++ b/scripts/translations/extract-json @@ -0,0 +1,42 @@ +#! /bin/bash + +# Extract strings from a list of JSON files. +# +# This script will extract strings from all JSON files in the directory +# passed as first argument. The second argument is the destination +# directory for the extracted message file. +# +# This script uses createjsoncontext to generate the actual message file +# from the JSON file. +# +# This script is based on handle_json_files.sh from KDE's translation +# scripts. +# handle_json_files.sh is copyright 2014 Burkhard Lück +scriptdir=$(dirname $0) + +extract() { + basedir=$1 + dest=$2 + file=$3 + + python3 $scriptdir/createjsoncontext.py $file $basedir json.$$.tmp + if test $? 
-eq 1; then + return + fi + + echo "Extracted messages from $file" + + msguniq --to-code=UTF-8 -o json.$$ json.$$.tmp + if test -f json.$$; then + destfile="$dest/$(basename $file).pot" + mv json.$$ $destfile + fi + rm -f json.$$ json.$$.tmp +} + +dir=$1; shift +dest=$1; shift + +for file in $(find -L "$dir" -name *.json | grep -v 'tests'); do + extract $dir $dest $file +done diff --git a/scripts/translations/extract-messages b/scripts/translations/extract-messages new file mode 100755 index 0000000000..315009bcdf --- /dev/null +++ b/scripts/translations/extract-messages @@ -0,0 +1,42 @@ +#!/bin/bash + +scriptdir=$(dirname $0) +basedir=$1 +catalogname=$2 + +# This script processes the source files using several other scripts to extract strings. +# The strings are extracted to $basedir/resources/i18n/ and then post processed. After that +# It generates english translation files and testing files that are pre- and sufficed with +# xx. These can be used by setting the LANGUAGE environment variable to x-test. +# +# This script uses extract-tr-strings to extract strings from QML files, extract-json to +# extract strings from JSON files and extract-python to extract strings from Python files. +# +mkdir -p $basedir/resources/i18n +$scriptdir/extract-json $basedir/resources/definitions/ $basedir/resources/i18n +$scriptdir/extract-all $basedir $basedir/resources/i18n/$catalogname.pot +$scriptdir/extract-plugins $basedir/plugins/ $basedir/resources/i18n/$catalogname.pot +msgconv --to-code=UTF-8 $basedir/resources/i18n/$catalogname.pot -o $basedir/resources/i18n/$catalogname.pot + +for pot in $basedir/resources/i18n/*.pot; do + filename=$(basename $pot) + + dir=$basedir/resources/i18n/en_US + mkdir -p $dir + po=$dir/${filename/.pot/.po} + msginit --no-translator -l en_US -i $pot -o $po + + dir=$basedir/resources/i18n/x-test + mkdir -p $dir + po=$dir/${filename/.pot/.po} + msginit --no-translator -l en_US -i $pot -o $po + msgfilter --keep-header -i $po -o $po sed -e "s/.*/xx&xx/" + msgfilter -i $po -o $po sed -e "s/Language: en_US/Language: x-test/" + + #Auto-translate the translation files to Pirate. + dir=$basedir/resources/i18n/en_7S + mkdir -p $dir + po=$dir/${filename/.pot/.po} + python3 $scriptdir/pirate.py $pot $po + echo Created $po. +done diff --git a/scripts/translations/extract-plugins b/scripts/translations/extract-plugins new file mode 100755 index 0000000000..d5b3674968 --- /dev/null +++ b/scripts/translations/extract-plugins @@ -0,0 +1,14 @@ +#! /bin/bash + +# Extract strings from all plugins +# + +scriptdir=$(dirname $0) +dir=$1; shift +dest=$1; shift + +for file in $(find -L "$dir" -name plugin.json | grep -v 'tests'); do + python3 $scriptdir/createplugincontext.py $file $dir $dest +done + + diff --git a/scripts/translations/extract-python b/scripts/translations/extract-python new file mode 100755 index 0000000000..34cf332f99 --- /dev/null +++ b/scripts/translations/extract-python @@ -0,0 +1,12 @@ +#!/bin/bash +# +# Use xgettext to extract all strings from a set of python files. +# Argument 1 is the directory to search for python files, argument 2 +# is the destination file. +# +# This script will extract strings marked using i18n or i18nc methods. +# See UM/i18n.py for the relevant methods. 
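# For reference, the -k keyword flags below map onto the catalog API from
# UM/i18n.py: -ki18n:1 takes the first argument of i18n(text) as the msgid,
# and -ki18nc:1c,2 takes the first argument of i18nc(context, text) as the
# msgctxt and the second as the msgid.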
+# +dir=$1 +dest=$2 +xgettext --from-code=UTF-8 --language=python -ki18n:1 -ki18nc:1c,2 -o $dest $(find -L "$dir" -name \*.py) diff --git a/scripts/translations/extract-tr-strings b/scripts/translations/extract-tr-strings new file mode 100755 index 0000000000..378f50bb40 --- /dev/null +++ b/scripts/translations/extract-tr-strings @@ -0,0 +1,73 @@ +#!/bin/sh +# +# This script extracts strings from a set of files using Qt's translation system. +# It then converts the extracted .ts file in a .po file that can be used with +# tools that expect Gettext's po file format. +# +# This script was adapted from extract-tr-strings from KDE's translation scripts. +# extract-tr-strings is Copyright 2014 Aurélien Gateau +set -e + +OLD_PWD=$PWD +cd $(dirname $0) +SCRIPTS_DIR=$PWD +cd $OLD_PWD + +LUPDATE=${LUPDATE:-lupdate} +LCONVERT=${LCONVERT:-lconvert} + +die() { + echo "ERROR: $*" >&2 + exit 1 +} + +usage() { + cat <= 2: #There's an ID on this line! + line = line[line.find('"') + 1:] #Strip everything before the first ". + line = line[:line.rfind('"')] #And after the last ". + + if state == "ctxt": + last_ctxt += line #What's left is the context. + elif state == "idplural": + last_id_plural += line #Or the plural ID. + elif state == "id": + last_id += line #Or the ID. + elif state == "str": + last_str += line #Or the actual string. + +for key, _ in translations.items(): + context, english, english_plural = key + pirate = translate(english) + pirate_plural = translate(english_plural) + translations[key] = (pirate, pirate_plural) + +with open(po_file, "w", encoding = "utf-8") as f: + f.write("""msgid "" +msgstr "" +"Project-Id-Version: Pirate\\n" +"Report-Msgid-Bugs-To: plugins@ultimaker.com\\n" +"POT-Creation-Date: 1492\\n" +"PO-Revision-Date: 1492\\n" +"Last-Translator: Ghostkeeper and Awhiemstra\\n" +"Language-Team: Ghostkeeper and Awhiemstra\\n" +"Language: Pirate\\n" +"Lang-Code: en\\n" +"Country-Code: en_7S\\n" +"MIME-Version: 1.0\\n" +"Content-Type: text/plain; charset=UTF-8\\n" +"Content-Transfer-Encoding: 8bit\\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\\n" +""") + for key, value in translations.items(): + context, english, english_plural = key + pirate, pirate_plural = value + f.write('msgctxt "{context}"\n'.format(context = context)) + if english_plural == "": #No plurals in this item. + f.write('msgid "{english}"\n'.format(english = english)) + f.write('msgstr "{pirate}"\n'.format(pirate = pirate)) + else: + f.write('msgid "{english}"\n'.format(english = english)) + f.write('msgid_plural "{english_plural}"\n'.format(english_plural = english_plural)) + f.write('msgstr[0] "{pirate}"\n'.format(pirate = pirate)) + f.write('msgstr[1] "{pirate_plural}"\n'.format(pirate_plural = pirate_plural)) + f.write("\n") #Empty line. 
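# A worked example of translate(), assuming the mappings from pirateofdoom.py
# below ("delete" -> "send to Davy Jones' locker", "file" -> "treasure"):
#
#   translate("Delete file") == "Send to Davy Jones' locker treasure"
#
# The capital "D" of the matched word is carried over to the first character
# of the replacement, which is what the uppercase bookkeeping above does.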
\ No newline at end of file diff --git a/scripts/translations/pirateofdoom.py b/scripts/translations/pirateofdoom.py new file mode 100644 index 0000000000..e8b8a28958 --- /dev/null +++ b/scripts/translations/pirateofdoom.py @@ -0,0 +1,77 @@ +pirate = { + "build plate": "deck", + "buildplate": "deck", + "quit": "abandon ship", + "back": "avast", + "nozzle": "muzzle", + "nozzles": "muzzles", + "extruder": "cannon", + "extruders": "cannons", + "yes": "aye", + "no": "nay", + "loading": "haulin'", + "you": "ye", + "you're": "ye are", + "ok": "aye", + "machine": "ship", + "machines": "ships", + "mm/s²": "knots/s", + "mm/s": "knots", + "printer": "ship", + "printers": "ships", + "view": "spyglass", + "support": "peg legs", + "fan": "wind", + "file": "treasure", + "file(s)": "treasure(s)", + "files": "treasures", + "profile": "map", + "profiles": "maps", + "setting": "knob", + "settings": "knobs", + "shield": "sail", + "your": "yer", + "the": "th'", + "travel": "journey", + "wireframe": "ropey", + "wire": "rope", + "are": "be", + "is": "be", + "there": "thar", + "not": "nay", + "delete": "send to Davy Jones' locker", + "remove": "send to Davy Jones' locker", + "print": "scribble", + "printing": "scribblin'", + "load": "haul", + "connect to": "board", + "connecting": "boarding", + "collects": "hoards", + "prime tower": "buoy", + "change log": "captain's log", + "my": "me", + "removable drive": "life boat", + "print core": "scribbler", + "printcore": "scribbler", + "abort": ["maroon", "abandon"], + "aborting": ["marooning", "abandoning"], + "aborted": ["marooned", "abandoned"], + "connected": ["anchored", "moored"], + "developer": "scurvy dog", + "wizard": "cap'n", + "active leveling": "keelhauling", + "download": "plunder", + "downloaded": "plundered", + "caution hot surface": "fire in the hole!", + "type": "sort", + "spool": "barrel", + "surface": "lacquer", + "zigzag": "heave-to", + "bottom": "bilge", + "top": "deck", + "ironing": "deck swabbing", + "adhesion": "anchorage", + "blob": "barnacle", + "blobs": "barnacles", + "slice": "set sail", +} From d24bd3f979ce53d32202d4741069abc5a638ac77 Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Mon, 16 Jan 2023 15:50:19 +0100 Subject: [PATCH 03/31] Use a loop to go over files instead of passing them all in as arguments so that we don't go over the argument size limit. 
CURA-9814 --- scripts/translations/extract-all | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/scripts/translations/extract-all b/scripts/translations/extract-all index ea0ac63f58..4c5fa09219 100755 --- a/scripts/translations/extract-all +++ b/scripts/translations/extract-all @@ -9,5 +9,14 @@ # dir=$1 dest=$2 -xgettext --from-code=UTF-8 --language=python -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $(find -L "$dir" -name \*.py) -xgettext --from-code=UTF-8 --join-existing --language=javascript -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $(find -L "$dir" -name \*.qml) +for f in $(find -L "$dir" -name \*.py) +do + echo "Extracting strings from python file: $f" + xgettext --from-code=UTF-8 --language=python -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f +done + +for f in $(find -L "$dir" -name \*.qml) +do + echo "Extracting strings from qml file: $f" + xgettext --from-code=UTF-8 --join-existing --language=javascript -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f +done From ec26cd509ea66e274e000c8cd4e61eb997f60a61 Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Mon, 16 Jan 2023 15:50:52 +0100 Subject: [PATCH 04/31] Extend comment CURA-9814 --- scripts/translations/extract-json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/translations/extract-json b/scripts/translations/extract-json index 01048bb5cb..00cef1e866 100755 --- a/scripts/translations/extract-json +++ b/scripts/translations/extract-json @@ -1,6 +1,6 @@ #! /bin/bash -# Extract strings from a list of JSON files. +# Extract strings from all JSON files in a directory into files with matching names ending with .pot. # # This script will extract strings from all JSON files in the directory # passed as first argument. The second argument is the destination From c8ba9bccdb614933c179d3fa490c9cbcf0bd3c1e Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Mon, 16 Jan 2023 15:52:53 +0100 Subject: [PATCH 05/31] Remove xtest as we will no longer be running this locally so it is not useful. CURA-9814 --- scripts/translations/extract-messages | 7 ------- 1 file changed, 7 deletions(-) diff --git a/scripts/translations/extract-messages b/scripts/translations/extract-messages index 315009bcdf..31014c8e90 100755 --- a/scripts/translations/extract-messages +++ b/scripts/translations/extract-messages @@ -26,13 +26,6 @@ for pot in $basedir/resources/i18n/*.pot; do po=$dir/${filename/.pot/.po} msginit --no-translator -l en_US -i $pot -o $po - dir=$basedir/resources/i18n/x-test - mkdir -p $dir - po=$dir/${filename/.pot/.po} - msginit --no-translator -l en_US -i $pot -o $po - msgfilter --keep-header -i $po -o $po sed -e "s/.*/xx&xx/" - msgfilter -i $po -o $po sed -e "s/Language: en_US/Language: x-test/" - #Auto-translate the translation files to Pirate. 
dir=$basedir/resources/i18n/en_7S mkdir -p $dir From 3478c0af6cbecaef6292fe006753658480b696cd Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Mon, 16 Jan 2023 15:56:42 +0100 Subject: [PATCH 06/31] Remove unused translation script CURA-9814 --- scripts/translations/extract-tr-strings | 73 ------------------------- 1 file changed, 73 deletions(-) delete mode 100755 scripts/translations/extract-tr-strings diff --git a/scripts/translations/extract-tr-strings b/scripts/translations/extract-tr-strings deleted file mode 100755 index 378f50bb40..0000000000 --- a/scripts/translations/extract-tr-strings +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/sh -# -# This script extracts strings from a set of files using Qt's translation system. -# It then converts the extracted .ts file in a .po file that can be used with -# tools that expect Gettext's po file format. -# -# This script was adapted from extract-tr-strings from KDE's translation scripts. -# extract-tr-strings is Copyright 2014 Aurélien Gateau -set -e - -OLD_PWD=$PWD -cd $(dirname $0) -SCRIPTS_DIR=$PWD -cd $OLD_PWD - -LUPDATE=${LUPDATE:-lupdate} -LCONVERT=${LCONVERT:-lconvert} - -die() { - echo "ERROR: $*" >&2 - exit 1 -} - -usage() { - cat < Date: Mon, 16 Jan 2023 16:04:40 +0100 Subject: [PATCH 07/31] make print statement more informative CURA-9814 --- scripts/translations/createjsoncontext.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/translations/createjsoncontext.py b/scripts/translations/createjsoncontext.py index a9f6019005..a4f02012d4 100644 --- a/scripts/translations/createjsoncontext.py +++ b/scripts/translations/createjsoncontext.py @@ -92,7 +92,7 @@ else: jsondatadict = json.load(data_file, object_pairs_hook=collections.OrderedDict) if "settings" not in jsondatadict: - print("No settings item found, nothing to translate") + print(f"Nothing to translate in file: {jsondatadict}") exit(1) processSettings(jsonfilename.replace(basedir, ""), jsondatadict["settings"]) From bcf715193add2078f9a20a9ed82e3c8d4f1ec1be Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Tue, 17 Jan 2023 17:55:51 +0100 Subject: [PATCH 08/31] Use py file instead for extracting strings --- scripts/translations/extract-all | 5 +- scripts/translations/extract_strings.py | 135 ++++++++++++++++++++++++ scripts/translations/pirate.py | 108 ------------------- scripts/translations/pirateofdoom.py | 77 -------------- 4 files changed, 138 insertions(+), 187 deletions(-) create mode 100644 scripts/translations/extract_strings.py delete mode 100644 scripts/translations/pirate.py delete mode 100644 scripts/translations/pirateofdoom.py diff --git a/scripts/translations/extract-all b/scripts/translations/extract-all index 4c5fa09219..98e748f4ee 100755 --- a/scripts/translations/extract-all +++ b/scripts/translations/extract-all @@ -9,14 +9,15 @@ # dir=$1 dest=$2 +touch $dest for f in $(find -L "$dir" -name \*.py) do echo "Extracting strings from python file: $f" - xgettext --from-code=UTF-8 --language=python -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f + xgettext --from-code=UTF-8 --join-existing --sort-by-file --language=python -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f done for f in $(find -L "$dir" -name \*.qml) do echo "Extracting strings from qml file: $f" - xgettext --from-code=UTF-8 --join-existing --language=javascript -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f + xgettext --from-code=UTF-8 --join-existing --sort-by-file --language=javascript -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 
-ki18ncp:1c,2,3 -o $dest $f done diff --git a/scripts/translations/extract_strings.py b/scripts/translations/extract_strings.py new file mode 100644 index 0000000000..30608eeeb4 --- /dev/null +++ b/scripts/translations/extract_strings.py @@ -0,0 +1,135 @@ +# Copyright (c) 2023 UltiMaker. +# Cura is released under the terms of the LGPLv3 or higher. + +import argparse +import os +import subprocess +from os.path import isfile + +from pathlib import Path + +def extract_all_strings(root_path: Path, script_path: Path, translations_root_path: Path, all_strings_pot_path: Path): + """ Extracts all strings into a pot file with empty translations. + + Strings are extracted everywhere that i18n is used in python and qml in the project. It also checks the project + for JSON files with 'settings' in the root node and extracts these for translation as well. + + @param root_path: The root path of the project. This is the root for string searching. + @param script_path: The location of the bash scripts used for translating. + @param translations_root_path: The root of the translations folder (resources/i18n). + @param all_strings_pot_path: The path of the pot file where all strings will be outputted (resources/i8n/cura.pot). + """ + + # # Extract the setting strings from any json file with settings at its root + # extract_json_arguments = [ + # script_path.joinpath("extract-json"), + # root_path.joinpath("resources", "definitions"), + # translations_root_path + # ] + # subprocess.run(extract_json_arguments) + # + # Extract all strings from qml and py files + extract_qml_py_arguments = [ + script_path.joinpath("extract-all"), + root_path, + all_strings_pot_path + ] + subprocess.run(extract_qml_py_arguments) + + # Extract all the name and description from all plugins + extract_plugin_arguments = [ + script_path.joinpath("extract-plugins"), + root_path.joinpath("plugins"), + all_strings_pot_path + ] + subprocess.run(extract_plugin_arguments) + + # Convert the output file to utf-8 + convert_encoding_arguments = [ + "msgconv", + "--to-code=UTF-8", + all_strings_pot_path, + "-o", + all_strings_pot_path + ] + subprocess.run(convert_encoding_arguments) + + +def update_po_files_all_languages(translation_root_path: Path) -> None: + """ Updates all po files in translation_root_path with new strings mapped to blank translations. + + This will take all newly generated po files in the root of the translations path (i18n/cura.pot, i18n/fdmextruder.json.def.pot) + and merge them with the existing po files for every language. This will create new po files with empty translations + for all new words added to the project. + + @param translation_root_path: Root of the translations folder (resources/i18n). 
+ """ + new_pot_files = [] + + for file in os.listdir(translation_root_path): + path = translations_root_path.joinpath(file) + if path.suffix == ".pot": + new_pot_files.append(path) + print(new_pot_files) + + for directory, _, po_files in os.walk(translation_root_path): + print(directory) + print(po_files) + for pot in new_pot_files: + + po_filename = pot.name.rstrip("t") + if po_filename not in po_files: + continue # We only want to merge files that have matching names + + pot_file = pot + po_file = Path(directory, po_filename).absolute() + + # # Initialize the new po file + # init_files_arguments = [ + # "msginit", + # "--no-wrap", + # "--no-translator", + # "-l", language, + # "-i", pot_file, + # "-o", po_file + # ] + # + # subprocess.run(init_files_arguments) + + merge_files_arguments = [ + "msgmerge", + "--no-wrap", + "--no-fuzzy-matching", + "--update", + "--sort-by-file", # Sort by file location, this is better than pure sorting for translators + po_file, # po file that will be updated + pot_file # source of new strings + ] + + subprocess.run(merge_files_arguments) + + return + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Extract strings from project into .po files") + parser.add_argument("root_path", type=str, help="The root of the project to extract translatable strings from") + parser.add_argument("translation_file_name", type=str, help="The .pot file that all strings from python/qml files will be inserted into") + parser.add_argument("script_path", type=str, help="The path containing the scripts for translating files") + args = parser.parse_args() + + root_path = Path(args.root_path) # root of the project + script_path = Path(args.script_path) # location of bash scripts + + # Path where all translation file are + translations_root_path = root_path.joinpath("resources", "i18n") + translations_root_path.mkdir(parents=True, exist_ok=True) # Make sure we have an output path + + all_strings_pot_path = translations_root_path.joinpath(args.translation_file_name) # pot file containing all strings untranslated + + # Clear the output file, otherwise deleted strings will still be in the output. + if os.path.exists(all_strings_pot_path): + os.remove(all_strings_pot_path) + + extract_all_strings(root_path, script_path, translations_root_path, all_strings_pot_path) + update_po_files_all_languages(translations_root_path) diff --git a/scripts/translations/pirate.py b/scripts/translations/pirate.py deleted file mode 100644 index 6c0c170537..0000000000 --- a/scripts/translations/pirate.py +++ /dev/null @@ -1,108 +0,0 @@ -#Creates the Pirate translation files. - -import sys #To get command line arguments. -import pirateofdoom #Contains our translation dictionary. -import re #Case insensitive search and replace. -import random # Take random translation candidates - -pot_file = sys.argv[1] -po_file = sys.argv[2] - -#Translates English to Pirate. -def translate(english): - english = english.replace("&", "") #Pirates don't take shortcuts. - for eng, pir in pirateofdoom.pirate.items(): - matches = list(re.finditer(r"\b" + eng.lower() + r"\b", english.lower())) - matches = [match.start(0) for match in matches] - matches = reversed(sorted(matches)) - for position in matches: - #Make sure the case is correct. 
- uppercase = english[position].lower() != english[position] - - if isinstance(pir, list): - pir = random.choice(pir) - - first_character = pir[0] - rest_characters = pir[1:] - if uppercase: - first_character = first_character.upper() - else: - first_character = first_character.lower() - pir = first_character + rest_characters - - english = english[:position] + pir + english[position + len(eng):] - return english - -translations = {} - -last_id = "" -last_id_plural = "" -last_ctxt = "" -last_str = "" -state = "unknown" -with open(pot_file, encoding = "utf-8") as f: - for line in f: - if line.startswith("msgctxt"): - state = "ctxt" - if last_id != "": - translations[(last_ctxt, last_id, last_id_plural)] = last_str - last_ctxt = "" - last_id = "" - last_id_plural = "" - last_str = "" - elif line.startswith("msgid_plural"): - state = "idplural" - elif line.startswith("msgid"): - state = "id" - elif line.startswith("msgstr"): - state = "str" - - if line.count('"') >= 2: #There's an ID on this line! - line = line[line.find('"') + 1:] #Strip everything before the first ". - line = line[:line.rfind('"')] #And after the last ". - - if state == "ctxt": - last_ctxt += line #What's left is the context. - elif state == "idplural": - last_id_plural += line #Or the plural ID. - elif state == "id": - last_id += line #Or the ID. - elif state == "str": - last_str += line #Or the actual string. - -for key, _ in translations.items(): - context, english, english_plural = key - pirate = translate(english) - pirate_plural = translate(english_plural) - translations[key] = (pirate, pirate_plural) - -with open(po_file, "w", encoding = "utf-8") as f: - f.write("""msgid "" -msgstr "" -"Project-Id-Version: Pirate\\n" -"Report-Msgid-Bugs-To: plugins@ultimaker.com\\n" -"POT-Creation-Date: 1492\\n" -"PO-Revision-Date: 1492\\n" -"Last-Translator: Ghostkeeper and Awhiemstra\\n" -"Language-Team: Ghostkeeper and Awhiemstra\\n" -"Language: Pirate\\n" -"Lang-Code: en\\n" -"Country-Code: en_7S\\n" -"MIME-Version: 1.0\\n" -"Content-Type: text/plain; charset=UTF-8\\n" -"Content-Transfer-Encoding: 8bit\\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\\n" -""") - for key, value in translations.items(): - context, english, english_plural = key - pirate, pirate_plural = value - f.write('msgctxt "{context}"\n'.format(context = context)) - if english_plural == "": #No plurals in this item. - f.write('msgid "{english}"\n'.format(english = english)) - f.write('msgstr "{pirate}"\n'.format(pirate = pirate)) - else: - f.write('msgid "{english}"\n'.format(english = english)) - f.write('msgid_plural "{english_plural}"\n'.format(english_plural = english_plural)) - f.write('msgstr[0] "{pirate}"\n'.format(pirate = pirate)) - f.write('msgstr[1] "{pirate_plural}"\n'.format(pirate_plural = pirate_plural)) - f.write("\n") #Empty line. 
\ No newline at end of file diff --git a/scripts/translations/pirateofdoom.py b/scripts/translations/pirateofdoom.py deleted file mode 100644 index e8b8a28958..0000000000 --- a/scripts/translations/pirateofdoom.py +++ /dev/null @@ -1,77 +0,0 @@ -pirate = { - "build plate": "deck", - "buildplate": "deck", - "quit": "abandon ship", - "back": "avast", - "nozzle": "muzzle", - "nozzles": "muzzles", - "extruder": "cannon", - "extruders": "cannons", - "yes": "aye", - "no": "nay", - "loading": "haulin'", - "you": "ye", - "you're": "ye are", - "ok": "aye", - "machine": "ship", - "machines": "ships", - "mm/s²": "knots/s", - "mm/s": "knots", - "printer": "ship", - "printers": "ships", - "view": "spyglass", - "support": "peg legs", - "fan": "wind", - "file": "treasure", - "file(s)": "treasure(s)", - "files": "treasures", - "profile": "map", - "profiles": "maps", - "setting": "knob", - "settings": "knobs", - "shield": "sail", - "your": "yer", - "the": "th'", - "travel": "journey", - "wireframe": "ropey", - "wire": "rope", - "are": "be", - "is": "be", - "there": "thar", - "not": "nay", - "delete": "send to Davy Jones' locker", - "remove": "send to Davy Jones' locker", - "print": "scribble", - "printing": "scribblin'", - "load": "haul", - "connect to": "board", - "connecting": "boarding", - "collects": "hoards", - "prime tower": "buoy", - "change log": "captain's log", - "my": "me", - "removable drive": "life boat", - "print core": "scribbler", - "printcore": "scribbler", - "abort": ["maroon", "abandon"], - "aborting": ["marooning", "abandoning"], - "aborted": ["marooned", "abandoned"], - "connected": ["anchored", "moored"], - "developer": "scurvy dog", - "wizard": "cap'n", - "active leveling": "keelhauling", - "download": "plunder", - "downloaded": "plundered", - "caution hot surface": "fire in the hole!", - "type": "sort", - "spool": "barrel", - "surface": "lacquer", - "zigzag": "heave-to", - "bottom": "bilge", - "top": "deck", - "ironing": "deck swabbing", - "adhesion": "anchorage", - "blob": "barnacle", - "blobs": "barnacles", - "slice": "set sail", -} From ee97962c3192320be41e91a41973e695a1efccfe Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Wed, 18 Jan 2023 10:36:09 +0100 Subject: [PATCH 09/31] Remove unused scripts --- scripts/translations/extract-messages | 35 --------------------------- scripts/translations/extract-python | 12 --------- 2 files changed, 47 deletions(-) delete mode 100755 scripts/translations/extract-messages delete mode 100755 scripts/translations/extract-python diff --git a/scripts/translations/extract-messages b/scripts/translations/extract-messages deleted file mode 100755 index 31014c8e90..0000000000 --- a/scripts/translations/extract-messages +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -scriptdir=$(dirname $0) -basedir=$1 -catalogname=$2 - -# This script processes the source files using several other scripts to extract strings. -# The strings are extracted to $basedir/resources/i18n/ and then post processed. After that -# It generates english translation files and testing files that are pre- and sufficed with -# xx. These can be used by setting the LANGUAGE environment variable to x-test. -# -# This script uses extract-tr-strings to extract strings from QML files, extract-json to -# extract strings from JSON files and extract-python to extract strings from Python files. 
-# -mkdir -p $basedir/resources/i18n -$scriptdir/extract-json $basedir/resources/definitions/ $basedir/resources/i18n -$scriptdir/extract-all $basedir $basedir/resources/i18n/$catalogname.pot -$scriptdir/extract-plugins $basedir/plugins/ $basedir/resources/i18n/$catalogname.pot -msgconv --to-code=UTF-8 $basedir/resources/i18n/$catalogname.pot -o $basedir/resources/i18n/$catalogname.pot - -for pot in $basedir/resources/i18n/*.pot; do - filename=$(basename $pot) - - dir=$basedir/resources/i18n/en_US - mkdir -p $dir - po=$dir/${filename/.pot/.po} - msginit --no-translator -l en_US -i $pot -o $po - - #Auto-translate the translation files to Pirate. - dir=$basedir/resources/i18n/en_7S - mkdir -p $dir - po=$dir/${filename/.pot/.po} - python3 $scriptdir/pirate.py $pot $po - echo Created $po. -done diff --git a/scripts/translations/extract-python b/scripts/translations/extract-python deleted file mode 100755 index 34cf332f99..0000000000 --- a/scripts/translations/extract-python +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -# -# Use xgettext to extract all strings from a set of python files. -# Argument 1 is the directory to search for python files, argument 2 -# is the destination file. -# -# This script will extract strings marked using i18n or i18nc methods. -# See UM/i18n.py for the relevant methods. -# -dir=$1 -dest=$2 -xgettext --from-code=UTF-8 --language=python -ki18n:1 -ki18nc:1c,2 -o $dest $(find -L "$dir" -name \*.py) From 67301a5b5491f1bf698a15e336e30579df7f8bdf Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Wed, 18 Jan 2023 10:36:45 +0100 Subject: [PATCH 10/31] cleanup debugging changes script CURA-9814 --- scripts/translations/extract_strings.py | 39 +++++++++---------------- 1 file changed, 13 insertions(+), 26 deletions(-) diff --git a/scripts/translations/extract_strings.py b/scripts/translations/extract_strings.py index 30608eeeb4..2387959ba8 100644 --- a/scripts/translations/extract_strings.py +++ b/scripts/translations/extract_strings.py @@ -8,6 +8,7 @@ from os.path import isfile from pathlib import Path + def extract_all_strings(root_path: Path, script_path: Path, translations_root_path: Path, all_strings_pot_path: Path): """ Extracts all strings into a pot file with empty translations. @@ -20,14 +21,14 @@ def extract_all_strings(root_path: Path, script_path: Path, translations_root_pa @param all_strings_pot_path: The path of the pot file where all strings will be outputted (resources/i8n/cura.pot). 
""" - # # Extract the setting strings from any json file with settings at its root - # extract_json_arguments = [ - # script_path.joinpath("extract-json"), - # root_path.joinpath("resources", "definitions"), - # translations_root_path - # ] - # subprocess.run(extract_json_arguments) - # + # Extract the setting strings from any json file with settings at its root + extract_json_arguments = [ + script_path.joinpath("extract-json"), + root_path.joinpath("resources", "definitions"), + translations_root_path + ] + subprocess.run(extract_json_arguments) + # Extract all strings from qml and py files extract_qml_py_arguments = [ script_path.joinpath("extract-all"), @@ -84,18 +85,6 @@ def update_po_files_all_languages(translation_root_path: Path) -> None: pot_file = pot po_file = Path(directory, po_filename).absolute() - # # Initialize the new po file - # init_files_arguments = [ - # "msginit", - # "--no-wrap", - # "--no-translator", - # "-l", language, - # "-i", pot_file, - # "-o", po_file - # ] - # - # subprocess.run(init_files_arguments) - merge_files_arguments = [ "msgmerge", "--no-wrap", @@ -108,8 +97,6 @@ def update_po_files_all_languages(translation_root_path: Path) -> None: subprocess.run(merge_files_arguments) - return - if __name__ == "__main__": parser = argparse.ArgumentParser(description="Extract strings from project into .po files") @@ -121,15 +108,15 @@ if __name__ == "__main__": root_path = Path(args.root_path) # root of the project script_path = Path(args.script_path) # location of bash scripts - # Path where all translation file are + # All the translation files should be in this path. Each language in a folder corresponding with its lang code (resource/i18n/en_US/) translations_root_path = root_path.joinpath("resources", "i18n") translations_root_path.mkdir(parents=True, exist_ok=True) # Make sure we have an output path all_strings_pot_path = translations_root_path.joinpath(args.translation_file_name) # pot file containing all strings untranslated - - # Clear the output file, otherwise deleted strings will still be in the output. if os.path.exists(all_strings_pot_path): - os.remove(all_strings_pot_path) + os.remove(all_strings_pot_path) # Clear the output file, otherwise deleted strings will still be in the output. 
extract_all_strings(root_path, script_path, translations_root_path, all_strings_pot_path) + + update_po_files_all_languages(translations_root_path) From 18bfcd60baf753fa920b273e2817f222878eff35 Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Wed, 18 Jan 2023 17:20:55 +0100 Subject: [PATCH 11/31] Add the extractTool to the Cura conanfile CURA-9814 --- conanfile.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/conanfile.py b/conanfile.py index 9a2c1773ed..51616cf450 100644 --- a/conanfile.py +++ b/conanfile.py @@ -27,7 +27,7 @@ class CuraConan(ConanFile): # FIXME: Remove specific branch once merged to main # Extending the conanfile with the UMBaseConanfile https://github.com/Ultimaker/conan-ultimaker-index/tree/CURA-9177_Fix_CI_CD/recipes/umbase - python_requires = "umbase/[>=0.1.7]@ultimaker/stable" + python_requires = "umbase/[>=0.1.7]@ultimaker/stable", "translationextractor/[>=1.0.0]@ultimaker/stable" python_requires_extend = "umbase.UMBaseConanfile" options = { @@ -327,6 +327,11 @@ class CuraConan(ConanFile): if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str): # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement cpp_info = self.dependencies["gettext"].cpp_info + + # Extract all the new strings and update the existing po files + extractTool = self.python_requires["translationextractor"].module.ExtractTranslations(self) + extractTool.extract(self.source_path, self.source_path.joinpath("resources", "i18n"), "cura.pot") + for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"): pot_file = self.source_path.joinpath("resources", "i18n", po_file.with_suffix('.pot').name) mkdir(self, str(unix_path(self, pot_file.parent))) From 5186b234dc94a037d690506c0df61a7609ddba7b Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Wed, 18 Jan 2023 17:33:39 +0100 Subject: [PATCH 12/31] Remove redundant code for merging po files CURA-9814 --- conanfile.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/conanfile.py b/conanfile.py index 51616cf450..c3de62f1b2 100644 --- a/conanfile.py +++ b/conanfile.py @@ -326,18 +326,10 @@ class CuraConan(ConanFile): # Update the po files if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str): # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement - cpp_info = self.dependencies["gettext"].cpp_info - # Extract all the new strings and update the existing po files extractTool = self.python_requires["translationextractor"].module.ExtractTranslations(self) extractTool.extract(self.source_path, self.source_path.joinpath("resources", "i18n"), "cura.pot") - for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"): - pot_file = self.source_path.joinpath("resources", "i18n", po_file.with_suffix('.pot').name) - mkdir(self, str(unix_path(self, pot_file.parent))) - self.run(f"{cpp_info.bindirs[0]}/msgmerge --no-wrap --no-fuzzy-matching -width=140 -o {po_file} {po_file} {pot_file}", - env = "conanbuild", ignore_errors = True) - def imports(self): self.copy("CuraEngine.exe", root_package = "curaengine", src = "@bindirs", dst = "", keep_path = False) self.copy("CuraEngine", root_package = "curaengine", src = "@bindirs", dst = "", keep_path = False) From 5af1f6e2767707aa98511ac53d9318514398b3d4 Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Thu, 19 Jan 2023 13:32:58 +0100 Subject: [PATCH 13/31] 
Rename extract function to generate to match better with conan style. CURA-9814 --- conanfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conanfile.py b/conanfile.py index c3de62f1b2..38e9cdce21 100644 --- a/conanfile.py +++ b/conanfile.py @@ -327,8 +327,8 @@ class CuraConan(ConanFile): if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str): # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement # Extract all the new strings and update the existing po files - extractTool = self.python_requires["translationextractor"].module.ExtractTranslations(self) - extractTool.extract(self.source_path, self.source_path.joinpath("resources", "i18n"), "cura.pot") + extractTool = self.python_requires["translationextractor"].module.ExtractTranslations(self, self.source_path, self.source_path.joinpath("resources", "i18n"), "cura.pot") + extractTool.generate() def imports(self): self.copy("CuraEngine.exe", root_package = "curaengine", src = "@bindirs", dst = "", keep_path = False) From 0f52521be203932bff874f609c5626496b94ab49 Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Thu, 19 Jan 2023 14:02:06 +0100 Subject: [PATCH 14/31] Remove source path from extract tool creation. This can be derived from "self" that is being passed in. CURA-9814 --- conanfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conanfile.py b/conanfile.py index 38e9cdce21..74c25fd2e2 100644 --- a/conanfile.py +++ b/conanfile.py @@ -327,7 +327,7 @@ class CuraConan(ConanFile): if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str): # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement # Extract all the new strings and update the existing po files - extractTool = self.python_requires["translationextractor"].module.ExtractTranslations(self, self.source_path, self.source_path.joinpath("resources", "i18n"), "cura.pot") + extractTool = self.python_requires["translationextractor"].module.ExtractTranslations(self, self.source_path.joinpath("resources", "i18n"), "cura.pot") extractTool.generate() def imports(self): From c99ddb30e57f6b6d1471bf6e593dd972cedfe150 Mon Sep 17 00:00:00 2001 From: Joey de l'Arago Date: Fri, 20 Jan 2023 11:49:34 +0100 Subject: [PATCH 15/31] Remove unused scripts CURA-9814 --- scripts/translations/createjsoncontext.py | 103 ----------------- scripts/translations/createkeypair.py | 47 -------- scripts/translations/createplugincontext.py | 72 ------------ scripts/translations/extract-all | 23 ---- scripts/translations/extract-json | 42 ------- scripts/translations/extract-plugins | 14 --- scripts/translations/extract_strings.py | 122 -------------------- 7 files changed, 423 deletions(-) delete mode 100644 scripts/translations/createjsoncontext.py delete mode 100644 scripts/translations/createkeypair.py delete mode 100644 scripts/translations/createplugincontext.py delete mode 100755 scripts/translations/extract-all delete mode 100755 scripts/translations/extract-json delete mode 100755 scripts/translations/extract-plugins delete mode 100644 scripts/translations/extract_strings.py diff --git a/scripts/translations/createjsoncontext.py b/scripts/translations/createjsoncontext.py deleted file mode 100644 index a4f02012d4..0000000000 --- a/scripts/translations/createjsoncontext.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -""" 
-Copyright 2014 Burkhard Lück - -Permission to use, copy, modify, and distribute this software -and its documentation for any purpose and without fee is hereby -granted, provided that the above copyright notice appear in all -copies and that both that the copyright notice and this -permission notice and warranty disclaimer appear in supporting -documentation, and that the name of the author not be used in -advertising or publicity pertaining to distribution of the -software without specific, written prior permission. - -The author disclaim all warranties with regard to this -software, including all implied warranties of merchantability -and fitness. In no event shall the author be liable for any -special, indirect or consequential damages or any damages -whatsoever resulting from loss of use, data or profits, whether -in an action of contract, negligence or other tortious action, -arising out of or in connection with the use or performance of -this software. -""" - -# This script generates a POT file from a JSON settings file. It -# has been adapted from createjsoncontext.py of KDE's translation -# scripts. It extracts the "label" and "description" values of -# the JSON file using the structure as used by Uranium settings files. - -import sys -import os -import json -import time -import os.path -import collections - -debugoutput = False #set True to print debug output in scripty's logs - -basedir = sys.argv[-1] -pottxt = "" - -def appendMessage(file, setting, field, value): - global pottxt - pottxt += "#: {0}\nmsgctxt \"{1} {2}\"\nmsgid \"{3}\"\nmsgstr \"\"\n\n".format(file, setting, field, value.replace("\n", "\\n").replace("\"", "\\\"")) - -def processSettings(file, settings): - for name, value in settings.items(): - appendMessage(file, name, "label", value["label"]) - if "description" in value: - appendMessage(file, name, "description", value["description"]) - - if "warning_description" in value: - appendMessage(file, name, "warning_description", value["warning_description"]) - - if "error_description" in value: - appendMessage(file, name, "error_description", value["error_description"]) - - if "options" in value: - for item, description in value["options"].items(): - appendMessage(file, name, "option {0}".format(item), description) - - if "children" in value: - processSettings(file, value["children"]) - -def potheader(): - headertxt = "#, fuzzy\n" - headertxt += "msgid \"\"\n" - headertxt += "msgstr \"\"\n" - headertxt += "\"Project-Id-Version: Uranium json setting files\\n\"\n" - headertxt += "\"Report-Msgid-Bugs-To: plugins@ultimaker.com\\n\"\n" - headertxt += "\"POT-Creation-Date: %s+0000\\n\"\n" %time.strftime("%Y-%m-%d %H:%M") - headertxt += "\"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n\"\n" - headertxt += "\"Last-Translator: FULL NAME \\n\"\n" - headertxt += "\"Language-Team: LANGUAGE\\n\"\n" - headertxt += "\"MIME-Version: 1.0\\n\"\n" - headertxt += "\"Content-Type: text/plain; charset=UTF-8\\n\"\n" - headertxt += "\"Content-Transfer-Encoding: 8bit\\n\"\n" - headertxt += "\n" - return headertxt - -if len(sys.argv) < 3: - print("wrong number of args: %s" % sys.argv) - print("\nUsage: python %s jsonfilenamelist basedir" % os.path.basename(sys.argv[0])) -else: - jsonfilename = sys.argv[1] - basedir = sys.argv[2] - outputfilename = sys.argv[3] - - with open(jsonfilename, "r", encoding = "utf-8") as data_file: - error = False - - jsondatadict = json.load(data_file, object_pairs_hook=collections.OrderedDict) - if "settings" not in jsondatadict: - print(f"Nothing to translate in file: 
{jsondatadict}") - exit(1) - - processSettings(jsonfilename.replace(basedir, ""), jsondatadict["settings"]) - - if pottxt != "": - with open(outputfilename, "w", encoding = "utf-8") as output_file: - output_file.write(potheader()) - output_file.write(pottxt) diff --git a/scripts/translations/createkeypair.py b/scripts/translations/createkeypair.py deleted file mode 100644 index e01c9c2a0b..0000000000 --- a/scripts/translations/createkeypair.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -from typing import Optional -import sys - -from UM.Trust import TrustBasics - -# Default arguments, if arguments to the script are omitted, these values are used: -DEFAULT_PRIVATE_KEY_PATH = "./private_key.pem" -DEFAULT_PUBLIC_KEY_PATH = "./public_key.pem" -DEFAULT_PASSWORD = "" - - -def createAndStoreNewKeyPair(private_filename: str, public_filename: str, optional_password: Optional[str]) -> None: - """Creates a new public and private key, and saves them to the provided filenames. - - See also 'Trust.py' in the main library and the related scripts; 'signfile.py', 'signfolder.py' in this folder. - - :param private_filename: Filename to save the private key to. - :param public_filename: Filename to save the public key to. - :param optional_password: Private keys can have a password (or not). - """ - - password = None if optional_password == "" else optional_password - private_key, public_key = TrustBasics.generateNewKeyPair() - TrustBasics.saveKeyPair(private_key, private_filename, public_filename, password) - - -def mainfunc(): - """Arguments: - - `-k ` or `--private ` will store the generated private key to - `-p ` or `--public ` will store the generated public key to - `-w ` or `--password ` will give the private key a password (none if omitted, which is default) - """ - - parser = argparse.ArgumentParser() - parser.add_argument("-k", "--private", type = str, default = DEFAULT_PRIVATE_KEY_PATH) - parser.add_argument("-p", "--public", type = str, default = DEFAULT_PUBLIC_KEY_PATH) - parser.add_argument("-w", "--password", type = str, default = DEFAULT_PASSWORD) - args = parser.parse_args() - createAndStoreNewKeyPair(args.private, args.public, args.password) - - -if __name__ == "__main__": - sys.exit(mainfunc()) diff --git a/scripts/translations/createplugincontext.py b/scripts/translations/createplugincontext.py deleted file mode 100644 index 25a086357e..0000000000 --- a/scripts/translations/createplugincontext.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -""" -Copyright 2014 Burkhard Lück - -Permission to use, copy, modify, and distribute this software -and its documentation for any purpose and without fee is hereby -granted, provided that the above copyright notice appear in all -copies and that both that the copyright notice and this -permission notice and warranty disclaimer appear in supporting -documentation, and that the name of the author not be used in -advertising or publicity pertaining to distribution of the -software without specific, written prior permission. - -The author disclaim all warranties with regard to this -software, including all implied warranties of merchantability -and fitness. In no event shall the author be liable for any -special, indirect or consequential damages or any damages -whatsoever resulting from loss of use, data or profits, whether -in an action of contract, negligence or other tortious action, -arising out of or in connection with the use or performance of -this software. 
-""" - -# This script generates a POT file from a JSON settings file. It -# has been adapted from createjsoncontext.py of KDE's translation -# scripts. It extracts the "label" and "description" values of -# the JSON file using the structure as used by Uranium settings files. - -import sys -import os.path -import collections -import json - -debugoutput = False #set True to print debug output in scripty's logs - -basedir = sys.argv[-1] -pottxt = "" - - -def appendMessage(file, field, value): - global pottxt - pottxt += "#: {0}\nmsgctxt \"{1}\"\nmsgid \"{2}\"\nmsgstr \"\"\n\n".format(file, field, value.replace("\n", "\\n").replace("\"", "\\\"")) - - -if len(sys.argv) < 3: - print("wrong number of args: %s" % sys.argv) - print("\nUsage: python %s jsonfilenamelist basedir" % os.path.basename(sys.argv[0])) -else: - json_filename = sys.argv[1] - basedir = sys.argv[2] - output_filename = sys.argv[3] - - with open(json_filename, "r", encoding = "utf-8") as data_file: - error = False - - jsondatadict = json.load(data_file, object_pairs_hook=collections.OrderedDict) - if "name" not in jsondatadict or ("api" not in jsondatadict and "supported_sdk_versions" not in jsondatadict) or "version" not in jsondatadict: - print("The plugin.json file found on %s is invalid, ignoring it" % json_filename) - exit(1) - - file = json_filename.replace(basedir, "") - - if "description" in jsondatadict: - appendMessage(file, "description", jsondatadict["description"]) - if "name" in jsondatadict: - appendMessage(file, "name", jsondatadict["name"]) - - if pottxt != "": - with open(output_filename, "a", encoding = "utf-8") as output_file: - output_file.write(pottxt) diff --git a/scripts/translations/extract-all b/scripts/translations/extract-all deleted file mode 100755 index 98e748f4ee..0000000000 --- a/scripts/translations/extract-all +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -# -# Use xgettext to extract all strings from a set of python files. -# Argument 1 is the directory to search for python files, argument 2 -# is the destination file. -# -# This script will extract strings marked using i18n or i18nc methods. -# See UM/i18n.py for the relevant methods. -# -dir=$1 -dest=$2 -touch $dest -for f in $(find -L "$dir" -name \*.py) -do - echo "Extracting strings from python file: $f" - xgettext --from-code=UTF-8 --join-existing --sort-by-file --language=python -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f -done - -for f in $(find -L "$dir" -name \*.qml) -do - echo "Extracting strings from qml file: $f" - xgettext --from-code=UTF-8 --join-existing --sort-by-file --language=javascript -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f -done diff --git a/scripts/translations/extract-json b/scripts/translations/extract-json deleted file mode 100755 index 00cef1e866..0000000000 --- a/scripts/translations/extract-json +++ /dev/null @@ -1,42 +0,0 @@ -#! /bin/bash - -# Extract strings from all JSON files in a directory into files with matching names ending with .pot. -# -# This script will extract strings from all JSON files in the directory -# passed as first argument. The second argument is the destination -# directory for the extracted message file. -# -# This script uses createjsoncontext to generate the actual message file -# from the JSON file. -# -# This script is based on handle_json_files.sh from KDE's translation -# scripts. 
-# handle_json_files.sh is copyright 2014 Burkhard Lück -scriptdir=$(dirname $0) - -extract() { - basedir=$1 - dest=$2 - file=$3 - - python3 $scriptdir/createjsoncontext.py $file $basedir json.$$.tmp - if test $? -eq 1; then - return - fi - - echo "Extracted messages from $file" - - msguniq --to-code=UTF-8 -o json.$$ json.$$.tmp - if test -f json.$$; then - destfile="$dest/$(basename $file).pot" - mv json.$$ $destfile - fi - rm -f json.$$ json.$$.tmp -} - -dir=$1; shift -dest=$1; shift - -for file in $(find -L "$dir" -name *.json | grep -v 'tests'); do - extract $dir $dest $file -done diff --git a/scripts/translations/extract-plugins b/scripts/translations/extract-plugins deleted file mode 100755 index d5b3674968..0000000000 --- a/scripts/translations/extract-plugins +++ /dev/null @@ -1,14 +0,0 @@ -#! /bin/bash - -# Extract strings from all plugins -# - -scriptdir=$(dirname $0) -dir=$1; shift -dest=$1; shift - -for file in $(find -L "$dir" -name plugin.json | grep -v 'tests'); do - python3 $scriptdir/createplugincontext.py $file $dir $dest -done - - diff --git a/scripts/translations/extract_strings.py b/scripts/translations/extract_strings.py deleted file mode 100644 index 2387959ba8..0000000000 --- a/scripts/translations/extract_strings.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright (c) 2023 UltiMaker. -# Cura is released under the terms of the LGPLv3 or higher. - -import argparse -import os -import subprocess -from os.path import isfile - -from pathlib import Path - - -def extract_all_strings(root_path: Path, script_path: Path, translations_root_path: Path, all_strings_pot_path: Path): - """ Extracts all strings into a pot file with empty translations. - - Strings are extracted everywhere that i18n is used in python and qml in the project. It also checks the project - for JSON files with 'settings' in the root node and extracts these for translation as well. - - @param root_path: The root path of the project. This is the root for string searching. - @param script_path: The location of the bash scripts used for translating. - @param translations_root_path: The root of the translations folder (resources/i18n). - @param all_strings_pot_path: The path of the pot file where all strings will be outputted (resources/i8n/cura.pot). - """ - - # Extract the setting strings from any json file with settings at its root - extract_json_arguments = [ - script_path.joinpath("extract-json"), - root_path.joinpath("resources", "definitions"), - translations_root_path - ] - subprocess.run(extract_json_arguments) - - # Extract all strings from qml and py files - extract_qml_py_arguments = [ - script_path.joinpath("extract-all"), - root_path, - all_strings_pot_path - ] - subprocess.run(extract_qml_py_arguments) - - # Extract all the name and description from all plugins - extract_plugin_arguments = [ - script_path.joinpath("extract-plugins"), - root_path.joinpath("plugins"), - all_strings_pot_path - ] - subprocess.run(extract_plugin_arguments) - - # Convert the output file to utf-8 - convert_encoding_arguments = [ - "msgconv", - "--to-code=UTF-8", - all_strings_pot_path, - "-o", - all_strings_pot_path - ] - subprocess.run(convert_encoding_arguments) - - -def update_po_files_all_languages(translation_root_path: Path) -> None: - """ Updates all po files in translation_root_path with new strings mapped to blank translations. 
- - This will take all newly generated po files in the root of the translations path (i18n/cura.pot, i18n/fdmextruder.json.def.pot) - and merge them with the existing po files for every language. This will create new po files with empty translations - for all new words added to the project. - - @param translation_root_path: Root of the translations folder (resources/i18n). - """ - new_pot_files = [] - - for file in os.listdir(translation_root_path): - path = translations_root_path.joinpath(file) - if path.suffix == ".pot": - new_pot_files.append(path) - print(new_pot_files) - - for directory, _, po_files in os.walk(translation_root_path): - print(directory) - print(po_files) - for pot in new_pot_files: - - po_filename = pot.name.rstrip("t") - if po_filename not in po_files: - continue # We only want to merge files that have matching names - - pot_file = pot - po_file = Path(directory, po_filename).absolute() - - merge_files_arguments = [ - "msgmerge", - "--no-wrap", - "--no-fuzzy-matching", - "--update", - "--sort-by-file", # Sort by file location, this is better than pure sorting for translators - po_file, # po file that will be updated - pot_file # source of new strings - ] - - subprocess.run(merge_files_arguments) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Extract strings from project into .po files") - parser.add_argument("root_path", type=str, help="The root of the project to extract translatable strings from") - parser.add_argument("translation_file_name", type=str, help="The .pot file that all strings from python/qml files will be inserted into") - parser.add_argument("script_path", type=str, help="The path containing the scripts for translating files") - args = parser.parse_args() - - root_path = Path(args.root_path) # root of the project - script_path = Path(args.script_path) # location of bash scripts - - # All the translation files should be in this path. Each language in a folder corresponding with its lang code (resource/i18n/en_US/) - translations_root_path = root_path.joinpath("resources", "i18n") - translations_root_path.mkdir(parents=True, exist_ok=True) # Make sure we have an output path - - all_strings_pot_path = translations_root_path.joinpath(args.translation_file_name) # pot file containing all strings untranslated - if os.path.exists(all_strings_pot_path): - os.remove(all_strings_pot_path) # Clear the output file, otherwise deleted strings will still be in the output. - - extract_all_strings(root_path, script_path, translations_root_path, all_strings_pot_path) - - - update_po_files_all_languages(translations_root_path) From 0b1b19f0b19b9ac4a277a1856842f79189938867 Mon Sep 17 00:00:00 2001 From: Rijk van Manen Date: Tue, 31 Jan 2023 13:49:51 +0100 Subject: [PATCH 16/31] increase cool_min_temperature By default the cool_min_temperature is 15deg below print temperature. However, for PP and TPU this should be 10deg below print temperature. This is already changed for the other ultimaker printers, but I forget the UM3. 
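For illustration (the 220 °C figure is only a nominal example, not a value taken from these profiles): at a PP print temperature of 220 °C the minimum cooling temperature rises from 205 °C (15 deg below) to 210 °C (10 deg below). The override added to each affected UM3 PP and TPU quality profile below is, in Cura's .inst.cfg formula syntax:

    cool_min_temperature = =material_print_temperature - 10
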
PP-71 --- .../quality/ultimaker3/um3_aa0.25_PP_Normal_Quality.inst.cfg | 1 + resources/quality/ultimaker3/um3_aa0.4_PP_Draft_Print.inst.cfg | 1 + resources/quality/ultimaker3/um3_aa0.4_PP_Fast_Print.inst.cfg | 1 + .../quality/ultimaker3/um3_aa0.4_PP_Normal_Quality.inst.cfg | 1 + resources/quality/ultimaker3/um3_aa0.4_TPU_Draft_Print.inst.cfg | 1 + resources/quality/ultimaker3/um3_aa0.4_TPU_Fast_Print.inst.cfg | 1 + .../quality/ultimaker3/um3_aa0.4_TPU_Normal_Quality.inst.cfg | 1 + resources/quality/ultimaker3/um3_aa0.8_PP_Draft_Print.inst.cfg | 1 + .../quality/ultimaker3/um3_aa0.8_PP_Superdraft_Print.inst.cfg | 1 + .../quality/ultimaker3/um3_aa0.8_PP_Verydraft_Print.inst.cfg | 1 + resources/quality/ultimaker3/um3_aa0.8_TPU_Draft_Print.inst.cfg | 1 + .../quality/ultimaker3/um3_aa0.8_TPU_Superdraft_Print.inst.cfg | 1 + .../quality/ultimaker3/um3_aa0.8_TPU_Verydraft_Print.inst.cfg | 1 + 13 files changed, 13 insertions(+) diff --git a/resources/quality/ultimaker3/um3_aa0.25_PP_Normal_Quality.inst.cfg b/resources/quality/ultimaker3/um3_aa0.25_PP_Normal_Quality.inst.cfg index 80b49b7f87..bef2487fc7 100644 --- a/resources/quality/ultimaker3/um3_aa0.25_PP_Normal_Quality.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.25_PP_Normal_Quality.inst.cfg @@ -14,6 +14,7 @@ weight = 0 [values] brim_width = 10 +cool_min_temperature = =material_print_temperature - 10 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'tetrahedral' infill_wipe_dist = 0.1 machine_min_cool_heat_time_window = 15 diff --git a/resources/quality/ultimaker3/um3_aa0.4_PP_Draft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.4_PP_Draft_Print.inst.cfg index 794920aab0..20da0cdc4e 100644 --- a/resources/quality/ultimaker3/um3_aa0.4_PP_Draft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.4_PP_Draft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -2 [values] brim_width = 20 +cool_min_temperature = =material_print_temperature - 10 infill_overlap = 0 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'tetrahedral' infill_wipe_dist = 0.1 diff --git a/resources/quality/ultimaker3/um3_aa0.4_PP_Fast_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.4_PP_Fast_Print.inst.cfg index 39b2bec1b2..c906592150 100644 --- a/resources/quality/ultimaker3/um3_aa0.4_PP_Fast_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.4_PP_Fast_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -1 [values] brim_width = 20 +cool_min_temperature = =material_print_temperature - 10 infill_overlap = 0 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'tetrahedral' infill_wipe_dist = 0.1 diff --git a/resources/quality/ultimaker3/um3_aa0.4_PP_Normal_Quality.inst.cfg b/resources/quality/ultimaker3/um3_aa0.4_PP_Normal_Quality.inst.cfg index 1d57528c95..2fd61d6bc5 100644 --- a/resources/quality/ultimaker3/um3_aa0.4_PP_Normal_Quality.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.4_PP_Normal_Quality.inst.cfg @@ -13,6 +13,7 @@ weight = 0 [values] brim_width = 20 +cool_min_temperature = =material_print_temperature - 10 infill_overlap = 0 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'tetrahedral' infill_wipe_dist = 0.1 diff --git a/resources/quality/ultimaker3/um3_aa0.4_TPU_Draft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.4_TPU_Draft_Print.inst.cfg index c1e52c22ab..c49c389d89 100644 --- a/resources/quality/ultimaker3/um3_aa0.4_TPU_Draft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.4_TPU_Draft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -2 [values] brim_width = 8.75 +cool_min_temperature = 
=material_print_temperature - 10 gradual_infill_step_height = =5 * layer_height infill_overlap = 0 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'cross_3d' diff --git a/resources/quality/ultimaker3/um3_aa0.4_TPU_Fast_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.4_TPU_Fast_Print.inst.cfg index 9d1ef67fe7..c6e9ad5c8b 100644 --- a/resources/quality/ultimaker3/um3_aa0.4_TPU_Fast_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.4_TPU_Fast_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -1 [values] brim_width = 8.75 +cool_min_temperature = =material_print_temperature - 10 gradual_infill_step_height = =5 * layer_height infill_overlap = 0 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'cross_3d' diff --git a/resources/quality/ultimaker3/um3_aa0.4_TPU_Normal_Quality.inst.cfg b/resources/quality/ultimaker3/um3_aa0.4_TPU_Normal_Quality.inst.cfg index 5b98310ec1..008d4f9f5a 100644 --- a/resources/quality/ultimaker3/um3_aa0.4_TPU_Normal_Quality.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.4_TPU_Normal_Quality.inst.cfg @@ -13,6 +13,7 @@ weight = 0 [values] brim_width = 8.75 +cool_min_temperature = =material_print_temperature - 10 gradual_infill_step_height = =5 * layer_height infill_overlap = 0 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'cross_3d' diff --git a/resources/quality/ultimaker3/um3_aa0.8_PP_Draft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.8_PP_Draft_Print.inst.cfg index 6e4412de37..780ecf8b63 100644 --- a/resources/quality/ultimaker3/um3_aa0.8_PP_Draft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.8_PP_Draft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -2 [values] brim_width = 25 +cool_min_temperature = =material_print_temperature - 10 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'tetrahedral' material_print_temperature = =default_material_print_temperature - 2 multiple_mesh_overlap = 0.2 diff --git a/resources/quality/ultimaker3/um3_aa0.8_PP_Superdraft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.8_PP_Superdraft_Print.inst.cfg index be1ca10f89..77ba4b5a83 100644 --- a/resources/quality/ultimaker3/um3_aa0.8_PP_Superdraft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.8_PP_Superdraft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -4 [values] brim_width = 25 +cool_min_temperature = =material_print_temperature - 10 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'tetrahedral' material_print_temperature = =default_material_print_temperature + 2 multiple_mesh_overlap = 0.2 diff --git a/resources/quality/ultimaker3/um3_aa0.8_PP_Verydraft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.8_PP_Verydraft_Print.inst.cfg index a11e14d732..30a7870aad 100644 --- a/resources/quality/ultimaker3/um3_aa0.8_PP_Verydraft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.8_PP_Verydraft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -3 [values] brim_width = 25 +cool_min_temperature = =material_print_temperature - 10 infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'tetrahedral' layer_height = 0.3 multiple_mesh_overlap = 0.2 diff --git a/resources/quality/ultimaker3/um3_aa0.8_TPU_Draft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.8_TPU_Draft_Print.inst.cfg index cc77f6b7d9..90a451e472 100644 --- a/resources/quality/ultimaker3/um3_aa0.8_TPU_Draft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.8_TPU_Draft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -2 [values] brim_width = 8.75 +cool_min_temperature = =material_print_temperature - 10 
gradual_infill_step_height = =4 * layer_height infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'cross_3d' infill_sparse_density = 15 diff --git a/resources/quality/ultimaker3/um3_aa0.8_TPU_Superdraft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.8_TPU_Superdraft_Print.inst.cfg index c0125187cd..d1d08ee39f 100644 --- a/resources/quality/ultimaker3/um3_aa0.8_TPU_Superdraft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.8_TPU_Superdraft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -4 [values] brim_width = 8.75 +cool_min_temperature = =material_print_temperature - 10 gradual_infill_step_height = =4 * layer_height infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'cross_3d' infill_sparse_density = 15 diff --git a/resources/quality/ultimaker3/um3_aa0.8_TPU_Verydraft_Print.inst.cfg b/resources/quality/ultimaker3/um3_aa0.8_TPU_Verydraft_Print.inst.cfg index 990d216966..421fce0265 100644 --- a/resources/quality/ultimaker3/um3_aa0.8_TPU_Verydraft_Print.inst.cfg +++ b/resources/quality/ultimaker3/um3_aa0.8_TPU_Verydraft_Print.inst.cfg @@ -13,6 +13,7 @@ weight = -3 [values] brim_width = 8.75 +cool_min_temperature = =material_print_temperature - 10 gradual_infill_step_height = =4 * layer_height infill_pattern = ='zigzag' if infill_sparse_density > 80 else 'cross_3d' infill_sparse_density = 15 From eb12a636cc12a0dd2f4240be3940cded7d2fae3e Mon Sep 17 00:00:00 2001 From: Jelle Spijker Date: Wed, 1 Feb 2023 07:55:16 +0100 Subject: [PATCH 17/31] Add 5.4.0-alpha to conandata main --- conandata.yml | 766 +++++++++++++++++++++----------------------------- 1 file changed, 328 insertions(+), 438 deletions(-) diff --git a/conandata.yml b/conandata.yml index b615341776..8b7d9b7b5d 100644 --- a/conandata.yml +++ b/conandata.yml @@ -10,443 +10,333 @@ # requirements (use the /(latest)@ultimaker/testing) # # Subject to change in the future! 
-"5.3.0-beta": - requirements: - - "pyarcus/5.2.2" - - "curaengine/(latest)@ultimaker/stable" - - "pysavitar/5.2.2" - - "pynest2d/5.2.2" - - "uranium/(latest)@ultimaker/stable" - - "fdm_materials/(latest)@ultimaker/stable" - - "cura_binary_data/(latest)@ultimaker/stable" - - "cpython/3.10.4" - internal_requirements: - - "fdm_materials_private/(latest)@ultimaker/testing" - - "cura_private_data/(latest)@ultimaker/testing" - runinfo: - entrypoint: "cura_app.py" - pyinstaller: - datas: - cura_plugins: - package: "cura" - src: "plugins" - dst: "share/cura/plugins" - cura_resources: - package: "cura" - src: "resources" - dst: "share/cura/resources" - cura_private_data: - package: "cura_private_data" - src: "resources" - dst: "share/cura/resources" - internal: true - cura_private_data_plugins: - package: "cura_private_data" - src: "plugins" - dst: "share/cura/plugins" - internal: true - uranium_plugins: - package: "uranium" - src: "plugins" - dst: "share/uranium/plugins" - uranium_resources: - package: "uranium" - src: "resources" - dst: "share/uranium/resources" - uranium_um_qt_qml_um: - package: "uranium" - src: "site-packages/UM/Qt/qml/UM" - dst: "PyQt6/Qt6/qml/UM" - cura_binary_data: - package: "cura_binary_data" - src: "resources/cura/resources" - dst: "share/cura/resources" - uranium_binary_data: - package: "cura_binary_data" - src: "resources/uranium/resources" - dst: "share/uranium/resources" - windows_binary_data: - package: "cura_binary_data" - src: "windows" - dst: "share/windows" - fdm_materials: - package: "fdm_materials" - src: "materials" - dst: "share/cura/resources/materials" - fdm_materials_private: - package: "fdm_materials_private" - src: "resources/materials" - dst: "share/cura/resources/materials" - internal: true - tcl: - package: "tcl" - src: "lib/tcl8.6" - dst: "tcl" - tk: - package: "tk" - src: "lib/tk8.6" - dst: "tk" - binaries: - curaengine: - package: "curaengine" - src: "bin" - dst: "." 
- binary: "CuraEngine" - hiddenimports: - - "pySavitar" - - "pyArcus" - - "pynest2d" - - "PyQt6" - - "PyQt6.QtNetwork" - - "PyQt6.sip" - - "logging.handlers" - - "zeroconf" - - "fcntl" - - "stl" - - "serial" - collect_all: - - "cura" - - "UM" - - "serial" - - "Charon" - - "sqlite3" - - "trimesh" - - "win32ctypes" - - "PyQt6" - - "PyQt6.QtNetwork" - - "PyQt6.sip" - - "stl" - icon: - Windows: "./icons/Cura.ico" - Macos: "./icons/cura.icns" - Linux: "./icons/cura-128.png" +"5.4.0-alpha": + requirements: + - "pyarcus/5.2.2" + - "curaengine/(latest)@ultimaker/testing" + - "pysavitar/5.2.2" + - "pynest2d/5.2.2" + - "uranium/(latest)@ultimaker/testing" + - "fdm_materials/(latest)@ultimaker/testing" + - "cura_binary_data/(latest)@ultimaker/testing" + - "cpython/3.10.4" + internal_requirements: + - "fdm_materials_private/(latest)@ultimaker/testing" + - "cura_private_data/(latest)@ultimaker/testing" + runinfo: + entrypoint: "cura_app.py" + pyinstaller: + datas: + cura_plugins: + package: "cura" + src: "plugins" + dst: "share/cura/plugins" + cura_resources: + package: "cura" + src: "resources" + dst: "share/cura/resources" + cura_private_data: + package: "cura_private_data" + src: "resources" + dst: "share/cura/resources" + internal: true + cura_private_data_plugins: + package: "cura_private_data" + src: "plugins" + dst: "share/cura/plugins" + internal: true + uranium_plugins: + package: "uranium" + src: "plugins" + dst: "share/uranium/plugins" + uranium_resources: + package: "uranium" + src: "resources" + dst: "share/uranium/resources" + uranium_um_qt_qml_um: + package: "uranium" + src: "site-packages/UM/Qt/qml/UM" + dst: "PyQt6/Qt6/qml/UM" + cura_binary_data: + package: "cura_binary_data" + src: "resources/cura/resources" + dst: "share/cura/resources" + uranium_binary_data: + package: "cura_binary_data" + src: "resources/uranium/resources" + dst: "share/uranium/resources" + windows_binary_data: + package: "cura_binary_data" + src: "windows" + dst: "share/windows" + fdm_materials: + package: "fdm_materials" + src: "materials" + dst: "share/cura/resources/materials" + fdm_materials_private: + package: "fdm_materials_private" + src: "resources/materials" + dst: "share/cura/resources/materials" + internal: true + tcl: + package: "tcl" + src: "lib/tcl8.6" + dst: "tcl" + tk: + package: "tk" + src: "lib/tk8.6" + dst: "tk" + binaries: + curaengine: + package: "curaengine" + src: "bin" + dst: "." 
+ binary: "CuraEngine" + hiddenimports: + - "pySavitar" + - "pyArcus" + - "pynest2d" + - "PyQt6" + - "PyQt6.QtNetwork" + - "PyQt6.sip" + - "logging.handlers" + - "zeroconf" + - "fcntl" + - "stl" + - "serial" + collect_all: + - "cura" + - "UM" + - "serial" + - "Charon" + - "sqlite3" + - "trimesh" + - "win32ctypes" + - "PyQt6" + - "PyQt6.QtNetwork" + - "PyQt6.sip" + - "stl" + icon: + Windows: "./icons/Cura.ico" + Macos: "./icons/cura.icns" + Linux: "./icons/cura-128.png" "5.3.0": - requirements: - - "pyarcus/5.2.2" - - "curaengine/5.3.0" - - "pysavitar/5.2.2" - - "pynest2d/5.2.2" - - "uranium/5.3.0" - - "fdm_materials/5.3.0" - - "cura_binary_data/5.3.0" - - "cpython/3.10.4" - internal_requirements: - - "fdm_materials_private/(latest)@ultimaker/testing" - - "cura_private_data/(latest)@ultimaker/testing" - runinfo: - entrypoint: "cura_app.py" - pyinstaller: - datas: - cura_plugins: - package: "cura" - src: "plugins" - dst: "share/cura/plugins" - cura_resources: - package: "cura" - src: "resources" - dst: "share/cura/resources" - cura_private_data: - package: "cura_private_data" - src: "resources" - dst: "share/cura/resources" - internal: true - cura_private_data_plugins: - package: "cura_private_data" - src: "plugins" - dst: "share/cura/plugins" - internal: true - uranium_plugins: - package: "uranium" - src: "plugins" - dst: "share/uranium/plugins" - uranium_resources: - package: "uranium" - src: "resources" - dst: "share/uranium/resources" - uranium_um_qt_qml_um: - package: "uranium" - src: "site-packages/UM/Qt/qml/UM" - dst: "PyQt6/Qt6/qml/UM" - cura_binary_data: - package: "cura_binary_data" - src: "resources/cura/resources" - dst: "share/cura/resources" - uranium_binary_data: - package: "cura_binary_data" - src: "resources/uranium/resources" - dst: "share/uranium/resources" - windows_binary_data: - package: "cura_binary_data" - src: "windows" - dst: "share/windows" - fdm_materials: - package: "fdm_materials" - src: "materials" - dst: "share/cura/resources/materials" - fdm_materials_private: - package: "fdm_materials_private" - src: "resources/materials" - dst: "share/cura/resources/materials" - internal: true - tcl: - package: "tcl" - src: "lib/tcl8.6" - dst: "tcl" - tk: - package: "tk" - src: "lib/tk8.6" - dst: "tk" - binaries: - curaengine: - package: "curaengine" - src: "bin" - dst: "." 
- binary: "CuraEngine" - hiddenimports: - - "pySavitar" - - "pyArcus" - - "pynest2d" - - "PyQt6" - - "PyQt6.QtNetwork" - - "PyQt6.sip" - - "logging.handlers" - - "zeroconf" - - "fcntl" - - "stl" - - "serial" - collect_all: - - "cura" - - "UM" - - "serial" - - "Charon" - - "sqlite3" - - "trimesh" - - "win32ctypes" - - "PyQt6" - - "PyQt6.QtNetwork" - - "PyQt6.sip" - - "stl" - icon: - Windows: "./icons/Cura.ico" - Macos: "./icons/cura.icns" - Linux: "./icons/cura-128.png" -"5.3.0-alpha": - requirements: - - "pyarcus/5.2.2" - - "curaengine/(latest)@ultimaker/testing" - - "pysavitar/5.2.2" - - "pynest2d/5.2.2" - - "uranium/(latest)@ultimaker/testing" - - "fdm_materials/(latest)@ultimaker/testing" - - "cura_binary_data/(latest)@ultimaker/testing" - - "cpython/3.10.4" - internal_requirements: - - "fdm_materials_private/(latest)@ultimaker/testing" - - "cura_private_data/(latest)@ultimaker/testing" - runinfo: - entrypoint: "cura_app.py" - pyinstaller: - datas: - cura_plugins: - package: "cura" - src: "plugins" - dst: "share/cura/plugins" - cura_resources: - package: "cura" - src: "resources" - dst: "share/cura/resources" - cura_private_data: - package: "cura_private_data" - src: "resources" - dst: "share/cura/resources" - internal: true - cura_private_data_plugins: - package: "cura_private_data" - src: "plugins" - dst: "share/cura/plugins" - internal: true - uranium_plugins: - package: "uranium" - src: "plugins" - dst: "share/uranium/plugins" - uranium_resources: - package: "uranium" - src: "resources" - dst: "share/uranium/resources" - uranium_um_qt_qml_um: - package: "uranium" - src: "site-packages/UM/Qt/qml/UM" - dst: "PyQt6/Qt6/qml/UM" - cura_binary_data: - package: "cura_binary_data" - src: "resources/cura/resources" - dst: "share/cura/resources" - uranium_binary_data: - package: "cura_binary_data" - src: "resources/uranium/resources" - dst: "share/uranium/resources" - windows_binary_data: - package: "cura_binary_data" - src: "windows" - dst: "share/windows" - fdm_materials: - package: "fdm_materials" - src: "materials" - dst: "share/cura/resources/materials" - fdm_materials_private: - package: "fdm_materials_private" - src: "resources/materials" - dst: "share/cura/resources/materials" - internal: true - tcl: - package: "tcl" - src: "lib/tcl8.6" - dst: "tcl" - tk: - package: "tk" - src: "lib/tk8.6" - dst: "tk" - binaries: - curaengine: - package: "curaengine" - src: "bin" - dst: "." 
- binary: "CuraEngine" - hiddenimports: - - "pySavitar" - - "pyArcus" - - "pynest2d" - - "PyQt6" - - "PyQt6.QtNetwork" - - "PyQt6.sip" - - "logging.handlers" - - "zeroconf" - - "fcntl" - - "stl" - - "serial" - collect_all: - - "cura" - - "UM" - - "serial" - - "Charon" - - "sqlite3" - - "trimesh" - - "win32ctypes" - - "PyQt6" - - "PyQt6.QtNetwork" - - "PyQt6.sip" - - "stl" - icon: - Windows: "./icons/Cura.ico" - Macos: "./icons/cura.icns" - Linux: "./icons/cura-128.png" + requirements: + - "pyarcus/5.2.2" + - "curaengine/5.3.0" + - "pysavitar/5.2.2" + - "pynest2d/5.2.2" + - "uranium/5.3.0" + - "fdm_materials/5.3.0" + - "cura_binary_data/5.3.0" + - "cpython/3.10.4" + internal_requirements: + - "fdm_materials_private/(latest)@ultimaker/testing" + - "cura_private_data/(latest)@ultimaker/testing" + runinfo: + entrypoint: "cura_app.py" + pyinstaller: + datas: + cura_plugins: + package: "cura" + src: "plugins" + dst: "share/cura/plugins" + cura_resources: + package: "cura" + src: "resources" + dst: "share/cura/resources" + cura_private_data: + package: "cura_private_data" + src: "resources" + dst: "share/cura/resources" + internal: true + cura_private_data_plugins: + package: "cura_private_data" + src: "plugins" + dst: "share/cura/plugins" + internal: true + uranium_plugins: + package: "uranium" + src: "plugins" + dst: "share/uranium/plugins" + uranium_resources: + package: "uranium" + src: "resources" + dst: "share/uranium/resources" + uranium_um_qt_qml_um: + package: "uranium" + src: "site-packages/UM/Qt/qml/UM" + dst: "PyQt6/Qt6/qml/UM" + cura_binary_data: + package: "cura_binary_data" + src: "resources/cura/resources" + dst: "share/cura/resources" + uranium_binary_data: + package: "cura_binary_data" + src: "resources/uranium/resources" + dst: "share/uranium/resources" + windows_binary_data: + package: "cura_binary_data" + src: "windows" + dst: "share/windows" + fdm_materials: + package: "fdm_materials" + src: "materials" + dst: "share/cura/resources/materials" + fdm_materials_private: + package: "fdm_materials_private" + src: "resources/materials" + dst: "share/cura/resources/materials" + internal: true + tcl: + package: "tcl" + src: "lib/tcl8.6" + dst: "tcl" + tk: + package: "tk" + src: "lib/tk8.6" + dst: "tk" + binaries: + curaengine: + package: "curaengine" + src: "bin" + dst: "." 
+ binary: "CuraEngine" + hiddenimports: + - "pySavitar" + - "pyArcus" + - "pynest2d" + - "PyQt6" + - "PyQt6.QtNetwork" + - "PyQt6.sip" + - "logging.handlers" + - "zeroconf" + - "fcntl" + - "stl" + - "serial" + collect_all: + - "cura" + - "UM" + - "serial" + - "Charon" + - "sqlite3" + - "trimesh" + - "win32ctypes" + - "PyQt6" + - "PyQt6.QtNetwork" + - "PyQt6.sip" + - "stl" + icon: + Windows: "./icons/Cura.ico" + Macos: "./icons/cura.icns" + Linux: "./icons/cura-128.png" pycharm_targets: - - jinja_path: .run_templates/pycharm_cura_run.run.xml.jinja - module_name: Cura - name: cura - script_name: cura_app.py - - jinja_path: .run_templates/pycharm_cura_run.run.xml.jinja - module_name: Cura - name: cura_external_engine - parameters: --external-backend - script_name: cura_app.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in tests - script_name: tests/ - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestBuildVolume.py - script_name: tests/TestBuildVolume.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestConvexHullDecorator.py - script_name: tests/TestConvexHullDecorator.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestCuraSceneNode.py - script_name: tests/TestCuraSceneNode.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestCuraSceneNode.py - script_name: tests/TestExtruderManager.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestGCodeListDecorator.py - script_name: tests/TestGCodeListDecorator.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestIntentManager.py - script_name: tests/TestIntentManager.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestLayer.py - script_name: tests/TestLayer.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestMachineAction.py - script_name: tests/TestMachineAction.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestMachineManager.py - script_name: tests/TestMachineManager.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestOAuth2.py - script_name: tests/TestOAuth2.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestObjectsModel.py - script_name: tests/TestObjectsModel.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestPrintInformation.py - script_name: tests/TestPrintInformation.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestProfileRequirements.py - script_name: tests/TestProfileRequirements.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestThemes.py - script_name: tests/TestThemes.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestContainerManager.py - script_name: tests/Settings/TestContainerManager.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestCuraContainerRegistry.py - script_name: 
tests/Settings/TestCuraContainerRegistry.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestCuraStackBuilder.py - script_name: tests/Settings/TestCuraStackBuilder.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestDefinitionContainer.py - script_name: tests/Settings/TestDefinitionContainer.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestExtruderStack.py - script_name: tests/Settings/TestExtruderStack.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestGlobalStack.py - script_name: tests/Settings/TestGlobalStack.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestProfiles.py - script_name: tests/Settings/TestProfiles.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestSettingInheritanceManager.py - script_name: tests/Settings/TestSettingInheritanceManager.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestSettingOverrideDecorator.py - script_name: tests/Settings/TestSettingOverrideDecorator.py - - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja - module_name: Cura - name: pytest in TestSettingVisibilityPresets.py - script_name: tests/Settings/TestSettingVisibilityPresets.py + - jinja_path: .run_templates/pycharm_cura_run.run.xml.jinja + module_name: Cura + name: cura + script_name: cura_app.py + - jinja_path: .run_templates/pycharm_cura_run.run.xml.jinja + module_name: Cura + name: cura_external_engine + parameters: --external-backend + script_name: cura_app.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in tests + script_name: tests/ + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestBuildVolume.py + script_name: tests/TestBuildVolume.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestConvexHullDecorator.py + script_name: tests/TestConvexHullDecorator.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestCuraSceneNode.py + script_name: tests/TestCuraSceneNode.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestCuraSceneNode.py + script_name: tests/TestExtruderManager.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestGCodeListDecorator.py + script_name: tests/TestGCodeListDecorator.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestIntentManager.py + script_name: tests/TestIntentManager.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestLayer.py + script_name: tests/TestLayer.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestMachineAction.py + script_name: tests/TestMachineAction.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestMachineManager.py + script_name: tests/TestMachineManager.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestOAuth2.py + script_name: tests/TestOAuth2.py 
+ - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestObjectsModel.py + script_name: tests/TestObjectsModel.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestPrintInformation.py + script_name: tests/TestPrintInformation.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestProfileRequirements.py + script_name: tests/TestProfileRequirements.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestThemes.py + script_name: tests/TestThemes.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestContainerManager.py + script_name: tests/Settings/TestContainerManager.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestCuraContainerRegistry.py + script_name: tests/Settings/TestCuraContainerRegistry.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestCuraStackBuilder.py + script_name: tests/Settings/TestCuraStackBuilder.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestDefinitionContainer.py + script_name: tests/Settings/TestDefinitionContainer.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestExtruderStack.py + script_name: tests/Settings/TestExtruderStack.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestGlobalStack.py + script_name: tests/Settings/TestGlobalStack.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestProfiles.py + script_name: tests/Settings/TestProfiles.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestSettingInheritanceManager.py + script_name: tests/Settings/TestSettingInheritanceManager.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestSettingOverrideDecorator.py + script_name: tests/Settings/TestSettingOverrideDecorator.py + - jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja + module_name: Cura + name: pytest in TestSettingVisibilityPresets.py + script_name: tests/Settings/TestSettingVisibilityPresets.py From a4a3296dd2c4760e8fda5499ee0742487ec8dfd1 Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:03:55 +0100 Subject: [PATCH 18/31] Bump up minor based on latest release branch --- .github/workflows/conan-recipe-version.yml | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index 3d5da55d6a..8ab9c8a61b 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -170,9 +170,17 @@ jobs: bump_up_release_tag = int(latest_branch_version.prerelease.split('.')[1]) + 1 actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{latest_branch_version.prerelease.split('.')[0]}.{bump_up_release_tag}+{buildmetadata}{channel_metadata}" else: - bump_up_minor = int(latest_branch_version.minor) + 1 - reset_patch = 0 - actual_version = f"{latest_branch_version.major}.{bump_up_minor}.{reset_patch}-alpha+{buildmetadata}{channel_metadata}" + 
branches_version = [] + for branch in repo.branches: + try: + branches_version.append(tools.Version(branch.name)) + except: + pass + latest_branches_version = max(sorted(branches_version)) + if latest_branches_version > latest_branch_version: + actual_version = f"{latest_branches_version.major}.{int(latest_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" + else: + actual_version = f"{latest_branch_version.major}.{int(latest_branch_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" # %% Set the environment output output_env = os.environ["GITHUB_OUTPUT"] From be057d495d2ffb6f439ecdd7d123945dd215fb0e Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:10:42 +0100 Subject: [PATCH 19/31] Check against older versions --- .github/workflows/conan-recipe-version.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index 8ab9c8a61b..30fbe525f6 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -173,7 +173,9 @@ jobs: branches_version = [] for branch in repo.branches: try: - branches_version.append(tools.Version(branch.name)) + b_version = tools.Version(branch.name) + if b_version < tools.Version("10.0.0"): + branches_version.append(b_version) except: pass latest_branches_version = max(sorted(branches_version)) From cc3664fa6afbc78e0c901dc50b08d2d76beb975b Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:18:54 +0100 Subject: [PATCH 20/31] Check against refs --- .github/workflows/conan-recipe-version.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index 30fbe525f6..57f2f53c99 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -171,11 +171,12 @@ jobs: actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{latest_branch_version.prerelease.split('.')[0]}.{bump_up_release_tag}+{buildmetadata}{channel_metadata}" else: branches_version = [] - for branch in repo.branches: + for branch in repo.references: try: - b_version = tools.Version(branch.name) - if b_version < tools.Version("10.0.0"): - branches_version.append(b_version) + if "heads" in branch.abspath: + b_version = tools.Version(branch.name) + if b_version < tools.Version("10.0.0"): + branches_version.append(b_version) except: pass latest_branches_version = max(sorted(branches_version)) From c3a75fd3d3588d12bb3b268bff002f87c7dae4a9 Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:24:27 +0100 Subject: [PATCH 21/31] Dirty fix --- .github/workflows/conan-recipe-version.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index 57f2f53c99..c65d832b45 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -173,13 +173,17 @@ jobs: branches_version = [] for branch in repo.references: try: + print(branch.abspath) if "heads" in branch.abspath: b_version = tools.Version(branch.name) if b_version < tools.Version("10.0.0"): branches_version.append(b_version) except: pass - latest_branches_version = max(sorted(branches_version)) + if len(branches_version) > 0: + latest_branches_version = max(sorted(branches_version)) + else: + 
latest_branches_version = tools.Version("5.3") # FIXME: Do not hardcode if latest_branches_version > latest_branch_version: actual_version = f"{latest_branches_version.major}.{int(latest_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" else: From e7d7a23be8bd3814b7bc96fd59895d3b5d29d701 Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:27:00 +0100 Subject: [PATCH 22/31] Use remotes/origin --- .github/workflows/conan-recipe-version.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index c65d832b45..da444a7e4d 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -173,17 +173,13 @@ jobs: branches_version = [] for branch in repo.references: try: - print(branch.abspath) - if "heads" in branch.abspath: + if "remotes/origin" in branch.abspath: b_version = tools.Version(branch.name) if b_version < tools.Version("10.0.0"): branches_version.append(b_version) except: pass - if len(branches_version) > 0: - latest_branches_version = max(sorted(branches_version)) - else: - latest_branches_version = tools.Version("5.3") # FIXME: Do not hardcode + latest_branches_version = max(sorted(branches_version)) if latest_branches_version > latest_branch_version: actual_version = f"{latest_branches_version.major}.{int(latest_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" else: From 4ef6af26df44ad39b9814263f969c946ad230abf Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:35:26 +0100 Subject: [PATCH 23/31] reset the count from the branching of commit --- .github/workflows/conan-recipe-version.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index da444a7e4d..3cc120904b 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -170,17 +170,18 @@ jobs: bump_up_release_tag = int(latest_branch_version.prerelease.split('.')[1]) + 1 actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{latest_branch_version.prerelease.split('.')[0]}.{bump_up_release_tag}+{buildmetadata}{channel_metadata}" else: - branches_version = [] + max_branches_version = tools.Version("0.0.0") + branches_no_commits = no_commits for branch in repo.references: try: if "remotes/origin" in branch.abspath: b_version = tools.Version(branch.name) - if b_version < tools.Version("10.0.0"): - branches_version.append(b_version) + if b_version < tools.Version("10.0.0") and b_version > max_branches_version: + max_branches_version = b_version + branches_no_commits = repo.commit().count() - branch..commit.count() except: pass - latest_branches_version = max(sorted(branches_version)) - if latest_branches_version > latest_branch_version: + if max_branches_version > latest_branch_version: actual_version = f"{latest_branches_version.major}.{int(latest_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" else: actual_version = f"{latest_branch_version.major}.{int(latest_branch_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" From 81845ab58e3f15235429a1f05f125d27b5641ebd Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:36:46 +0100 Subject: [PATCH 24/31] fix typo --- .github/workflows/conan-recipe-version.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index 3cc120904b..61ebe3a23e 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -178,7 +178,7 @@ jobs: b_version = tools.Version(branch.name) if b_version < tools.Version("10.0.0") and b_version > max_branches_version: max_branches_version = b_version - branches_no_commits = repo.commit().count() - branch..commit.count() + branches_no_commits = repo.commit().count() - branch.commit.count() except: pass if max_branches_version > latest_branch_version: From dc66a4d4ff37d1dcc8bee748cbdb720150206eb2 Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:51:55 +0100 Subject: [PATCH 25/31] split on / --- .github/workflows/conan-recipe-version.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index 61ebe3a23e..42cdad6d9f 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -175,14 +175,14 @@ jobs: for branch in repo.references: try: if "remotes/origin" in branch.abspath: - b_version = tools.Version(branch.name) + b_version = tools.Version(branch.name.split("/")[-1]) if b_version < tools.Version("10.0.0") and b_version > max_branches_version: max_branches_version = b_version branches_no_commits = repo.commit().count() - branch.commit.count() except: pass if max_branches_version > latest_branch_version: - actual_version = f"{latest_branches_version.major}.{int(latest_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" + actual_version = f"{max_branches_version.major}.{int(max_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" else: actual_version = f"{latest_branch_version.major}.{int(latest_branch_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" From 6d22950c18667d069e27070f78cb015884b72dd2 Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 10:55:44 +0100 Subject: [PATCH 26/31] Use the branched of no commits --- .github/workflows/conan-recipe-version.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/conan-recipe-version.yml b/.github/workflows/conan-recipe-version.yml index 42cdad6d9f..75408b0d16 100644 --- a/.github/workflows/conan-recipe-version.yml +++ b/.github/workflows/conan-recipe-version.yml @@ -182,7 +182,7 @@ jobs: except: pass if max_branches_version > latest_branch_version: - actual_version = f"{max_branches_version.major}.{int(max_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" + actual_version = f"{max_branches_version.major}.{int(max_branches_version.minor) + 1}.0-alpha+{buildmetadata}{channel}_{branches_no_commits}" else: actual_version = f"{latest_branch_version.major}.{int(latest_branch_version.minor) + 1}.0-alpha+{buildmetadata}{channel_metadata}" From 1c465614d8cb996cc6d567da8fd15fd2d4f9dd31 Mon Sep 17 00:00:00 2001 From: jspijker Date: Wed, 1 Feb 2023 11:06:35 +0100 Subject: [PATCH 27/31] Revert "Use Conandata directly" This reverts commit b8c9fa3f5186bfe564c1ed3b3c09e89218080fe9. 
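The workflow patches above (18 through 26) keep reworking the same branch-scanning block in conan-recipe-version.yml; consolidated, the logic they converge on looks roughly like the sketch below, before the conanfile.py revert that follows. This is only a sketch: repo is the GitPython repository the workflow already holds, Version stands in for tools.Version, and latest_branch_version, no_commits, buildmetadata, channel and channel_metadata are assumed to be computed by earlier steps of the job, as in the workflow itself.

# Consolidated sketch of the branch scan that patches 18-26 arrive at.
# Every parameter is a stand-in for a value the workflow computes earlier.
def alpha_version_from_origin_branches(repo, Version, latest_branch_version,
                                       no_commits, buildmetadata, channel,
                                       channel_metadata):
    max_branches_version = Version("0.0.0")
    branches_no_commits = no_commits
    for ref in repo.references:
        try:
            # Only remote release branches such as "origin/5.3" count;
            # names that do not parse as a version raise and are skipped.
            if "remotes/origin" in ref.abspath:
                b_version = Version(ref.name.split("/")[-1])
                if b_version < Version("10.0.0") and b_version > max_branches_version:
                    max_branches_version = b_version
                    # Approximate commits on HEAD beyond that branch's tip,
                    # taken as the difference of reachable-commit counts.
                    branches_no_commits = repo.commit().count() - ref.commit.count()
        except Exception:
            pass
    if max_branches_version > latest_branch_version:
        return (f"{max_branches_version.major}.{int(max_branches_version.minor) + 1}.0"
                f"-alpha+{buildmetadata}{channel}_{branches_no_commits}")
    return (f"{latest_branch_version.major}.{int(latest_branch_version.minor) + 1}.0"
            f"-alpha+{buildmetadata}{channel_metadata}")

As of patch 26, the alpha's build metadata therefore carries the commit count relative to the newest origin release branch instead of the plain channel_metadata suffix.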
--- conanfile.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/conanfile.py b/conanfile.py index eec875efe6..7dd3c9db9b 100644 --- a/conanfile.py +++ b/conanfile.py @@ -179,7 +179,7 @@ class CuraConan(ConanFile): cura_latest_url = self._cura_latest_url)) def _generate_pyinstaller_spec(self, location, entrypoint_location, icon_path, entitlements_file): - pyinstaller_metadata = self.conan_data[self.version]["pyinstaller"] + pyinstaller_metadata = self._um_data()["pyinstaller"] datas = [(str(self._base_dir.joinpath("conan_install_info.json")), ".")] for data in pyinstaller_metadata["datas"].values(): if not self.options.internal and data.get("internal", False): @@ -275,10 +275,10 @@ class CuraConan(ConanFile): raise ConanInvalidConfiguration("Only versions 5+ are support") def requirements(self): - for req in self.conan_data[self.version]["requirements"]: + for req in self._um_data()["requirements"]: self.requires(req) if self.options.internal: - for req in self.conan_data[self.version]["internal_requirements"]: + for req in self._um_data()["internal_requirements"]: self.requires(req) def build_requirements(self): @@ -319,8 +319,8 @@ class CuraConan(ConanFile): if self.options.devtools: entitlements_file = "'{}'".format(Path(self.source_folder, "packaging", "MacOS", "cura.entitlements")) self._generate_pyinstaller_spec(location = self.generators_folder, - entrypoint_location = "'{}'".format(Path(self.source_folder, self.conan_data[self.version]["runinfo"]["entrypoint"])).replace("\\", "\\\\"), - icon_path = "'{}'".format(Path(self.source_folder, "packaging", self.conan_data[self.version]["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"), + entrypoint_location = "'{}'".format(Path(self.source_folder, self._um_data()["runinfo"]["entrypoint"])).replace("\\", "\\\\"), + icon_path = "'{}'".format(Path(self.source_folder, "packaging", self._um_data()["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"), entitlements_file = entitlements_file if self.settings.os == "Macos" else "None") # Update the po files @@ -447,8 +447,8 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV entitlements_file = "'{}'".format(Path(self.cpp_info.res_paths[2], "MacOS", "cura.entitlements")) self._generate_pyinstaller_spec(location = self._base_dir, - entrypoint_location = "'{}'".format(Path(self.cpp_info.bin_paths[0], self.conan_data[self.version]["runinfo"]["entrypoint"])).replace("\\", "\\\\"), - icon_path = "'{}'".format(Path(self.cpp_info.res_paths[2], self.conan_data[self.version]["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"), + entrypoint_location = "'{}'".format(Path(self.cpp_info.bin_paths[0], self._um_data()["runinfo"]["entrypoint"])).replace("\\", "\\\\"), + icon_path = "'{}'".format(Path(self.cpp_info.res_paths[2], self._um_data()["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"), entitlements_file = entitlements_file if self.settings.os == "Macos" else "None") def package(self): From f1cc551309b005f088ac725a3654e05a93b607e5 Mon Sep 17 00:00:00 2001 From: Rijk van Manen Date: Thu, 2 Feb 2023 11:08:21 +0100 Subject: [PATCH 28/31] add value limits Slicing becomes very slow for large interlocking_boundary_avoidance and interlocking_depth values. The minimum interlocking_beam_width is the minimum wall line width. And the interlocking_rotation should be between 0 and 360 degrees. 
PP-283 --- resources/definitions/fdmprinter.def.json | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/resources/definitions/fdmprinter.def.json b/resources/definitions/fdmprinter.def.json index 881f0dc04c..8d0a7de308 100644 --- a/resources/definitions/fdmprinter.def.json +++ b/resources/definitions/fdmprinter.def.json @@ -6870,7 +6870,7 @@ "enabled": "extruders_enabled_count > 1 and resolveOrValue('interlocking_enable')", "default_value": 0.8, "value": "2 * wall_line_width_0", - "minimum_value": "0.001", + "minimum_value": "min_odd_wall_line_width", "maximum_value": "min(0.5 * machine_width, 0.5 * machine_depth)", "maximum_value_warning": "max(extruderValues('wall_line_width_0')) * 6", "settable_per_mesh": true, @@ -6884,6 +6884,8 @@ "type": "float", "enabled": "extruders_enabled_count > 1 and resolveOrValue('interlocking_enable')", "default_value": 22.5, + "minimum_value": "0", + "maximum_value": "360", "resolve": "min(extruderValues('interlocking_orientation'))", "settable_per_mesh": false, "settable_per_extruder": false @@ -6896,6 +6898,7 @@ "enabled": "extruders_enabled_count > 1 and resolveOrValue('interlocking_enable')", "default_value": 2, "minimum_value": "1", + "maximum_value_warning": "50", "resolve": "max(extruderValues('interlocking_beam_layer_count'))", "settable_per_mesh": false, "settable_per_extruder": false @@ -6908,6 +6911,8 @@ "enabled": "extruders_enabled_count > 1 and resolveOrValue('interlocking_enable')", "default_value": 2, "minimum_value": "1", + "maximum_value": "10", + "maximum_value_warning": "5", "resolve": "max(extruderValues('interlocking_depth'))", "settable_per_mesh": false, "settable_per_extruder": false @@ -6920,8 +6925,10 @@ "enabled": "extruders_enabled_count > 1 and resolveOrValue('interlocking_enable')", "default_value": 3, "minimum_value": "0", - "resolve": "max(extruderValues('interlocking_boundary_avoidance'))", "minimum_value_warning": "resolveOrValue('interlocking_depth')", + "maximum_value": "10", + "maximum_value_warning": "5", + "resolve": "max(extruderValues('interlocking_boundary_avoidance'))", "settable_per_mesh": false, "settable_per_extruder": false }, From 5b2fb0c0d986a75b3b7a7d6e357e33df7e7b35ca Mon Sep 17 00:00:00 2001 From: Rijk van Manen Date: Thu, 2 Feb 2023 11:10:22 +0100 Subject: [PATCH 29/31] reduce the interlocking_boundary_avoidance The interlocking_boundary_avoidance was a bit larger originally to prevent interlocking structures outside the model for edge cases. But this behaviour is improved a lot, so the boundary avoidance can be reduced, which helps for thin and small structures. PP-283 --- resources/definitions/fdmprinter.def.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/resources/definitions/fdmprinter.def.json b/resources/definitions/fdmprinter.def.json index 8d0a7de308..536ce27e06 100644 --- a/resources/definitions/fdmprinter.def.json +++ b/resources/definitions/fdmprinter.def.json @@ -6923,7 +6923,7 @@ "description": "The distance close to the boundary of the print where not to generate an interlocking structure as measued in number of cells times 2. 
If set to a value lower than the Inerlocking Depth then the interlocking structure can become visible on the outside of the print near the interfaces where two models meet.", "type": "int", "enabled": "extruders_enabled_count > 1 and resolveOrValue('interlocking_enable')", - "default_value": 3, + "default_value": 2, "minimum_value": "0", "minimum_value_warning": "resolveOrValue('interlocking_depth')", "maximum_value": "10", From 63108dd9f6b7758922133e21b28b15ba2ceeb06f Mon Sep 17 00:00:00 2001 From: Joeydelarago Date: Thu, 2 Feb 2023 15:56:31 +0000 Subject: [PATCH 30/31] Applied printer-linter format --- resources/definitions/gutenberg_base.def.json | 180 ++++++++++-------- .../definitions/gutenberg_gzero.def.json | 12 +- .../extruders/gutenberg_extruder_0.def.json | 11 +- .../gutenberg_global_fast_quality.inst.cfg | 22 +-- .../gutenberg_global_fine_quality.inst.cfg | 22 +-- .../gutenberg_global_normal_quality.inst.cfg | 22 +-- .../gutenberg_global_strong_quality.inst.cfg | 22 +-- 7 files changed, 157 insertions(+), 134 deletions(-) diff --git a/resources/definitions/gutenberg_base.def.json b/resources/definitions/gutenberg_base.def.json index 9394d841bb..f0329bef6d 100644 --- a/resources/definitions/gutenberg_base.def.json +++ b/resources/definitions/gutenberg_base.def.json @@ -1,95 +1,115 @@ { - "name": "Gutenberg Base", "version": 2, + "name": "Gutenberg Base", "inherits": "fdmprinter", - "metadata": + "metadata": { "visible": false, "author": "Gutenberg Dev", "manufacturer": "Gutenberg", "file_formats": "text/x-gcode", - "first_start_actions": ["MachineSettingsAction"], - "preferred_quality_type": "normal", + "first_start_actions": [ "MachineSettingsAction" ], "has_machine_quality": true, "machine_extruder_trains": { "0": "gutenberg_extruder_0" }, - "preferred_material": "generic_abs" + "preferred_material": "generic_abs", + "preferred_quality_type": "normal" }, "overrides": { - "machine_name": { "default_value": "GUTENBERG 3DP" }, - "machine_width": { "default_value": 165 }, - "machine_depth": { "default_value": 165 }, - "machine_height": { "default_value": 165 }, - "material_diameter": { "default_value": 1.75 }, - "gantry_height": { "value": 30 }, + "acceleration_enabled": { "default_value": true }, + "acceleration_layer_0": { "value": 10000 }, + "acceleration_print": { "default_value": 15000 }, + "acceleration_roofing": { "value": 10000 }, + "acceleration_travel_layer_0": { "value": 10000 }, + "acceleration_wall_0": { "value": 7500 }, + "adhesion_type": { "default_value": "skirt" }, + "alternate_extra_perimeter": { "default_value": true }, + "bridge_fan_speed_2": { "resolve": "max(cool_fan_speed, 50)" }, + "bridge_fan_speed_3": { "resolve": "max(cool_fan_speed, 20)" }, + "bridge_settings_enabled": { "default_value": true }, + "bridge_wall_coast": { "default_value": 10 }, + "cool_fan_full_at_height": { "value": "resolveOrValue('layer_height_0') + resolveOrValue('layer_height') * max(1, cool_fan_full_layer - 1)" }, + "cool_fan_full_layer": { "value": 4 }, + "cool_min_layer_time": { "default_value": 15 }, + "cool_min_layer_time_fan_speed_max": { "default_value": 20 }, + "gantry_height": { "value": 30 }, + "infill_before_walls": { "default_value": false }, + "infill_enable_travel_optimization": { "default_value": true }, + "jerk_roofing": { "value": 10 }, + "jerk_wall_0": { "value": 10 }, + "layer_height_0": { "resolve": "max(0.2, min(extruderValues('layer_height')))" }, + "line_width": { "value": "machine_nozzle_size * 1.125" }, + "machine_acceleration": { "default_value": 1500 }, + 
"machine_depth": { "default_value": 165 }, + "machine_end_gcode": { "default_value": "END_PRINT" }, + "machine_endstop_positive_direction_x": { "default_value": true }, + "machine_endstop_positive_direction_y": { "default_value": true }, + "machine_endstop_positive_direction_z": { "default_value": false }, + "machine_feeder_wheel_diameter": { "default_value": 7.5 }, + "machine_head_with_fans_polygon": + { + "default_value": [ + [-35, 65], + [-35, -50], + [35, -50], + [35, 65] + ] + }, + "machine_heated_bed": { "default_value": true }, + "machine_height": { "default_value": 165 }, + "machine_max_acceleration_x": { "default_value": 15000 }, + "machine_max_acceleration_y": { "default_value": 15000 }, + "machine_max_acceleration_z": { "default_value": 250 }, + "machine_max_feedrate_e": { "default_value": 120 }, + "machine_max_feedrate_z": { "default_value": 40 }, + "machine_max_jerk_e": { "default_value": 60 }, + "machine_name": { "default_value": "GUTENBERG 3DP" }, + "machine_start_gcode": { "default_value": ";Simple\nSTART_PRINT EXTRUDER_TEMP={material_print_temperature_layer_0} BED_TEMP={material_bed_temperature_layer_0}\n;Or with custom bed mesh area\n;START_PRINT EXTRUDER_TEMP={material_print_temperature_layer_0} BED_TEMP={material_bed_temperature_layer_0} AREA_START=%MINX%,%MINY% AREA_END=%MAXX%,%MAXY% FILAMENT_TYPE={material_type}" }, + "machine_steps_per_mm_x": { "default_value": 160 }, + "machine_steps_per_mm_y": { "default_value": 160 }, + "machine_steps_per_mm_z": { "default_value": 800 }, "machine_use_extruder_offset_to_offset_coords": { "value": false }, - "machine_heated_bed": { "default_value": true }, - "machine_max_acceleration_x": { "default_value": 15000 }, - "machine_max_acceleration_y": { "default_value": 15000 }, - "machine_max_acceleration_z": { "default_value": 250 }, - "machine_acceleration": { "default_value": 1500 }, - "machine_max_jerk_e": { "default_value": 60 }, - "machine_steps_per_mm_x": { "default_value": 160 }, - "machine_steps_per_mm_y": { "default_value": 160 }, - "machine_steps_per_mm_z": { "default_value": 800 }, - "machine_endstop_positive_direction_x": { "default_value": true }, - "machine_endstop_positive_direction_y": { "default_value": true }, - "machine_endstop_positive_direction_z": { "default_value": false }, - "machine_feeder_wheel_diameter": { "default_value": 7.5 }, - "machine_head_with_fans_polygon": { "default_value": [ [-35, 65], [-35, -50], [35, -50], [35, 65] ] }, - "machine_max_feedrate_z": { "default_value": 40 }, - "machine_max_feedrate_e": { "default_value": 120 }, - "machine_start_gcode": { "default_value": ";Simple\nSTART_PRINT EXTRUDER_TEMP={material_print_temperature_layer_0} BED_TEMP={material_bed_temperature_layer_0}\n;Or with custom bed mesh area\n;START_PRINT EXTRUDER_TEMP={material_print_temperature_layer_0} BED_TEMP={material_bed_temperature_layer_0} AREA_START=%MINX%,%MINY% AREA_END=%MAXX%,%MAXY% FILAMENT_TYPE={material_type}" }, - "machine_end_gcode": { "default_value": "END_PRINT" }, - "adhesion_type": { "default_value": "skirt" }, - "retraction_amount": { "default_value": 0.80 }, - "skirt_brim_minimal_length": { "default_value": 550 }, - "retraction_speed": { "default_value": 35, "maximum_value_warning": 130 }, - "retraction_retract_speed": { "maximum_value_warning": 130 }, - "retraction_prime_speed": { "value": "math.ceil(retraction_speed * 0.4)", "maximum_value_warning": 130 }, - "retraction_hop_enabled": { "default_value": true }, - "retraction_hop": { "default_value": 0.2 }, - "retraction_combing": { "value": 
"'noskin'" }, - "retraction_combing_max_distance": { "default_value": 10 }, - "travel_avoid_other_parts": { "default_value": false }, - "speed_travel": { "value": 300, "maximum_value_warning": 501 }, - "speed_travel_layer_0": { "value": "math.ceil(speed_travel * 0.4)" }, - "speed_layer_0": { "value": "math.ceil(speed_print * 0.25)" }, - "speed_wall": { "value": "math.ceil(speed_print * 0.33)" }, - "speed_wall_0": { "value": "math.ceil(speed_print * 0.33)" }, - "speed_wall_x": { "value": "math.ceil(speed_print * 0.66)" }, - "speed_topbottom": { "value": "math.ceil(speed_print * 0.33)" }, - "speed_roofing": { "value": "math.ceil(speed_print * 0.33)" }, - "speed_slowdown_layers": { "default_value": 4 }, - "roofing_layer_count": { "value": 1 }, - "optimize_wall_printing_order": { "default_value": true }, - "infill_enable_travel_optimization": { "default_value": true }, - "minimum_polygon_circumference": { "default_value": 0.2 }, - "wall_overhang_angle": { "default_value": 75 }, - "wall_overhang_speed_factor": { "default_value": 50 }, - "bridge_settings_enabled": { "default_value": true }, - "bridge_wall_coast": { "default_value": 10 }, - "bridge_fan_speed_2": { "resolve": "max(cool_fan_speed, 50)" }, - "bridge_fan_speed_3": { "resolve": "max(cool_fan_speed, 20)" }, - "alternate_extra_perimeter": { "default_value": true }, - "cool_min_layer_time_fan_speed_max": { "default_value": 20 }, - "cool_min_layer_time": { "default_value": 15 }, - "cool_fan_full_at_height": { "value": "resolveOrValue('layer_height_0') + resolveOrValue('layer_height') * max(1, cool_fan_full_layer - 1)" }, - "cool_fan_full_layer": { "value": 4 }, - "layer_height_0": { "resolve": "max(0.2, min(extruderValues('layer_height')))" }, - "line_width": { "value": "machine_nozzle_size * 1.125" }, - "wall_line_width": { "value": "machine_nozzle_size" }, - "meshfix_maximum_resolution": { "default_value": 0.01 }, - "infill_before_walls": { "default_value": false }, - "zig_zaggify_infill": { "value": true }, - "min_infill_area": { "default_value": 5.0 }, - "acceleration_enabled": { "default_value": true }, - "acceleration_print": { "default_value": 15000 }, - "acceleration_wall_0": { "value": 7500 }, - "acceleration_layer_0": { "value": 10000 }, - "acceleration_travel_layer_0": { "value": 10000 }, - "acceleration_roofing": { "value": 10000 }, - "jerk_wall_0": { "value": 10 }, - "jerk_roofing": { "value": 10 } + "machine_width": { "default_value": 165 }, + "material_diameter": { "default_value": 1.75 }, + "meshfix_maximum_resolution": { "default_value": 0.01 }, + "min_infill_area": { "default_value": 5.0 }, + "minimum_polygon_circumference": { "default_value": 0.2 }, + "optimize_wall_printing_order": { "default_value": true }, + "retraction_amount": { "default_value": 0.8 }, + "retraction_combing": { "value": "'noskin'" }, + "retraction_combing_max_distance": { "default_value": 10 }, + "retraction_hop": { "default_value": 0.2 }, + "retraction_hop_enabled": { "default_value": true }, + "retraction_prime_speed": + { + "maximum_value_warning": 130, + "value": "math.ceil(retraction_speed * 0.4)" + }, + "retraction_retract_speed": { "maximum_value_warning": 130 }, + "retraction_speed": + { + "default_value": 35, + "maximum_value_warning": 130 + }, + "roofing_layer_count": { "value": 1 }, + "skirt_brim_minimal_length": { "default_value": 550 }, + "speed_layer_0": { "value": "math.ceil(speed_print * 0.25)" }, + "speed_roofing": { "value": "math.ceil(speed_print * 0.33)" }, + "speed_slowdown_layers": { "default_value": 4 }, + "speed_topbottom": 
{ "value": "math.ceil(speed_print * 0.33)" }, + "speed_travel": + { + "maximum_value_warning": 501, + "value": 300 + }, + "speed_travel_layer_0": { "value": "math.ceil(speed_travel * 0.4)" }, + "speed_wall": { "value": "math.ceil(speed_print * 0.33)" }, + "speed_wall_0": { "value": "math.ceil(speed_print * 0.33)" }, + "speed_wall_x": { "value": "math.ceil(speed_print * 0.66)" }, + "travel_avoid_other_parts": { "default_value": false }, + "wall_line_width": { "value": "machine_nozzle_size" }, + "wall_overhang_angle": { "default_value": 75 }, + "wall_overhang_speed_factor": { "default_value": 50 }, + "zig_zaggify_infill": { "value": true } } -} +} \ No newline at end of file diff --git a/resources/definitions/gutenberg_gzero.def.json b/resources/definitions/gutenberg_gzero.def.json index f03d5e3d9f..f9f317c3f4 100644 --- a/resources/definitions/gutenberg_gzero.def.json +++ b/resources/definitions/gutenberg_gzero.def.json @@ -1,6 +1,6 @@ { - "name": "G-ZERO", "version": 2, + "name": "G-ZERO", "inherits": "gutenberg_base", "metadata": { @@ -9,9 +9,9 @@ }, "overrides": { - "machine_name": { "default_value": "G-ZERO" }, - "machine_width": { "default_value": 250 }, - "machine_depth": { "default_value": 200 }, - "machine_height": { "default_value": 201 } + "machine_depth": { "default_value": 200 }, + "machine_height": { "default_value": 201 }, + "machine_name": { "default_value": "G-ZERO" }, + "machine_width": { "default_value": 250 } } -} +} \ No newline at end of file diff --git a/resources/extruders/gutenberg_extruder_0.def.json b/resources/extruders/gutenberg_extruder_0.def.json index 58b41d62c6..aafe35e92b 100644 --- a/resources/extruders/gutenberg_extruder_0.def.json +++ b/resources/extruders/gutenberg_extruder_0.def.json @@ -7,10 +7,13 @@ "machine": "gutenberg_base", "position": "0" }, - "overrides": { - "extruder_nr": { "default_value": 0, "maximum_value": 1 }, - "material_diameter": { "default_value": 1.75 } + "extruder_nr": + { + "default_value": 0, + "maximum_value": 1 + }, + "material_diameter": { "default_value": 1.75 } } -} +} \ No newline at end of file diff --git a/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg index 0a50bc02cf..cfe2a2fc52 100644 --- a/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg @@ -1,34 +1,32 @@ - [general] -version = 4 -name = Fast definition = gutenberg_base +name = Fast +version = 4 [metadata] +global_quality = True +quality_type = fast setting_version = 20 type = quality -quality_type = fast -global_quality = True [values] acceleration_enabled = True -adhesion_type = skirt -layer_height = 0.2 -layer_height_0 = 0.2 -retraction_combing = noskin -support_enable = False acceleration_print = 15000 acceleration_roofing = 10000 acceleration_topbottom = 10000 acceleration_travel = 15000 acceleration_wall = 15000 acceleration_wall_0 = 10000 +adhesion_type = skirt alternate_extra_perimeter = False infill_line_width = 0.5 infill_pattern = lines infill_sparse_density = 10 +layer_height = 0.2 +layer_height_0 = 0.2 optimize_wall_printing_order = True retraction_amount = 0.8 +retraction_combing = noskin retraction_hop = 0.2 retraction_hop_enabled = True retraction_hop_only_when_collides = True @@ -49,9 +47,11 @@ speed_wall = 100 speed_wall_0 = 100 speed_wall_x = 150 support_angle = 46 +support_enable = False top_bottom_thickness = 0.6 travel_avoid_supports = True wall_line_width = 0.45 
wall_line_width_0 = 0.4 wall_thickness = 1.25 -zig_zaggify_infill = True \ No newline at end of file +zig_zaggify_infill = True + diff --git a/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg index ca97c99090..17793b591a 100644 --- a/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg @@ -1,36 +1,34 @@ - [general] -version = 4 -name = Fine definition = gutenberg_base +name = Fine +version = 4 [metadata] +global_quality = True +quality_type = fine setting_version = 20 type = quality -quality_type = fine -global_quality = True [values] acceleration_enabled = True -adhesion_type = skirt -layer_height = 0.2 -layer_height_0 = 0.2 -retraction_combing = noskin -support_enable = False acceleration_print = 15000 acceleration_roofing = 10000 acceleration_topbottom = 10000 acceleration_travel = 15000 acceleration_wall = 7500 acceleration_wall_0 = 7500 +adhesion_type = skirt alternate_extra_perimeter = False cool_min_layer_time = 10 infill_line_width = 0.5 infill_pattern = gyroid infill_sparse_density = 15 inset_direction = outside_in +layer_height = 0.2 +layer_height_0 = 0.2 optimize_wall_printing_order = True retraction_amount = 0.8 +retraction_combing = noskin retraction_hop = 0.2 retraction_hop_enabled = True retraction_hop_only_when_collides = True @@ -49,9 +47,11 @@ speed_wall = 100 speed_wall_0 = 75 speed_wall_x = 100 support_angle = 46 +support_enable = False top_bottom_thickness = 0.8 travel_avoid_supports = True wall_line_width = 0.45 wall_line_width_0 = 0.4 wall_thickness = 1.65 -zig_zaggify_infill = True \ No newline at end of file +zig_zaggify_infill = True + diff --git a/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg index 0cc2559993..1c59caaf9c 100644 --- a/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg @@ -1,35 +1,33 @@ - [general] -version = 4 -name = Normal definition = gutenberg_base +name = Normal +version = 4 [metadata] +global_quality = True +quality_type = normal setting_version = 20 type = quality -quality_type = normal -global_quality = True [values] acceleration_enabled = True -adhesion_type = skirt -layer_height = 0.2 -layer_height_0 = 0.2 -retraction_combing = noskin -support_enable = False acceleration_print = 15000 acceleration_roofing = 10000 acceleration_topbottom = 10000 acceleration_travel = 15000 acceleration_wall = 10000 acceleration_wall_0 = 7500 +adhesion_type = skirt alternate_extra_perimeter = True bottom_layers = 3 infill_line_width = 0.5 infill_pattern = gyroid infill_sparse_density = 15 +layer_height = 0.2 +layer_height_0 = 0.2 optimize_wall_printing_order = True retraction_amount = 0.8 +retraction_combing = noskin retraction_hop = 0.2 retraction_hop_enabled = True retraction_hop_only_when_collides = True @@ -48,10 +46,12 @@ speed_wall = 100 speed_wall_0 = 100 speed_wall_x = 150 support_angle = 46 +support_enable = False top_bottom_thickness = 0.6 top_layers = 3 travel_avoid_supports = True wall_line_width = 0.45 wall_line_width_0 = 0.4 wall_thickness = 1.25 -zig_zaggify_infill = True \ No newline at end of file +zig_zaggify_infill = True + diff --git a/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg index 
71ca4f3d1c..75b12e22ff 100644 --- a/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg @@ -1,34 +1,32 @@ - [general] -version = 4 -name = Strong definition = gutenberg_base +name = Strong +version = 4 [metadata] +global_quality = True +quality_type = strong setting_version = 20 type = quality -quality_type = strong -global_quality = True [values] acceleration_enabled = True -adhesion_type = skirt -layer_height = 0.2 -layer_height_0 = 0.2 -retraction_combing = noskin -support_enable = False acceleration_print = 15000 acceleration_roofing = 10000 acceleration_topbottom = 10000 acceleration_travel = 15000 acceleration_wall = 10000 acceleration_wall_0 = 7500 +adhesion_type = skirt alternate_extra_perimeter = True infill_line_width = 0.5 infill_pattern = gyroid infill_sparse_density = 30 +layer_height = 0.2 +layer_height_0 = 0.2 optimize_wall_printing_order = True retraction_amount = 0.8 +retraction_combing = noskin retraction_hop = 0.2 retraction_hop_enabled = True retraction_hop_only_when_collides = True @@ -48,9 +46,11 @@ speed_wall = 100 speed_wall_0 = 75 speed_wall_x = 100 support_angle = 46 +support_enable = False top_bottom_thickness = 1 travel_avoid_supports = True wall_line_width = 0.45 wall_line_width_0 = 0.4 wall_thickness = 2.05 -zig_zaggify_infill = True \ No newline at end of file +zig_zaggify_infill = True + From 8f2191f734b987cb3c0417514e3fa4ee11b5fb87 Mon Sep 17 00:00:00 2001 From: jspijker Date: Fri, 3 Feb 2023 10:40:23 +0100 Subject: [PATCH 31/31] Bump up version to 21 for gutenberg profiles --- .../quality/gutenberg/gutenberg_global_fast_quality.inst.cfg | 2 +- .../quality/gutenberg/gutenberg_global_fine_quality.inst.cfg | 2 +- .../quality/gutenberg/gutenberg_global_normal_quality.inst.cfg | 2 +- .../quality/gutenberg/gutenberg_global_strong_quality.inst.cfg | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg index cfe2a2fc52..71b883c67f 100644 --- a/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_fast_quality.inst.cfg @@ -6,7 +6,7 @@ version = 4 [metadata] global_quality = True quality_type = fast -setting_version = 20 +setting_version = 21 type = quality [values] diff --git a/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg index 17793b591a..f2feaa462c 100644 --- a/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_fine_quality.inst.cfg @@ -6,7 +6,7 @@ version = 4 [metadata] global_quality = True quality_type = fine -setting_version = 20 +setting_version = 21 type = quality [values] diff --git a/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg b/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg index 1c59caaf9c..8aa92b226e 100644 --- a/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_normal_quality.inst.cfg @@ -6,7 +6,7 @@ version = 4 [metadata] global_quality = True quality_type = normal -setting_version = 20 +setting_version = 21 type = quality [values] diff --git a/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg 
b/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg index 75b12e22ff..28b09c55fa 100644 --- a/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg +++ b/resources/quality/gutenberg/gutenberg_global_strong_quality.inst.cfg @@ -6,7 +6,7 @@ version = 4 [metadata] global_quality = True quality_type = strong -setting_version = 20 +setting_version = 21 type = quality [values]
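Patch 31 above bumps setting_version from 20 to 21 in each of the four Gutenberg quality profiles. As a rough illustration of the consistency this preserves, and not the actual logic of tests/Settings/TestProfiles.py, a small stand-alone check could scan the quality directory and flag any profile whose [metadata] setting_version lags behind; the directory path and the expected value 21 come from the diff above, everything else here is assumed.

# Illustrative consistency check, not the real profile test in Cura.
import configparser
from pathlib import Path

EXPECTED_SETTING_VERSION = "21"  # value introduced by patch 31

def stale_profiles(quality_dir: Path) -> list:
    """Return quality profiles whose [metadata] setting_version is not the expected one."""
    stale = []
    for cfg_path in sorted(quality_dir.glob("*.inst.cfg")):
        parser = configparser.ConfigParser(interpolation=None)
        parser.read(cfg_path)
        if parser.get("metadata", "setting_version", fallback=None) != EXPECTED_SETTING_VERSION:
            stale.append(cfg_path.name)
    return stale

if __name__ == "__main__":
    offenders = stale_profiles(Path("resources/quality/gutenberg"))
    print("profiles with a stale setting_version:", offenders or "none")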