Remove unused scripts

CURA-9814
Joey de l'Arago 2023-01-20 11:49:34 +01:00
parent 0f52521be2
commit c99ddb30e5
7 changed files with 0 additions and 423 deletions


@@ -1,103 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright 2014 Burkhard Lück <lueck@hube-lueck.de>
Permission to use, copy, modify, and distribute this software
and its documentation for any purpose and without fee is hereby
granted, provided that the above copyright notice appear in all
copies and that both that the copyright notice and this
permission notice and warranty disclaimer appear in supporting
documentation, and that the name of the author not be used in
advertising or publicity pertaining to distribution of the
software without specific, written prior permission.
The author disclaim all warranties with regard to this
software, including all implied warranties of merchantability
and fitness. In no event shall the author be liable for any
special, indirect or consequential damages or any damages
whatsoever resulting from loss of use, data or profits, whether
in an action of contract, negligence or other tortious action,
arising out of or in connection with the use or performance of
this software.
"""
# This script generates a POT file from a JSON settings file. It
# has been adapted from createjsoncontext.py of KDE's translation
# scripts. It extracts the "label" and "description" values of
# the JSON file using the structure as used by Uranium settings files.
import sys
import os
import json
import time
import os.path
import collections
debugoutput = False #set True to print debug output in scripty's logs
basedir = sys.argv[-1]
pottxt = ""
def appendMessage(file, setting, field, value):
    global pottxt
    pottxt += "#: {0}\nmsgctxt \"{1} {2}\"\nmsgid \"{3}\"\nmsgstr \"\"\n\n".format(
        file, setting, field, value.replace("\n", "\\n").replace("\"", "\\\""))


def processSettings(file, settings):
    for name, value in settings.items():
        appendMessage(file, name, "label", value["label"])
        if "description" in value:
            appendMessage(file, name, "description", value["description"])
        if "warning_description" in value:
            appendMessage(file, name, "warning_description", value["warning_description"])
        if "error_description" in value:
            appendMessage(file, name, "error_description", value["error_description"])
        if "options" in value:
            for item, description in value["options"].items():
                appendMessage(file, name, "option {0}".format(item), description)
        if "children" in value:
            processSettings(file, value["children"])


def potheader():
    headertxt = "#, fuzzy\n"
    headertxt += "msgid \"\"\n"
    headertxt += "msgstr \"\"\n"
    headertxt += "\"Project-Id-Version: Uranium json setting files\\n\"\n"
    headertxt += "\"Report-Msgid-Bugs-To: plugins@ultimaker.com\\n\"\n"
    headertxt += "\"POT-Creation-Date: %s+0000\\n\"\n" % time.strftime("%Y-%m-%d %H:%M")
    headertxt += "\"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n\"\n"
    headertxt += "\"Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n\"\n"
    headertxt += "\"Language-Team: LANGUAGE\\n\"\n"
    headertxt += "\"MIME-Version: 1.0\\n\"\n"
    headertxt += "\"Content-Type: text/plain; charset=UTF-8\\n\"\n"
    headertxt += "\"Content-Transfer-Encoding: 8bit\\n\"\n"
    headertxt += "\n"
    return headertxt
if len(sys.argv) < 4:
    print("wrong number of args: %s" % sys.argv)
    print("\nUsage: python %s jsonfilename basedir outputfilename" % os.path.basename(sys.argv[0]))
else:
    jsonfilename = sys.argv[1]
    basedir = sys.argv[2]
    outputfilename = sys.argv[3]

    with open(jsonfilename, "r", encoding = "utf-8") as data_file:
        error = False
        jsondatadict = json.load(data_file, object_pairs_hook=collections.OrderedDict)
        if "settings" not in jsondatadict:
            print(f"Nothing to translate in file: {jsonfilename}")
            exit(1)
        processSettings(jsonfilename.replace(basedir, ""), jsondatadict["settings"])

    if pottxt != "":
        with open(outputfilename, "w", encoding = "utf-8") as output_file:
            output_file.write(potheader())
            output_file.write(pottxt)
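
For reference, a minimal sketch (not part of the removed file) of the gettext entry format that appendMessage() builds; the definition file name and setting key below are hypothetical example values:

# Illustration only: the entry format produced by appendMessage() above.
# "fdmprinter.def.json" and "layer_height" are hypothetical example values.
entry = "#: {0}\nmsgctxt \"{1} {2}\"\nmsgid \"{3}\"\nmsgstr \"\"\n\n".format(
    "fdmprinter.def.json", "layer_height", "label", "Layer Height")
print(entry)
# #: fdmprinter.def.json
# msgctxt "layer_height label"
# msgid "Layer Height"
# msgstr ""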


@@ -1,47 +0,0 @@
#!/usr/bin/env python3
import argparse
from typing import Optional
import sys
from UM.Trust import TrustBasics
# Default arguments, if arguments to the script are omitted, these values are used:
DEFAULT_PRIVATE_KEY_PATH = "./private_key.pem"
DEFAULT_PUBLIC_KEY_PATH = "./public_key.pem"
DEFAULT_PASSWORD = ""
def createAndStoreNewKeyPair(private_filename: str, public_filename: str, optional_password: Optional[str]) -> None:
    """Creates a new public and private key, and saves them to the provided filenames.

    See also 'Trust.py' in the main library and the related scripts; 'signfile.py', 'signfolder.py' in this folder.

    :param private_filename: Filename to save the private key to.
    :param public_filename: Filename to save the public key to.
    :param optional_password: Private keys can have a password (or not).
    """
    password = None if optional_password == "" else optional_password
    private_key, public_key = TrustBasics.generateNewKeyPair()
    TrustBasics.saveKeyPair(private_key, private_filename, public_filename, password)


def mainfunc():
    """Arguments:

    `-k <filename>` or `--private <filename>` will store the generated private key to <filename>
    `-p <filename>` or `--public <filename>` will store the generated public key to <filename>
    `-w <password>` or `--password <password>` will give the private key a password (none if omitted, which is default)
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-k", "--private", type = str, default = DEFAULT_PRIVATE_KEY_PATH)
    parser.add_argument("-p", "--public", type = str, default = DEFAULT_PUBLIC_KEY_PATH)
    parser.add_argument("-w", "--password", type = str, default = DEFAULT_PASSWORD)
    args = parser.parse_args()
    createAndStoreNewKeyPair(args.private, args.public, args.password)


if __name__ == "__main__":
    sys.exit(mainfunc())
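
For context, a hedged usage sketch (not part of the removed file): invoking the key pair script from Python. The script name and output paths are assumptions, not taken from the diff.

# Illustration only: one way the removed script could be invoked.
import subprocess

subprocess.run([
    "python3", "createkeypair.py",  # assumed script name
    "-k", "private_key.pem",        # where the private key is written
    "-p", "public_key.pem",         # where the public key is written
    "-w", "",                       # empty password: the key is stored without one
], check=True)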


@@ -1,72 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright 2014 Burkhard Lück <lueck@hube-lueck.de>
Permission to use, copy, modify, and distribute this software
and its documentation for any purpose and without fee is hereby
granted, provided that the above copyright notice appear in all
copies and that both that the copyright notice and this
permission notice and warranty disclaimer appear in supporting
documentation, and that the name of the author not be used in
advertising or publicity pertaining to distribution of the
software without specific, written prior permission.
The author disclaim all warranties with regard to this
software, including all implied warranties of merchantability
and fitness. In no event shall the author be liable for any
special, indirect or consequential damages or any damages
whatsoever resulting from loss of use, data or profits, whether
in an action of contract, negligence or other tortious action,
arising out of or in connection with the use or performance of
this software.
"""
# This script generates a POT file from a plugin.json file. It
# has been adapted from createjsoncontext.py of KDE's translation
# scripts. It extracts the "name" and "description" values of
# the plugin.json file as used by Uranium plugins.
import sys
import os.path
import collections
import json
debugoutput = False #set True to print debug output in scripty's logs
basedir = sys.argv[-1]
pottxt = ""
def appendMessage(file, field, value):
    global pottxt
    pottxt += "#: {0}\nmsgctxt \"{1}\"\nmsgid \"{2}\"\nmsgstr \"\"\n\n".format(
        file, field, value.replace("\n", "\\n").replace("\"", "\\\""))


if len(sys.argv) < 4:
    print("wrong number of args: %s" % sys.argv)
    print("\nUsage: python %s jsonfilename basedir outputfilename" % os.path.basename(sys.argv[0]))
else:
    json_filename = sys.argv[1]
    basedir = sys.argv[2]
    output_filename = sys.argv[3]

    with open(json_filename, "r", encoding = "utf-8") as data_file:
        error = False
        jsondatadict = json.load(data_file, object_pairs_hook=collections.OrderedDict)
        if "name" not in jsondatadict or ("api" not in jsondatadict and "supported_sdk_versions" not in jsondatadict) or "version" not in jsondatadict:
            print("The plugin.json file found at %s is invalid, ignoring it" % json_filename)
            exit(1)

        file = json_filename.replace(basedir, "")
        if "description" in jsondatadict:
            appendMessage(file, "description", jsondatadict["description"])
        if "name" in jsondatadict:
            appendMessage(file, "name", jsondatadict["name"])

    if pottxt != "":
        with open(output_filename, "a", encoding = "utf-8") as output_file:
            output_file.write(pottxt)
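
For reference, a sketch (not part of the removed file) of the plugin metadata shape that the validation above accepts; the field values are hypothetical:

# Illustration only: a plugin.json payload that passes the check above.
# "name" and "version" must be present, plus either "api" or "supported_sdk_versions";
# "name" and "description" are the fields extracted for translation.
plugin_metadata = {
    "name": "Example Plugin",
    "version": "1.0.0",
    "api": 8,  # or: "supported_sdk_versions": ["8.0.0"]
    "description": "Adds an example feature."
}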


@@ -1,23 +0,0 @@
#!/bin/bash
#
# Use xgettext to extract all strings from a set of python files.
# Argument 1 is the directory to search for python files, argument 2
# is the destination file.
#
# This script will extract strings marked using i18n or i18nc methods.
# See UM/i18n.py for the relevant methods.
#
dir=$1
dest=$2
touch $dest
for f in $(find -L "$dir" -name \*.py)
do
echo "Extracting strings from python file: $f"
xgettext --from-code=UTF-8 --join-existing --sort-by-file --language=python -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f
done
for f in $(find -L "$dir" -name \*.qml)
do
echo "Extracting strings from qml file: $f"
xgettext --from-code=UTF-8 --join-existing --sort-by-file --language=javascript -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -o $dest $f
done
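
The xgettext keyword flags above (-ki18n:1, -ki18nc:1c,2, ...) match calls like the following; a minimal sketch assuming Uranium's i18nCatalog wrapper from UM/i18n.py, with hypothetical message text:

# Illustration only: string markers that the keyword flags above extract.
from UM.i18n import i18nCatalog

catalog = i18nCatalog("uranium")                       # catalog name is an assumption
plain = catalog.i18n("A translatable string")          # matched by -ki18n:1
in_context = catalog.i18nc("@label", "Layer Height")   # matched by -ki18nc:1c,2 (context, text)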


@@ -1,42 +0,0 @@
#! /bin/bash
# Extract strings from all JSON files in a directory into files with matching names ending with .pot.
#
# This script will extract strings from all JSON files in the directory
# passed as first argument. The second argument is the destination
# directory for the extracted message file.
#
# This script uses createjsoncontext to generate the actual message file
# from the JSON file.
#
# This script is based on handle_json_files.sh from KDE's translation
# scripts.
# handle_json_files.sh is copyright 2014 Burkhard Lück <lueck@hube-lueck.de>
scriptdir=$(dirname $0)
extract() {
    basedir=$1
    dest=$2
    file=$3

    python3 $scriptdir/createjsoncontext.py $file $basedir json.$$.tmp
    if test $? -eq 1; then
        return
    fi

    echo "Extracted messages from $file"
    msguniq --to-code=UTF-8 -o json.$$ json.$$.tmp
    if test -f json.$$; then
        destfile="$dest/$(basename $file).pot"
        mv json.$$ $destfile
    fi
    rm -f json.$$ json.$$.tmp
}

dir=$1; shift
dest=$1; shift

for file in $(find -L "$dir" -name \*.json | grep -v 'tests'); do
    extract $dir $dest $file
done
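
For reference, a sketch (not part of the removed file) of the nested "settings" structure whose labels and descriptions end up in the generated .pot files (compare processSettings earlier in this commit); the setting data is hypothetical:

# Illustration only: the JSON shape, written as a Python dict, that the extraction expects.
settings_json = {
    "settings": {
        "resolution": {                 # hypothetical setting key
            "label": "Quality",
            "description": "Settings that affect print quality.",
            "children": {
                "layer_height": {
                    "label": "Layer Height",
                    "description": "The height of each layer."
                }
            }
        }
    }
}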


@@ -1,14 +0,0 @@
#! /bin/bash
# Extract strings from all plugins
#
scriptdir=$(dirname $0)
dir=$1; shift
dest=$1; shift
for file in $(find -L "$dir" -name plugin.json | grep -v 'tests'); do
python3 $scriptdir/createplugincontext.py $file $dir $dest
done


@@ -1,122 +0,0 @@
# Copyright (c) 2023 UltiMaker.
# Cura is released under the terms of the LGPLv3 or higher.
import argparse
import os
import subprocess
from os.path import isfile
from pathlib import Path
def extract_all_strings(root_path: Path, script_path: Path, translations_root_path: Path, all_strings_pot_path: Path):
    """ Extracts all strings into a pot file with empty translations.

    Strings are extracted everywhere that i18n is used in python and qml in the project. It also checks the project
    for JSON files with 'settings' in the root node and extracts these for translation as well.

    @param root_path: The root path of the project. This is the root for string searching.
    @param script_path: The location of the bash scripts used for translating.
    @param translations_root_path: The root of the translations folder (resources/i18n).
    @param all_strings_pot_path: The path of the pot file where all strings will be written (resources/i18n/cura.pot).
    """
    # Extract the setting strings from any json file with settings at its root
    extract_json_arguments = [
        script_path.joinpath("extract-json"),
        root_path.joinpath("resources", "definitions"),
        translations_root_path
    ]
    subprocess.run(extract_json_arguments)

    # Extract all strings from qml and py files
    extract_qml_py_arguments = [
        script_path.joinpath("extract-all"),
        root_path,
        all_strings_pot_path
    ]
    subprocess.run(extract_qml_py_arguments)

    # Extract the name and description from all plugins
    extract_plugin_arguments = [
        script_path.joinpath("extract-plugins"),
        root_path.joinpath("plugins"),
        all_strings_pot_path
    ]
    subprocess.run(extract_plugin_arguments)

    # Convert the output file to utf-8
    convert_encoding_arguments = [
        "msgconv",
        "--to-code=UTF-8",
        all_strings_pot_path,
        "-o",
        all_strings_pot_path
    ]
    subprocess.run(convert_encoding_arguments)
def update_po_files_all_languages(translation_root_path: Path) -> None:
    """ Updates all po files in translation_root_path with new strings mapped to blank translations.

    This will take all newly generated pot files in the root of the translations path (i18n/cura.pot,
    i18n/fdmextruder.def.json.pot, ...) and merge them with the existing po files for every language. This creates
    new po files with empty translations for all new strings added to the project.

    @param translation_root_path: Root of the translations folder (resources/i18n).
    """
    new_pot_files = []
    for file in os.listdir(translation_root_path):
        path = translation_root_path.joinpath(file)
        if path.suffix == ".pot":
            new_pot_files.append(path)
    print(new_pot_files)

    for directory, _, po_files in os.walk(translation_root_path):
        print(directory)
        print(po_files)
        for pot in new_pot_files:
            po_filename = pot.name.rstrip("t")  # "cura.pot" -> "cura.po"
            if po_filename not in po_files:
                continue  # We only want to merge files that have matching names

            pot_file = pot
            po_file = Path(directory, po_filename).absolute()
            merge_files_arguments = [
                "msgmerge",
                "--no-wrap",
                "--no-fuzzy-matching",
                "--update",
                "--sort-by-file",  # Sort by file location, this is better than pure sorting for translators
                po_file,   # po file that will be updated
                pot_file   # source of new strings
            ]
            subprocess.run(merge_files_arguments)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Extract strings from project into .po files")
parser.add_argument("root_path", type=str, help="The root of the project to extract translatable strings from")
parser.add_argument("translation_file_name", type=str, help="The .pot file that all strings from python/qml files will be inserted into")
parser.add_argument("script_path", type=str, help="The path containing the scripts for translating files")
args = parser.parse_args()
root_path = Path(args.root_path) # root of the project
script_path = Path(args.script_path) # location of bash scripts
# All the translation files should be in this path. Each language in a folder corresponding with its lang code (resource/i18n/en_US/)
translations_root_path = root_path.joinpath("resources", "i18n")
translations_root_path.mkdir(parents=True, exist_ok=True) # Make sure we have an output path
all_strings_pot_path = translations_root_path.joinpath(args.translation_file_name) # pot file containing all strings untranslated
if os.path.exists(all_strings_pot_path):
os.remove(all_strings_pot_path) # Clear the output file, otherwise deleted strings will still be in the output.
extract_all_strings(root_path, script_path, translations_root_path, all_strings_pot_path)
update_po_files_all_languages(translations_root_path)
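
A hedged usage sketch for the script above; the script file name and the paths are placeholders, while the three positional arguments follow the argparse definitions:

# Illustration only: invoking the removed update script with its positional
# arguments (root_path, translation_file_name, script_path). Paths are placeholders.
import subprocess

subprocess.run([
    "python3", "update_translations.py",  # assumed script name, not taken from the diff
    "/path/to/Cura",                      # root_path: project root to scan for strings
    "cura.pot",                           # translation_file_name: written under resources/i18n/
    "/path/to/Cura/scripts",              # script_path: folder containing the extract-* helpers
], check=True)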