forked from 0ad/0ad

Enable ruff rules for code simplification

This enables ruff rules which check for code that can be simplified to
improve readability.

The rules additionally enabled by this change are (a brief sketch of each pattern follows the list):

- remove unnecessary nesting of if-statements (SIM102)
- use contextlib.suppress() for no-op exception handling (SIM105)
- use enumerate() for counting in loops (SIM113)
- use context managers for opening files (SIM115)
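
A minimal, hypothetical sketch (not taken from this commit; the paths and names are made up) of the patterns these rules flag and the simplified forms they suggest:

import contextlib
import os

# SIM105: use contextlib.suppress() for no-op exception handling,
# rather than try: ... except OSError: pass.
with contextlib.suppress(OSError):
    os.makedirs("build")

# SIM102: merge nested if-statements into a single condition,
# rather than one if-statement wrapping another.
path = "build/example.txt"
if os.path.exists("build") and os.path.isdir("build"):
    # SIM115: open files through a context manager so they are always closed,
    # rather than open(path, "w").write(...).
    with open(path, "w") as fd:
        fd.write("hello\nworld\n")

    # SIM113: use enumerate() for counting in loops,
    # rather than initializing and incrementing a counter by hand.
    with open(path) as fd:
        for lineno, line in enumerate(fd, start=1):
            print(lineno, line.rstrip())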
Dunedan 2024-08-29 07:00:43 +02:00
parent 028ec40165
commit c3b99feb60
Signed by untrusted user: Dunedan
GPG Key ID: 885B16854284E0B2
10 changed files with 131 additions and 127 deletions

View File

@@ -35,10 +35,6 @@ ignore = [
"S320",
"S603",
"S607",
"SIM102",
"SIM105",
"SIM113",
"SIM115",
"T20",
"TD",
"TRY002",

View File

@@ -4,6 +4,7 @@
import os
import xml.etree.ElementTree as ET
from contextlib import suppress
from ctypes import *
@@ -30,7 +31,8 @@ def log(severity, message):
clog = CFUNCTYPE(None, c_int, c_char_p)(log)
# (the CFUNCTYPE must not be GC'd, so try to keep a reference)
library.set_logger(clog)
skeleton_definitions = open(f"{binaries}/data/tests/collada/skeletons.xml").read()
with open(f"{binaries}/data/tests/collada/skeletons.xml") as fd:
skeleton_definitions = fd.read()
library.set_skeleton_definitions(skeleton_definitions, len(skeleton_definitions))
@@ -63,10 +65,8 @@ def clean_dir(path):
except OSError:
pass # (ignore errors if files are in use)
# Make sure the directory exists
try:
with suppress(OSError):
os.makedirs(path)
except OSError:
pass # (ignore errors if it already exists)
def create_actor(mesh, texture, anims, props_):
@@ -127,9 +127,10 @@ for test_file in ["xsitest3c", "xsitest3e", "jav2d", "jav2d2"]:
input_filename = f"{test_data}/{test_file}.dae"
output_filename = f"{test_mod}/art/meshes/{test_file}.pmd"
file_input = open(input_filename).read()
file_output = convert_dae_to_pmd(file_input)
open(output_filename, "wb").write(file_output)
with open(input_filename) as input_fd, open(output_filename, "wb") as output_fd:
file_input = input_fd.read()
file_output = convert_dae_to_pmd(file_input)
output_fd.write(file_output)
xml = create_actor(
test_file,
@@ -142,10 +143,12 @@ for test_file in ["xsitest3c", "xsitest3e", "jav2d", "jav2d2"]:
],
[("helmet", "teapot_basic_static")],
)
open(f"{test_mod}/art/actors/{test_file}.xml", "w").write(xml)
with open(f"{test_mod}/art/actors/{test_file}.xml", "w") as fd:
fd.write(xml)
xml = create_actor_static(test_file, "male")
open(f"{test_mod}/art/actors/{test_file}_static.xml", "w").write(xml)
with open(f"{test_mod}/art/actors/{test_file}_static.xml", "w") as fd:
fd.write(xml)
# for test_file in ['jav2','jav2b', 'jav2d']:
for test_file in ["xsitest3c", "xsitest3e", "jav2d", "jav2d2"]:
@@ -155,6 +158,7 @@ for test_file in ["xsitest3c", "xsitest3e", "jav2d", "jav2d2"]:
input_filename = f"{test_data}/{test_file}.dae"
output_filename = f"{test_mod}/art/animation/{test_file}.psa"
file_input = open(input_filename).read()
file_output = convert_dae_to_psa(file_input)
open(output_filename, "wb").write(file_output)
with open(input_filename) as input_fd, open(output_filename, "wb") as output_fd:
file_input = input_fd.read()
file_output = convert_dae_to_psa(file_input)
output_fd.write(file_output)

View File

@@ -273,12 +273,14 @@ class CheckRefs:
custom_phase_techs = []
for fp, _ in self.find_files("simulation/data/technologies", "json"):
path_str = str(fp)
if "phase" in path_str:
# Get the last part of the phase tech name.
if Path(path_str).stem.split("_")[-1] in existing_civs:
custom_phase_techs.append(
fp.relative_to("simulation/data/technologies").as_posix()
)
if "phase" not in path_str:
continue
# Get the last part of the phase tech name.
if Path(path_str).stem.split("_")[-1] in existing_civs:
custom_phase_techs.append(
fp.relative_to("simulation/data/technologies").as_posix()
)
return custom_phase_techs
@@ -302,27 +304,28 @@ class CheckRefs:
if (
entity.find("VisualActor") is not None
and entity.find("VisualActor").find("Actor") is not None
and entity.find("Identity") is not None
):
if entity.find("Identity") is not None:
phenotype_tag = entity.find("Identity").find("Phenotype")
phenotypes = (
phenotype_tag.text.split()
if (phenotype_tag is not None and phenotype_tag.text)
else ["default"]
)
actor = entity.find("VisualActor").find("Actor")
if "{phenotype}" in actor.text:
for phenotype in phenotypes:
# See simulation2/components/CCmpVisualActor.cpp and Identity.js
# for explanation.
actor_path = actor.text.replace("{phenotype}", phenotype)
self.deps.append((fp, Path(f"art/actors/{actor_path}")))
else:
actor_path = actor.text
phenotype_tag = entity.find("Identity").find("Phenotype")
phenotypes = (
phenotype_tag.text.split()
if (phenotype_tag is not None and phenotype_tag.text)
else ["default"]
)
actor = entity.find("VisualActor").find("Actor")
if "{phenotype}" in actor.text:
for phenotype in phenotypes:
# See simulation2/components/CCmpVisualActor.cpp and Identity.js
# for explanation.
actor_path = actor.text.replace("{phenotype}", phenotype)
self.deps.append((fp, Path(f"art/actors/{actor_path}")))
foundation_actor = entity.find("VisualActor").find("FoundationActor")
if foundation_actor is not None:
self.deps.append((fp, Path(f"art/actors/{foundation_actor.text}")))
else:
actor_path = actor.text
self.deps.append((fp, Path(f"art/actors/{actor_path}")))
foundation_actor = entity.find("VisualActor").find("FoundationActor")
if foundation_actor is not None:
self.deps.append((fp, Path(f"art/actors/{foundation_actor.text}")))
if entity.find("Sound") is not None:
phenotype_tag = entity.find("Identity").find("Phenotype")
phenotypes = (

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python3
import codecs
import math
import cairo
@@ -87,9 +86,8 @@ class Glyph:
# Load the set of characters contained in the given text file
def load_char_list(filename):
f = codecs.open(filename, "r", "utf-8")
chars = f.read()
f.close()
with open(filename) as f:
chars = f.read()
return set(chars)
@@ -169,33 +167,31 @@ def generate_font(outname, ttfNames, loadopts, size, renderstyle, dsizes):
surface.write_to_png(f"{outname}.png")
# Output the .fnt file with all the glyph positions etc
fnt = open(f"{outname}.fnt", "w")
fnt.write("101\n")
fnt.write("%d %d\n" % (w, h))
fnt.write("%s\n" % ("rgba" if "colour" in renderstyle else "a"))
fnt.write("%d\n" % len(glyphs))
fnt.write("%d\n" % linespacing)
fnt.write("%d\n" % charheight)
# sorting unneeded, as glyphs are added in increasing order
# glyphs.sort(key = lambda g: ord(g.char))
for g in glyphs:
x0 = g.x0
y0 = g.y0
# UGLY HACK: see http://trac.wildfiregames.com/ticket/1039 ;
# to handle a-macron-acute characters without the hassle of
# doing proper OpenType GPOS layout (which the font
# doesn't support anyway), we'll just shift the combining acute
# glyph by an arbitrary amount to make it roughly the right
# place when used after an a-macron glyph.
if ord(g.char) == 0x0301:
y0 += charheight / 3
with open(f"{outname}.fnt", "w") as fnt:
fnt.write("101\n")
fnt.write("%d %d\n" % (w, h))
fnt.write("%s\n" % ("rgba" if "colour" in renderstyle else "a"))
fnt.write("%d\n" % len(glyphs))
fnt.write("%d\n" % linespacing)
fnt.write("%d\n" % charheight)
# sorting unneeded, as glyphs are added in increasing order
# glyphs.sort(key = lambda g: ord(g.char))
for g in glyphs:
x0 = g.x0
y0 = g.y0
# UGLY HACK: see http://trac.wildfiregames.com/ticket/1039 ;
# to handle a-macron-acute characters without the hassle of
# doing proper OpenType GPOS layout (which the font
# doesn't support anyway), we'll just shift the combining acute
# glyph by an arbitrary amount to make it roughly the right
# place when used after an a-macron glyph.
if ord(g.char) == 0x0301:
y0 += charheight / 3
fnt.write(
"%d %d %d %d %d %d %d %d\n"
% (ord(g.char), g.pos.x, h - g.pos.y, g.w, g.h, -x0, y0, g.xadvance)
)
fnt.close()
fnt.write(
"%d %d %d %d %d %d %d %d\n"
% (ord(g.char), g.pos.x, h - g.pos.y, g.w, g.h, -x0, y0, g.xadvance)
)
return
print("Failed to fit glyphs in texture")

View File

@@ -41,32 +41,36 @@ def main():
for root, folders, _ in os.walk(projectRootDirectory):
for folder in folders:
if folder == l10nFolderName:
if os.path.exists(os.path.join(root, folder, transifexClientFolder)):
path = os.path.join(root, folder, "*.po")
files = glob.glob(path)
for file in files:
usernames = []
reached = False
for line in fileinput.input(
file.replace("\\", "/"), inplace=True, encoding="utf-8"
):
if reached:
if line == "# \n":
line = ""
m = translatorMatch.match(line)
if m:
if m.group(1) in usernames:
line = ""
else:
line = m.group(1) + m.group(2) + "\n"
usernames.append(m.group(1))
m2 = lastTranslatorMatch.match(line)
if m2:
line = re.sub(lastTranslatorMatch, r"\1\2", line)
elif line.strip() == "# Translators:":
reached = True
sys.stdout.write(line)
if folder != l10nFolderName:
continue
if not os.path.exists(os.path.join(root, folder, transifexClientFolder)):
continue
path = os.path.join(root, folder, "*.po")
files = glob.glob(path)
for file in files:
usernames = []
reached = False
for line in fileinput.input(
file.replace("\\", "/"), inplace=True, encoding="utf-8"
):
if reached:
if line == "# \n":
line = ""
m = translatorMatch.match(line)
if m:
if m.group(1) in usernames:
line = ""
else:
line = m.group(1) + m.group(2) + "\n"
usernames.append(m.group(1))
m2 = lastTranslatorMatch.match(line)
if m2:
line = re.sub(lastTranslatorMatch, r"\1\2", line)
elif line.strip() == "# Translators:":
reached = True
sys.stdout.write(line)
if __name__ == "__main__":

View File

@@ -42,9 +42,11 @@ from i18n_helper import l10nFolderName, projectRootDirectory, transifexClientFolder
poLocations = []
for root, folders, _filenames in os.walk(projectRootDirectory):
for folder in folders:
if folder == l10nFolderName:
if os.path.exists(os.path.join(root, folder, transifexClientFolder)):
poLocations.append(os.path.join(root, folder))
if folder != l10nFolderName:
continue
if os.path.exists(os.path.join(root, folder, transifexClientFolder)):
poLocations.append(os.path.join(root, folder))
creditsLocation = os.path.join(
projectRootDirectory,
@@ -122,6 +124,5 @@ for langCode, langList in sorted(langsLists.items()):
newJSONData["Content"] = sorted(newJSONData["Content"], key=lambda x: x["LangName"])
# Save the JSON data to the credits file
creditsFile = open(creditsLocation, "w", encoding="utf-8")
json.dump(newJSONData, creditsFile, indent=4)
creditsFile.close()
with open(creditsLocation, "w", encoding="utf-8") as creditsFile:
json.dump(newJSONData, creditsFile, indent=4)

View File

@@ -300,9 +300,9 @@ class txt(Extractor):
def extractFromFile(self, filepath):
with codecs.open(filepath, "r", encoding="utf-8-sig") as fileObject:
lineno = 0
for line in [line.strip("\n\r") for line in fileObject.readlines()]:
lineno += 1
for lineno, line in enumerate(
[line.strip("\n\r") for line in fileObject.readlines()], start=1
):
if line:
yield line, None, None, lineno, []
@@ -348,7 +348,6 @@ class json(Extractor):
)
def parseList(self, itemsList):
index = 0
for listItem in itemsList:
if isinstance(listItem, list):
for message, context in self.parseList(listItem):
@@ -356,7 +355,6 @@ class json(Extractor):
elif isinstance(listItem, dict):
for message, context in self.parseDictionary(listItem):
yield message, context
index += 1
def parseDictionary(self, dictionary):
for keyword in dictionary:
@@ -398,7 +396,6 @@ class json(Extractor):
return string, context
def extractList(self, itemsList, keyword):
index = 0
for listItem in itemsList:
if isinstance(listItem, str):
yield self.extractString(listItem, keyword)
@@ -406,7 +403,6 @@ class json(Extractor):
extract = self.extractDictionary(listItem[keyword], keyword)
if extract:
yield extract
index += 1
def extractDictionary(self, dictionary, keyword):
message = dictionary.get("_string", None)
@@ -515,7 +511,8 @@ class ini(Extractor):
import ConfigParser
config = ConfigParser.RawConfigParser()
config.readfp(FakeSectionHeader(open(filepath)))
with open(filepath) as fd:
config.read_file(FakeSectionHeader(fd))
for keyword in self.keywords:
message = config.get("root", keyword).strip('"').strip("'")
context = None

View File

@@ -45,12 +45,9 @@ class Catalog(BabelCatalog):
@staticmethod
def readFrom(file_path, locale=None):
return read_po(open(file_path, "r+", encoding="utf-8"), locale=locale)
with open(file_path, "r+", encoding="utf-8") as fd:
return read_po(fd, locale=locale)
def writeTo(self, file_path):
return write_po(
fileobj=open(file_path, "wb+"),
catalog=self,
width=90,
sort_by_file=True,
)
with open(file_path, "wb+") as fd:
return write_po(fileobj=fd, catalog=self, width=90, sort_by_file=True)

View File

@@ -25,12 +25,14 @@ from i18n_helper import l10nFolderName, projectRootDirectory, transifexClientFolder
def main():
for root, folders, _ in os.walk(projectRootDirectory):
for folder in folders:
if folder == l10nFolderName:
if os.path.exists(os.path.join(root, folder, transifexClientFolder)):
path = os.path.join(root, folder)
os.chdir(path)
print(f"INFO: Starting to pull translations in {path}...")
subprocess.run(["tx", "pull", "-a", "-f"], check=False)
if folder != l10nFolderName:
continue
if os.path.exists(os.path.join(root, folder, transifexClientFolder)):
path = os.path.join(root, folder)
os.chdir(path)
print(f"INFO: Starting to pull translations in {path}...")
subprocess.run(["tx", "pull", "-a", "-f"], check=False)
if __name__ == "__main__":

View File

@@ -21,6 +21,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ruff: noqa: SIM115
import glob
import os
import sys
@@ -271,9 +273,11 @@ def CalcUnit(UnitName, existingUnit=None):
Template.find("./Resistance/Entity/Damage/" + atttype)
)
if Template.find("./UnitMotion") is not None:
if Template.find("./UnitMotion/WalkSpeed") is not None:
unit["WalkSpeed"] = ExtractValue(Template.find("./UnitMotion/WalkSpeed"))
if (
Template.find("./UnitMotion") is not None
and Template.find("./UnitMotion/WalkSpeed") is not None
):
unit["WalkSpeed"] = ExtractValue(Template.find("./UnitMotion/WalkSpeed"))
if Template.find("./Identity/VisibleClasses") is not None:
newClasses = Template.find("./Identity/VisibleClasses").text.split(" ")