Enable ruff rules for docstrings and comments
This enables some ruff rules for docstrings and comments. The idea is to not enforce the presence of docstrings, but to ensure they are properly formatted if they're present. For comments this adds checks that they don't contain code and verify the formatting of comments with "TODO" tags. As part of this, some commented-out code which hasn't been touched in the past 10 years gets removed as well. The rules enabled by this are: - check formatting of existing docstrings (D200-) - check comments for code (ERA) - check formatting of TODO tags (TD001, TD004-)
This commit is contained in:
parent
91ab55d6ea
commit
bcf97b608b
@ -9,10 +9,9 @@ ignore = [
|
|||||||
"ANN",
|
"ANN",
|
||||||
"C90",
|
"C90",
|
||||||
"COM812",
|
"COM812",
|
||||||
"D",
|
"D10",
|
||||||
"DTZ005",
|
"DTZ005",
|
||||||
"EM",
|
"EM",
|
||||||
"ERA",
|
|
||||||
"FA",
|
"FA",
|
||||||
"FIX",
|
"FIX",
|
||||||
"FBT",
|
"FBT",
|
||||||
@ -36,7 +35,8 @@ ignore = [
|
|||||||
"S603",
|
"S603",
|
||||||
"S607",
|
"S607",
|
||||||
"T20",
|
"T20",
|
||||||
"TD",
|
"TD002",
|
||||||
|
"TD003",
|
||||||
"TRY002",
|
"TRY002",
|
||||||
"TRY003",
|
"TRY003",
|
||||||
"TRY004",
|
"TRY004",
|
||||||
|
@ -33,9 +33,7 @@ class SimulTemplateEntity:
|
|||||||
return main_mod
|
return main_mod
|
||||||
|
|
||||||
def apply_layer(self, base_tag, tag):
|
def apply_layer(self, base_tag, tag):
|
||||||
"""
|
"""Apply tag layer to base_tag."""
|
||||||
apply tag layer to base_tag
|
|
||||||
"""
|
|
||||||
if tag.get("datatype") == "tokens":
|
if tag.get("datatype") == "tokens":
|
||||||
base_tokens = split(r"\s+", base_tag.text or "")
|
base_tokens = split(r"\s+", base_tag.text or "")
|
||||||
tokens = split(r"\s+", tag.text or "")
|
tokens = split(r"\s+", tag.text or "")
|
||||||
@ -89,9 +87,7 @@ class SimulTemplateEntity:
|
|||||||
return entity
|
return entity
|
||||||
|
|
||||||
def _load_inherited(self, base_path, vfs_path, mods, base=None):
|
def _load_inherited(self, base_path, vfs_path, mods, base=None):
|
||||||
"""
|
# vfs_path should be relative to base_path in a mod
|
||||||
vfs_path should be relative to base_path in a mod
|
|
||||||
"""
|
|
||||||
if "|" in vfs_path:
|
if "|" in vfs_path:
|
||||||
paths = vfs_path.split("|", 1)
|
paths = vfs_path.split("|", 1)
|
||||||
base = self._load_inherited(base_path, paths[1], mods, base)
|
base = self._load_inherited(base_path, paths[1], mods, base)
|
||||||
@ -119,15 +115,16 @@ class SimulTemplateEntity:
|
|||||||
|
|
||||||
|
|
||||||
def find_files(vfs_root, mods, vfs_path, *ext_list):
|
def find_files(vfs_root, mods, vfs_path, *ext_list):
|
||||||
"""
|
"""Find files.
|
||||||
returns a list of 2-size tuple with:
|
|
||||||
|
Returns a list of 2-size tuple with:
|
||||||
- Path relative to the mod base
|
- Path relative to the mod base
|
||||||
- full Path
|
- full Path
|
||||||
"""
|
"""
|
||||||
full_exts = ["." + ext for ext in ext_list]
|
full_exts = ["." + ext for ext in ext_list]
|
||||||
|
|
||||||
def find_recursive(dp, base):
|
def find_recursive(dp, base):
|
||||||
"""(relative Path, full Path) generator"""
|
"""(relative Path, full Path) generator."""
|
||||||
if dp.is_dir():
|
if dp.is_dir():
|
||||||
if dp.name not in (".svn", ".git") and not dp.name.endswith("~"):
|
if dp.name not in (".svn", ".git") and not dp.name.endswith("~"):
|
||||||
for fp in dp.iterdir():
|
for fp in dp.iterdir():
|
||||||
|
@ -29,12 +29,12 @@ class Point:
|
|||||||
self.y = y
|
self.y = y
|
||||||
|
|
||||||
def __cmp__(self, other):
|
def __cmp__(self, other):
|
||||||
"""Compares the starting position of height slices"""
|
"""Compare the starting position of height slices."""
|
||||||
return self.x - other.x
|
return self.x - other.x
|
||||||
|
|
||||||
|
|
||||||
class RectanglePacker:
|
class RectanglePacker:
|
||||||
"""Base class for rectangle packing algorithms
|
"""Base class for rectangle packing algorithms.
|
||||||
|
|
||||||
By uniting all rectangle packers under this common base class, you can
|
By uniting all rectangle packers under this common base class, you can
|
||||||
easily switch between different algorithms to find the most efficient or
|
easily switch between different algorithms to find the most efficient or
|
||||||
@ -45,7 +45,7 @@ class RectanglePacker:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, packingAreaWidth, packingAreaHeight):
|
def __init__(self, packingAreaWidth, packingAreaHeight):
|
||||||
"""Initializes a new rectangle packer
|
"""Initialize a new rectangle packer.
|
||||||
|
|
||||||
packingAreaWidth: Maximum width of the packing area
|
packingAreaWidth: Maximum width of the packing area
|
||||||
packingAreaHeight: Maximum height of the packing area
|
packingAreaHeight: Maximum height of the packing area
|
||||||
@ -54,7 +54,7 @@ class RectanglePacker:
|
|||||||
self.packingAreaHeight = packingAreaHeight
|
self.packingAreaHeight = packingAreaHeight
|
||||||
|
|
||||||
def Pack(self, rectangleWidth, rectangleHeight):
|
def Pack(self, rectangleWidth, rectangleHeight):
|
||||||
"""Allocates space for a rectangle in the packing area
|
"""Allocate space for a rectangle in the packing area.
|
||||||
|
|
||||||
rectangleWidth: Width of the rectangle to allocate
|
rectangleWidth: Width of the rectangle to allocate
|
||||||
rectangleHeight: Height of the rectangle to allocate
|
rectangleHeight: Height of the rectangle to allocate
|
||||||
@ -69,7 +69,7 @@ class RectanglePacker:
|
|||||||
return point
|
return point
|
||||||
|
|
||||||
def TryPack(self, rectangleWidth, rectangleHeight):
|
def TryPack(self, rectangleWidth, rectangleHeight):
|
||||||
"""Tries to allocate space for a rectangle in the packing area
|
"""Try to allocate space for a rectangle in the packing area.
|
||||||
|
|
||||||
rectangleWidth: Width of the rectangle to allocate
|
rectangleWidth: Width of the rectangle to allocate
|
||||||
rectangleHeight: Height of the rectangle to allocate
|
rectangleHeight: Height of the rectangle to allocate
|
||||||
@ -102,8 +102,7 @@ class DumbRectanglePacker(RectanglePacker):
|
|||||||
|
|
||||||
|
|
||||||
class CygonRectanglePacker(RectanglePacker):
|
class CygonRectanglePacker(RectanglePacker):
|
||||||
"""
|
"""Packer using a custom algorithm by Markus 'Cygon' Ewald.
|
||||||
Packer using a custom algorithm by Markus 'Cygon' Ewald
|
|
||||||
|
|
||||||
Algorithm conceived by Markus Ewald (cygon at nuclex dot org), though
|
Algorithm conceived by Markus Ewald (cygon at nuclex dot org), though
|
||||||
I'm quite sure I'm not the first one to come up with it :)
|
I'm quite sure I'm not the first one to come up with it :)
|
||||||
@ -120,7 +119,7 @@ class CygonRectanglePacker(RectanglePacker):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, packingAreaWidth, packingAreaHeight):
|
def __init__(self, packingAreaWidth, packingAreaHeight):
|
||||||
"""Initializes a new rectangle packer
|
"""Initialize a new rectangle packer.
|
||||||
|
|
||||||
packingAreaWidth: Maximum width of the packing area
|
packingAreaWidth: Maximum width of the packing area
|
||||||
packingAreaHeight: Maximum height of the packing area
|
packingAreaHeight: Maximum height of the packing area
|
||||||
@ -134,7 +133,7 @@ class CygonRectanglePacker(RectanglePacker):
|
|||||||
self.heightSlices.append(Point(0, 0))
|
self.heightSlices.append(Point(0, 0))
|
||||||
|
|
||||||
def TryPack(self, rectangleWidth, rectangleHeight):
|
def TryPack(self, rectangleWidth, rectangleHeight):
|
||||||
"""Tries to allocate space for a rectangle in the packing area
|
"""Try to allocate space for a rectangle in the packing area.
|
||||||
|
|
||||||
rectangleWidth: Width of the rectangle to allocate
|
rectangleWidth: Width of the rectangle to allocate
|
||||||
rectangleHeight: Height of the rectangle to allocate
|
rectangleHeight: Height of the rectangle to allocate
|
||||||
@ -160,7 +159,7 @@ class CygonRectanglePacker(RectanglePacker):
|
|||||||
return placement
|
return placement
|
||||||
|
|
||||||
def tryFindBestPlacement(self, rectangleWidth, rectangleHeight):
|
def tryFindBestPlacement(self, rectangleWidth, rectangleHeight):
|
||||||
"""Finds the best position for a rectangle of the given dimensions
|
"""Find the best position for a rectangle of the given dimensions.
|
||||||
|
|
||||||
rectangleWidth: Width of the rectangle to find a position for
|
rectangleWidth: Width of the rectangle to find a position for
|
||||||
rectangleHeight: Height of the rectangle to find a position for
|
rectangleHeight: Height of the rectangle to find a position for
|
||||||
@ -234,7 +233,7 @@ class CygonRectanglePacker(RectanglePacker):
|
|||||||
return Point(self.heightSlices[bestSliceIndex].x, bestSliceY)
|
return Point(self.heightSlices[bestSliceIndex].x, bestSliceY)
|
||||||
|
|
||||||
def integrateRectangle(self, left, width, bottom):
|
def integrateRectangle(self, left, width, bottom):
|
||||||
"""Integrates a new rectangle into the height slice table
|
"""Integrate a new rectangle into the height slice table.
|
||||||
|
|
||||||
left: Position of the rectangle's left side
|
left: Position of the rectangle's left side
|
||||||
width: Width of the rectangle
|
width: Width of the rectangle
|
||||||
|
@ -50,11 +50,6 @@ class Glyph:
|
|||||||
self.w = bb[2] - bb[0]
|
self.w = bb[2] - bb[0]
|
||||||
self.h = bb[3] - bb[1]
|
self.h = bb[3] - bb[1]
|
||||||
|
|
||||||
# Force multiple of 4, to avoid leakage across S3TC blocks
|
|
||||||
# (TODO: is this useful?)
|
|
||||||
# self.w += (4 - (self.w % 4)) % 4
|
|
||||||
# self.h += (4 - (self.h % 4)) % 4
|
|
||||||
|
|
||||||
def pack(self, packer):
|
def pack(self, packer):
|
||||||
self.pos = packer.Pack(self.w, self.h)
|
self.pos = packer.Pack(self.w, self.h)
|
||||||
|
|
||||||
@ -114,7 +109,7 @@ def generate_font(outname, ttfNames, loadopts, size, renderstyle, dsizes):
|
|||||||
|
|
||||||
(ctx, _) = setup_context(1, 1, renderstyle)
|
(ctx, _) = setup_context(1, 1, renderstyle)
|
||||||
|
|
||||||
# TODO this gets the line height from the default font
|
# TODO: this gets the line height from the default font
|
||||||
# while entire texts can be in the fallback font
|
# while entire texts can be in the fallback font
|
||||||
ctx.set_font_face(faceList[0])
|
ctx.set_font_face(faceList[0])
|
||||||
ctx.set_font_size(size + dsizes[ttfNames[0]])
|
ctx.set_font_size(size + dsizes[ttfNames[0]])
|
||||||
@ -155,7 +150,6 @@ def generate_font(outname, ttfNames, loadopts, size, renderstyle, dsizes):
|
|||||||
# Using the dump pacher usually creates bigger textures, but runs faster
|
# Using the dump pacher usually creates bigger textures, but runs faster
|
||||||
# In practice the size difference is so small it always ends up in the same size
|
# In practice the size difference is so small it always ends up in the same size
|
||||||
packer = Packer.DumbRectanglePacker(w, h)
|
packer = Packer.DumbRectanglePacker(w, h)
|
||||||
# packer = Packer.CygonRectanglePacker(w, h)
|
|
||||||
for g in glyphs:
|
for g in glyphs:
|
||||||
g.pack(packer)
|
g.pack(packer)
|
||||||
except Packer.OutOfSpaceError:
|
except Packer.OutOfSpaceError:
|
||||||
@ -174,8 +168,6 @@ def generate_font(outname, ttfNames, loadopts, size, renderstyle, dsizes):
|
|||||||
fnt.write("%d\n" % len(glyphs))
|
fnt.write("%d\n" % len(glyphs))
|
||||||
fnt.write("%d\n" % linespacing)
|
fnt.write("%d\n" % linespacing)
|
||||||
fnt.write("%d\n" % charheight)
|
fnt.write("%d\n" % charheight)
|
||||||
# sorting unneeded, as glyphs are added in increasing order
|
|
||||||
# glyphs.sort(key = lambda g: ord(g.char))
|
|
||||||
for g in glyphs:
|
for g in glyphs:
|
||||||
x0 = g.x0
|
x0 = g.x0
|
||||||
y0 = g.y0
|
y0 = g.y0
|
||||||
|
@ -27,7 +27,7 @@ from i18n_helper import projectRootDirectory
|
|||||||
|
|
||||||
|
|
||||||
def get_diff():
|
def get_diff():
|
||||||
"""Return a diff using svn diff"""
|
"""Return a diff using svn diff."""
|
||||||
os.chdir(projectRootDirectory)
|
os.chdir(projectRootDirectory)
|
||||||
|
|
||||||
diff_process = subprocess.run(["svn", "diff", "binaries"], capture_output=True, check=False)
|
diff_process = subprocess.run(["svn", "diff", "binaries"], capture_output=True, check=False)
|
||||||
@ -38,7 +38,9 @@ def get_diff():
|
|||||||
|
|
||||||
|
|
||||||
def check_diff(diff: io.StringIO) -> List[str]:
|
def check_diff(diff: io.StringIO) -> List[str]:
|
||||||
"""Run through a diff of .po files and check that some of the changes
|
"""Check a diff of .po files for meaningful changes.
|
||||||
|
|
||||||
|
Run through a diff of .po files and check that some of the changes
|
||||||
are real translations changes and not just noise (line changes....).
|
are real translations changes and not just noise (line changes....).
|
||||||
The algorithm isn't extremely clever, but it is quite fast.
|
The algorithm isn't extremely clever, but it is quite fast.
|
||||||
"""
|
"""
|
||||||
@ -97,7 +99,7 @@ def revert_files(files: List[str], verbose=False):
|
|||||||
|
|
||||||
|
|
||||||
def add_untracked(verbose=False):
|
def add_untracked(verbose=False):
|
||||||
"""Add untracked .po files to svn"""
|
"""Add untracked .po files to svn."""
|
||||||
diff_process = subprocess.run(["svn", "st", "binaries"], capture_output=True, check=False)
|
diff_process = subprocess.run(["svn", "st", "binaries"], capture_output=True, check=False)
|
||||||
if diff_process.stderr != b"":
|
if diff_process.stderr != b"":
|
||||||
print(f"Error running svn st: {diff_process.stderr.decode('utf-8')}. Exiting.")
|
print(f"Error running svn st: {diff_process.stderr.decode('utf-8')}. Exiting.")
|
||||||
|
@ -16,7 +16,8 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with 0 A.D. If not, see <http://www.gnu.org/licenses/>.
|
# along with 0 A.D. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
"""
|
"""Remove unnecessary personal data of translators.
|
||||||
|
|
||||||
This file removes unneeded personal data from the translators. Most notably
|
This file removes unneeded personal data from the translators. Most notably
|
||||||
the e-mail addresses. We need to translators' nicks for the credits, but no
|
the e-mail addresses. We need to translators' nicks for the credits, but no
|
||||||
more data is required.
|
more data is required.
|
||||||
|
@ -16,9 +16,10 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with 0 A.D. If not, see <http://www.gnu.org/licenses/>.
|
# along with 0 A.D. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
"""
|
"""Update the translator credits.
|
||||||
This file updates the translators credits located in the public mod GUI files, using
|
|
||||||
translators names from the .po files.
|
This file updates the translator credits located in the public mod GUI files, using
|
||||||
|
translator names from the .po files.
|
||||||
|
|
||||||
If translators change their names on Transifex, the script will remove the old names.
|
If translators change their names on Transifex, the script will remove the old names.
|
||||||
TODO: It should be possible to add people in the list manually, and protect them against
|
TODO: It should be possible to add people in the list manually, and protect them against
|
||||||
|
@ -32,7 +32,7 @@ from textwrap import dedent
|
|||||||
|
|
||||||
|
|
||||||
def pathmatch(mask, path):
|
def pathmatch(mask, path):
|
||||||
"""Matches paths to a mask, where the mask supports * and **.
|
"""Match paths to a mask, where the mask supports * and **.
|
||||||
|
|
||||||
Paths use / as the separator
|
Paths use / as the separator
|
||||||
* matches a sequence of characters without /.
|
* matches a sequence of characters without /.
|
||||||
@ -66,7 +66,7 @@ class Extractor:
|
|||||||
self.excludeMasks = []
|
self.excludeMasks = []
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
"""Extracts messages.
|
"""Extract messages.
|
||||||
|
|
||||||
:return: An iterator over ``(message, plural, context, (location, pos), comment)``
|
:return: An iterator over ``(message, plural, context, (location, pos), comment)``
|
||||||
tuples.
|
tuples.
|
||||||
@ -106,7 +106,7 @@ class Extractor:
|
|||||||
yield message, plural, context, (filename, position), comments
|
yield message, plural, context, (filename, position), comments
|
||||||
|
|
||||||
def extractFromFile(self, filepath):
|
def extractFromFile(self, filepath):
|
||||||
"""Extracts messages from a specific file.
|
"""Extract messages from a specific file.
|
||||||
|
|
||||||
:return: An iterator over ``(message, plural, context, position, comments)`` tuples.
|
:return: An iterator over ``(message, plural, context, position, comments)`` tuples.
|
||||||
:rtype: ``iterator``
|
:rtype: ``iterator``
|
||||||
|
@ -30,8 +30,8 @@ DEBUG_PREFIX = "X_X "
|
|||||||
|
|
||||||
|
|
||||||
def generate_long_strings(root_path, input_file_name, output_file_name, languages=None):
|
def generate_long_strings(root_path, input_file_name, output_file_name, languages=None):
|
||||||
"""
|
"""Generate the 'long strings' debug catalog.
|
||||||
Generate the 'long strings' debug catalog.
|
|
||||||
This catalog contains the longest singular and plural string,
|
This catalog contains the longest singular and plural string,
|
||||||
found amongst all translated languages or a filtered subset.
|
found amongst all translated languages or a filtered subset.
|
||||||
It can be used to check if GUI elements are large enough.
|
It can be used to check if GUI elements are large enough.
|
||||||
@ -104,8 +104,8 @@ def generate_long_strings(root_path, input_file_name, output_file_name, language
|
|||||||
|
|
||||||
|
|
||||||
def generate_debug(root_path, input_file_name, output_file_name):
|
def generate_debug(root_path, input_file_name, output_file_name):
|
||||||
"""
|
"""Generate a debug catalog to identify untranslated strings.
|
||||||
Generate a debug catalog to identify untranslated strings.
|
|
||||||
This prefixes all strings with DEBUG_PREFIX, to easily identify
|
This prefixes all strings with DEBUG_PREFIX, to easily identify
|
||||||
untranslated strings while still making the game navigable.
|
untranslated strings while still making the game navigable.
|
||||||
The catalog is debug.*.po
|
The catalog is debug.*.po
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
"""Wrapper around babel Catalog / .po handling"""
|
"""Wrapper around babel Catalog / .po handling."""
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
"""Utils to list .po"""
|
"""Utils to list .po."""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from typing import List, Optional
|
from typing import List, Optional
|
||||||
@ -7,7 +7,7 @@ from i18n_helper.catalog import Catalog
|
|||||||
|
|
||||||
|
|
||||||
def getCatalogs(inputFilePath, filters: Optional[List[str]] = None) -> List[Catalog]:
|
def getCatalogs(inputFilePath, filters: Optional[List[str]] = None) -> List[Catalog]:
|
||||||
"""Returns a list of "real" catalogs (.po) in the given folder."""
|
"""Return a list of "real" catalogs (.po) in the given folder."""
|
||||||
existingTranslationCatalogs = []
|
existingTranslationCatalogs = []
|
||||||
l10nFolderPath = os.path.dirname(inputFilePath)
|
l10nFolderPath = os.path.dirname(inputFilePath)
|
||||||
inputFileName = os.path.basename(inputFilePath)
|
inputFileName = os.path.basename(inputFilePath)
|
||||||
|
@ -29,9 +29,7 @@ messagesFilename = "messages.json"
|
|||||||
|
|
||||||
|
|
||||||
def warnAboutUntouchedMods():
|
def warnAboutUntouchedMods():
|
||||||
"""
|
"""Warn about mods that are not properly configured to get their messages extracted."""
|
||||||
Warn about mods that are not properly configured to get their messages extracted.
|
|
||||||
"""
|
|
||||||
modsRootFolder = os.path.join(projectRootDirectory, "binaries", "data", "mods")
|
modsRootFolder = os.path.join(projectRootDirectory, "binaries", "data", "mods")
|
||||||
untouchedMods = {}
|
untouchedMods = {}
|
||||||
for modFolder in os.listdir(modsRootFolder):
|
for modFolder in os.listdir(modsRootFolder):
|
||||||
|
@ -374,7 +374,8 @@ def SortFn(A):
|
|||||||
|
|
||||||
|
|
||||||
def WriteColouredDiff(file, diff, isChanged):
|
def WriteColouredDiff(file, diff, isChanged):
|
||||||
"""Helper to write coloured text.
|
"""Help to write coloured text.
|
||||||
|
|
||||||
diff value must always be computed as a unit_spec - unit_generic.
|
diff value must always be computed as a unit_spec - unit_generic.
|
||||||
A positive imaginary part represents advantageous trait.
|
A positive imaginary part represents advantageous trait.
|
||||||
"""
|
"""
|
||||||
@ -504,16 +505,18 @@ def computeTemplates(LoadTemplatesIfParent):
|
|||||||
|
|
||||||
|
|
||||||
def computeCivTemplates(Civs: list):
|
def computeCivTemplates(Civs: list):
|
||||||
"""Load Civ specific templates"""
|
"""Load Civ specific templates.
|
||||||
# NOTE: whether a Civ can train a certain unit is not recorded in the unit
|
|
||||||
# .xml files, and hence we have to get that info elsewhere, e.g. from the
|
|
||||||
# Civ tree. This should be delayed until this whole parser is based on the
|
|
||||||
# Civ tree itself.
|
|
||||||
|
|
||||||
# This function must always ensure that Civ unit parenthood works as
|
NOTE: whether a Civ can train a certain unit is not recorded in the unit
|
||||||
# intended, i.e. a unit in a Civ indeed has a 'Civ' field recording its
|
.xml files, and hence we have to get that info elsewhere, e.g. from the
|
||||||
# loyalty to that Civ. Check this when upgrading this script to keep
|
Civ tree. This should be delayed until this whole parser is based on the
|
||||||
# up with the game engine.
|
Civ tree itself.
|
||||||
|
|
||||||
|
This function must always ensure that Civ unit parenthood works as
|
||||||
|
intended, i.e. a unit in a Civ indeed has a 'Civ' field recording its
|
||||||
|
loyalty to that Civ. Check this when upgrading this script to keep
|
||||||
|
up with the game engine.
|
||||||
|
"""
|
||||||
pwd = os.getcwd()
|
pwd = os.getcwd()
|
||||||
os.chdir(basePath)
|
os.chdir(basePath)
|
||||||
|
|
||||||
@ -557,7 +560,7 @@ def computeCivTemplates(Civs: list):
|
|||||||
|
|
||||||
|
|
||||||
def computeTemplatesByParent(templates: dict, Civs: list, CivTemplates: dict):
|
def computeTemplatesByParent(templates: dict, Civs: list, CivTemplates: dict):
|
||||||
"""Get them in the array"""
|
"""Get them in the array."""
|
||||||
# Civs:list -> CivTemplates:dict -> templates:dict -> TemplatesByParent
|
# Civs:list -> CivTemplates:dict -> templates:dict -> TemplatesByParent
|
||||||
TemplatesByParent = {}
|
TemplatesByParent = {}
|
||||||
for Civ in Civs:
|
for Civ in Civs:
|
||||||
@ -590,7 +593,7 @@ efficiencyTable = computeUnitEfficiencyDiff(TemplatesByParent, Civs)
|
|||||||
|
|
||||||
############################################################
|
############################################################
|
||||||
def writeHTML():
|
def writeHTML():
|
||||||
"""Create the HTML file"""
|
"""Create the HTML file."""
|
||||||
f = open(
|
f = open(
|
||||||
os.path.realpath(__file__).replace("unitTables.py", "") + "unit_summary_table.html",
|
os.path.realpath(__file__).replace("unitTables.py", "") + "unit_summary_table.html",
|
||||||
"w",
|
"w",
|
||||||
|
@ -65,22 +65,23 @@ class RelaxNGValidator:
|
|||||||
return not self.inError
|
return not self.inError
|
||||||
|
|
||||||
def main(self):
|
def main(self):
|
||||||
"""Program entry point, parses command line arguments and launches the validation"""
|
"""Program entry point, parses command line arguments and launches the validation."""
|
||||||
# ordered uniq mods (dict maintains ordered keys from python 3.6)
|
# ordered uniq mods (dict maintains ordered keys from python 3.6)
|
||||||
self.logger.info("Checking %s's integrity.", "|".join(self.mods))
|
self.logger.info("Checking %s's integrity.", "|".join(self.mods))
|
||||||
self.logger.info("The following mods will be loaded: %s.", "|".join(self.mods))
|
self.logger.info("The following mods will be loaded: %s.", "|".join(self.mods))
|
||||||
return self.run()
|
return self.run()
|
||||||
|
|
||||||
def find_files(self, vfs_root, mods, vfs_path, *ext_list):
|
def find_files(self, vfs_root, mods, vfs_path, *ext_list):
|
||||||
"""
|
"""Find files.
|
||||||
returns a list of 2-size tuple with:
|
|
||||||
|
Returns a list of 2-size tuple with:
|
||||||
- Path relative to the mod base
|
- Path relative to the mod base
|
||||||
- full Path
|
- full Path
|
||||||
"""
|
"""
|
||||||
full_exts = ["." + ext for ext in ext_list]
|
full_exts = ["." + ext for ext in ext_list]
|
||||||
|
|
||||||
def find_recursive(dp, base):
|
def find_recursive(dp, base):
|
||||||
"""(relative Path, full Path) generator"""
|
"""(relative Path, full Path) generator."""
|
||||||
if dp.is_dir():
|
if dp.is_dir():
|
||||||
if dp.name not in (".svn", ".git") and not dp.name.endswith("~"):
|
if dp.name not in (".svn", ".git") and not dp.name.endswith("~"):
|
||||||
for fp in dp.iterdir():
|
for fp in dp.iterdir():
|
||||||
@ -179,7 +180,7 @@ class RelaxNGValidator:
|
|||||||
return realpath(join(self.vfs_root, mod_name, vfs_path))
|
return realpath(join(self.vfs_root, mod_name, vfs_path))
|
||||||
|
|
||||||
def get_relaxng_file(self, schemapath):
|
def get_relaxng_file(self, schemapath):
|
||||||
"""We look for the highest priority mod relax NG file"""
|
"""Get the highest priority mod relax NG file."""
|
||||||
for mod in self.mods:
|
for mod in self.mods:
|
||||||
relax_ng_path = self.get_physical_path(mod, schemapath)
|
relax_ng_path = self.get_physical_path(mod, schemapath)
|
||||||
if exists(relax_ng_path):
|
if exists(relax_ng_path):
|
||||||
|
Loading…
Reference in New Issue
Block a user