Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

utils: Fixed ruff errors in utils/ #5051

Merged
merged 1 commit into from
Feb 5, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -374,9 +374,6 @@ ignore = [
"temporal/t.register/testsuite/test_t_register_raster.py" = ["FLY002"]
"temporal/t.register/testsuite/test_t_register_raster_file.py" = ["FLY002"]
"temporal/t.remove/t.remove.py" = ["SIM115"]
"utils/generate_release_notes.py" = ["PGH004"]
"utils/gitlog2changelog.py" = ["SIM115"]
"utils/thumbnails.py" = ["PTH208"]
"vector/v.fill.holes/examples.ipynb" = ["PTH201"]

[tool.ruff.lint.flake8-import-conventions.extend-aliases]
Expand Down
2 changes: 1 addition & 1 deletion utils/generate_release_notes.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def print_by_category(changes, categories, file=None):
def binder_badge(tag):
    """Get mybinder Binder badge from a given tag, hash, or branch

    :param tag: git tag, commit hash, or branch name interpolated into the
        mybinder.org launch URL
    :return: Markdown image-link string rendering the Binder badge
    """
    binder_image_url = "https://mybinder.org/badge_logo.svg"
    # urlpath is percent-encoded so Binder opens the example notebook in JupyterLab.
    binder_url = f"https://mybinder.org/v2/gh/OSGeo/grass/{tag}?urlpath=lab%2Ftree%2Fdoc%2Fexamples%2Fnotebooks%2Fjupyter_example.ipynb"
    return f"[![Binder]({binder_image_url})]({binder_url})"


Expand Down
179 changes: 88 additions & 91 deletions utils/gitlog2changelog.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,6 @@
process = subprocess.Popen(git_command, stdout=subprocess.PIPE, encoding="utf8")
fin = process.stdout

# Create a ChangeLog file in the current directory.
fout = open("ChangeLog", "w")

# Set up the loop variables in order to locate the blocks we want
authorFound = False
dateFound = False
Expand All @@ -45,101 +42,101 @@

wrapper = TextWrapper(initial_indent="\t", subsequent_indent="\t ")

# The main part of the loop
for line in fin:
# The commit line marks the start of a new commit object.
if line.startswith("commit"):
# Start all over again...
authorFound = False
dateFound = False
messageFound = False
messageNL = False
message = ""
filesFound = False
files = ""
continue
# Match the author line and extract the part we want
# (Don't use startswith to allow Author override inside commit message.)
elif "Author:" in line:
authorList = re.split(r": ", line, 1)
try:
author = authorList[1]
author = author[0 : len(author) - 1]
authorFound = True
except Exception as e:
print(f"Could not parse authorList = '{line}'. Error: {e!s}")
with open("ChangeLog", "w") as fout:
# The main part of the loop
for line in fin:
# The commit line marks the start of a new commit object.
if line.startswith("commit"):
# Start all over again...
authorFound = False
dateFound = False
messageFound = False
messageNL = False
message = ""
filesFound = False
files = ""
continue
# Match the author line and extract the part we want
# (Don't use startswith to allow Author override inside commit message.)
elif "Author:" in line:
authorList = re.split(r": ", line, 1)
try:
author = authorList[1]
author = author[0 : len(author) - 1]
authorFound = True
except Exception as e:
print(f"Could not parse authorList = '{line}'. Error: {e!s}")

# Match the date line
elif line.startswith("Date:"):
dateList = re.split(r": ", line, 1)
try:
date = dateList[1]
date = date[0 : len(date) - 1]
dateFound = True
except Exception as e:
print(f"Could not parse dateList = '{line}'. Error: {e!s}")
# The Fossil-IDs, svn-id, and sign-off lines are ignored:
elif (
line.startswith((" Fossil-ID:", " [[SVN:"))
or " git-svn-id:" in line
or "Signed-off-by" in line
):
continue
# Extract the actual commit message for this commit
elif authorFound & dateFound & messageFound is False:
# Find the commit message if we can
if len(line) == 1:
if messageNL:
# Match the date line
elif line.startswith("Date:"):
dateList = re.split(r": ", line, 1)
try:
date = dateList[1]
date = date[0 : len(date) - 1]
dateFound = True
except Exception as e:
print(f"Could not parse dateList = '{line}'. Error: {e!s}")
# The Fossil-IDs, svn-id, and sign-off lines are ignored:
elif (
line.startswith((" Fossil-ID:", " [[SVN:"))
or " git-svn-id:" in line
or "Signed-off-by" in line
):
continue
# Extract the actual commit message for this commit
elif authorFound & dateFound & messageFound is False:
# Find the commit message if we can
if len(line) == 1:
if messageNL:
messageFound = True
else:
messageNL = True
elif len(line) == 4:
messageFound = True
elif len(message) == 0:
message += line.strip()
else:
messageNL = True
elif len(line) == 4:
messageFound = True
elif len(message) == 0:
message += line.strip()
else:
message = message + " " + line.strip()
# If this line is hit all of the files have been stored for this commit
elif re.search(r"files? changed", line):
filesFound = True
continue
# Collect the files for this commit. FIXME: Still need to add +/- to files
elif authorFound & dateFound & messageFound:
fileList = re.split(r" \| ", line, 2)
if len(fileList) > 1:
if len(files) > 0:
files = files + ", " + fileList[0].strip()
message = message + " " + line.strip()
# If this line is hit all of the files have been stored for this commit
elif re.search(r"files? changed", line):
filesFound = True
continue
# Collect the files for this commit. FIXME: Still need to add +/- to files
elif authorFound & dateFound & messageFound:
fileList = re.split(r" \| ", line, 2)
if len(fileList) > 1:
if len(files) > 0:
files = files + ", " + fileList[0].strip()
else:
files = fileList[0].strip()
# All of the parts of the commit have been found - write out the entry
if authorFound & dateFound & messageFound & filesFound:
# First the author line, only outputted if it is the first for that
# author on this day
authorLine = date + " " + author
if len(prevAuthorLine) == 0:
fout.write(authorLine + "\n\n")
elif authorLine == prevAuthorLine:
pass
else:
files = fileList[0].strip()
# All of the parts of the commit have been found - write out the entry
if authorFound & dateFound & messageFound & filesFound:
# First the author line, only outputted if it is the first for that
# author on this day
authorLine = date + " " + author
if len(prevAuthorLine) == 0:
fout.write(authorLine + "\n\n")
elif authorLine == prevAuthorLine:
pass
else:
fout.write("\n" + authorLine + "\n\n")
fout.write("\n" + authorLine + "\n\n")

# Assemble the actual commit message line(s) and limit the line length
# to 80 characters.
commitLine = "* " + files + ": " + message
# Assemble the actual commit message line(s) and limit the line length
# to 80 characters.
commitLine = "* " + files + ": " + message

# Write out the commit line
fout.write(wrapper.fill(commitLine) + "\n")
# Write out the commit line
fout.write(wrapper.fill(commitLine) + "\n")

# Now reset all the variables ready for a new commit block.
authorFound = False
dateFound = False
messageFound = False
messageNL = False
message = ""
filesFound = False
files = ""
prevAuthorLine = authorLine
# Now reset all the variables ready for a new commit block.
authorFound = False
dateFound = False
messageFound = False
messageNL = False
message = ""
filesFound = False
files = ""
prevAuthorLine = authorLine

# Close the input and output lines now that we are finished.
fin.close()
fout.close()
7 changes: 4 additions & 3 deletions utils/thumbnails.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,9 +200,10 @@ def main():

gs.mapcalc("$grad = float(col())", grad=tmp_grad_rel, quiet=True)

for table in os.listdir(color_dir):
path = os.path.join(color_dir, table)
grad = make_gradient(path)
color_dir_path = Path(color_dir)
for table_path in color_dir_path.iterdir():
table = table_path.name
grad = make_gradient(table_path)
make_image(output_dir, table, grad, height=height, width=width)

gs.mapcalc("$grad = col()", grad=tmp_grad_abs, quiet=True)
Expand Down
Loading