From 769032fa38ecea285f6e538302e7129884bd1fb1 Mon Sep 17 00:00:00 2001 From: Ivan Mincik Date: Mon, 19 Aug 2024 11:10:51 +0000 Subject: [PATCH 01/30] nix: fix build by using libxml2 with http support (#4196) * CI(deps): Lock file maintenance * nix: fix build by using libxml2 with http support Related nixpkgs PR: https://github.com/NixOS/nixpkgs/pull/331118 --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- flake.lock | 20 ++++++++++---------- package.nix | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/flake.lock b/flake.lock index c30f9b90775..96d1a96a3cf 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1719994518, - "narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=", + "lastModified": 1722555600, + "narHash": "sha256-XOQkdLafnb/p9ij77byFQjDf5m5QYl9b2REiVClC+x4=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7", + "rev": "8471fe90ad337a8074e957b69ca4d0089218391d", "type": "github" }, "original": { @@ -19,11 +19,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1720955038, - "narHash": "sha256-GaliJqfFwyYxReFywxAa8orCO+EnDq2NK2F+5aSc8vo=", + "lastModified": 1723891200, + "narHash": "sha256-uljX21+D/DZgb9uEFFG2dkkQbPZN+ig4Z6+UCLWFVAk=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "aa247c0c90ecf4ae7a032c54fdc21b91ca274062", + "rev": "a0d6390cb3e82062a35d0288979c45756e481f60", "type": "github" }, "original": { @@ -35,14 +35,14 @@ }, "nixpkgs-lib": { "locked": { - "lastModified": 1719876945, - "narHash": "sha256-Fm2rDDs86sHy0/1jxTOKB1118Q0O3Uc7EC0iXvXKpbI=", + "lastModified": 1722555339, + "narHash": "sha256-uFf2QeW7eAHlYXuDktm9c25OxOyCoUOQmh5SZ9amE5Q=", "type": "tarball", - "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" + "url": "https://github.com/NixOS/nixpkgs/archive/a5d394176e64ab29c852d03346c1fc9b0b7d33eb.tar.gz" }, "original": { "type": "tarball", - "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" + "url": "https://github.com/NixOS/nixpkgs/archive/a5d394176e64ab29c852d03346c1fc9b0b7d33eb.tar.gz" } }, "root": { diff --git a/package.nix b/package.nix index 7abbb15275d..3f1d03697c7 100644 --- a/package.nix +++ b/package.nix @@ -79,7 +79,7 @@ stdenv.mkDerivation (finalAttrs: { libpng libsvm libtiff - libxml2 + (libxml2.override { enableHttp = true; }) netcdf pdal postgresql From 13e9a1b1ce1a03cc72ec262a59d4972edc76c729 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:27:56 +0000 Subject: [PATCH 02/30] CI(deps): Lock file maintenance (#4198) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 96d1a96a3cf..58a8dd324c0 100644 --- a/flake.lock +++ b/flake.lock @@ -19,11 +19,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1723891200, - "narHash": "sha256-uljX21+D/DZgb9uEFFG2dkkQbPZN+ig4Z6+UCLWFVAk=", + "lastModified": 1724015816, + "narHash": "sha256-hVESnM7Eiz93+4DeiE0a1TwMeaeph1ytRJ5QtqxYRWg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "a0d6390cb3e82062a35d0288979c45756e481f60", + "rev": "9aa35efbea27d320d0cdc5f922f0890812affb60", "type": "github" }, "original": { From 2cf98dac35a6a3d2ab14120a68e345ff7609ba7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edouard=20Choini=C3=A8re?= <27212526+echoix@users.noreply.github.com> Date: Mon, 19 Aug 2024 16:31:36 
-0400 Subject: [PATCH 03/30] style: Fix unnecessary-dunder-call (PLC2801) (#4170) * style: Fix unnecessary-dunder-call (PLC2801) Ruff rule: https://docs.astral.sh/ruff/rules/unnecessary-dunder-call/ 12 instances fixed. Solves some reported issues by Pylint 3.2.6 through unnecessary-dunder-call / C2801 * style: Fix unnecessary-dunder-call (PLC2801) for __init__ 4 instances fixed * style: Fix unnecessary-dunder-call (PLC2801) using getattr 6 instances fixed * Ignore PLC2801 in test suite file for preserving meaning of test during the change * style: Fix unnecessary-dunder-call (PLC2801) for get and set item 3 instances fixed * style: Fix unnecessary-dunder-call (PLC2801) for repr 1 instance fixed * Ignore PLC2801 for __del__, as there seems to be some differences between both * Update pyproject.toml to remove PLC2801 exclusion * gui.wxpython.mapdisp.main: Remove next(self) from LayerList * grass.pygrass.vector: Remove next(self) from vector class (old Python 2 iterator) * Revert "grass.pygrass.vector: Remove next(self) from vector class (old Python 2 iterator)" This reverts commit f79172c1bcb69df75e51d76ca27c78d41307ed7a. * Revert "gui.wxpython.mapdisp.main: Remove next(self) from LayerList" This reverts commit 9963dd16ad0e73ea6111c86125e3ff5c61cd3da2. --- gui/wxpython/gcp/manager.py | 2 +- gui/wxpython/gmodeler/canvas.py | 4 ++-- gui/wxpython/gmodeler/model.py | 4 ++-- gui/wxpython/gmodeler/panels.py | 2 +- gui/wxpython/image2target/ii2t_manager.py | 2 +- gui/wxpython/mapdisp/main.py | 2 +- gui/wxpython/photo2image/ip2i_manager.py | 2 +- gui/wxpython/tplot/frame.py | 5 ++++- pyproject.toml | 1 - python/grass/gunittest/utils.py | 2 +- python/grass/pygrass/gis/region.py | 2 +- python/grass/pygrass/modules/interface/typedict.py | 4 ++-- python/grass/pygrass/raster/abstract.py | 4 ++-- python/grass/pygrass/raster/category.py | 12 ++++++------ python/grass/pygrass/vector/__init__.py | 2 +- python/grass/pygrass/vector/basic.py | 4 ++-- python/grass/pygrass/vector/geometry.py | 13 +++++-------- python/grass/pygrass/vector/table.py | 2 +- .../pygrass/vector/testsuite/test_geometry_attrs.py | 6 +++--- 19 files changed, 37 insertions(+), 38 deletions(-) diff --git a/gui/wxpython/gcp/manager.py b/gui/wxpython/gcp/manager.py index 98323112972..ac006a95cbf 100644 --- a/gui/wxpython/gcp/manager.py +++ b/gui/wxpython/gcp/manager.py @@ -1272,7 +1272,7 @@ def InitMapDisplay(self): # initialize column sorter self.itemDataMap = self.mapcoordlist ncols = self.list.GetColumnCount() - ColumnSorterMixin.__init__(self, ncols) + ColumnSorterMixin(self, ncols) # init to ascending sort on first click self._colSortFlag = [1] * ncols diff --git a/gui/wxpython/gmodeler/canvas.py b/gui/wxpython/gmodeler/canvas.py index ab49ab770d1..7c604ec6624 100644 --- a/gui/wxpython/gmodeler/canvas.py +++ b/gui/wxpython/gmodeler/canvas.py @@ -85,10 +85,10 @@ def RemoveShapes(self, shapes): remList, upList = self.parent.GetModel().RemoveItem(shape) shape.Select(False) diagram.RemoveShape(shape) - shape.__del__() + shape.__del__() # noqa: PLC2801, C2801 for item in remList: diagram.RemoveShape(item) - item.__del__() + item.__del__() # noqa: PLC2801, C2801 for item in upList: item.Update() diff --git a/gui/wxpython/gmodeler/model.py b/gui/wxpython/gmodeler/model.py index 4a587230fa9..b243f0beb5c 100644 --- a/gui/wxpython/gmodeler/model.py +++ b/gui/wxpython/gmodeler/model.py @@ -1577,7 +1577,7 @@ def _defineShape(self, width, height, x, y): :param width, height: dimension of the shape :param x, y: position of the shape """ - 
ogl.EllipseShape.__init__(self, width, height) + ogl.EllipseShape(self, width, height) if self.parent.GetCanvas(): self.SetCanvas(self.parent.GetCanvas()) @@ -1592,7 +1592,7 @@ def _defineShape(self, width, height, x, y): :param width, height: dimension of the shape :param x, y: position of the shape """ - ogl.CompositeShape.__init__(self) + ogl.CompositeShape(self) if self.parent.GetCanvas(): self.SetCanvas(self.parent.GetCanvas()) diff --git a/gui/wxpython/gmodeler/panels.py b/gui/wxpython/gmodeler/panels.py index 6d6bff45387..300220b9f7c 100644 --- a/gui/wxpython/gmodeler/panels.py +++ b/gui/wxpython/gmodeler/panels.py @@ -542,7 +542,7 @@ def GetOptData(self, dcmd, layer, params, propwin): remList, upList = self.model.RemoveItem(data, layer) for item in remList: self.canvas.diagram.RemoveShape(item) - item.__del__() + item.__del__() # noqa: PLC2801, C2801 for item in upList: item.Update() diff --git a/gui/wxpython/image2target/ii2t_manager.py b/gui/wxpython/image2target/ii2t_manager.py index 968e6c50b7d..4aec0eeba5e 100644 --- a/gui/wxpython/image2target/ii2t_manager.py +++ b/gui/wxpython/image2target/ii2t_manager.py @@ -1257,7 +1257,7 @@ def InitMapDisplay(self): # initialize column sorter self.itemDataMap = self.mapcoordlist ncols = self.list.GetColumnCount() - ColumnSorterMixin.__init__(self, ncols) + ColumnSorterMixin(self, ncols) # init to ascending sort on first click self._colSortFlag = [1] * ncols diff --git a/gui/wxpython/mapdisp/main.py b/gui/wxpython/mapdisp/main.py index 5bef25ad128..1e94d3d8ba2 100644 --- a/gui/wxpython/mapdisp/main.py +++ b/gui/wxpython/mapdisp/main.py @@ -370,7 +370,7 @@ def __next__(self): return result def next(self): - return self.__next__() + return next(self) def GetSelectedLayers(self, checkedOnly=True): # hidden and selected vs checked and selected diff --git a/gui/wxpython/photo2image/ip2i_manager.py b/gui/wxpython/photo2image/ip2i_manager.py index 042053b65e2..2a9ae19d26a 100644 --- a/gui/wxpython/photo2image/ip2i_manager.py +++ b/gui/wxpython/photo2image/ip2i_manager.py @@ -625,7 +625,7 @@ def InitMapDisplay(self): # initialize column sorter self.itemDataMap = self.mapcoordlist ncols = self.list.GetColumnCount() - ColumnSorterMixin.__init__(self, ncols) + ColumnSorterMixin(self, ncols) # init to ascending sort on first click self._colSortFlag = [1] * ncols diff --git a/gui/wxpython/tplot/frame.py b/gui/wxpython/tplot/frame.py index e15e6584808..62147f54ad6 100755 --- a/gui/wxpython/tplot/frame.py +++ b/gui/wxpython/tplot/frame.py @@ -140,7 +140,10 @@ def onClose(self, evt): if self._giface.GetMapDisplay(): self.coorval.OnClose() self.cats.OnClose() - self.__del__() + + # __del__() and del keyword seem to have differences, + # how can self.Destroy(), called after del, work otherwise + self.__del__() # noqa: PLC2801, C2801 self.Destroy() def _layout(self): diff --git a/pyproject.toml b/pyproject.toml index 930c538049b..eb866cb0e08 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -158,7 +158,6 @@ ignore = [ "PLC0415", # import-outside-top-level "PLC1901", # compare-to-empty-string "PLC2701", # import-private-name - "PLC2801", # unnecessary-dunder-call "PLE0704", # misplaced-bare-raise "PLR0904", # too-many-public-methods "PLR0911", # too-many-return-statements diff --git a/python/grass/gunittest/utils.py b/python/grass/gunittest/utils.py index 082366cacaf..c1afea3d5ad 100644 --- a/python/grass/gunittest/utils.py +++ b/python/grass/gunittest/utils.py @@ -76,7 +76,7 @@ def safe_repr(obj, short=False): try: result = repr(obj) except Exception: - 
result = object.__repr__(obj) + result = object.__repr__(obj) # noqa: PLC2801 if not short or len(result) < _MAX_LENGTH: return result return result[:_MAX_LENGTH] + " [truncated]..." diff --git a/python/grass/pygrass/gis/region.py b/python/grass/pygrass/gis/region.py index 0f736978026..98c75a15631 100644 --- a/python/grass/pygrass/gis/region.py +++ b/python/grass/pygrass/gis/region.py @@ -370,7 +370,7 @@ def keys(self): def items(self): """Return a list of tuple with key and value.""" - return [(k, self.__getattribute__(k)) for k in self.keys()] + return [(k, getattr(self, k)) for k in self.keys()] # ----------METHODS---------- def zoom(self, raster_name): diff --git a/python/grass/pygrass/modules/interface/typedict.py b/python/grass/pygrass/modules/interface/typedict.py index c8fdeb48a76..fb3ddb368eb 100644 --- a/python/grass/pygrass/modules/interface/typedict.py +++ b/python/grass/pygrass/modules/interface/typedict.py @@ -64,6 +64,6 @@ def __reduce__(self): def used(self): key_dict = {} for key in self: - if self.__getattr__(key): - key_dict[key] = self.__getattr__(key) + if getattr(self, key): + key_dict[key] = getattr(self, key) return key_dict diff --git a/python/grass/pygrass/raster/abstract.py b/python/grass/pygrass/raster/abstract.py index 616964d8f7b..edea42e7bdd 100644 --- a/python/grass/pygrass/raster/abstract.py +++ b/python/grass/pygrass/raster/abstract.py @@ -247,10 +247,10 @@ def keys(self): ] def items(self): - return [(k, self.__getattribute__(k)) for k in self.keys()] + return [(k, getattr(self, k)) for k in self.keys()] def __iter__(self): - return ((k, self.__getattribute__(k)) for k in self.keys()) + return ((k, getattr(self, k)) for k in self.keys()) def _repr_html_(self): return dict2html(dict(self.items()), keys=self.keys(), border="1", kdec="b") diff --git a/python/grass/pygrass/raster/category.py b/python/grass/pygrass/raster/category.py index 0a5e42093ae..00381f91130 100644 --- a/python/grass/pygrass/raster/category.py +++ b/python/grass/pygrass/raster/category.py @@ -196,13 +196,13 @@ def __del__(self): libraster.Rast_free_cats(ctypes.byref(self.c_cats)) def get_cat(self, index): - return self.__getitem__(index) + return self[index] def set_cat(self, index, value): if index is None: self.append(value) - elif index < self.__len__(): - self.__setitem__(index, value) + elif index < (len(self)): + self[index] = value else: raise TypeError("Index outside range.") @@ -221,7 +221,7 @@ def _write_cats(self): # reset only the C struct libraster.Rast_init_cats("", ctypes.byref(self.c_cats)) # write to the c struct - for cat in self.__iter__(): + for cat in iter(self): label, min_cat, max_cat = cat if max_cat is None: max_cat = min_cat @@ -273,7 +273,7 @@ def copy(self, category): self._read_cats() def ncats(self): - return self.__len__() + return len(self) def set_cats_fmt(self, fmt, m1, a1, m2, a2): """Not implemented yet. 
@@ -327,7 +327,7 @@ def write_rules(self, filename, sep=":"): :param str sep: the separator used to divide values and category """ cats = [] - for cat in self.__iter__(): + for cat in iter(self): if cat[-1] is None: cat = cat[:-1] cats.append(sep.join([str(i) for i in cat])) diff --git a/python/grass/pygrass/vector/__init__.py b/python/grass/pygrass/vector/__init__.py index 7869b1a29b5..6f29a167b2e 100644 --- a/python/grass/pygrass/vector/__init__.py +++ b/python/grass/pygrass/vector/__init__.py @@ -108,7 +108,7 @@ def __next__(self): @must_be_open def next(self): - return self.__next__() + return next(self) @must_be_open def rewind(self): diff --git a/python/grass/pygrass/vector/basic.py b/python/grass/pygrass/vector/basic.py index 1ce2dccedd8..079f7ac829f 100644 --- a/python/grass/pygrass/vector/basic.py +++ b/python/grass/pygrass/vector/basic.py @@ -137,7 +137,7 @@ def contains(self, point): ) def items(self): - return [(k, self.__getattribute__(k)) for k in self.keys()] + return [(k, getattr(self, k)) for k in self.keys()] def nsewtb(self, tb=True): """Return a list of values from bounding box @@ -215,7 +215,7 @@ def append(self, box): 3 """ - indx = self.__len__() + indx = len(self) libvect.Vect_boxlist_append(self.c_boxlist, indx, box.c_bbox) # def extend(self, boxlist): diff --git a/python/grass/pygrass/vector/geometry.py b/python/grass/pygrass/vector/geometry.py index bc6ad32f10a..1c5d378e6ef 100644 --- a/python/grass/pygrass/vector/geometry.py +++ b/python/grass/pygrass/vector/geometry.py @@ -958,7 +958,7 @@ def pop(self, indx): indx += self.c_points.contents.n_points if indx >= self.c_points.contents.n_points: raise IndexError("Index out of range") - pnt = self.__getitem__(indx) + pnt = self[indx] libvect.Vect_line_delete_point(self.c_points, indx) return pnt @@ -1028,7 +1028,7 @@ def remove(self, pnt): .. """ - for indx, point in enumerate(self.__iter__()): + for indx, point in enumerate(iter(self)): if pnt == point: libvect.Vect_line_delete_point(self.c_points, indx) return @@ -1086,7 +1086,7 @@ def to_list(self): .. """ - return [pnt.coords() for pnt in self.__iter__()] + return [pnt.coords() for pnt in iter(self)] def to_array(self): """Return an array of coordinates. :: @@ -1112,10 +1112,7 @@ def to_wkt_p(self): .. """ return "LINESTRING(%s)" % ", ".join( - [ - " ".join(["%f" % coord for coord in pnt.coords()]) - for pnt in self.__iter__() - ] + [" ".join(["%f" % coord for coord in pnt.coords()]) for pnt in iter(self)] ) def from_wkt(self, wkt): @@ -1592,7 +1589,7 @@ def isles_ids(self): """Return the id of isles""" return [ libvect.Vect_get_area_isle(self.c_mapinfo, self.area_id, i) - for i in range(self.__len__()) + for i in range(len(self)) ] @mapinfo_must_be_set diff --git a/python/grass/pygrass/vector/table.py b/python/grass/pygrass/vector/table.py index dec3ffbba08..4b7e99c0a83 100644 --- a/python/grass/pygrass/vector/table.py +++ b/python/grass/pygrass/vector/table.py @@ -297,7 +297,7 @@ def update_odict(self): [ "?", ] - * self.__len__() + * (len(self)) ) kv = ",".join(["%s=?" % k for k in self.odict.keys() if k != self.key]) where = "%s=?" 
% self.key diff --git a/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py b/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py index 4c17ef03d4c..356a55ec4d7 100644 --- a/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py +++ b/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py @@ -60,11 +60,11 @@ def test_setitem(self): newvalue = 100.0 newpairs = ("setitem_point_2", 1000.0) - self.attrs.__setitem__("name", newname) + self.attrs.__setitem__("name", newname) # noqa: PLC2801 self.assertEqual(self.attrs["name"], newname) - self.attrs.__setitem__("value", newvalue) + self.attrs.__setitem__("value", newvalue) # noqa: PLC2801 self.assertEqual(self.attrs["value"], newvalue) - self.attrs.__setitem__(("name", "value"), newpairs) + self.attrs.__setitem__(("name", "value"), newpairs) # noqa: PLC2801 self.assertEqual(self.attrs["name", "value"], newpairs) From 4d17eb509fceac7fdb4fc3943d74ff620dc6f113 Mon Sep 17 00:00:00 2001 From: Kriti Birda <164247895+kritibirda26@users.noreply.github.com> Date: Tue, 20 Aug 2024 02:15:48 +0530 Subject: [PATCH 04/30] r.object.geometry: add json support (#4105) * r.object.geometry: add json support * debug test failure in ci * fix tests --- raster/r.object.geometry/Makefile | 2 +- raster/r.object.geometry/main.c | 111 ++++++++++++++---- .../r.object.geometry/r.object.geometry.html | 71 +++++++++++ ...etry_test.py => test_r_object_geometry.py} | 67 +++++++++-- 4 files changed, 212 insertions(+), 39 deletions(-) rename raster/r.object.geometry/testsuite/{r_object_geometry_test.py => test_r_object_geometry.py} (68%) diff --git a/raster/r.object.geometry/Makefile b/raster/r.object.geometry/Makefile index aeaa235c641..e508927fd24 100644 --- a/raster/r.object.geometry/Makefile +++ b/raster/r.object.geometry/Makefile @@ -2,7 +2,7 @@ MODULE_TOPDIR = ../.. 
PGM = r.object.geometry -LIBES = $(RASTERLIB) $(GISLIB) +LIBES = $(RASTERLIB) $(GISLIB) $(PARSONLIB) DEPENDENCIES = $(RASTERDEP) $(GISDEP) include $(MODULE_TOPDIR)/include/Make/Module.make diff --git a/raster/r.object.geometry/main.c b/raster/r.object.geometry/main.c index f808c4b88e1..fad0cf79099 100644 --- a/raster/r.object.geometry/main.c +++ b/raster/r.object.geometry/main.c @@ -22,6 +22,9 @@ #include #include #include +#include + +enum OutputFormat { PLAIN, JSON }; /* compare two cell values * return 0 if equal, 1 if different */ @@ -42,6 +45,7 @@ int main(int argc, char *argv[]) struct Option *opt_in; struct Option *opt_out; struct Option *opt_sep; + struct Option *fmt_opt; struct Flag *flag_m; char *sep; FILE *out_fp; @@ -59,6 +63,11 @@ int main(int argc, char *argv[]) int planimetric = 0, compute_areas = 0; struct Cell_head cellhd; + enum OutputFormat format; + JSON_Array *root_array; + JSON_Object *object; + JSON_Value *root_value, *object_value; + G_gisinit(argv[0]); /* Define the different options */ @@ -82,10 +91,22 @@ int main(int argc, char *argv[]) flag_m->key = 'm'; flag_m->label = _("Use meters as units instead of cells"); + fmt_opt = G_define_standard_option(G_OPT_F_FORMAT); + fmt_opt->guisection = _("Print"); + /* parse options */ if (G_parser(argc, argv)) exit(EXIT_FAILURE); + if (strcmp(fmt_opt->answer, "json") == 0) { + format = JSON; + root_value = json_value_init_array(); + root_array = json_array(root_value); + } + else { + format = PLAIN; + } + sep = G_option_to_separator(opt_sep); in_fd = Rast_open_old(opt_in->answer, ""); @@ -294,16 +315,18 @@ int main(int argc, char *argv[]) G_free(prev_in); G_message(_("Writing output")); - /* print table */ - fprintf(out_fp, "cat%s", sep); - fprintf(out_fp, "area%s", sep); - fprintf(out_fp, "perimeter%s", sep); - fprintf(out_fp, "compact_square%s", sep); - fprintf(out_fp, "compact_circle%s", sep); - fprintf(out_fp, "fd%s", sep); - fprintf(out_fp, "mean_x%s", sep); - fprintf(out_fp, "mean_y"); - fprintf(out_fp, "\n"); + if (format == PLAIN) { + /* print table */ + fprintf(out_fp, "cat%s", sep); + fprintf(out_fp, "area%s", sep); + fprintf(out_fp, "perimeter%s", sep); + fprintf(out_fp, "compact_square%s", sep); + fprintf(out_fp, "compact_circle%s", sep); + fprintf(out_fp, "fd%s", sep); + fprintf(out_fp, "mean_x%s", sep); + fprintf(out_fp, "mean_y"); + fprintf(out_fp, "\n"); + } /* print table body */ for (i = 0; i < n_objects; i++) { @@ -312,22 +335,42 @@ int main(int argc, char *argv[]) if (obj_geos[i].area == 0) continue; - fprintf(out_fp, "%d%s", min + i, sep); - fprintf(out_fp, "%f%s", obj_geos[i].area, sep); - fprintf(out_fp, "%f%s", obj_geos[i].perimeter, sep); - fprintf(out_fp, "%f%s", - 4 * sqrt(obj_geos[i].area) / obj_geos[i].perimeter, sep); - fprintf(out_fp, "%f%s", - obj_geos[i].perimeter / (2 * sqrt(M_PI * obj_geos[i].area)), - sep); + double compact_square = + 4 * sqrt(obj_geos[i].area) / obj_geos[i].perimeter; + double compact_circle = + obj_geos[i].perimeter / (2 * sqrt(M_PI * obj_geos[i].area)); /* log 1 = 0, so avoid that by always adding 0.001 to the area: */ - fprintf(out_fp, "%f%s", - 2 * log(obj_geos[i].perimeter) / log(obj_geos[i].area + 0.001), - sep); - if (!flag_m->answer) + double fd = + 2 * log(obj_geos[i].perimeter) / log(obj_geos[i].area + 0.001); + if (!flag_m->answer) { obj_geos[i].num = obj_geos[i].area; - fprintf(out_fp, "%f%s", obj_geos[i].x / obj_geos[i].num, sep); - fprintf(out_fp, "%f", obj_geos[i].y / obj_geos[i].num); + } + double mean_x = obj_geos[i].x / obj_geos[i].num; + double mean_y = 
obj_geos[i].y / obj_geos[i].num; + switch (format) { + case PLAIN: + fprintf(out_fp, "%d%s", min + i, sep); + fprintf(out_fp, "%f%s", obj_geos[i].area, sep); + fprintf(out_fp, "%f%s", obj_geos[i].perimeter, sep); + fprintf(out_fp, "%f%s", compact_square, sep); + fprintf(out_fp, "%f%s", compact_circle, sep); + fprintf(out_fp, "%f%s", fd, sep); + fprintf(out_fp, "%f%s", mean_x, sep); + fprintf(out_fp, "%f", mean_y); + break; + case JSON: + object_value = json_value_init_object(); + object = json_object(object_value); + json_object_set_number(object, "category", min + i); + json_object_set_number(object, "area", obj_geos[i].area); + json_object_set_number(object, "perimeter", obj_geos[i].perimeter); + json_object_set_number(object, "compact_square", compact_square); + json_object_set_number(object, "compact_circle", compact_circle); + json_object_set_number(object, "fd", fd); + json_object_set_number(object, "mean_x", mean_x); + json_object_set_number(object, "mean_y", mean_y); + break; + } /* object id: i + min */ /* TODO */ @@ -342,8 +385,26 @@ int main(int argc, char *argv[]) /* variance of X and Y to approximate bounding ellipsoid */ - fprintf(out_fp, "\n"); + switch (format) { + case PLAIN: + fprintf(out_fp, "\n"); + break; + case JSON: + json_array_append_value(root_array, object_value); + break; + } } + + if (format == JSON) { + char *serialized_string = json_serialize_to_string_pretty(root_value); + if (serialized_string == NULL) { + G_fatal_error(_("Failed to initialize pretty JSON string.")); + } + puts(serialized_string); + json_free_serialized_string(serialized_string); + json_value_free(root_value); + } + if (out_fp != stdout) fclose(out_fp); diff --git a/raster/r.object.geometry/r.object.geometry.html b/raster/r.object.geometry/r.object.geometry.html index 722c6c78801..bdca7566868 100644 --- a/raster/r.object.geometry/r.object.geometry.html +++ b/raster/r.object.geometry/r.object.geometry.html @@ -38,6 +38,77 @@

EXAMPLE

r.object.geometry input=soilsID output=soils_geom.txt

+The format=json option can be used to change the output format to JSON:
+
+r.object.geometry input=zipcodes format=json
+
+ +
+[
+    {
+        "category": 1,
+        "area": 106,
+        "perimeter": 62,
+        "compact_circle": 1.6987670351864215,
+        "compact_square": 0.66423420264432265,
+        "fd": 1.7699924681225903,
+        "mean_x": 631382.07547169807,
+        "mean_y": 222764.15094339623
+    },
+    {
+        "category": 2,
+        "area": 57,
+        "perimeter": 36,
+        "compact_circle": 1.3451172460704992,
+        "compact_square": 0.83887049280786108,
+        "fd": 1.772672742164326,
+        "mean_x": 643460.52631578944,
+        "mean_y": 217232.45614035087
+    },
+    {
+        "category": 3,
+        "area": 10,
+        "perimeter": 16,
+        "compact_circle": 1.4272992929222168,
+        "compact_square": 0.79056941504209488,
+        "fd": 2.4081353865496951,
+        "mean_x": 631300,
+        "mean_y": 215450
+    },
+    {
+        "category": 4,
+        "area": 63,
+        "perimeter": 60,
+        "compact_circle": 2.1324361862292305,
+        "compact_square": 0.52915026221291817,
+        "fd": 1.9764401337147652,
+        "mean_x": 642345.23809523811,
+        "mean_y": 226599.20634920636
+    },
+    {
+        "category": 5,
+        "area": 491,
+        "perimeter": 156,
+        "compact_circle": 1.9859985189304281,
+        "compact_square": 0.56816717451693177,
+        "fd": 1.6299200778082998,
+        "mean_x": 637912.93279022397,
+        "mean_y": 220636.96537678209
+    },
+    {
+        "category": 6,
+        "area": 83,
+        "perimeter": 60,
+        "compact_circle": 1.8578355639603314,
+        "compact_square": 0.60736223860961991,
+        "fd": 1.8531256328449071,
+        "mean_x": 635846.38554216863,
+        "mean_y": 227219.8795180723
+    }
+]
+
+

SEE ALSO

diff --git a/raster/r.object.geometry/testsuite/r_object_geometry_test.py b/raster/r.object.geometry/testsuite/test_r_object_geometry.py similarity index 68% rename from raster/r.object.geometry/testsuite/r_object_geometry_test.py rename to raster/r.object.geometry/testsuite/test_r_object_geometry.py index e6df4d7570f..419df4d780b 100644 --- a/raster/r.object.geometry/testsuite/r_object_geometry_test.py +++ b/raster/r.object.geometry/testsuite/test_r_object_geometry.py @@ -4,9 +4,15 @@ module. """ +import json import os +from sys import stderr + from grass.gunittest.case import TestCase from grass.gunittest.main import test +from grass.gunittest.gmodules import call_module + +from grass.gunittest.gmodules import SimpleModule testraster1 = """\ north: 250000 @@ -30,31 +36,25 @@ class TestObjectGeometryPixel(TestCase): test_objects1 = "test_objects1" output_file_pixel = "output_file_pixel.csv" - @classmethod - def setUpClass(cls): + def setUp(self): """Imports test raster(s), ensures expected computational region and setup""" - cls.runModule( + self.runModule( "r.in.ascii", input="-", type="CELL", stdin_=testraster1, - output=cls.test_objects1, + output=self.test_objects1, ) - cls.use_temp_region() - cls.runModule("g.region", raster=cls.test_objects1) - - @classmethod - def tearDownClass(cls): - """Remove the temporary region""" - cls.del_temp_region() + self.use_temp_region() + self.runModule("g.region", raster=self.test_objects1) def tearDown(self): - """Remove the outputs created from the object geometry module - + """Remove the outputs created from the object geometry module and the temporary region This is executed after each test run. """ if os.path.isfile(self.output_file_pixel): os.remove(self.output_file_pixel) + self.del_temp_region() self.runModule("g.remove", flags="f", type="raster", name=self.test_objects1) def test_object_geometry_pixel(self): @@ -72,6 +72,47 @@ def test_object_geometry_pixel(self): msg="Output file is not equal to reference file", ) + def test_object_geometry_json(self): + """Test json format output""" + reference = [ + { + "category": 1, + "area": 4, + "perimeter": 8, + "compact_circle": 1.1283791670955126, + "compact_square": 1, + "fd": 2.999459154496928, + "mean_x": 625000, + "mean_y": 237500, + }, + { + "category": 2, + "area": 8, + "perimeter": 12, + "compact_circle": 1.1968268412042982, + "compact_square": 0.94280904158206347, + "fd": 2.3898313512153728, + "mean_x": 655000, + "mean_y": 225000, + }, + { + "category": 3, + "area": 4, + "perimeter": 8, + "compact_circle": 1.1283791670955126, + "compact_square": 1, + "fd": 2.999459154496928, + "mean_x": 625000, + "mean_y": 212500, + }, + ] + module = SimpleModule( + "r.object.geometry", input=self.test_objects1, format="json" + ) + self.runModule(module) + data = json.loads(module.outputs.stdout) + self.assertCountEqual(reference, data) + class TestObjectGeometryMeter(TestCase): """Test case for object geometry module""" From 309b417b1a528aa8ff03ab7ad9a47aa96ec4d66d Mon Sep 17 00:00:00 2001 From: Kriti Birda <164247895+kritibirda26@users.noreply.github.com> Date: Tue, 20 Aug 2024 03:48:07 +0530 Subject: [PATCH 05/30] db.describe: add JSON support (#4021) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * db.describe: add json support * initialize method args to NULL * Apply suggestions from code review Co-authored-by: Corey White * update documentation --------- Co-authored-by: Corey White Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com> 
--- db/db.describe/Makefile | 2 +- db/db.describe/db.describe.html | 226 ++++++++++++++++++++ db/db.describe/local_proto.h | 11 +- db/db.describe/main.c | 88 +++++++- db/db.describe/printtab.c | 157 ++++++++++---- db/db.describe/testsuite/test_dbdescribe.py | 213 ++++++++++++++++++ 6 files changed, 646 insertions(+), 51 deletions(-) diff --git a/db/db.describe/Makefile b/db/db.describe/Makefile index ce8ce0afd16..ca6caf23313 100644 --- a/db/db.describe/Makefile +++ b/db/db.describe/Makefile @@ -1,7 +1,7 @@ MODULE_TOPDIR = ../.. -LIBES = $(DBMILIB) $(GISLIB) +LIBES = $(DBMILIB) $(GISLIB) $(PARSONLIB) DEPENDENCIES = $(DBMIDEP) $(GISDEP) PGM = db.describe diff --git a/db/db.describe/db.describe.html b/db/db.describe/db.describe.html index 2fe979d74ce..4be0245f35c 100644 --- a/db/db.describe/db.describe.html +++ b/db/db.describe/db.describe.html @@ -61,6 +61,232 @@

DBF example

[...] +

JSON Output

+
+db.describe table=hospitals format=json
+
+ +
+{
+    "table": "hospitals",
+    "description": "",
+    "insert": null,
+    "delete": null,
+    "ncols": 16,
+    "nrows": 160,
+    "columns": [
+        {
+            "position": 1,
+            "column": "cat",
+            "description": "",
+            "type": "INTEGER",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 2,
+            "column": "OBJECTID",
+            "description": "",
+            "type": "INTEGER",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 3,
+            "column": "AREA",
+            "description": "",
+            "type": "DOUBLE PRECISION",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 4,
+            "column": "PERIMETER",
+            "description": "",
+            "type": "DOUBLE PRECISION",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 5,
+            "column": "HLS_",
+            "description": "",
+            "type": "INTEGER",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 6,
+            "column": "HLS_ID",
+            "description": "",
+            "type": "INTEGER",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 7,
+            "column": "NAME",
+            "description": "",
+            "type": "CHARACTER",
+            "length": 45,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 8,
+            "column": "ADDRESS",
+            "description": "",
+            "type": "CHARACTER",
+            "length": 35,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 9,
+            "column": "CITY",
+            "description": "",
+            "type": "CHARACTER",
+            "length": 16,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 10,
+            "column": "ZIP",
+            "description": "",
+            "type": "CHARACTER",
+            "length": 5,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 11,
+            "column": "COUNTY",
+            "description": "",
+            "type": "CHARACTER",
+            "length": 12,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 12,
+            "column": "PHONE",
+            "description": "",
+            "type": "CHARACTER",
+            "length": 14,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 13,
+            "column": "CANCER",
+            "description": "",
+            "type": "CHARACTER",
+            "length": 4,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 14,
+            "column": "POLYGONID",
+            "description": "",
+            "type": "INTEGER",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 15,
+            "column": "SCALE",
+            "description": "",
+            "type": "DOUBLE PRECISION",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        },
+        {
+            "position": 16,
+            "column": "ANGLE",
+            "description": "",
+            "type": "DOUBLE PRECISION",
+            "length": 20,
+            "scale": 0,
+            "precision": 0,
+            "default": null,
+            "nullok": true,
+            "select": null,
+            "update": null
+        }
+    ]
+}
+
+

SEE ALSO

diff --git a/db/db.describe/local_proto.h b/db/db.describe/local_proto.h index af669ae46f1..6f7c4b9e682 100644 --- a/db/db.describe/local_proto.h +++ b/db/db.describe/local_proto.h @@ -1,8 +1,13 @@ #ifndef __LOCAL_PROTO_H__ #define __LOCAL_PROTO_H__ -int print_priv(char *, int); -int print_column_definition(dbColumn *); -int print_table_definition(dbDriver *, dbTable *); +#include + +enum OutputFormat { PLAIN, JSON }; + +int print_priv(char *, int, enum OutputFormat, JSON_Object *); +int print_column_definition(dbColumn *, int, enum OutputFormat, JSON_Array *); +int print_table_definition(dbDriver *, dbTable *, enum OutputFormat, + JSON_Object *, JSON_Array *); #endif /* __LOCAL_PROTO_H__ */ diff --git a/db/db.describe/main.c b/db/db.describe/main.c index b2e6398b447..e32d1572a1a 100644 --- a/db/db.describe/main.c +++ b/db/db.describe/main.c @@ -19,11 +19,13 @@ #include #include #include +#include #include "local_proto.h" struct { char *driver, *database, *table; int printcolnames; + enum OutputFormat format; } parms; /* function prototypes */ @@ -40,7 +42,25 @@ int main(int argc, char **argv) char buf[1024]; dbString stmt; + JSON_Object *root_object, *col_object; + JSON_Value *root_value, *cols_value, *col_value; + JSON_Array *cols_array = NULL; + parse_command_line(argc, argv); + + if (parms.format == JSON) { + root_value = json_value_init_object(); + if (root_value == NULL) { + G_fatal_error(_("Failed to initialize JSON array. Out of memory?")); + } + root_object = json_object(root_value); + cols_value = json_value_init_array(); + if (cols_value == NULL) { + G_fatal_error(_("Failed to initialize JSON array. Out of memory?")); + } + cols_array = json_array(cols_value); + } + if (!db_table_exists(parms.driver, parms.database, parms.table)) { G_warning(_("Table <%s> not found in database <%s> using driver <%s>"), parms.table, parms.database, parms.driver); @@ -63,7 +83,8 @@ int main(int argc, char **argv) db_get_string(&table_name)); if (!parms.printcolnames) - print_table_definition(driver, table); + print_table_definition(driver, table, parms.format, root_object, + cols_array); else { ncols = db_get_table_number_of_columns(table); @@ -71,17 +92,57 @@ int main(int argc, char **argv) sprintf(buf, "select * from %s", db_get_table_name(table)); db_set_string(&stmt, buf); nrows = db_get_table_number_of_rows(driver, &stmt); - fprintf(stdout, "ncols: %d\n", ncols); - fprintf(stdout, "nrows: %d\n", nrows); + + switch (parms.format) { + case PLAIN: + fprintf(stdout, "ncols: %d\n", ncols); + fprintf(stdout, "nrows: %d\n", nrows); + break; + case JSON: + json_object_set_number(root_object, "ncols", ncols); + json_object_set_number(root_object, "nrows", nrows); + break; + } + for (col = 0; col < ncols; col++) { column = db_get_table_column(table, col); - fprintf(stdout, "Column %d: %s:%s:%d\n", (col + 1), - db_get_column_name(column), - db_sqltype_name(db_get_column_sqltype(column)), - db_get_column_length(column)); + + switch (parms.format) { + case PLAIN: + fprintf(stdout, "Column %d: %s:%s:%d\n", (col + 1), + db_get_column_name(column), + db_sqltype_name(db_get_column_sqltype(column)), + db_get_column_length(column)); + break; + case JSON: + col_value = json_value_init_object(); + col_object = json_object(col_value); + json_object_set_number(col_object, "position", col + 1); + json_object_set_string(col_object, "name", + db_get_column_name(column)); + json_object_set_string( + col_object, "type", + db_sqltype_name(db_get_column_sqltype(column))); + json_object_set_number(col_object, "length", + 
db_get_column_length(column)); + json_array_append_value(cols_array, col_value); + break; + } } } + if (parms.format == JSON) { + json_object_set_value(root_object, "columns", cols_value); + char *serialized_string = NULL; + serialized_string = json_serialize_to_string_pretty(root_value); + if (serialized_string == NULL) { + G_fatal_error(_("Failed to initialize pretty JSON string.")); + } + puts(serialized_string); + json_free_serialized_string(serialized_string); + json_value_free(root_value); + } + db_close_database(driver); db_shutdown_driver(driver); @@ -90,7 +151,7 @@ int main(int argc, char **argv) static void parse_command_line(int argc, char **argv) { - struct Option *driver, *database, *table; + struct Option *driver, *database, *table, *format_opt; struct Flag *cols; struct GModule *module; const char *drv, *db; @@ -115,9 +176,13 @@ static void parse_command_line(int argc, char **argv) if ((db = db_get_default_database_name())) database->answer = (char *)db; + format_opt = G_define_standard_option(G_OPT_F_FORMAT); + format_opt->guisection = _("Print"); + /* Set description */ module = G_define_module(); G_add_keyword(_("database")); + G_add_keyword(_("json")); G_add_keyword(_("attribute table")); module->description = _("Describes a table in detail."); @@ -128,4 +193,11 @@ static void parse_command_line(int argc, char **argv) parms.database = database->answer; parms.table = table->answer; parms.printcolnames = cols->answer; + + if (strcmp(format_opt->answer, "json") == 0) { + parms.format = JSON; + } + else { + parms.format = PLAIN; + } } diff --git a/db/db.describe/printtab.c b/db/db.describe/printtab.c index 1412f23adcf..59da4194fe7 100644 --- a/db/db.describe/printtab.c +++ b/db/db.describe/printtab.c @@ -1,19 +1,31 @@ #include #include #include "local_proto.h" -#include +#include -int print_table_definition(dbDriver *driver, dbTable *table) +int print_table_definition(dbDriver *driver, dbTable *table, + enum OutputFormat format, JSON_Object *root_object, + JSON_Array *cols_array) { int ncols, col, nrows; dbColumn *column; char buf[1024]; dbString stmt; - fprintf(stdout, "table:%s\n", db_get_table_name(table)); - fprintf(stdout, "description:%s\n", db_get_table_description(table)); - print_priv("insert", db_get_table_insert_priv(table)); - print_priv("delete", db_get_table_delete_priv(table)); + switch (format) { + case PLAIN: + fprintf(stdout, "table:%s\n", db_get_table_name(table)); + fprintf(stdout, "description:%s\n", db_get_table_description(table)); + break; + case JSON: + json_object_set_string(root_object, "table", db_get_table_name(table)); + json_object_set_string(root_object, "description", + db_get_table_description(table)); + break; + } + + print_priv("insert", db_get_table_insert_priv(table), format, root_object); + print_priv("delete", db_get_table_delete_priv(table), format, root_object); ncols = db_get_table_number_of_columns(table); @@ -21,58 +33,125 @@ int print_table_definition(dbDriver *driver, dbTable *table) sprintf(buf, "select * from %s", db_get_table_name(table)); db_set_string(&stmt, buf); nrows = db_get_table_number_of_rows(driver, &stmt); - fprintf(stdout, "ncols:%d\n", ncols); - fprintf(stdout, "nrows:%d\n", nrows); + + switch (format) { + case PLAIN: + fprintf(stdout, "ncols:%d\n", ncols); + fprintf(stdout, "nrows:%d\n", nrows); + break; + case JSON: + json_object_set_number(root_object, "ncols", ncols); + json_object_set_number(root_object, "nrows", nrows); + break; + } + for (col = 0; col < ncols; col++) { column = db_get_table_column(table, 
col); - fprintf(stdout, "\n"); - print_column_definition(column); + print_column_definition(column, col + 1, format, cols_array); } return 0; } -int print_column_definition(dbColumn *column) +int print_column_definition(dbColumn *column, int position, + enum OutputFormat format, JSON_Array *cols_array) { + JSON_Object *col_object = NULL; + JSON_Value *col_value = NULL; + dbString value_string; - fprintf(stdout, "column:%s\n", db_get_column_name(column)); - fprintf(stdout, "description:%s\n", db_get_column_description(column)); - fprintf(stdout, "type:%s\n", - db_sqltype_name(db_get_column_sqltype(column))); - fprintf(stdout, "len:%d\n", db_get_column_length(column)); - fprintf(stdout, "scale:%d\n", db_get_column_scale(column)); - fprintf(stdout, "precision:%d\n", db_get_column_precision(column)); - fprintf(stdout, "default:"); - if (db_test_column_has_default_value(column)) { - db_init_string(&value_string); - db_convert_column_default_value_to_string(column, &value_string); - fprintf(stdout, "%s", db_get_string(&value_string)); + switch (format) { + case PLAIN: + fprintf(stdout, "\n"); + fprintf(stdout, "column:%s\n", db_get_column_name(column)); + fprintf(stdout, "description:%s\n", db_get_column_description(column)); + fprintf(stdout, "type:%s\n", + db_sqltype_name(db_get_column_sqltype(column))); + fprintf(stdout, "len:%d\n", db_get_column_length(column)); + fprintf(stdout, "scale:%d\n", db_get_column_scale(column)); + fprintf(stdout, "precision:%d\n", db_get_column_precision(column)); + fprintf(stdout, "default:"); + if (db_test_column_has_default_value(column)) { + db_init_string(&value_string); + db_convert_column_default_value_to_string(column, &value_string); + fprintf(stdout, "%s", db_get_string(&value_string)); + } + fprintf(stdout, "\n"); + fprintf(stdout, "nullok:%s\n", + db_test_column_null_allowed(column) ? "yes" : "no"); + break; + case JSON: + col_value = json_value_init_object(); + col_object = json_object(col_value); + json_object_set_number(col_object, "position", position); + json_object_set_string(col_object, "column", + db_get_column_name(column)); + json_object_set_string(col_object, "description", + db_get_column_description(column)); + json_object_set_string(col_object, "type", + db_sqltype_name(db_get_column_sqltype(column))); + json_object_set_number(col_object, "length", + db_get_column_length(column)); + json_object_set_number(col_object, "scale", + db_get_column_scale(column)); + json_object_set_number(col_object, "precision", + db_get_column_precision(column)); + if (db_test_column_has_default_value(column)) { + db_init_string(&value_string); + db_convert_column_default_value_to_string(column, &value_string); + json_object_set_string(col_object, "default", + db_get_string(&value_string)); + } + else { + json_object_set_null(col_object, "default"); + } + json_object_set_boolean(col_object, "nullok", + db_test_column_null_allowed(column)); + break; + } + print_priv("select", db_get_column_select_priv(column), format, col_object); + print_priv("update", db_get_column_update_priv(column), format, col_object); + if (format == JSON) { + json_array_append_value(cols_array, col_value); } - fprintf(stdout, "\n"); - fprintf(stdout, "nullok:%s\n", - db_test_column_null_allowed(column) ? 
"yes" : "no"); - print_priv("select", db_get_column_select_priv(column)); - print_priv("update", db_get_column_update_priv(column)); return 0; } -int print_priv(char *label, int priv) +int print_priv(char *label, int priv, enum OutputFormat format, + JSON_Object *root_object) { - fprintf(stdout, "%s:", label); - switch (priv) { - case DB_GRANTED: - fprintf(stdout, "yes"); - break; - case DB_NOT_GRANTED: - fprintf(stdout, "no"); + switch (format) { + case PLAIN: + fprintf(stdout, "%s:", label); + switch (priv) { + case DB_GRANTED: + fprintf(stdout, "yes"); + break; + case DB_NOT_GRANTED: + fprintf(stdout, "no"); + break; + default: + fprintf(stdout, "?"); + break; + } + fprintf(stdout, "\n"); break; - default: - fprintf(stdout, "?"); + case JSON: + switch (priv) { + case DB_GRANTED: + json_object_set_boolean(root_object, label, 1); + break; + case DB_NOT_GRANTED: + json_object_set_boolean(root_object, label, 0); + break; + default: + json_object_set_null(root_object, label); + break; + } break; } - fprintf(stdout, "\n"); return 0; } diff --git a/db/db.describe/testsuite/test_dbdescribe.py b/db/db.describe/testsuite/test_dbdescribe.py index 1d6cc82aca5..f48debf0388 100644 --- a/db/db.describe/testsuite/test_dbdescribe.py +++ b/db/db.describe/testsuite/test_dbdescribe.py @@ -5,6 +5,7 @@ @author: lucadelu """ +import json from grass.gunittest.case import TestCase from grass.gunittest.main import test @@ -166,6 +167,202 @@ Column 12: SHAPE_Area:DOUBLE PRECISION:20 """ +output_json = { + "table": "zipcodes", + "description": "", + "insert": None, + "delete": None, + "ncols": 12, + "nrows": 44, + "columns": [ + { + "column": "cat", + "description": "", + "type": "INTEGER", + "length": 20, + "scale": 0, + "position": 1, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "OBJECTID", + "description": "", + "type": "INTEGER", + "length": 20, + "scale": 0, + "position": 2, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "WAKE_ZIPCO", + "description": "", + "type": "DOUBLE PRECISION", + "length": 20, + "scale": 0, + "position": 3, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "PERIMETER", + "description": "", + "type": "DOUBLE PRECISION", + "length": 20, + "scale": 0, + "position": 4, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "ZIPCODE_", + "description": "", + "type": "DOUBLE PRECISION", + "length": 20, + "scale": 0, + "position": 5, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "ZIPCODE_ID", + "description": "", + "type": "DOUBLE PRECISION", + "length": 20, + "scale": 0, + "position": 6, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "ZIPNAME", + "description": "", + "type": "CHARACTER", + "length": 15, + "scale": 0, + "position": 7, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "ZIPNUM", + "description": "", + "type": "DOUBLE PRECISION", + "length": 20, + "scale": 0, + "position": 8, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "ZIPCODE", + "description": "", + "type": "CHARACTER", + "length": 30, + "scale": 0, + "position": 9, + "precision": 0, + "default": None, + "nullok": True, + 
"select": None, + "update": None, + }, + { + "column": "NAME", + "description": "", + "type": "CHARACTER", + "length": 30, + "scale": 0, + "position": 10, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "SHAPE_Leng", + "description": "", + "type": "DOUBLE PRECISION", + "length": 20, + "scale": 0, + "position": 11, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + { + "column": "SHAPE_Area", + "description": "", + "type": "DOUBLE PRECISION", + "length": 20, + "scale": 0, + "position": 12, + "precision": 0, + "default": None, + "nullok": True, + "select": None, + "update": None, + }, + ], +} + +outcol_json = { + "ncols": 12, + "nrows": 44, + "columns": [ + {"position": 1, "name": "cat", "type": "INTEGER", "length": 20}, + {"position": 2, "name": "OBJECTID", "type": "INTEGER", "length": 20}, + {"position": 3, "name": "WAKE_ZIPCO", "type": "DOUBLE PRECISION", "length": 20}, + {"position": 4, "name": "PERIMETER", "type": "DOUBLE PRECISION", "length": 20}, + {"position": 5, "name": "ZIPCODE_", "type": "DOUBLE PRECISION", "length": 20}, + {"position": 6, "name": "ZIPCODE_ID", "type": "DOUBLE PRECISION", "length": 20}, + {"position": 7, "name": "ZIPNAME", "type": "CHARACTER", "length": 15}, + {"position": 8, "name": "ZIPNUM", "type": "DOUBLE PRECISION", "length": 20}, + {"position": 9, "name": "ZIPCODE", "type": "CHARACTER", "length": 30}, + {"position": 10, "name": "NAME", "type": "CHARACTER", "length": 30}, + { + "position": 11, + "name": "SHAPE_Leng", + "type": "DOUBLE PRECISION", + "length": 20, + }, + { + "position": 12, + "name": "SHAPE_Area", + "type": "DOUBLE PRECISION", + "length": 20, + }, + ], +} + class TestDbCopy(TestCase): invect = "zipcodes" @@ -185,6 +382,22 @@ def test_columns(self): ) self.assertEqual(first=cols, second=outcol) + def test_describe_json(self): + cols = read_command( + "db.describe", table=self.invect, database=self.mapset, format="json" + ) + self.assertEqual(output_json, json.loads(cols)) + + def test_columns_json(self): + cols = read_command( + "db.describe", + table=self.invect, + flags="c", + database=self.mapset, + format="json", + ) + self.assertEqual(outcol_json, json.loads(cols)) + if __name__ == "__main__": test() From a4257a13182275acb50609c43207970d48250da6 Mon Sep 17 00:00:00 2001 From: Markus Neteler Date: Tue, 20 Aug 2024 00:25:31 +0200 Subject: [PATCH 06/30] g.download.location: Print target path in error message (#4155) * g.download.location: print target path in error message To be less obscure in case the target location directory already exists: ``` ERROR: Location named already exists, download canceled ``` this PR improves the error message to include the path: ``` g.download.location url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME ERROR: Location named already exists in , download canceled ``` This is esp. relevant for scripted usage when the command isn't invoked directly by the user. In addition, section of **EXAMPLES** added. 
* fix message format Co-authored-by: Vaclav Petras * database -> project Co-authored-by: Veronica Andreo --------- Co-authored-by: Vaclav Petras Co-authored-by: Veronica Andreo --- .../g.download.location.html | 21 +++++++++++++++++++ .../g.download.location.py | 4 +++- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/scripts/g.download.location/g.download.location.html b/scripts/g.download.location/g.download.location.html index 3787e5e0fb5..6900acc4388 100644 --- a/scripts/g.download.location/g.download.location.html +++ b/scripts/g.download.location/g.download.location.html @@ -11,6 +11,27 @@

DESCRIPTION

The first directory which is a project is used. Other projects or any other files are ignored.

+

EXAMPLES

+ +

Download the full GRASS GIS sample project within a running session

+
+Download and unpack the full North Carolina sample project into the user's
+HOME directory:
+
+g.download.location url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME
+
+ +

Download the full GRASS GIS sample project in a temporary session

+ +In a temporary session, download and unpack the full North Carolina sample project +into the user's HOME directory: + +
+grass --tmp-location XY --exec g.download.location url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME
+
+ +

SEE ALSO

diff --git a/scripts/g.download.location/g.download.location.py b/scripts/g.download.location/g.download.location.py index a04012aedca..bca79b12492 100644 --- a/scripts/g.download.location/g.download.location.py +++ b/scripts/g.download.location/g.download.location.py @@ -102,7 +102,9 @@ def main(options, unused_flags): if destination.exists(): gs.fatal( - _("Location named <{}> already exists, download canceled").format(name) + _( + "Location named <{name}> already exists in <{directory}>, download canceled" + ).format(name=name, directory=database) ) gs.message(_("Downloading and extracting...")) From fdfec39c0301a3dcc5eddbd6f349e79017486744 Mon Sep 17 00:00:00 2001 From: Anna Petrasova Date: Tue, 20 Aug 2024 13:10:33 -0400 Subject: [PATCH 07/30] gui: replace python imp library with importlib for python 3.12 (#4201) --- gui/wxpython/core/gconsole.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/gui/wxpython/core/gconsole.py b/gui/wxpython/core/gconsole.py index 671e71d1548..bad4aac29cd 100644 --- a/gui/wxpython/core/gconsole.py +++ b/gui/wxpython/core/gconsole.py @@ -585,8 +585,23 @@ def RunCmd( if len(command) == 1: if command[0].startswith("g.gui."): - import imp import inspect + import importlib.util + import importlib.machinery + + def load_source(modname, filename): + loader = importlib.machinery.SourceFileLoader( + modname, filename + ) + spec = importlib.util.spec_from_file_location( + modname, filename, loader=loader + ) + module = importlib.util.module_from_spec(spec) + # Module is always executed and not cached in sys.modules. + # Uncomment the following line to cache the module. + # sys.modules[module.__name__] = module + loader.exec_module(module) + return module pyFile = command[0] if sys.platform == "win32": @@ -601,7 +616,7 @@ def RunCmd( parent=self._guiparent, message=_("Module <%s> not found.") % command[0], ) - pymodule = imp.load_source(command[0].replace(".", "_"), pyPath) + pymodule = load_source(command[0].replace(".", "_"), pyPath) pymain = inspect.getfullargspec(pymodule.main) if pymain and "giface" in pymain.args: pymodule.main(self._giface) From 296147767f9a9942bba8ab1409c8183432c7b814 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 17:54:25 -0400 Subject: [PATCH 08/30] CI(deps): Update github/codeql-action action to v3.26.3 (#4199) --- .github/workflows/codeql-analysis.yml | 4 ++-- .github/workflows/python-code-quality.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index c86e7bfc165..6e0115cc9a6 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -56,7 +56,7 @@ jobs: if: ${{ matrix.language == 'c-cpp' }} - name: Initialize CodeQL - uses: github/codeql-action/init@429e1977040da7a23b6822b13c129cd1ba93dbb2 # v3.26.2 + uses: github/codeql-action/init@883d8588e56d1753a8a58c1c86e88976f0c23449 # v3.26.3 with: languages: ${{ matrix.language }} config-file: ./.github/codeql/codeql-config.yml @@ -81,6 +81,6 @@ jobs: run: .github/workflows/build_ubuntu-22.04.sh "${HOME}/install" - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@429e1977040da7a23b6822b13c129cd1ba93dbb2 # v3.26.2 + uses: github/codeql-action/analyze@883d8588e56d1753a8a58c1c86e88976f0c23449 # v3.26.3 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/python-code-quality.yml 
b/.github/workflows/python-code-quality.yml index 3ca7e512a3a..f47b0ec8fc2 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -135,7 +135,7 @@ jobs: path: bandit.sarif - name: Upload SARIF File into Security Tab - uses: github/codeql-action/upload-sarif@429e1977040da7a23b6822b13c129cd1ba93dbb2 # v3.26.2 + uses: github/codeql-action/upload-sarif@883d8588e56d1753a8a58c1c86e88976f0c23449 # v3.26.3 with: sarif_file: bandit.sarif From e1b289fb8aa0347a56b57f1f356c983875fc4724 Mon Sep 17 00:00:00 2001 From: Paulo van Breugel Date: Wed, 21 Aug 2024 00:34:33 +0200 Subject: [PATCH 09/30] startup: Change Location to Project in first-time user messages (#4193) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update infomanager.py: Change Location for Project in messages The startup message for first-time users refers to the old Locations. This should be changed to Projects to match the new terminology used in 8.4. * Update infomanager.py From Mapset to mapset (all lowercase) * Apply suggestions from code review Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com> --------- Co-authored-by: Edouard Choinière <27212526+echoix@users.noreply.github.com> --- gui/wxpython/datacatalog/infomanager.py | 26 ++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/gui/wxpython/datacatalog/infomanager.py b/gui/wxpython/datacatalog/infomanager.py index 9f2f33a7fbe..d3daf4d38fa 100644 --- a/gui/wxpython/datacatalog/infomanager.py +++ b/gui/wxpython/datacatalog/infomanager.py @@ -33,16 +33,16 @@ def __init__(self, infobar, giface): def ShowDataStructureInfo(self, onCreateLocationHandler): """Show info about the data hierarchy focused on the first-time user""" buttons = [ - (_("Create new Location"), onCreateLocationHandler), + (_("Create new project"), onCreateLocationHandler), (_("Learn more"), self._onLearnMore), ] message = _( - "GRASS GIS helps you organize your data using Locations (projects) " - "which contain Mapsets (subprojects). All data in one Location is " + "GRASS GIS helps you organize your data using projects (locations) " + "which contain mapsets (subprojects). All data in one project is " "in the same coordinate reference system (CRS).\n\n" - "You are currently in Mapset PERMANENT in default Location {loc} " + "You are currently in mapset PERMANENT in default project {loc} " "which uses WGS 84 (EPSG:4326). " - "Consider creating a new Location with a CRS " + "Consider creating a new project with a CRS " "specific to your area. You can do it now or anytime later from " "the toolbar above." ).format(loc=gisenv()["LOCATION_NAME"]) @@ -55,11 +55,11 @@ def ShowImportDataInfo(self, OnImportOgrLayersHandler, OnImportGdalLayersHandler (_("Import raster data"), OnImportGdalLayersHandler), ] message = _( - "You have successfully created a new Location {loc}. " - "Currently you are in its PERMANENT Mapset which is used for " + "You have successfully created a new project {loc}. " + "Currently you are in its PERMANENT mapset which is used for " "storing your base maps to make them readily available in other " - "Mapsets. You can create new Mapsets for different tasks by right " - "clicking on the Location name.\n\n" + "mapsets. You can create new mapsets for different tasks by right " + "clicking on the project name.\n\n" "To import data, go to the toolbar above or use the buttons below." 
).format(loc=gisenv()["LOCATION_NAME"]) self.infoBar.ShowMessage(message, wx.ICON_INFORMATION, buttons) @@ -81,8 +81,8 @@ def ShowFallbackSessionInfo(self, reason_id): """Show info when last used mapset is not usable""" string = self._text_from_reason_id(reason_id) message = _( - "{string} GRASS GIS has started in a temporary Location. " - "To continue, use Data Catalog below to switch to a different Location." + "{string} GRASS GIS has started in a temporary project. " + "To continue, use Data Catalog below to switch to a different project." ).format( string=string, ) @@ -94,8 +94,8 @@ def ShowLockedMapsetInfo(self, OnSwitchMapsetHandler): buttons = [(_("Switch to last used mapset"), OnSwitchMapsetHandler)] message = _( "Last used mapset in path '{mapsetpath}' is currently in use. " - "GRASS GIS has started in a temporary Location. " - "To continue, use Data Catalog below to switch to a different Location " + "GRASS GIS has started in a temporary project. " + "To continue, use Data Catalog below to switch to a different project " "or remove lock file and switch to the last used mapset." ).format(mapsetpath=last_used_mapset_path) self.infoBar.ShowMessage(message, wx.ICON_INFORMATION, buttons) From 037a5ed0fc11bfd1f267cd1c3309ab9c0049fa7e Mon Sep 17 00:00:00 2001 From: Anna Petrasova Date: Tue, 20 Aug 2024 18:39:18 -0400 Subject: [PATCH 10/30] g.download.project: rename g.download.location (#4187) --- .github/workflows/macos.yml | 2 +- .github/workflows/test_thorough.bat | 2 +- .github/workflows/test_thorough.sh | 2 +- scripts/Makefile | 1 + .../g.download.location.html | 43 +---- .../g.download.location.py | 115 +------------ scripts/g.download.project/Makefile | 7 + .../g.download.project.html | 46 ++++++ .../g.download.project/g.download.project.py | 153 ++++++++++++++++++ 9 files changed, 224 insertions(+), 147 deletions(-) create mode 100644 scripts/g.download.project/Makefile create mode 100644 scripts/g.download.project/g.download.project.html create mode 100644 scripts/g.download.project/g.download.project.py diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index bf3859334ec..5a82b1020ea 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -88,7 +88,7 @@ jobs: shell: bash -el {0} run: | grass --tmp-project XY --exec \ - g.download.location url=${{ env.SampleData }} path=$HOME + g.download.project url=${{ env.SampleData }} path=$HOME grass --tmp-project XY --exec \ python3 -m grass.gunittest.main \ --grassdata $HOME --location nc_spm_full_v2alpha2 --location-type nc \ diff --git a/.github/workflows/test_thorough.bat b/.github/workflows/test_thorough.bat index 4b03a5608be..963f24b9b43 100644 --- a/.github/workflows/test_thorough.bat +++ b/.github/workflows/test_thorough.bat @@ -1,5 +1,5 @@ set grass=%1 set python=%2 -call %grass% --tmp-project XY --exec g.download.location url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=%USERPROFILE% +call %grass% --tmp-project XY --exec g.download.project url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=%USERPROFILE% call %grass% --tmp-project XY --exec %python% -m grass.gunittest.main --grassdata %USERPROFILE% --location nc_spm_full_v2alpha2 --location-type nc --min-success 80 diff --git a/.github/workflows/test_thorough.sh b/.github/workflows/test_thorough.sh index dba6b94b77d..6ed7d22078a 100755 --- a/.github/workflows/test_thorough.sh +++ b/.github/workflows/test_thorough.sh @@ -4,7 +4,7 @@ set -e grass --tmp-project XY 
--exec \ - g.download.location url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME + g.download.project url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME grass --tmp-project XY --exec \ python3 -m grass.gunittest.main \ diff --git a/scripts/Makefile b/scripts/Makefile index 84a9ad21e26..dc3d80f4209 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -19,6 +19,7 @@ SUBDIRS = \ db.test \ db.univar \ g.download.location \ + g.download.project \ g.extension \ g.extension.all \ g.manual \ diff --git a/scripts/g.download.location/g.download.location.html b/scripts/g.download.location/g.download.location.html index 6900acc4388..d9a73dc5513 100644 --- a/scripts/g.download.location/g.download.location.html +++ b/scripts/g.download.location/g.download.location.html @@ -1,47 +1,16 @@

DESCRIPTION

-g.download.location downloads an archived (e.g.,
-.zip or .tar.gz) project (previously called
-location) from a given URL
-and unpacks it to a specified or current GRASS GIS Spatial Database.
-URL can be also a local file on the disk.
-
-If the archive contains a directory which contains a project, the module
-will recognize that and use the project automatically.
-The first directory which is a project is used.
-Other projects or any other files are ignored.
-

EXAMPLES

- -

Download the full GRASS GIS sample project within a running session

-
-Download and unpack the full North Carolina sample project into the user's
-HOME directory:
-
-g.download.location url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME
-
- -

Download the full GRASS GIS sample project in a temporary session

-
-In a temporary session, download and unpack the full North Carolina sample project
-into the user's HOME directory:
-
-grass --tmp-location XY --exec g.download.location url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME
-
-
+g.download.location has been renamed to
+g.download.project
+and exists for backwards compatibility reasons.
+It will be removed in the next major version.
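
For illustration only, a minimal sketch of such a backwards-compatibility wrapper
(not the exact stub shipped in this patch): it keeps the old option interface and
simply forwards every parsed option to the renamed tool, which is also what the
rewritten g.download.location.py further down does.

#!/usr/bin/env python3
# Minimal sketch of a deprecation wrapper: keep the old option interface,
# then pass everything through to the renamed tool.

# %module
# % description: Deprecated wrapper, use g.download.project instead
# %end
# %option
# % key: url
# % type: string
# % required: yes
# %end
# %option G_OPT_M_LOCATION
# % key: name
# % required: no
# %end
# %option G_OPT_M_DBASE
# % key: path
# % required: no
# %end

import grass.script as gs


def main(options, unused_flags):
    # g.download.project accepts the same url/name/path options,
    # so the wrapper can hand them over unchanged.
    gs.run_command("g.download.project", **options)


if __name__ == "__main__":
    main(*gs.parser())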

SEE ALSO

- g.mapset,
- g.mapsets,
- r.proj,
- v.proj,
- g.proj.all
+ g.download.project

AUTHOR

-Vaclav Petras, NCSU GeoForAll Lab +Vaclav Petras, NCSU GeoForAll Lab diff --git a/scripts/g.download.location/g.download.location.py b/scripts/g.download.location/g.download.location.py index bca79b12492..0e58a1ba395 100644 --- a/scripts/g.download.location/g.download.location.py +++ b/scripts/g.download.location/g.download.location.py @@ -3,8 +3,8 @@ # # MODULE: g.download.location # AUTHOR(S): Vaclav Petras -# PURPOSE: Download and extract location from web -# COPYRIGHT: (C) 2017 by the GRASS Development Team +# PURPOSE: Download and extract project (location) from web +# COPYRIGHT: (C) 2017-2024 by the GRASS Development Team # # This program is free software under the GNU General # Public License (>=v2). Read the file COPYING that @@ -12,11 +12,11 @@ # ############################################################################# -"""Download GRASS Locations""" +"""Download GRASS projects""" # %module -# % label: Download GRASS Location from the web -# % description: Get GRASS Location from an URL or file path +# % label: Download GRASS project (location) from the web +# % description: Get GRASS project from an URL or file path # % keyword: general # % keyword: data # % keyword: download @@ -26,7 +26,7 @@ # % key: url # % multiple: no # % type: string -# % label: URL of the archive with a location to be downloaded +# % label: URL of the archive with a project to be downloaded # % description: URL of ZIP, TAR.GZ, or other similar archive # % required: yes # %end @@ -42,111 +42,12 @@ # % multiple: no # %end -import atexit -import os -import shutil -from pathlib import Path - import grass.script as gs -from grass.grassdb.checks import is_location_valid -from grass.script.utils import try_rmdir -from grass.utils.download import DownloadError, download_and_extract, name_from_url - - -def find_location_in_directory(path, recurse=0): - """Return path to location in one of the subdirectories or None - - The first location found is returned. The expected usage is looking for one - location somewhere nested in subdirectories. - - By default only the immediate subdirectories of the provided directory are - tested, but with ``recurse >= 1`` additional levels of subdirectories - are tested for being locations. - - Directory names are sorted to provide a stable result. - - :param path: Path to the directory to search - :param recurse: How many additional levels of subdirectories to explore - """ - assert recurse >= 0 - full_paths = [os.path.join(path, i) for i in os.listdir(path)] - candidates = sorted([i for i in full_paths if os.path.isdir(i)]) - for candidate in candidates: - if is_location_valid(candidate): - return candidate - if recurse: - for candidate in candidates: - result = find_location_in_directory(candidate, recurse - 1) - if result: - return result - return None - - -def location_name_from_url(url): - """Create location name from URL""" - return gs.legalize_vector_name(name_from_url(url)) def main(options, unused_flags): - """Download and copy location to destination""" - url = options["url"] - name = options["name"] - database = options["path"] - - if not database: - # Use the current database path. 
- database = gs.gisenv()["GISDBASE"] - if not name: - name = location_name_from_url(url) - destination = Path(database) / name - - if destination.exists(): - gs.fatal( - _( - "Location named <{name}> already exists in <{directory}>, download canceled" - ).format(name=name, directory=database) - ) - - gs.message(_("Downloading and extracting...")) - try: - directory = download_and_extract(url) - if not directory.is_dir(): - gs.fatal(_("Archive contains only one file and no mapset directories")) - atexit.register(lambda: try_rmdir(directory)) - except DownloadError as error: - gs.fatal(_("Unable to get the location: {error}").format(error=error)) - if not is_location_valid(directory): - gs.verbose(_("Searching for valid location...")) - # This in fact deal with location being on the third level of directories - # thanks to how the extraction functions work (leaving out one level). - result = find_location_in_directory(directory, recurse=1) - if result: - # We just want to show relative path in the message. - # The relative path misses the root directory (name), because we - # loose it on the way. (We should use parent directory to get the - # full relative path, but the directory name is different now. - # This is the consequence of how the extract functions work.) - relative = os.path.relpath(result, start=directory) - gs.verbose( - _("Location found in a nested directory '{directory}'").format( - directory=relative - ) - ) - directory = result - else: - # The list is similarly misleading as the relative path above - # as it misses the root directory, but it still should be useful. - files_and_dirs = os.listdir(directory) - gs.fatal( - _( - "The downloaded file is not a valid GRASS Location." - " The extracted file contains these files and directories:" - "\n{files_and_dirs}" - ).format(files_and_dirs=" ".join(files_and_dirs)) - ) - gs.verbose(_("Copying to final destination...")) - shutil.copytree(src=directory, dst=destination) - gs.message(_("Path to the location now <{path}>").format(path=destination)) + """Download and copy project to destination""" + gs.run_command("g.dowload.project", **options) if __name__ == "__main__": diff --git a/scripts/g.download.project/Makefile b/scripts/g.download.project/Makefile new file mode 100644 index 00000000000..0f5e9d1c504 --- /dev/null +++ b/scripts/g.download.project/Makefile @@ -0,0 +1,7 @@ +MODULE_TOPDIR = ../.. + +PGM = g.download.project + +include $(MODULE_TOPDIR)/include/Make/Script.make + +default: script diff --git a/scripts/g.download.project/g.download.project.html b/scripts/g.download.project/g.download.project.html new file mode 100644 index 00000000000..7fc469fb0a6 --- /dev/null +++ b/scripts/g.download.project/g.download.project.html @@ -0,0 +1,46 @@ +

DESCRIPTION

+
+g.download.project downloads an archived (e.g.,
+.zip or .tar.gz) project (previously called
+location) from a given URL
+and unpacks it to a specified or current GRASS GIS Spatial Database.
+URL can be also a local file on the disk.
+
+If the archive contains a directory which contains a project, the module
+will recognize that and use the project automatically.
+The first directory which is a project is used.
+Other projects or any other files are ignored.
+
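
The nested-archive behaviour described above is implemented by the
find_location_in_directory() helper carried over into g.download.project.py
further down in this patch; roughly:

# Roughly the helper used by the module: return the first subdirectory that
# is a valid project, optionally recursing one more level into candidates.
import os

from grass.grassdb.checks import is_location_valid


def find_location_in_directory(path, recurse=0):
    full_paths = [os.path.join(path, name) for name in os.listdir(path)]
    candidates = sorted(p for p in full_paths if os.path.isdir(p))
    for candidate in candidates:
        if is_location_valid(candidate):
            return candidate
    if recurse:
        for candidate in candidates:
            result = find_location_in_directory(candidate, recurse - 1)
            if result:
                return result
    return None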

EXAMPLES

+ +

Download the full GRASS GIS sample project within a running session

+
+Download and unpack the full North Carolina sample project into the user's
+HOME directory:
+
+g.download.project url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME
+
+ +
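
The same download can also be scripted from Python inside a running GRASS
session; an illustrative call (not part of the manual page) would be:

# Illustrative Python equivalent of the example above, run inside a session.
import os

import grass.script as gs

gs.run_command(
    "g.download.project",
    url="https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz",
    path=os.path.expanduser("~"),
)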

Download the full GRASS GIS sample project in a temporary session

+
+In a temporary session, download and unpack the full North Carolina sample project
+into the user's HOME directory:
+
+grass --tmp-project XY --exec g.download.project url=https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz path=$HOME
+
+ +
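
Outside of an existing session, the same one-liner can be driven from plain
Python; a sketch assuming the grass executable is on PATH:

# Illustrative sketch: wrap the temporary-session one-liner with subprocess.
import os
import subprocess

url = "https://grass.osgeo.org/sampledata/north_carolina/nc_spm_full_v2alpha2.tar.gz"
subprocess.run(
    [
        "grass",
        "--tmp-project",
        "XY",
        "--exec",
        "g.download.project",
        f"url={url}",
        f"path={os.path.expanduser('~')}",
    ],
    check=True,
)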

SEE ALSO

+
+
+ g.mapset,
+ g.mapsets,
+ r.proj,
+ v.proj,
+ g.proj.all
+
+
+

AUTHOR

+ +Vaclav Petras, NCSU GeoForAll Lab diff --git a/scripts/g.download.project/g.download.project.py b/scripts/g.download.project/g.download.project.py new file mode 100644 index 00000000000..aa963c31dd3 --- /dev/null +++ b/scripts/g.download.project/g.download.project.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 +############################################################################ +# +# MODULE: g.download.project +# AUTHOR(S): Vaclav Petras +# PURPOSE: Download and extract project (location) from web +# COPYRIGHT: (C) 2017-2024 by the GRASS Development Team +# +# This program is free software under the GNU General +# Public License (>=v2). Read the file COPYING that +# comes with GRASS for details. +# +############################################################################# + +"""Download GRASS projects""" + +# %module +# % label: Download GRASS project from the web +# % description: Get GRASS project from an URL or file path +# % keyword: general +# % keyword: data +# % keyword: download +# % keyword: import +# %end +# %option +# % key: url +# % multiple: no +# % type: string +# % label: URL of the archive with a project to be downloaded +# % description: URL of ZIP, TAR.GZ, or other similar archive +# % required: yes +# %end +# %option G_OPT_M_LOCATION +# % key: name +# % required: no +# % multiple: no +# % key_desc: name +# %end +# %option G_OPT_M_DBASE +# % key: path +# % required: no +# % multiple: no +# %end + +import atexit +import os +import shutil +from pathlib import Path + +import grass.script as gs +from grass.grassdb.checks import is_location_valid +from grass.script.utils import try_rmdir +from grass.utils.download import DownloadError, download_and_extract, name_from_url + + +def find_location_in_directory(path, recurse=0): + """Return path to location in one of the subdirectories or None + + The first location found is returned. The expected usage is looking for one + location somewhere nested in subdirectories. + + By default only the immediate subdirectories of the provided directory are + tested, but with ``recurse >= 1`` additional levels of subdirectories + are tested for being locations. + + Directory names are sorted to provide a stable result. + + :param path: Path to the directory to search + :param recurse: How many additional levels of subdirectories to explore + """ + assert recurse >= 0 + full_paths = [os.path.join(path, i) for i in os.listdir(path)] + candidates = sorted([i for i in full_paths if os.path.isdir(i)]) + for candidate in candidates: + if is_location_valid(candidate): + return candidate + if recurse: + for candidate in candidates: + result = find_location_in_directory(candidate, recurse - 1) + if result: + return result + return None + + +def location_name_from_url(url): + """Create location name from URL""" + return gs.legalize_vector_name(name_from_url(url)) + + +def main(options, unused_flags): + """Download and copy location to destination""" + url = options["url"] + name = options["name"] + database = options["path"] + + if not database: + # Use the current database path. 
+ database = gs.gisenv()["GISDBASE"] + if not name: + name = location_name_from_url(url) + destination = Path(database) / name + + if destination.exists(): + gs.fatal( + _( + "Project named <{}> already exists in <{directory}>, download canceled" + ).format(name=name, directory=database) + ) + + gs.message(_("Downloading and extracting...")) + try: + directory = download_and_extract(url) + if not directory.is_dir(): + gs.fatal(_("Archive contains only one file and no mapset directories")) + atexit.register(lambda: try_rmdir(directory)) + except DownloadError as error: + gs.fatal(_("Unable to get the project: {error}").format(error=error)) + if not is_location_valid(directory): + gs.verbose(_("Searching for valid project...")) + # This in fact deal with location being on the third level of directories + # thanks to how the extraction functions work (leaving out one level). + result = find_location_in_directory(directory, recurse=1) + if result: + # We just want to show relative path in the message. + # The relative path misses the root directory (name), because we + # loose it on the way. (We should use parent directory to get the + # full relative path, but the directory name is different now. + # This is the consequence of how the extract functions work.) + relative = os.path.relpath(result, start=directory) + gs.verbose( + _("Project found in a nested directory '{directory}'").format( + directory=relative + ) + ) + directory = result + else: + # The list is similarly misleading as the relative path above + # as it misses the root directory, but it still should be useful. + files_and_dirs = os.listdir(directory) + gs.fatal( + _( + "The downloaded file is not a valid GRASS project." + " The extracted file contains these files and directories:" + "\n{files_and_dirs}" + ).format(files_and_dirs=" ".join(files_and_dirs)) + ) + gs.verbose(_("Copying to final destination...")) + shutil.copytree(src=directory, dst=destination) + gs.message(_("Path to the project now <{path}>").format(path=destination)) + + +if __name__ == "__main__": + main(*gs.parser()) From 17bf103b17be28cad46895f350fc23dde1b1067b Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 21 Aug 2024 11:01:23 -0400 Subject: [PATCH 11/30] g.download.location: Fix typo in g.download.project call (#4205) The underlying tool name was misspelled. 
--- scripts/g.download.location/g.download.location.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/g.download.location/g.download.location.py b/scripts/g.download.location/g.download.location.py index 0e58a1ba395..61660457a4a 100644 --- a/scripts/g.download.location/g.download.location.py +++ b/scripts/g.download.location/g.download.location.py @@ -47,7 +47,7 @@ def main(options, unused_flags): """Download and copy project to destination""" - gs.run_command("g.dowload.project", **options) + gs.run_command("g.download.project", **options) if __name__ == "__main__": From 2171389179c2e8f596acc43638ef81b4234a560b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 18:32:06 -0400 Subject: [PATCH 12/30] CI(deps): Update github/codeql-action action to v3.26.4 (#4208) --- .github/workflows/codeql-analysis.yml | 4 ++-- .github/workflows/python-code-quality.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 6e0115cc9a6..73fbe757909 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -56,7 +56,7 @@ jobs: if: ${{ matrix.language == 'c-cpp' }} - name: Initialize CodeQL - uses: github/codeql-action/init@883d8588e56d1753a8a58c1c86e88976f0c23449 # v3.26.3 + uses: github/codeql-action/init@f0f3afee809481da311ca3a6ff1ff51d81dbeb24 # v3.26.4 with: languages: ${{ matrix.language }} config-file: ./.github/codeql/codeql-config.yml @@ -81,6 +81,6 @@ jobs: run: .github/workflows/build_ubuntu-22.04.sh "${HOME}/install" - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@883d8588e56d1753a8a58c1c86e88976f0c23449 # v3.26.3 + uses: github/codeql-action/analyze@f0f3afee809481da311ca3a6ff1ff51d81dbeb24 # v3.26.4 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index f47b0ec8fc2..3dd5e2834e8 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -135,7 +135,7 @@ jobs: path: bandit.sarif - name: Upload SARIF File into Security Tab - uses: github/codeql-action/upload-sarif@883d8588e56d1753a8a58c1c86e88976f0c23449 # v3.26.3 + uses: github/codeql-action/upload-sarif@f0f3afee809481da311ca3a6ff1ff51d81dbeb24 # v3.26.4 with: sarif_file: bandit.sarif From a46771f12fc338d76dffff6a6244245f69b8fdcc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edouard=20Choini=C3=A8re?= <27212526+echoix@users.noreply.github.com> Date: Wed, 21 Aug 2024 22:30:30 -0400 Subject: [PATCH 13/30] CI(python): Enable printing of all Python warnings with PYTHONWARNINGS (#4210) Enables to print all occurrences of warnings, and shows all warnings. 
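
For context, PYTHONWARNINGS=always installs roughly the same warning filter as
warnings.simplefilter("always"), so every occurrence of a warning is reported
instead of only the first one per location; a small standalone illustration
(not part of the patch):

# Compare the default once-per-location behaviour with the "always" filter.
import warnings


def noisy():
    warnings.warn("this API is deprecated", DeprecationWarning, stacklevel=2)


for mode in ("default", "always"):
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter(mode)
        for _ in range(3):
            noisy()
    print(f"{mode} filter: {len(caught)} warning(s) recorded")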
--- .github/workflows/macos.yml | 2 ++ .github/workflows/osgeo4w.yml | 2 ++ .github/workflows/pytest.yml | 1 + .github/workflows/ubuntu.yml | 2 ++ 4 files changed, 7 insertions(+) diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 5a82b1020ea..0a90a022d98 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -18,6 +18,8 @@ jobs: macos_build: name: macOS build runs-on: macos-14 + env: + PYTHONWARNINGS: always steps: - name: Info run: | diff --git a/.github/workflows/osgeo4w.yml b/.github/workflows/osgeo4w.yml index ba1217620c3..7152265fd3d 100644 --- a/.github/workflows/osgeo4w.yml +++ b/.github/workflows/osgeo4w.yml @@ -18,6 +18,8 @@ jobs: cancel-in-progress: true runs-on: ${{ matrix.os }} + env: + PYTHONWARNINGS: always strategy: matrix: os: diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 997798d8873..9948fafff60 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -29,6 +29,7 @@ jobs: env: FORCE_COLOR: 1 # for software including pip: https://force-color.org/ CLICOLOR_FORCE: 1 # for other software including ninja: https://bixense.com/clicolors/ + PYTHONWARNINGS: always steps: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 diff --git a/.github/workflows/ubuntu.yml b/.github/workflows/ubuntu.yml index 4f70486e05e..626d3d26676 100644 --- a/.github/workflows/ubuntu.yml +++ b/.github/workflows/ubuntu.yml @@ -18,6 +18,8 @@ jobs: cancel-in-progress: true runs-on: ${{ matrix.os }} + env: + PYTHONWARNINGS: always strategy: matrix: name: From 42739e4a266d6f346278ab12a8f470eb0f9be609 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edouard=20Choini=C3=A8re?= <27212526+echoix@users.noreply.github.com> Date: Wed, 21 Aug 2024 22:56:14 -0400 Subject: [PATCH 14/30] CI(pytest): Run pytest tests with Python 3.13 (#4209) Also replace Python 3.8 by Python 3.9 to keep a total of 3 versions tested --- .github/workflows/pytest.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 9948fafff60..c0625635980 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -20,9 +20,9 @@ jobs: os: - ubuntu-22.04 python-version: - - '3.8' - - '3.10' + - '3.9' - '3.12' + - '3.13' fail-fast: true runs-on: ${{ matrix.os }} @@ -39,6 +39,7 @@ jobs: with: python-version: ${{ matrix.python-version }} cache: pip + allow-prereleases: true - name: Install non-Python dependencies run: | From dd5547fd6a189ced99338de7c21f2f34a73d01da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edouard=20Choini=C3=A8re?= <27212526+echoix@users.noreply.github.com> Date: Thu, 22 Aug 2024 07:58:55 -0400 Subject: [PATCH 15/30] configure: update to latest config.guess and config.sub (#4161) --- config.guess | 11 +- config.sub | 729 +++++++++++++++++++++++++++++++++++++++------------ 2 files changed, 563 insertions(+), 177 deletions(-) diff --git a/config.guess b/config.guess index f6d217a49f8..48a684601bd 100755 --- a/config.guess +++ b/config.guess @@ -4,7 +4,7 @@ # shellcheck disable=SC2006,SC2268 # see below for rationale -timestamp='2024-01-01' +timestamp='2024-07-27' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by @@ -123,7 +123,7 @@ set_cc_for_build() { dummy=$tmp/dummy case ${CC_FOR_BUILD-},${HOST_CC-},${CC-} in ,,) echo "int x;" > "$dummy.c" - for driver in cc gcc c89 c99 ; do + for driver in cc gcc c17 c99 c89 ; do if ($driver -c 
-o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then CC_FOR_BUILD=$driver break @@ -634,7 +634,8 @@ EOF sed 's/^ //' << EOF > "$dummy.c" #include - main() + int + main () { if (!__power_pc()) exit(1); @@ -718,7 +719,8 @@ EOF #include #include - int main () + int + main () { #if defined(_SC_KERNEL_BITS) long bits = sysconf(_SC_KERNEL_BITS); @@ -1621,6 +1623,7 @@ cat > "$dummy.c" <&2 exit 1 ;; - kfreebsd*-gnu*- | kopensolaris*-gnu*-) + kfreebsd*-gnu*- | knetbsd*-gnu*- | netbsd*-gnu*- | kopensolaris*-gnu*-) ;; vxworks-simlinux- | vxworks-simwindows- | vxworks-spe-) ;; @@ -1864,6 +2245,8 @@ case $kernel-$os-$obj in ;; os2-emx-) ;; + rtmk-nova-) + ;; *-eabi*- | *-gnueabi*-) ;; none--*) @@ -1890,7 +2273,7 @@ case $vendor in *-riscix*) vendor=acorn ;; - *-sunos*) + *-sunos* | *-solaris*) vendor=sun ;; *-cnk* | *-aix*) From ff98e4212640bac95e206771573506a37c8822b7 Mon Sep 17 00:00:00 2001 From: Kriti Birda <164247895+kritibirda26@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:45:01 +0530 Subject: [PATCH 16/30] db.describe: fix illegal memory access report (#4202) Fix issue reported in https://github.com/OSGeo/grass/pull/4021#issuecomment-2298894298. --- db/db.describe/main.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/db/db.describe/main.c b/db/db.describe/main.c index e32d1572a1a..ea0c6483778 100644 --- a/db/db.describe/main.c +++ b/db/db.describe/main.c @@ -42,8 +42,8 @@ int main(int argc, char **argv) char buf[1024]; dbString stmt; - JSON_Object *root_object, *col_object; - JSON_Value *root_value, *cols_value, *col_value; + JSON_Object *root_object = NULL, *col_object = NULL; + JSON_Value *root_value = NULL, *cols_value = NULL, *col_value = NULL; JSON_Array *cols_array = NULL; parse_command_line(argc, argv); From 92cb84ab2484aaf749d281d86ec31f5feb7828e0 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 12:01:13 -0400 Subject: [PATCH 17/30] CI(deps): Update ruff to v0.6.2 (#4212) --- .github/workflows/python-code-quality.yml | 2 +- .pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 3dd5e2834e8..5f8de26bc23 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -36,7 +36,7 @@ jobs: # renovate: datasource=pypi depName=bandit BANDIT_VERSION: "1.7.9" # renovate: datasource=pypi depName=ruff - RUFF_VERSION: "0.6.1" + RUFF_VERSION: "0.6.2" runs-on: ${{ matrix.os }} permissions: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4bb4781ebe6..d20d238f908 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,7 +37,7 @@ repos: ) - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.6.1 + rev: v0.6.2 hooks: # Run the linter. - id: ruff From 913c62074ce65505637d639e2cb95fae9d037861 Mon Sep 17 00:00:00 2001 From: Makiko Shukunobe Date: Thu, 22 Aug 2024 23:33:47 -0400 Subject: [PATCH 18/30] Checks: Fix flake8 F841 (local variable assigned to but never used) in /temporal/t.rast.to.vect (#4206) --- .flake8 | 2 +- temporal/t.rast.to.vect/t.rast.to.vect.py | 3 ++- temporal/t.rast.to.vect/testsuite/test_to_vect.py | 7 ------- 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/.flake8 b/.flake8 index 7019d947c03..b628b1ad53c 100644 --- a/.flake8 +++ b/.flake8 @@ -177,7 +177,7 @@ per-file-ignores = # Line too long (esp. 
module interface definitions) scripts/*/*.py: E501 # local variable 'column' is assigned to but never used - temporal/t.rast.to.vect/t.rast.to.vect.py: F841, E501 + temporal/t.rast.to.vect/t.rast.to.vect.py: E501 # local variable 'stdstype' is assigned to but never used temporal/t.vect.algebra/t.vect.algebra.py: F841, E501 # ## used (##% key: r etc) diff --git a/temporal/t.rast.to.vect/t.rast.to.vect.py b/temporal/t.rast.to.vect/t.rast.to.vect.py index 744721ac433..603196265b0 100755 --- a/temporal/t.rast.to.vect/t.rast.to.vect.py +++ b/temporal/t.rast.to.vect/t.rast.to.vect.py @@ -163,7 +163,7 @@ def main(options, flags): return # Check the new stvds - new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif, overwrite=overwrite) + tgis.check_new_stds(output, "stvds", dbif=dbif, overwrite=overwrite) # Setup the flags flags = "" @@ -189,6 +189,7 @@ def main(options, flags): type=method, overwrite=overwrite, quiet=True, + column=column, ) # The module queue for parallel execution, except if attribute tables should diff --git a/temporal/t.rast.to.vect/testsuite/test_to_vect.py b/temporal/t.rast.to.vect/testsuite/test_to_vect.py index 5a4cc8582fb..2217b46a909 100644 --- a/temporal/t.rast.to.vect/testsuite/test_to_vect.py +++ b/temporal/t.rast.to.vect/testsuite/test_to_vect.py @@ -64,7 +64,6 @@ def test_simple_points(self): output="result", type="point", flags="n", - column="values", basename="test", nprocs=1, overwrite=True, @@ -93,7 +92,6 @@ def test_simple_area(self): output="result", type="area", flags="n", - column="values", basename="test", nprocs=1, overwrite=True, @@ -127,7 +125,6 @@ def test_simple_area_smooth(self): output="result", type="area", flags="s", - column="values", basename="test", nprocs=1, overwrite=True, @@ -160,7 +157,6 @@ def test_parallel(self): output="result", type="point", flags="t", - column="values", basename="test", nprocs=4, overwrite=True, @@ -188,7 +184,6 @@ def test_num_suffix(self): output="result", type="point", flags="t", - column="values", basename="test", suffix="num%03", nprocs=4, @@ -204,7 +199,6 @@ def test_time_suffix(self): output="result", type="point", flags="t", - column="values", basename="test", suffix="time", nprocs=4, @@ -261,7 +255,6 @@ def test_empty_strds(self): output="result", type="point", flags="n", - column="values", basename="test", where="start_time > '2010-01-01'", nprocs=1, From 120513715d6b93c8257216cb83349fc2d3131b06 Mon Sep 17 00:00:00 2001 From: Ondrej Pesek Date: Fri, 23 Aug 2024 10:13:12 +0200 Subject: [PATCH 19/30] i.atcorr: fix plot_filter function in create_iwave (#3911) * missing import of pyplot * fix figures not shown correctly * fix wrong axes --- imagery/i.atcorr/create_iwave.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/imagery/i.atcorr/create_iwave.py b/imagery/i.atcorr/create_iwave.py index 022fdf8d12e..8afc6bebd36 100644 --- a/imagery/i.atcorr/create_iwave.py +++ b/imagery/i.atcorr/create_iwave.py @@ -155,17 +155,28 @@ def interpolate_band(values, step=2.5): def plot_filter(values): """Plot wl response values and interpolated filter function. This is just for checking... 
- value is a 2 column numpy array + value is a 2-column numpy array function has to be used inside Spyder python environment """ + import matplotlib.pyplot as plt + filter_f, limits = interpolate_band(values) # removing nodata w = values[:, 1] >= 0 response = values[w] - plot(response[:, 0], response[:, 1], "ro") - plot(arange(limits[0], limits[1], 2.5), filter_f) + fig = plt.figure() + ax1 = fig.add_subplot(2, 1, 1) + ax2 = fig.add_subplot(2, 1, 2) + + ax1.plot(response[:, 0], response[:, 1], "ro") + rounded = np.arange(limits[0], limits[1], 0.0025) * 1000 + if len(rounded) == len(filter_f): + ax2.plot(rounded, filter_f) + else: + ax2.plot(rounded[:-1], filter_f) + plt.show() def pretty_print(filter_f): From 917ba5890cf34cef9dcbf1477339c1b0caaaaf24 Mon Sep 17 00:00:00 2001 From: ymdatta Date: Fri, 23 Aug 2024 07:56:05 -0400 Subject: [PATCH 20/30] r.out.png: fix consecutive fclose calls on same pointer (#4214) This patch fixes two issues: 1. In one of the code paths, we are calling fclose on a file pointer which could potentially be NULL. Doing that would lead to undefined behavior. Check if a file pointer is NULL before closing it. 2. If we call fclose on same file pointer twice, in the second instance we could be closing file descriptor allocated to some other file, which typically happens to a freed descriptor. This issue was found by using cppcheck tool. Signed-off-by: Mohana Datta Yelugoti --- raster/r.out.png/main.c | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/raster/r.out.png/main.c b/raster/r.out.png/main.c index 2bc826fac59..4d3895e5d9f 100644 --- a/raster/r.out.png/main.c +++ b/raster/r.out.png/main.c @@ -63,7 +63,7 @@ int main(int argc, char *argv[]) int png_compr, /* ret, */ do_alpha; struct Cell_head win; FILEDESC cellfile = 0; - FILE *fp; + FILE *fp = NULL; /* now goes from pnmtopng.c* -A.Sh */ /* @@ -207,20 +207,29 @@ int main(int argc, char *argv[]) png_create_write_struct(PNG_LIBPNG_VER_STRING, &pnmtopng_jmpbuf_struct, pnmtopng_error_handler, NULL); if (png_ptr == NULL) { - fclose(fp); + if (fp) { + fclose(fp); + fp = NULL; + } G_fatal_error("cannot allocate LIBPNG structure"); } info_ptr = png_create_info_struct(png_ptr); if (info_ptr == NULL) { png_destroy_write_struct(&png_ptr, (png_infopp)NULL); - fclose(fp); + if (fp) { + fclose(fp); + fp = NULL; + } G_fatal_error("cannot allocate LIBPNG structure"); } if (setjmp(pnmtopng_jmpbuf_struct.jmpbuf)) { png_destroy_write_struct(&png_ptr, &info_ptr); - fclose(fp); + if (fp) { + fclose(fp); + fp = NULL; + } G_fatal_error("setjmp returns error condition (1)"); } @@ -360,7 +369,8 @@ int main(int argc, char *argv[]) /* G_free (info_ptr); */ png_destroy_write_struct(&png_ptr, &info_ptr); /* al 11/2000 */ - fclose(fp); + if (fp) + fclose(fp); if (wld_flag->answer) { if (do_stdout) From b794d953930489eea255f02a70140efd6ae79e5a Mon Sep 17 00:00:00 2001 From: Nicklas Larsson Date: Fri, 23 Aug 2024 18:29:27 +0200 Subject: [PATCH 21/30] lib: fix missing prototypes (#4191) * r.report: add missing prototype * lib/db: add missing prototype * CI: fail macOS runner on -Wstrict-prototypes * lib/gmath: suppress -Wstrict-prototypes for BLAS/LAPACK wrapper header --- .github/workflows/macos_install.sh | 2 +- include/grass/la.h | 16 +++++++++++++++- lib/db/sqlp/sqlp.l | 2 +- raster/r.report/global.h | 2 +- raster/r.report/prt_json.c | 2 +- 5 files changed, 19 insertions(+), 5 deletions(-) diff --git a/.github/workflows/macos_install.sh b/.github/workflows/macos_install.sh index ce652491bdd..79446f51867 
100755 --- a/.github/workflows/macos_install.sh +++ b/.github/workflows/macos_install.sh @@ -69,7 +69,7 @@ export CPPFLAGS="-isystem${CONDA_PREFIX}/include" ./configure $CONFIGURE_FLAGS -EXEMPT="-Wno-error=deprecated-non-prototype -Wno-error=strict-prototypes" +EXEMPT="" make -j$(sysctl -n hw.ncpu) CFLAGS="$CFLAGS -Werror $EXEMPT" \ CXXFLAGS="$CXXFLAGS -Werror $EXEMPT" diff --git a/include/grass/la.h b/include/grass/la.h index bfae5a01c12..2981dc3401e 100644 --- a/include/grass/la.h +++ b/include/grass/la.h @@ -44,7 +44,7 @@ typedef long int __g77_longint; typedef unsigned long int __g77_ulongint; #include -#else /* for gcc4+ */ +#else /* for gcc4+ */ typedef int integer; typedef unsigned int uinteger; typedef char *address; @@ -67,6 +67,14 @@ typedef unsigned long ulongint; /* IO stuff */ typedef int ftnlen; +#if defined(__clang__) +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wstrict-prototypes" +#elif defined(__GNUC__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wstrict-prototypes" +#endif + /* procedure parameter types for -A */ typedef int (*U_fp)(); typedef shortint (*J_fp)(); @@ -80,6 +88,12 @@ typedef shortlogical (*K_fp)(); typedef void (*H_fp)(); typedef int (*S_fp)(); +#if defined(__clang__) +#pragma clang diagnostic pop +#elif defined(__GNUC__) +#pragma GCC diagnostic pop +#endif + /* E_fp is for real functions when -R is not specified */ typedef void C_f; /* complex function */ typedef void H_f; /* character function */ diff --git a/lib/db/sqlp/sqlp.l b/lib/db/sqlp/sqlp.l index 07c00b7a975..961c25b167d 100644 --- a/lib/db/sqlp/sqlp.l +++ b/lib/db/sqlp/sqlp.l @@ -273,7 +273,7 @@ void yyerror( const char *s ) * of this func anyway so we can avoid the link dependency. * **********************************************************************/ -int yywrap() +int yywrap(void) { return 1; } diff --git a/raster/r.report/global.h b/raster/r.report/global.h index a05ac1ae1d5..4ebc49b1916 100644 --- a/raster/r.report/global.h +++ b/raster/r.report/global.h @@ -121,7 +121,7 @@ int print_unit(int, int, int); JSON_Value *make_units(int, int); JSON_Value *make_category(int, int, JSON_Value *); JSON_Value *make_categories(int, int, int); -void print_json(); +void print_json(void); /* report.c */ int report(void); diff --git a/raster/r.report/prt_json.c b/raster/r.report/prt_json.c index 94590ea7b9f..5d6a33d2c28 100644 --- a/raster/r.report/prt_json.c +++ b/raster/r.report/prt_json.c @@ -129,7 +129,7 @@ JSON_Value *make_categories(int start, int end, int level) return array_value; } -void print_json() +void print_json(void) { compute_unit_format(0, nunits - 1, JSON); From e98d07b5eb48416c7544244db788bc0107ddb99b Mon Sep 17 00:00:00 2001 From: Makiko Shukunobe Date: Fri, 23 Aug 2024 18:53:29 -0400 Subject: [PATCH 22/30] Checks: fix Flake8 F841 (local variable assigned to but never used) in python/grass/temporal (#4200) * Delete variable Declarations with Definitions * Delete variable assignments and a variable * Update python/grass/temporal/datetime_math.py Refactor Co-authored-by: Anna Petrasova * Remove unused function declaration * Update `.flake8` by deleting F841 in python/grass/temporal * Delete declarations of unused functions --------- Co-authored-by: Anna Petrasova --- .flake8 | 14 ++++++-------- .../grass/temporal/abstract_space_time_dataset.py | 3 --- python/grass/temporal/datetime_math.py | 2 +- python/grass/temporal/open_stds.py | 2 -- python/grass/temporal/temporal_algebra.py | 2 -- python/grass/temporal/temporal_granularity.py | 4 ++-- 
.../temporal/temporal_raster_base_algebra.py | 12 +++++------- python/grass/temporal/temporal_vector_algebra.py | 15 +-------------- 8 files changed, 15 insertions(+), 39 deletions(-) diff --git a/.flake8 b/.flake8 index b628b1ad53c..573debabe1c 100644 --- a/.flake8 +++ b/.flake8 @@ -129,17 +129,15 @@ per-file-ignores = python/grass/pygrass/raster/category.py: E721 python/grass/pygrass/rpc/__init__.py: F401, F403 python/grass/pygrass/utils.py: E402 - python/grass/temporal/abstract_space_time_dataset.py: F841, E722 - python/grass/temporal/c_libraries_interface.py: F841, E722 + python/grass/temporal/abstract_space_time_dataset.py: E722 + python/grass/temporal/c_libraries_interface.py: E722 python/grass/temporal/core.py: E722 - python/grass/temporal/datetime_math.py: F841, E722 - python/grass/temporal/open_stds.py: F841 + python/grass/temporal/datetime_math.py: E722 python/grass/temporal/spatial_topology_dataset_connector.py: E722 - python/grass/temporal/temporal_algebra.py: F841, E722 - python/grass/temporal/temporal_granularity.py: F841, E722 - python/grass/temporal/temporal_raster_base_algebra.py: F841, E722 + python/grass/temporal/temporal_algebra.py: E722 + python/grass/temporal/temporal_granularity.py: E722 + python/grass/temporal/temporal_raster_base_algebra.py: E722 python/grass/temporal/temporal_topology_dataset_connector.py: E722 - python/grass/temporal/temporal_vector_algebra.py: F841 python/grass/temporal/univar_statistics.py: E231 # Current benchmarks/tests are changing sys.path before import. # Possibly, a different approach should be taken there anyway. diff --git a/python/grass/temporal/abstract_space_time_dataset.py b/python/grass/temporal/abstract_space_time_dataset.py index 929f74e4421..515502ff3f1 100644 --- a/python/grass/temporal/abstract_space_time_dataset.py +++ b/python/grass/temporal/abstract_space_time_dataset.py @@ -2549,7 +2549,6 @@ def register_map(self, map, dbif=None): # Get basic info map_id = map.base.get_id() - map_mapset = map.base.get_mapset() map_rel_time_unit = map.get_relative_time_unit() map_ttype = map.get_temporal_type() @@ -2804,8 +2803,6 @@ def update_from_registered_maps(self, dbif=None): use_start_time = False # Get basic info - stds_name = self.base.get_name() - stds_mapset = self.base.get_mapset() sql_path = get_sql_template_path() stds_register_table = self.get_map_register() diff --git a/python/grass/temporal/datetime_math.py b/python/grass/temporal/datetime_math.py index e5be1191551..9b680760775 100644 --- a/python/grass/temporal/datetime_math.py +++ b/python/grass/temporal/datetime_math.py @@ -791,7 +791,7 @@ def check_datetime_string(time_string, use_dateutil=True): # relative time. 
dateutil will interpret a single number as a valid # time string, so we have to catch this case beforehand try: - value = int(time_string) + int(time_string) return _("Time string seems to specify relative time") except ValueError: pass diff --git a/python/grass/temporal/open_stds.py b/python/grass/temporal/open_stds.py index f17288695e3..3b3a28ea99a 100644 --- a/python/grass/temporal/open_stds.py +++ b/python/grass/temporal/open_stds.py @@ -269,8 +269,6 @@ def open_new_map_dataset( """ - mapset = get_current_mapset() - dbif, connection_state_changed = init_dbif(dbif) new_map = check_new_map_dataset(name, layer, type, overwrite, dbif) diff --git a/python/grass/temporal/temporal_algebra.py b/python/grass/temporal/temporal_algebra.py index 4d48b7119b0..3736a15315a 100644 --- a/python/grass/temporal/temporal_algebra.py +++ b/python/grass/temporal/temporal_algebra.py @@ -2143,7 +2143,6 @@ def eval_global_var(self, gvar, maplist): :return: List of maps from maplist with added conditional boolean values. """ - boollist = [] # Loop over maps of input map list. for map_i in maplist: # Get dictionary with temporal variables for the map. @@ -2248,7 +2247,6 @@ def build_condition_list(self, tvarexpr, thenlist, topolist=["EQUAL"]): # self.msgr.fatal("Condition list is not complete. Elements missing") for iter in range(len(tvarexpr)): expr = tvarexpr[iter] - operator = tvarexpr[iter + 1] relexpr = tvarexpr[iter + 2] if all(issubclass(type(ele), list) for ele in [expr, relexpr]): resultlist = self.build_spatio_temporal_topology_list(expr, relexpr) diff --git a/python/grass/temporal/temporal_granularity.py b/python/grass/temporal/temporal_granularity.py index 3022c27ea77..c3920b8b00b 100644 --- a/python/grass/temporal/temporal_granularity.py +++ b/python/grass/temporal/temporal_granularity.py @@ -96,12 +96,12 @@ def check_granularity_string(granularity, temporal_type): return False try: - integer = int(num) + int(num) except: return False elif temporal_type == "relative": try: - integer = int(granularity) + int(granularity) except: return False else: diff --git a/python/grass/temporal/temporal_raster_base_algebra.py b/python/grass/temporal/temporal_raster_base_algebra.py index 8333fbb698e..76517db30cc 100644 --- a/python/grass/temporal/temporal_raster_base_algebra.py +++ b/python/grass/temporal/temporal_raster_base_algebra.py @@ -511,7 +511,6 @@ def operator_cmd_value( """ temporal_relations = map_i.get_temporal_relations() - spatial_relations = map_i.get_spatial_relations() # Build comandlist list with elements from related maps and given relation # operator. @@ -933,7 +932,7 @@ def p_statement_assign(self, t): map_i.insert(dbif) # Register map in result space time dataset. if self.dry_run is False: - success = resultstds.register_map(map_i, dbif) + resultstds.register_map(map_i, dbif) if self.dry_run is False: resultstds.update_from_registered_maps(dbif) @@ -1040,10 +1039,10 @@ def p_arith1_operation(self, t): break if count == 0: # Set map name. - name = map_new.get_id() + map_new.get_id() else: # Generate an intermediate map - name = self.generate_map_name() + self.generate_map_name() # Create r.mapcalc expression string for the operation. cmdstring = self.build_command_string( @@ -1208,10 +1207,10 @@ def p_arith2_operation(self, t): break if count == 0: # Set map name. - name = map_new.get_id() + map_new.get_id() else: # Generate an intermediate map - name = self.generate_map_name() + self.generate_map_name() # Create r.mapcalc expression string for the operation. 
cmdstring = self.build_command_string( @@ -2038,7 +2037,6 @@ def p_hash_operation(self, t): for obj in map_i.map_value: if isinstance(obj, GlobalTemporalVar): n_maps = obj.td - mapinput = map_i.get_id() # Create r.mapcalc expression string for the operation. cmdstring = "(%s)" % (n_maps) # Append module command. diff --git a/python/grass/temporal/temporal_vector_algebra.py b/python/grass/temporal/temporal_vector_algebra.py index 4a005a7baeb..551c69783f5 100644 --- a/python/grass/temporal/temporal_vector_algebra.py +++ b/python/grass/temporal/temporal_vector_algebra.py @@ -232,19 +232,6 @@ def build_spatio_temporal_topology_list( "STARTED", "FINISHED", ] - complementdict = { - "EQUAL": "EQUAL", - "FOLLOWS": "PRECEDES", - "PRECEDES": "FOLLOWS", - "OVERLAPS": "OVERLAPPED", - "OVERLAPPED": "OVERLAPS", - "DURING": "CONTAINS", - "CONTAINS": "DURING", - "STARTS": "STARTED", - "STARTED": "STARTS", - "FINISHES": "FINISHED", - "FINISHED": "FINISHES", - } resultdict = {} # Check if given temporal relation are valid. for topo in topolist: @@ -577,7 +564,7 @@ def p_statement_assign(self, t): # Register map in result space time dataset. if self.debug: print(map_i.get_temporal_extent_as_tuple()) - success = resultstds.register_map(map_i, dbif=dbif) + resultstds.register_map(map_i, dbif=dbif) resultstds.update_from_registered_maps(dbif) # Remove intermediate maps From ef72f2d719a08e87b36865681e10d37a25de4c05 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 24 Aug 2024 00:15:40 +0000 Subject: [PATCH 23/30] CI(deps): Update github/codeql-action action to v3.26.5 (#4222) --- .github/workflows/codeql-analysis.yml | 4 ++-- .github/workflows/python-code-quality.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 73fbe757909..17d40cdae7f 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -56,7 +56,7 @@ jobs: if: ${{ matrix.language == 'c-cpp' }} - name: Initialize CodeQL - uses: github/codeql-action/init@f0f3afee809481da311ca3a6ff1ff51d81dbeb24 # v3.26.4 + uses: github/codeql-action/init@2c779ab0d087cd7fe7b826087247c2c81f27bfa6 # v3.26.5 with: languages: ${{ matrix.language }} config-file: ./.github/codeql/codeql-config.yml @@ -81,6 +81,6 @@ jobs: run: .github/workflows/build_ubuntu-22.04.sh "${HOME}/install" - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@f0f3afee809481da311ca3a6ff1ff51d81dbeb24 # v3.26.4 + uses: github/codeql-action/analyze@2c779ab0d087cd7fe7b826087247c2c81f27bfa6 # v3.26.5 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 5f8de26bc23..dd1256b1c1c 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -135,7 +135,7 @@ jobs: path: bandit.sarif - name: Upload SARIF File into Security Tab - uses: github/codeql-action/upload-sarif@f0f3afee809481da311ca3a6ff1ff51d81dbeb24 # v3.26.4 + uses: github/codeql-action/upload-sarif@2c779ab0d087cd7fe7b826087247c2c81f27bfa6 # v3.26.5 with: sarif_file: bandit.sarif From cbc418e51b2dd92ea765131481967c55fb9a202a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 02:09:41 +0000 Subject: [PATCH 24/30] CI(deps): Lock file maintenance (#4226) From 22b07acb524d29fe980f16f8105f613adea83926 Mon Sep 17 00:00:00 2001 From: 
Anna Petrasova Date: Mon, 26 Aug 2024 12:31:16 -0400 Subject: [PATCH 25/30] contributing: add PR template (#4207) Also exclude MD041 globally --- .../pull_request_template.md | 32 +++++++++++++++++++ .markdownlint.yml | 2 ++ 2 files changed, 34 insertions(+) create mode 100644 .github/PULL_REQUEST_TEMPLATE/pull_request_template.md diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md new file mode 100644 index 00000000000..b9d45627ffc --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -0,0 +1,32 @@ +## Description + + +## Motivation and context + + + +## How has this been tested? + + +## Screenshots (if appropriate) + +## Types of changes + + +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would cause existing +functionality to not work as before) + +## Checklist + + +- [ ] PR title provides summary of the changes and starts with one of the +[pre-defined prefixes](../../utils/release.yml) + + +- [ ] My code follows the [code style](../../doc/development/style_guide.md) +of this project. +- [ ] My change requires a change to the documentation. +- [ ] I have updated the documentation accordingly. +- [ ] I have added tests to cover my changes. diff --git a/.markdownlint.yml b/.markdownlint.yml index e14a39a2289..44c5b53c8be 100644 --- a/.markdownlint.yml +++ b/.markdownlint.yml @@ -4,3 +4,5 @@ default: true # Fix any fixable errors (depending on the markdownlint wrapper tool used) fix: true + +MD041: false # first-line-h1 From 18bbf4b9ef438d38a623ceef174b5cfe9756a1d0 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 16:54:31 +0000 Subject: [PATCH 26/30] CI(deps): Lock file maintenance (#4227) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 58a8dd324c0..53be10c7de4 100644 --- a/flake.lock +++ b/flake.lock @@ -19,11 +19,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1724015816, - "narHash": "sha256-hVESnM7Eiz93+4DeiE0a1TwMeaeph1ytRJ5QtqxYRWg=", + "lastModified": 1724395761, + "narHash": "sha256-zRkDV/nbrnp3Y8oCADf5ETl1sDrdmAW6/bBVJ8EbIdQ=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "9aa35efbea27d320d0cdc5f922f0890812affb60", + "rev": "ae815cee91b417be55d43781eb4b73ae1ecc396c", "type": "github" }, "original": { From e0a028854621fbca67a2200d1b99bd52a71980d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edouard=20Choini=C3=A8re?= <27212526+echoix@users.noreply.github.com> Date: Mon, 26 Aug 2024 23:36:18 -0400 Subject: [PATCH 27/30] init: Use pathlib Path.read_text for readfile in main executable (#4234) --- lib/init/grass.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/init/grass.py b/lib/init/grass.py index 2a9a585b92b..4726193b976 100755 --- a/lib/init/grass.py +++ b/lib/init/grass.py @@ -55,6 +55,7 @@ import unicodedata import argparse import json +from pathlib import Path # mechanism meant for debugging this script (only) @@ -167,10 +168,7 @@ def fatal(msg): def readfile(path): debug("Reading %s" % path) - f = open(path, "r") - s = f.read() - f.close() - return s + return Path(path).read_text() def writefile(path, s): From 4aa4abf43f69d23ecdea932f3e550b5479df428f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edouard=20Choini=C3=A8re?= <27212526+echoix@users.noreply.github.com> Date: Mon, 26 Aug 2024 23:40:05 -0400 
Subject: [PATCH 28/30] grass.temporal.core: Replace open file + read with pathlib.Path's read_text() (#4233) Reads the whole file using a context manager internally. --- python/grass/temporal/core.py | 74 ++++++++++++----------------------- 1 file changed, 24 insertions(+), 50 deletions(-) diff --git a/python/grass/temporal/core.py b/python/grass/temporal/core.py index 60479cf4e08..dc74bff151d 100644 --- a/python/grass/temporal/core.py +++ b/python/grass/temporal/core.py @@ -32,6 +32,7 @@ # import traceback import os +from pathlib import Path import grass.script as gs from grass.pygrass import messages @@ -813,7 +814,7 @@ def _create_temporal_database_views(dbif): :param dbif: The database interface to be used """ - template_path = get_sql_template_path() + template_path = Path(get_sql_template_path()) for sql_filename in ( "raster_views", @@ -823,9 +824,7 @@ def _create_temporal_database_views(dbif): "str3ds_views", "stvds_views", ): - sql_filepath = open( - os.path.join(template_path, sql_filename + ".sql"), "r" - ).read() + sql_filepath = (template_path / f"{sql_filename}.sql").read_text() dbif.execute_transaction(sql_filepath) @@ -839,34 +838,18 @@ def create_temporal_database(dbif): """ global tgis_backend, tgis_version, tgis_db_version, tgis_database_string - template_path = get_sql_template_path() + template_path = Path(get_sql_template_path()) msgr = get_tgis_message_interface() # Read all SQL scripts and templates - map_tables_template_sql = open( - os.path.join(template_path, "map_tables_template.sql"), "r" - ).read() - raster_metadata_sql = open( - os.path.join(get_sql_template_path(), "raster_metadata_table.sql"), "r" - ).read() - raster3d_metadata_sql = open( - os.path.join(template_path, "raster3d_metadata_table.sql"), "r" - ).read() - vector_metadata_sql = open( - os.path.join(template_path, "vector_metadata_table.sql"), "r" - ).read() - stds_tables_template_sql = open( - os.path.join(template_path, "stds_tables_template.sql"), "r" - ).read() - strds_metadata_sql = open( - os.path.join(template_path, "strds_metadata_table.sql"), "r" - ).read() - str3ds_metadata_sql = open( - os.path.join(template_path, "str3ds_metadata_table.sql"), "r" - ).read() - stvds_metadata_sql = open( - os.path.join(template_path, "stvds_metadata_table.sql"), "r" - ).read() + map_tables_template_sql = (template_path / "map_tables_template.sql").read_text() + raster_metadata_sql = (template_path / "raster_metadata_table.sql").read_text() + raster3d_metadata_sql = (template_path / "raster3d_metadata_table.sql").read_text() + vector_metadata_sql = (template_path / "vector_metadata_table.sql").read_text() + stds_tables_template_sql = (template_path / "stds_tables_template.sql").read_text() + strds_metadata_sql = (template_path / "strds_metadata_table.sql").read_text() + str3ds_metadata_sql = (template_path / "str3ds_metadata_table.sql").read_text() + stvds_metadata_sql = (template_path / "stvds_metadata_table.sql").read_text() # Create the raster, raster3d and vector tables SQL statements raster_tables_sql = map_tables_template_sql.replace("GRASS_MAP", "raster") @@ -898,21 +881,15 @@ def create_temporal_database(dbif): # Set up the trigger that takes care of # the correct deletion of entries across the different tables - delete_trigger_sql = open( - os.path.join(template_path, "sqlite3_delete_trigger.sql"), "r" - ).read() - indexes_sql = open( - os.path.join(template_path, "sqlite3_indexes.sql"), "r" - ).read() + delete_trigger_sql = (template_path / "sqlite3_delete_trigger.sql").read_text() + indexes_sql 
= (template_path / "sqlite3_indexes.sql").read_text() else: # Set up the trigger that takes care of # the correct deletion of entries across the different tables - delete_trigger_sql = open( - os.path.join(template_path, "postgresql_delete_trigger.sql"), "r" - ).read() - indexes_sql = open( - os.path.join(template_path, "postgresql_indexes.sql"), "r" - ).read() + delete_trigger_sql = ( + template_path / "postgresql_delete_trigger.sql" + ).read_text() + indexes_sql = (template_path / "postgresql_indexes.sql").read_text() # Connect now to the database if dbif.connected is not True: @@ -989,22 +966,19 @@ def upgrade_temporal_database(dbif): dbif.close() return - template_path = get_sql_template_path() + template_path = Path(get_sql_template_path()) try: - upgrade_db_sql = open( - os.path.join( - template_path, - "upgrade_db_%s_to_%s.sql" % (upgrade_db_from, tgis_db_version), - ), - "r", - ).read() + upgrade_db_sql = ( + template_path + / "upgrade_db_{}_to_{}.sql".format(upgrade_db_from, tgis_db_version) + ).read_text() except FileNotFoundError: msgr.fatal( _("Unsupported TGIS DB upgrade scenario: from version %s to %s") % (upgrade_db_from, tgis_db_version) ) - drop_views_sql = open(os.path.join(template_path, "drop_views.sql"), "r").read() + drop_views_sql = (template_path / "drop_views.sql").read_text() msgr.message( _("Upgrading temporal database <%s> from version %s to %s...") From acfbdd891145c90de10050cfc8222be04070ac91 Mon Sep 17 00:00:00 2001 From: Anna Petrasova Date: Tue, 27 Aug 2024 09:03:07 -0400 Subject: [PATCH 29/30] wxGUI: fix adding and removing from history for new mapset (#4228) Bug introduced in 02e45e9 when trying to avoid calling complete tree refresh every time. --- gui/wxpython/history/tree.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/gui/wxpython/history/tree.py b/gui/wxpython/history/tree.py index b616ef44698..929f2d9f53b 100644 --- a/gui/wxpython/history/tree.py +++ b/gui/wxpython/history/tree.py @@ -505,6 +505,7 @@ def InsertCommand(self, entry): }, ) self._model.SortChildren(self._model.root) + self.RefreshItems() # Populate today's node by executed command command_node = self._populateDayItem(today_node, entry) @@ -621,9 +622,9 @@ def OnRemoveCmd(self, event): selected_day = selected_command.parent if selected_day and len(selected_day.children) == 0: self._model.RemoveNode(selected_day) - - # Reload day node - self._reloadNode(selected_day) + self._reloadNode(self._model.root) + else: + self._reloadNode(selected_day) self.showNotification.emit(message=_("<{}> removed").format(command)) def OnItemSelected(self, node): From 6c5190e20016da5862b133dc0bbba64ab69a91f2 Mon Sep 17 00:00:00 2001 From: Paulo van Breugel Date: Tue, 27 Aug 2024 20:34:35 +0200 Subject: [PATCH 30/30] wxGUI: Update link to the Natural Earth sample dataset (#4223) Update locdownload.py with link to latest natural earth sample dataset --- gui/wxpython/startup/locdownload.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gui/wxpython/startup/locdownload.py b/gui/wxpython/startup/locdownload.py index 4592103a0f9..f0a1ab13fd2 100644 --- a/gui/wxpython/startup/locdownload.py +++ b/gui/wxpython/startup/locdownload.py @@ -73,8 +73,8 @@ }, { "label": "Natural Earth Dataset in WGS84", - "url": "https://zenodo.org/record/3968936/files/natural-earth-dataset.tar.gz", - "size": "207 MB", + "url": "https://zenodo.org/records/13370131/files/natural_earth_dataset.zip", + "size": "121.3 MB", "epsg": "4326", "license": "ODC Public Domain Dedication and License 
1.0",
         "maintainer": "Brendan Harmon (brendan.harmon@gmail.com)",
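
For reference, a minimal standalone sketch of the open()-versus-read_text() pattern that patch 28 above applies throughout grass.temporal.core; the temporary directory, the file name example_views.sql and its SQL content are hypothetical stand-ins, not anything shipped by GRASS:

    from pathlib import Path
    import tempfile

    # Hypothetical template directory and SQL file, standing in for
    # get_sql_template_path() and the real GRASS SQL template scripts.
    template_path = Path(tempfile.mkdtemp())
    sql_file = template_path / "example_views.sql"
    sql_file.write_text("CREATE VIEW example_view AS SELECT 1;\n")

    # Old pattern: open() + read() leaves closing the file handle to the
    # garbage collector.
    sql_old = open(sql_file, "r").read()

    # New pattern: Path.read_text() opens, reads and closes the file
    # internally (it uses a context manager under the hood), so no handle
    # is left dangling.
    sql_new = sql_file.read_text()

    assert sql_old == sql_new

Both forms return the same string; the pathlib form is shorter and does not rely on the garbage collector to close the file handle.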