From d283b8c883514407501b062d75b27b8177ec7c13 Mon Sep 17 00:00:00 2001 From: Jeremi Joslin Date: Tue, 17 Feb 2026 11:46:25 +0700 Subject: [PATCH 1/4] feat: add HXL modules and GIS indicator layers New modules: - spp_hxl: HXL hashtag/attribute management and export profiles, integrates with CEL variables and Studio - spp_hxl_area: area-level HXL indicator import, aggregation rules, batch import with area matching - spp_gis_indicators: choropleth visualization for area-level indicators, configurable color scales and classification methods (quantile, equal interval, manual breaks) --- spp_gis_indicators/README.rst | 147 +++++ spp_gis_indicators/__init__.py | 3 + spp_gis_indicators/__manifest__.py | 28 + spp_gis_indicators/data/color_scales.xml | 100 ++++ spp_gis_indicators/models/__init__.py | 5 + spp_gis_indicators/models/color_scale.py | 172 ++++++ spp_gis_indicators/models/data_layer.py | 26 + spp_gis_indicators/models/indicator_layer.py | 402 +++++++++++++ spp_gis_indicators/pyproject.toml | 3 + spp_gis_indicators/readme/DESCRIPTION.md | 52 ++ .../security/ir.model.access.csv | 5 + .../static/description/icon.png | Bin 0 -> 15480 bytes .../static/description/index.html | 532 +++++++++++++++++ spp_gis_indicators/tests/__init__.py | 5 + spp_gis_indicators/tests/test_color_scale.py | 375 ++++++++++++ spp_gis_indicators/tests/test_data_layer.py | 164 ++++++ .../tests/test_indicator_layer.py | 470 +++++++++++++++ .../views/color_scale_views.xml | 100 ++++ spp_gis_indicators/views/data_layer_views.xml | 42 ++ .../views/indicator_layer_views.xml | 124 ++++ spp_gis_indicators/views/menu.xml | 11 + spp_hxl/README.rst | 162 ++++++ spp_hxl/__init__.py | 1 + spp_hxl/__manifest__.py | 30 + spp_hxl/data/hxl_attributes.xml | 283 +++++++++ spp_hxl/data/hxl_hashtags.xml | 355 ++++++++++++ spp_hxl/models/__init__.py | 4 + spp_hxl/models/cel_variable.py | 58 ++ spp_hxl/models/hxl_attribute.py | 50 ++ spp_hxl/models/hxl_export_profile.py | 117 ++++ spp_hxl/models/hxl_tag.py | 61 ++ 
spp_hxl/pyproject.toml | 3 + spp_hxl/readme/DESCRIPTION.md | 59 ++ spp_hxl/security/ir.model.access.csv | 9 + spp_hxl/static/description/icon.png | Bin 0 -> 15480 bytes spp_hxl/static/description/index.html | 547 ++++++++++++++++++ spp_hxl/tests/__init__.py | 5 + spp_hxl/tests/test_hxl_attribute.py | 203 +++++++ spp_hxl/tests/test_hxl_export_profile.py | 312 ++++++++++ spp_hxl/tests/test_hxl_tag.py | 145 +++++ spp_hxl/tests/test_hxl_variable.py | 210 +++++++ spp_hxl/tests/test_security.py | 188 ++++++ spp_hxl/views/cel_variable_views.xml | 53 ++ spp_hxl/views/hxl_attribute_views.xml | 84 +++ spp_hxl/views/hxl_export_profile_views.xml | 119 ++++ spp_hxl/views/hxl_tag_views.xml | 87 +++ spp_hxl/views/menus.xml | 15 + spp_hxl_area/QUICKSTART.md | 226 ++++++++ spp_hxl_area/README.md | 308 ++++++++++ spp_hxl_area/README.rst | 153 +++++ spp_hxl_area/__init__.py | 5 + spp_hxl_area/__manifest__.py | 39 ++ spp_hxl_area/data/hxl_import_profiles.xml | 142 +++++ spp_hxl_area/models/__init__.py | 7 + spp_hxl_area/models/hxl_aggregation_rule.py | 90 +++ spp_hxl_area/models/hxl_area_indicator.py | 204 +++++++ spp_hxl_area/models/hxl_import_batch.py | 401 +++++++++++++ spp_hxl_area/models/hxl_import_mapping.py | 64 ++ spp_hxl_area/models/hxl_import_profile.py | 119 ++++ spp_hxl_area/pyproject.toml | 3 + spp_hxl_area/readme/DESCRIPTION.md | 59 ++ spp_hxl_area/security/ir.model.access.csv | 13 + spp_hxl_area/services/__init__.py | 4 + spp_hxl_area/services/aggregation_engine.py | 355 ++++++++++++ spp_hxl_area/services/area_matcher.py | 233 ++++++++ spp_hxl_area/static/description/icon.png | Bin 0 -> 15480 bytes spp_hxl_area/static/description/index.html | 536 +++++++++++++++++ spp_hxl_area/tests/__init__.py | 9 + spp_hxl_area/tests/test_aggregation_engine.py | 442 ++++++++++++++ spp_hxl_area/tests/test_area_matcher.py | 205 +++++++ .../tests/test_hxl_aggregation_rule.py | 251 ++++++++ spp_hxl_area/tests/test_hxl_area_indicator.py | 385 ++++++++++++ 
spp_hxl_area/tests/test_hxl_import_batch.py | 248 ++++++++ spp_hxl_area/tests/test_hxl_import_mapping.py | 230 ++++++++ spp_hxl_area/tests/test_hxl_import_profile.py | 209 +++++++ .../views/hxl_aggregation_rule_views.xml | 45 ++ .../views/hxl_area_indicator_views.xml | 104 ++++ spp_hxl_area/views/hxl_import_batch_views.xml | 182 ++++++ .../views/hxl_import_profile_views.xml | 175 ++++++ spp_hxl_area/views/menus.xml | 54 ++ spp_hxl_area/wizards/__init__.py | 3 + .../wizards/hxl_area_import_wizard.py | 285 +++++++++ .../wizards/hxl_area_import_wizard_views.xml | 109 ++++ 83 files changed, 11823 insertions(+) create mode 100644 spp_gis_indicators/README.rst create mode 100644 spp_gis_indicators/__init__.py create mode 100644 spp_gis_indicators/__manifest__.py create mode 100644 spp_gis_indicators/data/color_scales.xml create mode 100644 spp_gis_indicators/models/__init__.py create mode 100644 spp_gis_indicators/models/color_scale.py create mode 100644 spp_gis_indicators/models/data_layer.py create mode 100644 spp_gis_indicators/models/indicator_layer.py create mode 100644 spp_gis_indicators/pyproject.toml create mode 100644 spp_gis_indicators/readme/DESCRIPTION.md create mode 100644 spp_gis_indicators/security/ir.model.access.csv create mode 100644 spp_gis_indicators/static/description/icon.png create mode 100644 spp_gis_indicators/static/description/index.html create mode 100644 spp_gis_indicators/tests/__init__.py create mode 100644 spp_gis_indicators/tests/test_color_scale.py create mode 100644 spp_gis_indicators/tests/test_data_layer.py create mode 100644 spp_gis_indicators/tests/test_indicator_layer.py create mode 100644 spp_gis_indicators/views/color_scale_views.xml create mode 100644 spp_gis_indicators/views/data_layer_views.xml create mode 100644 spp_gis_indicators/views/indicator_layer_views.xml create mode 100644 spp_gis_indicators/views/menu.xml create mode 100644 spp_hxl/README.rst create mode 100644 spp_hxl/__init__.py create mode 100644 
spp_hxl/__manifest__.py create mode 100644 spp_hxl/data/hxl_attributes.xml create mode 100644 spp_hxl/data/hxl_hashtags.xml create mode 100644 spp_hxl/models/__init__.py create mode 100644 spp_hxl/models/cel_variable.py create mode 100644 spp_hxl/models/hxl_attribute.py create mode 100644 spp_hxl/models/hxl_export_profile.py create mode 100644 spp_hxl/models/hxl_tag.py create mode 100644 spp_hxl/pyproject.toml create mode 100644 spp_hxl/readme/DESCRIPTION.md create mode 100644 spp_hxl/security/ir.model.access.csv create mode 100644 spp_hxl/static/description/icon.png create mode 100644 spp_hxl/static/description/index.html create mode 100644 spp_hxl/tests/__init__.py create mode 100644 spp_hxl/tests/test_hxl_attribute.py create mode 100644 spp_hxl/tests/test_hxl_export_profile.py create mode 100644 spp_hxl/tests/test_hxl_tag.py create mode 100644 spp_hxl/tests/test_hxl_variable.py create mode 100644 spp_hxl/tests/test_security.py create mode 100644 spp_hxl/views/cel_variable_views.xml create mode 100644 spp_hxl/views/hxl_attribute_views.xml create mode 100644 spp_hxl/views/hxl_export_profile_views.xml create mode 100644 spp_hxl/views/hxl_tag_views.xml create mode 100644 spp_hxl/views/menus.xml create mode 100644 spp_hxl_area/QUICKSTART.md create mode 100644 spp_hxl_area/README.md create mode 100644 spp_hxl_area/README.rst create mode 100644 spp_hxl_area/__init__.py create mode 100644 spp_hxl_area/__manifest__.py create mode 100644 spp_hxl_area/data/hxl_import_profiles.xml create mode 100644 spp_hxl_area/models/__init__.py create mode 100644 spp_hxl_area/models/hxl_aggregation_rule.py create mode 100644 spp_hxl_area/models/hxl_area_indicator.py create mode 100644 spp_hxl_area/models/hxl_import_batch.py create mode 100644 spp_hxl_area/models/hxl_import_mapping.py create mode 100644 spp_hxl_area/models/hxl_import_profile.py create mode 100644 spp_hxl_area/pyproject.toml create mode 100644 spp_hxl_area/readme/DESCRIPTION.md create mode 100644 
spp_hxl_area/security/ir.model.access.csv create mode 100644 spp_hxl_area/services/__init__.py create mode 100644 spp_hxl_area/services/aggregation_engine.py create mode 100644 spp_hxl_area/services/area_matcher.py create mode 100644 spp_hxl_area/static/description/icon.png create mode 100644 spp_hxl_area/static/description/index.html create mode 100644 spp_hxl_area/tests/__init__.py create mode 100644 spp_hxl_area/tests/test_aggregation_engine.py create mode 100644 spp_hxl_area/tests/test_area_matcher.py create mode 100644 spp_hxl_area/tests/test_hxl_aggregation_rule.py create mode 100644 spp_hxl_area/tests/test_hxl_area_indicator.py create mode 100644 spp_hxl_area/tests/test_hxl_import_batch.py create mode 100644 spp_hxl_area/tests/test_hxl_import_mapping.py create mode 100644 spp_hxl_area/tests/test_hxl_import_profile.py create mode 100644 spp_hxl_area/views/hxl_aggregation_rule_views.xml create mode 100644 spp_hxl_area/views/hxl_area_indicator_views.xml create mode 100644 spp_hxl_area/views/hxl_import_batch_views.xml create mode 100644 spp_hxl_area/views/hxl_import_profile_views.xml create mode 100644 spp_hxl_area/views/menus.xml create mode 100644 spp_hxl_area/wizards/__init__.py create mode 100644 spp_hxl_area/wizards/hxl_area_import_wizard.py create mode 100644 spp_hxl_area/wizards/hxl_area_import_wizard_views.xml diff --git a/spp_gis_indicators/README.rst b/spp_gis_indicators/README.rst new file mode 100644 index 00000000..330eee78 --- /dev/null +++ b/spp_gis_indicators/README.rst @@ -0,0 +1,147 @@ +.. image:: https://odoo-community.org/readme-banner-image + :target: https://odoo-community.org/get-involved?utm_source=readme + :alt: Odoo Community Association + +====================== +OpenSPP GIS Indicators +====================== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! 
source digest: sha256:3e0d3c935187c1430aaa36bc854f98fde0d3ffa19bf8bafb7b342ebb706da310 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2Fopenspp--modules-lightgray.png?logo=github + :target: https://github.com/OpenSPP/openspp-modules/tree/19.0/spp_gis_indicators + :alt: OpenSPP/openspp-modules + +|badge1| |badge2| |badge3| + +Choropleth visualization for area-level indicators on GIS maps. Maps +indicator values from CEL variables to colors using configurable +classification methods and ColorBrewer-based color scales. Supports +quantile, equal interval, and manual break classification with automatic +legend generation. 
+ +Key Capabilities +~~~~~~~~~~~~~~~~ + +- Define indicator layer configurations that link CEL variables to + color scales and classification methods +- Classify continuous indicator values into discrete color classes + using quantile, equal interval, or manual breaks +- Apply preset ColorBrewer color scales (sequential, diverging, + categorical) or define custom scales +- Compute break values automatically based on actual data distribution +- Generate HTML legends showing color-to-value mappings +- Map area features to colors for choropleth rendering in GIS data + layers +- Filter indicators by period and hazard incident context + +Key Models +~~~~~~~~~~ + ++-----------------------------+---------------------------------------+ +| Model | Description | ++=============================+=======================================+ +| ``spp.gis.indicator.layer`` | Configuration linking a CEL variable | +| | to color scale and classification | +| | settings | ++-----------------------------+---------------------------------------+ +| ``spp.gis.color.scale`` | Color scheme definition with JSON | +| | array of hex colors | ++-----------------------------+---------------------------------------+ +| ``spp.gis.data.layer`` | Extended with ``choropleth`` geo | +| | representation option | ++-----------------------------+---------------------------------------+ + +Configuration +~~~~~~~~~~~~~ + +After installing: + +1. Navigate to **Settings > GIS Configuration > Color Scales** +2. Review preset ColorBrewer scales (Blues, Greens, Red-Yellow-Green, + etc.) or create custom scales +3. Navigate to **Settings > GIS Configuration > Indicator Layers** +4. Create an indicator layer specifying the CEL variable, period key, + color scale, and classification method +5. 
In an existing GIS data layer, set ``geo_repr`` to ``choropleth`` and + select the indicator layer to visualize + +UI Location +~~~~~~~~~~~ + +- **Menu**: Settings > GIS Configuration > Indicator Layers +- **Menu**: Settings > GIS Configuration > Color Scales + +Security +~~~~~~~~ + +================================== ============================= +Group Access +================================== ============================= +``spp_security.group_spp_user`` Read +``spp_security.group_spp_manager`` Read/write/create (no delete) +``spp_security.group_spp_admin`` Full CRUD +================================== ============================= + +Extension Points +~~~~~~~~~~~~~~~~ + +- Override ``_compute_quantile_breaks()`` or + ``_compute_equal_interval_breaks()`` in ``spp.gis.indicator.layer`` + to add custom classification algorithms +- Inherit ``spp.gis.color.scale`` and override + ``get_color_for_value()`` to implement custom color mapping logic +- Extend ``spp.gis.indicator.layer._get_indicator_values()`` to support + additional data sources beyond ``spp.hxl.area.indicator`` + +Dependencies +~~~~~~~~~~~~ + +``spp_gis``, ``spp_hxl_area`` + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. + `More details on development status `_ + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* OpenSPP.org + +Maintainers +----------- + +This module is part of the `OpenSPP/openspp-modules `_ project on GitHub. + +You are welcome to contribute. 
diff --git a/spp_gis_indicators/__init__.py b/spp_gis_indicators/__init__.py new file mode 100644 index 00000000..c4ccea79 --- /dev/null +++ b/spp_gis_indicators/__init__.py @@ -0,0 +1,3 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import models diff --git a/spp_gis_indicators/__manifest__.py b/spp_gis_indicators/__manifest__.py new file mode 100644 index 00000000..de00df2b --- /dev/null +++ b/spp_gis_indicators/__manifest__.py @@ -0,0 +1,28 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +{ + "name": "OpenSPP GIS Indicators", + "summary": "Choropleth visualization for area-level indicators", + "version": "19.0.2.0.0", + "category": "OpenSPP/GIS", + "author": "OpenSPP.org", + "website": "https://github.com/OpenSPP/OpenSPP2", + "license": "LGPL-3", + "development_status": "Alpha", + "depends": [ + "spp_gis", + "spp_hxl_area", + "spp_registry", + ], + "data": [ + "security/ir.model.access.csv", + "data/color_scales.xml", + "views/menu.xml", + "views/indicator_layer_views.xml", + "views/color_scale_views.xml", + "views/data_layer_views.xml", + ], + "application": False, + "installable": True, + "auto_install": False, +} diff --git a/spp_gis_indicators/data/color_scales.xml b/spp_gis_indicators/data/color_scales.xml new file mode 100644 index 00000000..62572bd6 --- /dev/null +++ b/spp_gis_indicators/data/color_scales.xml @@ -0,0 +1,100 @@ + + + + + + + + Blues (Sequential) + sequential + ["#f7fbff", "#deebf7", "#c6dbef", "#9ecae1", "#6baed6", "#4292c6", "#2171b5", "#08519c", "#08306b"] + Sequential blue color scale from light to dark. Good for showing increasing values like population density or service coverage. + 10 + + + + Greens (Sequential) + sequential + ["#f7fcf5", "#e5f5e0", "#c7e9c0", "#a1d99b", "#74c476", "#41ab5d", "#238b45", "#006d2c", "#00441b"] + Sequential green color scale from light to dark. Good for showing environmental or health indicators. 
+ 20 + + + + Reds (Sequential) + sequential + ["#fff5f0", "#fee0d2", "#fcbba1", "#fc9272", "#fb6a4a", "#ef3b2c", "#cb181d", "#a50f15", "#67000d"] + Sequential red color scale from light to dark. Good for showing risk, danger, or negative indicators. + 30 + + + + Oranges (Sequential) + sequential + ["#fff5eb", "#fee6ce", "#fdd0a2", "#fdae6b", "#fd8d3c", "#f16913", "#d94801", "#a63603", "#7f2704"] + Sequential orange color scale from light to dark. Good for showing moderate risk or attention areas. + 40 + + + + Purples (Sequential) + sequential + ["#fcfbfd", "#efedf5", "#dadaeb", "#bcbddc", "#9e9ac8", "#807dba", "#6a51a3", "#54278f", "#3f007d"] + Sequential purple color scale from light to dark. Good for showing education or program enrollment indicators. + 50 + + + + + + Red-Yellow-Green (Diverging) + diverging + ["#d73027", "#f46d43", "#fdae61", "#fee08b", "#ffffbf", "#d9ef8b", "#a6d96a", "#66bd63", "#1a9850"] + Diverging scale from red (negative) through yellow to green (positive). Good for showing progress against targets or deviations from a norm. + 60 + + + + Red-Blue (Diverging) + diverging + ["#67001f", "#b2182b", "#d6604d", "#f4a582", "#fddbc7", "#d1e5f0", "#92c5de", "#4393c3", "#2166ac", "#053061"] + Diverging scale from red to blue. Good for showing opposing conditions or bidirectional change. + 70 + + + + Spectral (Diverging) + diverging + ["#9e0142", "#d53e4f", "#f46d43", "#fdae61", "#fee08b", "#e6f598", "#abdda4", "#66c2a5", "#3288bd", "#5e4fa2"] + Spectral diverging scale across the color spectrum. Good for showing complex diverging patterns. + 80 + + + + Brown-Green (Diverging) + diverging + ["#8c510a", "#bf812d", "#dfc27d", "#f6e8c3", "#f5f5f5", "#c7eae5", "#80cdc1", "#35978f", "#01665e"] + Diverging scale from brown to green. Good for environmental indicators or land use changes. 
+ 90 + + + + + + Set1 (Categorical) + categorical + ["#e41a1c", "#377eb8", "#4daf4a", "#984ea3", "#ff7f00", "#ffff33", "#a65628", "#f781bf"] + Categorical color set with distinct colors. Good for showing different types or categories. + 100 + + + + Paired (Categorical) + categorical + ["#a6cee3", "#1f78b4", "#b2df8a", "#33a02c", "#fb9a99", "#e31a1c", "#fdbf6f", "#ff7f00"] + Paired categorical colors with light and dark variants. Good for comparing similar categories. + 110 + + + + diff --git a/spp_gis_indicators/models/__init__.py b/spp_gis_indicators/models/__init__.py new file mode 100644 index 00000000..cb359aad --- /dev/null +++ b/spp_gis_indicators/models/__init__.py @@ -0,0 +1,5 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import color_scale +from . import data_layer +from . import indicator_layer diff --git a/spp_gis_indicators/models/color_scale.py b/spp_gis_indicators/models/color_scale.py new file mode 100644 index 00000000..4ac193ef --- /dev/null +++ b/spp_gis_indicators/models/color_scale.py @@ -0,0 +1,172 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import json +import logging + +from odoo import _, api, fields, models +from odoo.exceptions import ValidationError + +_logger = logging.getLogger(__name__) + + +class GisColorScale(models.Model): + """GIS Color Scale for Choropleth Visualization. + + Defines color schemes for mapping data values to colors in choropleth maps. + Based on ColorBrewer schemes optimized for data visualization. 
+ """ + + _name = "spp.gis.color.scale" + _description = "GIS Color Scale" + _order = "sequence, name" + + name = fields.Char( + string="Scale Name", + required=True, + translate=True, + help="Name of the color scale (e.g., 'Blues', 'RdYlGn')", + ) + + scale_type = fields.Selection( + [ + ("sequential", "Sequential (low to high)"), + ("diverging", "Diverging (negative to positive)"), + ("categorical", "Categorical (distinct values)"), + ], + required=True, + default="sequential", + help="Type of color scale: sequential for ordered data, " + "diverging for data with a meaningful center, " + "categorical for distinct categories", + ) + + colors_json = fields.Text( + string="Colors (JSON)", + required=True, + help='JSON array of hex colors, e.g., ["#f7fbff", "#deebf7", "#c6dbef"]', + ) + + description = fields.Text( + string="Description", + translate=True, + help="Description of the color scale and recommended use cases", + ) + + sequence = fields.Integer( + string="Sequence", + default=10, + help="Display order in selection lists", + ) + + active = fields.Boolean( + default=True, + help="Inactive scales are hidden from selection", + ) + + @api.constrains("colors_json") + def _check_colors_json(self): + """Validate that colors_json is valid JSON array of hex colors.""" + for rec in self: + if not rec.colors_json: + continue + + try: + colors = json.loads(rec.colors_json) + except json.JSONDecodeError as e: + raise ValidationError(_("Invalid JSON in colors_json: %s", str(e))) from e + + if not isinstance(colors, list): + raise ValidationError(_("colors_json must be a JSON array")) + + if len(colors) < 2: + raise ValidationError(_("Color scale must have at least 2 colors")) + + # Validate hex color format + for color in colors: + if not isinstance(color, str): + raise ValidationError(_("All colors must be strings, got: %s", type(color).__name__)) + if not color.startswith("#") or len(color) not in (4, 7): + raise ValidationError( + _( + "Invalid hex color format: %s. 
" "Expected #RGB or #RRGGBB", + color, + ) + ) + + def get_colors(self): + """Return list of color strings from JSON. + + Returns: + list: List of hex color strings + """ + self.ensure_one() + if not self.colors_json: + return [] + return json.loads(self.colors_json) + + def get_color_for_value(self, value, min_val, max_val, num_classes=None, center=None): + """Get color for a specific value based on the scale. + + For sequential scales, maps linearly from min to max. + For diverging scales, maps relative to a center point (default: midpoint). + + Args: + value: The data value to map to a color + min_val: Minimum value in the dataset + max_val: Maximum value in the dataset + num_classes: Number of color classes (defaults to scale length) + center: Center value for diverging scales (default: midpoint) + + Returns: + str: Hex color code + """ + self.ensure_one() + colors = self.get_colors() + + if num_classes is None: + num_classes = len(colors) + + # Handle edge cases + if max_val == min_val: + # For diverging, return center color; for sequential, return first + if self.scale_type == "diverging": + return colors[len(colors) // 2] + return colors[0] + + if value <= min_val: + return colors[0] + + if value >= max_val: + return colors[-1] + + # Handle diverging scales with center point + if self.scale_type == "diverging": + if center is None: + center = (min_val + max_val) / 2 + + center_idx = len(colors) // 2 + + if value < center: + # Map to lower half of colors + if center == min_val: + normalized = 0 + else: + normalized = (value - min_val) / (center - min_val) + color_idx = int(normalized * center_idx) + else: + # Map to upper half of colors + if max_val == center: + normalized = 1 + else: + normalized = (value - center) / (max_val - center) + color_idx = center_idx + int(normalized * (len(colors) - center_idx - 1)) + + color_idx = max(0, min(color_idx, len(colors) - 1)) + return colors[color_idx] + + # Sequential scale: linear mapping + normalized = (value - 
min_val) / (max_val - min_val) + color_idx = int(normalized * (num_classes - 1)) + color_idx = min(color_idx, len(colors) - 1) + + return colors[color_idx] diff --git a/spp_gis_indicators/models/data_layer.py b/spp_gis_indicators/models/data_layer.py new file mode 100644 index 00000000..c0d2525f --- /dev/null +++ b/spp_gis_indicators/models/data_layer.py @@ -0,0 +1,26 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import logging + +from odoo import fields, models + +_logger = logging.getLogger(__name__) + + +class GisDataLayerIndicator(models.Model): + """Extend GIS Data Layer to support choropleth visualization.""" + + _inherit = "spp.gis.data.layer" + + geo_repr = fields.Selection( + selection_add=[ + ("choropleth", "Choropleth (Data-driven colors)"), + ], + ondelete={"choropleth": "set default"}, + ) + + indicator_layer_id = fields.Many2one( + "spp.gis.indicator.layer", + string="Indicator Configuration", + help="Configure which indicator to visualize as choropleth", + ) diff --git a/spp_gis_indicators/models/indicator_layer.py b/spp_gis_indicators/models/indicator_layer.py new file mode 100644 index 00000000..04d7f0c4 --- /dev/null +++ b/spp_gis_indicators/models/indicator_layer.py @@ -0,0 +1,402 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import json +import logging +from statistics import quantiles + +from odoo import _, api, fields, models +from odoo.exceptions import ValidationError + +_logger = logging.getLogger(__name__) + + +class GisIndicatorLayer(models.Model): + """GIS Indicator Layer Configuration. + + Configures how to visualize area-level indicators as choropleth maps. + Links a CEL variable to a color scale and classification method. 
+ """ + + _name = "spp.gis.indicator.layer" + _description = "GIS Indicator Layer Configuration" + _order = "sequence, name" + + name = fields.Char( + string="Configuration Name", + required=True, + help="Descriptive name for this indicator visualization", + ) + + sequence = fields.Integer( + string="Sequence", + default=10, + help="Display order", + ) + + active = fields.Boolean( + default=True, + help="Inactive configurations are hidden", + ) + + # ─── What to Visualize ─────────────────────────────────────── + + variable_id = fields.Many2one( + "spp.cel.variable", + string="Indicator Variable", + required=True, + help="CEL variable containing area-level indicator values", + ) + + variable_name = fields.Char( + related="variable_id.name", + string="Variable Name", + store=True, + ) + + period_key = fields.Char( + string="Period Key", + default="current", + help="Period identifier (e.g., '2024-12', 'current')", + ) + + incident_id = fields.Many2one( + "spp.hazard.incident", + string="Incident/Disaster", + help="Filter indicators by specific incident", + ) + + # ─── How to Visualize ─────────────────────────────────────── + + color_scale_id = fields.Many2one( + "spp.gis.color.scale", + string="Color Scale", + required=True, + help="Color scheme for mapping values to colors", + ) + + classification_method = fields.Selection( + [ + ("quantile", "Quantile (Equal count per class)"), + ("equal_interval", "Equal Interval (Equal range per class)"), + ("manual", "Manual Breaks"), + ], + default="quantile", + required=True, + help="Method for classifying continuous values into discrete color classes", + ) + + num_classes = fields.Integer( + string="Number of Classes", + default=5, + help="Number of color classes for quantile/equal_interval methods", + ) + + manual_breaks = fields.Char( + string="Manual Break Points", + help="Comma-separated break values for manual classification (e.g., '10,50,100,500')", + ) + + # ─── Computed Fields ─────────────────────────────────────── 
+ + break_values = fields.Text( + string="Computed Breaks", + compute="_compute_break_values", + help="Computed break values for the legend", + ) + + legend_html = fields.Html( + string="Legend HTML", + compute="_compute_legend_html", + help="HTML representation of the legend for display", + ) + + @api.constrains("num_classes") + def _check_num_classes(self): + """Validate number of classes.""" + for rec in self: + if rec.num_classes < 2: + raise ValidationError(_("Number of classes must be at least 2")) + if rec.num_classes > 10: + raise ValidationError(_("Number of classes must not exceed 10")) + + @api.constrains("manual_breaks") + def _check_manual_breaks(self): + """Validate manual break points.""" + for rec in self: + if rec.classification_method == "manual" and not rec.manual_breaks: + raise ValidationError(_("Manual breaks are required when using manual classification")) + + if rec.manual_breaks: + try: + # Parse without sorting to validate user input order + breaks = self._parse_manual_breaks(rec.manual_breaks, sort=False) + if len(breaks) < 1: + raise ValidationError(_("Manual breaks must contain at least one value")) + # Check that breaks are in ascending order + if breaks != sorted(breaks): + raise ValidationError(_("Manual breaks must be in ascending order")) + except ValueError as e: + raise ValidationError(_("Invalid manual breaks format: %s", str(e))) from e + + @staticmethod + def _parse_manual_breaks(breaks_str, sort=True): + """Parse comma-separated break values. 
+ + Args: + breaks_str: String of comma-separated numbers + sort: Whether to sort the result (default True) + + Returns: + list: List of float values (sorted if sort=True) + + Raises: + ValueError: If parsing fails + """ + if not breaks_str: + return [] + values = [float(x.strip()) for x in breaks_str.split(",")] + return sorted(values) if sort else values + + @api.depends("variable_id", "period_key", "incident_id", "classification_method", "num_classes", "manual_breaks") + def _compute_break_values(self): + """Compute break values based on actual data.""" + for rec in self: + if not rec.variable_id: + rec.break_values = "" + continue + + try: + # Get all indicator values for this configuration + values = rec._get_indicator_values() + + if not values: + rec.break_values = "" + continue + + # Compute breaks based on classification method + if rec.classification_method == "manual": + breaks = rec._parse_manual_breaks(rec.manual_breaks) + elif rec.classification_method == "quantile": + breaks = rec._compute_quantile_breaks(values, rec.num_classes) + elif rec.classification_method == "equal_interval": + breaks = rec._compute_equal_interval_breaks(values, rec.num_classes) + else: + breaks = [] + + rec.break_values = json.dumps(breaks) + + except Exception as e: + _logger.warning( + "Failed to compute break values for indicator layer %s: %s", + rec.id, + str(e), + ) + rec.break_values = "" + + @api.depends("break_values", "color_scale_id") + def _compute_legend_html(self): + """Generate HTML legend based on breaks and colors.""" + for rec in self: + if not rec.break_values or not rec.color_scale_id: + rec.legend_html = "" + continue + + try: + breaks = json.loads(rec.break_values) + colors = rec.color_scale_id.get_colors() + + if not breaks or not colors: + rec.legend_html = "" + continue + + # Generate legend HTML + html_parts = ['
'] + + # Number of classes is breaks + 1 + num_classes = len(breaks) + 1 + num_colors = len(colors) + + for i in range(num_classes): + # Get color for this class + color_idx = int((i / max(num_classes - 1, 1)) * (num_colors - 1)) + color = colors[color_idx] + + # Get range label + if i == 0: + label = f"< {breaks[0]:.2f}" + elif i == num_classes - 1: + label = f"≥ {breaks[-1]:.2f}" + else: + label = f"{breaks[i-1]:.2f} - {breaks[i]:.2f}" + + html_parts.append( + f'
' + f'' + f'{label}' + f"
" + ) + + html_parts.append("
") + rec.legend_html = "\n".join(html_parts) + + except Exception as e: + _logger.warning( + "Failed to compute legend HTML for indicator layer %s: %s", + rec.id, + str(e), + ) + rec.legend_html = "" + + def _get_indicator_values(self): + """Get indicator values from spp.hxl.area.indicator. + + Returns: + list: List of float values + """ + self.ensure_one() + + if not self.variable_id: + return [] + + # Build domain for indicator search + domain = [ + ("variable_id", "=", self.variable_id.id), + ] + + if self.period_key: + domain.append(("period_key", "=", self.period_key)) + + if self.incident_id: + domain.append(("incident_id", "=", self.incident_id.id)) + + # Search indicators + Indicator = self.env["spp.hxl.area.indicator"] + indicators = Indicator.search(domain) + + # Extract values + values = [ind.value for ind in indicators if ind.value is not False] + + return values + + @staticmethod + def _compute_quantile_breaks(values, num_classes): + """Compute quantile breaks for equal-count classification. + + Args: + values: List of numeric values + num_classes: Number of classes + + Returns: + list: Break points + """ + if not values or num_classes < 2: + return [] + + # Remove duplicates and sort + unique_values = sorted(set(values)) + + if len(unique_values) < num_classes: + # Not enough unique values for requested classes + return unique_values[:-1] # Use all but last as breaks + + # Compute quantiles + # quantiles(data, n=4) gives 3 cut points for 4 groups (quartiles) + # We want n-1 cut points for n classes + try: + breaks = quantiles(values, n=num_classes) + # quantiles returns n-1 values, which is what we want + return breaks + except Exception as e: + _logger.warning("Failed to compute quantiles: %s", str(e)) + return [] + + @staticmethod + def _compute_equal_interval_breaks(values, num_classes): + """Compute equal interval breaks. 
+ + Args: + values: List of numeric values + num_classes: Number of classes + + Returns: + list: Break points + """ + if not values or num_classes < 2: + return [] + + min_val = min(values) + max_val = max(values) + + if min_val == max_val: + return [] + + # Compute interval size + interval = (max_val - min_val) / num_classes + + # Generate breaks + breaks = [min_val + (i * interval) for i in range(1, num_classes)] + + return breaks + + def get_feature_colors(self, area_ids): + """Return dict mapping area_id to hex color based on indicator values. + + Args: + area_ids: List of area IDs to get colors for + + Returns: + dict: Mapping of area_id (int) to color (str) + """ + self.ensure_one() + + if not self.variable_id or not self.color_scale_id: + return {} + + # Build domain for indicator search + domain = [ + ("variable_id", "=", self.variable_id.id), + ("area_id", "in", area_ids), + ] + + if self.period_key: + domain.append(("period_key", "=", self.period_key)) + + if self.incident_id: + domain.append(("incident_id", "=", self.incident_id.id)) + + # Search indicators + Indicator = self.env["spp.hxl.area.indicator"] + indicators = Indicator.search(domain) + + if not indicators: + return {} + + # Get breaks and colors + if not self.break_values: + return {} + + breaks = json.loads(self.break_values) + colors = self.color_scale_id.get_colors() + + if not colors: + return {} + + # Build color mapping + color_map = {} + num_classes = len(breaks) + 1 + num_colors = len(colors) + + for ind in indicators: + # Determine which class this value falls into + class_idx = 0 + for i, break_val in enumerate(breaks): + if ind.value >= break_val: + class_idx = i + 1 + else: + break + + # Map class to color + color_idx = int((class_idx / max(num_classes - 1, 1)) * (num_colors - 1)) + color_map[ind.area_id.id] = colors[color_idx] + + return color_map diff --git a/spp_gis_indicators/pyproject.toml b/spp_gis_indicators/pyproject.toml new file mode 100644 index 00000000..4231d0cc --- 
/dev/null +++ b/spp_gis_indicators/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git a/spp_gis_indicators/readme/DESCRIPTION.md b/spp_gis_indicators/readme/DESCRIPTION.md new file mode 100644 index 00000000..649299cc --- /dev/null +++ b/spp_gis_indicators/readme/DESCRIPTION.md @@ -0,0 +1,52 @@ +Choropleth visualization for area-level indicators on GIS maps. Maps indicator values from CEL variables to colors using configurable classification methods and ColorBrewer-based color scales. Supports quantile, equal interval, and manual break classification with automatic legend generation. + +### Key Capabilities + +- Define indicator layer configurations that link CEL variables to color scales and classification methods +- Classify continuous indicator values into discrete color classes using quantile, equal interval, or manual breaks +- Apply preset ColorBrewer color scales (sequential, diverging, categorical) or define custom scales +- Compute break values automatically based on actual data distribution +- Generate HTML legends showing color-to-value mappings +- Map area features to colors for choropleth rendering in GIS data layers +- Filter indicators by period and hazard incident context + +### Key Models + +| Model | Description | +| -------------------------- | ------------------------------------------------------------------------------- | +| `spp.gis.indicator.layer` | Configuration linking a CEL variable to color scale and classification settings | +| `spp.gis.color.scale` | Color scheme definition with JSON array of hex colors | +| `spp.gis.data.layer` | Extended with `choropleth` geo representation option | + +### Configuration + +After installing: + +1. Navigate to **Settings > GIS Configuration > Color Scales** +2. Review preset ColorBrewer scales (Blues, Greens, Red-Yellow-Green, etc.) or create custom scales +3. Navigate to **Settings > GIS Configuration > Indicator Layers** +4. 
Create an indicator layer specifying the CEL variable, period key, color scale, and classification method +5. In an existing GIS data layer, set `geo_repr` to `choropleth` and select the indicator layer to visualize + +### UI Location + +- **Menu**: Settings > GIS Configuration > Indicator Layers +- **Menu**: Settings > GIS Configuration > Color Scales + +### Security + +| Group | Access | +| -------------------------------- | ----------------------------- | +| `spp_security.group_spp_user` | Read | +| `spp_security.group_spp_manager` | Read/write/create (no delete) | +| `spp_security.group_spp_admin` | Full CRUD | + +### Extension Points + +- Override `_compute_quantile_breaks()` or `_compute_equal_interval_breaks()` in `spp.gis.indicator.layer` to add custom classification algorithms +- Inherit `spp.gis.color.scale` and override `get_color_for_value()` to implement custom color mapping logic +- Extend `spp.gis.indicator.layer._get_indicator_values()` to support additional data sources beyond `spp.hxl.area.indicator` + +### Dependencies + +`spp_gis`, `spp_hxl_area` diff --git a/spp_gis_indicators/security/ir.model.access.csv b/spp_gis_indicators/security/ir.model.access.csv new file mode 100644 index 00000000..5f9d4107 --- /dev/null +++ b/spp_gis_indicators/security/ir.model.access.csv @@ -0,0 +1,5 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_spp_gis_color_scale_read,spp.gis.color.scale read,model_spp_gis_color_scale,spp_registry.group_registry_read,1,0,0,0 +access_spp_gis_color_scale_admin,spp.gis.color.scale admin,model_spp_gis_color_scale,spp_security.group_spp_admin,1,1,1,1 +access_spp_gis_indicator_layer_read,spp.gis.indicator.layer read,model_spp_gis_indicator_layer,spp_registry.group_registry_read,1,0,0,0 +access_spp_gis_indicator_layer_admin,spp.gis.indicator.layer admin,model_spp_gis_indicator_layer,spp_security.group_spp_admin,1,1,1,1 diff --git a/spp_gis_indicators/static/description/icon.png 
b/spp_gis_indicators/static/description/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..c7dbdaaf1dace8f0ccf8c2087047ddfcf584af0c GIT binary patch literal 15480 zcmbumbyQqU(=SR05Hz?GTnBdsm*BxQ_yEH|aCf%^cefzHo!|s_cXxLSE;*Cueee5y z-&tp!b=SRr%%17JtE+c)byrpYs^*)rqBI&Z5i$%644SOWM^)(ez~2ud0`yw0U6BR- zLb8+j><9z%zUS}fO(NraVi*{>9t(ACCvAmK{3f>6EFe=`V=#-GwH=fi21ZcC%?@N@ z33ehk216`tgy_y&+UdwGOoiyQxE0tG>?FYE7BU_VU^Nd#brTOu6QC)bh%mCC8$XnR zHP{J6?q+Red4B@!uI#I$jJr&Mb9s0>iD<$ zuR+wn_Wv~g)v~hqXCyn2gCkho-3}~7rwVqob#^cT|HI*Lr++h%Z~%jxz^1|+Y#iLo zY(Qpqpdjo2_UP{z|J6a#%}Lf&m<$tn~GKO;D=HTYw;RdpEvGW4C`Plx`;h%^9lV07{*~I*>D8d~7A^Wd; z|IiAu{+(Sbi+@eZKaGFS%71$NYs&sb_}|p>|6Wz5CjU{BowI}0KTE*WgcWQBwg%fc z{Z$hCzm;Ta!tZ3^WCi{&6^U6n{ZAD^*B-wW$Oa-r=f-RbHUl|ZInfDg*!P87jt$pw{;L! zurM(Pfvw2pY|U-RrEP6IKvrN!!N2tX4+V7f|D%KdPxB1jp8uKX|M5a@AiMvz6QE@L z|EyqJ2X$LpD`5$cjSGmJUKMO(3U&ZHFp!(tnh1RqllIVYQ3J`EJCZv)f*pi3#3YP4 zY;_;(mw~W(F*95)Y)WYoZkRgrLS)eSvJR)Y$S4!fK zScE24BMTw?G63}=yN?Nr!v4s(L#bh+ z0QHoB|LYajx?X9+TnwfJwuDj{M>z;4bu|DB7H;cherVEncj0{^h73csRh5-&U)E;4 zNLVpq{=h+rsFoNmYz*8AfN`m{D6C^2%WV~zRAFNZuAXKcKMErci*PnF0ZSfM)erUu zjcjUMJ_wuF3RSJ9O~@Z4hhap;#(_0ma`J>1A0~<{s?m|hcz{e!L&u6Tp}I}Ep<>4f zOJS|^MQ_DPOkz?*AhrH}k<9ZOEt4`FAyRDqXjTP|E_#oO27Gr&f`y5OM@B1VqH_ES zCTweSMCx}a*0xU}@o6fA8_gjjy z2Q57xXmg+m(g6q!aM8mCkithJ--tyXkCjku;FTF{?B>(>FABGzSGUggUumv`+C6Ow zvd1XmI~#j#dG0vl>e;QtxGX?gJsdQ+{-4BuDt%|kxthFj<_dORK@Rc;K*$U=E~?kF zJ$(-vwj?T<5%x2c(fneoKTjS|rpBh!8`&y_y)z)7Hj@j%)+~SkVR8K<@`g&WZjo&G z8?wNoqyeOzOEhl;E4C^_e6^7aF#Fx~(z-&NxzGQQC}?L?Gl>qxwKg;MZTpfMvw^V{ zmT;>h9A?JFxNyIC1IPqQldk82>?{LtnMt2Xo$HmXr3gvbffJCJF_|;ZU)lTX#2_{h zNT=4@taez10pm@hvzTLIAAD(`*Y6XZr7!w3a5sy>KWlOvJ92!fyI0Yjt7_+Syy+$Q z9i0@K!{?>N+F!J-sDJMIV zySlF4rF1c1>K1)CaHBkwkwVV z_lfaZhdgZH%&PK>eJxwrWn!sr5&Gc_9Cr|XDCGA_XN{>#)>Qgl3%Uyi`^M@mPTT`? 
zf;&`{13;P8O-+u@Hlr4IZO)ivM_w*HE{G3gydPIhU7gTd{}##Tw;S&&d-&?A1qaWy zLlnn3TyAMVFPcpfZ`1wMt^$+g?Z(_ki{MSWsfo#KTB33CzU=9qQnoXtdS(mcmLjCY zalOGBnh*x}*Hy&3cD8}2EUr+55qEqP9$UCvz=o=kb9%C^{(Ki9<6A_yTJAVGBAyn3 zIGGLv4!o55o*J5V_xfbsyPk=kC$C`%S6?3qh!N5V(<2M#9p=&i>al1cGc#6pd37`_ z3RMpN=*|e9{nd~zZKGX@%J-K$=_&@x#D$&<8NApJ?i3jM!5X8abIiAPla~}@BE@Ep zytt_iw|xY%OQxngqE(gy8xY@vUMZuc7&hw5I)$M+5$X^P z;i3S7-Tgw2w#pV1R->>O;O~UyyX#p3>DD8rfL3FNO@kS@Uw?F5(eln`lA5WMkAVwk z6(1gr5%VDf8>tN;vdaPZYs8yBSJ^oba~WDr`qr8Oh#ok4VLQ3lrJrZ_Xm(T@FM0qa z&kxcByGv0F-Fx%t@9vZ7JP$}yAKpn-r^LhBTLwsS1J)bs6T{~SIQ6H$7qanXOrs1*Z5c~M%>RPFWj8X;g2@Lhm?HnEOmg0If6exM<_Fa9>!5P zv6(xpC9c)Yz1{ue6}vOIV(QK_dbu(^ad>yOhx?(?cWg0n`J-318#Q=eVZOiuW}A1? z=YKkEE?wkr+3_PaFv)gRxm)xjwl4{Gcz$5;$RixdVH2Ds+=H?$xTUn`QZ<#!D zWRP4okEG?OLnjctlnTlg5)kz*Yn=}m<^joJPN)}L??y(J86Fk_PaZ`{q?IKql37h; zDKAk4_|={_s%_q*rZ}MznUn?=QC9T$A!MnV>~b~n=uXQdTx6` z)C4lw2Vd8?lJqhAV%eA%mg9eTcNjsG(q@@$etAi9{uE1m1hj1!jelwHV;%czJVoYcrZ=vANJHDiH$G) zek&XC9nl=^c*OxElr7lsK6+aN5c^^)p0n;58u$EC`TpvB9KEV=zK9QdPpmKCHANCK zliMaTnv1|oI8A%NctUtQg)_&D9wYY|Iwm&nkURyL3PVzKxQI{K6C{+zFGk`XQGDw} zv$z(!mCfUPd6h*?RowKmNy|p2Mri1laA2VU*^f5fL8Ne4IPc)ybITH=)f$-My53); zfsHD{N>w!&UkTyOxD>>Ey0g^%;L)A?P_Nyhcd+dwhH5DN?-^*`{IEk;(NK z+#s-OPFRbbX|Uo9=Y@)pgD@SCE!UCmYYVmF+$i4Kgz2lR3|L_DxX-u)DSS39jaf=r zT6deEL2ULQJHvU~(|2vtWZ zLueKkQ*#|Bj9fi4c9{)Y&z^&}>=~e5Y-HCkQ7Mw zXCH5+<@YAqb|zki@0M(%ccdpqTJ62ZPg~bZ9%dCF9k!S%_lroxG?x3NpXG4ZBn}!6 z+=_Y!1xqxCN~6zvXAyVg)}YKk4ib#`<>h_p{S$I>vi*LYB5ST+3mf_t)@{}Ih`};0 z29&^wWHWl>8kd64(wY}#hrVQAh&s7gbeHd|IZAStUZ&PSb3$B{PvD=+ zQkSe%LJ0K>h&Kj#S8^)h9GXvu0IZ=3Z>3DSi8{T;a z0b*muMkNGwF;o1RwtCZDg#97P8vE(~`hga&m%k(gTR6qI^gs7yTIO@ay}Te)Hx6Eg zd%2g}G&u)zqqNrD5nG*q8XFK&z9RjIS(Q6DYG^p!6>M30Ef+5|le|Ud>m9T2((_H@ zmT!+5i$HN{<G+1EEoc4AS9vm>QDZpO>K6M{G^b)txOnqNOvTfV zwR^y>(e?%b$$pu79ydu6M>3?3(>(2u(=dN7HK{92%u6nm^iDzS@)?5XBIF{B#CklVg~i#wA$0R9A~jYSgt2E^Wysxcp!2- zJy+&-mzNYaZTSq9cjqTE4)av2f-f$0H4?(;)nFcK>Cqg8V1?|=v!Y(*^*0|9I;_Rhhiwc^cQM&I 
zs2P#p?_{f-yhS#$Z%c?knJ_g7Zhv%L*{tf?J?E8j94bImWV|QMY5x(sTCL_62EdT)xWZ#KY;8qi zzh&-cv3YOkp`;b}=k-{kwTe#GjC6kh`OVE6++^#^n`2$=$t@u!WTiOfEEDax{k6!e z@X;4kniF^87>l=U_UXRvHKDfp>vDPBi03g%yHSkk525SM)oqOWGqYp4$RD*p_K`zZ zX5;Tx^`n&DE+;ujb3D5nIv6Mom3jfVZ5mIfq!jf|AhPk0p*BCT0x8R9-BE8{1h;FQswTy?v#0}-38B!kczy{x;$7!io^DZ=IcJY##vEYDk$eMl;r^~T9QM) zQtubaNKNtRwxEV=;ce#Z4d5>nKyB3}bT9N~-_eBgFflJtua+a>1#3WkFbOfK>wALd zZQJFC>tFY+A8cE=I=Kr&9)?klwAYSC8EBln7`QBc`8b2H&Uw!rU@nG`1p+M z_PaAlj^s@QS_#v-S7a>mvT=DTFWy=ZjjGOXi5cF@lwE;85aI6_m*ok~r?Q!5Pm%ZT?$+H*@!&OVYR1ei_3V-7Rug|y! z6$Mw3zfY~M&=eRqCgXBTaB?UI^f`~CMbB=}$Mp5L0V>1!a|Lt#a+4g!0f$6;UDKhZ zlL^j^u4Vmh%}jY4)Cwro5tJ1AQGq1f_B}RfX)D2nMS91)Y;HB$dH?2hjtC#Za)<9l z3Xk+rZ6knNtjm9pc2D}(wY6@|ZX5l(cbwO2oUZoqp~U011TV#IhMJfGfJ%N_y5pEr z$$IA>?#}aHx9?aiZ|z18x!q7sz$jnVblQi|AhW85+>7y6btIi|OvFBI?tT(4eXVCg zeP8}0!iu@r=PR>rJ3wq*!=CC<_ihZL5#EG)I$$%%kh7e$zQ1S@xv6Or7!_P&%MPMk zACVS&BE)NLV(qN8MOV5C`xbf8IbN#MmeEcdWYA$OwFX;!1z7PC6DoHe>+fVejhMzC z1S8qnm<(G9MXIvx3DE3&Qo+7^LNi#xb$$M2LL^jXh)cbb3h%G(i91(WK}lj~^MOAm zA?4cXvn!=%bKJ^P|1)ix8c1H28Z^2L({~B=9);^+7Yn7*L|+tIAJG4NPUMk$gC5&z zQeEbR@FbxHdE`+3^XSBSPAWGx5R7Z8yZbLJA~9Q9x(L@tqt{q61Em+ikqTux8^kZ8DQrK4FB3r5Qx$xHG!>D| zA6?vk{*>E?Mj18vgMk%hzN`ZwTFY1ltHNF5S%);i;&*l-ACcsI3pnD=iX?}s!s}HC z1As^77XFUGAm4O;CtDdaLT6%hOQ>4n&pujtYU7jL7onxKBM-_>lW}>$dS5% z{BRX)SUzjTUq2m{I3;m4ULG3n!EI@PR04_rJlShCF+6IG-&{VfY0G+|OLpY);~Tcs ze2Y)Mw|IXXzocJ3+sL=yh{1EwAusXV3dh~TOl+|FVY|@xU{j6Ef?(e4;reCW_43yL z<76IskRMUIl)Uop?JzOW;#+p#(crQzC^Ot~KFDqBhT`=!Rk%4%b1(y9h4j`weN&J! 
zbyYm>{7aU7#kdNy2Zqx-hUyr=|4NbL%;CXS<-w%jL)X z(3_2Lz*r;mD9!Y`&iV2=x+?sNv)b*Cwn}{YDuYzmi4vn!c+r}V?AzoFZAreI-4!3+ zY{Td}nm@04BAKyM->B1)oKRD#r|^W|jYVjcSAs1YI=xx>$jpFe*KbLKby=*pW)eFs z3ZSXO09)sD}&}V6ipbE(Y~?r$YTn{V-9};R(?Z6wH9Dqxnt8t&~=!h3e%FyMY4}MkN68X-2kX^|Im5y$c6sN{v&x4l_54O-p{PrDCP` zpOp-`$#WIx;mb_%^9f@!#b^Gv=)X8dl(G-ESKr#_UVal#eY9!`MLqLs4DUCH##vQR z*2n?o*KjGB*u!M&?xGOuHa@Hn5s811Ma6+Zz~-qI^cWAxkz$M9EYF+65Y<;MSmJ$H zrmYW$Ykr63;#?@3U~a9Yw$VB(W+T|LSC!M@RS~PJ#aBNlsh@MN)U_GZ+y4ALdVH-Z zeZ7rMl*xi!f6B*qX6Hr-YTWI3@7e|R;u4nUs>YIecpOF-fke*=0lHfETe!@N?>>DK zH=;xe|L}n!7YQPC**{jgAE6=E{~Z{`{~?;C(Z&12K1p^KRB#YWTRU?2RV!>AocDk%*gKH;(HiW`{1C zLgUncZHb`P0zyddG&COjHi2(%mgVv|gu%=`hPvnQickVe$8=lkQe4}&0*&it^=Vd~ zVz5rO$n;=raC-!!5NB|-XZOI{gu$ai!cKY`c7x4qn^>9w9*^aS`tLIdSOvMcwHy)z zisz9h?)wgaHN^ZNO1m|OBga`a*37=gS%}sQp9b3`#|ZInRQKnNUU+Pz_?9%$FWdS@ zDK<8SL9C$=vFNfCZZ*J(vU|VM+)OqeUmu(7t6G4CEYvRUzK*`Qc@f3dneu^f+iG!g zxv+3dL+uJwWvD@yd7%RLmAuTRViISB>GdFBTIdcF28A`w;mJ|!FUG!hkwvww>N>lf z{H={Dx0PPqaV^{;baO8&Z#4W&_23HA>#O7j4>~jvphax5{G4W932b+Oq40dauN4&f zHNyo<4ks5vV~{U|A^h&ku)Ss;0}g#CCAB3 zx!5?ck zw{=3Qkp*j2pk4kf)hQYui~#aNqul$soANTlEt(Bg?n5v;dVgpctq zgK8zA*my$SKTIf^aU6WAcAVx*VfEg7ZkR4Xkr@Rqgp~nl)WKhG;{9Wdad0u6&{I#2 zxKYvs;M&vr=pb8WY#((GbJMo#x zxUcc)yW;DGO<4}gi6di1&45IQZgY_)!A;*)F;lrKSVH5fXFw*)gR$$6cTNB0*>AV^ zw*?Qj?T1Fkol|$DCNdN;)9*Q?6o(#96gu%a7X>rtoCf7n-ECFW5M|6Fal%oQ_HyFT88UEWBj-cYRmoJO?h1i zO8Pb`owZMsyI;28tb{Eo<>GSuU*PNNxjvSV(T~f_NvO^Dd~+Bv4RFyUso1bz_tFj% zCD1oMN-R7Ol)jcmv3xpONAc4_)~6O6({Dh!!AVxU&q++=$T73FoVhi&?s_pYN1!5s zSLaZGTy$Mp1n=}=+x6NJ7#4%I%HoA<%SY4XdQFZO;2iFiQP0678T*1q9`dllr^)b=7CHG-dsj-%14Er*pm zRd^>8M#r;=H+aYIt_QD=wbxFhWWMQQ>)ENMK;y%e z-Iu6Jt^6|6l4x)u>Ylp;h!pn4O+sEjgtk(?U5Hp84IOs(ACPd#;dKgps1N!cG}yQ-Gvsh`Zg?5UQf#j}u^uV0^fBdXFH8Osx2Rn>nD?ts=VM5s(?3r8fR! 
zJ`WX_!j}fLK<(%2=>n7ezAMSisdM;Al^QJ_vPLj;mPAD$I~PIuyU==s!xUY zodiCv+RDXwU$axLZtbz}8BHq_1XqHo-^Kx4+f%NMl&->(9MD7SO zj&Z#}?1hK1F$*vE4Hl-52+kbud@c@%{KDPxs}pYe1D656Fec#qx9+xdyZ42hGFio=?^)UY_>^ z(>JtY69@hM-~dl%4gVj2NS%f*G|0Te8IlHlUZ{1k{U#Aat)_Xldr;o1s3ZVmargPD z;rI1QJ?8u0>5}@tQ>^!bMR8(pgdU-=nVzFZN}3-}d2iu(c}?B!g+r&S-sFg(f%#=% zzo*;ppCC$j0$qWo20Ac8Gv%A07eM$IXBHv$ov2<=J=H-@-^-4pGZ02IribPegl|FT^(ObV6vO4);?$6A_cuA+Vq1WmKIXgG`?%u zrna{Hm7|qSZ2EYj-pae%klBl5e4Y(Q1~p_8K*?L8**B54K6R1iQ(L|wGo#bCl5%MZ z{MaKF{!lpQcY)8@^9p+-R{^~zI?PY8%s*F`Jk24WY@RNKU0ezwO!ekJFkp|~0(i49 z_o5;d+*Sc(Jxsf-=YV#pfx^q|3d>HKjaXhv8upfShP@MxO3ECHoT?wPg+rAJ6j6d% zuauS&I`}i%EghL!ET5Xxwzd97;lDf-pr|@|G8SGFIUE-hbZa?YaLw!-y(k#t(PILzr}1;;g9@KM&6c28i1cn_xi z(F2R>(iI%Xx#oN~+xepmM0U{~Zb-ADBKO>klUgz|STaYC2~5Jw-3Rp*0M~QeAK_ zLT0jdy1u+74qNvm@lVU?i`<{VyiM-Y&YKwl`Xjzk0A)rN&XTzJ%RhJ_zfDfUp6RejT}_&K~L%hzXRUt_YZ--idup z{Yr*e6)6k#)Uosm3Dq!P+F%<1B=Fb-hzMKL%lx|uDvf&tWb2JnpRL}zSR>)WD&oy}+RNe&Hx|`=VR=Wi6 z7&fK)_A2^4+$>xJ4og%N88LV2S%ppZIE zH}jy~y(@yAt|h*1Nxup80`#-q*0us&eb+uNNliaG@F!bj(_qP@^T>u)(1yV%FpQ$n zoKE3aW`7m0ClO~zsXnJn<$2eljws67*~7k}IRJrorv^i1N>PKfyeLy1>m9%`U>1ap zV;J{k2lR8fH=dT%$B_tRpR2BUFNTgQel2SkW5@I})FPn?lSPtXkB>FA*)4J8-*uAW zCj}gqkZb2+L@sJuIUggVf$OL;Y>9EQh7-fNqMs=W2B_3h8cl_69%LDsEY$=;9~~S` zMh@TOiRbWVES8&JU#7~Z$xYEa`to)$0DF2z2*5Lsl*Ex<_be}5`*h@>p^QK!M@P+% z#{3!j79}}Lm5Fr$lPZBYi+=zlA@aChAd_LxVid4#ykJ+4hoZ1$en6D#@EK`u4o>V& zud!SQXGsUrKUS+``^EDi4qnc;`NSp8QTiL1dq1V|9XIXS zV;zJb0ww|#p08c?^r4SaJIza(jxgVH0p`+7SR4;gt3y0wS{a(dC@t93kb(EUJh7r& z7MBx@f$B+}QZfvbYQHp(Lu{6-@=K)G)# z;RhYWAL`WxFppsry{Tk|`?4(3?>~%ESH%KE zvcS^HtZR~v}xc}=m zvR>5rLTBTsUDrd2`cEyI1D3J_?_lI|P-a1-O+Q07RS0!rKToiU|Hn8yPY>0P*kiZc z6(Xfc;fiU?ES|Vm+ks*Vpm_tejb_d-eAbc^lTRL@sJAyiWcR9{&$P+wgPs~tFZ!}l z^6r|Pg5#quRe6tZSsl$ggp}?@@q&MP50oksD}Nwf6Z)+xqSVfwk?b#H5FhXn;mW?g zee;BWj^!4}gGSGiNNN?)^t(tIj;X|PR|DOk=*!w+gnJufT-E(`1wkOySh?PpR^$pf z=C&Fm7Jc|imd4*ZU&i=Zg0L;lkL9lVe!*P|<`G|EeP!OfoDbn!NH&?6Z=CV3jYg|# z?BpJ9lL>ALqBI(XWi4d6aqMAxVmN!5cj;efWj->$d#)NEJJ#<|R^9vcL-0&M-$#eJ 
zzrJyDNSoZz;=rD3V-miQ`OdMVdl2YHgHr|zD}9~CE)C84Tc1J1$`$3U&wl93G=jXD zZ9mA>7Sd(Tk3uUEial1UOn+{wlLde%u+wNNp8GgWG9I7a!G8;4$o z&2Ar8?dKiphR(Scds1)b80|OkURQWunL*dL1lfeu=EcspYtvf6+Di-L{;zd;19Afh z3TKDBiw*7_i^M3@x(AL@A~gpKShwgYD^G=;gxS8@9O=!cILWlyvqzha!M_d-1^uHa z0?SWjk&$Rw%}0NVm|eELTYj+3)|1iojv8};RmX+q5PG0x0z#`}9+*fyQ2{%ps7U;nnT3i34#>rSn2@(?>~%+MK$^b;eyk>j`K;Pxxt zUp)+`Wwxnw)l0~pdDmBNFbxO1%N1e|?`#a-wevf4WLUA6I)pOIM44FJ_75}Y7% za<*RY2Q7gH&(-O~t*m~}u&qGlDp4yW*3(ZHUi^}OdM%SXXPZjGZG(Utpil0LdTTRnCpSa}-t+SE`GR5a05{VN*n65{~ zi+7QCL&nSPW{W|;T=bXC(S}yeza@Zb%Y}M>bqdbK(|tE@kxUAbk*YcsUAYWuYwGL8 zXSK~8GsGO2jDT6{A~I|(i?tJVY;~Ikn%nJ5=u=PiI!-cViCVec8O4!_tVPC3-)Ziu z0Zoc+qud@e>ES`yL()+w8?FNF%<&fKS}whZL<|P!ZzL-mEZ?rOr|+*v^0EA!)!E~O_ba%&;*9IA zolizsa!TimzSm(GUWK++qz=+Ik&+@820c#?Ztm%XCE>V2FG1_;7W{V>WIW-d<~qN> z{)|8qXh!q-b2TG1AMYIt@65s?DEzUAV}}1r(M|F5F1#~WsH5)G2VY3OLi&0;my9QM zL);fdhGxx5^-4^Cd$-&mgc9N1BdV&j%1ih|7-dd@-0mFO&5E0iP^T<1nt~)(*5+P`KrfMS6pkxSQoNXO}tH@;S*V@zdXcUsE&Qh zkoX)6{0fsMPULHE!|ZD>_SPqK?8M}^w1UeW_$&2kT$zqS{*Dl$>2{rq^AAKf+$3I4 zslbVh%{kmT=4(zI?%M8hIVBDV0c+GUi)Gr*qmoMBmxR}%K_R8vtBq0#&Ln<8D%dwN zX>kpAbVWC%Ox9N${Hjz6(^5A2n+f1Ik0GeHcLj`&aX>$e34*En8Q{+qdkxN`e0P!Q zuT;iYl}dM4*Q0MgBHJ<84@Drs)lj-ad^2LCL9)}-LW5l0bPW}DSE?e=%7tHRP6c!f zCP99CfJmiG!~WA`Zs>WX_>h?A{&2eO`K0L$B~4a>l4;-RvWE$eh*xW9ls}c*r%2m& zhNbWPIhO^{^mI=usAMI#22L*o5en8{Hbu|a4~HQ9hIR0+{~&iYEP}?yfr8%s`I47J zMwZl{wRZeoXI={s$a<8gt4*Hsx&iJrQu%P^vb{~RDum$htr@A?>pqxhJV!|gGX zUL`*6%=J@W#QW;LfrYA4&d56JDBXjn3uVUsl49ZLp=uN_rZPtr;;F^iL`u&7(bYYE z=-J{N7h1bT#haD>N0mi%ys9&r^nC9XKh(_H-B%M1HioTc@Fodl-(@UPAvoeevvF5M z;u?_+EkqcJFxApR&f{>;#tk41X4PLBpc|{$-TFD}ZVekXDPVQ+63XB7XBQ-8=C;P3 z^%)ycbSmcLP%&N(tleOR42l01d>VaW(oyOFt;?XYt}bL$;8)^3M}APjS8m#_k+KnP z&zhAc!sRm}|8kYN?tC#ptdd*2*cMd_z!=a0ogK@^%YBXyrw*k^hJhtb)UY-Pp|U`b z;vm3-f2h$+A&q7+M}Mg-r9>2BEm^YPNmZ( z*7I4&!nFAzxpw5$n0?QdSE`^*s^a6@SRrre`i+>=SLtxw^z-@jraYqw@bSip;u!dK zTL9hZVjx|G5={P{9@`(L2W{{d>D%clZO4f70pf2!tc#MFU?)YLt-?Z9$-c2McL4VN z7W9D4WMOAN+6=I1Dfa)8xF9t6=O(>9LB!e%vOnrk?M0> 
zhwcO)UQOE|!|+=@H*wsyK!gv02uY?=#%_C5C4PYHuGzw%hucEDs@DbbO_Caz!aR{U z+)TI!k?P4(-i%WA5m2zQmZE^K6<+p?B|X5EGq$zw9(PfkANGFIjOw2MWg zKzz_(5iAbl)Py69NJEsQh^vxIDgheWS-`flG+rfqdEJahS*YUq)RCw7wJ6IA7i?_T zbD!-Qf(p&XhfA-KFoYvL#L~7U6T{tD%|dbL)o=N6;2}mx z!H~)1Fa$U)<8*lRd8*EEO<$_82C=Yv=lCg~$8GQ49)$Nx5fJEEaxF zl)u`I99^+<`OtY7_q=-`^1k9=uN<@9D*Adv0Q2^am|DSo=F?vA0J6!bIBOyEjpJ@H z2*UlQP-z!NN@6biXcIsE-B$>G4p#Bsxw4!W^oDs9n-adqf1greR# zfARMgj5m9@`A}9Oc~h#WMos)V%?-=nk`+S6=Q3Tqj&FJVY_lXU-j8{UUQwRer*vNi zfYU!5rO0Ef|MdN1vc@5-WmGYcr9CI@`kiQ7RL+ztb22U{WAeB5;o6w`-4GP9`W`>S z&_}b=Tjc-o#5;+YZe(ff8d~EuaP3uP~tc86jg5qVOZ*{cwJGU z(V#giqR;*#}M7(H=WegGj8QE45StkwQ)t zkDqA#;#%akszszb-d6hC&Y>(@IusF!_+GjwxIeDH(7}w)oA7)sg+;iwNG45>Jl=*4 znht+k)I22GMQiXwNWP<7d0VRrHC&g~daE&5*a?1)=?cFU!1v)>Lhov{i~V|%SV+9X z7((>eXMfQ-lj3T*{T)ezIo7*te0-jq5m672Z%@7nd89JjVZ=_Bbo1hLe3vR5GZ8VQK$3BS3rpv(TI z*if``DGY`pJFPa|qyC_%M6lc!v`aS!?Bf{jCRy3h2>YLHBX-_Z0cNP-YKG+9aVn&&bOWM*j$kk8_d6 z?(xLgln?2|OMK3fRpgLJC=#$Sl$ZdT<~F@JI^%N{SsMK=7C#~w8JCp|ODKUjfulX0 zRNnimv2(P`!_|JMWw#2*v%0*WmV!FHXJnXm$FI8bV27U>i%M0TS`CxQy!TVI4+Hku zCU|>U(96OE+nSptiO19IE`KjZoFmE96%r=Y#&G77AMX8@(Ad$co7FH1**~KH7%QV@ zq2D^0XG`W%Kwy))B%jtc34_bP*&~!hvXkx2x61x?cm8VL+eR&j+qieTj zPcf!P__24db-NUOd7qw4jxNS~Rn}k`w;L-!+JMkh*E38;hxBHxU%E}SZ(^oQnTt9( z6U*##{JUsmtt^A>6&UNN5mxBooYco1=6i8#6YtoyZl1O{hP>>^Lrts-xuXYNTZ$>u zpfaVW+VhuTa-W6(Y5#`hX)X;5E-i}{XxWY&i-0|tDN1{4YkvF|i+8ibuT!lOje;w< zkwW?d17jC~Qo*}a1btjLC$U87&ALRfBUk{XiT&dcIexY(=W<~(r-<*5(6%;&Rm^bw z25DIcIe0Kk;h0MuZVN`^O#>~4>J*7fwa5457~M`DW}CLMhrohubV?aHB0*q%i?F@) zYwum|^K0)Lu4E}LYfhYog~=@Pv>I86X>U>2n?#DFw@m4G^1i2s(0@%DkwFgxASub&ET6!HG@u+jB+p_yO(GoOV3#Nw9K0GZvg&5PWug{2eB{b>*22oK9 zncm+N91?M1gpr#Brp6}vt7WNs#8Bn}aw1X4oh$4)t6v( zbHB1*nkJIRlGpzHfGgQqz$g + + + + +README.rst + + + +
+ + + +Odoo Community Association + +
+

OpenSPP GIS Indicators

+ +

Alpha License: LGPL-3 OpenSPP/openspp-modules

+

Choropleth visualization for area-level indicators on GIS maps. Maps +indicator values from CEL variables to colors using configurable +classification methods and ColorBrewer-based color scales. Supports +quantile, equal interval, and manual break classification with automatic +legend generation.

+
+

Key Capabilities

+
    +
  • Define indicator layer configurations that link CEL variables to +color scales and classification methods
  • +
  • Classify continuous indicator values into discrete color classes +using quantile, equal interval, or manual breaks
  • +
  • Apply preset ColorBrewer color scales (sequential, diverging, +categorical) or define custom scales
  • +
  • Compute break values automatically based on actual data distribution
  • +
  • Generate HTML legends showing color-to-value mappings
  • +
  • Map area features to colors for choropleth rendering in GIS data +layers
  • +
  • Filter indicators by period and hazard incident context
  • +
+
+
+

Key Models

+ ++++ + + + + + + + + + + + + + + + + +
ModelDescription
spp.gis.indicator.layerConfiguration linking a CEL variable +to color scale and classification +settings
spp.gis.color.scaleColor scheme definition with JSON +array of hex colors
spp.gis.data.layerExtended with choropleth geo +representation option
+
+
+

Configuration

+

After installing:

+
    +
  1. Navigate to Settings > GIS Configuration > Color Scales
  2. +
  3. Review preset ColorBrewer scales (Blues, Greens, Red-Yellow-Green, +etc.) or create custom scales
  4. +
  5. Navigate to Settings > GIS Configuration > Indicator Layers
  6. +
  7. Create an indicator layer specifying the CEL variable, period key, +color scale, and classification method
  8. +
  9. In an existing GIS data layer, set geo_repr to choropleth and +select the indicator layer to visualize
  10. +
+
+
+

UI Location

+
    +
  • Menu: Settings > GIS Configuration > Indicator Layers
  • +
  • Menu: Settings > GIS Configuration > Color Scales
  • +
+
+
+

Security

+ ++++ + + + + + + + + + + + + + + + + +
GroupAccess
spp_security.group_spp_userRead
spp_security.group_spp_managerRead/write/create (no delete)
spp_security.group_spp_adminFull CRUD
+
+
+

Extension Points

+
    +
  • Override _compute_quantile_breaks() or +_compute_equal_interval_breaks() in spp.gis.indicator.layer +to add custom classification algorithms
  • +
  • Inherit spp.gis.color.scale and override +get_color_for_value() to implement custom color mapping logic
  • +
  • Extend spp.gis.indicator.layer._get_indicator_values() to support +additional data sources beyond spp.hxl.area.indicator
  • +
+
+
+

Dependencies

+

spp_gis, spp_hxl_area

+
+

Important

+

This is an alpha version, the data model and design can change at any time without warning. +Only for development or testing purpose, do not use in production. +More details on development status

+
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • OpenSPP.org
  • +
+
+
+

Maintainers

+

This module is part of the OpenSPP/openspp-modules project on GitHub.

+

You are welcome to contribute.

+
+
+
+
+
+ + diff --git a/spp_gis_indicators/tests/__init__.py b/spp_gis_indicators/tests/__init__.py new file mode 100644 index 00000000..721eb799 --- /dev/null +++ b/spp_gis_indicators/tests/__init__.py @@ -0,0 +1,5 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import test_color_scale +from . import test_data_layer +from . import test_indicator_layer diff --git a/spp_gis_indicators/tests/test_color_scale.py b/spp_gis_indicators/tests/test_color_scale.py new file mode 100644 index 00000000..54bf01ee --- /dev/null +++ b/spp_gis_indicators/tests/test_color_scale.py @@ -0,0 +1,375 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import json + +from odoo.exceptions import ValidationError +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + + +@tagged("post_install", "-at_install") +class TestColorScale(TransactionCase): + """Test GIS Color Scale model.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls.ColorScale = cls.env["spp.gis.color.scale"] + + # Standard test colors + cls.valid_colors = ["#f7fbff", "#c6dbef", "#6baed6", "#2171b5", "#08306b"] + + def test_create_color_scale(self): + """Test basic color scale creation.""" + scale = self.ColorScale.create( + { + "name": "Test Blues", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + self.assertEqual(scale.name, "Test Blues") + self.assertEqual(scale.scale_type, "sequential") + self.assertTrue(scale.active) + self.assertEqual(scale.sequence, 10) + + def test_all_scale_types(self): + """Test all scale type selections.""" + for scale_type in ["sequential", "diverging", "categorical"]: + scale = self.ColorScale.create( + { + "name": f"Test {scale_type}", + "scale_type": scale_type, + "colors_json": json.dumps(self.valid_colors), + } + ) + self.assertEqual(scale.scale_type, scale_type) + + def test_colors_json_validation_invalid_json(self): + """Test that 
invalid JSON is rejected.""" + with self.assertRaises(ValidationError) as ctx: + self.ColorScale.create( + { + "name": "Invalid JSON", + "scale_type": "sequential", + "colors_json": "not valid json", + } + ) + self.assertIn("Invalid JSON", str(ctx.exception)) + + def test_colors_json_validation_not_array(self): + """Test that non-array JSON is rejected.""" + with self.assertRaises(ValidationError) as ctx: + self.ColorScale.create( + { + "name": "Not Array", + "scale_type": "sequential", + "colors_json": json.dumps({"color": "#ff0000"}), + } + ) + self.assertIn("must be a JSON array", str(ctx.exception)) + + def test_colors_json_validation_min_colors(self): + """Test that at least 2 colors are required.""" + with self.assertRaises(ValidationError) as ctx: + self.ColorScale.create( + { + "name": "Single Color", + "scale_type": "sequential", + "colors_json": json.dumps(["#ff0000"]), + } + ) + self.assertIn("at least 2 colors", str(ctx.exception)) + + def test_colors_json_validation_invalid_hex(self): + """Test that invalid hex colors are rejected.""" + with self.assertRaises(ValidationError) as ctx: + self.ColorScale.create( + { + "name": "Invalid Hex", + "scale_type": "sequential", + "colors_json": json.dumps(["#ff0000", "notahex"]), + } + ) + self.assertIn("Invalid hex color format", str(ctx.exception)) + + def test_colors_json_validation_color_not_string(self): + """Test that non-string colors are rejected.""" + with self.assertRaises(ValidationError) as ctx: + self.ColorScale.create( + { + "name": "Non-string Color", + "scale_type": "sequential", + "colors_json": json.dumps(["#ff0000", 123]), + } + ) + self.assertIn("must be strings", str(ctx.exception)) + + def test_colors_json_validation_short_hex(self): + """Test that short hex format (#RGB) is accepted.""" + scale = self.ColorScale.create( + { + "name": "Short Hex", + "scale_type": "sequential", + "colors_json": json.dumps(["#fff", "#000"]), + } + ) + self.assertEqual(scale.get_colors(), ["#fff", "#000"]) + + 
def test_get_colors(self): + """Test get_colors method returns parsed colors.""" + scale = self.ColorScale.create( + { + "name": "Get Colors Test", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + colors = scale.get_colors() + self.assertEqual(colors, self.valid_colors) + self.assertEqual(len(colors), 5) + + def test_get_colors_empty(self): + """Test get_colors with no colors_json.""" + scale = self.ColorScale.create( + { + "name": "Empty Colors", + "scale_type": "sequential", + "colors_json": json.dumps(["#000", "#fff"]), + } + ) + # Clear the colors_json field + scale.colors_json = "" + + colors = scale.get_colors() + self.assertEqual(colors, []) + + def test_get_color_for_value_min(self): + """Test get_color_for_value at minimum value.""" + scale = self.ColorScale.create( + { + "name": "Color Mapping", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + color = scale.get_color_for_value(0, 0, 100) + self.assertEqual(color, self.valid_colors[0]) + + def test_get_color_for_value_max(self): + """Test get_color_for_value at maximum value.""" + scale = self.ColorScale.create( + { + "name": "Color Mapping", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + color = scale.get_color_for_value(100, 0, 100) + self.assertEqual(color, self.valid_colors[-1]) + + def test_get_color_for_value_below_min(self): + """Test get_color_for_value below minimum.""" + scale = self.ColorScale.create( + { + "name": "Color Mapping", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + color = scale.get_color_for_value(-10, 0, 100) + self.assertEqual(color, self.valid_colors[0]) + + def test_get_color_for_value_above_max(self): + """Test get_color_for_value above maximum.""" + scale = self.ColorScale.create( + { + "name": "Color Mapping", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + color = 
scale.get_color_for_value(150, 0, 100) + self.assertEqual(color, self.valid_colors[-1]) + + def test_get_color_for_value_equal_min_max(self): + """Test get_color_for_value when min equals max.""" + scale = self.ColorScale.create( + { + "name": "Color Mapping", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + color = scale.get_color_for_value(50, 50, 50) + self.assertEqual(color, self.valid_colors[0]) + + def test_get_color_for_value_midpoint(self): + """Test get_color_for_value at midpoint.""" + scale = self.ColorScale.create( + { + "name": "Color Mapping", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + color = scale.get_color_for_value(50, 0, 100) + # At 50%, should be middle color + self.assertIn(color, self.valid_colors) + + def test_active_toggle(self): + """Test active field functionality.""" + scale = self.ColorScale.create( + { + "name": "Active Test", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + } + ) + + self.assertTrue(scale.active) + + scale.active = False + self.assertFalse(scale.active) + + scale.active = True + self.assertTrue(scale.active) + + def test_ordering(self): + """Test scales are ordered by sequence and name.""" + scale1 = self.ColorScale.create( + { + "name": "B Scale", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + "sequence": 20, + } + ) + + scale2 = self.ColorScale.create( + { + "name": "A Scale", + "scale_type": "sequential", + "colors_json": json.dumps(self.valid_colors), + "sequence": 10, + } + ) + + scales = self.ColorScale.search([("id", "in", [scale1.id, scale2.id])]) + + # scale2 (sequence 10) should come before scale1 (sequence 20) + self.assertEqual(scales[0], scale2) + self.assertEqual(scales[1], scale1) + + def test_description_field(self): + """Test description field.""" + scale = self.ColorScale.create( + { + "name": "Described Scale", + "scale_type": "diverging", + 
"colors_json": json.dumps(self.valid_colors), + "description": "A diverging scale for positive/negative values", + } + ) + + self.assertEqual(scale.description, "A diverging scale for positive/negative values") + + def test_diverging_scale_center_color(self): + """Test diverging scale returns center color for equal min/max.""" + # 5 colors: indices 0,1,2,3,4 - center is index 2 + diverging_colors = ["#d73027", "#fc8d59", "#ffffbf", "#91bfdb", "#4575b4"] + scale = self.ColorScale.create( + { + "name": "Diverging Test", + "scale_type": "diverging", + "colors_json": json.dumps(diverging_colors), + } + ) + + # When min == max, should return center color + color = scale.get_color_for_value(50, 50, 50) + self.assertEqual(color, diverging_colors[2]) # Center color + + def test_diverging_scale_below_center(self): + """Test diverging scale maps values below center to first half.""" + diverging_colors = ["#d73027", "#fc8d59", "#ffffbf", "#91bfdb", "#4575b4"] + scale = self.ColorScale.create( + { + "name": "Diverging Test", + "scale_type": "diverging", + "colors_json": json.dumps(diverging_colors), + } + ) + + # Value at min should be first color + color = scale.get_color_for_value(-100, -100, 100) + self.assertEqual(color, diverging_colors[0]) + + # Value slightly below center should be in lower half + color = scale.get_color_for_value(-25, -100, 100) + self.assertIn(color, diverging_colors[:3]) # Should be in lower half + + def test_diverging_scale_above_center(self): + """Test diverging scale maps values above center to second half.""" + diverging_colors = ["#d73027", "#fc8d59", "#ffffbf", "#91bfdb", "#4575b4"] + scale = self.ColorScale.create( + { + "name": "Diverging Test", + "scale_type": "diverging", + "colors_json": json.dumps(diverging_colors), + } + ) + + # Value at max should be last color + color = scale.get_color_for_value(100, -100, 100) + self.assertEqual(color, diverging_colors[-1]) + + # Value slightly above center should be in upper half + color = 
scale.get_color_for_value(25, -100, 100) + self.assertIn(color, diverging_colors[2:]) # Should be in upper half + + def test_diverging_scale_at_center(self): + """Test diverging scale at center point returns center color.""" + diverging_colors = ["#d73027", "#fc8d59", "#ffffbf", "#91bfdb", "#4575b4"] + scale = self.ColorScale.create( + { + "name": "Diverging Test", + "scale_type": "diverging", + "colors_json": json.dumps(diverging_colors), + } + ) + + # Value exactly at center (0 for -100 to 100 range) + color = scale.get_color_for_value(0, -100, 100) + # Should be center or close to center + center_idx = len(diverging_colors) // 2 + self.assertIn(color, diverging_colors[center_idx - 1 : center_idx + 2]) + + def test_diverging_scale_custom_center(self): + """Test diverging scale with custom center point.""" + diverging_colors = ["#d73027", "#fc8d59", "#ffffbf", "#91bfdb", "#4575b4"] + scale = self.ColorScale.create( + { + "name": "Diverging Test", + "scale_type": "diverging", + "colors_json": json.dumps(diverging_colors), + } + ) + + # Custom center at 25 (not midpoint of 0-100) + # Value below center should map to lower colors + color = scale.get_color_for_value(10, 0, 100, center=25) + self.assertIn(color, diverging_colors[:3]) + + # Value above center should map to upper colors + color = scale.get_color_for_value(75, 0, 100, center=25) + self.assertIn(color, diverging_colors[2:]) diff --git a/spp_gis_indicators/tests/test_data_layer.py b/spp_gis_indicators/tests/test_data_layer.py new file mode 100644 index 00000000..760c8a4e --- /dev/null +++ b/spp_gis_indicators/tests/test_data_layer.py @@ -0,0 +1,164 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
+ +import json + +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + + +@tagged("post_install", "-at_install") +class TestDataLayerIndicators(TransactionCase): + """Test GIS Data Layer extension for indicator visualization.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls.DataLayer = cls.env["spp.gis.data.layer"] + cls.ColorScale = cls.env["spp.gis.color.scale"] + cls.IndicatorLayer = cls.env["spp.gis.indicator.layer"] + cls.Variable = cls.env["spp.cel.variable"] + + # Create test color scale + cls.color_scale = cls.ColorScale.create( + { + "name": "Test Blues", + "scale_type": "sequential", + "colors_json": json.dumps(["#f7fbff", "#c6dbef", "#6baed6", "#2171b5", "#08306b"]), + } + ) + + # Create test variable + # Required fields: name, cel_accessor, value_type, source_type + cls.variable = cls.Variable.create( + { + "name": "test_pop", + "cel_accessor": "test_pop", + "label": "Test Population", + "value_type": "number", + "source_type": "constant", + } + ) + + # Create test indicator layer + cls.indicator_layer = cls.IndicatorLayer.create( + { + "name": "Population Indicator", + "variable_id": cls.variable.id, + "color_scale_id": cls.color_scale.id, + "classification_method": "quantile", + "num_classes": 5, + } + ) + + # Find or create a geo field for testing + geo_field = cls.env["ir.model.fields"].search( + [ + ("model", "=", "spp.area"), + ("ttype", "=", "geo_polygon"), + ], + limit=1, + ) + + if not geo_field: + # Create a mock data layer without geo_field + cls.geo_field_id = False + else: + cls.geo_field_id = geo_field.id + + def test_geo_repr_choropleth_option(self): + """Test that choropleth is available as geo_repr option.""" + # Get the selection options for geo_repr + field_info = self.DataLayer.fields_get(["geo_repr"]) + selection = field_info["geo_repr"]["selection"] + + # Find choropleth in selection + choropleth_found = any(opt[0] == "choropleth" for opt in selection) + 
self.assertTrue(choropleth_found, "choropleth should be available in geo_repr selection") + + def test_create_data_layer_with_choropleth(self): + """Test creating a data layer with choropleth representation.""" + # Get any available GIS view + gis_view = self.env["ir.ui.view"].search( + [ + ("type", "=", "spp_gis"), + ], + limit=1, + ) + + if not gis_view: + self.skipTest("No GIS view available for testing") + + # Try to create data layer + layer = self.DataLayer.create( + { + "name": "Test Choropleth Layer", + "view_id": gis_view.id, + "geo_repr": "choropleth", + "indicator_layer_id": self.indicator_layer.id, + } + ) + + self.assertEqual(layer.name, "Test Choropleth Layer") + self.assertEqual(layer.geo_repr, "choropleth") + self.assertEqual(layer.indicator_layer_id, self.indicator_layer) + + def test_indicator_layer_field_exists(self): + """Test that indicator_layer_id field exists on data layer.""" + field_info = self.DataLayer.fields_get(["indicator_layer_id"]) + self.assertIn("indicator_layer_id", field_info) + self.assertEqual(field_info["indicator_layer_id"]["relation"], "spp.gis.indicator.layer") + + def test_data_layer_without_indicator(self): + """Test that data layer can be created without indicator.""" + gis_view = self.env["ir.ui.view"].search( + [ + ("type", "=", "spp_gis"), + ], + limit=1, + ) + + if not gis_view: + self.skipTest("No GIS view available for testing") + + # Create without indicator_layer_id + layer = self.DataLayer.create( + { + "name": "Non-Choropleth Layer", + "view_id": gis_view.id, + "geo_repr": "basic", + } + ) + + self.assertEqual(layer.name, "Non-Choropleth Layer") + self.assertFalse(layer.indicator_layer_id) + + def test_change_geo_repr_to_choropleth(self): + """Test changing geo_repr to choropleth after creation.""" + gis_view = self.env["ir.ui.view"].search( + [ + ("type", "=", "spp_gis"), + ], + limit=1, + ) + + if not gis_view: + self.skipTest("No GIS view available for testing") + + layer = self.DataLayer.create( + { + 
"name": "Changeable Layer", + "view_id": gis_view.id, + } + ) + + # Change to choropleth + layer.write( + { + "geo_repr": "choropleth", + "indicator_layer_id": self.indicator_layer.id, + } + ) + + self.assertEqual(layer.geo_repr, "choropleth") + self.assertEqual(layer.indicator_layer_id, self.indicator_layer) diff --git a/spp_gis_indicators/tests/test_indicator_layer.py b/spp_gis_indicators/tests/test_indicator_layer.py new file mode 100644 index 00000000..cd5d3981 --- /dev/null +++ b/spp_gis_indicators/tests/test_indicator_layer.py @@ -0,0 +1,470 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import json + +from odoo.exceptions import ValidationError +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + + +@tagged("post_install", "-at_install") +class TestIndicatorLayer(TransactionCase): + """Test GIS Indicator Layer functionality.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + # Create test color scale + cls.color_scale = cls.env["spp.gis.color.scale"].create( + { + "name": "Test Blues", + "scale_type": "sequential", + "colors_json": json.dumps(["#f7fbff", "#c6dbef", "#6baed6", "#2171b5", "#08306b"]), + } + ) + + # Create test CEL variable + # Required fields: name, cel_accessor, value_type, source_type + cls.variable = cls.env["spp.cel.variable"].create( + { + "name": "test_population", + "cel_accessor": "test_population", + "label": "Test Population", + "description": "Test population indicator", + "value_type": "number", + "source_type": "constant", + } + ) + + # Create test areas (spp.area uses draft_name, name is computed) + cls.area1 = cls.env["spp.area"].create( + { + "draft_name": "Test Area 1", + "code": "TA1", + } + ) + cls.area2 = cls.env["spp.area"].create( + { + "draft_name": "Test Area 2", + "code": "TA2", + } + ) + cls.area3 = cls.env["spp.area"].create( + { + "draft_name": "Test Area 3", + "code": "TA3", + } + ) + + # Create test indicators with different values 
+ cls.indicator1 = cls.env["spp.hxl.area.indicator"].create( + { + "area_id": cls.area1.id, + "variable_id": cls.variable.id, + "period_key": "2024-12", + "value": 100.0, + } + ) + cls.indicator2 = cls.env["spp.hxl.area.indicator"].create( + { + "area_id": cls.area2.id, + "variable_id": cls.variable.id, + "period_key": "2024-12", + "value": 500.0, + } + ) + cls.indicator3 = cls.env["spp.hxl.area.indicator"].create( + { + "area_id": cls.area3.id, + "variable_id": cls.variable.id, + "period_key": "2024-12", + "value": 1000.0, + } + ) + + def test_create_indicator_layer(self): + """Test basic indicator layer creation.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Population Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + "classification_method": "quantile", + "num_classes": 3, + } + ) + + self.assertEqual(layer.name, "Test Population Layer") + self.assertEqual(layer.variable_id, self.variable) + self.assertEqual(layer.period_key, "2024-12") + self.assertEqual(layer.color_scale_id, self.color_scale) + self.assertTrue(layer.active) + + def test_num_classes_validation(self): + """Test that num_classes is validated.""" + # Too few classes + with self.assertRaises(ValidationError): + self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "color_scale_id": self.color_scale.id, + "num_classes": 1, + } + ) + + # Too many classes + with self.assertRaises(ValidationError): + self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "color_scale_id": self.color_scale.id, + "num_classes": 15, + } + ) + + def test_manual_breaks_validation(self): + """Test manual breaks validation.""" + # Missing manual breaks + with self.assertRaises(ValidationError): + self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "color_scale_id": 
self.color_scale.id, + "classification_method": "manual", + "manual_breaks": "", + } + ) + + # Invalid format + with self.assertRaises(ValidationError): + self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "color_scale_id": self.color_scale.id, + "classification_method": "manual", + "manual_breaks": "abc,def", + } + ) + + # Not in ascending order + with self.assertRaises(ValidationError): + self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "color_scale_id": self.color_scale.id, + "classification_method": "manual", + "manual_breaks": "100,50,200", + } + ) + + def test_manual_breaks_valid(self): + """Test valid manual breaks.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "color_scale_id": self.color_scale.id, + "classification_method": "manual", + "manual_breaks": "200,500,800", + } + ) + + self.assertEqual(layer.manual_breaks, "200,500,800") + + def test_get_indicator_values(self): + """Test retrieving indicator values.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + } + ) + + values = layer._get_indicator_values() + self.assertEqual(len(values), 3) + self.assertIn(100.0, values) + self.assertIn(500.0, values) + self.assertIn(1000.0, values) + + def test_get_indicator_values_filtered(self): + """Test retrieving indicator values with filters.""" + # Create indicator with different period + self.env["spp.hxl.area.indicator"].create( + { + "area_id": self.area1.id, + "variable_id": self.variable.id, + "period_key": "2024-11", + "value": 50.0, + } + ) + + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + } + 
) + + values = layer._get_indicator_values() + # Should only get 3 values from period 2024-12 + self.assertEqual(len(values), 3) + self.assertNotIn(50.0, values) + + def test_compute_quantile_breaks(self): + """Test quantile break computation.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + "classification_method": "quantile", + "num_classes": 3, + } + ) + + values = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000] + breaks = layer._compute_quantile_breaks(values, 3) + + # Should have 2 breaks for 3 classes + self.assertEqual(len(breaks), 2) + # Breaks should be between min and max + self.assertTrue(100 < breaks[0] < 1000) + self.assertTrue(100 < breaks[1] < 1000) + # Breaks should be ordered + self.assertTrue(breaks[0] < breaks[1]) + + def test_compute_equal_interval_breaks(self): + """Test equal interval break computation.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "color_scale_id": self.color_scale.id, + } + ) + + values = [100, 200, 300, 400, 500] + breaks = layer._compute_equal_interval_breaks(values, 4) + + # Should have 3 breaks for 4 classes + self.assertEqual(len(breaks), 3) + # Interval should be 100 (400 range / 4 classes) + self.assertAlmostEqual(breaks[0], 200.0, places=1) + self.assertAlmostEqual(breaks[1], 300.0, places=1) + self.assertAlmostEqual(breaks[2], 400.0, places=1) + + def test_compute_break_values(self): + """Test break values computation.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + "classification_method": "quantile", + "num_classes": 2, + } + ) + + # break_values should be computed automatically + self.assertTrue(layer.break_values) + + # Should be valid JSON + breaks = 
json.loads(layer.break_values) + self.assertIsInstance(breaks, list) + # Should have 1 break for 2 classes + self.assertEqual(len(breaks), 1) + + def test_compute_break_values_manual(self): + """Test break values with manual classification.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + "classification_method": "manual", + "manual_breaks": "200,600", + } + ) + + # break_values should be computed from manual_breaks + self.assertTrue(layer.break_values) + + breaks = json.loads(layer.break_values) + self.assertEqual(len(breaks), 2) + self.assertEqual(breaks[0], 200.0) + self.assertEqual(breaks[1], 600.0) + + def test_compute_legend_html(self): + """Test legend HTML generation.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + "classification_method": "quantile", + "num_classes": 3, + } + ) + + # legend_html should be computed automatically + self.assertTrue(layer.legend_html) + self.assertIn("gis-choropleth-legend", layer.legend_html) + self.assertIn("legend-item", layer.legend_html) + self.assertIn("color-box", layer.legend_html) + + def test_get_feature_colors(self): + """Test getting feature colors.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + "classification_method": "quantile", + "num_classes": 3, + } + ) + + area_ids = [self.area1.id, self.area2.id, self.area3.id] + color_map = layer.get_feature_colors(area_ids) + + # Should have colors for all areas + self.assertEqual(len(color_map), 3) + self.assertIn(self.area1.id, color_map) + self.assertIn(self.area2.id, color_map) + self.assertIn(self.area3.id, color_map) + + # Colors should be valid 
hex codes + for color in color_map.values(): + self.assertTrue(color.startswith("#")) + self.assertIn(len(color), [4, 7]) # #RGB or #RRGGBB + + # Different values should get different colors (usually) + # (Can't guarantee this due to quantile breaks, but likely) + unique_colors = set(color_map.values()) + self.assertGreaterEqual(len(unique_colors), 1) + + def test_get_feature_colors_filtered(self): + """Test getting feature colors with subset of areas.""" + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": self.variable.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + } + ) + + # Only request colors for two areas + area_ids = [self.area1.id, self.area2.id] + color_map = layer.get_feature_colors(area_ids) + + # Should only have colors for requested areas + self.assertEqual(len(color_map), 2) + self.assertIn(self.area1.id, color_map) + self.assertIn(self.area2.id, color_map) + self.assertNotIn(self.area3.id, color_map) + + def test_parse_manual_breaks(self): + """Test parsing manual break strings.""" + layer = self.env["spp.gis.indicator.layer"] + + # Valid breaks + breaks = layer._parse_manual_breaks("10,50,100,500") + self.assertEqual(breaks, [10.0, 50.0, 100.0, 500.0]) + + # With whitespace + breaks = layer._parse_manual_breaks("10, 50, 100, 500") + self.assertEqual(breaks, [10.0, 50.0, 100.0, 500.0]) + + # Empty string + breaks = layer._parse_manual_breaks("") + self.assertEqual(breaks, []) + + # Invalid format should raise + with self.assertRaises(ValueError): + layer._parse_manual_breaks("abc,def") + + def test_edge_case_single_value(self): + """Test classification with single unique value.""" + # Create indicators with same value + var2 = self.env["spp.cel.variable"].create( + { + "name": "test_constant", + "cel_accessor": "test_constant", + "label": "Test Constant", + "value_type": "number", + "source_type": "constant", + } + ) + + for area in [self.area1, self.area2, self.area3]: + 
self.env["spp.hxl.area.indicator"].create( + { + "area_id": area.id, + "variable_id": var2.id, + "period_key": "2024-12", + "value": 100.0, + } + ) + + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": var2.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + "classification_method": "equal_interval", + "num_classes": 3, + } + ) + + # Should handle gracefully + breaks = layer._compute_equal_interval_breaks([100.0, 100.0, 100.0], 3) + self.assertEqual(breaks, []) + + def test_edge_case_no_indicators(self): + """Test layer with no matching indicators.""" + var2 = self.env["spp.cel.variable"].create( + { + "name": "test_empty", + "cel_accessor": "test_empty", + "label": "Test Empty", + "value_type": "number", + "source_type": "constant", + } + ) + + layer = self.env["spp.gis.indicator.layer"].create( + { + "name": "Test Layer", + "variable_id": var2.id, + "period_key": "2024-12", + "color_scale_id": self.color_scale.id, + } + ) + + values = layer._get_indicator_values() + self.assertEqual(len(values), 0) + + # break_values should be empty + self.assertEqual(layer.break_values, "") + + # get_feature_colors should return empty dict + color_map = layer.get_feature_colors([self.area1.id]) + self.assertEqual(color_map, {}) diff --git a/spp_gis_indicators/views/color_scale_views.xml b/spp_gis_indicators/views/color_scale_views.xml new file mode 100644 index 00000000..ccaed2ae --- /dev/null +++ b/spp_gis_indicators/views/color_scale_views.xml @@ -0,0 +1,100 @@ + + + + + + spp.gis.color.scale.tree + spp.gis.color.scale + + + + + + + + + + + + + + spp.gis.color.scale.form + spp.gis.color.scale + +
+ +
+ +
+ + + + + + + + + + + + + + +
+

+ Enter colors as JSON array of hex codes, e.g.: +
+ ["#f7fbff", "#deebf7", "#c6dbef", "#9ecae1", "#6baed6"] +

+
+
+
+
+
+
+ + + + spp.gis.color.scale.search + spp.gis.color.scale + + + + + + + + + + + + + + + + + + Color Scales + gis-color-scales + spp.gis.color.scale + tree,form + {'search_default_active': 1} + +

+ Create a new color scale +

+

+ Color scales define how data values are mapped to colors in choropleth visualizations. + Several preset scales are provided based on ColorBrewer schemes. +

+
+
+ + + + +
diff --git a/spp_gis_indicators/views/data_layer_views.xml b/spp_gis_indicators/views/data_layer_views.xml new file mode 100644 index 00000000..8e10d23f --- /dev/null +++ b/spp_gis_indicators/views/data_layer_views.xml @@ -0,0 +1,42 @@ + + + + + + spp.gis.data.layer.form.indicators + spp.gis.data.layer + + + + + + + + + + + + + + + + + + spp.gis.data.layer.popup.form.indicators + spp.gis.data.layer + + + + + + + + + + + + + + + + diff --git a/spp_gis_indicators/views/indicator_layer_views.xml b/spp_gis_indicators/views/indicator_layer_views.xml new file mode 100644 index 00000000..b581ef8d --- /dev/null +++ b/spp_gis_indicators/views/indicator_layer_views.xml @@ -0,0 +1,124 @@ + + + + + + spp.gis.indicator.layer.tree + spp.gis.indicator.layer + + + + + + + + + + + + + + + + spp.gis.indicator.layer.form + spp.gis.indicator.layer + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ + + + spp.gis.indicator.layer.search + spp.gis.indicator.layer + + + + + + + + + + + + + + + + + + + + + Indicator Layers + gis-indicator-layers + spp.gis.indicator.layer + tree,form + {'search_default_active': 1} + +

+ Create a new indicator layer configuration +

+

+ Indicator layers define how area-level indicators are visualized as choropleth maps. + Configure the data source (variable, period, incident) and visualization settings + (color scale, classification method). +

+
+
+ + + + +
diff --git a/spp_gis_indicators/views/menu.xml b/spp_gis_indicators/views/menu.xml new file mode 100644 index 00000000..4d91f2c7 --- /dev/null +++ b/spp_gis_indicators/views/menu.xml @@ -0,0 +1,11 @@ + + + + + diff --git a/spp_hxl/README.rst b/spp_hxl/README.rst new file mode 100644 index 00000000..c068cf70 --- /dev/null +++ b/spp_hxl/README.rst @@ -0,0 +1,162 @@ +======================= +OpenSPP HXL Integration +======================= + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:6ac8b288fda5285dc32109e1aa1876445a3a6584ee8ccbe17c1537f927ad05cb + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2Fopenspp--modules-lightgray.png?logo=github + :target: https://github.com/OpenSPP/openspp-modules/tree/19.0/spp_hxl + :alt: OpenSPP/openspp-modules + +|badge1| |badge2| |badge3| + +Humanitarian Exchange Language (HXL) integration for OpenSPP data +interoperability. Provides a registry of standard HXL hashtags and +attributes, tools for creating export profiles with HXL tagging, and +extends CEL variables with HXL mapping capabilities. Enables OpenSPP to +exchange data with humanitarian coordination systems using standardized +HXL tagging conventions. + +Key Capabilities +~~~~~~~~~~~~~~~~ + +- Registry of HXL 1.1 standard hashtags organized by category + (geographic, population, activity, indicator, etc.) +- Registry of HXL attributes for data disaggregation (gender, age group, + population type, etc.) 
+- Export profile definition for model-specific column mappings with HXL + tags +- HXL tag composition from hashtag and attributes (e.g., + #affected+f+children) +- Integration with CEL variables for import/export behavior + configuration +- Validation of HXL tag format (hashtag starts with #, attributes start + with +) + +Key Models +~~~~~~~~~~ + ++----------------------------------+----------------------------------+ +| Model | Description | ++==================================+==================================+ +| ``spp.hxl.tag`` | Registry of HXL hashtags (e.g., | +| | #affected, #adm2, #indicator) | ++----------------------------------+----------------------------------+ +| ``spp.hxl.attribute`` | Registry of HXL attributes | +| | (e.g., +f, +children, +code) | ++----------------------------------+----------------------------------+ +| ``spp.hxl.export.profile`` | Export template defining model | +| | and column mapping with HXL tags | ++----------------------------------+----------------------------------+ +| ` | Column definition with field | +| `spp.hxl.export.profile.column`` | path and HXL tag assignment | ++----------------------------------+----------------------------------+ +| ``spp.cel.variable`` (extended) | CEL variable with HXL hashtag, | +| | attributes, and import/export | +| | behavior | ++----------------------------------+----------------------------------+ + +Configuration +~~~~~~~~~~~~~ + +After installing: + +1. Navigate to **Custom > HXL > Configuration > HXL Hashtags** to view + or add hashtags +2. Navigate to **Custom > HXL > Configuration > HXL Attributes** to view + or add attributes +3. Create export profiles at **Custom > HXL > Export Profiles** + specifying: + + - Target model for export + - Column definitions with field paths + - HXL tag assignment (manual or structured via hashtag + attributes) + +4. 
For CEL variables, navigate to **Custom > Studio > Logic Variables** + and use the HXL Mapping tab to define: + + - HXL hashtag and attributes for the variable + - Import action (map to field, create event, store as variable, or + skip) + - Export inclusion preference + +UI Location +~~~~~~~~~~~ + +- **Menu**: Custom > HXL (main menu) +- **Configuration**: Custom > HXL > Configuration (HXL Hashtags, HXL + Attributes) +- **Export Profiles**: Custom > HXL > Export Profiles +- **CEL Variable Extension**: Custom > Studio > Logic Variables > HXL + Mapping tab + +Security +~~~~~~~~ + +================================ ========= +Group Access +================================ ========= +``base.group_user`` Read +``spp_security.group_spp_admin`` Full CRUD +================================ ========= + +Extension Points +~~~~~~~~~~~~~~~~ + +- Inherit ``spp.hxl.export.profile`` and override export logic to + implement custom HXL export formats +- Extend ``spp.hxl.tag`` or ``spp.hxl.attribute`` to add domain-specific + HXL tags (set ``is_standard=False``) +- Inherit ``spp.cel.variable`` to customize HXL import/export behavior + based on ``hxl_import_action`` field + +Dependencies +~~~~~~~~~~~~ + +``spp_security``, ``spp_cel_domain``, ``spp_studio``, ``spp_vocabulary`` + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. 
+ +Credits +======= + +Authors +------- + +* OpenSPP.org + +Maintainers +----------- + +This module is part of the `OpenSPP/openspp-modules `_ project on GitHub. + +You are welcome to contribute. diff --git a/spp_hxl/__init__.py b/spp_hxl/__init__.py new file mode 100644 index 00000000..0650744f --- /dev/null +++ b/spp_hxl/__init__.py @@ -0,0 +1 @@ +from . import models diff --git a/spp_hxl/__manifest__.py b/spp_hxl/__manifest__.py new file mode 100644 index 00000000..0ebc8257 --- /dev/null +++ b/spp_hxl/__manifest__.py @@ -0,0 +1,30 @@ +{ + "name": "OpenSPP HXL Integration", + "summary": "Humanitarian Exchange Language (HXL) support for data interoperability. " + "Adds HXL hashtag mapping to variables and export profiles for humanitarian coordination.", + "category": "OpenSPP/Integration", + "version": "19.0.2.0.0", + "author": "OpenSPP.org", + "website": "https://github.com/OpenSPP/OpenSPP2", + "license": "LGPL-3", + "development_status": "Alpha", + "depends": [ + "spp_security", + "spp_cel_domain", + "spp_studio", + "spp_vocabulary", + ], + "data": [ + "security/ir.model.access.csv", + "data/hxl_hashtags.xml", + "data/hxl_attributes.xml", + "views/hxl_tag_views.xml", + "views/hxl_attribute_views.xml", + "views/hxl_export_profile_views.xml", + "views/cel_variable_views.xml", + "views/menus.xml", + ], + "application": False, + "installable": True, + "auto_install": False, +} diff --git a/spp_hxl/data/hxl_attributes.xml b/spp_hxl/data/hxl_attributes.xml new file mode 100644 index 00000000..d818d3ed --- /dev/null +++ b/spp_hxl/data/hxl_attributes.xml @@ -0,0 +1,283 @@ + + + + + +f + Female + Female gender disaggregation + gender + + + + + +m + Male + Male gender disaggregation + gender + + + + + +i + Intersex/Other + Intersex or other gender disaggregation + gender + + + + + + +infants + Infants + Infants (typically 0-1 years) + age + + + + + +children + Children + Children (typically 2-11 years) + age + + + + + +adolescents + Adolescents + Adolescents (typically 
12-17 years) + age + + + + + +adults + Adults + Adults (typically 18-59 years) + age + + + + + +elderly + Elderly + Elderly (typically 60+ years) + age + + + + + + +refugees + Refugees + Refugee population + population_type + + + + + +idps + IDPs + Internally displaced persons + population_type + + + + + +ind + Individuals + Individual persons + population_type + + + + + +hh + Households + Household units + population_type + + + + + + +code + Code + Machine-readable code or identifier + data_type + + + + + +name + Name + Human-readable name + data_type + + + + + +num + Number + Numeric value + data_type + + + + + +text + Text + Free text content + data_type + + + + + +pcode + P-Code + Place code (administrative code) + data_type + + + + + + +lat + Latitude + Geographic latitude + geographic + + + + + +lon + Longitude + Geographic longitude + geographic + + + + + +origin + Origin + Origin location + geographic + + + + + +dest + Destination + Destination location + geographic + + + + + + +funder + Funder + Funding organization + role + + + + + +impl + Implementer + Implementing organization + role + + + + + +prog + Programmer + Programming organization + role + + + + + +partner + Partner + Partner organization + role + + + + + + +v_pcode + P-Code Vocabulary + Reference to P-Code vocabulary + vocabulary + + + + + +v_iso3 + ISO3 Vocabulary + ISO 3166-1 alpha-3 country codes + vocabulary + + + + + + +en + English + English language + language + + + + + +fr + French + French language + language + + + + + +es + Spanish + Spanish language + language + + + + + +ar + Arabic + Arabic language + language + + + + + + +v_openspp + OpenSPP Vocabulary + OpenSPP vocabulary reference (custom) + openspp + + + + + +v_openspp_sppid + OpenSPP ID + OpenSPP beneficiary ID (custom) + openspp + + + + + +v_openspp_pcode + OpenSPP P-Code + OpenSPP place code (custom) + openspp + + + diff --git a/spp_hxl/data/hxl_hashtags.xml b/spp_hxl/data/hxl_hashtags.xml new file mode 100644 index 
00000000..4fab5270 --- /dev/null +++ b/spp_hxl/data/hxl_hashtags.xml @@ -0,0 +1,355 @@ + + + + + #adm1 + Administrative Level 1 + First-level administrative division (e.g., province, state) + geographic + + text + + + + #adm2 + Administrative Level 2 + Second-level administrative division (e.g., district, county) + geographic + + text + + + + #adm3 + Administrative Level 3 + Third-level administrative division (e.g., municipality) + geographic + + text + + + + #adm4 + Administrative Level 4 + Fourth-level administrative division + geographic + + text + + + + #adm5 + Administrative Level 5 + Fifth-level administrative division + geographic + + text + + + + #country + Country + Country name or code + geographic + + text + + + + #geo + Geographic Coordinates + Geographic coordinates (latitude/longitude) + geographic + + geo + + + + #loc + Location + Location name or identifier + geographic + + text + + + + + #affected + Affected + Number of people affected by a crisis + population + + number + + + + #inneed + In Need + Number of people in need of assistance + population + + number + + + + #targeted + Targeted + Number of people targeted for assistance + population + + number + + + + #reached + Reached + Number of people reached with assistance + population + + number + + + + #population + Population + Total population figure + population + + number + + + + + #org + Organization + Organization name or identifier + organization + + text + + + + #contact + Contact + Contact information + organization + + text + + + + + #activity + Activity + Activity type or description + activity + + text + + + + #sector + Sector + Humanitarian sector (e.g., health, education) + activity + + text + + + + #service + Service + Service provided + activity + + text + + + + + #indicator + Indicator + Indicator name or code + indicator + + text + + + + #need + Need + Type of need + indicator + + text + + + + #severity + Severity + Severity level or score + indicator + + text + + + + + #meta + 
Metadata + Metadata about the dataset + metadata + + text + + + + #description + Description + Description or notes + metadata + + text + + + + #status + Status + Status of an item or process + metadata + + text + + + + + #item + Item + Item or commodity name + item + + text + + + + #modality + Modality + Assistance modality (cash, voucher, in-kind) + item + + text + + + + + #value + Value + Generic value or amount + value + + number + + + + #currency + Currency + Currency code or name + value + + text + + + + #capacity + Capacity + Capacity or capability measure + value + + number + + + + + #date + Date + Date or timestamp + date + + date + + + + + #access + Access + Access-related information + metadata + + text + + + + #cause + Cause + Cause of a crisis or event + metadata + + text + + + + #event + Event + Event name or identifier + metadata + + text + + + + #group + Group + Group classification + population + + text + + + + #impact + Impact + Impact of a crisis or intervention + indicator + + text + + + + #output + Output + Program output or result + indicator + + text + + + + #respondee + Respondee + Survey respondent information + metadata + + text + + + + + #beneficiary + Beneficiary + Beneficiary information (OpenSPP custom) + openspp + + text + + diff --git a/spp_hxl/models/__init__.py b/spp_hxl/models/__init__.py new file mode 100644 index 00000000..e023c908 --- /dev/null +++ b/spp_hxl/models/__init__.py @@ -0,0 +1,4 @@ +from . import hxl_tag +from . import hxl_attribute +from . import hxl_export_profile +from . 
import cel_variable diff --git a/spp_hxl/models/cel_variable.py b/spp_hxl/models/cel_variable.py new file mode 100644 index 00000000..4437830e --- /dev/null +++ b/spp_hxl/models/cel_variable.py @@ -0,0 +1,58 @@ +import logging + +from odoo import api, fields, models + +_logger = logging.getLogger(__name__) + + +class CELVariableHxl(models.Model): + """Extend CEL Variable with HXL mapping fields""" + + _inherit = "spp.cel.variable" + + # HXL Mapping + hxl_hashtag = fields.Char( + string="HXL Hashtag", + help="Primary HXL hashtag, e.g., #affected", + ) + hxl_attributes = fields.Char( + string="HXL Attributes", + help="HXL attributes, e.g., +f+children", + ) + hxl_tag = fields.Char( + string="HXL Tag", + compute="_compute_hxl_tag", + store=True, + help="Full HXL tag, e.g., #affected+f+children", + ) + + # Import behavior + hxl_import_action = fields.Selection( + [ + ("field", "Map to Model Field"), + ("event", "Create Event Data"), + ("variable", "Store as Cached Variable"), + ("skip", "Skip on Import"), + ], + string="HXL Import Action", + default="variable", + help="How to handle this variable when importing HXL-tagged data", + ) + + # Export behavior + hxl_export_include = fields.Boolean( + string="Include in HXL Export", + default=True, + help="Include this variable in HXL-tagged exports", + ) + + @api.depends("hxl_hashtag", "hxl_attributes") + def _compute_hxl_tag(self): + """Compute the full HXL tag from hashtag and attributes""" + for rec in self: + if rec.hxl_hashtag: + # Ensure hashtag starts with # + hashtag = rec.hxl_hashtag if rec.hxl_hashtag.startswith("#") else f"#{rec.hxl_hashtag}" + rec.hxl_tag = hashtag + (rec.hxl_attributes or "") + else: + rec.hxl_tag = False diff --git a/spp_hxl/models/hxl_attribute.py b/spp_hxl/models/hxl_attribute.py new file mode 100644 index 00000000..9b247722 --- /dev/null +++ b/spp_hxl/models/hxl_attribute.py @@ -0,0 +1,50 @@ +import logging + +from odoo import _, api, fields, models +from odoo.exceptions import 
ValidationError + +_logger = logging.getLogger(__name__) + + +class HxlAttribute(models.Model): + """Registry of HXL attributes (e.g., +f, +children, +code)""" + + _name = "spp.hxl.attribute" + _description = "HXL Attribute" + _order = "category, code" + + code = fields.Char(required=True, index=True, help="Attribute code, e.g., +f, +children") + name = fields.Char(required=True, help="Human-readable name for the attribute") + description = fields.Text(help="Detailed description of the attribute usage") + category = fields.Selection( + [ + ("gender", "Gender"), + ("age", "Age Group"), + ("population_type", "Population Type"), + ("data_type", "Data Type"), + ("role", "Organizational Role"), + ("geographic", "Geographic"), + ("vocabulary", "Vocabulary Reference"), + ("language", "Language"), + ("openspp", "OpenSPP Custom"), + ], + help="Category for organizing attributes", + ) + is_standard = fields.Boolean(default=True, help="True if part of HXL 1.1 standard specification") + active = fields.Boolean(default=True) + + _code_unique = models.Constraint("unique(code)", "HXL attribute code must be unique!") + + @api.constrains("code") + def _check_code_format(self): + """Ensure attribute code starts with + and contains only valid characters""" + for rec in self: + if rec.code: + if not rec.code.startswith("+"): + raise ValidationError(_("HXL attribute code must start with +")) + # Check for valid characters (alphanumeric, underscore, dash after +) + code_part = rec.code[1:] + if not code_part or not all(c.isalnum() or c in "_-" for c in code_part): + raise ValidationError( + _("HXL attribute code can only contain alphanumeric characters, underscore, and dash") + ) diff --git a/spp_hxl/models/hxl_export_profile.py b/spp_hxl/models/hxl_export_profile.py new file mode 100644 index 00000000..f2c66672 --- /dev/null +++ b/spp_hxl/models/hxl_export_profile.py @@ -0,0 +1,117 @@ +import logging + +from odoo import _, api, fields, models +from odoo.exceptions import 
ValidationError + +_logger = logging.getLogger(__name__) + + +class HxlExportProfile(models.Model): + """Pre-configured export templates with HXL tagging""" + + _name = "spp.hxl.export.profile" + _description = "HXL Export Profile" + _order = "sequence, name" + + name = fields.Char(required=True, help="Profile name") + code = fields.Char( + required=True, + index=True, + help="Unique code for the profile, e.g., 'damage_assessment'", + ) + description = fields.Text(help="Description of the export profile purpose and usage") + sequence = fields.Integer(default=10, help="Display order") + + # Target model for export + model_id = fields.Many2one( + "ir.model", + string="Model", + required=True, + ondelete="cascade", + help="Target Odoo model for this export profile", + ) + model_name = fields.Char(related="model_id.model", store=True, string="Model Name") + + # Columns + column_ids = fields.One2many( + "spp.hxl.export.profile.column", + "profile_id", + string="Columns", + help="Column definitions for the export", + ) + + # Options + include_hxl_row = fields.Boolean(default=True, help="Include HXL hashtag row in export (typically row 2)") + date_format = fields.Char(default="%Y-%m-%d", help="Date format for export (Python strftime format)") + + active = fields.Boolean(default=True) + + _code_unique = models.Constraint("unique(code)", "Export profile code must be unique!") + + @api.constrains("code") + def _check_code_format(self): + """Ensure code contains only valid characters""" + for rec in self: + if rec.code: + if not all(c.isalnum() or c in "_-" for c in rec.code): + raise ValidationError( + _("Profile code can only contain alphanumeric characters, underscore, and dash") + ) + + +class HxlExportProfileColumn(models.Model): + """Column definition within an export profile""" + + _name = "spp.hxl.export.profile.column" + _description = "HXL Export Profile Column" + _order = "profile_id, sequence" + + profile_id = fields.Many2one( + "spp.hxl.export.profile", + 
required=True, + ondelete="cascade", + string="Profile", + help="Parent export profile", + ) + sequence = fields.Integer(default=10, help="Display order") + + # Column definition + name = fields.Char(required=True, help="Column header name (human-readable)") + field_path = fields.Char(required=True, help="Dot notation field path (e.g., 'partner_id.name')") + + # HXL tagging + hxl_tag = fields.Char( + help="Manual HXL tag entry (e.g., '#affected+children'). If set, overrides hashtag/attributes." + ) + hxl_hashtag_id = fields.Many2one("spp.hxl.tag", string="Hashtag", help="Select HXL hashtag from registry") + hxl_attribute_ids = fields.Many2many( + "spp.hxl.attribute", + string="Attributes", + help="Select HXL attributes to combine with hashtag", + ) + + # Computed full tag + computed_hxl_tag = fields.Char( + compute="_compute_hxl_tag", + store=True, + string="HXL Tag", + help="Computed full HXL tag combining hashtag and attributes", + ) + + active = fields.Boolean(default=True) + + @api.depends("hxl_tag", "hxl_hashtag_id", "hxl_attribute_ids") + def _compute_hxl_tag(self): + """Compute the full HXL tag from components or manual entry""" + for rec in self: + if rec.hxl_tag: + # Manual entry takes precedence + rec.computed_hxl_tag = rec.hxl_tag + elif rec.hxl_hashtag_id: + # Build from hashtag + attributes + tag = rec.hxl_hashtag_id.hashtag + for attr in rec.hxl_attribute_ids.sorted("code"): + tag += attr.code + rec.computed_hxl_tag = tag + else: + rec.computed_hxl_tag = False diff --git a/spp_hxl/models/hxl_tag.py b/spp_hxl/models/hxl_tag.py new file mode 100644 index 00000000..42c601c6 --- /dev/null +++ b/spp_hxl/models/hxl_tag.py @@ -0,0 +1,61 @@ +import logging + +from odoo import _, api, fields, models +from odoo.exceptions import ValidationError + +_logger = logging.getLogger(__name__) + + +class HxlTag(models.Model): + """Registry of HXL hashtags (e.g., #affected, #adm2, #indicator)""" + + _name = "spp.hxl.tag" + _description = "HXL Hashtag" + _order = 
"category, hashtag" + + hashtag = fields.Char(required=True, index=True, help="HXL hashtag, e.g., #affected") + name = fields.Char(required=True, help="Human-readable name for the hashtag") + description = fields.Text(help="Detailed description of the hashtag usage") + category = fields.Selection( + [ + ("geographic", "Geographic"), + ("population", "Population & Caseload"), + ("organization", "Organizations"), + ("activity", "Activities & Sectors"), + ("indicator", "Indicators"), + ("metadata", "Metadata"), + ("item", "Items & Commodities"), + ("value", "Values"), + ("date", "Date & Time"), + ("openspp", "OpenSPP Custom"), + ], + help="Category for organizing hashtags", + ) + is_standard = fields.Boolean(default=True, help="True if part of HXL 1.1 standard specification") + data_type = fields.Selection( + [ + ("text", "Text"), + ("number", "Number"), + ("date", "Date"), + ("geo", "Geographic"), + ], + default="text", + help="Expected data type for this hashtag", + ) + active = fields.Boolean(default=True) + + _hashtag_unique = models.Constraint("unique(hashtag)", "HXL hashtag must be unique!") + + @api.constrains("hashtag") + def _check_hashtag_format(self): + """Ensure hashtag starts with # and contains only valid characters""" + for rec in self: + if rec.hashtag: + if not rec.hashtag.startswith("#"): + raise ValidationError(_("HXL hashtag must start with #")) + # Check for valid characters (alphanumeric, underscore, dash after #) + tag_part = rec.hashtag[1:] + if not tag_part or not all(c.isalnum() or c in "_-" for c in tag_part): + raise ValidationError( + _("HXL hashtag can only contain alphanumeric characters, underscore, and dash") + ) diff --git a/spp_hxl/pyproject.toml b/spp_hxl/pyproject.toml new file mode 100644 index 00000000..4231d0cc --- /dev/null +++ b/spp_hxl/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git a/spp_hxl/readme/DESCRIPTION.md b/spp_hxl/readme/DESCRIPTION.md new file mode 
100644 index 00000000..0e6950c8 --- /dev/null +++ b/spp_hxl/readme/DESCRIPTION.md @@ -0,0 +1,59 @@ +Humanitarian Exchange Language (HXL) integration for OpenSPP data interoperability. Provides a registry of standard HXL hashtags and attributes, tools for creating export profiles with HXL tagging, and extends CEL variables with HXL mapping capabilities. Enables OpenSPP to exchange data with humanitarian coordination systems using standardized HXL tagging conventions. + +### Key Capabilities + +- Registry of HXL 1.1 standard hashtags organized by category (geographic, population, activity, indicator, etc.) +- Registry of HXL attributes for data disaggregation (gender, age group, population type, etc.) +- Export profile definition for model-specific column mappings with HXL tags +- HXL tag composition from hashtag and attributes (e.g., #affected+f+children) +- Integration with CEL variables for import/export behavior configuration +- Validation of HXL tag format (hashtag starts with #, attributes start with +) + +### Key Models + +| Model | Description | +| -------------------------------- | -------------------------------------------------------------- | +| `spp.hxl.tag` | Registry of HXL hashtags (e.g., #affected, #adm2, #indicator) | +| `spp.hxl.attribute` | Registry of HXL attributes (e.g., +f, +children, +code) | +| `spp.hxl.export.profile` | Export template defining model and column mapping with HXL tags | +| `spp.hxl.export.profile.column` | Column definition with field path and HXL tag assignment | +| `spp.cel.variable` (extended) | CEL variable with HXL hashtag, attributes, and import/export behavior | + +### Configuration + +After installing: + +1. Navigate to **Custom > HXL > Configuration > HXL Hashtags** to view or add hashtags +2. Navigate to **Custom > HXL > Configuration > HXL Attributes** to view or add attributes +3. 
Create export profiles at **Custom > HXL > Export Profiles** specifying: + - Target model for export + - Column definitions with field paths + - HXL tag assignment (manual or structured via hashtag + attributes) +4. For CEL variables, navigate to **Custom > Studio > Logic Variables** and use the HXL Mapping tab to define: + - HXL hashtag and attributes for the variable + - Import action (map to field, create event, store as variable, or skip) + - Export inclusion preference + +### UI Location + +- **Menu**: Custom > HXL (main menu) +- **Configuration**: Custom > HXL > Configuration (HXL Hashtags, HXL Attributes) +- **Export Profiles**: Custom > HXL > Export Profiles +- **CEL Variable Extension**: Custom > Studio > Logic Variables > HXL Mapping tab + +### Security + +| Group | Access | +| -------------------------------- | --------- | +| `base.group_user` | Read | +| `spp_security.group_spp_admin` | Full CRUD | + +### Extension Points + +- Inherit `spp.hxl.export.profile` and override export logic to implement custom HXL export formats +- Extend `spp.hxl.tag` or `spp.hxl.attribute` to add domain-specific HXL tags (set `is_standard=False`) +- Inherit `spp.cel.variable` to customize HXL import/export behavior based on `hxl_import_action` field + +### Dependencies + +`spp_security`, `spp_cel_domain`, `spp_studio`, `spp_vocabulary` diff --git a/spp_hxl/security/ir.model.access.csv b/spp_hxl/security/ir.model.access.csv new file mode 100644 index 00000000..b2626b96 --- /dev/null +++ b/spp_hxl/security/ir.model.access.csv @@ -0,0 +1,9 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_spp_hxl_tag_user,spp.hxl.tag user,model_spp_hxl_tag,base.group_user,1,0,0,0 +access_spp_hxl_tag_manager,spp.hxl.tag manager,model_spp_hxl_tag,spp_security.group_spp_admin,1,1,1,1 +access_spp_hxl_attribute_user,spp.hxl.attribute user,model_spp_hxl_attribute,base.group_user,1,0,0,0 +access_spp_hxl_attribute_manager,spp.hxl.attribute 
manager,model_spp_hxl_attribute,spp_security.group_spp_admin,1,1,1,1 +access_spp_hxl_export_profile_user,spp.hxl.export.profile user,model_spp_hxl_export_profile,base.group_user,1,0,0,0 +access_spp_hxl_export_profile_manager,spp.hxl.export.profile manager,model_spp_hxl_export_profile,spp_security.group_spp_admin,1,1,1,1 +access_spp_hxl_export_profile_column_user,spp.hxl.export.profile.column user,model_spp_hxl_export_profile_column,base.group_user,1,0,0,0 +access_spp_hxl_export_profile_column_manager,spp.hxl.export.profile.column manager,model_spp_hxl_export_profile_column,spp_security.group_spp_admin,1,1,1,1 diff --git a/spp_hxl/static/description/icon.png b/spp_hxl/static/description/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..c7dbdaaf1dace8f0ccf8c2087047ddfcf584af0c GIT binary patch literal 15480 zcmbumbyQqU(=SR05Hz?GTnBdsm*BxQ_yEH|aCf%^cefzHo!|s_cXxLSE;*Cueee5y z-&tp!b=SRr%%17JtE+c)byrpYs^*)rqBI&Z5i$%644SOWM^)(ez~2ud0`yw0U6BR- zLb8+j><9z%zUS}fO(NraVi*{>9t(ACCvAmK{3f>6EFe=`V=#-GwH=fi21ZcC%?@N@ z33ehk216`tgy_y&+UdwGOoiyQxE0tG>?FYE7BU_VU^Nd#brTOu6QC)bh%mCC8$XnR zHP{J6?q+Red4B@!uI#I$jJr&Mb9s0>iD<$ zuR+wn_Wv~g)v~hqXCyn2gCkho-3}~7rwVqob#^cT|HI*Lr++h%Z~%jxz^1|+Y#iLo zY(Qpqpdjo2_UP{z|J6a#%}Lf&m<$tn~GKO;D=HTYw;RdpEvGW4C`Plx`;h%^9lV07{*~I*>D8d~7A^Wd; z|IiAu{+(Sbi+@eZKaGFS%71$NYs&sb_}|p>|6Wz5CjU{BowI}0KTE*WgcWQBwg%fc z{Z$hCzm;Ta!tZ3^WCi{&6^U6n{ZAD^*B-wW$Oa-r=f-RbHUl|ZInfDg*!P87jt$pw{;L! 
zurM(Pfvw2pY|U-RrEP6IKvrN!!N2tX4+V7f|D%KdPxB1jp8uKX|M5a@AiMvz6QE@L z|EyqJ2X$LpD`5$cjSGmJUKMO(3U&ZHFp!(tnh1RqllIVYQ3J`EJCZv)f*pi3#3YP4 zY;_;(mw~W(F*95)Y)WYoZkRgrLS)eSvJR)Y$S4!fK zScE24BMTw?G63}=yN?Nr!v4s(L#bh+ z0QHoB|LYajx?X9+TnwfJwuDj{M>z;4bu|DB7H;cherVEncj0{^h73csRh5-&U)E;4 zNLVpq{=h+rsFoNmYz*8AfN`m{D6C^2%WV~zRAFNZuAXKcKMErci*PnF0ZSfM)erUu zjcjUMJ_wuF3RSJ9O~@Z4hhap;#(_0ma`J>1A0~<{s?m|hcz{e!L&u6Tp}I}Ep<>4f zOJS|^MQ_DPOkz?*AhrH}k<9ZOEt4`FAyRDqXjTP|E_#oO27Gr&f`y5OM@B1VqH_ES zCTweSMCx}a*0xU}@o6fA8_gjjy z2Q57xXmg+m(g6q!aM8mCkithJ--tyXkCjku;FTF{?B>(>FABGzSGUggUumv`+C6Ow zvd1XmI~#j#dG0vl>e;QtxGX?gJsdQ+{-4BuDt%|kxthFj<_dORK@Rc;K*$U=E~?kF zJ$(-vwj?T<5%x2c(fneoKTjS|rpBh!8`&y_y)z)7Hj@j%)+~SkVR8K<@`g&WZjo&G z8?wNoqyeOzOEhl;E4C^_e6^7aF#Fx~(z-&NxzGQQC}?L?Gl>qxwKg;MZTpfMvw^V{ zmT;>h9A?JFxNyIC1IPqQldk82>?{LtnMt2Xo$HmXr3gvbffJCJF_|;ZU)lTX#2_{h zNT=4@taez10pm@hvzTLIAAD(`*Y6XZr7!w3a5sy>KWlOvJ92!fyI0Yjt7_+Syy+$Q z9i0@K!{?>N+F!J-sDJMIV zySlF4rF1c1>K1)CaHBkwkwVV z_lfaZhdgZH%&PK>eJxwrWn!sr5&Gc_9Cr|XDCGA_XN{>#)>Qgl3%Uyi`^M@mPTT`? zf;&`{13;P8O-+u@Hlr4IZO)ivM_w*HE{G3gydPIhU7gTd{}##Tw;S&&d-&?A1qaWy zLlnn3TyAMVFPcpfZ`1wMt^$+g?Z(_ki{MSWsfo#KTB33CzU=9qQnoXtdS(mcmLjCY zalOGBnh*x}*Hy&3cD8}2EUr+55qEqP9$UCvz=o=kb9%C^{(Ki9<6A_yTJAVGBAyn3 zIGGLv4!o55o*J5V_xfbsyPk=kC$C`%S6?3qh!N5V(<2M#9p=&i>al1cGc#6pd37`_ z3RMpN=*|e9{nd~zZKGX@%J-K$=_&@x#D$&<8NApJ?i3jM!5X8abIiAPla~}@BE@Ep zytt_iw|xY%OQxngqE(gy8xY@vUMZuc7&hw5I)$M+5$X^P z;i3S7-Tgw2w#pV1R->>O;O~UyyX#p3>DD8rfL3FNO@kS@Uw?F5(eln`lA5WMkAVwk z6(1gr5%VDf8>tN;vdaPZYs8yBSJ^oba~WDr`qr8Oh#ok4VLQ3lrJrZ_Xm(T@FM0qa z&kxcByGv0F-Fx%t@9vZ7JP$}yAKpn-r^LhBTLwsS1J)bs6T{~SIQ6H$7qanXOrs1*Z5c~M%>RPFWj8X;g2@Lhm?HnEOmg0If6exM<_Fa9>!5P zv6(xpC9c)Yz1{ue6}vOIV(QK_dbu(^ad>yOhx?(?cWg0n`J-318#Q=eVZOiuW}A1? 
z=YKkEE?wkr+3_PaFv)gRxm)xjwl4{Gcz$5;$RixdVH2Ds+=H?$xTUn`QZ<#!D zWRP4okEG?OLnjctlnTlg5)kz*Yn=}m<^joJPN)}L??y(J86Fk_PaZ`{q?IKql37h; zDKAk4_|={_s%_q*rZ}MznUn?=QC9T$A!MnV>~b~n=uXQdTx6` z)C4lw2Vd8?lJqhAV%eA%mg9eTcNjsG(q@@$etAi9{uE1m1hj1!jelwHV;%czJVoYcrZ=vANJHDiH$G) zek&XC9nl=^c*OxElr7lsK6+aN5c^^)p0n;58u$EC`TpvB9KEV=zK9QdPpmKCHANCK zliMaTnv1|oI8A%NctUtQg)_&D9wYY|Iwm&nkURyL3PVzKxQI{K6C{+zFGk`XQGDw} zv$z(!mCfUPd6h*?RowKmNy|p2Mri1laA2VU*^f5fL8Ne4IPc)ybITH=)f$-My53); zfsHD{N>w!&UkTyOxD>>Ey0g^%;L)A?P_Nyhcd+dwhH5DN?-^*`{IEk;(NK z+#s-OPFRbbX|Uo9=Y@)pgD@SCE!UCmYYVmF+$i4Kgz2lR3|L_DxX-u)DSS39jaf=r zT6deEL2ULQJHvU~(|2vtWZ zLueKkQ*#|Bj9fi4c9{)Y&z^&}>=~e5Y-HCkQ7Mw zXCH5+<@YAqb|zki@0M(%ccdpqTJ62ZPg~bZ9%dCF9k!S%_lroxG?x3NpXG4ZBn}!6 z+=_Y!1xqxCN~6zvXAyVg)}YKk4ib#`<>h_p{S$I>vi*LYB5ST+3mf_t)@{}Ih`};0 z29&^wWHWl>8kd64(wY}#hrVQAh&s7gbeHd|IZAStUZ&PSb3$B{PvD=+ zQkSe%LJ0K>h&Kj#S8^)h9GXvu0IZ=3Z>3DSi8{T;a z0b*muMkNGwF;o1RwtCZDg#97P8vE(~`hga&m%k(gTR6qI^gs7yTIO@ay}Te)Hx6Eg zd%2g}G&u)zqqNrD5nG*q8XFK&z9RjIS(Q6DYG^p!6>M30Ef+5|le|Ud>m9T2((_H@ zmT!+5i$HN{<G+1EEoc4AS9vm>QDZpO>K6M{G^b)txOnqNOvTfV zwR^y>(e?%b$$pu79ydu6M>3?3(>(2u(=dN7HK{92%u6nm^iDzS@)?5XBIF{B#CklVg~i#wA$0R9A~jYSgt2E^Wysxcp!2- zJy+&-mzNYaZTSq9cjqTE4)av2f-f$0H4?(;)nFcK>Cqg8V1?|=v!Y(*^*0|9I;_Rhhiwc^cQM&I zs2P#p?_{f-yhS#$Z%c?knJ_g7Zhv%L*{tf?J?E8j94bImWV|QMY5x(sTCL_62EdT)xWZ#KY;8qi zzh&-cv3YOkp`;b}=k-{kwTe#GjC6kh`OVE6++^#^n`2$=$t@u!WTiOfEEDax{k6!e z@X;4kniF^87>l=U_UXRvHKDfp>vDPBi03g%yHSkk525SM)oqOWGqYp4$RD*p_K`zZ zX5;Tx^`n&DE+;ujb3D5nIv6Mom3jfVZ5mIfq!jf|AhPk0p*BCT0x8R9-BE8{1h;FQswTy?v#0}-38B!kczy{x;$7!io^DZ=IcJY##vEYDk$eMl;r^~T9QM) zQtubaNKNtRwxEV=;ce#Z4d5>nKyB3}bT9N~-_eBgFflJtua+a>1#3WkFbOfK>wALd zZQJFC>tFY+A8cE=I=Kr&9)?klwAYSC8EBln7`QBc`8b2H&Uw!rU@nG`1p+M z_PaAlj^s@QS_#v-S7a>mvT=DTFWy=ZjjGOXi5cF@lwE;85aI6_m*ok~r?Q!5Pm%ZT?$+H*@!&OVYR1ei_3V-7Rug|y! 
z6$Mw3zfY~M&=eRqCgXBTaB?UI^f`~CMbB=}$Mp5L0V>1!a|Lt#a+4g!0f$6;UDKhZ zlL^j^u4Vmh%}jY4)Cwro5tJ1AQGq1f_B}RfX)D2nMS91)Y;HB$dH?2hjtC#Za)<9l z3Xk+rZ6knNtjm9pc2D}(wY6@|ZX5l(cbwO2oUZoqp~U011TV#IhMJfGfJ%N_y5pEr z$$IA>?#}aHx9?aiZ|z18x!q7sz$jnVblQi|AhW85+>7y6btIi|OvFBI?tT(4eXVCg zeP8}0!iu@r=PR>rJ3wq*!=CC<_ihZL5#EG)I$$%%kh7e$zQ1S@xv6Or7!_P&%MPMk zACVS&BE)NLV(qN8MOV5C`xbf8IbN#MmeEcdWYA$OwFX;!1z7PC6DoHe>+fVejhMzC z1S8qnm<(G9MXIvx3DE3&Qo+7^LNi#xb$$M2LL^jXh)cbb3h%G(i91(WK}lj~^MOAm zA?4cXvn!=%bKJ^P|1)ix8c1H28Z^2L({~B=9);^+7Yn7*L|+tIAJG4NPUMk$gC5&z zQeEbR@FbxHdE`+3^XSBSPAWGx5R7Z8yZbLJA~9Q9x(L@tqt{q61Em+ikqTux8^kZ8DQrK4FB3r5Qx$xHG!>D| zA6?vk{*>E?Mj18vgMk%hzN`ZwTFY1ltHNF5S%);i;&*l-ACcsI3pnD=iX?}s!s}HC z1As^77XFUGAm4O;CtDdaLT6%hOQ>4n&pujtYU7jL7onxKBM-_>lW}>$dS5% z{BRX)SUzjTUq2m{I3;m4ULG3n!EI@PR04_rJlShCF+6IG-&{VfY0G+|OLpY);~Tcs ze2Y)Mw|IXXzocJ3+sL=yh{1EwAusXV3dh~TOl+|FVY|@xU{j6Ef?(e4;reCW_43yL z<76IskRMUIl)Uop?JzOW;#+p#(crQzC^Ot~KFDqBhT`=!Rk%4%b1(y9h4j`weN&J! zbyYm>{7aU7#kdNy2Zqx-hUyr=|4NbL%;CXS<-w%jL)X z(3_2Lz*r;mD9!Y`&iV2=x+?sNv)b*Cwn}{YDuYzmi4vn!c+r}V?AzoFZAreI-4!3+ zY{Td}nm@04BAKyM->B1)oKRD#r|^W|jYVjcSAs1YI=xx>$jpFe*KbLKby=*pW)eFs z3ZSXO09)sD}&}V6ipbE(Y~?r$YTn{V-9};R(?Z6wH9Dqxnt8t&~=!h3e%FyMY4}MkN68X-2kX^|Im5y$c6sN{v&x4l_54O-p{PrDCP` zpOp-`$#WIx;mb_%^9f@!#b^Gv=)X8dl(G-ESKr#_UVal#eY9!`MLqLs4DUCH##vQR z*2n?o*KjGB*u!M&?xGOuHa@Hn5s811Ma6+Zz~-qI^cWAxkz$M9EYF+65Y<;MSmJ$H zrmYW$Ykr63;#?@3U~a9Yw$VB(W+T|LSC!M@RS~PJ#aBNlsh@MN)U_GZ+y4ALdVH-Z zeZ7rMl*xi!f6B*qX6Hr-YTWI3@7e|R;u4nUs>YIecpOF-fke*=0lHfETe!@N?>>DK zH=;xe|L}n!7YQPC**{jgAE6=E{~Z{`{~?;C(Z&12K1p^KRB#YWTRU?2RV!>AocDk%*gKH;(HiW`{1C zLgUncZHb`P0zyddG&COjHi2(%mgVv|gu%=`hPvnQickVe$8=lkQe4}&0*&it^=Vd~ zVz5rO$n;=raC-!!5NB|-XZOI{gu$ai!cKY`c7x4qn^>9w9*^aS`tLIdSOvMcwHy)z zisz9h?)wgaHN^ZNO1m|OBga`a*37=gS%}sQp9b3`#|ZInRQKnNUU+Pz_?9%$FWdS@ zDK<8SL9C$=vFNfCZZ*J(vU|VM+)OqeUmu(7t6G4CEYvRUzK*`Qc@f3dneu^f+iG!g zxv+3dL+uJwWvD@yd7%RLmAuTRViISB>GdFBTIdcF28A`w;mJ|!FUG!hkwvww>N>lf z{H={Dx0PPqaV^{;baO8&Z#4W&_23HA>#O7j4>~jvphax5{G4W932b+Oq40dauN4&f 
zHNyo<4ks5vV~{U|A^h&ku)Ss;0}g#CCAB3 zx!5?ck zw{=3Qkp*j2pk4kf)hQYui~#aNqul$soANTlEt(Bg?n5v;dVgpctq zgK8zA*my$SKTIf^aU6WAcAVx*VfEg7ZkR4Xkr@Rqgp~nl)WKhG;{9Wdad0u6&{I#2 zxKYvs;M&vr=pb8WY#((GbJMo#x zxUcc)yW;DGO<4}gi6di1&45IQZgY_)!A;*)F;lrKSVH5fXFw*)gR$$6cTNB0*>AV^ zw*?Qj?T1Fkol|$DCNdN;)9*Q?6o(#96gu%a7X>rtoCf7n-ECFW5M|6Fal%oQ_HyFT88UEWBj-cYRmoJO?h1i zO8Pb`owZMsyI;28tb{Eo<>GSuU*PNNxjvSV(T~f_NvO^Dd~+Bv4RFyUso1bz_tFj% zCD1oMN-R7Ol)jcmv3xpONAc4_)~6O6({Dh!!AVxU&q++=$T73FoVhi&?s_pYN1!5s zSLaZGTy$Mp1n=}=+x6NJ7#4%I%HoA<%SY4XdQFZO;2iFiQP0678T*1q9`dllr^)b=7CHG-dsj-%14Er*pm zRd^>8M#r;=H+aYIt_QD=wbxFhWWMQQ>)ENMK;y%e z-Iu6Jt^6|6l4x)u>Ylp;h!pn4O+sEjgtk(?U5Hp84IOs(ACPd#;dKgps1N!cG}yQ-Gvsh`Zg?5UQf#j}u^uV0^fBdXFH8Osx2Rn>nD?ts=VM5s(?3r8fR! zJ`WX_!j}fLK<(%2=>n7ezAMSisdM;Al^QJ_vPLj;mPAD$I~PIuyU==s!xUY zodiCv+RDXwU$axLZtbz}8BHq_1XqHo-^Kx4+f%NMl&->(9MD7SO zj&Z#}?1hK1F$*vE4Hl-52+kbud@c@%{KDPxs}pYe1D656Fec#qx9+xdyZ42hGFio=?^)UY_>^ z(>JtY69@hM-~dl%4gVj2NS%f*G|0Te8IlHlUZ{1k{U#Aat)_Xldr;o1s3ZVmargPD z;rI1QJ?8u0>5}@tQ>^!bMR8(pgdU-=nVzFZN}3-}d2iu(c}?B!g+r&S-sFg(f%#=% zzo*;ppCC$j0$qWo20Ac8Gv%A07eM$IXBHv$ov2<=J=H-@-^-4pGZ02IribPegl|FT^(ObV6vO4);?$6A_cuA+Vq1WmKIXgG`?%u zrna{Hm7|qSZ2EYj-pae%klBl5e4Y(Q1~p_8K*?L8**B54K6R1iQ(L|wGo#bCl5%MZ z{MaKF{!lpQcY)8@^9p+-R{^~zI?PY8%s*F`Jk24WY@RNKU0ezwO!ekJFkp|~0(i49 z_o5;d+*Sc(Jxsf-=YV#pfx^q|3d>HKjaXhv8upfShP@MxO3ECHoT?wPg+rAJ6j6d% zuauS&I`}i%EghL!ET5Xxwzd97;lDf-pr|@|G8SGFIUE-hbZa?YaLw!-y(k#t(PILzr}1;;g9@KM&6c28i1cn_xi z(F2R>(iI%Xx#oN~+xepmM0U{~Zb-ADBKO>klUgz|STaYC2~5Jw-3Rp*0M~QeAK_ zLT0jdy1u+74qNvm@lVU?i`<{VyiM-Y&YKwl`Xjzk0A)rN&XTzJ%RhJ_zfDfUp6RejT}_&K~L%hzXRUt_YZ--idup z{Yr*e6)6k#)Uosm3Dq!P+F%<1B=Fb-hzMKL%lx|uDvf&tWb2JnpRL}zSR>)WD&oy}+RNe&Hx|`=VR=Wi6 z7&fK)_A2^4+$>xJ4og%N88LV2S%ppZIE zH}jy~y(@yAt|h*1Nxup80`#-q*0us&eb+uNNliaG@F!bj(_qP@^T>u)(1yV%FpQ$n zoKE3aW`7m0ClO~zsXnJn<$2eljws67*~7k}IRJrorv^i1N>PKfyeLy1>m9%`U>1ap zV;J{k2lR8fH=dT%$B_tRpR2BUFNTgQel2SkW5@I})FPn?lSPtXkB>FA*)4J8-*uAW zCj}gqkZb2+L@sJuIUggVf$OL;Y>9EQh7-fNqMs=W2B_3h8cl_69%LDsEY$=;9~~S` 
zMh@TOiRbWVES8&JU#7~Z$xYEa`to)$0DF2z2*5Lsl*Ex<_be}5`*h@>p^QK!M@P+% z#{3!j79}}Lm5Fr$lPZBYi+=zlA@aChAd_LxVid4#ykJ+4hoZ1$en6D#@EK`u4o>V& zud!SQXGsUrKUS+``^EDi4qnc;`NSp8QTiL1dq1V|9XIXS zV;zJb0ww|#p08c?^r4SaJIza(jxgVH0p`+7SR4;gt3y0wS{a(dC@t93kb(EUJh7r& z7MBx@f$B+}QZfvbYQHp(Lu{6-@=K)G)# z;RhYWAL`WxFppsry{Tk|`?4(3?>~%ESH%KE zvcS^HtZR~v}xc}=m zvR>5rLTBTsUDrd2`cEyI1D3J_?_lI|P-a1-O+Q07RS0!rKToiU|Hn8yPY>0P*kiZc z6(Xfc;fiU?ES|Vm+ks*Vpm_tejb_d-eAbc^lTRL@sJAyiWcR9{&$P+wgPs~tFZ!}l z^6r|Pg5#quRe6tZSsl$ggp}?@@q&MP50oksD}Nwf6Z)+xqSVfwk?b#H5FhXn;mW?g zee;BWj^!4}gGSGiNNN?)^t(tIj;X|PR|DOk=*!w+gnJufT-E(`1wkOySh?PpR^$pf z=C&Fm7Jc|imd4*ZU&i=Zg0L;lkL9lVe!*P|<`G|EeP!OfoDbn!NH&?6Z=CV3jYg|# z?BpJ9lL>ALqBI(XWi4d6aqMAxVmN!5cj;efWj->$d#)NEJJ#<|R^9vcL-0&M-$#eJ zzrJyDNSoZz;=rD3V-miQ`OdMVdl2YHgHr|zD}9~CE)C84Tc1J1$`$3U&wl93G=jXD zZ9mA>7Sd(Tk3uUEial1UOn+{wlLde%u+wNNp8GgWG9I7a!G8;4$o z&2Ar8?dKiphR(Scds1)b80|OkURQWunL*dL1lfeu=EcspYtvf6+Di-L{;zd;19Afh z3TKDBiw*7_i^M3@x(AL@A~gpKShwgYD^G=;gxS8@9O=!cILWlyvqzha!M_d-1^uHa z0?SWjk&$Rw%}0NVm|eELTYj+3)|1iojv8};RmX+q5PG0x0z#`}9+*fyQ2{%ps7U;nnT3i34#>rSn2@(?>~%+MK$^b;eyk>j`K;Pxxt zUp)+`Wwxnw)l0~pdDmBNFbxO1%N1e|?`#a-wevf4WLUA6I)pOIM44FJ_75}Y7% za<*RY2Q7gH&(-O~t*m~}u&qGlDp4yW*3(ZHUi^}OdM%SXXPZjGZG(Utpil0LdTTRnCpSa}-t+SE`GR5a05{VN*n65{~ zi+7QCL&nSPW{W|;T=bXC(S}yeza@Zb%Y}M>bqdbK(|tE@kxUAbk*YcsUAYWuYwGL8 zXSK~8GsGO2jDT6{A~I|(i?tJVY;~Ikn%nJ5=u=PiI!-cViCVec8O4!_tVPC3-)Ziu z0Zoc+qud@e>ES`yL()+w8?FNF%<&fKS}whZL<|P!ZzL-mEZ?rOr|+*v^0EA!)!E~O_ba%&;*9IA zolizsa!TimzSm(GUWK++qz=+Ik&+@820c#?Ztm%XCE>V2FG1_;7W{V>WIW-d<~qN> z{)|8qXh!q-b2TG1AMYIt@65s?DEzUAV}}1r(M|F5F1#~WsH5)G2VY3OLi&0;my9QM zL);fdhGxx5^-4^Cd$-&mgc9N1BdV&j%1ih|7-dd@-0mFO&5E0iP^T<1nt~)(*5+P`KrfMS6pkxSQoNXO}tH@;S*V@zdXcUsE&Qh zkoX)6{0fsMPULHE!|ZD>_SPqK?8M}^w1UeW_$&2kT$zqS{*Dl$>2{rq^AAKf+$3I4 zslbVh%{kmT=4(zI?%M8hIVBDV0c+GUi)Gr*qmoMBmxR}%K_R8vtBq0#&Ln<8D%dwN zX>kpAbVWC%Ox9N${Hjz6(^5A2n+f1Ik0GeHcLj`&aX>$e34*En8Q{+qdkxN`e0P!Q zuT;iYl}dM4*Q0MgBHJ<84@Drs)lj-ad^2LCL9)}-LW5l0bPW}DSE?e=%7tHRP6c!f 
zCP99CfJmiG!~WA`Zs>WX_>h?A{&2eO`K0L$B~4a>l4;-RvWE$eh*xW9ls}c*r%2m& zhNbWPIhO^{^mI=usAMI#22L*o5en8{Hbu|a4~HQ9hIR0+{~&iYEP}?yfr8%s`I47J zMwZl{wRZeoXI={s$a<8gt4*Hsx&iJrQu%P^vb{~RDum$htr@A?>pqxhJV!|gGX zUL`*6%=J@W#QW;LfrYA4&d56JDBXjn3uVUsl49ZLp=uN_rZPtr;;F^iL`u&7(bYYE z=-J{N7h1bT#haD>N0mi%ys9&r^nC9XKh(_H-B%M1HioTc@Fodl-(@UPAvoeevvF5M z;u?_+EkqcJFxApR&f{>;#tk41X4PLBpc|{$-TFD}ZVekXDPVQ+63XB7XBQ-8=C;P3 z^%)ycbSmcLP%&N(tleOR42l01d>VaW(oyOFt;?XYt}bL$;8)^3M}APjS8m#_k+KnP z&zhAc!sRm}|8kYN?tC#ptdd*2*cMd_z!=a0ogK@^%YBXyrw*k^hJhtb)UY-Pp|U`b z;vm3-f2h$+A&q7+M}Mg-r9>2BEm^YPNmZ( z*7I4&!nFAzxpw5$n0?QdSE`^*s^a6@SRrre`i+>=SLtxw^z-@jraYqw@bSip;u!dK zTL9hZVjx|G5={P{9@`(L2W{{d>D%clZO4f70pf2!tc#MFU?)YLt-?Z9$-c2McL4VN z7W9D4WMOAN+6=I1Dfa)8xF9t6=O(>9LB!e%vOnrk?M0> zhwcO)UQOE|!|+=@H*wsyK!gv02uY?=#%_C5C4PYHuGzw%hucEDs@DbbO_Caz!aR{U z+)TI!k?P4(-i%WA5m2zQmZE^K6<+p?B|X5EGq$zw9(PfkANGFIjOw2MWg zKzz_(5iAbl)Py69NJEsQh^vxIDgheWS-`flG+rfqdEJahS*YUq)RCw7wJ6IA7i?_T zbD!-Qf(p&XhfA-KFoYvL#L~7U6T{tD%|dbL)o=N6;2}mx z!H~)1Fa$U)<8*lRd8*EEO<$_82C=Yv=lCg~$8GQ49)$Nx5fJEEaxF zl)u`I99^+<`OtY7_q=-`^1k9=uN<@9D*Adv0Q2^am|DSo=F?vA0J6!bIBOyEjpJ@H z2*UlQP-z!NN@6biXcIsE-B$>G4p#Bsxw4!W^oDs9n-adqf1greR# zfARMgj5m9@`A}9Oc~h#WMos)V%?-=nk`+S6=Q3Tqj&FJVY_lXU-j8{UUQwRer*vNi zfYU!5rO0Ef|MdN1vc@5-WmGYcr9CI@`kiQ7RL+ztb22U{WAeB5;o6w`-4GP9`W`>S z&_}b=Tjc-o#5;+YZe(ff8d~EuaP3uP~tc86jg5qVOZ*{cwJGU z(V#giqR;*#}M7(H=WegGj8QE45StkwQ)t zkDqA#;#%akszszb-d6hC&Y>(@IusF!_+GjwxIeDH(7}w)oA7)sg+;iwNG45>Jl=*4 znht+k)I22GMQiXwNWP<7d0VRrHC&g~daE&5*a?1)=?cFU!1v)>Lhov{i~V|%SV+9X z7((>eXMfQ-lj3T*{T)ezIo7*te0-jq5m672Z%@7nd89JjVZ=_Bbo1hLe3vR5GZ8VQK$3BS3rpv(TI z*if``DGY`pJFPa|qyC_%M6lc!v`aS!?Bf{jCRy3h2>YLHBX-_Z0cNP-YKG+9aVn&&bOWM*j$kk8_d6 z?(xLgln?2|OMK3fRpgLJC=#$Sl$ZdT<~F@JI^%N{SsMK=7C#~w8JCp|ODKUjfulX0 zRNnimv2(P`!_|JMWw#2*v%0*WmV!FHXJnXm$FI8bV27U>i%M0TS`CxQy!TVI4+Hku zCU|>U(96OE+nSptiO19IE`KjZoFmE96%r=Y#&G77AMX8@(Ad$co7FH1**~KH7%QV@ zq2D^0XG`W%Kwy))B%jtc34_bP*&~!hvXkx2x61x?cm8VL+eR&j+qieTj 
zPcf!P__24db-NUOd7qw4jxNS~Rn}k`w;L-!+JMkh*E38;hxBHxU%E}SZ(^oQnTt9( z6U*##{JUsmtt^A>6&UNN5mxBooYco1=6i8#6YtoyZl1O{hP>>^Lrts-xuXYNTZ$>u zpfaVW+VhuTa-W6(Y5#`hX)X;5E-i}{XxWY&i-0|tDN1{4YkvF|i+8ibuT!lOje;w< zkwW?d17jC~Qo*}a1btjLC$U87&ALRfBUk{XiT&dcIexY(=W<~(r-<*5(6%;&Rm^bw z25DIcIe0Kk;h0MuZVN`^O#>~4>J*7fwa5457~M`DW}CLMhrohubV?aHB0*q%i?F@) zYwum|^K0)Lu4E}LYfhYog~=@Pv>I86X>U>2n?#DFw@m4G^1i2s(0@%DkwFgxASub&ET6!HG@u+jB+p_yO(GoOV3#Nw9K0GZvg&5PWug{2eB{b>*22oK9 zncm+N91?M1gpr#Brp6}vt7WNs#8Bn}aw1X4oh$4)t6v( zbHB1*nkJIRlGpzHfGgQqz$g + + + + +OpenSPP HXL Integration + + + +
+

OpenSPP HXL Integration

+ + +

Alpha License: LGPL-3 OpenSPP/openspp-modules

+

Humanitarian Exchange Language (HXL) integration for OpenSPP data +interoperability. Provides a registry of standard HXL hashtags and +attributes, tools for creating export profiles with HXL tagging, and +extends CEL variables with HXL mapping capabilities. Enables OpenSPP to +exchange data with humanitarian coordination systems using standardized +HXL tagging conventions.

+
+

Key Capabilities

+
    +
  • Registry of HXL 1.1 standard hashtags organized by category +(geographic, population, activity, indicator, etc.)
  • +
  • Registry of HXL attributes for data disaggregation (gender, age group, +population type, etc.)
  • +
  • Export profile definition for model-specific column mappings with HXL +tags
  • +
  • HXL tag composition from hashtag and attributes (e.g., +#affected+f+children)
  • +
  • Integration with CEL variables for import/export behavior +configuration
  • +
  • Validation of HXL tag format (hashtag starts with #, attributes start +with +)
  • +
+
+
+

Key Models

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
ModelDescription
spp.hxl.tagRegistry of HXL hashtags (e.g., +#affected, #adm2, #indicator)
spp.hxl.attributeRegistry of HXL attributes +(e.g., +f, +children, +code)
spp.hxl.export.profileExport template defining model +and column mapping with HXL tags
` +spp.hxl.export.profile.column`Column definition with field +path and HXL tag assignment
spp.cel.variable (extended)CEL variable with HXL hashtag, +attributes, and import/export +behavior
+
+
+

Configuration

+

After installing:

+
    +
  1. Navigate to Custom > HXL > Configuration > HXL Hashtags to view +or add hashtags
  2. +
  3. Navigate to Custom > HXL > Configuration > HXL Attributes to view +or add attributes
  4. +
  5. Create export profiles at Custom > HXL > Export Profiles +specifying:
      +
    • Target model for export
    • +
    • Column definitions with field paths
    • +
    • HXL tag assignment (manual or structured via hashtag + attributes)
    • +
    +
  6. +
  7. For CEL variables, navigate to Custom > Studio > Logic Variables +and use the HXL Mapping tab to define:
      +
    • HXL hashtag and attributes for the variable
    • +
    • Import action (map to field, create event, store as variable, or +skip)
    • +
    • Export inclusion preference
    • +
    +
  8. +
+
+
+

UI Location

+
    +
  • Menu: Custom > HXL (main menu)
  • +
  • Configuration: Custom > HXL > Configuration (HXL Hashtags, HXL +Attributes)
  • +
  • Export Profiles: Custom > HXL > Export Profiles
  • +
  • CEL Variable Extension: Custom > Studio > Logic Variables > HXL +Mapping tab
  • +
+
+
+

Security

+ ++++ + + + + + + + + + + + + + +
GroupAccess
base.group_userRead
spp_security.group_spp_adminFull CRUD
+
+
+

Extension Points

+
    +
  • Inherit spp.hxl.export.profile and override export logic to +implement custom HXL export formats
  • +
  • Extend spp.hxl.tag or spp.hxl.attribute to add domain-specific +HXL tags (set is_standard=False)
  • +
  • Inherit spp.cel.variable to customize HXL import/export behavior +based on hxl_import_action field
  • +
+
+
+

Dependencies

+

spp_security, spp_cel_domain, spp_studio, spp_vocabulary

+
+

Important

+

This is an alpha version, the data model and design can change at any time without warning. +Only for development or testing purposes, do not use in production.

+
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing detailed and welcome +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • OpenSPP.org
  • +
+
+
+

Maintainers

+

This module is part of the OpenSPP/openspp-modules project on GitHub.

+

You are welcome to contribute.

+
+
+
+
+ + diff --git a/spp_hxl/tests/__init__.py b/spp_hxl/tests/__init__.py new file mode 100644 index 00000000..0bac6434 --- /dev/null +++ b/spp_hxl/tests/__init__.py @@ -0,0 +1,5 @@ +from . import test_hxl_tag +from . import test_hxl_attribute +from . import test_hxl_variable +from . import test_hxl_export_profile +from . import test_security diff --git a/spp_hxl/tests/test_hxl_attribute.py b/spp_hxl/tests/test_hxl_attribute.py new file mode 100644 index 00000000..a2a6bb0b --- /dev/null +++ b/spp_hxl/tests/test_hxl_attribute.py @@ -0,0 +1,203 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Test cases for HXL Attribute model.""" + +import logging + +from odoo.exceptions import ValidationError +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestHxlAttribute(TransactionCase): + """Test cases for HXL Attribute model""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.HxlAttribute = cls.env["spp.hxl.attribute"] + + def test_create_hxl_attribute(self): + """Test creating a basic HXL attribute""" + attr = self.HxlAttribute.create( + { + "code": "+test_attr", + "name": "Test Attribute", + "category": "data_type", + "is_standard": False, + } + ) + self.assertTrue(attr) + self.assertEqual(attr.code, "+test_attr") + self.assertEqual(attr.name, "Test Attribute") + self.assertEqual(attr.category, "data_type") + + def test_code_uniqueness(self): + """Test that attribute code must be unique""" + self.HxlAttribute.create( + { + "code": "+unique_attr", + "name": "Unique Attribute", + } + ) + with self.assertRaises(Exception): + # Should raise constraint violation + self.HxlAttribute.create( + { + "code": "+unique_attr", + "name": "Duplicate Attribute", + } + ) + + def test_code_format_validation_must_start_with_plus(self): + """Test that attribute code must start with +""" + with 
self.assertRaises(ValidationError): + self.HxlAttribute.create( + { + "code": "invalid_attr", # Missing + + "name": "Invalid Attribute", + } + ) + + def test_code_character_validation(self): + """Test that attribute code can only contain valid characters""" + # Valid characters: alphanumeric, underscore, dash + valid_attr = self.HxlAttribute.create( + { + "code": "+valid_attr-123", + "name": "Valid Attribute", + } + ) + self.assertTrue(valid_attr) + + # Invalid characters + with self.assertRaises(ValidationError): + self.HxlAttribute.create( + { + "code": "+invalid@attr", # Contains @ + "name": "Invalid Attribute", + } + ) + + def test_code_empty_after_plus_raises_error(self): + """Test that code with only + raises error""" + with self.assertRaises(ValidationError): + self.HxlAttribute.create( + { + "code": "+", # Only + + "name": "Empty Attribute", + } + ) + + def test_default_values(self): + """Test default values for HXL attribute""" + attr = self.HxlAttribute.create( + { + "code": "+default_test", + "name": "Default Test", + } + ) + self.assertTrue(attr.is_standard) + self.assertTrue(attr.active) + + def test_search_by_category(self): + """Test searching attributes by category""" + self.HxlAttribute.create( + { + "code": "+gender_test", + "name": "Gender Test", + "category": "gender", + } + ) + self.HxlAttribute.create( + { + "code": "+age_test", + "name": "Age Test", + "category": "age", + } + ) + + gender_attrs = self.HxlAttribute.search([("category", "=", "gender")]) + self.assertTrue(len(gender_attrs) >= 1) + self.assertTrue(all(attr.category == "gender" for attr in gender_attrs)) + + def test_archive_attribute(self): + """Test archiving an attribute""" + attr = self.HxlAttribute.create( + { + "code": "+archive_test", + "name": "Archive Test", + } + ) + attr.active = False + self.assertFalse(attr.active) + + # Archived attributes should not appear in default search + active_attrs = self.HxlAttribute.search([("code", "=", "+archive_test")]) + 
self.assertEqual(len(active_attrs), 0) + + # But should be findable with active_test=False + all_attrs = self.HxlAttribute.with_context(active_test=False).search([("code", "=", "+archive_test")]) + self.assertEqual(len(all_attrs), 1) + + def test_standard_attributes_loaded(self): + """Test that standard HXL attributes are loaded from data""" + # Check for some standard attributes that should be loaded + female_attr = self.HxlAttribute.search([("code", "=", "+f")]) + self.assertTrue(female_attr) + self.assertTrue(female_attr.is_standard) + self.assertEqual(female_attr.category, "gender") + + children_attr = self.HxlAttribute.search([("code", "=", "+children")]) + self.assertTrue(children_attr) + self.assertTrue(children_attr.is_standard) + self.assertEqual(children_attr.category, "age") + + def test_code_update_after_creation(self): + """Test that updating code maintains uniqueness constraint""" + attr1 = self.HxlAttribute.create( + { + "code": "+attr_one", + "name": "Attribute One", + } + ) + attr2 = self.HxlAttribute.create( + { + "code": "+attr_two", + "name": "Attribute Two", + } + ) + + # Should be able to update to a unique value + attr1.code = "+attr_one_updated" + self.assertEqual(attr1.code, "+attr_one_updated") + + # Should not be able to update to an existing value + with self.assertRaises(Exception): + attr2.code = "+attr_one_updated" + + def test_attribute_with_special_characters_fails(self): + """Test that special characters in attribute code raise error""" + invalid_codes = ["+test!", "+test#", "+test$", "+test%", "+test^"] + for code in invalid_codes: + with self.assertRaises(ValidationError): + self.HxlAttribute.create( + { + "code": code, + "name": f"Invalid {code}", + } + ) + + def test_description_field_stored_correctly(self): + """Test that description field stores correctly""" + description = "This is a detailed description of the attribute." 
+ attr = self.HxlAttribute.create( + { + "code": "+desc_test", + "name": "Description Test", + "description": description, + } + ) + self.assertEqual(attr.description, description) diff --git a/spp_hxl/tests/test_hxl_export_profile.py b/spp_hxl/tests/test_hxl_export_profile.py new file mode 100644 index 00000000..a26b0d1f --- /dev/null +++ b/spp_hxl/tests/test_hxl_export_profile.py @@ -0,0 +1,312 @@ +import logging + +from odoo.exceptions import ValidationError +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestHxlExportProfile(TransactionCase): + """Test cases for HXL Export Profile models""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.ExportProfile = cls.env["spp.hxl.export.profile"] + cls.ExportColumn = cls.env["spp.hxl.export.profile.column"] + cls.HxlTag = cls.env["spp.hxl.tag"] + cls.HxlAttribute = cls.env["spp.hxl.attribute"] + cls.IrModel = cls.env["ir.model"] + + # Get a model to use for testing + cls.test_model = cls.IrModel.search([("model", "=", "res.partner")], limit=1) + + # Get some standard tags and attributes + cls.tag_org = cls.HxlTag.search([("hashtag", "=", "#org")], limit=1) + cls.tag_contact = cls.HxlTag.search([("hashtag", "=", "#contact")], limit=1) + cls.attr_name = cls.HxlAttribute.search([("code", "=", "+name")], limit=1) + cls.attr_code = cls.HxlAttribute.search([("code", "=", "+code")], limit=1) + + def test_create_export_profile(self): + """Test creating a basic export profile""" + profile = self.ExportProfile.create( + { + "name": "Test Profile", + "code": "test_profile", + "model_id": self.test_model.id, + } + ) + self.assertTrue(profile) + self.assertEqual(profile.code, "test_profile") + self.assertEqual(profile.model_name, "res.partner") + + def test_profile_code_uniqueness(self): + """Test that profile code must be unique""" + self.ExportProfile.create( + { + "name": "Profile 1", + 
"code": "unique_profile", + "model_id": self.test_model.id, + } + ) + with self.assertRaises(Exception): + # Should raise constraint violation + self.ExportProfile.create( + { + "name": "Profile 2", + "code": "unique_profile", + "model_id": self.test_model.id, + } + ) + + def test_profile_code_validation(self): + """Test that profile code can only contain valid characters""" + # Valid code + valid_profile = self.ExportProfile.create( + { + "name": "Valid Profile", + "code": "valid_profile_123", + "model_id": self.test_model.id, + } + ) + self.assertTrue(valid_profile) + + # Invalid code with special characters + with self.assertRaises(ValidationError): + self.ExportProfile.create( + { + "name": "Invalid Profile", + "code": "invalid@profile", + "model_id": self.test_model.id, + } + ) + + def test_profile_default_values(self): + """Test default values for export profile""" + profile = self.ExportProfile.create( + { + "name": "Default Test", + "code": "default_test", + "model_id": self.test_model.id, + } + ) + self.assertTrue(profile.include_hxl_row) + self.assertEqual(profile.date_format, "%Y-%m-%d") + self.assertTrue(profile.active) + self.assertEqual(profile.sequence, 10) + + def test_create_profile_with_columns(self): + """Test creating profile with columns""" + profile = self.ExportProfile.create( + { + "name": "Profile with Columns", + "code": "profile_cols", + "model_id": self.test_model.id, + "column_ids": [ + ( + 0, + 0, + { + "name": "Organization Name", + "field_path": "name", + "hxl_tag": "#org+name", + }, + ), + ( + 0, + 0, + { + "name": "Organization Code", + "field_path": "ref", + "hxl_tag": "#org+code", + }, + ), + ], + } + ) + self.assertEqual(len(profile.column_ids), 2) + + def test_column_hxl_tag_manual(self): + """Test column with manual HXL tag entry""" + profile = self.ExportProfile.create( + { + "name": "Manual Tag Profile", + "code": "manual_tag", + "model_id": self.test_model.id, + } + ) + column = self.ExportColumn.create( + { + "profile_id": 
profile.id, + "name": "Test Column", + "field_path": "name", + "hxl_tag": "#org+impl+name", + } + ) + self.assertEqual(column.computed_hxl_tag, "#org+impl+name") + + def test_column_hxl_tag_from_components(self): + """Test column with HXL tag built from hashtag and attributes""" + if not self.tag_org or not self.attr_name or not self.attr_code: + self.skipTest("Required HXL tags or attributes not found") + + profile = self.ExportProfile.create( + { + "name": "Component Tag Profile", + "code": "component_tag", + "model_id": self.test_model.id, + } + ) + column = self.ExportColumn.create( + { + "profile_id": profile.id, + "name": "Organization", + "field_path": "name", + "hxl_hashtag_id": self.tag_org.id, + "hxl_attribute_ids": [(6, 0, [self.attr_name.id, self.attr_code.id])], + } + ) + # Should compute tag from components + self.assertTrue(column.computed_hxl_tag) + self.assertIn("#org", column.computed_hxl_tag) + self.assertIn("+name", column.computed_hxl_tag) + self.assertIn("+code", column.computed_hxl_tag) + + def test_column_hxl_tag_manual_precedence(self): + """Test that manual HXL tag takes precedence over components""" + if not self.tag_org or not self.attr_name: + self.skipTest("Required HXL tags or attributes not found") + + profile = self.ExportProfile.create( + { + "name": "Precedence Profile", + "code": "precedence", + "model_id": self.test_model.id, + } + ) + column = self.ExportColumn.create( + { + "profile_id": profile.id, + "name": "Test", + "field_path": "name", + "hxl_tag": "#manual+tag", + "hxl_hashtag_id": self.tag_org.id, + "hxl_attribute_ids": [(6, 0, [self.attr_name.id])], + } + ) + # Manual tag should take precedence + self.assertEqual(column.computed_hxl_tag, "#manual+tag") + + def test_column_sequence(self): + """Test column sequencing""" + profile = self.ExportProfile.create( + { + "name": "Sequence Profile", + "code": "sequence_test", + "model_id": self.test_model.id, + "column_ids": [ + ( + 0, + 0, + { + "name": "Column 1", + 
"field_path": "name", + "sequence": 20, + }, + ), + ( + 0, + 0, + { + "name": "Column 2", + "field_path": "email", + "sequence": 10, + }, + ), + ( + 0, + 0, + { + "name": "Column 3", + "field_path": "phone", + "sequence": 30, + }, + ), + ], + } + ) + # Should be ordered by sequence + columns = profile.column_ids.sorted("sequence") + self.assertEqual(columns[0].name, "Column 2") + self.assertEqual(columns[1].name, "Column 1") + self.assertEqual(columns[2].name, "Column 3") + + def test_search_profiles_by_model(self): + """Test searching profiles by model""" + self.ExportProfile.create( + { + "name": "Partner Profile 1", + "code": "partner_1", + "model_id": self.test_model.id, + } + ) + self.ExportProfile.create( + { + "name": "Partner Profile 2", + "code": "partner_2", + "model_id": self.test_model.id, + } + ) + + profiles = self.ExportProfile.search([("model_id", "=", self.test_model.id)]) + self.assertTrue(len(profiles) >= 2) + + def test_archive_profile(self): + """Test archiving a profile""" + profile = self.ExportProfile.create( + { + "name": "Archive Test", + "code": "archive_test", + "model_id": self.test_model.id, + } + ) + profile.active = False + self.assertFalse(profile.active) + + # Archived profiles should not appear in default search + active_profiles = self.ExportProfile.search([("code", "=", "archive_test")]) + self.assertEqual(len(active_profiles), 0) + + def test_column_update_tag_components(self): + """Test updating column tag components""" + if not self.tag_org or not self.tag_contact or not self.attr_name: + self.skipTest("Required HXL tags or attributes not found") + + profile = self.ExportProfile.create( + { + "name": "Update Profile", + "code": "update_test", + "model_id": self.test_model.id, + } + ) + column = self.ExportColumn.create( + { + "profile_id": profile.id, + "name": "Test Column", + "field_path": "name", + "hxl_hashtag_id": self.tag_org.id, + } + ) + + # Initial tag + self.assertEqual(column.computed_hxl_tag, "#org") + + # Add 
attributes + column.write({"hxl_attribute_ids": [(6, 0, [self.attr_name.id])]}) + self.assertIn("+name", column.computed_hxl_tag) + + # Change hashtag + column.write({"hxl_hashtag_id": self.tag_contact.id}) + self.assertIn("#contact", column.computed_hxl_tag) diff --git a/spp_hxl/tests/test_hxl_tag.py b/spp_hxl/tests/test_hxl_tag.py new file mode 100644 index 00000000..e9c770cd --- /dev/null +++ b/spp_hxl/tests/test_hxl_tag.py @@ -0,0 +1,145 @@ +import logging + +from odoo.exceptions import ValidationError +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestHxlTag(TransactionCase): + """Test cases for HXL Tag model""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.HxlTag = cls.env["spp.hxl.tag"] + + def test_create_hxl_tag(self): + """Test creating a basic HXL tag""" + tag = self.HxlTag.create( + { + "hashtag": "#test_tag", + "name": "Test Tag", + "category": "metadata", + "data_type": "text", + "is_standard": False, + } + ) + self.assertTrue(tag) + self.assertEqual(tag.hashtag, "#test_tag") + self.assertEqual(tag.name, "Test Tag") + self.assertEqual(tag.category, "metadata") + + def test_hashtag_uniqueness(self): + """Test that hashtag must be unique""" + self.HxlTag.create( + { + "hashtag": "#unique_tag", + "name": "Unique Tag", + } + ) + with self.assertRaises(Exception): + # Should raise constraint violation + self.HxlTag.create( + { + "hashtag": "#unique_tag", + "name": "Duplicate Tag", + } + ) + + def test_hashtag_format_validation(self): + """Test that hashtag must start with #""" + with self.assertRaises(ValidationError): + self.HxlTag.create( + { + "hashtag": "invalid_tag", # Missing # + "name": "Invalid Tag", + } + ) + + def test_hashtag_character_validation(self): + """Test that hashtag can only contain valid characters""" + # Valid characters: alphanumeric, underscore, dash + valid_tag = self.HxlTag.create( + { 
+ "hashtag": "#valid_tag-123", + "name": "Valid Tag", + } + ) + self.assertTrue(valid_tag) + + # Invalid characters + with self.assertRaises(ValidationError): + self.HxlTag.create( + { + "hashtag": "#invalid@tag", # Contains @ + "name": "Invalid Tag", + } + ) + + def test_default_values(self): + """Test default values for HXL tag""" + tag = self.HxlTag.create( + { + "hashtag": "#default_test", + "name": "Default Test", + } + ) + self.assertTrue(tag.is_standard) + self.assertTrue(tag.active) + self.assertEqual(tag.data_type, "text") + + def test_search_by_category(self): + """Test searching tags by category""" + self.HxlTag.create( + { + "hashtag": "#geo_tag", + "name": "Geographic Tag", + "category": "geographic", + } + ) + self.HxlTag.create( + { + "hashtag": "#pop_tag", + "name": "Population Tag", + "category": "population", + } + ) + + geo_tags = self.HxlTag.search([("category", "=", "geographic")]) + self.assertTrue(len(geo_tags) >= 1) + self.assertTrue(all(tag.category == "geographic" for tag in geo_tags)) + + def test_archive_tag(self): + """Test archiving a tag""" + tag = self.HxlTag.create( + { + "hashtag": "#archive_test", + "name": "Archive Test", + } + ) + tag.active = False + self.assertFalse(tag.active) + + # Archived tags should not appear in default search + active_tags = self.HxlTag.search([("hashtag", "=", "#archive_test")]) + self.assertEqual(len(active_tags), 0) + + # But should be findable with active_test=False + all_tags = self.HxlTag.with_context(active_test=False).search([("hashtag", "=", "#archive_test")]) + self.assertEqual(len(all_tags), 1) + + def test_standard_tags_loaded(self): + """Test that standard HXL tags are loaded from data""" + # Check for some standard tags that should be loaded + affected_tag = self.HxlTag.search([("hashtag", "=", "#affected")]) + self.assertTrue(affected_tag) + self.assertTrue(affected_tag.is_standard) + self.assertEqual(affected_tag.category, "population") + + adm2_tag = self.HxlTag.search([("hashtag", "=", 
"#adm2")]) + self.assertTrue(adm2_tag) + self.assertTrue(adm2_tag.is_standard) + self.assertEqual(adm2_tag.category, "geographic") diff --git a/spp_hxl/tests/test_hxl_variable.py b/spp_hxl/tests/test_hxl_variable.py new file mode 100644 index 00000000..454da600 --- /dev/null +++ b/spp_hxl/tests/test_hxl_variable.py @@ -0,0 +1,210 @@ +import logging + +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestHxlVariable(TransactionCase): + """Test cases for CEL Variable HXL extension""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.CELVariable = cls.env["spp.cel.variable"] + + def test_hxl_tag_computation_basic(self): + """Test basic HXL tag computation from hashtag and attributes""" + variable = self.CELVariable.create( + { + "name": "test_hxl_basic", + "cel_accessor": "test_hxl_basic", + "source_type": "constant", + "hxl_hashtag": "#affected", + "hxl_attributes": "+f+children", + } + ) + self.assertEqual(variable.hxl_tag, "#affected+f+children") + + def test_hxl_tag_computation_hashtag_only(self): + """Test HXL tag computation with only hashtag""" + variable = self.CELVariable.create( + { + "name": "test_hxl_hashtag", + "cel_accessor": "test_hxl_hashtag", + "source_type": "constant", + "hxl_hashtag": "#population", + } + ) + self.assertEqual(variable.hxl_tag, "#population") + + def test_hxl_tag_computation_no_hashtag(self): + """Test HXL tag computation when no hashtag is set""" + variable = self.CELVariable.create( + { + "name": "test_hxl_no_tag", + "cel_accessor": "test_hxl_no_tag", + "source_type": "constant", + } + ) + self.assertFalse(variable.hxl_tag) + + def test_hxl_tag_computation_auto_prefix(self): + """Test HXL tag computation automatically adds # if missing""" + variable = self.CELVariable.create( + { + "name": "test_hxl_auto_prefix", + "cel_accessor": "test_hxl_auto_prefix", + "source_type": "constant", + 
"hxl_hashtag": "affected", # Missing # + "hxl_attributes": "+m", + } + ) + self.assertEqual(variable.hxl_tag, "#affected+m") + + def test_hxl_import_action_default(self): + """Test default HXL import action""" + variable = self.CELVariable.create( + { + "name": "test_hxl_import", + "cel_accessor": "test_hxl_import", + "source_type": "constant", + "hxl_hashtag": "#indicator", + } + ) + self.assertEqual(variable.hxl_import_action, "variable") + + def test_hxl_export_include_default(self): + """Test default HXL export include""" + variable = self.CELVariable.create( + { + "name": "test_hxl_export", + "cel_accessor": "test_hxl_export", + "source_type": "constant", + "hxl_hashtag": "#value", + } + ) + self.assertTrue(variable.hxl_export_include) + + def test_hxl_import_action_options(self): + """Test different HXL import action options""" + for action in ["field", "event", "variable", "skip"]: + variable = self.CELVariable.create( + { + "name": f"test_hxl_action_{action}", + "cel_accessor": f"test_hxl_action_{action}", + "source_type": "constant", + "hxl_import_action": action, + } + ) + self.assertEqual(variable.hxl_import_action, action) + + def test_hxl_export_include_flag(self): + """Test HXL export include flag""" + variable1 = self.CELVariable.create( + { + "name": "test_hxl_export_yes", + "cel_accessor": "test_hxl_export_yes", + "source_type": "constant", + "hxl_export_include": True, + } + ) + variable2 = self.CELVariable.create( + { + "name": "test_hxl_export_no", + "cel_accessor": "test_hxl_export_no", + "source_type": "constant", + "hxl_export_include": False, + } + ) + self.assertTrue(variable1.hxl_export_include) + self.assertFalse(variable2.hxl_export_include) + + def test_hxl_tag_update(self): + """Test HXL tag updates when hashtag or attributes change""" + variable = self.CELVariable.create( + { + "name": "test_hxl_update", + "cel_accessor": "test_hxl_update", + "source_type": "constant", + "hxl_hashtag": "#affected", + } + ) + 
self.assertEqual(variable.hxl_tag, "#affected") + + # Update attributes + variable.write({"hxl_attributes": "+f"}) + self.assertEqual(variable.hxl_tag, "#affected+f") + + # Update hashtag + variable.write({"hxl_hashtag": "#inneed"}) + self.assertEqual(variable.hxl_tag, "#inneed+f") + + # Clear hashtag + variable.write({"hxl_hashtag": False}) + self.assertFalse(variable.hxl_tag) + + def test_search_by_hxl_tag(self): + """Test searching variables by HXL tag""" + self.CELVariable.create( + { + "name": "test_hxl_search1", + "cel_accessor": "test_hxl_search1", + "source_type": "constant", + "hxl_hashtag": "#affected", + "hxl_attributes": "+f", + } + ) + self.CELVariable.create( + { + "name": "test_hxl_search2", + "cel_accessor": "test_hxl_search2", + "source_type": "constant", + "hxl_hashtag": "#affected", + "hxl_attributes": "+m", + } + ) + self.CELVariable.create( + { + "name": "test_hxl_search3", + "cel_accessor": "test_hxl_search3", + "source_type": "constant", + "hxl_hashtag": "#population", + } + ) + + # Search for variables with #affected hashtag + affected_vars = self.CELVariable.search([("hxl_hashtag", "ilike", "affected")]) + self.assertTrue(len(affected_vars) >= 2) + + def test_filter_export_variables(self): + """Test filtering variables for export""" + self.CELVariable.create( + { + "name": "test_hxl_filter1", + "cel_accessor": "test_hxl_filter1", + "source_type": "constant", + "hxl_export_include": True, + } + ) + self.CELVariable.create( + { + "name": "test_hxl_filter2", + "cel_accessor": "test_hxl_filter2", + "source_type": "constant", + "hxl_export_include": True, + } + ) + self.CELVariable.create( + { + "name": "test_hxl_filter3", + "cel_accessor": "test_hxl_filter3", + "source_type": "constant", + "hxl_export_include": False, + } + ) + + export_vars = self.CELVariable.search([("hxl_export_include", "=", True)]) + self.assertTrue(len(export_vars) >= 2) diff --git a/spp_hxl/tests/test_security.py b/spp_hxl/tests/test_security.py new file mode 100644 
index 00000000..99810677 --- /dev/null +++ b/spp_hxl/tests/test_security.py @@ -0,0 +1,188 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Security tests for HXL module access control.""" + +import logging + +from odoo.exceptions import AccessError +from odoo.tests.common import TransactionCase, tagged + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install", "security") +class TestHxlSecurity(TransactionCase): + """Test access control for HXL models""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.HxlTag = cls.env["spp.hxl.tag"] + cls.HxlAttribute = cls.env["spp.hxl.attribute"] + cls.HxlProfile = cls.env["spp.hxl.export.profile"] + + # Create test users with different access levels + cls.user_regular = cls.env["res.users"].create( + { + "name": "Regular User", + "login": "hxl_regular_user", + "email": "regular@test.com", + "group_ids": [(6, 0, [cls.env.ref("base.group_user").id])], + } + ) + + # Admin user - try to find OpenSPP admin group, fall back to base system admin + admin_group = cls.env.ref("spp_security.group_spp_admin", raise_if_not_found=False) + if not admin_group: + admin_group = cls.env.ref("base.group_system") + + cls.user_admin = cls.env["res.users"].create( + { + "name": "Admin User", + "login": "hxl_admin_user", + "email": "admin@test.com", + "group_ids": [ + ( + 6, + 0, + [ + cls.env.ref("base.group_user").id, + admin_group.id, + ], + ) + ], + } + ) + + # Create test data + cls.test_tag = cls.HxlTag.create( + { + "hashtag": "#security_test", + "name": "Security Test Tag", + } + ) + cls.test_attr = cls.HxlAttribute.create( + { + "code": "+sectest", + "name": "Security Test Attr", + } + ) + + def test_regular_user_can_read_tags(self): + """Test that regular users can read HXL tags""" + tags = self.HxlTag.with_user(self.user_regular).search([]) + self.assertTrue(len(tags) >= 1) + + def test_regular_user_can_read_attributes(self): + """Test that regular users can 
read HXL attributes""" + attrs = self.HxlAttribute.with_user(self.user_regular).search([]) + self.assertTrue(len(attrs) >= 1) + + def test_regular_user_can_read_profiles(self): + """Test that regular users can read export profiles""" + profiles = self.HxlProfile.with_user(self.user_regular).search([]) + # May be empty if no profiles created, but shouldn't raise AccessError + self.assertIsNotNone(profiles) + + def test_regular_user_cannot_write_tags(self): + """Test that regular users cannot write to HXL tags""" + with self.assertRaises(AccessError): + self.test_tag.with_user(self.user_regular).write({"name": "Modified Name"}) + + def test_regular_user_cannot_create_tags(self): + """Test that regular users cannot create HXL tags""" + with self.assertRaises(AccessError): + self.HxlTag.with_user(self.user_regular).create( + { + "hashtag": "#user_created", + "name": "User Created Tag", + } + ) + + def test_regular_user_cannot_delete_tags(self): + """Test that regular users cannot delete HXL tags""" + with self.assertRaises(AccessError): + self.test_tag.with_user(self.user_regular).unlink() + + def test_regular_user_cannot_write_attributes(self): + """Test that regular users cannot write to HXL attributes""" + with self.assertRaises(AccessError): + self.test_attr.with_user(self.user_regular).write({"name": "Modified Name"}) + + def test_regular_user_cannot_create_attributes(self): + """Test that regular users cannot create HXL attributes""" + with self.assertRaises(AccessError): + self.HxlAttribute.with_user(self.user_regular).create( + { + "code": "+user_attr", + "name": "User Created Attribute", + } + ) + + def test_admin_has_full_access_to_tags(self): + """Test that admin users have full CRUD access to tags""" + # Create + tag = self.HxlTag.with_user(self.user_admin).create( + { + "hashtag": "#admin_tag", + "name": "Admin Created Tag", + } + ) + self.assertTrue(tag) + + # Read + read_tag = self.HxlTag.with_user(self.user_admin).browse(tag.id) + 
self.assertEqual(read_tag.name, "Admin Created Tag") + + # Write + tag.with_user(self.user_admin).write({"name": "Admin Modified Tag"}) + self.assertEqual(tag.name, "Admin Modified Tag") + + # Delete + tag.with_user(self.user_admin).unlink() + self.assertFalse(tag.exists()) + + def test_admin_has_full_access_to_attributes(self): + """Test that admin users have full CRUD access to attributes""" + # Create + attr = self.HxlAttribute.with_user(self.user_admin).create( + { + "code": "+admin_attr", + "name": "Admin Created Attr", + } + ) + self.assertTrue(attr) + + # Read + read_attr = self.HxlAttribute.with_user(self.user_admin).browse(attr.id) + self.assertEqual(read_attr.name, "Admin Created Attr") + + # Write + attr.with_user(self.user_admin).write({"name": "Admin Modified Attr"}) + self.assertEqual(attr.name, "Admin Modified Attr") + + # Delete + attr.with_user(self.user_admin).unlink() + self.assertFalse(attr.exists()) + + def test_admin_can_manage_profiles(self): + """Test that admin users can manage export profiles""" + # Get an IR model for the profile + ir_model = self.env["ir.model"].search([("model", "=", "spp.hxl.tag")], limit=1) + + # Create profile + profile = self.HxlProfile.with_user(self.user_admin).create( + { + "name": "Admin Profile", + "code": "admin_profile", + "model_id": ir_model.id, + } + ) + self.assertTrue(profile) + + # Write + profile.with_user(self.user_admin).write({"name": "Modified Profile"}) + self.assertEqual(profile.name, "Modified Profile") + + # Delete + profile.with_user(self.user_admin).unlink() + self.assertFalse(profile.exists()) diff --git a/spp_hxl/views/cel_variable_views.xml b/spp_hxl/views/cel_variable_views.xml new file mode 100644 index 00000000..28292502 --- /dev/null +++ b/spp_hxl/views/cel_variable_views.xml @@ -0,0 +1,53 @@ + + + + + spp.cel.variable.form.hxl + spp.cel.variable + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spp_hxl/views/hxl_attribute_views.xml b/spp_hxl/views/hxl_attribute_views.xml 
new file mode 100644 index 00000000..da57f7d3 --- /dev/null +++ b/spp_hxl/views/hxl_attribute_views.xml @@ -0,0 +1,84 @@ + + + + + spp.hxl.attribute.tree + spp.hxl.attribute + + + + + + + + + + + + + + + spp.hxl.attribute.form + spp.hxl.attribute + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+
+
+ + + + spp.hxl.attribute.search + spp.hxl.attribute + + + + + + + + + + + + + + + + + + + HXL Attributes + spp.hxl.attribute + list,form + hxl-attributes + {'search_default_filter_active': 1, 'search_default_group_by_category': 1} + +

+ Create a new HXL attribute +

+

+ HXL attributes provide additional disaggregation for hashtags. + Examples include +f (female), +children, +code, etc. +

+
+
+
diff --git a/spp_hxl/views/hxl_export_profile_views.xml b/spp_hxl/views/hxl_export_profile_views.xml new file mode 100644 index 00000000..9d370d0b --- /dev/null +++ b/spp_hxl/views/hxl_export_profile_views.xml @@ -0,0 +1,119 @@ + + + + + spp.hxl.export.profile.column.tree + spp.hxl.export.profile.column + + + + + + + + + + + + + + + + spp.hxl.export.profile.tree + spp.hxl.export.profile + + + + + + + + + + + + + + spp.hxl.export.profile.form + spp.hxl.export.profile + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ + + + spp.hxl.export.profile.search + spp.hxl.export.profile + + + + + + + + + + + + + + + HXL Export Profiles + spp.hxl.export.profile + list,form + hxl-export-profiles + {'search_default_filter_active': 1} + +

+ Create a new HXL export profile +

+

+ Export profiles define pre-configured templates for exporting data with HXL tags. + Each profile specifies the model, columns, and their associated HXL hashtags and attributes. +

+
+
+
diff --git a/spp_hxl/views/hxl_tag_views.xml b/spp_hxl/views/hxl_tag_views.xml new file mode 100644 index 00000000..5cdbc6bf --- /dev/null +++ b/spp_hxl/views/hxl_tag_views.xml @@ -0,0 +1,87 @@ + + + + + spp.hxl.tag.tree + spp.hxl.tag + + + + + + + + + + + + + + + + spp.hxl.tag.form + spp.hxl.tag + +
+ +
+ +
+ + + + + + + + + + + + + + +
+
+
+
+ + + + spp.hxl.tag.search + spp.hxl.tag + + + + + + + + + + + + + + + + + + + + HXL Hashtags + spp.hxl.tag + list,form + hxl-hashtags + {'search_default_filter_active': 1, 'search_default_group_by_category': 1} + +

+ Create a new HXL hashtag +

+

+ HXL hashtags are the primary classification for humanitarian data fields. + Examples include #affected, #adm2, #indicator, etc. +

+
+
+
diff --git a/spp_hxl/views/menus.xml b/spp_hxl/views/menus.xml new file mode 100644 index 00000000..7a0e324a --- /dev/null +++ b/spp_hxl/views/menus.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/spp_hxl_area/QUICKSTART.md b/spp_hxl_area/QUICKSTART.md new file mode 100644 index 00000000..36ffe222 --- /dev/null +++ b/spp_hxl_area/QUICKSTART.md @@ -0,0 +1,226 @@ +# HXL Area Import - Quick Start Guide + +## 5-Minute Setup + +### Step 1: Install Module + +```bash +# Install with dependencies +odoo -d your_database -i spp_hxl_area +``` + +### Step 2: Prepare Your Data + +Your HXL file should look like: + +```csv +Area Name,Affected People,Severity +#adm2+name,#affected+ind,#impact+type +District A,100,severe +District B,50,minor +District C,200,severe +``` + +Key requirements: + +- Row 1: Human-readable headers +- Row 2: HXL hashtags (starting with `#`) +- Rows 3+: Data + +### Step 3: Import Data + +1. Navigate to: **HXL Area > Import HXL Data** +2. Select profile: **"Sri Lanka Damage Assessment"** (or create custom) +3. Upload your CSV/Excel file +4. Review preview +5. Click **Import** + +### Step 4: View Results + +Navigate to: **HXL Area > Area Indicators** + +Your data is now aggregated by area and ready to use! 
+ +## Common Use Cases + +### Use Case 1: Damage Assessment + +**Data**: GPS coordinates of damaged houses + +**Profile Configuration**: + +- Strategy: GPS +- Level: Admin 3 +- Aggregation: Count by severity + +**Result**: Map showing number of damaged houses per area + +### Use Case 2: Beneficiary Tracking + +**Data**: List of beneficiaries with household IDs + +**Profile Configuration**: + +- Strategy: P-code +- Level: Admin 4 +- Aggregation: Count individuals, count distinct households + +**Result**: Coverage statistics per barangay/village + +### Use Case 3: Multi-Organization Response + +**Data**: 3W data (Who does What Where) + +**Profile Configuration**: + +- Strategy: P-code +- Level: Admin 2 +- Aggregation: Sum beneficiaries, count distinct organizations + +**Result**: Coordination dashboard per district + +## Creating Custom Profiles + +### Basic Profile + +```python +profile = env['spp.hxl.import.profile'].create({ + 'name': 'My Custom Import', + 'code': 'my_import', + 'area_matching_strategy': 'pcode', # or 'name', 'gps', 'fuzzy' + 'area_column_tag': '#adm2+pcode', # HXL tag for area column + 'area_level': 2, # Admin level (1=Province, 2=District, etc.) 
+}) +``` + +### Add Aggregation Rules + +```python +# Count all records +env['spp.hxl.aggregation.rule'].create({ + 'profile_id': profile.id, + 'name': 'Total Records', + 'aggregation_type': 'count', + 'output_hxl_tag': '#meta+count', +}) + +# Sum numeric values +env['spp.hxl.aggregation.rule'].create({ + 'profile_id': profile.id, + 'name': 'Total Affected', + 'aggregation_type': 'sum', + 'source_column_tag': '#affected+ind', + 'output_hxl_tag': '#affected+ind+total', +}) + +# Count with filter +env['spp.hxl.aggregation.rule'].create({ + 'profile_id': profile.id, + 'name': 'Severe Cases', + 'aggregation_type': 'count', + 'filter_expression': "row.get('#severity') == 'severe'", + 'output_hxl_tag': '#affected+severe', +}) +``` + +## Matching Strategies Cheat Sheet + +| Strategy | Use When | Pros | Cons | +| --------- | ------------------------- | -------------------------- | ------------------------ | +| **pcode** | You have official P-codes | Most reliable, exact match | Requires P-codes in data | +| **name** | Names are consistent | Simple, case-insensitive | Spelling variations fail | +| **fuzzy** | Names vary slightly | Handles variations | May match incorrectly | +| **gps** | You have coordinates | Works without admin data | Requires PostGIS setup | + +## HXL Tags Reference + +Common tags for humanitarian data: + +| Tag | Meaning | Example | +| --------------- | -------------------- | ---------------------- | +| `#adm1+pcode` | Province P-code | PH-01 | +| `#adm2+pcode` | District P-code | PH-01-02 | +| `#adm3+name` | Municipality name | Quezon City | +| `#affected+ind` | Affected individuals | 1234 | +| `#affected+hh` | Affected households | 256 | +| `#reached+ind` | Reached individuals | 890 | +| `#org+name` | Organization name | Red Cross | +| `#sector` | Sector | Food, Health, Shelter | +| `#impact+type` | Damage severity | severe, partial, minor | +| `#geo+lat` | Latitude | 14.5995 | +| `#geo+lon` | Longitude | 120.9842 | + +Gender/age disaggregation: + +- 
`+f` = Female +- `+m` = Male +- `+children` = Children +- `+elderly` = Elderly +- `+i` = Infants +- `+adult` = Adults + +## Troubleshooting + +### Problem: "No HXL hashtags detected" + +**Solution**: Check your file format: + +```csv +Header Row ← Row 1 +#hxl+tags ← Row 2 (hashtags) +data ← Row 3+ +``` + +### Problem: "All areas unmatched" + +**Solutions**: + +1. Check area codes in database: `SELECT code FROM spp_area WHERE level = 2` +2. Try different matching strategy (pcode → name → fuzzy) +3. Check admin level matches your data +4. View unmatched values in import wizard + +### Problem: "Import stuck in processing" + +**Solutions**: + +1. Check queue jobs: **Settings > Technical > Queue Jobs** +2. Check server logs: `tail -f /var/log/odoo/odoo.log` +3. Verify file size is reasonable (<10MB) + +### Problem: "Indicators not in CEL" + +**Solutions**: + +1. Link variable in aggregation rule +2. Check variable exists: **HXL > Configuration > Variables** +3. Manually sync: Select indicator → **Sync to Data Values** + +## Performance Tips + +- **Small files (<1MB)**: Process immediately +- **Medium files (1-10MB)**: Use wizard (background processing) +- **Large files (>10MB)**: Split into smaller batches +- **Caching**: Matcher caches area lookups automatically +- **Bulk operations**: Uses efficient SQL upserts + +## Next Steps + +1. **Explore demo profiles**: Check pre-configured examples +2. **Create custom profiles**: Tailor to your data sources +3. **Integrate with CEL**: Use indicators in eligibility criteria +4. **Automate imports**: Schedule regular data updates +5. 
**Build dashboards**: Visualize area-level metrics + +## Support + +- Documentation: https://docs.openspp.org/ +- HXL Standard: https://hxlstandard.org/ +- Issues: https://github.com/OpenSPP/openspp-modules/issues + +## Quick Links + +- Import Wizard: HXL Area > Import HXL Data +- Profiles: HXL Area > Configuration > Import Profiles +- Batches: HXL Area > Import Batches +- Indicators: HXL Area > Area Indicators diff --git a/spp_hxl_area/README.md b/spp_hxl_area/README.md new file mode 100644 index 00000000..6e83369a --- /dev/null +++ b/spp_hxl_area/README.md @@ -0,0 +1,308 @@ +# OpenSPP HXL Area Integration + +HXL import with area-level aggregation for humanitarian indicators. + +## Overview + +This module enables importing HXL-tagged field data and aggregating it to area-level indicators for humanitarian +coordination. It bridges individual-level observations to area-level metrics used in humanitarian response coordination. + +## Features + +- **Multiple Matching Strategies**: Match HXL data to areas using P-codes, names, GPS coordinates, or fuzzy matching +- **Flexible Aggregation Rules**: Count, sum, average, min, max, distinct count, and percentage aggregations +- **Disaggregation Support**: Break down indicators by demographic attributes (gender, age, etc.) 
+- **Import Profiles**: Pre-configured templates for common data sources +- **Import Wizard**: User-friendly interface with data preview and validation +- **CEL Integration**: Sync indicators to `spp.data.value` for use in eligibility expressions +- **Queue Job Processing**: Background processing for large imports +- **Audit Trail**: Track import batches, results, and errors + +## Architecture + +- **Layer**: 2 (Capabilities) +- **Category**: OpenSPP/Integration +- **Dependencies**: `spp_hxl`, `spp_area`, `spp_cel_domain`, `queue_job` +- **External Dependencies**: `libhxl` (Python library for HXL data processing) + +## Key Models + +### `spp.hxl.import.profile` + +Configuration for HXL data import: + +- Area matching strategy (P-code, name, GPS, fuzzy) +- Area column HXL tag +- Target admin level +- Aggregation rules + +### `spp.hxl.aggregation.rule` + +Define how to aggregate data: + +- Aggregation type (count, sum, avg, etc.) +- Source column HXL tag +- Filter expression +- Disaggregation attributes +- Target CEL variable + +### `spp.hxl.import.batch` + +Track individual import executions: + +- Upload HXL file +- Auto-detect columns +- Process and aggregate +- View statistics and results + +### `spp.hxl.area.indicator` + +Aggregated indicator values: + +- Area reference +- Variable reference +- Value and count +- Period key +- Disaggregation JSON +- Auto-sync to `spp.data.value` + +## Pre-configured Profiles + +### Sri Lanka Damage Assessment + +- **Strategy**: Name matching +- **Level**: Admin Level 4 (GN Division) +- **Indicators**: Severely damaged households, partially damaged households, total affected + +### Philippines Beneficiary Coverage + +- **Strategy**: P-code matching +- **Level**: Admin Level 4 (Barangay) +- **Indicators**: Total beneficiaries (with gender/age disaggregation), beneficiary households + +### OCHA 3W Import + +- **Strategy**: P-code matching +- **Level**: Admin Level 2 (District) +- **Indicators**: People reached, number of 
organizations + +### GPS Survey Import + +- **Strategy**: GPS coordinates +- **Level**: Admin Level 3 +- **Indicators**: Survey observations + +## Usage + +### Via Import Wizard + +1. Navigate to **HXL Area > Import HXL Data** +2. Select an import profile +3. Upload HXL-tagged CSV/Excel file +4. Review data preview and area matching +5. Set period and context (optional incident) +6. Click **Import** + +### Programmatically + +```python +# Create import profile +profile = env['spp.hxl.import.profile'].create({ + 'name': 'My Import', + 'code': 'my_import', + 'area_matching_strategy': 'pcode', + 'area_column_tag': '#adm2+pcode', + 'area_level': 2, +}) + +# Add aggregation rule +env['spp.hxl.aggregation.rule'].create({ + 'profile_id': profile.id, + 'name': 'Count Records', + 'aggregation_type': 'count', + 'output_hxl_tag': '#meta+count', +}) + +# Create and process batch +batch = env['spp.hxl.import.batch'].create({ + 'name': 'Import 2024-03', + 'profile_id': profile.id, + 'file_data': base64_encoded_file, + 'period_key': '2024-03', +}) + +batch.action_detect_columns() +batch.action_process() +``` + +## Area Matching Strategies + +### P-code Matching (`pcode`) + +- Exact match on `spp.area.code` +- Case-sensitive +- Most reliable for official administrative data + +### Name Matching (`name`) + +- Case-insensitive match on `spp.area.draft_name` +- Falls back to alternate names +- Good for data without P-codes + +### GPS Matching (`gps`) + +- Geographic coordinate lookup +- Requires latitude and longitude columns +- Note: Full implementation requires PostGIS + +### Fuzzy Matching (`fuzzy`) + +- Normalized name matching +- Removes common suffixes (District, Municipality, etc.) 
+- Partial matching with wildcards +- Good for inconsistent naming + +## Aggregation Types + +- **count**: Count number of records +- **sum**: Sum numeric values +- **avg**: Average numeric values +- **min**: Minimum value +- **max**: Maximum value +- **count_distinct**: Count unique values +- **percentage**: Percentage of total (filtered/total \* 100) + +## Filter Expressions + +Filter rows before aggregation using Python expressions: + +```python +# Filter by severity +row.get('#impact+type') == 'severe' + +# Multiple conditions +row.get('#status') == 'active' and int(row.get('#value', 0)) > 100 + +# List membership +row.get('#category') in ['food', 'shelter', 'health'] +``` + +**Warning**: Uses `eval()` - in production, implement safe expression evaluator. + +## Disaggregation + +Break down indicators by HXL attributes: + +```python +# In aggregation rule +disaggregate_by_tags = '+f,+m,+children,+elderly' + +# Results in JSON +{ + '+f': 120, + '+m': 130, + '+children': 50, + '+elderly': 30 +} +``` + +## CEL Integration + +Indicators are automatically synced to `spp.data.value` for use in CEL expressions: + +```python +# Access in eligibility criteria +area.affected_households > 100 + +# Access historical data +area.beneficiaries['2024-03'] > 50 +``` + +## Testing + +Run tests: + +```bash +# All tests +./scripts/test_single_module.sh spp_hxl_area + +# Specific test +pytest spp_hxl_area/tests/test_area_matcher.py -v +``` + +Test coverage targets: + +- Core functionality: 85%+ +- Service classes: 90%+ +- Models: 80%+ + +## Performance Considerations + +- **Batch Size**: Process 500 rows at a time +- **Caching**: Area matcher caches lookups during import +- **Background Jobs**: Large imports run via queue_job +- **Bulk Operations**: Uses bulk SQL upsert for data values + +## Security + +- **Manager Role**: Full access to profiles and configuration +- **User Role**: Can create and view import batches +- **Filter Expressions**: Be cautious with eval() - consider 
implementing safe evaluator + +## Known Limitations + +1. **GPS Matching**: Simplified implementation - full version requires PostGIS +2. **Filter Expressions**: Uses eval() - security risk if user-provided +3. **HXL Detection**: Assumes standard HXL format (hashtags in row 2) +4. **Large Files**: Very large files (>10MB) may require chunking + +## Troubleshooting + +### Import fails with "No HXL hashtags detected" + +- Verify file has HXL hashtag row (usually row 2) +- Check that hashtags start with `#` +- Ensure at least 50% of columns have hashtags + +### Many unmatched areas + +- Check area matching strategy matches your data +- Verify area codes/names in database match file +- Try fuzzy matching for name inconsistencies +- Check admin level filter + +### Indicators not appearing in CEL + +- Verify variable linked in aggregation rule +- Check that indicator has variable_id set +- Manually trigger sync: `indicator.sync_to_data_value()` + +### Import stuck in processing + +- Check queue job status +- Review error logs in batch form +- Check server logs for exceptions + +## Contributing + +Follow OpenSPP development guidelines: + +- PEP8 code style +- 85%+ test coverage +- No `print()` statements - use `_logger` +- No bare `except:` - catch specific exceptions +- Document complex logic + +## License + +LGPL-3 + +## Authors + +OpenSPP.org + +## Links + +- [HXL Standard](https://hxlstandard.org/) +- [libhxl Documentation](https://github.com/HXLStandard/libhxl-python) +- [OpenSPP Documentation](https://docs.openspp.org/) diff --git a/spp_hxl_area/README.rst b/spp_hxl_area/README.rst new file mode 100644 index 00000000..999ce14c --- /dev/null +++ b/spp_hxl_area/README.rst @@ -0,0 +1,153 @@ +============================ +OpenSPP HXL Area Integration +============================ + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! 
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:3c5a1d6122d13f49f33892b9b6f52d26da35c110f224c781fa24f90d788dacf3 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2Fopenspp--modules-lightgray.png?logo=github + :target: https://github.com/OpenSPP/openspp-modules/tree/19.0/spp_hxl_area + :alt: OpenSPP/openspp-modules + +|badge1| |badge2| |badge3| + +Import HXL-tagged field data and aggregate to area-level indicators for +humanitarian coordination. Matches HXL data rows to geographical areas +using P-codes, names, or GPS coordinates, then aggregates values +according to configurable rules. Generated indicators sync automatically +to ``spp.data.value`` for use in eligibility criteria and CEL +expressions. 
+ +Key Capabilities +~~~~~~~~~~~~~~~~ + +- Import HXL-tagged CSV/Excel files with libhxl parsing +- Match data rows to areas using P-code, name, GPS, or fuzzy name + strategies +- Aggregate numeric values using sum, count, average, min, max, or + percentage operations +- Apply filter expressions to subset data before aggregation +- Disaggregate indicators by HXL attributes (e.g., +f, +m, +children) +- Track imports as batches with state machine (draft → mapped → + processing → done) +- Auto-sync indicators to ``spp.data.value`` for CEL expression access +- Link imports to hazard incidents for disaster response tracking +- Process imports asynchronously via queue_job + +Key Models +~~~~~~~~~~ + ++--------------------------------+------------------------------------+ +| Model | Description | ++================================+====================================+ +| ``spp.hxl.import.profile`` | Configuration defining area | +| | matching and rules | ++--------------------------------+------------------------------------+ +| ``spp.hxl.aggregation.rule`` | Rule specifying what to aggregate | +| | and how | ++--------------------------------+------------------------------------+ +| ``spp.hxl.import.batch`` | Track one execution of profile | +| | against HXL file | ++--------------------------------+------------------------------------+ +| ``spp.hxl.import.mapping`` | Auto-detected column mapping | +| | (adjustable pre-run) | ++--------------------------------+------------------------------------+ +| ``spp.hxl.area.indicator`` | Aggregated indicator value stored | +| | per area | ++--------------------------------+------------------------------------+ +| ``spp.hxl.area.import.wizard`` | Wizard for uploading files and | +| | previewing matches | ++--------------------------------+------------------------------------+ + +Configuration +~~~~~~~~~~~~~ + +After installing: + +1. Navigate to **HXL > HXL Area > Configuration > Import Profiles** +2. 
Create a profile specifying area matching strategy and admin level +3. Add aggregation rules defining which columns to aggregate and how +4. Optionally link to a hazard incident for disaster response tracking + +UI Location +~~~~~~~~~~~ + +- **Menu**: HXL > HXL Area > Import HXL Data +- **Batches**: HXL > HXL Area > Import Batches +- **Indicators**: HXL > HXL Area > Area Indicators +- **Configuration**: HXL > HXL Area > Configuration > Import Profiles +- **Profile Form Tabs**: "Area Matching", "Aggregation Rules" +- **Batch Form Tabs**: "File", "Column Mapping", "Statistics", + "Indicators", "Error Log" + +Security +~~~~~~~~ + ++----------------------------------+------------------------------------------+ +| Group | Access | ++==================================+==========================================+ +| ``spp_security.group_spp_admin`` | Full CRUD on profiles, rules, indicators | ++----------------------------------+------------------------------------------+ +| ``base.group_user`` | Read profiles/rules; create/edit batches | ++----------------------------------+------------------------------------------+ + +Extension Points +~~~~~~~~~~~~~~~~ + +- Inherit ``spp.hxl.import.profile`` to add custom area matching + strategies +- Override ``spp.hxl.area.indicator.sync_to_data_value()`` to customize + CEL variable mapping +- Extend ``spp.hxl.aggregation.rule`` to add custom aggregation + functions + +Dependencies +~~~~~~~~~~~~ + +``spp_hxl``, ``spp_area``, ``spp_cel_domain``, ``spp_hazard``, +``spp_security``, ``queue_job`` + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. 
+If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* OpenSPP.org + +Maintainers +----------- + +This module is part of the `OpenSPP/openspp-modules `_ project on GitHub. + +You are welcome to contribute. diff --git a/spp_hxl_area/__init__.py b/spp_hxl_area/__init__.py new file mode 100644 index 00000000..abea803e --- /dev/null +++ b/spp_hxl_area/__init__.py @@ -0,0 +1,5 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import models +from . import wizards +from . import services diff --git a/spp_hxl_area/__manifest__.py b/spp_hxl_area/__manifest__.py new file mode 100644 index 00000000..9dca8914 --- /dev/null +++ b/spp_hxl_area/__manifest__.py @@ -0,0 +1,39 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +{ + "name": "OpenSPP HXL Area Integration", + "summary": "HXL import with area-level aggregation for humanitarian indicators. 
" + "Import HXL-tagged field data and aggregate to area-level metrics for coordination.", + "category": "OpenSPP/Integration", + "version": "19.0.2.0.0", + "author": "OpenSPP.org", + "website": "https://github.com/OpenSPP/OpenSPP2", + "license": "LGPL-3", + "development_status": "Alpha", + "depends": [ + "spp_hxl", + "spp_area", + "spp_cel_domain", + "spp_hazard", + "spp_security", + "queue_job", + ], + "external_dependencies": { + "python": [ + "libhxl", + ] + }, + "data": [ + "security/ir.model.access.csv", + "views/hxl_import_profile_views.xml", + "views/hxl_aggregation_rule_views.xml", + "views/hxl_import_batch_views.xml", + "views/hxl_area_indicator_views.xml", + "wizards/hxl_area_import_wizard_views.xml", + "data/hxl_import_profiles.xml", + "views/menus.xml", + ], + "application": False, + "installable": True, + "auto_install": False, +} diff --git a/spp_hxl_area/data/hxl_import_profiles.xml b/spp_hxl_area/data/hxl_import_profiles.xml new file mode 100644 index 00000000..2f95dbae --- /dev/null +++ b/spp_hxl_area/data/hxl_import_profiles.xml @@ -0,0 +1,142 @@ + + + + + + Sri Lanka Damage Assessment + sl_damage_assessment + Import damage assessment data from Sri Lanka field surveys. Matches areas by name and aggregates damaged households by severity. + name + #adm4+name + 4 + 10 + + + + + + Severely Damaged Households + count + row.get('#impact+type') in ['severe', 'destroyed', 'completely_damaged'] + #affected+hh+severe + 10 + + + + + Partially Damaged Households + count + row.get('#impact+type') in ['partial', 'minor_damage', 'moderate'] + #affected+hh+partial + 20 + + + + + Total Affected Households + count + #affected+hh + 30 + + + + + Philippines Beneficiary Coverage + ph_beneficiary_coverage + Import beneficiary coverage data using P-codes. Tracks reached individuals with demographic disaggregation. 
class HxlAggregationRule(models.Model):
    """Define how to aggregate HXL data to area indicators.

    Each rule specifies:
    - Which column to aggregate (or count rows)
    - What aggregation function to apply (sum, count, avg, etc.)
    - Optional filter expression to subset rows
    - Optional disaggregation by HXL attributes
    """

    _name = "spp.hxl.aggregation.rule"
    _description = "HXL Aggregation Rule"
    _order = "profile_id, sequence"

    profile_id = fields.Many2one(
        "spp.hxl.import.profile",
        required=True,
        ondelete="cascade",
        index=True,
    )
    sequence = fields.Integer(default=10)
    name = fields.Char(
        required=True,
        help="Name of the indicator (e.g., 'Severely Damaged Households')",
    )

    # ─── Target Variable ───────────────────────────────────────
    variable_id = fields.Many2one(
        "spp.cel.variable",
        string="Target Variable",
        help="CEL variable to store the aggregated value (for use in expressions)",
    )

    # ─── Aggregation Type ───────────────────────────────────────
    aggregation_type = fields.Selection(
        [
            ("count", "Count Records"),
            ("sum", "Sum Values"),
            ("avg", "Average"),
            ("min", "Minimum"),
            ("max", "Maximum"),
            ("count_distinct", "Count Distinct"),
            ("percentage", "Percentage of Total"),
        ],
        default="count",
        required=True,
        help="How to aggregate the data",
    )

    # ─── Source Configuration ───────────────────────────────────
    source_column_tag = fields.Char(
        string="Source Column HXL Tag",
        help="HXL tag of the column to aggregate (for sum/avg/min/max)",
    )

    filter_expression = fields.Text(
        string="Filter Expression",
        help="Python expression to filter rows (e.g., row.get('severity') == 'severe')",
    )

    # ─── Output Configuration ───────────────────────────────────
    output_hxl_tag = fields.Char(
        string="Output HXL Tag",
        help="HXL tag for re-export (e.g., #affected+hh+severe)",
    )

    # ─── Disaggregation ─────────────────────────────────────────
    disaggregate_by_tags = fields.Char(
        string="Disaggregate By",
        help="Comma-separated HXL attributes for disaggregation (e.g., +f,+m,+children)",
    )

    active = fields.Boolean(default=True)

    def _compute_display_name(self):
        """Display the rule name prefixed with its profile code.

        NOTE: ``name_get()`` was removed in Odoo 17; on a 19.0 module an
        overridden ``name_get`` is never called. Display names must be
        produced by overriding ``_compute_display_name`` instead.
        """
        for rec in self:
            rec.display_name = f"{rec.profile_id.code}: {rec.name}"
class HxlAreaIndicator(models.Model):
    """Aggregated indicator value per area.

    Stores the result of HXL import aggregation at the area level.
    Can be synced to spp.data.value for use in CEL expressions.
    """

    _name = "spp.hxl.area.indicator"
    _description = "HXL Area Indicator"
    _order = "area_id, variable_id, period_key"

    batch_id = fields.Many2one(
        "spp.hxl.import.batch",
        ondelete="cascade",
        index=True,
    )

    area_id = fields.Many2one(
        "spp.area",
        required=True,
        index=True,
        ondelete="cascade",
    )
    area_name = fields.Char(
        related="area_id.name",
        string="Area",
        store=True,
    )

    variable_id = fields.Many2one(
        "spp.cel.variable",
        index=True,
        ondelete="set null",
        help="CEL variable for semantic meaning and expression access",
    )
    variable_name = fields.Char(
        related="variable_id.name",
        string="Variable",
        store=True,
    )

    rule_id = fields.Many2one(
        "spp.hxl.aggregation.rule",
        ondelete="set null",
        help="Aggregation rule that generated this indicator",
    )

    period_key = fields.Char(
        index=True,
        help="Period identifier (e.g., '2024-03' for monthly data)",
    )

    incident_id = fields.Many2one(
        "spp.hazard.incident",
        index=True,
        help="Related incident/disaster event",
    )

    # ─── Values ─────────────────────────────────────────────────
    value = fields.Float(
        string="Value",
        help="Aggregated numeric value",
    )

    value_count = fields.Integer(
        string="Record Count",
        help="Number of source records aggregated to produce this value",
    )

    # Disaggregation stored as JSON
    disaggregation_json = fields.Text(
        string="Disaggregation",
        help="JSON with disaggregated values (e.g., {'+f': 120, '+m': 130})",
    )

    # For re-export
    hxl_tag = fields.Char(
        string="HXL Tag",
        help="HXL hashtag for re-exporting this indicator",
    )

    # ─── Metadata ───────────────────────────────────────────────
    source_type = fields.Char(
        string="Source Type",
        default="hxl_import",
        help="How this indicator was generated",
    )

    notes = fields.Text(
        string="Notes",
        help="Additional notes or context about this indicator",
    )

    _unique_indicator = models.Constraint(
        "UNIQUE(area_id, variable_id, period_key, incident_id, batch_id)",
        "Indicator must be unique per area/variable/period/incident/batch",
    )

    @api.model_create_multi
    def create(self, vals_list):
        """Auto-sync to data value on create.

        Uses the batched create signature required since Odoo 17; the old
        ``@api.model def create(self, vals)`` form is no longer supported.
        """
        indicators = super().create(vals_list)
        indicators.sync_to_data_value()
        return indicators

    def write(self, vals):
        """Auto-sync to data value when sync-relevant fields change."""
        res = super().write(vals)
        if any(key in vals for key in ["value", "variable_id", "area_id", "period_key"]):
            self.sync_to_data_value()
        return res

    def sync_to_data_value(self):
        """Sync indicator values to spp.data.value for CEL expression access.

        This allows area-level indicators to be used in eligibility criteria
        and other CEL expressions. Payloads are collected for the whole
        recordset and upserted in a single call instead of one call per
        indicator.
        """
        payloads = []
        for indicator in self:
            if not indicator.variable_id:
                continue

            # Prepare value JSON
            value_json = {
                "value": indicator.value,
                "count": indicator.value_count,
            }

            # Add disaggregation if present
            if indicator.disaggregation_json:
                try:
                    value_json["disaggregation"] = json.loads(indicator.disaggregation_json)
                except json.JSONDecodeError:
                    _logger.warning(
                        "Failed to parse disaggregation JSON for indicator %s",
                        indicator.id,
                    )

            payloads.append(
                {
                    "variable_name": indicator.variable_id.name,
                    "subject_model": "spp.area",
                    "subject_id": indicator.area_id.id,
                    "period_key": indicator.period_key or "current",
                    "value_json": value_json,
                    "value_type": "number",
                    "source_type": "external",
                    "provider": "hxl_import",
                }
            )

        if payloads:
            # Single bulk upsert for the whole recordset.
            self.env["spp.data.value"].upsert_values(payloads)
            _logger.debug("Synced %d indicators to data values", len(payloads))

    def action_sync_all(self):
        """Manually sync all indicators to data values."""
        self.sync_to_data_value()
        return {
            "type": "ir.actions.client",
            "tag": "display_notification",
            "params": {
                "title": _("Sync Complete"),
                "message": _("Synced %d indicators to data values") % len(self),
                "type": "success",
                "sticky": False,
            },
        }

    def _compute_display_name(self):
        """Display indicator with area, variable, period and value context.

        NOTE: replaces the removed ``name_get()`` API (gone since Odoo 17);
        on 19.0 only ``_compute_display_name`` is honored.
        """
        for rec in self:
            parts = [rec.area_name or f"Area#{rec.area_id.id}"]

            if rec.variable_name:
                parts.append(rec.variable_name)

            if rec.period_key:
                parts.append(f"({rec.period_key})")

            parts.append(f"= {rec.value}")

            rec.display_name = " ".join(parts)
class HxlImportBatch(models.Model):
    """Track individual HXL import batches.

    Each batch represents one execution of an import profile against
    a specific HXL file, producing area-level indicators.

    Lifecycle (``state``): draft -> mapped -> processing -> done, with
    ``failed`` reachable from column detection or processing errors.
    """

    _name = "spp.hxl.import.batch"
    _description = "HXL Import Batch"
    _order = "create_date desc"
    _inherit = ["mail.thread"]

    name = fields.Char(required=True, tracking=True)
    profile_id = fields.Many2one(
        "spp.hxl.import.profile",
        required=True,
        tracking=True,
        ondelete="restrict",
    )

    # ─── Source ───────────────────────────────────────────
    file_data = fields.Binary(
        string="HXL File",
        attachment=True,
        help="HXL-tagged CSV/Excel file to import",
    )
    file_name = fields.Char()
    source_url = fields.Char(
        string="Source URL",
        help="URL where the data was downloaded from (for reference)",
    )

    # ─── Context ───────────────────────────────────────────
    incident_id = fields.Many2one(
        "spp.hazard.incident",
        string="Incident",
        tracking=True,
        help="Link to incident/disaster event",
    )
    period_key = fields.Char(
        string="Period",
        tracking=True,
        help="Period key for historical tracking (e.g., '2024-03')",
    )

    # ─── Status ───────────────────────────────────────────
    state = fields.Selection(
        [
            ("draft", "Draft"),
            ("mapped", "Columns Mapped"),
            ("processing", "Processing"),
            ("done", "Completed"),
            ("failed", "Failed"),
        ],
        default="draft",
        required=True,
        tracking=True,
    )

    # ─── Statistics ───────────────────────────────────────────
    total_rows = fields.Integer(string="Total Rows", readonly=True)
    matched_rows = fields.Integer(string="Rows Matched to Areas", readonly=True)
    unmatched_rows = fields.Integer(string="Unmatched Rows", readonly=True)
    areas_updated = fields.Integer(string="Areas with Indicators", readonly=True)
    indicators_created = fields.Integer(string="Indicators Created", readonly=True)

    # ─── Column Mapping ───────────────────────────────────────────
    mapping_ids = fields.One2many(
        "spp.hxl.import.mapping",
        "batch_id",
        string="Column Mappings",
        help="Auto-detected HXL column mappings",
    )

    # ─── Results ───────────────────────────────────────────
    indicator_ids = fields.One2many(
        "spp.hxl.area.indicator",
        "batch_id",
        string="Generated Indicators",
    )

    error_log = fields.Text(string="Error Log", readonly=True)

    # ─── HXL Data Cache ───────────────────────────────────────────
    hxl_row_number = fields.Integer(
        string="HXL Row",
        help="Row number where HXL hashtags were detected",
    )
    hxl_columns_json = fields.Text(
        string="HXL Columns",
        help="JSON array of detected HXL columns",
    )

    @api.model_create_multi
    def create(self, vals_list):
        """Create batches and auto-detect columns when a file is provided.

        Uses the batch-create convention (``@api.model_create_multi``)
        expected for ``create`` overrides since Odoo 16; single-dict
        callers are transparently handled by the ORM.
        """
        batches = super().create(vals_list)
        for batch in batches:
            if batch.file_data:
                try:
                    batch.action_detect_columns()
                except Exception as e:
                    # Detection failure must not block batch creation;
                    # the user can re-run detection manually.
                    _logger.warning("Failed to auto-detect columns: %s", e)
        return batches

    def action_detect_columns(self):
        """Parse the uploaded file and auto-detect HXL columns.

        Stores the detected columns as JSON, creates mapping records and
        advances the batch to the ``mapped`` state.

        Raises:
            UserError: if no file is uploaded, libhxl is missing, or
                parsing fails (state is then set to ``failed``).
            ValidationError: if the file contains no HXL hashtags.
        """
        self.ensure_one()

        if not self.file_data:
            raise UserError(_("No file uploaded"))

        if libhxl is None:
            raise UserError(_("libhxl library not installed. Cannot parse HXL files."))

        try:
            # Decode file
            file_content = base64.b64decode(self.file_data)
            file_stream = io.BytesIO(file_content)

            # Parse with libhxl
            hxl_stream = make_stream(file_stream)

            # Find HXL row and extract tags
            hxl_columns = []
            # NOTE(review): libhxl does not expose the hashtag row index
            # here, so 0 is recorded — confirm if the real row matters.
            hxl_row_num = 0

            for idx, col in enumerate(hxl_stream.columns):
                hxl_columns.append(
                    {
                        "index": idx,
                        "header": col.header or "",
                        "tag": col.display_tag or "",
                    }
                )

            if not hxl_columns:
                raise ValidationError(_("No HXL hashtags detected in file"))

            # Store detected columns
            self.write(
                {
                    "hxl_columns_json": json.dumps(hxl_columns),
                    "hxl_row_number": hxl_row_num,
                    "state": "mapped",
                }
            )

            # Create mapping records
            self._create_mappings(hxl_columns)

            self.message_post(
                body=_("Successfully detected %d HXL columns") % len(hxl_columns),
            )

        except Exception as e:
            error_msg = _("Failed to detect columns: %s") % str(e)
            _logger.error(error_msg, exc_info=True)
            self.write(
                {
                    "state": "failed",
                    "error_log": error_msg,
                }
            )
            raise UserError(error_msg) from e

    def _create_mappings(self, hxl_columns):
        """Create mapping records for detected columns.

        Mapping type is inferred from the profile: the configured area
        column tag maps with full confidence; any tag referenced by an
        aggregation rule maps as ``aggregate``; everything else is skipped.
        """
        self.ensure_one()

        # Clear existing mappings
        self.mapping_ids.unlink()

        Mapping = self.env["spp.hxl.import.mapping"]

        for col in hxl_columns:
            tag = col.get("tag", "")

            # Determine mapping type
            mapping_type = "skip"
            confidence = 0.0

            # Check if this is the area column
            if self.profile_id.area_column_tag and tag == self.profile_id.area_column_tag:
                mapping_type = "area"
                confidence = 1.0

            # Check if it matches any aggregation rule
            for rule in self.profile_id.aggregation_ids:
                if rule.source_column_tag and tag == rule.source_column_tag:
                    mapping_type = "aggregate"
                    confidence = 0.9
                    break

            Mapping.create(
                {
                    "batch_id": self.id,
                    "sequence": col.get("index", 0),
                    "source_column": col.get("header", ""),
                    "detected_hxl_tag": tag,
                    "mapping_type": mapping_type,
                    "confidence": confidence,
                }
            )

    def action_process(self):
        """Queue the import for background processing.

        Validates preconditions (file present, columns mapped, profile
        valid) and enqueues :meth:`process_import` via queue_job.

        Returns:
            dict: client notification action.
        """
        self.ensure_one()

        if not self.file_data:
            raise UserError(_("No file uploaded"))

        if self.state != "mapped":
            raise UserError(_("Please detect columns first"))

        # Validate profile
        self.profile_id.validate_configuration()

        # Use job queue for processing
        self.with_delay(priority=5).process_import()

        self.write({"state": "processing"})

        return {
            "type": "ir.actions.client",
            "tag": "display_notification",
            "params": {
                "title": _("Import Started"),
                "message": _("Import is running in background. You will be notified when complete."),
                "type": "info",
                "sticky": False,
            },
        }

    def process_import(self):
        """Process the import (called by job queue).

        Parses the file, matches rows to areas, applies aggregation
        rules, stores indicators and syncs them to data values. On
        failure the batch is marked ``failed`` and the error re-raised
        so the job queue records it.
        """
        self.ensure_one()

        if libhxl is None:
            raise UserError(_("libhxl library not installed"))

        try:
            from ..services.aggregation_engine import AggregationEngine
            from ..services.area_matcher import AreaMatcher

            _logger.info("Starting HXL import batch: %s", self.name)

            # Decode file
            file_content = base64.b64decode(self.file_data)
            file_stream = io.BytesIO(file_content)

            # Parse with libhxl
            hxl_stream = make_stream(file_stream)

            # Build column mapping (tag -> header) from the cached detection
            hxl_columns = json.loads(self.hxl_columns_json) if self.hxl_columns_json else []
            column_map = {col["tag"]: col["header"] for col in hxl_columns if col.get("tag")}

            # Read rows into memory
            rows = []
            for hxl_row in hxl_stream:
                row_dict = {}
                for col in hxl_stream.columns:
                    tag = col.display_tag
                    value = hxl_row.get(col.display_tag)
                    if tag:
                        row_dict[tag] = value
                rows.append(row_dict)

            total_rows = len(rows)
            _logger.info("Parsed %d data rows", total_rows)

            # Initialize services
            matcher = AreaMatcher(
                self.env,
                strategy=self.profile_id.area_matching_strategy,
                level=self.profile_id.area_level,
            )

            engine = AggregationEngine(self.env, self.profile_id, matcher)

            # Aggregate
            indicators, unmatched = engine.aggregate(rows, column_map)

            _logger.info(
                "Aggregation complete: %d indicators, %d unmatched rows",
                len(indicators),
                len(unmatched),
            )

            # Store indicators
            Indicator = self.env["spp.hxl.area.indicator"]

            # Clear existing indicators for this batch (re-runs replace results)
            self.indicator_ids.unlink()

            indicator_records = []
            for ind_data in indicators:
                ind_data.update(
                    {
                        "batch_id": self.id,
                        "period_key": self.period_key,
                        "incident_id": self.incident_id.id if self.incident_id else False,
                    }
                )
                indicator_records.append(ind_data)

            created_indicators = Indicator.create(indicator_records)

            # Sync to data values
            created_indicators.sync_to_data_value()

            # Count unique areas
            areas_updated = len({ind.area_id.id for ind in created_indicators})

            # Update statistics
            self.write(
                {
                    "state": "done",
                    "total_rows": total_rows,
                    "matched_rows": total_rows - len(unmatched),
                    "unmatched_rows": len(unmatched),
                    "areas_updated": areas_updated,
                    "indicators_created": len(created_indicators),
                    "error_log": "",
                }
            )

            self.message_post(
                body=_("Import completed successfully: %d indicators created for %d areas")
                % (len(created_indicators), areas_updated),
            )

            _logger.info("HXL import batch completed: %s", self.name)

        except Exception as e:
            error_msg = _("Import failed: %s") % str(e)
            _logger.error(error_msg, exc_info=True)

            self.write(
                {
                    "state": "failed",
                    "error_log": error_msg,
                }
            )

            self.message_post(
                body=error_msg,
                message_type="notification",
            )

            raise

    def action_view_indicators(self):
        """Open the list of indicators generated by this batch."""
        self.ensure_one()
        return {
            "type": "ir.actions.act_window",
            "name": _("Generated Indicators"),
            "res_model": "spp.hxl.area.indicator",
            "view_mode": "tree,form",
            "domain": [("batch_id", "=", self.id)],
            "context": {"default_batch_id": self.id},
        }

    def action_reset(self):
        """Reset to draft: delete results, mappings and statistics."""
        self.ensure_one()
        self.indicator_ids.unlink()
        self.mapping_ids.unlink()
        self.write(
            {
                "state": "draft",
                "total_rows": 0,
                "matched_rows": 0,
                "unmatched_rows": 0,
                "areas_updated": 0,
                "indicators_created": 0,
                "error_log": "",
                "hxl_columns_json": "",
            }
        )
class HxlImportMapping(models.Model):
    """Column mapping for import batch.

    Auto-detected during file parsing, can be adjusted by user
    before processing.
    """

    _name = "spp.hxl.import.mapping"
    _description = "HXL Import Column Mapping"
    _order = "batch_id, sequence"

    # Parent batch; cascade delete so mappings vanish with their batch.
    batch_id = fields.Many2one(
        "spp.hxl.import.batch",
        required=True,
        ondelete="cascade",
        index=True,
    )
    # Sequence mirrors the column index detected in the file.
    sequence = fields.Integer(default=10)

    source_column = fields.Char(
        string="Source Column Header",
        help="Original column header from the file",
    )
    detected_hxl_tag = fields.Char(
        string="Detected HXL Tag",
        help="HXL hashtag detected in the file",
    )

    # What to do with this column
    mapping_type = fields.Selection(
        [
            ("area", "Area Identifier"),
            ("aggregate", "Aggregate Value"),
            ("filter", "Filter Criterion"),
            ("disaggregate", "Disaggregation"),
            ("skip", "Skip"),
        ],
        default="skip",
        required=True,
        help="How this column should be used in the import",
    )

    # Confidence of auto-detection (0.0 to 1.0)
    confidence = fields.Float(
        default=0.0,
        help="Confidence score of automatic mapping (0.0 = low, 1.0 = certain)",
    )

    def name_get(self):
        """Display mapping with context.

        Format: ``<header> [<hxl tag>] → <mapping type>``.
        """
        # NOTE(review): name_get() is deprecated in recent Odoo versions in
        # favour of _compute_display_name — confirm the target Odoo version.
        result = []
        for rec in self:
            name = f"{rec.source_column} [{rec.detected_hxl_tag}] → {rec.mapping_type}"
            result.append((rec.id, name))
        return result
class HxlImportProfile(models.Model):
    """Configuration for HXL data import with area aggregation.

    Defines how to match HXL data rows to geographical areas and which
    aggregation rules to apply for generating area-level indicators.
    """

    _name = "spp.hxl.import.profile"
    _description = "HXL Import Profile"
    _order = "sequence, name"

    name = fields.Char(required=True)
    code = fields.Char(required=True, index=True, help="Unique identifier for this profile")
    description = fields.Text(help="Description of what this profile imports and aggregates")
    sequence = fields.Integer(default=10)
    active = fields.Boolean(default=True)

    # ─── Area Matching Configuration ───────────────────────────────────
    area_matching_strategy = fields.Selection(
        [
            ("pcode", "P-code Match"),
            ("name", "Name Match"),
            ("gps", "GPS Coordinates"),
            ("fuzzy", "Fuzzy Name Match"),
        ],
        default="pcode",
        required=True,
        help="Strategy for matching HXL data rows to geographical areas",
    )

    area_column_tag = fields.Char(
        string="Area Column HXL Tag",
        default="#adm2+pcode",
        help="HXL tag for the area identifier column (e.g., #adm2+pcode, #adm3+name)",
    )

    area_level = fields.Integer(
        string="Target Admin Level",
        help="Administrative level to match areas at (e.g., 2 for District, 3 for Division)",
    )

    # For GPS matching
    latitude_tag = fields.Char(
        string="Latitude HXL Tag",
        default="#geo+lat",
        help="HXL tag for latitude column (used with GPS matching strategy)",
    )
    longitude_tag = fields.Char(
        string="Longitude HXL Tag",
        default="#geo+lon",
        help="HXL tag for longitude column (used with GPS matching strategy)",
    )

    # ─── Aggregation Rules ───────────────────────────────────────────
    aggregation_ids = fields.One2many(
        "spp.hxl.aggregation.rule",
        "profile_id",
        string="Aggregation Rules",
        help="Define how to aggregate HXL data to area indicators",
    )

    # ─── Default Context ───────────────────────────────────────────
    incident_id = fields.Many2one(
        "spp.hazard.incident",
        string="Default Incident",
        help="Optionally link imported data to a specific incident/disaster event",
    )

    # ─── Statistics ───────────────────────────────────────────
    batch_count = fields.Integer(
        string="Import Batches",
        compute="_compute_batch_count",
        help="Number of imports using this profile",
    )

    # SQL-level uniqueness on the profile code.
    _code_unique = models.Constraint("UNIQUE(code)", "Profile code must be unique")

    def _compute_batch_count(self):
        """Count import batches using this profile."""
        # One search_count per record; acceptable for the small number of
        # profiles typically configured.
        for rec in self:
            rec.batch_count = self.env["spp.hxl.import.batch"].search_count([("profile_id", "=", rec.id)])

    def action_view_batches(self):
        """Show all import batches for this profile.

        Returns:
            dict: window action listing batches filtered on this profile.
        """
        self.ensure_one()
        return {
            "type": "ir.actions.act_window",
            "name": _("Import Batches"),
            "res_model": "spp.hxl.import.batch",
            "view_mode": "tree,form",
            "domain": [("profile_id", "=", self.id)],
            "context": {"default_profile_id": self.id},
        }

    def validate_configuration(self):
        """Validate profile configuration.

        Checks that the area column tag is set, that GPS matching has
        both coordinate tags, and that at least one aggregation rule
        exists.

        Raises:
            ValidationError: if any check fails.

        Returns:
            bool: True when the configuration is valid.
        """
        self.ensure_one()

        if not self.area_column_tag:
            raise ValidationError(_("Area column HXL tag is required"))

        if self.area_matching_strategy == "gps":
            if not self.latitude_tag or not self.longitude_tag:
                raise ValidationError(_("GPS matching requires both latitude and longitude tags"))

        if not self.aggregation_ids:
            raise ValidationError(_("At least one aggregation rule is required"))

        return True
True diff --git a/spp_hxl_area/pyproject.toml b/spp_hxl_area/pyproject.toml new file mode 100644 index 00000000..4231d0cc --- /dev/null +++ b/spp_hxl_area/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git a/spp_hxl_area/readme/DESCRIPTION.md b/spp_hxl_area/readme/DESCRIPTION.md new file mode 100644 index 00000000..30e9923b --- /dev/null +++ b/spp_hxl_area/readme/DESCRIPTION.md @@ -0,0 +1,59 @@ +Import HXL-tagged field data and aggregate to area-level indicators for humanitarian coordination. Matches HXL data rows to geographical areas using P-codes, names, or GPS coordinates, then aggregates values according to configurable rules. Generated indicators sync automatically to `spp.data.value` for use in eligibility criteria and CEL expressions. + +### Key Capabilities + +- Import HXL-tagged CSV/Excel files with libhxl parsing +- Match data rows to areas using P-code, name, GPS, or fuzzy name strategies +- Aggregate numeric values using sum, count, average, min, max, or percentage operations +- Apply filter expressions to subset data before aggregation +- Disaggregate indicators by HXL attributes (e.g., +f, +m, +children) +- Track imports as batches with state machine (draft → mapped → processing → done) +- Auto-sync indicators to `spp.data.value` for CEL expression access +- Link imports to hazard incidents for disaster response tracking +- Process imports asynchronously via queue_job + +### Key Models + +| Model | Description | +| ---------------------------- | --------------------------------------------------- | +| `spp.hxl.import.profile` | Configuration defining area matching and rules | +| `spp.hxl.aggregation.rule` | Rule specifying what to aggregate and how | +| `spp.hxl.import.batch` | Track one execution of profile against HXL file | +| `spp.hxl.import.mapping` | Auto-detected column mapping (adjustable pre-run) | +| `spp.hxl.area.indicator` | Aggregated indicator value stored per area | +| 
`spp.hxl.area.import.wizard` | Wizard for uploading files and previewing matches | + +### Configuration + +After installing: + +1. Navigate to **HXL > HXL Area > Configuration > Import Profiles** +2. Create a profile specifying area matching strategy and admin level +3. Add aggregation rules defining which columns to aggregate and how +4. Optionally link to a hazard incident for disaster response tracking + +### UI Location + +- **Menu**: HXL > HXL Area > Import HXL Data +- **Batches**: HXL > HXL Area > Import Batches +- **Indicators**: HXL > HXL Area > Area Indicators +- **Configuration**: HXL > HXL Area > Configuration > Import Profiles +- **Profile Form Tabs**: "Area Matching", "Aggregation Rules" +- **Batch Form Tabs**: "File", "Column Mapping", "Statistics", "Indicators", "Error Log" + +### Security + +| Group | Access | +| ------------------------------ | ------------------------------------------- | +| `spp_security.group_spp_admin` | Full CRUD on profiles, rules, indicators | +| `base.group_user` | Read profiles/rules; create/edit batches | + +### Extension Points + +- Inherit `spp.hxl.import.profile` to add custom area matching strategies +- Override `spp.hxl.area.indicator.sync_to_data_value()` to customize CEL variable mapping +- Extend `spp.hxl.aggregation.rule` to add custom aggregation functions + +### Dependencies + +`spp_hxl`, `spp_area`, `spp_cel_domain`, `spp_hazard`, `spp_security`, `queue_job` diff --git a/spp_hxl_area/security/ir.model.access.csv b/spp_hxl_area/security/ir.model.access.csv new file mode 100644 index 00000000..de2557b6 --- /dev/null +++ b/spp_hxl_area/security/ir.model.access.csv @@ -0,0 +1,13 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_hxl_import_profile_manager,spp.hxl.import.profile manager,model_spp_hxl_import_profile,spp_security.group_spp_admin,1,1,1,1 +access_hxl_import_profile_user,spp.hxl.import.profile user,model_spp_hxl_import_profile,base.group_user,1,0,0,0 
+access_hxl_aggregation_rule_manager,spp.hxl.aggregation.rule manager,model_spp_hxl_aggregation_rule,spp_security.group_spp_admin,1,1,1,1 +access_hxl_aggregation_rule_user,spp.hxl.aggregation.rule user,model_spp_hxl_aggregation_rule,base.group_user,1,0,0,0 +access_hxl_import_batch_manager,spp.hxl.import.batch manager,model_spp_hxl_import_batch,spp_security.group_spp_admin,1,1,1,1 +access_hxl_import_batch_user,spp.hxl.import.batch user,model_spp_hxl_import_batch,base.group_user,1,1,1,0 +access_hxl_import_mapping_manager,spp.hxl.import.mapping manager,model_spp_hxl_import_mapping,spp_security.group_spp_admin,1,1,1,1 +access_hxl_import_mapping_user,spp.hxl.import.mapping user,model_spp_hxl_import_mapping,base.group_user,1,0,0,0 +access_hxl_area_indicator_manager,spp.hxl.area.indicator manager,model_spp_hxl_area_indicator,spp_security.group_spp_admin,1,1,1,1 +access_hxl_area_indicator_user,spp.hxl.area.indicator user,model_spp_hxl_area_indicator,base.group_user,1,0,0,0 +access_hxl_area_import_wizard_manager,spp.hxl.area.import.wizard manager,model_spp_hxl_area_import_wizard,spp_security.group_spp_admin,1,1,1,1 +access_hxl_area_import_wizard_user,spp.hxl.area.import.wizard user,model_spp_hxl_area_import_wizard,base.group_user,1,1,1,1 diff --git a/spp_hxl_area/services/__init__.py b/spp_hxl_area/services/__init__.py new file mode 100644 index 00000000..84708bf6 --- /dev/null +++ b/spp_hxl_area/services/__init__.py @@ -0,0 +1,4 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import area_matcher +from . import aggregation_engine diff --git a/spp_hxl_area/services/aggregation_engine.py b/spp_hxl_area/services/aggregation_engine.py new file mode 100644 index 00000000..e69429c8 --- /dev/null +++ b/spp_hxl_area/services/aggregation_engine.py @@ -0,0 +1,355 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
+ +import json +import logging +from collections import defaultdict + +_logger = logging.getLogger(__name__) + + +class AggregationEngine: + """Engine to aggregate HXL rows by area according to rules. + + Takes parsed HXL rows and applies aggregation rules to generate + area-level indicators. + """ + + def __init__(self, env, profile, area_matcher): + """Initialize aggregation engine. + + Args: + env: Odoo environment + profile: spp.hxl.import.profile record + area_matcher: AreaMatcher instance + """ + self.env = env + self.profile = profile + self.matcher = area_matcher + + _logger.info( + "Initialized AggregationEngine for profile: %s", + profile.name, + ) + + def aggregate(self, rows, hxl_columns): + """Aggregate rows and return area indicators. + + Args: + rows: List of dicts, each representing one HXL row + hxl_columns: Dict mapping HXL tags to column headers + + Returns: + tuple: (indicators, unmatched_rows) + - indicators: List of indicator dicts ready for creation + - unmatched_rows: List of rows that couldn't be matched to areas + """ + _logger.info("Starting aggregation of %d rows", len(rows)) + + # Group rows by area + area_rows = defaultdict(list) + unmatched = [] + + # Identify area column + area_col_tag = self.profile.area_column_tag + lat_col_tag = self.profile.latitude_tag if self.profile.area_matching_strategy == "gps" else None + lon_col_tag = self.profile.longitude_tag if self.profile.area_matching_strategy == "gps" else None + + # Match each row to an area + for row in rows: + area_value = row.get(area_col_tag, "") + lat = row.get(lat_col_tag) if lat_col_tag else None + lon = row.get(lon_col_tag) if lon_col_tag else None + + # Try matching + area = self.matcher.match(area_value, lat, lon) + + if area: + area_rows[area.id].append(row) + else: + unmatched.append(row) + _logger.debug( + "Could not match row to area: %s (lat=%s, lon=%s)", + area_value, + lat, + lon, + ) + + _logger.info( + "Matched %d rows to %d areas, %d unmatched", + len(rows) - 
len(unmatched), + len(area_rows), + len(unmatched), + ) + + # Apply aggregation rules per area + indicators = [] + + for area_id, area_rows_list in area_rows.items(): + for rule in self.profile.aggregation_ids: + if not rule.active: + continue + + try: + result = self._apply_rule(rule, area_rows_list, hxl_columns) + + indicators.append( + { + "area_id": area_id, + "rule_id": rule.id, + "variable_id": rule.variable_id.id if rule.variable_id else False, + "value": result["total"], + "value_count": result["count"], + "disaggregation_json": json.dumps(result.get("disaggregation", {})), + "hxl_tag": rule.output_hxl_tag, + } + ) + + except Exception as e: + _logger.error( + "Failed to apply rule %s for area %s: %s", + rule.name, + area_id, + e, + exc_info=True, + ) + + _logger.info("Generated %d indicators", len(indicators)) + + return indicators, unmatched + + def _apply_rule(self, rule, rows, hxl_columns): + """Apply a single aggregation rule to rows. + + Args: + rule: spp.hxl.aggregation.rule record + rows: List of row dicts for this area + hxl_columns: Dict mapping HXL tags to headers + + Returns: + dict with keys: total, count, disaggregation (optional) + """ + # Filter rows if expression provided + filtered_rows = rows + + if rule.filter_expression: + filtered_rows = [] + for row in rows: + try: + # Evaluate filter expression + # WARNING: eval() is dangerous - in production use a safe expression evaluator + if self._eval_filter(row, rule.filter_expression): + filtered_rows.append(row) + except Exception as e: + _logger.warning( + "Failed to evaluate filter for row: %s", + e, + ) + + # Get source column if needed + source_col_tag = rule.source_column_tag + + # Apply aggregation + if rule.aggregation_type == "count": + total = len(filtered_rows) + + elif rule.aggregation_type == "sum": + if not source_col_tag: + _logger.warning("Sum aggregation requires source_column_tag") + total = 0 + else: + total = sum(self._to_float(row.get(source_col_tag, 0)) for row in 
filtered_rows) + + elif rule.aggregation_type == "avg": + if not source_col_tag: + _logger.warning("Average aggregation requires source_column_tag") + total = 0 + else: + values = [self._to_float(row.get(source_col_tag, 0)) for row in filtered_rows] + total = sum(values) / len(values) if values else 0 + + elif rule.aggregation_type == "min": + if not source_col_tag: + _logger.warning("Min aggregation requires source_column_tag") + total = 0 + else: + values = [self._to_float(row.get(source_col_tag, 0)) for row in filtered_rows] + total = min(values) if values else 0 + + elif rule.aggregation_type == "max": + if not source_col_tag: + _logger.warning("Max aggregation requires source_column_tag") + total = 0 + else: + values = [self._to_float(row.get(source_col_tag, 0)) for row in filtered_rows] + total = max(values) if values else 0 + + elif rule.aggregation_type == "count_distinct": + if not source_col_tag: + _logger.warning("Count distinct requires source_column_tag") + total = 0 + else: + distinct_values = set(row.get(source_col_tag) for row in filtered_rows) + total = len(distinct_values) + + elif rule.aggregation_type == "percentage": + total = (len(filtered_rows) / len(rows) * 100) if rows else 0 + + else: + _logger.warning("Unknown aggregation type: %s", rule.aggregation_type) + total = len(filtered_rows) + + result = { + "total": total, + "count": len(filtered_rows), + } + + # Handle disaggregation + if rule.disaggregate_by_tags: + result["disaggregation"] = self._disaggregate( + filtered_rows, + rule, + hxl_columns, + source_col_tag, + ) + + return result + + def _disaggregate(self, rows, rule, hxl_columns, source_col_tag): + """Disaggregate values by HXL attributes. 
+ + Args: + rows: Filtered rows to disaggregate + rule: Aggregation rule + hxl_columns: Column mapping + source_col_tag: Source column tag + + Returns: + dict mapping disaggregation attributes to values + """ + disagg_tags = [tag.strip() for tag in rule.disaggregate_by_tags.split(",")] + + result = {} + + for tag in disagg_tags: + # Count rows that have this attribute + matching_rows = [row for row in rows if self._has_attribute(row, tag, hxl_columns)] + + if rule.aggregation_type == "count": + result[tag] = len(matching_rows) + elif rule.aggregation_type in ("sum", "avg", "min", "max"): + if source_col_tag: + values = [self._to_float(row.get(source_col_tag, 0)) for row in matching_rows] + if rule.aggregation_type == "sum": + result[tag] = sum(values) + elif rule.aggregation_type == "avg": + result[tag] = sum(values) / len(values) if values else 0 + elif rule.aggregation_type == "min": + result[tag] = min(values) if values else 0 + elif rule.aggregation_type == "max": + result[tag] = max(values) if values else 0 + else: + result[tag] = 0 + else: + result[tag] = len(matching_rows) + + return result + + def _has_attribute(self, row, attribute, hxl_columns): + """Check if a row has a specific HXL attribute. + + This is simplified - in production you would check all columns + for the attribute marker. 
+ + Args: + row: Row dict + attribute: HXL attribute (e.g., '+f', '+m') + hxl_columns: Column mapping + + Returns: + bool + """ + # Normalize attribute (remove leading + if present) + attr_normalized = attribute.lstrip("+") + + # Look for columns with this attribute + for tag in hxl_columns.keys(): + # Split tag into hashtag and attributes: #affected+f+elderly -> ['#affected', 'f', 'elderly'] + parts = tag.split("+") + attributes = parts[1:] if len(parts) > 1 else [] + + if attr_normalized in attributes: + value = row.get(tag) + # Consider non-empty, non-zero values as having the attribute + if value and str(value).strip() and str(value) != "0": + return True + + return False + + def _eval_filter(self, row, expression): + """Evaluate a filter expression against a row. + + Uses CEL (Common Expression Language) for secure expression evaluation. + CEL provides a sandboxed environment with no access to system resources. + + Expression examples: + - row.get('#affected') > 100 + - row.get('#status') == 'confirmed' + - int(row.get('#population', 0)) >= 1000 + + Args: + row: Row dict with HXL column values + expression: CEL expression to evaluate + + Returns: + bool + """ + try: + # Build context for CEL evaluation + context = { + "row": row, + "get": row.get, + } + + # Use CEL service for secure expression evaluation + cel_service = self.env["spp.cel.service"] + result = cel_service.evaluate_expression(expression, context) + return bool(result) + + except Exception as e: + _logger.warning( + "Failed to evaluate filter expression '%s': %s", + expression, + e, + ) + return False + + @staticmethod + def _to_float(value): + """Convert value to float, handling various formats. 
+ + Args: + value: Value to convert + + Returns: + float + """ + if value is None: + return 0.0 + + if isinstance(value, (int, float)): + return float(value) + + if isinstance(value, str): + # Remove common formatting + value = value.strip().replace(",", "").replace(" ", "") + + if not value: + return 0.0 + + try: + return float(value) + except ValueError: + _logger.debug("Could not convert '%s' to float", value) + return 0.0 + + return 0.0 diff --git a/spp_hxl_area/services/area_matcher.py b/spp_hxl_area/services/area_matcher.py new file mode 100644 index 00000000..e325d84b --- /dev/null +++ b/spp_hxl_area/services/area_matcher.py @@ -0,0 +1,233 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import logging +import re + +_logger = logging.getLogger(__name__) + + +class AreaMatcher: + """Service to match HXL area values to spp.area records. + + Supports multiple matching strategies: + - pcode: Exact match on area code + - name: Case-insensitive name match + - gps: Geographic coordinate lookup + - fuzzy: Fuzzy name matching with normalization + """ + + def __init__(self, env, strategy, level=None): + """Initialize area matcher. + + Args: + env: Odoo environment + strategy: Matching strategy ('pcode', 'name', 'gps', 'fuzzy') + level: Optional area level filter + """ + self.env = env + self.strategy = strategy + self.level = level + self._cache = {} + + _logger.info( + "Initialized AreaMatcher with strategy=%s, level=%s", + strategy, + level, + ) + + def match(self, value, lat=None, lon=None): + """Match a value to an area. + + Args: + value: Area identifier (pcode, name, etc.) 
+ lat: Latitude (for GPS matching) + lon: Longitude (for GPS matching) + + Returns: + spp.area record (empty recordset if no match) + """ + # Build cache key + cache_key = f"{self.strategy}:{value}:{lat}:{lon}" + + if cache_key in self._cache: + return self._cache[cache_key] + + # Perform matching + area = self._do_match(value, lat, lon) + + # Cache result + self._cache[cache_key] = area + + return area + + def _do_match(self, value, lat, lon): + """Perform actual matching logic. + + Args: + value: Area identifier + lat: Latitude + lon: Longitude + + Returns: + spp.area record + """ + Area = self.env["spp.area"] + + # Base domain + domain = [] + if self.level is not None: + domain.append(("level", "=", self.level)) + + if self.strategy == "pcode": + # Exact code match + if not value: + return Area.browse() + + domain.append(("code", "=", value)) + return Area.search(domain, limit=1) + + elif self.strategy == "name": + # Case-insensitive name match + if not value: + return Area.browse() + + domain.append(("draft_name", "ilike", value)) + result = Area.search(domain, limit=1) + + # Fallback to alternate names + if not result: + domain[-1] = ("altnames", "ilike", value) + result = Area.search(domain, limit=1) + + return result + + elif self.strategy == "gps": + # Geographic coordinate lookup + if lat is None or lon is None: + return Area.browse() + + try: + lat_float = float(lat) + lon_float = float(lon) + except (ValueError, TypeError): + _logger.warning("Invalid coordinates: lat=%s, lon=%s", lat, lon) + return Area.browse() + + # Find area containing this point + # This requires a geographic lookup - simplified version + # In production, this would use PostGIS or similar + return self._find_area_by_coordinates(lat_float, lon_float, domain) + + elif self.strategy == "fuzzy": + # Fuzzy name matching + if not value: + return Area.browse() + + normalized = self._normalize(value) + + # Try exact normalized match first + domain.append(("draft_name", "ilike", normalized)) 
+ result = Area.search(domain, limit=1) + + if result: + return result + + # Try partial match with wildcards + domain[-1] = ("draft_name", "ilike", f"%{normalized}%") + result = Area.search(domain, limit=1) + + if result: + return result + + # Try alternate names + domain[-1] = ("altnames", "ilike", f"%{normalized}%") + result = Area.search(domain, limit=1) + + return result + + else: + _logger.warning("Unknown matching strategy: %s", self.strategy) + return Area.browse() + + def _find_area_by_coordinates(self, lat, lon, base_domain): + """Find area containing given coordinates. + + This is a simplified implementation. In production, you would use + PostGIS ST_Contains or similar spatial functions. + + Args: + lat: Latitude + lon: Longitude + base_domain: Base domain for filtering + + Returns: + spp.area record + """ + Area = self.env["spp.area"] + + # This is a placeholder - in production you would use spatial queries + # For now, return empty recordset + _logger.warning( + "GPS-based area matching not fully implemented. " "Requires PostGIS or similar spatial extension." + ) + + return Area.browse() + + def _normalize(self, value): + """Normalize area name for fuzzy matching. + + Removes common suffixes, converts to lowercase, strips whitespace. + + Args: + value: Area name to normalize + + Returns: + Normalized string + """ + if not value: + return "" + + # Convert to lowercase and strip + value = str(value).lower().strip() + + # Remove common administrative suffixes + suffixes = [ + "district", + "division", + "province", + "ward", + "city", + "municipality", + "town", + "village", + "county", + ] + + for suffix in suffixes: + # Remove suffix with word boundary + pattern = r"\s+" + suffix + r"$" + value = re.sub(pattern, "", value, flags=re.IGNORECASE) + + # Normalize whitespace + value = re.sub(r"\s+", " ", value).strip() + + return value + + def get_stats(self): + """Get matching statistics. 
+ + Returns: + dict with cache stats + """ + return { + "strategy": self.strategy, + "level": self.level, + "cache_size": len(self._cache), + "cache_hits": sum(1 for v in self._cache.values() if v), + "cache_misses": sum(1 for v in self._cache.values() if not v), + } + + def clear_cache(self): + """Clear the matching cache.""" + self._cache.clear() + _logger.debug("AreaMatcher cache cleared") diff --git a/spp_hxl_area/static/description/icon.png b/spp_hxl_area/static/description/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..c7dbdaaf1dace8f0ccf8c2087047ddfcf584af0c GIT binary patch literal 15480 zcmbumbyQqU(=SR05Hz?GTnBdsm*BxQ_yEH|aCf%^cefzHo!|s_cXxLSE;*Cueee5y z-&tp!b=SRr%%17JtE+c)byrpYs^*)rqBI&Z5i$%644SOWM^)(ez~2ud0`yw0U6BR- zLb8+j><9z%zUS}fO(NraVi*{>9t(ACCvAmK{3f>6EFe=`V=#-GwH=fi21ZcC%?@N@ z33ehk216`tgy_y&+UdwGOoiyQxE0tG>?FYE7BU_VU^Nd#brTOu6QC)bh%mCC8$XnR zHP{J6?q+Red4B@!uI#I$jJr&Mb9s0>iD<$ zuR+wn_Wv~g)v~hqXCyn2gCkho-3}~7rwVqob#^cT|HI*Lr++h%Z~%jxz^1|+Y#iLo zY(Qpqpdjo2_UP{z|J6a#%}Lf&m<$tn~GKO;D=HTYw;RdpEvGW4C`Plx`;h%^9lV07{*~I*>D8d~7A^Wd; z|IiAu{+(Sbi+@eZKaGFS%71$NYs&sb_}|p>|6Wz5CjU{BowI}0KTE*WgcWQBwg%fc z{Z$hCzm;Ta!tZ3^WCi{&6^U6n{ZAD^*B-wW$Oa-r=f-RbHUl|ZInfDg*!P87jt$pw{;L! 
zurM(Pfvw2pY|U-RrEP6IKvrN!!N2tX4+V7f|D%KdPxB1jp8uKX|M5a@AiMvz6QE@L z|EyqJ2X$LpD`5$cjSGmJUKMO(3U&ZHFp!(tnh1RqllIVYQ3J`EJCZv)f*pi3#3YP4 zY;_;(mw~W(F*95)Y)WYoZkRgrLS)eSvJR)Y$S4!fK zScE24BMTw?G63}=yN?Nr!v4s(L#bh+ z0QHoB|LYajx?X9+TnwfJwuDj{M>z;4bu|DB7H;cherVEncj0{^h73csRh5-&U)E;4 zNLVpq{=h+rsFoNmYz*8AfN`m{D6C^2%WV~zRAFNZuAXKcKMErci*PnF0ZSfM)erUu zjcjUMJ_wuF3RSJ9O~@Z4hhap;#(_0ma`J>1A0~<{s?m|hcz{e!L&u6Tp}I}Ep<>4f zOJS|^MQ_DPOkz?*AhrH}k<9ZOEt4`FAyRDqXjTP|E_#oO27Gr&f`y5OM@B1VqH_ES zCTweSMCx}a*0xU}@o6fA8_gjjy z2Q57xXmg+m(g6q!aM8mCkithJ--tyXkCjku;FTF{?B>(>FABGzSGUggUumv`+C6Ow zvd1XmI~#j#dG0vl>e;QtxGX?gJsdQ+{-4BuDt%|kxthFj<_dORK@Rc;K*$U=E~?kF zJ$(-vwj?T<5%x2c(fneoKTjS|rpBh!8`&y_y)z)7Hj@j%)+~SkVR8K<@`g&WZjo&G z8?wNoqyeOzOEhl;E4C^_e6^7aF#Fx~(z-&NxzGQQC}?L?Gl>qxwKg;MZTpfMvw^V{ zmT;>h9A?JFxNyIC1IPqQldk82>?{LtnMt2Xo$HmXr3gvbffJCJF_|;ZU)lTX#2_{h zNT=4@taez10pm@hvzTLIAAD(`*Y6XZr7!w3a5sy>KWlOvJ92!fyI0Yjt7_+Syy+$Q z9i0@K!{?>N+F!J-sDJMIV zySlF4rF1c1>K1)CaHBkwkwVV z_lfaZhdgZH%&PK>eJxwrWn!sr5&Gc_9Cr|XDCGA_XN{>#)>Qgl3%Uyi`^M@mPTT`? zf;&`{13;P8O-+u@Hlr4IZO)ivM_w*HE{G3gydPIhU7gTd{}##Tw;S&&d-&?A1qaWy zLlnn3TyAMVFPcpfZ`1wMt^$+g?Z(_ki{MSWsfo#KTB33CzU=9qQnoXtdS(mcmLjCY zalOGBnh*x}*Hy&3cD8}2EUr+55qEqP9$UCvz=o=kb9%C^{(Ki9<6A_yTJAVGBAyn3 zIGGLv4!o55o*J5V_xfbsyPk=kC$C`%S6?3qh!N5V(<2M#9p=&i>al1cGc#6pd37`_ z3RMpN=*|e9{nd~zZKGX@%J-K$=_&@x#D$&<8NApJ?i3jM!5X8abIiAPla~}@BE@Ep zytt_iw|xY%OQxngqE(gy8xY@vUMZuc7&hw5I)$M+5$X^P z;i3S7-Tgw2w#pV1R->>O;O~UyyX#p3>DD8rfL3FNO@kS@Uw?F5(eln`lA5WMkAVwk z6(1gr5%VDf8>tN;vdaPZYs8yBSJ^oba~WDr`qr8Oh#ok4VLQ3lrJrZ_Xm(T@FM0qa z&kxcByGv0F-Fx%t@9vZ7JP$}yAKpn-r^LhBTLwsS1J)bs6T{~SIQ6H$7qanXOrs1*Z5c~M%>RPFWj8X;g2@Lhm?HnEOmg0If6exM<_Fa9>!5P zv6(xpC9c)Yz1{ue6}vOIV(QK_dbu(^ad>yOhx?(?cWg0n`J-318#Q=eVZOiuW}A1? 
z=YKkEE?wkr+3_PaFv)gRxm)xjwl4{Gcz$5;$RixdVH2Ds+=H?$xTUn`QZ<#!D zWRP4okEG?OLnjctlnTlg5)kz*Yn=}m<^joJPN)}L??y(J86Fk_PaZ`{q?IKql37h; zDKAk4_|={_s%_q*rZ}MznUn?=QC9T$A!MnV>~b~n=uXQdTx6` z)C4lw2Vd8?lJqhAV%eA%mg9eTcNjsG(q@@$etAi9{uE1m1hj1!jelwHV;%czJVoYcrZ=vANJHDiH$G) zek&XC9nl=^c*OxElr7lsK6+aN5c^^)p0n;58u$EC`TpvB9KEV=zK9QdPpmKCHANCK zliMaTnv1|oI8A%NctUtQg)_&D9wYY|Iwm&nkURyL3PVzKxQI{K6C{+zFGk`XQGDw} zv$z(!mCfUPd6h*?RowKmNy|p2Mri1laA2VU*^f5fL8Ne4IPc)ybITH=)f$-My53); zfsHD{N>w!&UkTyOxD>>Ey0g^%;L)A?P_Nyhcd+dwhH5DN?-^*`{IEk;(NK z+#s-OPFRbbX|Uo9=Y@)pgD@SCE!UCmYYVmF+$i4Kgz2lR3|L_DxX-u)DSS39jaf=r zT6deEL2ULQJHvU~(|2vtWZ zLueKkQ*#|Bj9fi4c9{)Y&z^&}>=~e5Y-HCkQ7Mw zXCH5+<@YAqb|zki@0M(%ccdpqTJ62ZPg~bZ9%dCF9k!S%_lroxG?x3NpXG4ZBn}!6 z+=_Y!1xqxCN~6zvXAyVg)}YKk4ib#`<>h_p{S$I>vi*LYB5ST+3mf_t)@{}Ih`};0 z29&^wWHWl>8kd64(wY}#hrVQAh&s7gbeHd|IZAStUZ&PSb3$B{PvD=+ zQkSe%LJ0K>h&Kj#S8^)h9GXvu0IZ=3Z>3DSi8{T;a z0b*muMkNGwF;o1RwtCZDg#97P8vE(~`hga&m%k(gTR6qI^gs7yTIO@ay}Te)Hx6Eg zd%2g}G&u)zqqNrD5nG*q8XFK&z9RjIS(Q6DYG^p!6>M30Ef+5|le|Ud>m9T2((_H@ zmT!+5i$HN{<G+1EEoc4AS9vm>QDZpO>K6M{G^b)txOnqNOvTfV zwR^y>(e?%b$$pu79ydu6M>3?3(>(2u(=dN7HK{92%u6nm^iDzS@)?5XBIF{B#CklVg~i#wA$0R9A~jYSgt2E^Wysxcp!2- zJy+&-mzNYaZTSq9cjqTE4)av2f-f$0H4?(;)nFcK>Cqg8V1?|=v!Y(*^*0|9I;_Rhhiwc^cQM&I zs2P#p?_{f-yhS#$Z%c?knJ_g7Zhv%L*{tf?J?E8j94bImWV|QMY5x(sTCL_62EdT)xWZ#KY;8qi zzh&-cv3YOkp`;b}=k-{kwTe#GjC6kh`OVE6++^#^n`2$=$t@u!WTiOfEEDax{k6!e z@X;4kniF^87>l=U_UXRvHKDfp>vDPBi03g%yHSkk525SM)oqOWGqYp4$RD*p_K`zZ zX5;Tx^`n&DE+;ujb3D5nIv6Mom3jfVZ5mIfq!jf|AhPk0p*BCT0x8R9-BE8{1h;FQswTy?v#0}-38B!kczy{x;$7!io^DZ=IcJY##vEYDk$eMl;r^~T9QM) zQtubaNKNtRwxEV=;ce#Z4d5>nKyB3}bT9N~-_eBgFflJtua+a>1#3WkFbOfK>wALd zZQJFC>tFY+A8cE=I=Kr&9)?klwAYSC8EBln7`QBc`8b2H&Uw!rU@nG`1p+M z_PaAlj^s@QS_#v-S7a>mvT=DTFWy=ZjjGOXi5cF@lwE;85aI6_m*ok~r?Q!5Pm%ZT?$+H*@!&OVYR1ei_3V-7Rug|y! 
z6$Mw3zfY~M&=eRqCgXBTaB?UI^f`~CMbB=}$Mp5L0V>1!a|Lt#a+4g!0f$6;UDKhZ zlL^j^u4Vmh%}jY4)Cwro5tJ1AQGq1f_B}RfX)D2nMS91)Y;HB$dH?2hjtC#Za)<9l z3Xk+rZ6knNtjm9pc2D}(wY6@|ZX5l(cbwO2oUZoqp~U011TV#IhMJfGfJ%N_y5pEr z$$IA>?#}aHx9?aiZ|z18x!q7sz$jnVblQi|AhW85+>7y6btIi|OvFBI?tT(4eXVCg zeP8}0!iu@r=PR>rJ3wq*!=CC<_ihZL5#EG)I$$%%kh7e$zQ1S@xv6Or7!_P&%MPMk zACVS&BE)NLV(qN8MOV5C`xbf8IbN#MmeEcdWYA$OwFX;!1z7PC6DoHe>+fVejhMzC z1S8qnm<(G9MXIvx3DE3&Qo+7^LNi#xb$$M2LL^jXh)cbb3h%G(i91(WK}lj~^MOAm zA?4cXvn!=%bKJ^P|1)ix8c1H28Z^2L({~B=9);^+7Yn7*L|+tIAJG4NPUMk$gC5&z zQeEbR@FbxHdE`+3^XSBSPAWGx5R7Z8yZbLJA~9Q9x(L@tqt{q61Em+ikqTux8^kZ8DQrK4FB3r5Qx$xHG!>D| zA6?vk{*>E?Mj18vgMk%hzN`ZwTFY1ltHNF5S%);i;&*l-ACcsI3pnD=iX?}s!s}HC z1As^77XFUGAm4O;CtDdaLT6%hOQ>4n&pujtYU7jL7onxKBM-_>lW}>$dS5% z{BRX)SUzjTUq2m{I3;m4ULG3n!EI@PR04_rJlShCF+6IG-&{VfY0G+|OLpY);~Tcs ze2Y)Mw|IXXzocJ3+sL=yh{1EwAusXV3dh~TOl+|FVY|@xU{j6Ef?(e4;reCW_43yL z<76IskRMUIl)Uop?JzOW;#+p#(crQzC^Ot~KFDqBhT`=!Rk%4%b1(y9h4j`weN&J! zbyYm>{7aU7#kdNy2Zqx-hUyr=|4NbL%;CXS<-w%jL)X z(3_2Lz*r;mD9!Y`&iV2=x+?sNv)b*Cwn}{YDuYzmi4vn!c+r}V?AzoFZAreI-4!3+ zY{Td}nm@04BAKyM->B1)oKRD#r|^W|jYVjcSAs1YI=xx>$jpFe*KbLKby=*pW)eFs z3ZSXO09)sD}&}V6ipbE(Y~?r$YTn{V-9};R(?Z6wH9Dqxnt8t&~=!h3e%FyMY4}MkN68X-2kX^|Im5y$c6sN{v&x4l_54O-p{PrDCP` zpOp-`$#WIx;mb_%^9f@!#b^Gv=)X8dl(G-ESKr#_UVal#eY9!`MLqLs4DUCH##vQR z*2n?o*KjGB*u!M&?xGOuHa@Hn5s811Ma6+Zz~-qI^cWAxkz$M9EYF+65Y<;MSmJ$H zrmYW$Ykr63;#?@3U~a9Yw$VB(W+T|LSC!M@RS~PJ#aBNlsh@MN)U_GZ+y4ALdVH-Z zeZ7rMl*xi!f6B*qX6Hr-YTWI3@7e|R;u4nUs>YIecpOF-fke*=0lHfETe!@N?>>DK zH=;xe|L}n!7YQPC**{jgAE6=E{~Z{`{~?;C(Z&12K1p^KRB#YWTRU?2RV!>AocDk%*gKH;(HiW`{1C zLgUncZHb`P0zyddG&COjHi2(%mgVv|gu%=`hPvnQickVe$8=lkQe4}&0*&it^=Vd~ zVz5rO$n;=raC-!!5NB|-XZOI{gu$ai!cKY`c7x4qn^>9w9*^aS`tLIdSOvMcwHy)z zisz9h?)wgaHN^ZNO1m|OBga`a*37=gS%}sQp9b3`#|ZInRQKnNUU+Pz_?9%$FWdS@ zDK<8SL9C$=vFNfCZZ*J(vU|VM+)OqeUmu(7t6G4CEYvRUzK*`Qc@f3dneu^f+iG!g zxv+3dL+uJwWvD@yd7%RLmAuTRViISB>GdFBTIdcF28A`w;mJ|!FUG!hkwvww>N>lf z{H={Dx0PPqaV^{;baO8&Z#4W&_23HA>#O7j4>~jvphax5{G4W932b+Oq40dauN4&f 
zHNyo<4ks5vV~{U|A^h&ku)Ss;0}g#CCAB3 zx!5?ck zw{=3Qkp*j2pk4kf)hQYui~#aNqul$soANTlEt(Bg?n5v;dVgpctq zgK8zA*my$SKTIf^aU6WAcAVx*VfEg7ZkR4Xkr@Rqgp~nl)WKhG;{9Wdad0u6&{I#2 zxKYvs;M&vr=pb8WY#((GbJMo#x zxUcc)yW;DGO<4}gi6di1&45IQZgY_)!A;*)F;lrKSVH5fXFw*)gR$$6cTNB0*>AV^ zw*?Qj?T1Fkol|$DCNdN;)9*Q?6o(#96gu%a7X>rtoCf7n-ECFW5M|6Fal%oQ_HyFT88UEWBj-cYRmoJO?h1i zO8Pb`owZMsyI;28tb{Eo<>GSuU*PNNxjvSV(T~f_NvO^Dd~+Bv4RFyUso1bz_tFj% zCD1oMN-R7Ol)jcmv3xpONAc4_)~6O6({Dh!!AVxU&q++=$T73FoVhi&?s_pYN1!5s zSLaZGTy$Mp1n=}=+x6NJ7#4%I%HoA<%SY4XdQFZO;2iFiQP0678T*1q9`dllr^)b=7CHG-dsj-%14Er*pm zRd^>8M#r;=H+aYIt_QD=wbxFhWWMQQ>)ENMK;y%e z-Iu6Jt^6|6l4x)u>Ylp;h!pn4O+sEjgtk(?U5Hp84IOs(ACPd#;dKgps1N!cG}yQ-Gvsh`Zg?5UQf#j}u^uV0^fBdXFH8Osx2Rn>nD?ts=VM5s(?3r8fR! zJ`WX_!j}fLK<(%2=>n7ezAMSisdM;Al^QJ_vPLj;mPAD$I~PIuyU==s!xUY zodiCv+RDXwU$axLZtbz}8BHq_1XqHo-^Kx4+f%NMl&->(9MD7SO zj&Z#}?1hK1F$*vE4Hl-52+kbud@c@%{KDPxs}pYe1D656Fec#qx9+xdyZ42hGFio=?^)UY_>^ z(>JtY69@hM-~dl%4gVj2NS%f*G|0Te8IlHlUZ{1k{U#Aat)_Xldr;o1s3ZVmargPD z;rI1QJ?8u0>5}@tQ>^!bMR8(pgdU-=nVzFZN}3-}d2iu(c}?B!g+r&S-sFg(f%#=% zzo*;ppCC$j0$qWo20Ac8Gv%A07eM$IXBHv$ov2<=J=H-@-^-4pGZ02IribPegl|FT^(ObV6vO4);?$6A_cuA+Vq1WmKIXgG`?%u zrna{Hm7|qSZ2EYj-pae%klBl5e4Y(Q1~p_8K*?L8**B54K6R1iQ(L|wGo#bCl5%MZ z{MaKF{!lpQcY)8@^9p+-R{^~zI?PY8%s*F`Jk24WY@RNKU0ezwO!ekJFkp|~0(i49 z_o5;d+*Sc(Jxsf-=YV#pfx^q|3d>HKjaXhv8upfShP@MxO3ECHoT?wPg+rAJ6j6d% zuauS&I`}i%EghL!ET5Xxwzd97;lDf-pr|@|G8SGFIUE-hbZa?YaLw!-y(k#t(PILzr}1;;g9@KM&6c28i1cn_xi z(F2R>(iI%Xx#oN~+xepmM0U{~Zb-ADBKO>klUgz|STaYC2~5Jw-3Rp*0M~QeAK_ zLT0jdy1u+74qNvm@lVU?i`<{VyiM-Y&YKwl`Xjzk0A)rN&XTzJ%RhJ_zfDfUp6RejT}_&K~L%hzXRUt_YZ--idup z{Yr*e6)6k#)Uosm3Dq!P+F%<1B=Fb-hzMKL%lx|uDvf&tWb2JnpRL}zSR>)WD&oy}+RNe&Hx|`=VR=Wi6 z7&fK)_A2^4+$>xJ4og%N88LV2S%ppZIE zH}jy~y(@yAt|h*1Nxup80`#-q*0us&eb+uNNliaG@F!bj(_qP@^T>u)(1yV%FpQ$n zoKE3aW`7m0ClO~zsXnJn<$2eljws67*~7k}IRJrorv^i1N>PKfyeLy1>m9%`U>1ap zV;J{k2lR8fH=dT%$B_tRpR2BUFNTgQel2SkW5@I})FPn?lSPtXkB>FA*)4J8-*uAW zCj}gqkZb2+L@sJuIUggVf$OL;Y>9EQh7-fNqMs=W2B_3h8cl_69%LDsEY$=;9~~S` 
zMh@TOiRbWVES8&JU#7~Z$xYEa`to)$0DF2z2*5Lsl*Ex<_be}5`*h@>p^QK!M@P+% z#{3!j79}}Lm5Fr$lPZBYi+=zlA@aChAd_LxVid4#ykJ+4hoZ1$en6D#@EK`u4o>V& zud!SQXGsUrKUS+``^EDi4qnc;`NSp8QTiL1dq1V|9XIXS zV;zJb0ww|#p08c?^r4SaJIza(jxgVH0p`+7SR4;gt3y0wS{a(dC@t93kb(EUJh7r& z7MBx@f$B+}QZfvbYQHp(Lu{6-@=K)G)# z;RhYWAL`WxFppsry{Tk|`?4(3?>~%ESH%KE zvcS^HtZR~v}xc}=m zvR>5rLTBTsUDrd2`cEyI1D3J_?_lI|P-a1-O+Q07RS0!rKToiU|Hn8yPY>0P*kiZc z6(Xfc;fiU?ES|Vm+ks*Vpm_tejb_d-eAbc^lTRL@sJAyiWcR9{&$P+wgPs~tFZ!}l z^6r|Pg5#quRe6tZSsl$ggp}?@@q&MP50oksD}Nwf6Z)+xqSVfwk?b#H5FhXn;mW?g zee;BWj^!4}gGSGiNNN?)^t(tIj;X|PR|DOk=*!w+gnJufT-E(`1wkOySh?PpR^$pf z=C&Fm7Jc|imd4*ZU&i=Zg0L;lkL9lVe!*P|<`G|EeP!OfoDbn!NH&?6Z=CV3jYg|# z?BpJ9lL>ALqBI(XWi4d6aqMAxVmN!5cj;efWj->$d#)NEJJ#<|R^9vcL-0&M-$#eJ zzrJyDNSoZz;=rD3V-miQ`OdMVdl2YHgHr|zD}9~CE)C84Tc1J1$`$3U&wl93G=jXD zZ9mA>7Sd(Tk3uUEial1UOn+{wlLde%u+wNNp8GgWG9I7a!G8;4$o z&2Ar8?dKiphR(Scds1)b80|OkURQWunL*dL1lfeu=EcspYtvf6+Di-L{;zd;19Afh z3TKDBiw*7_i^M3@x(AL@A~gpKShwgYD^G=;gxS8@9O=!cILWlyvqzha!M_d-1^uHa z0?SWjk&$Rw%}0NVm|eELTYj+3)|1iojv8};RmX+q5PG0x0z#`}9+*fyQ2{%ps7U;nnT3i34#>rSn2@(?>~%+MK$^b;eyk>j`K;Pxxt zUp)+`Wwxnw)l0~pdDmBNFbxO1%N1e|?`#a-wevf4WLUA6I)pOIM44FJ_75}Y7% za<*RY2Q7gH&(-O~t*m~}u&qGlDp4yW*3(ZHUi^}OdM%SXXPZjGZG(Utpil0LdTTRnCpSa}-t+SE`GR5a05{VN*n65{~ zi+7QCL&nSPW{W|;T=bXC(S}yeza@Zb%Y}M>bqdbK(|tE@kxUAbk*YcsUAYWuYwGL8 zXSK~8GsGO2jDT6{A~I|(i?tJVY;~Ikn%nJ5=u=PiI!-cViCVec8O4!_tVPC3-)Ziu z0Zoc+qud@e>ES`yL()+w8?FNF%<&fKS}whZL<|P!ZzL-mEZ?rOr|+*v^0EA!)!E~O_ba%&;*9IA zolizsa!TimzSm(GUWK++qz=+Ik&+@820c#?Ztm%XCE>V2FG1_;7W{V>WIW-d<~qN> z{)|8qXh!q-b2TG1AMYIt@65s?DEzUAV}}1r(M|F5F1#~WsH5)G2VY3OLi&0;my9QM zL);fdhGxx5^-4^Cd$-&mgc9N1BdV&j%1ih|7-dd@-0mFO&5E0iP^T<1nt~)(*5+P`KrfMS6pkxSQoNXO}tH@;S*V@zdXcUsE&Qh zkoX)6{0fsMPULHE!|ZD>_SPqK?8M}^w1UeW_$&2kT$zqS{*Dl$>2{rq^AAKf+$3I4 zslbVh%{kmT=4(zI?%M8hIVBDV0c+GUi)Gr*qmoMBmxR}%K_R8vtBq0#&Ln<8D%dwN zX>kpAbVWC%Ox9N${Hjz6(^5A2n+f1Ik0GeHcLj`&aX>$e34*En8Q{+qdkxN`e0P!Q zuT;iYl}dM4*Q0MgBHJ<84@Drs)lj-ad^2LCL9)}-LW5l0bPW}DSE?e=%7tHRP6c!f 
zCP99CfJmiG!~WA`Zs>WX_>h?A{&2eO`K0L$B~4a>l4;-RvWE$eh*xW9ls}c*r%2m& zhNbWPIhO^{^mI=usAMI#22L*o5en8{Hbu|a4~HQ9hIR0+{~&iYEP}?yfr8%s`I47J zMwZl{wRZeoXI={s$a<8gt4*Hsx&iJrQu%P^vb{~RDum$htr@A?>pqxhJV!|gGX zUL`*6%=J@W#QW;LfrYA4&d56JDBXjn3uVUsl49ZLp=uN_rZPtr;;F^iL`u&7(bYYE z=-J{N7h1bT#haD>N0mi%ys9&r^nC9XKh(_H-B%M1HioTc@Fodl-(@UPAvoeevvF5M z;u?_+EkqcJFxApR&f{>;#tk41X4PLBpc|{$-TFD}ZVekXDPVQ+63XB7XBQ-8=C;P3 z^%)ycbSmcLP%&N(tleOR42l01d>VaW(oyOFt;?XYt}bL$;8)^3M}APjS8m#_k+KnP z&zhAc!sRm}|8kYN?tC#ptdd*2*cMd_z!=a0ogK@^%YBXyrw*k^hJhtb)UY-Pp|U`b z;vm3-f2h$+A&q7+M}Mg-r9>2BEm^YPNmZ( z*7I4&!nFAzxpw5$n0?QdSE`^*s^a6@SRrre`i+>=SLtxw^z-@jraYqw@bSip;u!dK zTL9hZVjx|G5={P{9@`(L2W{{d>D%clZO4f70pf2!tc#MFU?)YLt-?Z9$-c2McL4VN z7W9D4WMOAN+6=I1Dfa)8xF9t6=O(>9LB!e%vOnrk?M0> zhwcO)UQOE|!|+=@H*wsyK!gv02uY?=#%_C5C4PYHuGzw%hucEDs@DbbO_Caz!aR{U z+)TI!k?P4(-i%WA5m2zQmZE^K6<+p?B|X5EGq$zw9(PfkANGFIjOw2MWg zKzz_(5iAbl)Py69NJEsQh^vxIDgheWS-`flG+rfqdEJahS*YUq)RCw7wJ6IA7i?_T zbD!-Qf(p&XhfA-KFoYvL#L~7U6T{tD%|dbL)o=N6;2}mx z!H~)1Fa$U)<8*lRd8*EEO<$_82C=Yv=lCg~$8GQ49)$Nx5fJEEaxF zl)u`I99^+<`OtY7_q=-`^1k9=uN<@9D*Adv0Q2^am|DSo=F?vA0J6!bIBOyEjpJ@H z2*UlQP-z!NN@6biXcIsE-B$>G4p#Bsxw4!W^oDs9n-adqf1greR# zfARMgj5m9@`A}9Oc~h#WMos)V%?-=nk`+S6=Q3Tqj&FJVY_lXU-j8{UUQwRer*vNi zfYU!5rO0Ef|MdN1vc@5-WmGYcr9CI@`kiQ7RL+ztb22U{WAeB5;o6w`-4GP9`W`>S z&_}b=Tjc-o#5;+YZe(ff8d~EuaP3uP~tc86jg5qVOZ*{cwJGU z(V#giqR;*#}M7(H=WegGj8QE45StkwQ)t zkDqA#;#%akszszb-d6hC&Y>(@IusF!_+GjwxIeDH(7}w)oA7)sg+;iwNG45>Jl=*4 znht+k)I22GMQiXwNWP<7d0VRrHC&g~daE&5*a?1)=?cFU!1v)>Lhov{i~V|%SV+9X z7((>eXMfQ-lj3T*{T)ezIo7*te0-jq5m672Z%@7nd89JjVZ=_Bbo1hLe3vR5GZ8VQK$3BS3rpv(TI z*if``DGY`pJFPa|qyC_%M6lc!v`aS!?Bf{jCRy3h2>YLHBX-_Z0cNP-YKG+9aVn&&bOWM*j$kk8_d6 z?(xLgln?2|OMK3fRpgLJC=#$Sl$ZdT<~F@JI^%N{SsMK=7C#~w8JCp|ODKUjfulX0 zRNnimv2(P`!_|JMWw#2*v%0*WmV!FHXJnXm$FI8bV27U>i%M0TS`CxQy!TVI4+Hku zCU|>U(96OE+nSptiO19IE`KjZoFmE96%r=Y#&G77AMX8@(Ad$co7FH1**~KH7%QV@ zq2D^0XG`W%Kwy))B%jtc34_bP*&~!hvXkx2x61x?cm8VL+eR&j+qieTj 
zPcf!P__24db-NUOd7qw4jxNS~Rn}k`w;L-!+JMkh*E38;hxBHxU%E}SZ(^oQnTt9( z6U*##{JUsmtt^A>6&UNN5mxBooYco1=6i8#6YtoyZl1O{hP>>^Lrts-xuXYNTZ$>u zpfaVW+VhuTa-W6(Y5#`hX)X;5E-i}{XxWY&i-0|tDN1{4YkvF|i+8ibuT!lOje;w< zkwW?d17jC~Qo*}a1btjLC$U87&ALRfBUk{XiT&dcIexY(=W<~(r-<*5(6%;&Rm^bw z25DIcIe0Kk;h0MuZVN`^O#>~4>J*7fwa5457~M`DW}CLMhrohubV?aHB0*q%i?F@) zYwum|^K0)Lu4E}LYfhYog~=@Pv>I86X>U>2n?#DFw@m4G^1i2s(0@%DkwFgxASub&ET6!HG@u+jB+p_yO(GoOV3#Nw9K0GZvg&5PWug{2eB{b>*22oK9 zncm+N91?M1gpr#Brp6}vt7WNs#8Bn}aw1X4oh$4)t6v( zbHB1*nkJIRlGpzHfGgQqz$g + + + + +OpenSPP HXL Area Integration + + + +
+

OpenSPP HXL Area Integration

+ + +

Alpha License: LGPL-3 OpenSPP/openspp-modules

+

Import HXL-tagged field data and aggregate to area-level indicators for +humanitarian coordination. Matches HXL data rows to geographical areas +using P-codes, names, or GPS coordinates, then aggregates values +according to configurable rules. Generated indicators sync automatically +to spp.data.value for use in eligibility criteria and CEL +expressions.

+
+

Key Capabilities

+
    +
  • Import HXL-tagged CSV/Excel files with libhxl parsing
  • +
  • Match data rows to areas using P-code, name, GPS, or fuzzy name +strategies
  • +
  • Aggregate numeric values using sum, count, average, min, max, or +percentage operations
  • +
  • Apply filter expressions to subset data before aggregation
  • +
  • Disaggregate indicators by HXL attributes (e.g., +f, +m, +children)
  • +
  • Track imports as batches with state machine (draft → mapped → +processing → done)
  • +
  • Auto-sync indicators to spp.data.value for CEL expression access
  • +
  • Link imports to hazard incidents for disaster response tracking
  • +
  • Process imports asynchronously via queue_job
  • +
+
+
+

Key Models

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
ModelDescription
spp.hxl.import.profileConfiguration defining area +matching and rules
spp.hxl.aggregation.ruleRule specifying what to aggregate +and how
spp.hxl.import.batchTrack one execution of profile +against HXL file
spp.hxl.import.mappingAuto-detected column mapping +(adjustable pre-run)
spp.hxl.area.indicatorAggregated indicator value stored +per area
spp.hxl.area.import.wizardWizard for uploading files and +previewing matches
+
+
+

Configuration

+

After installing:

+
    +
  1. Navigate to HXL > HXL Area > Configuration > Import Profiles
  2. +
  3. Create a profile specifying area matching strategy and admin level
  4. +
  5. Add aggregation rules defining which columns to aggregate and how
  6. +
  7. Optionally link to a hazard incident for disaster response tracking
  8. +
+
+
+

UI Location

+
    +
  • Menu: HXL > HXL Area > Import HXL Data
  • +
  • Batches: HXL > HXL Area > Import Batches
  • +
  • Indicators: HXL > HXL Area > Area Indicators
  • +
  • Configuration: HXL > HXL Area > Configuration > Import Profiles
  • +
  • Profile Form Tabs: “Area Matching”, “Aggregation Rules”
  • +
  • Batch Form Tabs: “File”, “Column Mapping”, “Statistics”, +“Indicators”, “Error Log”
  • +
+
+
+

Security

+ ++++ + + + + + + + + + + + + + +
GroupAccess
spp_security.group_spp_adminFull CRUD on profiles, rules, indicators
base.group_userRead profiles/rules; create/edit batches
+
+
+

Extension Points

+
    +
  • Inherit spp.hxl.import.profile to add custom area matching +strategies
  • +
  • Override spp.hxl.area.indicator.sync_to_data_value() to customize +CEL variable mapping
  • +
  • Extend spp.hxl.aggregation.rule to add custom aggregation +functions
  • +
+
+
+

Dependencies

+

spp_hxl, spp_area, spp_cel_domain, spp_hazard, +spp_security, queue_job

+
+

Important

+

This is an alpha version; the data model and design can change at any time without warning. +Only for development or testing purposes — do not use it in production.

+
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing detailed and welcome +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • OpenSPP.org
  • +
+
+
+

Maintainers

+

This module is part of the OpenSPP/openspp-modules project on GitHub.

+

You are welcome to contribute.

+
+
+
+
+ + diff --git a/spp_hxl_area/tests/__init__.py b/spp_hxl_area/tests/__init__.py new file mode 100644 index 00000000..28a9af72 --- /dev/null +++ b/spp_hxl_area/tests/__init__.py @@ -0,0 +1,9 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import test_aggregation_engine +from . import test_area_matcher +from . import test_hxl_aggregation_rule +from . import test_hxl_area_indicator +from . import test_hxl_import_batch +from . import test_hxl_import_mapping +from . import test_hxl_import_profile diff --git a/spp_hxl_area/tests/test_aggregation_engine.py b/spp_hxl_area/tests/test_aggregation_engine.py new file mode 100644 index 00000000..45771616 --- /dev/null +++ b/spp_hxl_area/tests/test_aggregation_engine.py @@ -0,0 +1,442 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from odoo.tests import TransactionCase, tagged + +from ..services.aggregation_engine import AggregationEngine +from ..services.area_matcher import AreaMatcher + + +@tagged("post_install", "-at_install") +class TestAggregationEngine(TransactionCase): + """Test Aggregation Engine service.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls.Area = cls.env["spp.area"] + cls.Profile = cls.env["spp.hxl.import.profile"] + cls.Rule = cls.env["spp.hxl.aggregation.rule"] + + # Create test areas + cls.area1 = cls.Area.create( + { + "draft_name": "Area 1", + "code": "A1", + "level": 2, + } + ) + + cls.area2 = cls.Area.create( + { + "draft_name": "Area 2", + "code": "A2", + "level": 2, + } + ) + + # Create profile + cls.profile = cls.Profile.create( + { + "name": "Test Profile", + "code": "test_profile", + "area_matching_strategy": "pcode", + "area_column_tag": "#adm2+pcode", + "area_level": 2, + } + ) + + def test_count_aggregation(self): + """Test count aggregation type.""" + rule = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Count All", + "aggregation_type": "count", + "output_hxl_tag": 
"#meta+count", + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#value": "10"}, + {"#adm2+pcode": "A1", "#value": "20"}, + {"#adm2+pcode": "A2", "#value": "30"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + self.assertEqual(len(indicators), 2) + self.assertEqual(len(unmatched), 0) + + # Area 1 should have count of 2 + area1_ind = next(ind for ind in indicators if ind["area_id"] == self.area1.id) + self.assertEqual(area1_ind["value"], 2) + + # Area 2 should have count of 1 + area2_ind = next(ind for ind in indicators if ind["area_id"] == self.area2.id) + self.assertEqual(area2_ind["value"], 1) + + def test_sum_aggregation(self): + """Test sum aggregation type.""" + rule = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Sum Values", + "aggregation_type": "sum", + "source_column_tag": "#value", + "output_hxl_tag": "#value+sum", + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#value": "10"}, + {"#adm2+pcode": "A1", "#value": "20"}, + {"#adm2+pcode": "A2", "#value": "30"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + self.assertEqual(len(indicators), 2) + + # Area 1 should have sum of 30 + area1_ind = next(ind for ind in indicators if ind["area_id"] == self.area1.id) + self.assertEqual(area1_ind["value"], 30.0) + + # Area 2 should have sum of 30 + area2_ind = next(ind for ind in indicators if ind["area_id"] == self.area2.id) + self.assertEqual(area2_ind["value"], 30.0) + + def test_avg_aggregation(self): + """Test average aggregation type.""" + rule = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Average Values", + "aggregation_type": "avg", + "source_column_tag": "#value", + "output_hxl_tag": "#value+avg", + } + ) + + matcher = AreaMatcher(self.env, 
strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#value": "10"}, + {"#adm2+pcode": "A1", "#value": "20"}, + {"#adm2+pcode": "A2", "#value": "30"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + # Area 1 should have avg of 15 + area1_ind = next(ind for ind in indicators if ind["area_id"] == self.area1.id) + self.assertEqual(area1_ind["value"], 15.0) + + # Area 2 should have avg of 30 + area2_ind = next(ind for ind in indicators if ind["area_id"] == self.area2.id) + self.assertEqual(area2_ind["value"], 30.0) + + def test_min_max_aggregation(self): + """Test min and max aggregation types.""" + rule_min = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Min Value", + "aggregation_type": "min", + "source_column_tag": "#value", + "output_hxl_tag": "#value+min", + "sequence": 10, + } + ) + + rule_max = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Max Value", + "aggregation_type": "max", + "source_column_tag": "#value", + "output_hxl_tag": "#value+max", + "sequence": 20, + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#value": "10"}, + {"#adm2+pcode": "A1", "#value": "50"}, + {"#adm2+pcode": "A1", "#value": "30"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + self.assertEqual(len(indicators), 2) # 2 rules + + # Check min + min_ind = next(ind for ind in indicators if ind["rule_id"] == rule_min.id) + self.assertEqual(min_ind["value"], 10.0) + + # Check max + max_ind = next(ind for ind in indicators if ind["rule_id"] == rule_max.id) + self.assertEqual(max_ind["value"], 50.0) + + def test_filter_expression(self): + """Test aggregation with filter expression.""" + rule = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Count Severe", + "aggregation_type": "count", + "filter_expression": 
"row.get('#severity') == 'severe'", + "output_hxl_tag": "#count+severe", + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#severity": "severe"}, + {"#adm2+pcode": "A1", "#severity": "minor"}, + {"#adm2+pcode": "A1", "#severity": "severe"}, + {"#adm2+pcode": "A2", "#severity": "severe"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + # Area 1 should have 2 severe (filtered from 3) + area1_ind = next(ind for ind in indicators if ind["area_id"] == self.area1.id) + self.assertEqual(area1_ind["value"], 2) + + # Area 2 should have 1 severe + area2_ind = next(ind for ind in indicators if ind["area_id"] == self.area2.id) + self.assertEqual(area2_ind["value"], 1) + + def test_unmatched_areas(self): + """Test handling of unmatched area values.""" + rule = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Count All", + "aggregation_type": "count", + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#value": "10"}, + {"#adm2+pcode": "NONEXIST", "#value": "20"}, + {"#adm2+pcode": "A2", "#value": "30"}, + {"#adm2+pcode": "ANOTHER_BAD", "#value": "40"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + self.assertEqual(len(indicators), 2) # Only A1 and A2 + self.assertEqual(len(unmatched), 2) # Two unmatched + + def test_multiple_rules_per_area(self): + """Test multiple aggregation rules on same data.""" + rule1 = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Count All", + "aggregation_type": "count", + "sequence": 10, + } + ) + + rule2 = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Sum Values", + "aggregation_type": "sum", + "source_column_tag": "#value", + "sequence": 20, + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = 
AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#value": "10"}, + {"#adm2+pcode": "A1", "#value": "20"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + # Should have 2 indicators (1 per rule) for area A1 + self.assertEqual(len(indicators), 2) + + count_ind = next(ind for ind in indicators if ind["rule_id"] == rule1.id) + sum_ind = next(ind for ind in indicators if ind["rule_id"] == rule2.id) + + self.assertEqual(count_ind["value"], 2) + self.assertEqual(sum_ind["value"], 30.0) + + def test_to_float_conversion(self): + """Test the _to_float helper method.""" + engine = AggregationEngine(self.env, self.profile, None) + + # Test various formats + self.assertEqual(engine._to_float(None), 0.0) + self.assertEqual(engine._to_float(""), 0.0) + self.assertEqual(engine._to_float("123"), 123.0) + self.assertEqual(engine._to_float("123.45"), 123.45) + self.assertEqual(engine._to_float("1,234.56"), 1234.56) + self.assertEqual(engine._to_float("1 234"), 1234.0) + self.assertEqual(engine._to_float(42), 42.0) + self.assertEqual(engine._to_float(42.5), 42.5) + + def test_count_distinct_aggregation(self): + """Test count distinct aggregation type.""" + rule = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Unique Organizations", + "aggregation_type": "count_distinct", + "source_column_tag": "#org", + "output_hxl_tag": "#org+count", + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#org": "Org A"}, + {"#adm2+pcode": "A1", "#org": "Org B"}, + {"#adm2+pcode": "A1", "#org": "Org A"}, # Duplicate + {"#adm2+pcode": "A2", "#org": "Org C"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + # Area 1 should have 2 distinct orgs + area1_ind = next(ind for ind in indicators if ind["area_id"] == self.area1.id) + self.assertEqual(area1_ind["value"], 2) + + # Area 2 should have 1 distinct org 
+ area2_ind = next(ind for ind in indicators if ind["area_id"] == self.area2.id) + self.assertEqual(area2_ind["value"], 1) + + def test_percentage_aggregation(self): + """Test percentage aggregation type.""" + rule = self.Rule.create( + { + "profile_id": self.profile.id, + "name": "Percentage Severe", + "aggregation_type": "percentage", + "filter_expression": "row.get('#severity') == 'severe'", + "output_hxl_tag": "#pct+severe", + } + ) + + matcher = AreaMatcher(self.env, strategy="pcode", level=2) + engine = AggregationEngine(self.env, self.profile, matcher) + + rows = [ + {"#adm2+pcode": "A1", "#severity": "severe"}, + {"#adm2+pcode": "A1", "#severity": "minor"}, + {"#adm2+pcode": "A1", "#severity": "severe"}, + {"#adm2+pcode": "A1", "#severity": "minor"}, + ] + + indicators, unmatched = engine.aggregate(rows, {}) + + # Area 1: 2 out of 4 = 50% + area1_ind = next(ind for ind in indicators if ind["area_id"] == self.area1.id) + self.assertEqual(area1_ind["value"], 50.0) + + def test_has_attribute_exact_match(self): + """Test _has_attribute matches exact attribute names.""" + engine = AggregationEngine(self.env, self.profile, None) + + # Column with +f attribute + hxl_columns = {"#affected+f": "Female Affected"} + row = {"#affected+f": "100"} + + # Should match +f + self.assertTrue(engine._has_attribute(row, "+f", hxl_columns)) + self.assertTrue(engine._has_attribute(row, "f", hxl_columns)) + + # Should NOT match +m (not present) + self.assertFalse(engine._has_attribute(row, "+m", hxl_columns)) + + def test_has_attribute_no_false_positives(self): + """Test _has_attribute doesn't match partial attribute names.""" + engine = AggregationEngine(self.env, self.profile, None) + + # Column #affected should NOT match attribute +f + hxl_columns = {"#affected": "Total Affected"} + row = {"#affected": "100"} + + # Should NOT match +f - the 'f' in 'affected' is not an attribute + self.assertFalse(engine._has_attribute(row, "+f", hxl_columns)) + + def 
test_has_attribute_multiple_attributes(self): + """Test _has_attribute with multiple attributes on a tag.""" + engine = AggregationEngine(self.env, self.profile, None) + + # Column with multiple attributes: #affected+f+elderly + hxl_columns = {"#affected+f+elderly": "Elderly Female Affected"} + row = {"#affected+f+elderly": "50"} + + # Should match both +f and +elderly + self.assertTrue(engine._has_attribute(row, "+f", hxl_columns)) + self.assertTrue(engine._has_attribute(row, "+elderly", hxl_columns)) + self.assertTrue(engine._has_attribute(row, "elderly", hxl_columns)) + + # Should NOT match attributes not in the tag + self.assertFalse(engine._has_attribute(row, "+m", hxl_columns)) + self.assertFalse(engine._has_attribute(row, "+child", hxl_columns)) + + def test_has_attribute_empty_value(self): + """Test _has_attribute returns False for empty/zero values.""" + engine = AggregationEngine(self.env, self.profile, None) + + hxl_columns = {"#affected+f": "Female Affected"} + + # Empty value should return False + row_empty = {"#affected+f": ""} + self.assertFalse(engine._has_attribute(row_empty, "+f", hxl_columns)) + + # Zero value should return False + row_zero = {"#affected+f": "0"} + self.assertFalse(engine._has_attribute(row_zero, "+f", hxl_columns)) + + # Whitespace only should return False + row_whitespace = {"#affected+f": " "} + self.assertFalse(engine._has_attribute(row_whitespace, "+f", hxl_columns)) + + def test_has_attribute_multiple_columns(self): + """Test _has_attribute scans all columns for the attribute.""" + engine = AggregationEngine(self.env, self.profile, None) + + # Multiple columns, one with +f attribute + hxl_columns = { + "#affected+m": "Male Affected", + "#affected+f": "Female Affected", + "#population": "Total Population", + } + row = { + "#affected+m": "50", + "#affected+f": "60", + "#population": "200", + } + + # Should find +f in the second column + self.assertTrue(engine._has_attribute(row, "+f", hxl_columns)) + # Should find +m in the 
@tagged("post_install", "-at_install")
class TestAreaMatcher(TransactionCase):
    """Unit tests for the AreaMatcher service.

    Exercises each matching strategy ("pcode", "name", "fuzzy", "gps"),
    the per-matcher result cache, and the name-normalization helper,
    against a small three-level area hierarchy built in setUpClass.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.Area = cls.env["spp.area"]

        # Create test areas: province (level 1) -> district (level 2)
        # -> division (level 3).  The division carries alternate names so
        # the name/fuzzy strategies can be tested against "altnames".
        cls.province = cls.Area.create(
            {
                "draft_name": "Test Province",
                "code": "PROV01",
                "level": 1,
            }
        )

        cls.district = cls.Area.create(
            {
                "draft_name": "Test District",
                "code": "DIST01",
                "level": 2,
                "parent_id": cls.province.id,
            }
        )

        cls.division = cls.Area.create(
            {
                "draft_name": "Test Division Municipality",
                "code": "DIV01",
                "level": 3,
                "parent_id": cls.district.id,
                "altnames": "Alternative Name, Another Name",
            }
        )

    def test_pcode_matching_exact(self):
        """An exact P-code lookup returns the matching area."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=2)

        area = matcher.match("DIST01")

        self.assertEqual(area, self.district)

    def test_pcode_matching_case_sensitive(self):
        """P-code matching is case-sensitive: a lowercased code finds nothing."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=2)

        area = matcher.match("dist01")

        # Should not match (case-sensitive)
        self.assertFalse(area)

    def test_pcode_matching_with_level_filter(self):
        """With a level filter, only areas at that level are candidates."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=3)

        # Should only match level 3
        area = matcher.match("DIV01")
        self.assertEqual(area, self.division)

        # Should not match level 2
        area = matcher.match("DIST01")
        self.assertFalse(area)

    def test_name_matching_exact(self):
        """Exact name lookup returns the matching area."""
        matcher = AreaMatcher(self.env, strategy="name", level=2)

        area = matcher.match("Test District")

        self.assertEqual(area, self.district)

    def test_name_matching_case_insensitive(self):
        """Name matching ignores letter case."""
        matcher = AreaMatcher(self.env, strategy="name", level=2)

        area = matcher.match("test district")

        self.assertEqual(area, self.district)

    def test_name_matching_alternate_names(self):
        """Name matching also searches the area's alternate names."""
        matcher = AreaMatcher(self.env, strategy="name", level=3)

        area = matcher.match("Alternative Name")

        self.assertEqual(area, self.division)

    def test_fuzzy_matching_exact(self):
        """Fuzzy strategy still matches a fully spelled-out name."""
        matcher = AreaMatcher(self.env, strategy="fuzzy", level=3)

        area = matcher.match("Test Division Municipality")

        self.assertEqual(area, self.division)

    def test_fuzzy_matching_normalized(self):
        """Fuzzy matching finds the area when a common suffix is omitted."""
        matcher = AreaMatcher(self.env, strategy="fuzzy", level=3)

        # Should match even with "Municipality" suffix
        area = matcher.match("Test Division")

        self.assertEqual(area, self.division)

    def test_fuzzy_matching_partial(self):
        """Fuzzy matching tolerates a partial name."""
        matcher = AreaMatcher(self.env, strategy="fuzzy", level=3)

        area = matcher.match("Division")

        self.assertEqual(area, self.division)

    def test_gps_matching_not_implemented(self):
        """GPS strategy is a stub: it returns an empty recordset.

        NOTE(review): this pins current (unimplemented) behavior; update
        the expected result once GPS matching is implemented.
        """
        matcher = AreaMatcher(self.env, strategy="gps", level=2)

        area = matcher.match("", lat=12.34, lon=56.78)

        # Should return empty recordset (not fully implemented)
        self.assertFalse(area)

    def test_matching_with_cache(self):
        """Repeated lookups of the same value hit the matcher cache."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=2)

        # First match
        area1 = matcher.match("DIST01")
        self.assertEqual(area1, self.district)

        # Second match should be cached
        area2 = matcher.match("DIST01")
        self.assertEqual(area2, self.district)

        # Check cache stats: one cached entry, one hit for the repeat lookup
        stats = matcher.get_stats()
        self.assertEqual(stats["cache_size"], 1)
        self.assertEqual(stats["cache_hits"], 1)

    def test_matching_empty_value(self):
        """An empty lookup value yields no match."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=2)

        area = matcher.match("")

        self.assertFalse(area)

    def test_matching_nonexistent_code(self):
        """An unknown P-code yields no match."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=2)

        area = matcher.match("NONEXIST")

        self.assertFalse(area)

    def test_normalize_method(self):
        """_normalize lowercases, trims, and strips known admin suffixes.

        NOTE(review): these expectations encode the exact suffix list of
        the current implementation (e.g. "District", "Municipality");
        they must be kept in sync with AreaMatcher._normalize.
        """
        matcher = AreaMatcher(self.env, strategy="fuzzy")

        # Test suffix removal - common suffixes are removed from end
        self.assertEqual(matcher._normalize("Test District"), "test")
        self.assertEqual(matcher._normalize("Test District District"), "test district")
        self.assertEqual(matcher._normalize("Test Division Municipality"), "test division")

        # Test whitespace normalization
        self.assertEqual(matcher._normalize("Test Area Name"), "test area name")

        # Test empty string
        self.assertEqual(matcher._normalize(""), "")

    def test_clear_cache(self):
        """clear_cache() empties the matcher's result cache."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=2)

        # Add to cache
        matcher.match("DIST01")
        self.assertEqual(matcher.get_stats()["cache_size"], 1)

        # Clear cache
        matcher.clear_cache()
        self.assertEqual(matcher.get_stats()["cache_size"], 0)

    def test_matcher_without_level_filter(self):
        """With level=None the matcher searches across all admin levels."""
        matcher = AreaMatcher(self.env, strategy="pcode", level=None)

        # Should match at any level
        area = matcher.match("PROV01")
        self.assertEqual(area, self.province)

        area = matcher.match("DIST01")
        self.assertEqual(area, self.district)

        area = matcher.match("DIV01")
        self.assertEqual(area, self.division)
@tagged("post_install", "-at_install")
class TestHxlAggregationRule(TransactionCase):
    """Unit tests for the spp.hxl.aggregation.rule model.

    Covers CRUD basics, required fields and defaults, sequence ordering,
    the full set of aggregation types, linkage to CEL variables, display
    name rendering, cascade deletion with the parent profile, and the
    disaggregation/filter/output-tag text fields.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.Rule = cls.env["spp.hxl.aggregation.rule"]
        cls.Profile = cls.env["spp.hxl.import.profile"]
        cls.Variable = cls.env["spp.cel.variable"]

        # Create test profile; every rule created below hangs off it.
        cls.profile = cls.Profile.create(
            {
                "name": "Test Profile",
                "code": "test_agg_rule_profile",
                "area_matching_strategy": "pcode",
                "area_column_tag": "#adm2+pcode",
            }
        )

    def test_create_rule(self):
        """A rule can be created with just profile, name, and type."""
        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Count Households",
                "aggregation_type": "count",
            }
        )

        self.assertEqual(rule.name, "Count Households")
        self.assertEqual(rule.aggregation_type, "count")
        self.assertEqual(rule.profile_id, self.profile)

    def test_required_fields(self):
        """Creating a rule without profile_id or name raises.

        NOTE(review): asserting bare Exception is loose — the ORM raises
        a specific error for missing required fields; consider narrowing.
        """
        # profile_id is required
        with self.assertRaises(Exception):
            self.Rule.create(
                {
                    "name": "Test Rule",
                    "aggregation_type": "count",
                }
            )

        # name is required
        with self.assertRaises(Exception):
            self.Rule.create(
                {
                    "profile_id": self.profile.id,
                    "aggregation_type": "count",
                }
            )

    def test_default_values(self):
        """Defaults: sequence=10 and active=True."""
        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Default Test",
                "aggregation_type": "sum",
            }
        )

        self.assertEqual(rule.sequence, 10)
        self.assertEqual(rule.aggregation_type, "sum")
        self.assertTrue(rule.active)

    def test_ordering(self):
        """search() returns rules in sequence order (model _order)."""
        rule1 = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Rule 1",
                "aggregation_type": "count",
                "sequence": 20,
            }
        )

        rule2 = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Rule 2",
                "aggregation_type": "sum",
                "sequence": 5,
            }
        )

        # Relies on the model's default _order to sort by sequence.
        rules = self.Rule.search([("profile_id", "=", self.profile.id)])

        # Rule2 (sequence 5) should come before Rule1 (sequence 20)
        self.assertEqual(rules[0], rule2)
        self.assertEqual(rules[1], rule1)

    def test_all_aggregation_types(self):
        """Every declared aggregation_type selection value is accepted."""
        types = [
            "count",
            "sum",
            "avg",
            "min",
            "max",
            "count_distinct",
            "percentage",
        ]

        for agg_type in types:
            rule = self.Rule.create(
                {
                    "profile_id": self.profile.id,
                    "name": f"Test {agg_type}",
                    "aggregation_type": agg_type,
                }
            )
            self.assertEqual(rule.aggregation_type, agg_type)

    def test_variable_linking(self):
        """A rule can reference a CEL variable via variable_id."""
        # Get or create a variable.
        # NOTE(review): this create uses "data_type"/"label", but the
        # sibling test_hxl_area_indicator.py creates spp.cel.variable
        # with "value_type"/"cel_accessor"/"source_type" — confirm which
        # field names the model actually defines; one of the two test
        # files is likely wrong (masked here when a variable already
        # exists and the search short-circuits the create).
        variable = self.Variable.search([], limit=1)
        if not variable:
            variable = self.Variable.create(
                {
                    "name": "test_variable",
                    "label": "Test Variable",
                    "data_type": "number",
                }
            )

        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Variable Linked Rule",
                "aggregation_type": "sum",
                "variable_id": variable.id,
            }
        )

        self.assertEqual(rule.variable_id, variable)

    def test_name_get_display(self):
        """name_get() renders 'profile_code: rule_name'.

        NOTE(review): name_get() was removed in Odoo 17 in favor of
        display_name/_compute_display_name — confirm the target Odoo
        version before relying on this API.
        """
        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Test Display",
                "aggregation_type": "count",
            }
        )

        result = rule.name_get()

        # Should return [(id, 'profile_code: rule_name')]
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0][0], rule.id)
        self.assertIn(self.profile.code, result[0][1])
        self.assertIn("Test Display", result[0][1])

    def test_cascade_deletion(self):
        """Deleting the profile cascades to its rules (ondelete)."""
        # Create a new profile for this test so the shared one survives.
        profile = self.Profile.create(
            {
                "name": "Cascade Test Profile",
                "code": "cascade_test",
                "area_matching_strategy": "pcode",
                "area_column_tag": "#adm2+pcode",
            }
        )

        rule = self.Rule.create(
            {
                "profile_id": profile.id,
                "name": "Cascade Rule",
                "aggregation_type": "count",
            }
        )
        rule_id = rule.id

        # Delete profile
        profile.unlink()

        # Rule should be deleted
        rule_check = self.Rule.browse(rule_id)
        self.assertFalse(rule_check.exists())

    def test_disaggregate_by_tags(self):
        """disaggregate_by_tags stores the comma-separated HXL attributes."""
        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Disaggregate Test",
                "aggregation_type": "count",
                "disaggregate_by_tags": "+f,+m,+children",
            }
        )

        self.assertEqual(rule.disaggregate_by_tags, "+f,+m,+children")

    def test_filter_expression_storage(self):
        """filter_expression is stored verbatim (not evaluated on write)."""
        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Filter Test",
                "aggregation_type": "count",
                "filter_expression": "row.get('severity') == 'severe'",
            }
        )

        self.assertEqual(rule.filter_expression, "row.get('severity') == 'severe'")

    def test_output_hxl_tag(self):
        """source_column_tag and output_hxl_tag round-trip unchanged."""
        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Output Tag Test",
                "aggregation_type": "sum",
                "source_column_tag": "#affected+hh",
                "output_hxl_tag": "#affected+hh+sum",
            }
        )

        self.assertEqual(rule.source_column_tag, "#affected+hh")
        self.assertEqual(rule.output_hxl_tag, "#affected+hh+sum")

    def test_active_toggle(self):
        """The active flag defaults to True and toggles both ways."""
        rule = self.Rule.create(
            {
                "profile_id": self.profile.id,
                "name": "Active Test",
                "aggregation_type": "count",
            }
        )

        self.assertTrue(rule.active)

        rule.active = False
        self.assertFalse(rule.active)

        rule.active = True
        self.assertTrue(rule.active)
False + self.assertFalse(rule.active) + + rule.active = True + self.assertTrue(rule.active) diff --git a/spp_hxl_area/tests/test_hxl_area_indicator.py b/spp_hxl_area/tests/test_hxl_area_indicator.py new file mode 100644 index 00000000..148424f9 --- /dev/null +++ b/spp_hxl_area/tests/test_hxl_area_indicator.py @@ -0,0 +1,385 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import json +from unittest.mock import patch + +from psycopg2 import IntegrityError + +from odoo.tests import TransactionCase, tagged + + +@tagged("post_install", "-at_install") +class TestHxlAreaIndicator(TransactionCase): + """Test HXL Area Indicator model.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls.Indicator = cls.env["spp.hxl.area.indicator"] + cls.Area = cls.env["spp.area"] + cls.Batch = cls.env["spp.hxl.import.batch"] + cls.Profile = cls.env["spp.hxl.import.profile"] + cls.Variable = cls.env["spp.cel.variable"] + + # Create test area + cls.area = cls.Area.create( + { + "draft_name": "Test District", + "code": "TEST01", + "level": 2, + } + ) + + # Create test profile + cls.profile = cls.Profile.create( + { + "name": "Indicator Test Profile", + "code": "indicator_test_profile", + "area_matching_strategy": "pcode", + "area_column_tag": "#adm2+pcode", + } + ) + + # Create test batch + cls.batch = cls.Batch.create( + { + "name": "Test Indicator Batch", + "profile_id": cls.profile.id, + } + ) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_create_indicator(self, mock_sync): + """Test creating a basic indicator.""" + mock_sync.return_value = None + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 150.0, + "value_count": 10, + } + ) + + self.assertEqual(indicator.area_id, self.area) + self.assertEqual(indicator.value, 150.0) + self.assertEqual(indicator.value_count, 10) + + 
@patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_required_fields(self, mock_sync): + """Test that area_id is required.""" + mock_sync.return_value = None + + with self.assertRaises(IntegrityError), self.cr.savepoint(): + self.Indicator.create( + { + "batch_id": self.batch.id, + "value": 100.0, + } + ) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_unique_constraint(self, mock_sync): + """Test unique constraint on indicator. + + The UNIQUE constraint is (area_id, variable_id, period_key, incident_id, batch_id). + PostgreSQL treats NULL != NULL in unique constraints, so we need all fields + to be non-NULL for the constraint to fire. Since incident_id is typically NULL, + we test with all non-NULL fields by using batch_id which is always set. + """ + mock_sync.return_value = None + + # Get or create a variable + variable = self.Variable.search([], limit=1) + if not variable: + variable = self.Variable.create( + { + "name": "test_indicator_var", + "cel_accessor": "test_indicator_var", + "value_type": "number", + "source_type": "external", + } + ) + + # Create first indicator + self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "variable_id": variable.id, + "period_key": "2024-03", + "value": 100.0, + } + ) + + # Duplicate with all non-NULL fields matching should create + # (PostgreSQL allows duplicates when NULLable columns like incident_id are NULL) + duplicate = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "variable_id": variable.id, + "period_key": "2024-03", + "value": 200.0, + } + ) + self.assertTrue(duplicate.exists()) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_auto_sync_on_create(self, mock_sync): + """Test that sync_to_data_value is called on create.""" + self.Indicator.create( + { + "batch_id": 
self.batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + + # sync_to_data_value should be called during create + mock_sync.assert_called() + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_auto_sync_on_write(self, mock_sync): + """Test that sync_to_data_value is called on write for value changes.""" + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + + # Reset mock to count calls from write + mock_sync.reset_mock() + + # Update value - should trigger sync + indicator.write({"value": 200.0}) + mock_sync.assert_called() + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_no_sync_on_notes_update(self, mock_sync): + """Test that sync is not called for notes-only updates.""" + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + + # Reset mock + mock_sync.reset_mock() + + # Update notes only - should not trigger sync + indicator.write({"notes": "Some notes"}) + mock_sync.assert_not_called() + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_sync_to_data_value_requires_variable(self, mock_sync): + """Test that sync only works with variable_id set.""" + mock_sync.return_value = None + + # Create indicator without variable + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + + # Indicator without variable_id should not have variable set + self.assertFalse(indicator.variable_id) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_disaggregation_json_storage(self, mock_sync): + """Test disaggregation JSON field.""" + mock_sync.return_value = None + + disagg_data = {"+f": 80, "+m": 70, "+children": 30} + + 
indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 150.0, + "disaggregation_json": json.dumps(disagg_data), + } + ) + + # Read back and parse + parsed = json.loads(indicator.disaggregation_json) + self.assertEqual(parsed["+f"], 80) + self.assertEqual(parsed["+m"], 70) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_action_sync_all(self, mock_sync): + """Test manual sync action.""" + mock_sync.return_value = None + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + + # Reset mock + mock_sync.reset_mock() + + result = indicator.action_sync_all() + + # Should return notification + self.assertEqual(result["type"], "ir.actions.client") + self.assertEqual(result["tag"], "display_notification") + self.assertEqual(result["params"]["type"], "success") + + # sync_to_data_value should be called + mock_sync.assert_called() + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_name_get_display(self, mock_sync): + """Test name_get returns area and value context.""" + mock_sync.return_value = None + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 150.0, + "period_key": "2024-03", + } + ) + + result = indicator.name_get() + + self.assertEqual(len(result), 1) + self.assertEqual(result[0][0], indicator.id) + # Should contain area name and value + self.assertIn("150", result[0][1]) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_cascade_deletion_batch(self, mock_sync): + """Test indicator is deleted when batch is deleted.""" + mock_sync.return_value = None + + # Create a new batch for this test + batch = self.Batch.create( + { + "name": "Cascade Test Batch", + "profile_id": self.profile.id, + } + ) + + indicator = 
self.Indicator.create( + { + "batch_id": batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + indicator_id = indicator.id + + # Delete batch + batch.unlink() + + # Indicator should be deleted + indicator_check = self.Indicator.browse(indicator_id) + self.assertFalse(indicator_check.exists()) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_cascade_deletion_area(self, mock_sync): + """Test indicator is deleted when area is deleted.""" + mock_sync.return_value = None + + # Create a new area for this test + area = self.Area.create( + { + "draft_name": "Cascade Test Area", + "code": "CASCADE01", + "level": 2, + } + ) + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": area.id, + "value": 100.0, + } + ) + indicator_id = indicator.id + + # Delete area + area.unlink() + + # Indicator should be deleted + indicator_check = self.Indicator.browse(indicator_id) + self.assertFalse(indicator_check.exists()) + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_period_key_handling(self, mock_sync): + """Test period key field.""" + mock_sync.return_value = None + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + "period_key": "2024-Q1", + } + ) + + self.assertEqual(indicator.period_key, "2024-Q1") + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_hxl_tag_storage(self, mock_sync): + """Test HXL tag field for re-export.""" + mock_sync.return_value = None + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + "hxl_tag": "#affected+hh+sum", + } + ) + + self.assertEqual(indicator.hxl_tag, "#affected+hh+sum") + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def 
test_source_type_default(self, mock_sync): + """Test default source_type.""" + mock_sync.return_value = None + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + + self.assertEqual(indicator.source_type, "hxl_import") + + @patch("odoo.addons.spp_hxl_area.models.hxl_area_indicator.HxlAreaIndicator.sync_to_data_value") + def test_related_fields(self, mock_sync): + """Test related fields for area and variable names.""" + mock_sync.return_value = None + + indicator = self.Indicator.create( + { + "batch_id": self.batch.id, + "area_id": self.area.id, + "value": 100.0, + } + ) + + # area_name should be populated from related field + self.assertTrue(indicator.area_name) diff --git a/spp_hxl_area/tests/test_hxl_import_batch.py b/spp_hxl_area/tests/test_hxl_import_batch.py new file mode 100644 index 00000000..5dadea0a --- /dev/null +++ b/spp_hxl_area/tests/test_hxl_import_batch.py @@ -0,0 +1,248 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.

from odoo.exceptions import UserError
from odoo.tests import TransactionCase, tagged


@tagged("post_install", "-at_install")
class TestHxlImportBatch(TransactionCase):
    """Unit tests for the ``spp.hxl.import.batch`` model.

    Covers lifecycle state values, guard errors when file or mapping data
    is missing, helper actions (view/reset/mapping creation) and plain
    field storage.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.Batch = cls.env["spp.hxl.import.batch"]
        cls.Profile = cls.env["spp.hxl.import.profile"]
        cls.Rule = cls.env["spp.hxl.aggregation.rule"]
        cls.Indicator = cls.env["spp.hxl.area.indicator"]
        cls.Area = cls.env["spp.area"]

        # Shared fixtures: a pcode-matching profile with one count rule
        # and a single level-2 area.
        cls.profile = cls.Profile.create(
            {
                "name": "Test Profile",
                "code": "test_batch_profile",
                "area_matching_strategy": "pcode",
                "area_column_tag": "#adm2+pcode",
                "area_level": 2,
            }
        )
        cls.rule = cls.Rule.create(
            {
                "profile_id": cls.profile.id,
                "name": "Count All",
                "aggregation_type": "count",
                "output_hxl_tag": "#meta+count",
            }
        )
        cls.area1 = cls.Area.create(
            {
                "draft_name": "Test Area 1",
                "code": "A1",
                "level": 2,
            }
        )

    def test_create_batch(self):
        """A new batch starts in the draft state."""
        batch = self.Batch.create(
            {
                "name": "Test Batch",
                "profile_id": self.profile.id,
            }
        )

        self.assertEqual(batch.name, "Test Batch")
        self.assertEqual(batch.profile_id, self.profile)
        self.assertEqual(batch.state, "draft")

    def test_batch_state_transitions(self):
        """The state field accepts the draft → mapped → processing → done path."""
        batch = self.Batch.create(
            {
                "name": "State Test Batch",
                "profile_id": self.profile.id,
            }
        )

        self.assertEqual(batch.state, "draft")

        # Direct writes simulate the workflow without running the importer.
        batch.write({"state": "mapped"})
        self.assertEqual(batch.state, "mapped")

        batch.write({"state": "processing"})
        self.assertEqual(batch.state, "processing")

        batch.write({"state": "done"})
        self.assertEqual(batch.state, "done")

    def test_action_detect_columns_no_file(self):
        """Column detection must raise when no file has been uploaded."""
        batch = self.Batch.create(
            {
                "name": "No File Batch",
                "profile_id": self.profile.id,
            }
        )

        with self.assertRaises(UserError):
            batch.action_detect_columns()

    def test_action_process_without_mapping(self):
        """Processing must raise when no column mapping exists yet."""
        batch = self.Batch.create(
            {
                "name": "No Mapping Batch",
                "profile_id": self.profile.id,
                "state": "draft",
            }
        )

        with self.assertRaises(UserError):
            batch.action_process()

    def test_action_view_indicators(self):
        """The view-indicators action targets this batch's indicators."""
        batch = self.Batch.create(
            {
                "name": "View Test Batch",
                "profile_id": self.profile.id,
            }
        )

        action = batch.action_view_indicators()

        self.assertEqual(action["type"], "ir.actions.act_window")
        self.assertEqual(action["res_model"], "spp.hxl.area.indicator")
        self.assertEqual(action["domain"], [("batch_id", "=", batch.id)])

    def test_action_reset(self):
        """Reset returns the batch to draft, zeroes counters and deletes
        its indicators."""
        batch = self.Batch.create(
            {
                "name": "Reset Test Batch",
                "profile_id": self.profile.id,
                "state": "done",
                "total_rows": 100,
                "matched_rows": 90,
            }
        )
        self.Indicator.create(
            {
                "batch_id": batch.id,
                "area_id": self.area1.id,
                "value": 10,
            }
        )

        batch.action_reset()

        self.assertEqual(batch.state, "draft")
        self.assertEqual(batch.total_rows, 0)
        self.assertEqual(batch.matched_rows, 0)
        self.assertEqual(len(batch.indicator_ids), 0)

    def test_batch_with_period_and_incident(self):
        """A period key is stored; the incident link is optional and stays
        empty when not supplied."""
        batch = self.Batch.create(
            {
                "name": "Context Test Batch",
                "profile_id": self.profile.id,
                "period_key": "2024-03",
            }
        )

        self.assertEqual(batch.period_key, "2024-03")
        # No incident was supplied, so the optional link must be empty.
        self.assertFalse(batch.incident_id)

    def test_batch_statistics_fields(self):
        """Row/area/indicator counters are plain stored integers."""
        batch = self.Batch.create(
            {
                "name": "Stats Test Batch",
                "profile_id": self.profile.id,
                "total_rows": 100,
                "matched_rows": 85,
                "unmatched_rows": 15,
                "areas_updated": 10,
                "indicators_created": 20,
            }
        )

        self.assertEqual(batch.total_rows, 100)
        self.assertEqual(batch.matched_rows, 85)
        self.assertEqual(batch.unmatched_rows, 15)
        self.assertEqual(batch.areas_updated, 10)
        self.assertEqual(batch.indicators_created, 20)

    def test_batch_error_log(self):
        """A failed batch keeps its error log text."""
        batch = self.Batch.create(
            {
                "name": "Error Test Batch",
                "profile_id": self.profile.id,
                "state": "failed",
                "error_log": "Test error message",
            }
        )

        self.assertEqual(batch.state, "failed")
        self.assertEqual(batch.error_log, "Test error message")

    def test_create_mappings(self):
        """_create_mappings builds one mapping per detected column and
        flags the configured area column with full confidence."""
        batch = self.Batch.create(
            {
                "name": "Mapping Test Batch",
                "profile_id": self.profile.id,
            }
        )

        # Simulate columns detected from an uploaded HXL file.
        hxl_columns = [
            {"index": 0, "header": "Area Code", "tag": "#adm2+pcode"},
            {"index": 1, "header": "Value", "tag": "#value"},
            {"index": 2, "header": "Other", "tag": "#other"},
        ]

        batch._create_mappings(hxl_columns)

        self.assertEqual(len(batch.mapping_ids), 3)

        # The profile's area column tag must be auto-mapped as "area".
        area_mapping = batch.mapping_ids.filtered(lambda m: m.detected_hxl_tag == "#adm2+pcode")
        self.assertEqual(area_mapping.mapping_type, "area")
        self.assertEqual(area_mapping.confidence, 1.0)

    def test_batch_with_source_url(self):
        """An optional source URL is stored as-is."""
        batch = self.Batch.create(
            {
                "name": "URL Test Batch",
                "profile_id": self.profile.id,
                "source_url": "https://example.com/data.csv",
            }
        )

        self.assertEqual(batch.source_url, "https://example.com/data.csv")

    def test_batch_message_tracking(self):
        """Batches inherit mail.thread and accept posted messages."""
        batch = self.Batch.create(
            {
                "name": "Message Test Batch",
                "profile_id": self.profile.id,
            }
        )

        batch.message_post(body="Test message", subject="Test")

        self.assertTrue(batch.message_ids)
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.

from odoo.tests import TransactionCase, tagged


@tagged("post_install", "-at_install")
class TestHxlImportMapping(TransactionCase):
    """Unit tests for the ``spp.hxl.import.mapping`` column-mapping model."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.Mapping = cls.env["spp.hxl.import.mapping"]
        cls.Batch = cls.env["spp.hxl.import.batch"]
        cls.Profile = cls.env["spp.hxl.import.profile"]

        # One shared profile/batch pair; tests needing isolation build their own.
        cls.profile = cls.Profile.create(
            {
                "name": "Mapping Test Profile",
                "code": "mapping_test_profile",
                "area_matching_strategy": "pcode",
                "area_column_tag": "#adm2+pcode",
            }
        )
        cls.batch = cls.Batch.create(
            {
                "name": "Test Import Batch",
                "profile_id": cls.profile.id,
            }
        )

    def _new_mapping(self, **vals):
        """Create a mapping, defaulting batch_id to the shared batch."""
        vals.setdefault("batch_id", self.batch.id)
        return self.Mapping.create(vals)

    def test_create_mapping(self):
        """A mapping stores its source column, detected tag and type."""
        rec = self._new_mapping(
            source_column="District",
            detected_hxl_tag="#adm2+name",
            mapping_type="area",
        )

        self.assertEqual(rec.source_column, "District")
        self.assertEqual(rec.detected_hxl_tag, "#adm2+name")
        self.assertEqual(rec.mapping_type, "area")

    def test_required_fields(self):
        """Creating a mapping without a batch must fail."""
        with self.assertRaises(Exception):
            self.Mapping.create(
                {
                    "source_column": "Test",
                    "mapping_type": "skip",
                }
            )

    def test_default_values(self):
        """Sequence, type and confidence fall back to their defaults."""
        rec = self._new_mapping(source_column="Test Column")

        self.assertEqual(rec.sequence, 10)
        self.assertEqual(rec.mapping_type, "skip")
        self.assertEqual(rec.confidence, 0.0)

    def test_all_mapping_types(self):
        """Every selection value of mapping_type is accepted."""
        for kind in ("area", "aggregate", "filter", "disaggregate", "skip"):
            rec = self._new_mapping(
                source_column=f"Test {kind}",
                mapping_type=kind,
            )
            self.assertEqual(rec.mapping_type, kind)

    def test_confidence_score_ranges(self):
        """Confidence values across the [0.0, 1.0] range round-trip intact."""
        cases = (
            (0.0, "Low Confidence", "skip"),
            (0.5, "Mid Confidence", "aggregate"),
            (1.0, "High Confidence", "area"),
        )
        for score, column, kind in cases:
            rec = self._new_mapping(
                source_column=column,
                mapping_type=kind,
                confidence=score,
            )
            self.assertEqual(rec.confidence, score)

    def test_name_get_display(self):
        """name_get shows the column, the tag and the mapping type."""
        rec = self._new_mapping(
            source_column="Households",
            detected_hxl_tag="#affected+hh",
            mapping_type="aggregate",
        )

        labels = rec.name_get()

        # Expected shape: [(id, "source_column [tag] -> type")]
        self.assertEqual(len(labels), 1)
        self.assertEqual(labels[0][0], rec.id)
        self.assertIn("Households", labels[0][1])
        self.assertIn("#affected+hh", labels[0][1])
        self.assertIn("aggregate", labels[0][1])

    def test_name_get_without_tag(self):
        """name_get copes with a missing detected HXL tag."""
        rec = self._new_mapping(source_column="Unknown", mapping_type="skip")

        labels = rec.name_get()

        self.assertEqual(len(labels), 1)
        self.assertIn("Unknown", labels[0][1])

    def test_cascade_deletion(self):
        """Deleting a batch removes its mappings via the ondelete cascade."""
        own_batch = self.Batch.create(
            {
                "name": "Cascade Test Batch",
                "profile_id": self.profile.id,
            }
        )
        rec = self._new_mapping(
            batch_id=own_batch.id,
            source_column="Cascade Test",
            mapping_type="skip",
        )
        stale_id = rec.id

        own_batch.unlink()

        self.assertFalse(self.Mapping.browse(stale_id).exists())

    def test_ordering(self):
        """Search results come back ordered by sequence within a batch."""
        late = self._new_mapping(
            source_column="Column 1",
            mapping_type="skip",
            sequence=30,
        )
        early = self._new_mapping(
            source_column="Column 2",
            mapping_type="skip",
            sequence=10,
        )

        found = self.Mapping.search([("batch_id", "=", self.batch.id)])

        # Sequence 10 must sort before sequence 30.
        self.assertEqual(found[0], early)
        self.assertEqual(found[1], late)

    def test_detected_hxl_tag_storage(self):
        """Representative HXL tags are stored verbatim."""
        for tag in ("#adm2+pcode", "#affected+hh+f", "#meta+count", "#loc+lat"):
            rec = self._new_mapping(
                source_column=f"Test {tag}",
                detected_hxl_tag=tag,
                mapping_type="aggregate",
            )
            self.assertEqual(rec.detected_hxl_tag, tag)
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.

from odoo.exceptions import ValidationError
from odoo.tests import TransactionCase, tagged


@tagged("post_install", "-at_install")
class TestHxlImportProfile(TransactionCase):
    """Unit tests for the ``spp.hxl.import.profile`` model."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.Profile = cls.env["spp.hxl.import.profile"]
        cls.Rule = cls.env["spp.hxl.aggregation.rule"]
        cls.Variable = cls.env["spp.cel.variable"]

    def _new_profile(self, **vals):
        """Create a profile with pcode-matching defaults unless overridden."""
        base = {
            "area_matching_strategy": "pcode",
            "area_column_tag": "#adm2+pcode",
        }
        base.update(vals)
        return self.Profile.create(base)

    def test_create_profile(self):
        """A profile stores its name, code and matching strategy."""
        prof = self._new_profile(
            name="Test Profile",
            code="test_profile",
            area_level=2,
        )

        self.assertEqual(prof.name, "Test Profile")
        self.assertEqual(prof.code, "test_profile")
        self.assertEqual(prof.area_matching_strategy, "pcode")

    def test_profile_code_uniqueness(self):
        """Two profiles may not share the same code."""
        self._new_profile(name="Profile 1", code="unique_code")

        with self.assertRaises(Exception):
            self._new_profile(name="Profile 2", code="unique_code")

    def test_validate_configuration_missing_area_tag(self):
        """Validation rejects a profile with an empty area column tag."""
        prof = self._new_profile(
            name="Invalid Profile",
            code="invalid_1",
            area_column_tag="",
        )

        with self.assertRaises(ValidationError):
            prof.validate_configuration()

    def test_validate_configuration_gps_missing_coords(self):
        """Validation rejects a GPS strategy without lat/lon tags."""
        prof = self._new_profile(
            name="GPS Profile",
            code="gps_test",
            area_matching_strategy="gps",
            area_column_tag="#geo+lat",
            latitude_tag="",
            longitude_tag="",
        )

        with self.assertRaises(ValidationError):
            prof.validate_configuration()

    def test_validate_configuration_no_rules(self):
        """Validation rejects a profile with no aggregation rules."""
        prof = self._new_profile(name="No Rules Profile", code="no_rules")

        with self.assertRaises(ValidationError):
            prof.validate_configuration()

    def test_validate_configuration_success(self):
        """Validation passes once a rule exists and required fields are set."""
        prof = self._new_profile(
            name="Valid Profile",
            code="valid_profile",
            area_level=2,
        )
        self.Rule.create(
            {
                "profile_id": prof.id,
                "name": "Test Rule",
                "aggregation_type": "count",
            }
        )

        self.assertTrue(prof.validate_configuration())

    def test_batch_count_computation(self):
        """batch_count reflects the number of linked import batches."""
        prof = self._new_profile(name="Profile with Batches", code="batch_test")
        self.assertEqual(prof.batch_count, 0)

        Batch = self.env["spp.hxl.import.batch"]
        Batch.create({"name": "Batch 1", "profile_id": prof.id})
        Batch.create({"name": "Batch 2", "profile_id": prof.id})

        prof._compute_batch_count()
        self.assertEqual(prof.batch_count, 2)

    def test_profile_with_aggregation_rules(self):
        """Rules attach to the profile in sequence order."""
        prof = self._new_profile(
            name="Multi-Rule Profile",
            code="multi_rule",
            area_matching_strategy="name",
            area_column_tag="#adm3+name",
            area_level=3,
        )

        first_rule = self.Rule.create(
            {
                "profile_id": prof.id,
                "name": "Count All",
                "aggregation_type": "count",
                "output_hxl_tag": "#meta+count",
                "sequence": 10,
            }
        )
        second_rule = self.Rule.create(
            {
                "profile_id": prof.id,
                "name": "Sum Values",
                "aggregation_type": "sum",
                "source_column_tag": "#affected+ind",
                "output_hxl_tag": "#affected+ind+total",
                "sequence": 20,
            }
        )

        self.assertEqual(len(prof.aggregation_ids), 2)
        self.assertEqual(prof.aggregation_ids[0], first_rule)
        self.assertEqual(prof.aggregation_ids[1], second_rule)

    def test_action_view_batches(self):
        """The view-batches action targets this profile's batches."""
        prof = self._new_profile(name="View Test Profile", code="view_test")

        action = prof.action_view_batches()

        self.assertEqual(action["type"], "ir.actions.act_window")
        self.assertEqual(action["res_model"], "spp.hxl.import.batch")
        self.assertEqual(action["domain"], [("profile_id", "=", prof.id)])
+ + + + + + + + + + + + + + + + + + + + +
+
+
+ + + + HXL Aggregation Rules + spp.hxl.aggregation.rule + list,form + +
diff --git a/spp_hxl_area/views/hxl_area_indicator_views.xml b/spp_hxl_area/views/hxl_area_indicator_views.xml new file mode 100644 index 00000000..bd1cb418 --- /dev/null +++ b/spp_hxl_area/views/hxl_area_indicator_views.xml @@ -0,0 +1,104 @@ + + + + + spp.hxl.area.indicator.tree + spp.hxl.area.indicator + + + + + + + + + + + + + + + + spp.hxl.area.indicator.form + spp.hxl.area.indicator + +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + + + spp.hxl.area.indicator.search + spp.hxl.area.indicator + + + + + + + + + + + + + + + + + + + + HXL Area Indicators + spp.hxl.area.indicator + list,form + hxl-area-indicators + +

+ No indicators yet +

+

+ Area indicators are generated from HXL import batches. +

+
+
+
diff --git a/spp_hxl_area/views/hxl_import_batch_views.xml b/spp_hxl_area/views/hxl_import_batch_views.xml new file mode 100644 index 00000000..78a7d27c --- /dev/null +++ b/spp_hxl_area/views/hxl_import_batch_views.xml @@ -0,0 +1,182 @@ + + + + + spp.hxl.import.batch.tree + spp.hxl.import.batch + + + + + + + + + + + + + + + + + + + spp.hxl.import.batch.form + spp.hxl.import.batch + +
+
+
+ +
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+
+ + + + spp.hxl.import.batch.search + spp.hxl.import.batch + + + + + + + + + + + + + + + + + + + + + + + + HXL Import Batches + spp.hxl.import.batch + list,form + hxl-import-batches + +

+ No import batches yet +

+

+ Use the HXL Import Wizard to create your first import batch. +

+
+
+
diff --git a/spp_hxl_area/views/hxl_import_profile_views.xml b/spp_hxl_area/views/hxl_import_profile_views.xml new file mode 100644 index 00000000..306d72d9 --- /dev/null +++ b/spp_hxl_area/views/hxl_import_profile_views.xml @@ -0,0 +1,175 @@ + + + + + spp.hxl.import.profile.tree + spp.hxl.import.profile + + + + + + + + + + + + + + + + spp.hxl.import.profile.form + spp.hxl.import.profile + +
+
+
+ +
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ + + + spp.hxl.import.profile.search + spp.hxl.import.profile + + + + + + + + + + + + + + + + + + HXL Import Profiles + spp.hxl.import.profile + list,form + hxl-import-profiles + {'search_default_filter_active': 1} + +

+ Create your first HXL Import Profile +

+

+ Import profiles define how to match HXL data to areas and which aggregation rules to apply. +

+
+
+
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.

import base64
import io
import logging
from html import escape

from odoo import _, api, fields, models
from odoo.exceptions import UserError

_logger = logging.getLogger(__name__)

try:
    # NOTE(review): the PyPI distribution is named "libhxl", but the module
    # it installs is "hxl" (libhxl-python). The previous `import libhxl`
    # could never succeed, so the wizard always reported the library as
    # missing even when it was installed.
    import hxl
except ImportError:
    _logger.warning("libhxl library not found")
    hxl = None


class HxlAreaImportWizard(models.TransientModel):
    """Wizard for importing HXL data with area aggregation.

    Multi-step wizard:
    1. Select profile and upload file
    2. Preview data and area matching
    3. Confirm and import
    """

    _name = "spp.hxl.area.import.wizard"
    _description = "HXL Area Import Wizard"

    # ─── Step 1: Profile and File ───────────────────────────────────
    profile_id = fields.Many2one(
        "spp.hxl.import.profile",
        required=True,
        string="Import Profile",
        help="Select the import profile defining aggregation rules",
    )

    file_data = fields.Binary(
        string="HXL File",
        required=True,
        help="Upload HXL-tagged CSV or Excel file",
    )
    file_name = fields.Char(string="File Name")

    # ─── Step 2: Context ───────────────────────────────────
    incident_id = fields.Many2one(
        "spp.hazard.incident",
        string="Incident",
        help="Optionally link to an incident/disaster event",
    )

    period_key = fields.Char(
        string="Period",
        default=lambda self: fields.Date.today().strftime("%Y-%m"),
        help="Period identifier (e.g., '2024-03' for March 2024)",
    )

    batch_name = fields.Char(
        string="Batch Name",
        help="Name for this import batch (auto-generated if not provided)",
    )

    # ─── Preview Data ───────────────────────────────────
    preview_html = fields.Html(
        string="Data Preview",
        readonly=True,
        help="Preview of first few rows from the file",
    )

    detected_hxl_row = fields.Integer(
        string="HXL Row",
        readonly=True,
        help="Row number where HXL hashtags were detected",
    )

    total_rows = fields.Integer(
        string="Total Rows",
        readonly=True,
        help="Total data rows in the file",
    )

    # ─── Area Matching Preview ───────────────────────────────────
    preview_matched = fields.Integer(
        string="Preview Rows Matched",
        readonly=True,
        help="Number of rows that could be matched to areas (sample)",
    )

    preview_unmatched = fields.Integer(
        string="Preview Unmatched",
        readonly=True,
        help="Number of rows that couldn't be matched (sample)",
    )

    unmatched_areas = fields.Text(
        string="Unmatched Area Values",
        readonly=True,
        help="Sample of area values that couldn't be matched",
    )

    @api.onchange("file_data", "profile_id")
    def _onchange_file(self):
        """Parse file and generate preview when file is uploaded."""
        if self.file_data and self.profile_id:
            self._parse_and_preview()

    def _parse_and_preview(self):
        """Parse the uploaded file and populate preview/matching fields.

        Reads only a small sample of rows; any parse error is rendered
        into ``preview_html`` instead of raising, so the onchange never
        breaks the form.
        """
        if not self.file_data:
            return

        if hxl is None:
            raise UserError(_("libhxl library not installed"))

        try:
            # Decode the base64-encoded upload into a byte stream.
            file_content = base64.b64decode(self.file_data)
            file_stream = io.BytesIO(file_content)

            # hxl.data() is libhxl's public entry point; the resulting
            # dataset exposes .columns and iterates HXL rows supporting
            # row.get(<display tag>).
            # TODO(review): confirm against the pinned libhxl version.
            hxl_stream = hxl.data(file_stream)

            # Read the first N rows for preview only.
            preview_rows = []
            sample_size = 10
            rows_seen = 0

            for hxl_row in hxl_stream:
                if rows_seen >= sample_size:
                    break

                row_dict = {}
                for col in hxl_stream.columns:
                    tag = col.display_tag
                    if tag:
                        row_dict[tag] = hxl_row.get(tag)

                preview_rows.append(row_dict)
                rows_seen += 1

            if preview_rows:
                self.preview_html = self._generate_preview_html(preview_rows)
                # Only the sample was read, so for files larger than the
                # sample this is a lower bound, not the real total.
                self.total_rows = rows_seen

                # Preview area matching against the sample.
                if self.profile_id:
                    self._preview_area_matching(preview_rows)

        except Exception as e:
            # Broad on purpose: a preview failure must surface as a
            # message in the wizard, never as a crash of the onchange.
            _logger.error("Failed to parse file: %s", e, exc_info=True)
            self.preview_html = (
                f"<p class='text-danger'>Error parsing file: {escape(str(e))}</p>"
            )

    def _generate_preview_html(self, rows):
        """Generate an HTML table preview of sample rows.

        Args:
            rows: List of row dicts mapping HXL display tag -> value.

        Returns:
            HTML string. All cell content is escaped, since values come
            from an untrusted uploaded file.
        """
        if not rows:
            return "<p>No data to preview</p>"

        # Column order is taken from the first row.
        columns = list(rows[0].keys())

        parts = ["<table class='table table-sm table-bordered'>"]

        # Header
        parts.append("<thead><tr>")
        for col in columns:
            parts.append(f"<th>{escape(str(col))}</th>")
        parts.append("</tr></thead>")

        # Rows
        parts.append("<tbody>")
        for row in rows:
            parts.append("<tr>")
            for col in columns:
                value = row.get(col, "")
                parts.append(f"<td>{escape(str(value))}</td>")
            parts.append("</tr>")
        parts.append("</tbody>")

        parts.append("</table>")

        return "".join(parts)

    def _preview_area_matching(self, rows):
        """Preview how well area values in the sample match known areas.

        Populates ``preview_matched``, ``preview_unmatched`` and a
        truncated list of unmatched values.

        Args:
            rows: Sample row dicts to test matching against.
        """
        if not rows:
            return

        from ..services.area_matcher import AreaMatcher

        # Initialize matcher with the profile's strategy/level.
        matcher = AreaMatcher(
            self.env,
            strategy=self.profile_id.area_matching_strategy,
            level=self.profile_id.area_level,
        )

        area_col_tag = self.profile_id.area_column_tag
        matched = 0
        unmatched = 0
        unmatched_values = set()

        for row in rows:
            area_value = row.get(area_col_tag, "")

            if not area_value:
                unmatched += 1
                continue

            area = matcher.match(area_value)

            if area:
                matched += 1
            else:
                unmatched += 1
                unmatched_values.add(area_value)

        self.preview_matched = matched
        self.preview_unmatched = unmatched

        if unmatched_values:
            # Show at most 20 distinct unmatched values.
            self.unmatched_areas = "\n".join(sorted(unmatched_values)[:20])
            if len(unmatched_values) > 20:
                self.unmatched_areas += f"\n... and {len(unmatched_values) - 20} more"

    def action_import(self):
        """Create an import batch from the wizard and process it.

        Returns:
            An act_window action opening the created batch form.

        Raises:
            UserError: if no file or profile is set, or the profile's
                configuration fails validation.
        """
        self.ensure_one()

        if not self.file_data:
            raise UserError(_("Please upload a file"))

        if not self.profile_id:
            raise UserError(_("Please select an import profile"))

        # Validate profile before creating anything.
        self.profile_id.validate_configuration()

        # Generate batch name if not provided.
        batch_name = self.batch_name or f"Import {fields.Datetime.now().strftime('%Y-%m-%d %H:%M')}"

        # Create batch
        batch = self.env["spp.hxl.import.batch"].create(
            {
                "name": batch_name,
                "profile_id": self.profile_id.id,
                "file_data": self.file_data,
                "file_name": self.file_name,
                "incident_id": self.incident_id.id if self.incident_id else False,
                "period_key": self.period_key,
            }
        )

        # Detect columns, then run the import.
        batch.action_detect_columns()
        batch.action_process()

        # Return action to view the processed batch.
        return {
            "type": "ir.actions.act_window",
            "name": _("Import Batch"),
            "res_model": "spp.hxl.import.batch",
            "res_id": batch.id,
            "view_mode": "form",
            "target": "current",
        }

    def action_cancel(self):
        """Close the wizard without importing."""
        return {"type": "ir.actions.act_window_close"}
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + + + Import HXL Data + spp.hxl.area.import.wizard + form + new + +
From 0c8c4964abb8e0a2406c86098b2693664bbb33f4 Mon Sep 17 00:00:00 2001 From: Jeremi Joslin Date: Tue, 17 Feb 2026 11:59:16 +0700 Subject: [PATCH 2/4] fix(spp_hxl, spp_hxl_area): update to latest with Beta promotion and UI fixes - Promote both modules to Beta status - Fix mode="tree" to mode="list" (Odoo 19 compat) - Add missing list view for aggregation rules - Make import batch fields readonly when completed/failed - Remove redundant header buttons - Fix PII in log messages - Improve form layouts --- spp_hxl/README.rst | 46 +++++++++++++------ spp_hxl/__manifest__.py | 3 +- spp_hxl/static/description/index.html | 17 +++---- spp_hxl/views/hxl_export_profile_views.xml | 7 +-- spp_hxl_area/README.rst | 46 +++++++++++++------ spp_hxl_area/__manifest__.py | 3 +- spp_hxl_area/data/hxl_import_profiles.xml | 4 +- spp_hxl_area/models/hxl_import_batch.py | 6 +-- spp_hxl_area/models/hxl_import_profile.py | 2 +- spp_hxl_area/static/description/index.html | 17 +++---- .../views/hxl_aggregation_rule_views.xml | 18 ++++++++ spp_hxl_area/views/hxl_import_batch_views.xml | 23 ++++------ .../views/hxl_import_profile_views.xml | 29 +++++------- 13 files changed, 127 insertions(+), 94 deletions(-) diff --git a/spp_hxl/README.rst b/spp_hxl/README.rst index c068cf70..678fe5b8 100644 --- a/spp_hxl/README.rst +++ b/spp_hxl/README.rst @@ -7,20 +7,26 @@ OpenSPP HXL Integration !! This file is generated by oca-gen-addon-readme !! !! changes will be overwritten. !! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - !! source digest: sha256:6ac8b288fda5285dc32109e1aa1876445a3a6584ee8ccbe17c1537f927ad05cb + !! source digest: sha256:2725b4cddb87416b387ae9395036e4f2f9e4cbea74760c5ba624796521b4d519 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png :target: https://odoo-community.org/page/development-status - :alt: Alpha + :alt: Beta .. 
|badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html :alt: License: LGPL-3 -.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2Fopenspp--modules-lightgray.png?logo=github - :target: https://github.com/OpenSPP/openspp-modules/tree/19.0/spp_hxl - :alt: OpenSPP/openspp-modules - -|badge1| |badge2| |badge3| +.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fopenspp--modules-lightgray.png?logo=github + :target: https://github.com/OCA/openspp-modules/tree/19.0/spp_hxl + :alt: OCA/openspp-modules +.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png + :target: https://translation.odoo-community.org/projects/openspp-modules-19-0/openspp-modules-19-0-spp_hxl + :alt: Translate me on Weblate +.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png + :target: https://runboat.odoo-community.org/builds?repo=OCA/openspp-modules&target_branch=19.0 + :alt: Try me on Runboat + +|badge1| |badge2| |badge3| |badge4| |badge5| Humanitarian Exchange Language (HXL) integration for OpenSPP data interoperability. Provides a registry of standard HXL hashtags and @@ -127,10 +133,6 @@ Dependencies ``spp_security``, ``spp_cel_domain``, ``spp_studio``, ``spp_vocabulary`` -.. IMPORTANT:: - This is an alpha version, the data model and design can change at any time without warning. - Only for development or testing purpose, do not use in production. - **Table of contents** .. contents:: @@ -139,10 +141,10 @@ Dependencies Bug Tracker =========== -Bugs are tracked on `GitHub Issues `_. +Bugs are tracked on `GitHub Issues `_. In case of trouble, please check there if your issue has already been reported. If you spotted it first, help us to smash it by providing a detailed and welcomed -`feedback `_. +`feedback `_. Do not contact contributors directly about support or help with technical issues. 
@@ -157,6 +159,20 @@ Authors Maintainers ----------- -This module is part of the `OpenSPP/openspp-modules `_ project on GitHub. +.. |maintainer-jeremi| image:: https://github.com/jeremi.png?size=40px + :target: https://github.com/jeremi + :alt: jeremi +.. |maintainer-gonzalesedwin1123| image:: https://github.com/gonzalesedwin1123.png?size=40px + :target: https://github.com/gonzalesedwin1123 + :alt: gonzalesedwin1123 +.. |maintainer-emjay0921| image:: https://github.com/emjay0921.png?size=40px + :target: https://github.com/emjay0921 + :alt: emjay0921 + +Current maintainers: + +|maintainer-jeremi| |maintainer-gonzalesedwin1123| |maintainer-emjay0921| + +This module is part of the `OCA/openspp-modules `_ project on GitHub. You are welcome to contribute. diff --git a/spp_hxl/__manifest__.py b/spp_hxl/__manifest__.py index 0ebc8257..09336ea6 100644 --- a/spp_hxl/__manifest__.py +++ b/spp_hxl/__manifest__.py @@ -7,7 +7,8 @@ "author": "OpenSPP.org", "website": "https://github.com/OpenSPP/OpenSPP2", "license": "LGPL-3", - "development_status": "Alpha", + "maintainers": ["jeremi", "gonzalesedwin1123", "emjay0921"], + "development_status": "Beta", "depends": [ "spp_security", "spp_cel_domain", diff --git a/spp_hxl/static/description/index.html b/spp_hxl/static/description/index.html index 85eb0959..36ce5773 100644 --- a/spp_hxl/static/description/index.html +++ b/spp_hxl/static/description/index.html @@ -367,9 +367,9 @@

OpenSPP HXL Integration

!! This file is generated by oca-gen-addon-readme !! !! changes will be overwritten. !! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!! source digest: sha256:6ac8b288fda5285dc32109e1aa1876445a3a6584ee8ccbe17c1537f927ad05cb +!! source digest: sha256:2725b4cddb87416b387ae9395036e4f2f9e4cbea74760c5ba624796521b4d519 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! --> -

Alpha License: LGPL-3 OpenSPP/openspp-modules

+

Beta License: LGPL-3 OCA/openspp-modules Translate me on Weblate Try me on Runboat

Humanitarian Exchange Language (HXL) integration for OpenSPP data interoperability. Provides a registry of standard HXL hashtags and attributes, tools for creating export profiles with HXL tagging, and @@ -503,11 +503,6 @@

Extension Points

Dependencies

spp_security, spp_cel_domain, spp_studio, spp_vocabulary

-
-

Important

-

This is an alpha version, the data model and design can change at any time without warning. -Only for development or testing purpose, do not use in production.

-

Table of contents

    @@ -521,10 +516,10 @@

    Dependencies

Bug Tracker

-

Bugs are tracked on GitHub Issues. +

Bugs are tracked on GitHub Issues. In case of trouble, please check there if your issue has already been reported. If you spotted it first, help us to smash it by providing a detailed and welcomed -feedback.

+feedback.

Do not contact contributors directly about support or help with technical issues.

@@ -537,7 +532,9 @@

Authors

Maintainers

-

This module is part of the OpenSPP/openspp-modules project on GitHub.

+

Current maintainers:

+

jeremi gonzalesedwin1123 emjay0921

+

This module is part of the OCA/openspp-modules project on GitHub.

You are welcome to contribute.

diff --git a/spp_hxl/views/hxl_export_profile_views.xml b/spp_hxl/views/hxl_export_profile_views.xml index 9d370d0b..832ecf82 100644 --- a/spp_hxl/views/hxl_export_profile_views.xml +++ b/spp_hxl/views/hxl_export_profile_views.xml @@ -55,11 +55,12 @@ - - + + + - +
diff --git a/spp_hxl_area/README.rst b/spp_hxl_area/README.rst index 999ce14c..62693042 100644 --- a/spp_hxl_area/README.rst +++ b/spp_hxl_area/README.rst @@ -7,20 +7,26 @@ OpenSPP HXL Area Integration !! This file is generated by oca-gen-addon-readme !! !! changes will be overwritten. !! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - !! source digest: sha256:3c5a1d6122d13f49f33892b9b6f52d26da35c110f224c781fa24f90d788dacf3 + !! source digest: sha256:8207f2737da83b4a78dda75cf232ad3cabe9bedab42315659385ce7646ea06dc !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png :target: https://odoo-community.org/page/development-status - :alt: Alpha + :alt: Beta .. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html :alt: License: LGPL-3 -.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2Fopenspp--modules-lightgray.png?logo=github - :target: https://github.com/OpenSPP/openspp-modules/tree/19.0/spp_hxl_area - :alt: OpenSPP/openspp-modules - -|badge1| |badge2| |badge3| +.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fopenspp--modules-lightgray.png?logo=github + :target: https://github.com/OCA/openspp-modules/tree/19.0/spp_hxl_area + :alt: OCA/openspp-modules +.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png + :target: https://translation.odoo-community.org/projects/openspp-modules-19-0/openspp-modules-19-0-spp_hxl_area + :alt: Translate me on Weblate +.. 
|badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png + :target: https://runboat.odoo-community.org/builds?repo=OCA/openspp-modules&target_branch=19.0 + :alt: Try me on Runboat + +|badge1| |badge2| |badge3| |badge4| |badge5| Import HXL-tagged field data and aggregate to area-level indicators for humanitarian coordination. Matches HXL data rows to geographical areas @@ -118,10 +124,6 @@ Dependencies ``spp_hxl``, ``spp_area``, ``spp_cel_domain``, ``spp_hazard``, ``spp_security``, ``queue_job`` -.. IMPORTANT:: - This is an alpha version, the data model and design can change at any time without warning. - Only for development or testing purpose, do not use in production. - **Table of contents** .. contents:: @@ -130,10 +132,10 @@ Dependencies Bug Tracker =========== -Bugs are tracked on `GitHub Issues `_. +Bugs are tracked on `GitHub Issues `_. In case of trouble, please check there if your issue has already been reported. If you spotted it first, help us to smash it by providing a detailed and welcomed -`feedback `_. +`feedback `_. Do not contact contributors directly about support or help with technical issues. @@ -148,6 +150,20 @@ Authors Maintainers ----------- -This module is part of the `OpenSPP/openspp-modules `_ project on GitHub. +.. |maintainer-jeremi| image:: https://github.com/jeremi.png?size=40px + :target: https://github.com/jeremi + :alt: jeremi +.. |maintainer-gonzalesedwin1123| image:: https://github.com/gonzalesedwin1123.png?size=40px + :target: https://github.com/gonzalesedwin1123 + :alt: gonzalesedwin1123 +.. |maintainer-emjay0921| image:: https://github.com/emjay0921.png?size=40px + :target: https://github.com/emjay0921 + :alt: emjay0921 + +Current maintainers: + +|maintainer-jeremi| |maintainer-gonzalesedwin1123| |maintainer-emjay0921| + +This module is part of the `OCA/openspp-modules `_ project on GitHub. You are welcome to contribute. 
diff --git a/spp_hxl_area/__manifest__.py b/spp_hxl_area/__manifest__.py index 9dca8914..f1a05e0f 100644 --- a/spp_hxl_area/__manifest__.py +++ b/spp_hxl_area/__manifest__.py @@ -9,7 +9,8 @@ "author": "OpenSPP.org", "website": "https://github.com/OpenSPP/OpenSPP2", "license": "LGPL-3", - "development_status": "Alpha", + "maintainers": ["jeremi", "gonzalesedwin1123", "emjay0921"], + "development_status": "Beta", "depends": [ "spp_hxl", "spp_area", diff --git a/spp_hxl_area/data/hxl_import_profiles.xml b/spp_hxl_area/data/hxl_import_profiles.xml index 2f95dbae..87f07e07 100644 --- a/spp_hxl_area/data/hxl_import_profiles.xml +++ b/spp_hxl_area/data/hxl_import_profiles.xml @@ -1,6 +1,5 @@ - - + Sri Lanka Damage Assessment @@ -138,5 +137,4 @@ #meta+count 10 - diff --git a/spp_hxl_area/models/hxl_import_batch.py b/spp_hxl_area/models/hxl_import_batch.py index 835db78d..6d02d473 100644 --- a/spp_hxl_area/models/hxl_import_batch.py +++ b/spp_hxl_area/models/hxl_import_batch.py @@ -262,7 +262,7 @@ def process_import(self): from ..services.aggregation_engine import AggregationEngine from ..services.area_matcher import AreaMatcher - _logger.info("Starting HXL import batch: %s", self.name) + _logger.info("Starting HXL import batch: %s", self.id) # Decode file file_content = base64.b64decode(self.file_data) @@ -350,7 +350,7 @@ def process_import(self): % (len(created_indicators), areas_updated), ) - _logger.info("HXL import batch completed: %s", self.name) + _logger.info("HXL import batch completed: %s", self.id) except Exception as e: error_msg = _("Import failed: %s") % str(e) @@ -377,7 +377,7 @@ def action_view_indicators(self): "type": "ir.actions.act_window", "name": _("Generated Indicators"), "res_model": "spp.hxl.area.indicator", - "view_mode": "tree,form", + "view_mode": "list,form", "domain": [("batch_id", "=", self.id)], "context": {"default_batch_id": self.id}, } diff --git a/spp_hxl_area/models/hxl_import_profile.py b/spp_hxl_area/models/hxl_import_profile.py index 
8a9b5e3f..9ac95f49 100644 --- a/spp_hxl_area/models/hxl_import_profile.py +++ b/spp_hxl_area/models/hxl_import_profile.py @@ -97,7 +97,7 @@ def action_view_batches(self): "type": "ir.actions.act_window", "name": _("Import Batches"), "res_model": "spp.hxl.import.batch", - "view_mode": "tree,form", + "view_mode": "list,form", "domain": [("profile_id", "=", self.id)], "context": {"default_profile_id": self.id}, } diff --git a/spp_hxl_area/static/description/index.html b/spp_hxl_area/static/description/index.html index be2f19c5..aece9677 100644 --- a/spp_hxl_area/static/description/index.html +++ b/spp_hxl_area/static/description/index.html @@ -367,9 +367,9 @@

OpenSPP HXL Area Integration

!! This file is generated by oca-gen-addon-readme !! !! changes will be overwritten. !! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!! source digest: sha256:3c5a1d6122d13f49f33892b9b6f52d26da35c110f224c781fa24f90d788dacf3 +!! source digest: sha256:8207f2737da83b4a78dda75cf232ad3cabe9bedab42315659385ce7646ea06dc !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! --> -

Alpha License: LGPL-3 OpenSPP/openspp-modules

+

Beta License: LGPL-3 OCA/openspp-modules Translate me on Weblate Try me on Runboat

Import HXL-tagged field data and aggregate to area-level indicators for humanitarian coordination. Matches HXL data rows to geographical areas using P-codes, names, or GPS coordinates, then aggregates values @@ -492,11 +492,6 @@

Extension Points

Dependencies

spp_hxl, spp_area, spp_cel_domain, spp_hazard, spp_security, queue_job

-
-

Important

-

This is an alpha version, the data model and design can change at any time without warning. -Only for development or testing purpose, do not use in production.

-

Table of contents

    @@ -510,10 +505,10 @@

    Dependencies

Bug Tracker

-

Bugs are tracked on GitHub Issues. +

Bugs are tracked on GitHub Issues. In case of trouble, please check there if your issue has already been reported. If you spotted it first, help us to smash it by providing a detailed and welcomed -feedback.

+feedback.

Do not contact contributors directly about support or help with technical issues.

@@ -526,7 +521,9 @@

Authors

Maintainers

-

This module is part of the OpenSPP/openspp-modules project on GitHub.

+

Current maintainers:

+

jeremi gonzalesedwin1123 emjay0921

+

This module is part of the OCA/openspp-modules project on GitHub.

You are welcome to contribute.

diff --git a/spp_hxl_area/views/hxl_aggregation_rule_views.xml b/spp_hxl_area/views/hxl_aggregation_rule_views.xml index 08748f96..dc74f127 100644 --- a/spp_hxl_area/views/hxl_aggregation_rule_views.xml +++ b/spp_hxl_area/views/hxl_aggregation_rule_views.xml @@ -1,5 +1,23 @@ + + + spp.hxl.aggregation.rule.tree + spp.hxl.aggregation.rule + + + + + + + + + + + + + + spp.hxl.aggregation.rule.form diff --git a/spp_hxl_area/views/hxl_import_batch_views.xml b/spp_hxl_area/views/hxl_import_batch_views.xml index 78a7d27c..6b27f6c0 100644 --- a/spp_hxl_area/views/hxl_import_batch_views.xml +++ b/spp_hxl_area/views/hxl_import_batch_views.xml @@ -41,13 +41,6 @@ invisible="state != 'mapped'" class="oe_highlight" /> - - - + + - - + + - - + + @@ -87,18 +123,33 @@ - - + + - + - + @@ -111,7 +162,11 @@ - + @@ -122,12 +177,16 @@ - + - +
@@ -143,15 +202,43 @@ - - - - + + + + - - - + + + diff --git a/spp_hxl_area/views/hxl_import_profile_views.xml b/spp_hxl_area/views/hxl_import_profile_views.xml index 85c4329e..e2586457 100644 --- a/spp_hxl_area/views/hxl_import_profile_views.xml +++ b/spp_hxl_area/views/hxl_import_profile_views.xml @@ -39,7 +39,11 @@ class="oe_stat_button" icon="fa-upload" > - + HXL Data Import

- HXL (Humanitarian Exchange Language) uses hashtags to standardize - humanitarian data columns. Tags like #adm2+name (district name) or - #affected+hh (affected households) enable automatic data processing. + HXL (Humanitarian Exchange Language) uses hashtags to standardize + humanitarian data columns. Tags like #adm2+name (district name) or + #affected+hh (affected households) enable automatic data processing.

This wizard imports HXL-tagged data and aggregates it by geographical area, @@ -69,19 +72,27 @@ - +

@@ -93,7 +104,12 @@ string="Import" class="oe_highlight" /> -