diff --git a/config/DO_Dashboard_Config.xlsx b/config/DO_Dashboard_Config.xlsx
index 3bded32..2b4b82c 100644
Binary files a/config/DO_Dashboard_Config.xlsx and b/config/DO_Dashboard_Config.xlsx differ
diff --git a/do_dashboard.py b/do_dashboard.py
index cf25ad4..96f87fb 100644
--- a/do_dashboard.py
+++ b/do_dashboard.py
@@ -1487,6 +1487,33 @@ def main():
     print("Building organizations summary...")
     organizations_list = build_organizations(request_metas)
 
+    # === REPORT ORGANIZATION CENTER MAPPING ===
+    if mapping_dict:
+        unique_orgs = {}  # org_normalized -> original org_name
+        for meta in request_metas:
+            org_name = meta.get("org_name")
+            if org_name:
+                unique_orgs[org_name.strip().lower()] = org_name
+
+        mapped_count = sum(1 for norm in unique_orgs if norm in mapping_dict)
+        unmapped_orgs = sorted(
+            name for norm, name in unique_orgs.items() if norm not in mapping_dict
+        )
+
+        mapping_summary = (
+            f"Organization center mapping: {mapped_count}/{len(unique_orgs)} organization(s) mapped."
+        )
+        console.print(f"[green]{mapping_summary}[/green]")
+        logging.info(mapping_summary)
+
+        if unmapped_orgs:
+            unmapped_header = f"{len(unmapped_orgs)} unmapped organization(s):"
+            console.print(f"[yellow]⚠ {unmapped_header}[/yellow]")
+            logging.warning(unmapped_header)
+            for org_name in unmapped_orgs:
+                console.print(f"[yellow]  - {org_name}[/yellow]")
+                logging.warning("  - %s", org_name)
+
     try:
         # === QUALITY CHECKS ===
         print()
diff --git a/do_dashboard_quality_checks.py b/do_dashboard_quality_checks.py
index 4537ac1..fef6eb8 100644
--- a/do_dashboard_quality_checks.py
+++ b/do_dashboard_quality_checks.py
@@ -540,7 +540,20 @@ def non_regression_check(output_requests, old_requests_filename):
                     f"Config: {rule.get('field_selection')}"
                 )
 
-            for group_name, field_name in sorted(candidate_fields):
+            # Iterate in config order (field_selection), not alphabetically
+            field_selection_config = rule.get("field_selection") or []
+            ordered_candidates = []
+            seen = set()
+            for _action, selector in field_selection_config:
+                sel_group, sel_field = selector.split(".", 1)
+                for (group_name, field_name) in candidate_fields:
+                    if (group_name, field_name) in seen:
+                        continue
+                    if (sel_group in ("*", group_name)) and (sel_field in ("*", field_name)):
+                        ordered_candidates.append((group_name, field_name))
+                        seen.add((group_name, field_name))
+
+            for group_name, field_name in ordered_candidates:
                 new_val = get_nested_value(new_req, [group_name, field_name])
                 old_val = get_nested_value(old_req, [group_name, field_name])
                 if new_val is not None and old_val is not None:
@@ -734,10 +747,14 @@ def non_regression_check(output_requests, old_requests_filename):
 
     for rule in rules:
         line_label = rule["line_label"]
-        if line_label in ["New Requests", "Deleted Requests"]:
+        if line_label == "New Requests":
             count = _process_special_rule(rule, line_label, new_dict, old_dict)
             line_results.append((line_label, count, None, "simple"))
 
+        elif line_label == "Deleted Requests":
+            deleted_keys = sorted(set(old_dict.keys()) - set(new_dict.keys()))
+            line_results.append((line_label, len(deleted_keys), deleted_keys, "deleted_requests"))
+
        elif line_label in ["New Fields", "Deleted Fields"]:
             field_list = _process_new_deleted_fields(line_label, new_dict, old_dict)
             count = len(field_list)
@@ -778,6 +795,13 @@ def non_regression_check(output_requests, old_requests_filename):
             new_display = f"'{new_val}'" if isinstance(new_val, str) else str(new_val)
             console.print(f"    - {qualified_field}: {old_display} → {new_display}")
 
+        elif result_type == "deleted_requests":
+            _print_check_line(line_label, count, status_tuple, indent=1)
+            logging.warning("Regression check - %s: %d", line_label, count)
+            for deleted_key in data:
+                console.print(f"    {key_field}: {deleted_key}")
+                logging.warning("    Deleted request: %s=%s", key_field, deleted_key)
+
         else:
             _print_check_line(line_label, count, status_tuple, indent=1)
 