Compare commits

..

3 Commits

Author SHA1 Message Date
6711bce827 Mapping update 2026-04-09 02:10:56 +01:00
70bdd1639b Quality Check Fix 2026-03-31 22:50:22 +01:00
4b5b6cd1a4 ignore .exe 2026-03-31 20:01:04 +01:00
8 changed files with 56 additions and 4 deletions

5
.gitignore vendored
View File

@@ -195,8 +195,9 @@ Endobest Reporting/
jsons history/
nul
# ignore running artefacts
# ignore runtime artefacts
/*.xlsx
!do_org_center_mapping.xlsx
/*.json
/*.json
/*.exe

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -1487,6 +1487,33 @@ def main():
print("Building organizations summary...")
organizations_list = build_organizations(request_metas)
# === REPORT ORGANIZATION CENTER MAPPING ===
if mapping_dict:
unique_orgs = {} # org_normalized -> original org_name
for meta in request_metas:
org_name = meta.get("org_name")
if org_name:
unique_orgs[org_name.strip().lower()] = org_name
mapped_count = sum(1 for norm in unique_orgs if norm in mapping_dict)
unmapped_orgs = sorted(
name for norm, name in unique_orgs.items() if norm not in mapping_dict
)
mapping_summary = (
f"Organization center mapping: {mapped_count}/{len(unique_orgs)} organization(s) mapped."
)
console.print(f"[green]{mapping_summary}[/green]")
logging.info(mapping_summary)
if unmapped_orgs:
unmapped_header = f"{len(unmapped_orgs)} unmapped organization(s):"
console.print(f"[yellow]⚠ {unmapped_header}[/yellow]")
logging.warning(unmapped_header)
for org_name in unmapped_orgs:
console.print(f"[yellow] - {org_name}[/yellow]")
logging.warning(" - %s", org_name)
try:
# === QUALITY CHECKS ===
print()

View File

@@ -540,7 +540,20 @@ def non_regression_check(output_requests, old_requests_filename):
f"Config: {rule.get('field_selection')}"
)
for group_name, field_name in sorted(candidate_fields):
# Iterate in config order (field_selection), not alphabetically
field_selection_config = rule.get("field_selection") or []
ordered_candidates = []
seen = set()
for _action, selector in field_selection_config:
sel_group, sel_field = selector.split(".", 1)
for (group_name, field_name) in candidate_fields:
if (group_name, field_name) in seen:
continue
if (sel_group in ("*", group_name)) and (sel_field in ("*", field_name)):
ordered_candidates.append((group_name, field_name))
seen.add((group_name, field_name))
for group_name, field_name in ordered_candidates:
new_val = get_nested_value(new_req, [group_name, field_name])
old_val = get_nested_value(old_req, [group_name, field_name])
if new_val is not None and old_val is not None:
@@ -734,10 +747,14 @@ def non_regression_check(output_requests, old_requests_filename):
for rule in rules:
line_label = rule["line_label"]
if line_label in ["New Requests", "Deleted Requests"]:
if line_label == "New Requests":
count = _process_special_rule(rule, line_label, new_dict, old_dict)
line_results.append((line_label, count, None, "simple"))
elif line_label == "Deleted Requests":
deleted_keys = sorted(set(old_dict.keys()) - set(new_dict.keys()))
line_results.append((line_label, len(deleted_keys), deleted_keys, "deleted_requests"))
elif line_label in ["New Fields", "Deleted Fields"]:
field_list = _process_new_deleted_fields(line_label, new_dict, old_dict)
count = len(field_list)
@@ -778,6 +795,13 @@ def non_regression_check(output_requests, old_requests_filename):
new_display = f"'{new_val}'" if isinstance(new_val, str) else str(new_val)
console.print(f" - {qualified_field}: {old_display}{new_display}")
elif result_type == "deleted_requests":
_print_check_line(line_label, count, status_tuple, indent=1)
logging.warning("Regression check - %s: %d", line_label, count)
for deleted_key in data:
console.print(f" {key_field}: {deleted_key}")
logging.warning(" Deleted request: %s=%s", key_field, deleted_key)
else:
_print_check_line(line_label, count, status_tuple, indent=1)

Binary file not shown.