Fix and Clean

This commit is contained in:
2026-03-28 04:25:29 +01:00
parent a2dea95c89
commit ba91826e01
20 changed files with 87034 additions and 48375 deletions

Binary file not shown.

View File

@@ -873,6 +873,28 @@ def _execute_custom_function(function_name, args, output_request):
return f"$$$$ Unknown Custom Function: {function_name}" return f"$$$$ Unknown Custom Function: {function_name}"
def _apply_date_format(strftime_template, iso_value):
"""
Parses an ISO 8601 date string and formats it using a strftime template.
Returns the formatted string, or None if the source value cannot be parsed.
"""
iso_formats = [
"%Y-%m-%dT%H:%M:%S.%fZ",
"%Y-%m-%dT%H:%M:%SZ",
"%Y-%m-%dT%H:%M:%S.%f",
"%Y-%m-%dT%H:%M:%S",
"%Y-%m-%dT%H:%M",
"%Y-%m-%d",
]
for fmt in iso_formats:
try:
return datetime.strptime(iso_value, fmt).strftime(strftime_template)
except (ValueError, TypeError):
continue
return None
def process_requests_mapping(output_request, request_data): def process_requests_mapping(output_request, request_data):
"""Processes and adds the requests mapping fields to the output request dictionary.""" """Processes and adds the requests mapping fields to the output request dictionary."""
for field in requests_mapping_config: for field in requests_mapping_config:
@@ -946,6 +968,10 @@ def process_requests_mapping(output_request, request_data):
# Post-processing: Apply field template # Post-processing: Apply field template
field_template = field.get("field_template") field_template = field.get("field_template")
if field_template and final_value not in ["undefined", "N/A"] and isinstance(final_value, (str, int, float, bool)): if field_template and final_value not in ["undefined", "N/A"] and isinstance(final_value, (str, int, float, bool)):
if "%" in field_template:
formatted = _apply_date_format(field_template, str(final_value))
final_value = formatted if formatted is not None else f"$$$$ Date Format Error: {final_value}"
else:
final_value = field_template.replace("$value", str(final_value)) final_value = field_template.replace("$value", str(final_value))
if field_group not in output_request: if field_group not in output_request:
@@ -1139,8 +1165,8 @@ def _process_single_request(worklist_request, mapping_dict):
request_detail = {} request_detail = {}
# --- 2. Fetch professional names (prescriber + requester, deduplicated) --- # --- 2. Fetch professional names (prescriber + requester, deduplicated) ---
prescriber_id = worklist_request.get("prescriber") prescriber_id = request_detail.get("prescriber")
requester_id = worklist_request.get("requester") requester_id = request_detail.get("requester")
# Deduplicate IDs before API call # Deduplicate IDs before API call
unique_ids = list({pid for pid in [prescriber_id, requester_id] if pid}) unique_ids = list({pid for pid in [prescriber_id, requester_id] if pid})
@@ -1150,7 +1176,7 @@ def _process_single_request(worklist_request, mapping_dict):
request_detail["prescriberName"] = professionals.get(prescriber_id) if prescriber_id else None request_detail["prescriberName"] = professionals.get(prescriber_id) if prescriber_id else None
request_detail["requesterName"] = professionals.get(requester_id) if requester_id else None request_detail["requesterName"] = professionals.get(requester_id) if requester_id else None
# --- 3. Inject patient identity fields from worklist --- # --- 3. Inject patient identity fields from worklist (only source for these fields) ---
identity = worklist_request.get("identity") or {} identity = worklist_request.get("identity") or {}
request_detail["lastname"] = identity.get("lastname") request_detail["lastname"] = identity.get("lastname")
request_detail["firstname"] = identity.get("firstname") request_detail["firstname"] = identity.get("firstname")
@@ -1162,7 +1188,7 @@ def _process_single_request(worklist_request, mapping_dict):
request_detail["status"] = diagnostic_status request_detail["status"] = diagnostic_status
# --- 5. Center mapping: inject Center_Name from labeledOrganization --- # --- 5. Center mapping: inject Center_Name from labeledOrganization ---
labeled_org = worklist_request.get("labeledOrganization") labeled_org = request_detail.get("labeledOrganization")
if labeled_org: if labeled_org:
org_normalized = labeled_org.strip().lower() org_normalized = labeled_org.strip().lower()
request_detail["Center_Name"] = mapping_dict.get(org_normalized, labeled_org) request_detail["Center_Name"] = mapping_dict.get(org_normalized, labeled_org)
@@ -1175,7 +1201,7 @@ def _process_single_request(worklist_request, mapping_dict):
# --- 7. Build meta for organization building and sorting --- # --- 7. Build meta for organization building and sorting ---
request_meta = { request_meta = {
"org_id": worklist_request.get("organization"), "org_id": request_detail.get("organization"),
"org_name": labeled_org, "org_name": labeled_org,
"center_name": request_detail.get("Center_Name"), "center_name": request_detail.get("Center_Name"),
"status": request_detail.get("status"), "status": request_detail.get("status"),
@@ -1447,6 +1473,7 @@ def main():
print("Sorting results...") print("Sorting results...")
all_results.sort(key=lambda x: ( all_results.sort(key=lambda x: (
x[1].get("center_name") or "",
x[1].get("lastname") or "", x[1].get("lastname") or "",
x[1].get("firstname") or "", x[1].get("firstname") or "",
x[1].get("id") or "" x[1].get("id") or ""

File diff suppressed because it is too large Load Diff

1122
do_organizations_old.json Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

42851
do_requests_old.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +0,0 @@
@echo off
call C:\PythonProjects\.rcvenv\Scripts\activate.bat
python eb_dashboard.py %*

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +0,0 @@
@echo off
eb_dashboard.exe --check-only %*

View File

@@ -1,4 +0,0 @@
@echo off
call C:\PythonProjects\.rcvenv\Scripts\activate.bat
python eb_dashboard.py --check-only %*

View File

@@ -1,3 +0,0 @@
@echo off
eb_dashboard.exe --check-only --debug %*

View File

@@ -1,4 +0,0 @@
@echo off
call C:\PythonProjects\.rcvenv\Scripts\activate.bat
python eb_dashboard.py --check-only --debug %*

View File

@@ -1,143 +0,0 @@
"""
Endobest Dashboard - Centralized Constants Module
This module defines ALL constants used across the Endobest Dashboard application.
It serves as the single source of truth for all configuration values.
All other modules MUST import constants from this module, NOT define them locally.
Structure:
- File names & paths
- Table names (Excel sheets)
- API endpoints
- Authentication credentials
- Threading & retry parameters
- Protocol IDs
- UI formatting constants
"""
# ============================================================================
# FILE NAMES & PATHS
# ============================================================================
INCLUSIONS_FILE_NAME = "endobest_inclusions.json"
ORGANIZATIONS_FILE_NAME = "endobest_organizations.json"
OLD_FILE_SUFFIX = "_old"
CONFIG_FOLDER_NAME = "config"
# ============================================================================
# EXCEL CONFIGURATION FILES
# ============================================================================
DASHBOARD_CONFIG_FILE_NAME = "Endobest_Dashboard_Config.xlsx"
ORG_CENTER_MAPPING_FILE_NAME = "eb_org_center_mapping.xlsx"
# ============================================================================
# TABLE NAMES (Excel sheets in DASHBOARD_CONFIG_FILE_NAME)
# ============================================================================
INCLUSIONS_MAPPING_TABLE_NAME = "Inclusions_Mapping"
ORGANIZATIONS_MAPPING_TABLE_NAME = "Organizations_Mapping"
EXCEL_WORKBOOKS_TABLE_NAME = "Excel_Workbooks"
EXCEL_SHEETS_TABLE_NAME = "Excel_Sheets"
REGRESSION_CHECK_TABLE_NAME = "Regression_Check"
ORG_CENTER_MAPPING_TABLE_NAME = "Org_Center_Mapping"
# ============================================================================
# API ENDPOINTS & AUTHENTICATION
# ============================================================================
IAM_URL = "https://api-auth.ziwig-connect.com"
RC_URL = "https://api-hcp.ziwig-connect.com"
GDD_URL = "https://api-lab.ziwig-connect.com"
RC_APP_ID = "602aea51-cdb2-4f73-ac99-fd84050dc393"
DEFAULT_USER_NAME = "ziwig-invest2@yopmail.com"
DEFAULT_PASSWORD = "pbrrA765$bP3beiuyuiyhiuy!agxagx"
# ============================================================================
# RESEARCH PROTOCOL CONFIGURATION
# ============================================================================
RC_ENDOBEST_PROTOCOL_ID = "3c7bcb4d-91ed-4e9f-b93f-99d8447a276e"
RC_ENDOBEST_EXCLUDED_CENTERS = [
"e18e7487-60d5-4110-b465-b4156fe0e7f3",
"5582bd75-12fd-4d8e-bfd6-d63c43667a99",
"e053512f-d989-4564-8a73-b3d2d1b38fec"
]
# ============================================================================
# API ENDPOINTS
# ============================================================================
# Authentication endpoints
API_AUTH_LOGIN_ENDPOINT = "/api/auth/ziwig-pro/login"
API_AUTH_CONFIG_TOKEN_ENDPOINT = "/api/auth/config-token"
API_AUTH_REFRESH_TOKEN_ENDPOINT = "/api/auth/refreshToken"
# Research Clinic (RC) endpoints
API_RC_GET_ALL_ORGANIZATIONS_ENDPOINT = "/api/inclusions/getAllOrganizations"
API_RC_INCLUSION_STATISTICS_ENDPOINT = "/api/inclusions/inclusion-statistics"
API_RC_SEARCH_INCLUSIONS_ENDPOINT = "/api/inclusions/search"
API_RC_GET_RECORD_BY_PATIENT_ENDPOINT = "/api/records/byPatient"
API_RC_GET_SURVEYS_ENDPOINT = "/api/surveys/filter/with-answers"
API_RC_SEARCH_VISITS_ENDPOINT = "/api/visits/visits/search"
# GDD (Lab/Diagnostic) endpoints
API_GDD_GET_REQUEST_BY_TUBE_ID_ENDPOINT = "/api/requests/by-tube-id"
# ============================================================================
# THREADING & RETRY PARAMETERS
# ============================================================================
ERROR_MAX_RETRY = 10
WAIT_BEFORE_RETRY = 1
WAIT_BEFORE_NEW_BATCH_OF_RETRIES = 20
MAX_BATCHS_OF_RETRIES = 3
MAX_THREADS = 40
# Excel operation retry parameters (for handling transient xlwings/Excel failures)
# Applies to: SaveAs, Range.Select(), and other COM operations that can fail transiently on Excel 2013
EXCEL_COM_MAX_RETRIES = 3 # Maximum retry attempts for transient COM failures
EXCEL_COM_RETRY_DELAY = 0.5 # Delay in seconds between retries
# ============================================================================
# LOGGING CONFIGURATION
# ============================================================================
LOG_FILE_NAME = "dashboard.log"
# ============================================================================
# API CONFIGURATION
# ============================================================================
API_TIMEOUT = 60 # seconds - timeout for all API calls
# ============================================================================
# EXCEL EXPORT CONFIGURATION
# ============================================================================
# Output file conflict handling actions
OUTPUT_ACTION_OVERWRITE = "Overwrite"
OUTPUT_ACTION_INCREMENT = "Increment"
OUTPUT_ACTION_BACKUP = "Backup"
OUTPUT_ACTIONS = [OUTPUT_ACTION_OVERWRITE, OUTPUT_ACTION_INCREMENT, OUTPUT_ACTION_BACKUP]
# Excel export data source types
SOURCE_TYPE_INCLUSIONS = "Inclusions"
SOURCE_TYPE_ORGANIZATIONS = "Organizations"
SOURCE_TYPE_VARIABLE = "Variable"
SOURCE_TYPES = [SOURCE_TYPE_INCLUSIONS, SOURCE_TYPE_ORGANIZATIONS, SOURCE_TYPE_VARIABLE]
# Excel export target types (for data filling)
TARGET_TYPE_TABLE = "Table" # Excel structured table (ListObject) - has headers, supports Resize()
TARGET_TYPE_NAMED_RANGE = "NamedRange" # Simple named range - no headers, resize via Name.RefersTo
# ============================================================================
# UI FORMATTING (Progress bars)
# ============================================================================
BAR_N_FMT_WIDTH = 4
BAR_TOTAL_FMT_WIDTH = 4
BAR_TIME_WIDTH = 8
BAR_RATE_WIDTH = 10

View File

@@ -1,3 +0,0 @@
@echo off
eb_dashboard.exe --debug %*

View File

@@ -1,4 +0,0 @@
@echo off
call C:\PythonProjects\.rcvenv\Scripts\activate.bat
python eb_dashboard.py --debug %*

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +0,0 @@
@echo off
eb_dashboard.exe --excel-only %*

View File

@@ -1,4 +0,0 @@
@echo off
call C:\PythonProjects\.rcvenv\Scripts\activate.bat
python eb_dashboard.py --excel-only %*

File diff suppressed because it is too large Load Diff

View File

@@ -1,220 +0,0 @@
"""
Endobest Dashboard - Utility Functions Module
This module contains generic utility functions used throughout the Endobest Dashboard:
- HTTP client management (thread-safe)
- Nested data structure navigation with wildcard support
- Configuration path resolution (script vs PyInstaller)
- Thread position management for progress bars
- Filename generation utilities
"""
import os
import sys
import threading
import httpx
from eb_dashboard_constants import CONFIG_FOLDER_NAME
# ============================================================================
# GLOBAL VARIABLES (managed by main module)
# ============================================================================
# Per-thread scratch storage; each worker thread sees its own attributes.
thread_local_storage = threading.local()
def run_with_context(func, context, *args, **kwargs):
    """Install *context* in thread-local storage, then invoke *func*.

    ThreadPoolExecutor workers do not inherit the submitting thread's
    locals, so the patient context is re-installed here before the call.
    Returns whatever *func* returns.
    """
    thread_local_storage.current_patient_context = context
    result = func(*args, **kwargs)
    return result
# These globals are set/accessed from the main module.
httpx_clients = {}  # thread-id -> httpx.Client cache (guarded by _clients_lock)
_clients_lock = threading.Lock()
threads_list = []  # thread ids in first-seen order (guarded by _threads_list_lock)
_threads_list_lock = threading.Lock()
# ============================================================================
# HTTP CLIENT MANAGEMENT
# ============================================================================
def get_httpx_client() -> httpx.Client:
    """Return the HTTP client cached for the calling thread, creating it on demand.

    Keep-alive is deliberately disabled: pooled connections can go stale
    behind load balancers, so every request asks the server to close.
    """
    global httpx_clients
    thread_id = threading.get_ident()
    with _clients_lock:
        client = httpx_clients.get(thread_id)
        if client is None:
            # First call from this thread: build a no-keep-alive client.
            client = httpx.Client(
                headers={"Connection": "close"},  # Explicitly request closing
                limits=httpx.Limits(max_keepalive_connections=0, max_connections=100),
            )
            httpx_clients[thread_id] = client
    return client
def clear_httpx_client():
    """
    Remove the calling thread's cached HTTP client, closing it first.

    Ensures a fresh client (and socket pool) will be created on the next
    call to get_httpx_client() from this thread. Safe to call when no
    client is cached.
    """
    global httpx_clients
    thread_id = threading.get_ident()
    # pop() under the lock keeps the cache consistent; the (potentially slow)
    # close happens outside the lock so other threads are not blocked.
    with _clients_lock:
        client = httpx_clients.pop(thread_id, None)
    if client is not None:
        try:
            # Best-effort close: a broken client may raise while shutting down.
            client.close()
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; transport errors are still ignored.
            pass
def get_thread_position():
    """Return a stable zero-based slot index for the calling thread.

    A thread is appended to the shared list on its first call and always
    receives the same index afterwards. Used to stack progress bars in a
    multithreaded run.
    """
    global threads_list
    current_id = threading.get_ident()
    with _threads_list_lock:
        if current_id not in threads_list:
            threads_list.append(current_id)
        # index() covers both the just-appended and the already-known case.
        return threads_list.index(current_id)
# ============================================================================
# NESTED DATA NAVIGATION
# ============================================================================
def get_nested_value(data_structure, path, default=None):
    """
    Extracts a value from a nested structure of dictionaries and lists.
    Supports a wildcard '*' in the path to retrieve all elements from a list.
    Args:
        data_structure: The nested dict/list structure to navigate
        path: List of keys/indices to follow. Use '*' for list wildcard.
        default: Value to return if path not found
    Returns:
        The value at the end of the path, or default if not found.
        Returns the sentinel string "$$$$ No Data" when data_structure is None.
    Examples:
        get_nested_value({"a": {"b": 1}}, ["a", "b"]) -> 1
        get_nested_value({"items": [{"x": 1}, {"x": 2}]}, ["items", "*", "x"]) -> [1, 2]
    """
    # A missing structure is reported with a distinctive sentinel rather than
    # `default`, so callers can distinguish "no data at all" from "path absent".
    if data_structure is None:
        return "$$$$ No Data"
    if not path:
        return default
    if "*" in path:
        # Split around the first wildcard: resolve the prefix to a list, then
        # recurse over each element with the remaining suffix of the path.
        wildcard_index = path.index("*")
        path_before = path[:wildcard_index]
        path_after = path[wildcard_index+1:]
        # Create a temporary function for non-wildcard path resolution
        def _get_simple_nested_value(ds, p, d):
            cl = ds
            for k in p:
                if isinstance(cl, dict):
                    cl = cl.get(k)
                elif isinstance(cl, list):
                    try:
                        # Manual bounds check (negative indices allowed):
                        # out-of-range falls back to the default, never raises.
                        if isinstance(k, int) and -len(cl) <= k < len(cl):
                            cl = cl[k]
                        else: return d
                    except (IndexError, TypeError): return d
                else: return d
                if cl is None: return d
            return cl
        base_level = _get_simple_nested_value(data_structure, path_before, default)
        if not isinstance(base_level, list):
            # The wildcard only makes sense over a list prefix.
            return default
        results = []
        for item in base_level:
            # For each item, recursively call to resolve the rest of the path
            value = get_nested_value(item, path_after, default)
            # Skip misses: identity check against `default` plus the sentinel.
            if value is not default and value != "$$$$ No Data":
                results.append(value)
        # Flatten the results by one level to handle multiple wildcards
        final_results = []
        for res in results:
            if isinstance(res, list):
                final_results.extend(res)
            else:
                final_results.append(res)
        return final_results
    # No wildcard, original logic (iterative)
    current_level = data_structure
    for key_or_index in path:
        if isinstance(current_level, dict):
            current_level = current_level.get(key_or_index)
            if current_level is None:
                return default
        elif isinstance(current_level, list):
            try:
                # Same manual bounds check as the wildcard helper above.
                if isinstance(key_or_index, int) and -len(current_level) <= key_or_index < len(current_level):
                    current_level = current_level[key_or_index]
                else:
                    return default
            except (IndexError, TypeError):
                return default
        else:
            return default
    return current_level
# ============================================================================
# CONFIGURATION UTILITIES
# ============================================================================
def get_config_path():
    """Resolve the path to the config folder.

    Works for both plain script execution and a PyInstaller executable.

    Returns:
        The config folder path: under the PyInstaller extraction directory
        when running frozen, otherwise the bare relative folder name.
    """
    if getattr(sys, 'frozen', False):
        # PyInstaller unpacks bundled data files under sys._MEIPASS at runtime.
        return os.path.join(sys._MEIPASS, CONFIG_FOLDER_NAME)
    # Plain script: config folder is resolved relative to the working directory.
    return CONFIG_FOLDER_NAME
def get_old_filename(current_filename, old_suffix="_old"):
    """Derive the backup ("old") filename from a current filename.

    The suffix is inserted just before the extension, e.g.
    "endobest_inclusions.json" -> "endobest_inclusions_old.json".

    Args:
        current_filename: Current file name (e.g., "endobest_inclusions.json")
        old_suffix: Suffix to append before file extension (default: "_old")

    Returns:
        Old backup filename with suffix before extension
    """
    stem, extension = os.path.splitext(current_filename)
    return "".join((stem, old_suffix, extension))