Add append-history and delete-column features
@@ -171,6 +171,133 @@ class BaseProcessor:
         self._sweep_history.clear()
         logger.debug("History cleared")
 
+    def append_history(self, history_data: list[dict[str, Any]]) -> ProcessedResult | None:
+        """
+        Append sweep history from external source (e.g., loaded file) to existing history.
+
+        Parameters
+        ----------
+        history_data:
+            List of history entries in the format exported by export_history_data.
+
+        Returns
+        -------
+        ProcessedResult | None
+            Result after appending history, or None if history is empty.
+
+        Notes
+        -----
+        - Only history is appended, processor config is NOT updated.
+        - If total history exceeds max_history limit, oldest entries are dropped.
+        """
+        if not history_data:
+            logger.warning("Append history called with empty data")
+            return None
+
+        from vna_system.core.acquisition.sweep_buffer import SweepData
+        from vna_system.core.settings.preset_manager import VNAMode
+
+        with self._lock:
+            initial_count = len(self._sweep_history)
+
+            # Convert imported data to internal format (same as import_history_data)
+            for idx, entry in enumerate(history_data):
+                sweep_points = entry.get("sweep_points", [])
+                calibrated_points = entry.get("calibrated_points", [])
+                reference_points = entry.get("reference_points", [])
+                raw_reference_points = entry.get("raw_reference_points", [])
+
+                # Reconstruct SweepData objects
+                sweep_data = SweepData(
+                    sweep_number=initial_count + idx,
+                    timestamp=entry.get("timestamp", 0.0),
+                    points=sweep_points,
+                    total_points=len(sweep_points)
+                ) if sweep_points else None
+
+                calibrated_data = SweepData(
+                    sweep_number=initial_count + idx,
+                    timestamp=entry.get("timestamp", 0.0),
+                    points=calibrated_points,
+                    total_points=len(calibrated_points)
+                ) if calibrated_points else None
+
+                reference_data = SweepData(
+                    sweep_number=initial_count + idx,
+                    timestamp=entry.get("timestamp", 0.0),
+                    points=reference_points,
+                    total_points=len(reference_points)
+                ) if reference_points else None
+
+                raw_reference_data = SweepData(
+                    sweep_number=initial_count + idx,
+                    timestamp=entry.get("timestamp", 0.0),
+                    points=raw_reference_points,
+                    total_points=len(raw_reference_points)
+                ) if raw_reference_points else None
+
+                # Reconstruct calibration standards
+                calibration_standards = None
+                cal_standards_data = entry.get("calibration_standards")
+                if cal_standards_data:
+                    calibration_standards = {}
+                    for std_name, std_data in cal_standards_data.items():
+                        std_points = std_data.get("points", [])
+                        if std_points:
+                            calibration_standards[std_name] = SweepData(
+                                sweep_number=std_data.get("sweep_number", initial_count + idx),
+                                timestamp=std_data.get("timestamp", entry.get("timestamp", 0.0)),
+                                points=std_points,
+                                total_points=len(std_points)
+                            )
+
+                # Reconstruct reference info
+                from vna_system.core.settings.reference_manager import ReferenceInfo
+                reference_info = None
+                ref_info_data = entry.get("reference_info")
+                if ref_info_data and ref_info_data.get("name"):
+                    # Only create ReferenceInfo if we have required fields
+                    try:
+                        reference_info = ReferenceInfo(
+                            name=ref_info_data.get("name", ""),
+                            timestamp=datetime.fromtimestamp(ref_info_data.get("timestamp", 0.0)) if ref_info_data.get("timestamp") else datetime.now(),
+                            preset_filename=ref_info_data.get("preset_filename", ""),
+                            description=ref_info_data.get("description", ""),
+                            metadata=ref_info_data.get("metadata")
+                        )
+                    except Exception:
+                        # If ReferenceInfo creation fails, skip it
+                        reference_info = None
+
+                # Reconstruct VNA config
+                vna_config = entry.get("vna_config", {})
+
+                # Append to history
+                self._sweep_history.append({
+                    "sweep_data": sweep_data,
+                    "calibrated_data": calibrated_data,
+                    "vna_config": vna_config,
+                    "reference_data": reference_data,
+                    "reference_info": reference_info,
+                    "raw_reference_data": raw_reference_data,
+                    "calibration_standards": calibration_standards,
+                    "timestamp": entry.get("timestamp", datetime.now().timestamp()),
+                })
+
+            self._trim_history()
+            final_count = len(self._sweep_history)
+
+            logger.info(
+                "History appended",
+                processor_id=self.processor_id,
+                added=len(history_data),
+                initial_count=initial_count,
+                final_count=final_count,
+                dropped=initial_count + len(history_data) - final_count
+            )
+
+        return self.recalculate()
+
     def _trim_history(self) -> None:
         """Internal: keep only the newest `_max_history` items."""
         if len(self._sweep_history) > self._max_history:
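A minimal call sketch for the new method (hedged: `processor` stands for any BaseProcessor subclass instance, and `entries` is a list already in the format produced by export_history_data, e.g. read back from a saved JSON export):

    # entries: list[dict] in the export_history_data format (see the export hunk below).
    result = processor.append_history(entries)
    if result is None:
        print("nothing appended: the input list was empty")
    else:
        # Entries were appended (trimmed past max_history) and the processor recalculated;
        # the processor's own configuration is left untouched.
        print("history extended, new result available")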
@@ -636,17 +763,29 @@ class BaseProcessor:
                         "timestamp": getattr(std_data, "timestamp", None),
                     }
 
+            # Export reference info with all fields
+            ref_info_export = None
+            if reference_info:
+                ref_info_export = {
+                    "name": getattr(reference_info, "name", None),
+                    "timestamp": getattr(reference_info, "timestamp", datetime.now()).timestamp() if hasattr(reference_info, "timestamp") else None,
+                    "preset_filename": getattr(reference_info, "preset_filename", None),
+                    "description": getattr(reference_info, "description", None),
+                    "metadata": getattr(reference_info, "metadata", None),
+                }
+
+            # Get timestamp safely
+            entry_timestamp = entry.get("timestamp")
+            timestamp_export = float(entry_timestamp) if entry_timestamp is not None else None
+
             exported.append({
-                "timestamp": float(entry.get("timestamp")) if entry.get("timestamp") is not None else None,
+                "timestamp": timestamp_export,
                 "sweep_points": self._points_to_list(getattr(sweep_data, "points", [])),
                 "calibrated_points": self._points_to_list(getattr(calibrated_data, "points", [])),
                 "reference_points": self._points_to_list(getattr(reference_data, "points", [])),
                 "raw_reference_points": self._points_to_list(getattr(raw_reference_data, "points", [])),
                 "calibration_standards": cal_standards_export,
-                "reference_info": {
-                    "name": getattr(reference_info, "name", None),
-                    "description": getattr(reference_info, "description", None),
-                } if reference_info else None,
+                "reference_info": ref_info_export,
                 "vna_config": self._snapshot_vna_config(entry.get("vna_config")),
             })
 
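After this change each exported entry is a flat JSON object along these lines (a sketch assembled from the keys written above; the values are placeholders, and the element format of the point lists is whatever _points_to_list emits):

    exported_entry = {
        "timestamp": 1717000000.0,           # timestamp_export: float, or None if missing
        "sweep_points": [],                  # _points_to_list(...) output
        "calibrated_points": [],
        "reference_points": [],
        "raw_reference_points": [],
        "calibration_standards": None,       # or {name: {"points": [...], "sweep_number": ..., "timestamp": ...}}
        "reference_info": {                  # ref_info_export; None when no reference is set
            "name": "reference-name",
            "timestamp": 1717000000.0,
            "preset_filename": "preset.json",
            "description": "",
            "metadata": None,
        },
        "vna_config": {},                    # _snapshot_vna_config(...) output
    }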
@@ -671,6 +810,7 @@ class BaseProcessor:
             sweep_points = entry.get("sweep_points", [])
             calibrated_points = entry.get("calibrated_points", [])
             reference_points = entry.get("reference_points", [])
+            raw_reference_points = entry.get("raw_reference_points", [])
 
             # Reconstruct SweepData objects
             # Use sequential index as sweep_number since it's not stored
@@ -695,6 +835,46 @@ class BaseProcessor:
                 total_points=len(reference_points)
             ) if reference_points else None
 
+            raw_reference_data = SweepData(
+                sweep_number=idx,
+                timestamp=entry.get("timestamp", 0.0),
+                points=raw_reference_points,
+                total_points=len(raw_reference_points)
+            ) if raw_reference_points else None
+
+            # Reconstruct calibration standards
+            calibration_standards = None
+            cal_standards_data = entry.get("calibration_standards")
+            if cal_standards_data:
+                calibration_standards = {}
+                for std_name, std_data in cal_standards_data.items():
+                    std_points = std_data.get("points", [])
+                    if std_points:
+                        calibration_standards[std_name] = SweepData(
+                            sweep_number=std_data.get("sweep_number", idx),
+                            timestamp=std_data.get("timestamp", entry.get("timestamp", 0.0)),
+                            points=std_points,
+                            total_points=len(std_points)
+                        )
+
+            # Reconstruct reference info
+            from vna_system.core.settings.reference_manager import ReferenceInfo
+            reference_info = None
+            ref_info_data = entry.get("reference_info")
+            if ref_info_data and ref_info_data.get("name"):
+                # Only create ReferenceInfo if we have required fields
+                try:
+                    reference_info = ReferenceInfo(
+                        name=ref_info_data.get("name", ""),
+                        timestamp=datetime.fromtimestamp(ref_info_data.get("timestamp", 0.0)) if ref_info_data.get("timestamp") else datetime.now(),
+                        preset_filename=ref_info_data.get("preset_filename", ""),
+                        description=ref_info_data.get("description", ""),
+                        metadata=ref_info_data.get("metadata")
+                    )
+                except Exception:
+                    # If ReferenceInfo creation fails, skip it
+                    reference_info = None
+
             # Restore VNAMode enum from string if needed
             vna_config = entry.get("vna_config", {})
             if isinstance(vna_config.get("mode"), str):
@@ -707,6 +887,9 @@ class BaseProcessor:
                 "sweep_data": sweep_data,
                 "calibrated_data": calibrated_data,
                 "reference_data": reference_data,
+                "raw_reference_data": raw_reference_data,
+                "reference_info": reference_info,
+                "calibration_standards": calibration_standards,
                 "vna_config": vna_config,
                 "timestamp": entry.get("timestamp"),
             })
@@ -1,10 +1,10 @@
 {
   "open_air": false,
-  "axis": "phase",
+  "axis": "abs",
   "cut": 0.279,
-  "max": 4.0,
-  "gain": 0.5,
-  "start_freq": 100.0,
+  "max": 1.5,
+  "gain": 0.7,
+  "start_freq": 2130.0,
   "stop_freq": 8230.0,
   "clear_history": false,
   "data_limit": 500
@@ -144,6 +144,51 @@ class BScanProcessor(BaseProcessor):
         self._plot_history.clear()
         logger.info("Plot and sweep history cleared completely", processor_id=self.processor_id)
 
+    def delete_column(self, column_index: int) -> bool:
+        """
+        Delete a specific column (sweep) from the plot history.
+
+        Parameters
+        ----------
+        column_index : int
+            The 1-based column index to delete (matching the sweep number displayed to the user).
+
+        Returns
+        -------
+        bool
+            True if deletion was successful, False otherwise.
+        """
+        try:
+            with self._lock:
+                # Convert 1-based index to 0-based
+                array_index = column_index - 1
+
+                if array_index < 0 or array_index >= len(self._plot_history):
+                    logger.warning(
+                        "Invalid column index for deletion",
+                        column_index=column_index,
+                        history_length=len(self._plot_history)
+                    )
+                    return False
+
+                # Delete from plot history
+                del self._plot_history[array_index]
+
+                # Also delete from sweep history if indices match
+                if array_index < len(self._sweep_history):
+                    del self._sweep_history[array_index]
+
+                logger.info(
+                    "Column deleted successfully",
+                    column_index=column_index,
+                    remaining_columns=len(self._plot_history)
+                )
+                return True
+
+        except Exception as exc:
+            logger.error("Column deletion failed", error=repr(exc), column_index=column_index)
+            return False
+
     # -------------------------------------------------------------------------
     # Processing
     # -------------------------------------------------------------------------
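A quick sketch of the indexing contract (hedged: `bscan` is a hypothetical BScanProcessor instance that currently holds, say, 10 columns in its plot history). The argument is 1-based to match the sweep numbers shown on the plot and is converted to a 0-based list index internally:

    ok = bscan.delete_column(1)    # removes the first column (array index 0)
    ok = bscan.delete_column(10)   # removes the last column (array index 9)
    ok = bscan.delete_column(11)   # out of range: logs a warning and returns False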
@@ -206,6 +206,47 @@ class ProcessorManager:
             logger.error("History load error", processor_id=processor_id, error=repr(exc))
             raise
 
+    def append_processor_history(self, processor_id: str, history_data: list[dict[str, Any]]) -> ProcessedResult | None:
+        """
+        Append sweep history to existing processor history from JSON data and recalculate.
+
+        Does NOT update processor configuration - only appends history.
+
+        Parameters
+        ----------
+        processor_id : str
+            The processor to append history to.
+        history_data : list[dict]
+            History records in the format exported by export_history_data.
+
+        Returns
+        -------
+        ProcessedResult | None
+            The result of recalculation after appending history.
+        """
+        processor = self.get_processor(processor_id)
+        if not processor:
+            raise ValueError(f"Processor {processor_id} not found")
+
+        try:
+            result = processor.append_history(history_data)
+
+            if result:
+                with self._lock:
+                    callbacks = list(self._result_callbacks)
+                for cb in callbacks:
+                    try:
+                        cb(processor_id, result)
+                    except Exception as exc:  # noqa: BLE001
+                        logger.error("Result callback failed", processor_id=processor_id, error=repr(exc))
+
+            logger.info("History appended and recalculated", processor_id=processor_id, records=len(history_data))
+            return result
+
+        except Exception as exc:  # noqa: BLE001
+            logger.error("History append error", processor_id=processor_id, error=repr(exc))
+            raise
+
     def build_processor_state(self, processor_id: str) -> dict[str, Any]:
         """Return a JSON-ready snapshot of processor state and current result."""
         processor = self.get_processor(processor_id)
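At the manager level the call looks like this (sketch; `manager` is a hypothetical ProcessorManager instance): an unknown processor_id raises ValueError, and on success every registered result callback is invoked with the recalculated result before it is returned.

    try:
        result = manager.append_processor_history("bscan", history_data)
    except ValueError:
        # processor_id did not resolve to a registered processor
        raise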
@@ -104,6 +104,10 @@ class ProcessorWebSocketHandler:
                 await self._handle_load_history(websocket, message)
             elif mtype == "get_processor_state":
                 await self._handle_get_processor_state(websocket, message)
+            elif mtype == "delete_column":
+                await self._handle_delete_column(websocket, message)
+            elif mtype == "append_history":
+                await self._handle_append_history(websocket, message)
             else:
                 await self._send_error(websocket, f"Неизвестный тип сообщения: {mtype!r}")
         except json.JSONDecodeError as json_error:
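The two new message types accepted here have the following JSON shapes (reconstructed from the handlers below; shown as Python dicts ready for json.dumps, with illustrative values):

    delete_column_msg = {
        "type": "delete_column",
        "processor_id": "bscan",   # must resolve to a processor that has delete_column
        "column_index": 3,         # 1-based integer column/sweep number
    }

    append_history_msg = {
        "type": "append_history",
        "processor_id": "bscan",
        "history_data": [],        # non-empty list of exported history entries
    }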
@@ -202,6 +206,33 @@ class ProcessorWebSocketHandler:
             logger.error("History load failed", processor_id=processor_id, error=repr(exc))
             await self._send_error(websocket, f"Загрузка истории не удалась: {exc}")
 
+    async def _handle_append_history(self, websocket: WebSocket, message: dict[str, Any]) -> None:
+        """
+        Append sweep history from JSON data to existing processor history and recalculate.
+
+        Does NOT update processor configuration - only appends history.
+        """
+        processor_id = message.get("processor_id")
+        history_data = message.get("history_data")
+
+        if not processor_id:
+            await self._send_error(websocket, "Требуется processor_id")
+            return
+
+        if not history_data or not isinstance(history_data, list):
+            await self._send_error(websocket, "Требуется history_data (список)")
+            return
+
+        try:
+            result = self.processor_manager.append_processor_history(processor_id, history_data)
+            if result:
+                await websocket.send_text(json.dumps(self._result_to_message(processor_id, result)))
+            else:
+                await self._send_error(websocket, f"Нет результата от процессора {processor_id} после дополнения истории")
+        except Exception as exc:  # noqa: BLE001
+            logger.error("History append failed", processor_id=processor_id, error=repr(exc))
+            await self._send_error(websocket, f"Дополнение истории не удалось: {exc}")
+
     async def _handle_get_processor_state(self, websocket: WebSocket, message: dict[str, Any]) -> None:
         """
         Fetch complete processor state including sweep history and current data.
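End to end, any WebSocket client can drive this handler. A sketch using the third-party websockets package (an assumption, as is the ws://localhost:8000/ws endpoint) that loads a previously exported file and sends its sweep_history for appending, then returns the handler's JSON reply:

    import asyncio
    import json

    import websockets  # assumed client library; any WebSocket client works


    async def append_saved_history(path: str, processor_id: str = "bscan") -> dict:
        with open(path, "r", encoding="utf-8") as fh:
            saved = json.load(fh)

        async with websockets.connect("ws://localhost:8000/ws") as ws:  # hypothetical URL
            await ws.send(json.dumps({
                "type": "append_history",
                "processor_id": processor_id,
                "history_data": saved.get("sweep_history", []),
            }))
            # The handler replies with the recalculated result, or an error message.
            return json.loads(await ws.recv())


    # asyncio.run(append_saved_history("saved_history.json"))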
@@ -222,6 +253,58 @@ class ProcessorWebSocketHandler:
             logger.error("Error getting processor state", processor_id=processor_id, error=repr(exc))
             await self._send_error(websocket, f"Ошибка получения состояния процессора: {exc}")
 
+    async def _handle_delete_column(self, websocket: WebSocket, message: dict[str, Any]) -> None:
+        """
+        Delete a specific column (sweep) from the bscan processor plot history.
+        """
+        processor_id = message.get("processor_id")
+        column_index = message.get("column_index")
+
+        if not processor_id:
+            await self._send_error(websocket, "Требуется processor_id")
+            return
+
+        if column_index is None or not isinstance(column_index, int):
+            await self._send_error(websocket, "Требуется column_index (целое число)")
+            return
+
+        try:
+            processor = self.processor_manager.get_processor(processor_id)
+            if processor is None:
+                await self._send_error(websocket, f"Процессор {processor_id} не найден")
+                return
+
+            # Check if processor has delete_column method (only BScanProcessor)
+            if not hasattr(processor, "delete_column"):
+                await self._send_error(
+                    websocket,
+                    f"Процессор {processor_id} не поддерживает удаление столбцов"
+                )
+                return
+
+            # Execute deletion
+            success = processor.delete_column(column_index)
+
+            if not success:
+                await self._send_error(
+                    websocket,
+                    f"Не удалось удалить столбец {column_index}"
+                )
+                return
+
+            # Recalculate and send updated result to all clients
+            result = processor.recalculate()
+            if result:
+                # Broadcast to all connected clients
+                message_str = json.dumps(self._result_to_message(processor_id, result))
+                await self._send_to_connections(message_str)
+            else:
+                await self._send_error(websocket, "Пересчёт после удаления не удался")
+
+        except Exception as exc:  # noqa: BLE001
+            logger.error("Column deletion failed", processor_id=processor_id, error=repr(exc))
+            await self._send_error(websocket, f"Удаление столбца не удалось: {exc}")
+
     def _result_to_message(self, processor_id: str, result: ProcessedResult) -> dict[str, Any]:
         """
         Convert a `ProcessedResult` into a lightweight JSON-serializable message for broadcasting.
@@ -41,12 +41,12 @@ class VNADashboard {
         this.storage = new StorageManager();
         this.notifications = new NotificationManager();
 
-        // Charts first (used by UI, independent of UI initialization)
-        this.charts = new ChartManager(this.config.charts, this.notifications);
-
-        // WebSocket before UI (UI subscribes to WebSocket events)
+        // WebSocket first (needed by charts for bscan click handler)
         this.websocket = new WebSocketManager(this.config.websocket, this.notifications);
 
+        // Charts after websocket (uses websocket for bscan click handler)
+        this.charts = new ChartManager(this.config.charts, this.notifications, this.websocket);
+
         // UI receives dependencies from outside
         this.ui = new UIManager(this.notifications, this.websocket, this.charts);
 
@@ -6,6 +6,7 @@
 import { formatProcessorName, safeClone, downloadJSON } from './utils.js';
 import { renderIcons } from './icons.js';
 import { ChartSettingsManager } from './charts/chart-settings.js';
+import { BScanClickHandler } from './charts/bscan-click-handler.js';
 import {
     defaultPlotlyLayout,
     defaultPlotlyConfig,
@@ -17,9 +18,10 @@ import {
 } from './plotly-utils.js';
 
 export class ChartManager {
-    constructor(config, notifications) {
+    constructor(config, notifications, websocket = null) {
         this.config = config;
         this.notifications = notifications;
+        this.websocket = websocket;
 
         this.charts = new Map();
         this.chartData = new Map();
@@ -40,6 +42,7 @@ export class ChartManager {
         };
 
         this.settingsManager = new ChartSettingsManager();
+        this.bscanClickHandler = new BScanClickHandler(websocket, notifications);
     }
 
     async init() {
@@ -97,11 +100,26 @@ export class ChartManager {
             height: plotContainer.clientHeight || 420
         };
 
-        createPlotlyPlot(plotContainer, [], layoutOverrides);
+        // Disable interactivity for bscan processor
+        const configOverrides = processorId === 'bscan' ? {
+            staticPlot: false,
+            displayModeBar: false,
+            scrollZoom: false,
+            doubleClick: false,
+            showTips: false,
+            editable: false
+        } : {};
+
+        createPlotlyPlot(plotContainer, [], layoutOverrides, configOverrides);
 
         this.charts.set(processorId, { element: card, plotContainer, isVisible: true, settingsInitialized: false });
         this.performanceStats.chartsCreated++;
 
+        // Attach click handler for bscan processor
+        if (processorId === 'bscan') {
+            this.bscanClickHandler.attachClickHandler(processorId, plotContainer);
+        }
+
         if (this.config.animation) {
             setTimeout(() => card.classList.add('chart-card--animated'), 50);
         }
@@ -124,7 +142,17 @@ export class ChartManager {
             title: { text: formatProcessorName(processorId), font: { size: 16, color: '#f1f5f9' } }
         };
 
-        await updatePlotlyPlot(chart.plotContainer, plotlyConfig.data || [], layoutOverrides);
+        // Disable interactivity for bscan processor
+        const configOverrides = processorId === 'bscan' ? {
+            staticPlot: false,
+            displayModeBar: false,
+            scrollZoom: false,
+            doubleClick: false,
+            showTips: false,
+            editable: false
+        } : {};
+
+        await updatePlotlyPlot(chart.plotContainer, plotlyConfig.data || [], layoutOverrides, configOverrides);
 
         this.updateChartMetadata(processorId);
 
@@ -178,6 +206,9 @@ export class ChartManager {
                     <button class="chart-card__action" data-action="upload" title="Load History">
                         <span data-icon="upload"></span>
                     </button>
+                    <button class="chart-card__action" data-action="append" title="Append History">
+                        <span data-icon="plus"></span>
+                    </button>
                     <button class="chart-card__action" data-action="download" title="Download JSON">
                         <span data-icon="download"></span>
                     </button>
@@ -189,6 +220,7 @@ export class ChartManager {
                     </button>
                 </div>
                 <input type="file" id="historyFileInput_${processorId}" accept=".json" style="display: none;">
+                <input type="file" id="appendFileInput_${processorId}" accept=".json" style="display: none;">
             </div>
             <div class="chart-card__content">
                 <div class="chart-card__plot" id="plot-${processorId}"></div>
@@ -222,6 +254,7 @@ export class ChartManager {
             switch (action) {
                 case 'fullscreen': this.toggleFullscreen(processorId); break;
                 case 'upload': this.uploadHistory(processorId); break;
+                case 'append': this.appendHistory(processorId); break;
                 case 'download': this.downloadChart(processorId); break;
                 case 'export-sweeps': this.exportSweeps(processorId); break;
                 case 'hide':
@@ -231,13 +264,21 @@ export class ChartManager {
             }
         });
 
-        // Setup file input handler
+        // Setup file input handler for load history
         const fileInput = card.querySelector(`#historyFileInput_${processorId}`);
         if (fileInput) {
            fileInput.addEventListener('change', async (e) => {
                 await this.handleHistoryUpload(processorId, e);
             });
         }
+
+        // Setup file input handler for append history
+        const appendFileInput = card.querySelector(`#appendFileInput_${processorId}`);
+        if (appendFileInput) {
+            appendFileInput.addEventListener('change', async (e) => {
+                await this.handleHistoryAppend(processorId, e);
+            });
+        }
     }
 
     updateChartMetadata(processorId) {
@@ -284,6 +325,11 @@ export class ChartManager {
     removeChart(id) {
         const c = this.charts.get(id);
         if (c) {
+            // Cleanup bscan click handler if applicable
+            if (id === 'bscan') {
+                this.bscanClickHandler.detachClickHandler(id, c.plotContainer);
+            }
+
             cleanupPlotly(c.plotContainer);
             c.element.remove();
             this.charts.delete(id);
@@ -514,15 +560,16 @@ export class ChartManager {
         }
 
         // Export calibration standards if present
-        if (latestSweep.calibration_standards) {
-            console.log('Exporting calibration standards:', Object.keys(latestSweep.calibration_standards));
-            for (const [standardName, standardData] of Object.entries(latestSweep.calibration_standards)) {
-                if (standardData && standardData.points && standardData.points.length > 0) {
-                    this.exportPointsToTSV(standardData.points, latestSweep.vna_config, `${baseFilename}_cal_${standardName}`);
-                    exportedCount++;
-                }
-            }
-        }
+        // COMMENTED OUT: Don't export calibration files (may be needed later)
+        // if (latestSweep.calibration_standards) {
+        //     console.log('Exporting calibration standards:', Object.keys(latestSweep.calibration_standards));
+        //     for (const [standardName, standardData] of Object.entries(latestSweep.calibration_standards)) {
+        //         if (standardData && standardData.points && standardData.points.length > 0) {
+        //             this.exportPointsToTSV(standardData.points, latestSweep.vna_config, `${baseFilename}_cal_${standardName}`);
+        //             exportedCount++;
+        //         }
+        //     }
+        // }
 
         // Export raw reference if present
         if (latestSweep.raw_reference_points && latestSweep.raw_reference_points.length > 0) {
@@ -641,6 +688,16 @@ export class ChartManager {
         }
     }
 
+    appendHistory(processorId) {
+        const chart = this.charts.get(processorId);
+        if (!chart) return;
+
+        const fileInput = chart.element.querySelector(`#appendFileInput_${processorId}`);
+        if (fileInput) {
+            fileInput.click();
+        }
+    }
+
     async handleHistoryUpload(processorId, event) {
         const file = event.target.files?.[0];
         if (!file) return;
@@ -698,10 +755,67 @@ export class ChartManager {
         event.target.value = '';
     }
 
+    async handleHistoryAppend(processorId, event) {
+        const file = event.target.files?.[0];
+        if (!file) return;
+
+        try {
+            const text = await file.text();
+            const jsonData = JSON.parse(text);
+
+            // Extract sweep_history from the saved JSON file
+            // Note: We do NOT use the config - only append history
+            const sweepHistory = jsonData.sweep_history || [];
+
+            if (!sweepHistory || sweepHistory.length === 0) {
+                this.notifications?.show?.({
+                    type: 'error',
+                    title: 'Ошибка дополнения',
+                    message: 'Файл не содержит истории свипов'
+                });
+                return;
+            }
+
+            // Send append_history message via WebSocket
+            const websocket = window.vnaDashboard?.websocket;
+            if (websocket && websocket.ws && websocket.ws.readyState === WebSocket.OPEN) {
+                websocket.ws.send(JSON.stringify({
+                    type: 'append_history',
+                    processor_id: processorId,
+                    history_data: sweepHistory
+                }));
+
+                this.notifications?.show?.({
+                    type: 'success',
+                    title: 'История дополнена',
+                    message: `Добавлено ${sweepHistory.length} записей к истории ${formatProcessorName(processorId)}`
+                });
+            } else {
+                this.notifications?.show?.({
+                    type: 'error',
+                    title: 'Ошибка подключения',
+                    message: 'WebSocket не подключен'
+                });
+            }
+
+        } catch (err) {
+            console.error('Error appending history:', err);
+            this.notifications?.show?.({
+                type: 'error',
+                title: 'Ошибка дополнения',
+                message: `Не удалось прочитать файл: ${err.message}`
+            });
+        }
+
+        // Reset file input
+        event.target.value = '';
+    }
+
     destroy() {
         console.log('Cleaning up Chart Manager...');
         this.clearAll();
         this.settingsManager.destroy();
+        this.bscanClickHandler.destroy();
         this.updateQueue.clear();
         this.isUpdating = false;
         this.isPaused = true;
@@ -0,0 +1,117 @@
+/**
+ * B-Scan Click Handler
+ * Handles column deletion clicks on the B-Scan heatmap
+ */
+
+export class BScanClickHandler {
+    constructor(websocket, notifications) {
+        this.websocket = websocket;
+        this.notifications = notifications;
+        this.activeListeners = new Map();
+    }
+
+    /**
+     * Attach click handler to a B-Scan plot container
+     * @param {string} processorId - Processor ID (e.g., "bscan")
+     * @param {HTMLElement} plotContainer - Plot container element
+     */
+    attachClickHandler(processorId, plotContainer) {
+        if (!plotContainer || this.activeListeners.has(processorId)) {
+            return;
+        }
+
+        const clickHandler = (data) => {
+            // Check if user clicked on a heatmap point
+            if (!data.points || data.points.length === 0) {
+                return;
+            }
+
+            const point = data.points[0];
+            const columnIndex = point.x; // X-axis represents sweep/column number (1-based)
+
+            if (!columnIndex || typeof columnIndex !== 'number') {
+                return;
+            }
+
+            // Show confirmation dialog
+            this.showDeleteConfirmation(processorId, columnIndex);
+        };
+
+        // Attach Plotly click event
+        plotContainer.on('plotly_click', clickHandler);
+        this.activeListeners.set(processorId, clickHandler);
+    }
+
+    /**
+     * Remove click handler from a plot container
+     * @param {string} processorId - Processor ID
+     * @param {HTMLElement} plotContainer - Plot container element
+     */
+    detachClickHandler(processorId, plotContainer) {
+        const handler = this.activeListeners.get(processorId);
+        if (handler && plotContainer) {
+            plotContainer.removeListener('plotly_click', handler);
+            this.activeListeners.delete(processorId);
+        }
+    }
+
+    /**
+     * Show confirmation dialog for column deletion
+     * @param {string} processorId - Processor ID
+     * @param {number} columnIndex - Column index to delete (1-based)
+     */
+    showDeleteConfirmation(processorId, columnIndex) {
+        const message = `Вы хотите удалить столбец ${columnIndex}?`;
+
+        if (confirm(message)) {
+            this.deleteColumn(processorId, columnIndex);
+        }
+    }
+
+    /**
+     * Send delete column command to backend
+     * @param {string} processorId - Processor ID
+     * @param {number} columnIndex - Column index to delete (1-based)
+     */
+    deleteColumn(processorId, columnIndex) {
+        if (!this.websocket || !this.websocket.ws || this.websocket.ws.readyState !== WebSocket.OPEN) {
+            this.notifications?.show?.({
+                type: 'error',
+                title: 'Ошибка подключения',
+                message: 'WebSocket не подключен'
+            });
+            return;
+        }
+
+        try {
+            const message = {
+                type: 'delete_column',
+                processor_id: processorId,
+                column_index: columnIndex
+            };
+
+            this.websocket.ws.send(JSON.stringify(message));
+
+            this.notifications?.show?.({
+                type: 'info',
+                title: 'Удаление столбца',
+                message: `Удаление столбца ${columnIndex}...`
+            });
+
+        } catch (error) {
+            console.error('Failed to send delete column command:', error);
+            this.notifications?.show?.({
+                type: 'error',
+                title: 'Ошибка удаления',
+                message: `Не удалось удалить столбец ${columnIndex}`
+            });
+        }
+    }
+
+    /**
+     * Clean up all listeners
+     */
+    destroy() {
+        this.activeListeners.clear();
+    }
+}
@@ -186,6 +186,13 @@ const ICONS = {
             { type: 'path', attrs: { d: 'M3 5v14c0 1.66 4 3 9 3s9-1.34 9-3V5' } },
             { type: 'path', attrs: { d: 'M3 12c0 1.66 4 3 9 3s9-1.34 9-3' } }
         ]
+    },
+    plus: {
+        viewBox: '0 0 24 24',
+        elements: [
+            { type: 'line', attrs: { x1: 12, y1: 5, x2: 12, y2: 19 } },
+            { type: 'line', attrs: { x1: 5, y1: 12, x2: 19, y2: 12 } }
+        ]
     }
 };
 
@@ -105,8 +105,9 @@ export function createPlotlyPlot(container, data = [], layoutOverrides = {}, con
  * @param {HTMLElement} container - Container element
  * @param {Array} data - Plotly data traces
  * @param {Object} layoutOverrides - Layout overrides
+ * @param {Object} configOverrides - Config overrides
  */
-export async function updatePlotlyPlot(container, data = [], layoutOverrides = {}) {
+export async function updatePlotlyPlot(container, data = [], layoutOverrides = {}, configOverrides = {}) {
     if (!container || typeof Plotly === 'undefined') return;
 
     const layout = {
@@ -118,7 +119,12 @@ export async function updatePlotlyPlot(container, data = [], layoutOverrides = {
     delete layout.width;
     delete layout.height;
 
-    await Plotly.react(container, data, layout, defaultPlotlyConfig);
+    const config = {
+        ...defaultPlotlyConfig,
+        ...configOverrides
+    };
+
+    await Plotly.react(container, data, layout, config);
 }
 
 /**