fourier fixed & data_limitation removed

mipt-user
2025-10-03 18:07:03 +03:00
parent c16cf2ba8a
commit 9f42987c22
8 changed files with 12125 additions and 35 deletions

View File

@@ -0,0 +1,18 @@
{
"preset": {
"filename": "s11_start100_stop8800_points1000_bw1khz.bin",
"mode": "s11",
"start_freq": 100000000.0,
"stop_freq": 8800000000.0,
"points": 1000,
"bandwidth": 1000.0
},
"calibration_name": "grey_small",
"standards": [
"open",
"short",
"load"
],
"created_timestamp": "2025-10-02T19:49:07.091261",
"is_complete": true
}
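
For context, a minimal sketch of reading and validating a calibration-info file like the one above (the function and constant names here are illustrative, not part of this repository):

import json
from pathlib import Path

REQUIRED_STANDARDS = {"open", "short", "load"}  # one-port SOL calibration set

def load_calibration_info(path: Path) -> dict:
    """Read a calibration_info-style JSON file and verify the SOL standards are all present."""
    info = json.loads(path.read_text())
    missing = REQUIRED_STANDARDS - set(info.get("standards", []))
    if missing or not info.get("is_complete", False):
        raise ValueError(f"Calibration {info.get('calibration_name')!r} is incomplete: missing {sorted(missing)}")
    return info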

View File

@@ -0,0 +1,16 @@
{
"preset": {
"filename": "s11_start100_stop8800_points1000_bw1khz.bin",
"mode": "s11",
"start_freq": 100000000.0,
"stop_freq": 8800000000.0,
"points": 1000,
"bandwidth": 1000.0
},
"calibration_name": "grey_small",
"standard": "load",
"sweep_number": 100,
"sweep_timestamp": 1759423699.7687562,
"created_timestamp": "2025-10-02T19:49:07.086451",
"total_points": 1000
}

View File

@@ -0,0 +1,16 @@
{
"preset": {
"filename": "s11_start100_stop8800_points1000_bw1khz.bin",
"mode": "s11",
"start_freq": 100000000.0,
"stop_freq": 8800000000.0,
"points": 1000,
"bandwidth": 1000.0
},
"calibration_name": "grey_small",
"standard": "open",
"sweep_number": 36,
"sweep_timestamp": 1759423548.4661014,
"created_timestamp": "2025-10-02T19:49:07.065432",
"total_points": 1000
}

View File

@@ -0,0 +1,16 @@
{
"preset": {
"filename": "s11_start100_stop8800_points1000_bw1khz.bin",
"mode": "s11",
"start_freq": 100000000.0,
"stop_freq": 8800000000.0,
"points": 1000,
"bandwidth": 1000.0
},
"calibration_name": "grey_small",
"standard": "short",
"sweep_number": 75,
"sweep_timestamp": 1759423640.196192,
"created_timestamp": "2025-10-02T19:49:07.078508",
"total_points": 1000
}
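
The three per-standard files above ("load", "open", "short") share one layout; a hedged parsing sketch (the dataclass and function names are illustrative only):

import json
from dataclasses import dataclass
from pathlib import Path

@dataclass
class StandardSweepMeta:
    """Per-standard sweep metadata, mirroring the JSON fields shown above."""
    standard: str           # "open", "short", or "load"
    sweep_number: int       # sweep index used for this standard
    sweep_timestamp: float  # Unix time of the sweep
    total_points: int       # number of frequency points recorded

def read_standard_meta(path: Path) -> StandardSweepMeta:
    raw = json.loads(path.read_text())
    return StandardSweepMeta(
        standard=raw["standard"],
        sweep_number=raw["sweep_number"],
        sweep_timestamp=raw["sweep_timestamp"],
        total_points=raw["total_points"],
    )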

View File

@@ -48,7 +48,7 @@ class BScanProcessor(BaseProcessor):
        return {
            "open_air": False,  # Toggle for reference usage
            "axis": "abs",  # "real", "abs", or "phase"
-            "data_limitation": None,  # None, "ph_only_1", "ph_only_2"
+            # "data_limitation": None,  # None, "ph_only_1", "ph_only_2"
            "cut": 0.824,  # Cut parameter (meters)
            "max": 1.0,  # Max depth (meters)
            "gain": 1.0,  # Gain exponent
@@ -75,13 +75,13 @@ class BScanProcessor(BaseProcessor):
                value=cfg["axis"],
                options={"choices": ["real", "abs", "phase"]},
            ),
-            UIParameter(
-                name="data_limitation",
-                label="Data Limitation",
-                type="select",
-                value=cfg["data_limitation"],
-                options={"choices": [None, "ph_only_1", "ph_only_2"]},
-            ),
+            # UIParameter(
+            #     name="data_limitation",
+            #     label="Data Limitation",
+            #     type="select",
+            #     value=cfg["data_limitation"],
+            #     options={"choices": [None, "ph_only_1", "ph_only_2"]},
+            # ),
            UIParameter(
                name="cut",
                label="Cut (m)",
@@ -221,7 +221,7 @@ class BScanProcessor(BaseProcessor):
            "frequency_range": analysis["freq_range"],
            "reference_used": bool(self._config["open_air"] and reference_data is not None),
            "axis_type": self._config["axis"],
-            "data_limitation": self._config["data_limitation"],
+            # "data_limitation": self._config["data_limitation"],
            "points_processed": int(complex_data.size),
            "plot_history_count": len(self._plot_history),
        }
@@ -327,8 +327,8 @@ class BScanProcessor(BaseProcessor):
        if processed_data.get("reference_used", False):
            config_info += " | Open Air: ON"
-        if self._config["data_limitation"]:
-            config_info += f" | Limit: {self._config['data_limitation']}"
+        # if self._config["data_limitation"]:
+        #     config_info += f" | Limit: {self._config['data_limitation']}"

        layout = {
            "title": f"B-Scan Heatmap - {config_info}",
@@ -463,8 +463,8 @@ class BScanProcessor(BaseProcessor):
        try:
            # Determine effective frequency range (Hz)
            if vna_config:
-                freq_start = max(float(vna_config.get("start_freq", 100e6)), self._config["start_freq"] * 1e6)
-                freq_stop = min(float(vna_config.get("stop_freq", 8.8e9)), self._config["stop_freq"] * 1e6)
+                freq_start = float(vna_config.get("start_freq", 100e6))
+                freq_stop = float(vna_config.get("stop_freq", 8.8e9))
            else:
                freq_start = self._config["start_freq"] * 1e6
                freq_stop = self._config["stop_freq"] * 1e6
@@ -472,11 +472,13 @@ class BScanProcessor(BaseProcessor):
            # Frequency vector over current data length
            freq_axis = np.linspace(freq_start, freq_stop, complex_data.size, dtype=float)
+            # print("freq_axis", freq_axis.shape, freq_axis[0], freq_axis[-1])

            # Optionally normalize amplitude (phase-only modes)
-            limited = self._apply_data_limitations(complex_data)
+            # limited = self._apply_data_limitations(complex_data)

            # IFFT to time domain
-            depth_m, time_response = self._perform_ifft(limited, freq_axis, axis=self._config["axis"])
+            depth_m, time_response = self._perform_ifft(complex_data, freq_axis, axis=self._config["axis"])

            # Depth windowing and gain shaping
            depth_out, time_out = self._apply_depth_processing(depth_m, time_response)
@@ -485,37 +487,36 @@ class BScanProcessor(BaseProcessor):
                "time_data": time_out,
                "distance": depth_out,
                "freq_range": [freq_start, freq_stop],
-                "complex_time": limited,
+                "complex_time": complex_data,
            }
        except Exception as exc:  # noqa: BLE001
            logger.error("Data analysis failed", error=repr(exc))
            return None

-    def _apply_data_limitations(self, s: NDArray[np.complex128]) -> NDArray[np.complex128]:
-        """
-        Apply optional amplitude normalization to emphasize phase information.
-        Modes
-        -----
-        - None: passthrough
-        - "ph_only_1": normalize by magnitude
-        - "ph_only_2": same normalization (kept for behavioral parity)
-        """
-        try:
-            mode = self._config.get("data_limitation")
-            if mode in {"ph_only_1", "ph_only_2"}:
-                # Avoid division by zero
-                return s / (np.abs(s) + 1e-12)
-            return s
-        except Exception as exc:  # noqa: BLE001
-            logger.error("Data limitation processing failed", error=repr(exc))
-            return s
+    # def _apply_data_limitations(self, s: NDArray[np.complex128]) -> NDArray[np.complex128]:
+    #     """
+    #     Apply optional amplitude normalization to emphasize phase information.
+    #     Modes
+    #     -----
+    #     - None: passthrough
+    #     - "ph_only_1": normalize by magnitude
+    #     - "ph_only_2": same normalization (kept for behavioral parity)
+    #     """
+    #     try:
+    #         mode = self._config.get("data_limitation")
+    #         if mode in {"ph_only_1", "ph_only_2"}:
+    #             # Avoid division by zero
+    #             return s / (np.abs(s) + 1e-12)
+    #         return s
+    #     except Exception as exc:  # noqa: BLE001
+    #         logger.error("Data limitation processing failed", error=repr(exc))
+    #         return s

    def _perform_ifft(
        self,
        s_array: NDArray[np.complex128],
        frequencies_hz: NDArray[np.floating],
-        *,
        axis: str = "abs",
    ) -> tuple[NDArray[np.floating], NDArray[np.floating]]:
        """
@@ -537,6 +538,8 @@ class BScanProcessor(BaseProcessor):
        f = frequencies_hz[mask]
        s = s_array[mask]
+        # print("f", f.shape, f[0], f[-1])

        n = f.size
        if n < 2:
            raise ValueError("Not enough frequency points after filtering")
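
For readers following the `_perform_ifft` changes above, a self-contained sketch of the frequency-to-depth conversion idea (uniform frequency grid and free-space propagation assumed; this is a simplification for illustration, not the module's actual implementation):

import numpy as np

C = 299_792_458.0  # speed of light, m/s

def s11_to_depth_profile(s11: np.ndarray, freq_hz: np.ndarray,
                         f_min: float, f_max: float) -> tuple[np.ndarray, np.ndarray]:
    """Band-limit S11, IFFT to the time domain, and map two-way delay to depth."""
    mask = (freq_hz >= f_min) & (freq_hz <= f_max)
    f, s = freq_hz[mask], s11[mask]
    if f.size < 2:
        raise ValueError("Not enough frequency points after filtering")
    df = f[1] - f[0]                        # uniform frequency step assumed
    impulse = np.fft.ifft(s)                # time-domain impulse response
    t = np.arange(f.size) / (f.size * df)   # time axis, seconds
    depth_m = C * t / 2.0                   # two-way travel time -> one-way depth
    return depth_m, np.abs(impulse)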