{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Import libraries"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%matplotlib inline\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"from scipy import constants\n",
"from scipy import signal as sigproc\n",
"from scipy.fft import next_fast_len\n",
"import math"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The data is read from ISF files, using the [`isfreader`](https://github.com/justengel/isfreader) library. (Can be installed by \"!pip3 install isfreader\" command.)"
]
},
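  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional: uncomment the following line to install the isfreader dependency\n",
    "# if it is not already available in the environment.\n",
    "# !pip3 install isfreader"
   ]
  },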
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import isfreader"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"shot_no = 41860"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Data loading"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ds = np.DataSource('/tmp') # temporary storage for downloaded files\n",
"data_URL = 'http://golem.fjfi.cvut.cz/shots/{shot_no}/Diagnostics/Interferometry/Default/DAS_raw_data_dir/ch{ch_id}.isf'\n",
"scalars_URL = 'http://golem.fjfi.cvut.cz/shots/{shot_no}/Diagnostics/BasicDiagnostics/DetectPlasma/Results/{name}'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def get_scalar(shot_no, name):\n",
" return float(ds.open(scalars_URL.format(shot_no=shot_no, name=name)).read())"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"t_plasma_start = get_scalar(shot_no, 't_plasma_start')\n",
"t_plasma_end = get_scalar(shot_no, 't_plasma_end')\n",
"b_plasma = get_scalar(shot_no, 'b_plasma')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def load_channel(shot_no, chanel_id):\n",
" fname = ds.open(data_URL.format(shot_no=shot_no, ch_id=chanel_id)).name\n",
" data = isfreader.read_file(fname)\n",
" data[:, 0] = data[:, 0] * 1e3\n",
" return data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mixer = load_channel(shot_no, 1)\n",
"ref_saw = load_channel(shot_no, 3)\n",
"# phase_det = load_channel(shot_no, 4)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"x, y = mixer.shape\n",
"f_s = x / (mixer[-1, 0] - mixer[0, 0]) # ms -> kHz\n",
"print('Sampling frequency is {} MHz.'.format(round(f_s / 1000)))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Spectral analysis of the signal
\n",
"\n",
"The mixer signal is a base sine wave (the envelope of the mixing) at a frequency close to 500 kHz. The reference saw-tooth frequency sweeping wave has the same base frequency, but with a trail of harmonics forming the sharp saw-tooth shape."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def calculate_spectrum(signal, target_dfreq=10):\n",
" nperseg = int(f_s / target_dfreq)\n",
" f, psd = sigproc.welch(signal[:, 1], fs=f_s, nperseg=nperseg, nfft=next_fast_len(nperseg))\n",
" return f, psd"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ref_saw_f, ref_saw_psd = calculate_spectrum(ref_saw)\n",
"mixer_f, mixer_psd = calculate_spectrum(mixer)\n",
"f_base = mixer_f[mixer_psd.argmax()]\n",
"f_base_ref_saw = ref_saw_f[ref_saw_psd.argmax()]\n",
"print('The base frequency of the mixer is {} kHz.'.format(f_base))\n",
"print('The base frequency of the ref_saw is {} kHz.'.format(f_base_ref_saw))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig, ax = plt.subplots(dpi=150)\n",
"ax.set(xlabel='frequency [kHz]', ylabel='power spectral density [V$^2$]')\n",
"ax.plot(ref_saw_f, ref_saw_psd, label='ref_saw')\n",
"ax.plot(mixer_f, mixer_psd, label='mixer')\n",
"plt.axvline(f_base, label=f'base frequency f={f_base:.0f} kHz', color='C3')\n",
"ax.loglog()\n",
"plt.grid()\n",
"plt.legend();"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Extract the baseband from the signal
\n",
"\n",
"The instantaneous phase and amplitude of the base signal can be inferred only for the baseband, i.e. by removing higher and lower frequencies (i.e. extracting the base sine wave from the reference saw-tooth signal)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"base_band_hwidth = 50 # kHz\n",
"base_band_filter = sigproc.iirfilter(8, [f_base - base_band_hwidth, f_base + base_band_hwidth], fs=f_s, btype='bandpass', output='sos')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def freq_filter(signal, sos_filter):\n",
" signal[:, 1] = sigproc.sosfiltfilt(sos_filter, signal[:, 1])\n",
" return signal"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mixer_filtered = freq_filter(mixer, base_band_filter)\n",
"ref_saw_filtered = freq_filter(ref_saw, base_band_filter)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"cut 0.1 ms from the beginning and from the end for better signal processing"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"mixer_filtered = mixer_filtered[(mixer_filtered[:, 0] < (mixer_filtered[-1, 0] - 0.1)) & ((mixer_filtered[0, 0] + 0.1) < mixer_filtered[:, 0])]\n",
"ref_saw_filtered = ref_saw_filtered[(ref_saw_filtered[:, 0] < (ref_saw_filtered[-1, 0] - 0.1)) & ((ref_saw_filtered[0, 0] + 0.1) < ref_saw_filtered[:, 0])]"
]
},
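  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "An optional visual check (an illustrative addition, not required by the rest of the notebook): a short time window of the filtered mixer and reference signals, which should both look like nearly pure sine waves at the base frequency."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "fig, ax = plt.subplots(dpi=150)\n",
    "# show only the first ~0.01 ms, i.e. a few periods of the ~500 kHz base wave\n",
    "sel_m = mixer_filtered[:, 0] < mixer_filtered[0, 0] + 0.01\n",
    "sel_r = ref_saw_filtered[:, 0] < ref_saw_filtered[0, 0] + 0.01\n",
    "ax.plot(mixer_filtered[sel_m, 0], mixer_filtered[sel_m, 1], label='mixer (filtered)')\n",
    "ax.plot(ref_saw_filtered[sel_r, 0], ref_saw_filtered[sel_r, 1], label='ref_saw (filtered)')\n",
    "ax.set(xlabel='time [ms]', ylabel='signal [V]')\n",
    "plt.grid()\n",
    "plt.legend();"
   ]
  },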
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Define signal processing functions
\n",
"\n",
"func find_peaks - finds peaks with optimal output array"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def find_peaks(data):\n",
" peaks_indexes, _ = sigproc.find_peaks(data[:, 1])\n",
" return np.vstack((data[peaks_indexes, 0], data[peaks_indexes, 1])).T"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func initial_phase_shift - mixer and ref_saw signals are in general a bit phase shifted from each other -> it calculates \"initial\" phase shift and removes it"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def initial_phase_shift(peaks, peaks_ref):\n",
" phase_mean = peaks[0, 0] - peaks_ref[0, 0]\n",
" peaks_ref[:, 0] += phase_mean\n",
" return peaks_ref"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func cut_edges - cut first and last data point, which is distorted from the spectral filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def cut_edges(peaks, peaks_ref):\n",
" peaks = peaks[(peaks[0, 0] < peaks[:, 0]) & (peaks[:, 0] < peaks[-1, 0])]\n",
" peaks_ref = peaks_ref[(peaks_ref[0, 0] < peaks_ref[:, 0]) & (peaks_ref[:, 0] < peaks_ref[-1, 0])]\n",
" return peaks, peaks_ref"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func smooth - classic func for signal smoothing"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def smooth(y, box_pts):\n",
" box = np.ones(box_pts) / box_pts\n",
" y_smooth = np.convolve(y, box, mode='same')\n",
" return y_smooth"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func without_correction - a sum of basic operations"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def without_correction(mixer_filtered, ref_saw_filtered):\n",
" peaks = find_peaks(mixer_filtered)\n",
" peaks_ref = find_peaks(ref_saw_filtered)\n",
" peaks_ref = initial_phase_shift(peaks, peaks_ref)\n",
" peaks, peaks_ref = cut_edges(peaks, peaks_ref)\n",
" return peaks, peaks_ref"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func find_nearest - finds the nearest peak of the given one"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def find_nearest(array, value):\n",
" array = array.copy()\n",
" x, y = array.shape\n",
" array_data = np.zeros((x, 1))\n",
" for i in range(0, x):\n",
" array_data[i, 0] = array[i, 0]\n",
" idx = (np.abs(array_data[:, 0] - value)).argmin()\n",
" return array_data[idx]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func calc_dphase_unchanged - calculates dphase from unrepaired data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def calc_dphase_unchanged(peaks, peaks_ref):\n",
" x_peaks, y_peaks = peaks.shape\n",
" x_ref_peaks, y_ref_peaks = peaks_ref.shape\n",
" dphase = np.ones((min(x_peaks, x_ref_peaks), 2))\n",
" for i in range(0, int(len(dphase))):\n",
" dphase[i, 0] = peaks[i, 0]\n",
" dphase[i, 1] = peaks[i, 0] - peaks_ref[i, 0]\n",
" dphase[:, 1] *= 2 * math.pi * f_base\n",
" return dphase"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func calc_lost_phase - calculates lost phase in the signal - to define how much the signal was damaged"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def calc_lost_phase(peaks, peaks_ref):\n",
" dphase = calc_dphase_unchanged(peaks, peaks_ref)\n",
" time_interval = 0.1 # ms\n",
" indexes = np.argwhere(dphase[:, 0] > (dphase[-1, 0] - time_interval))\n",
" data = dphase[indexes[:, 0]]\n",
" return np.average(data[:, 1]), dphase"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func optimizing_cycle - defines the most probable parts of the data, where the signal was damaged and deletes the corresponding waveforms from the reference signal, because these waveforms did not travelled properly through the plasma"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def optimizing_cycle(number_of_bad_peaks, deriv_sort, distance, peaks, peaks_ref):\n",
" bad_peaks_indexes = np.empty((0, 1))\n",
" k = 0 # help variable\n",
" l = 0 # help variable\n",
" while k < number_of_bad_peaks:\n",
" index = np.argwhere(peaks[:, 0] == deriv_sort[l, 0])\n",
" if len(bad_peaks_indexes) != 0 and (abs((index[0, 0] - find_nearest(bad_peaks_indexes, index[0, 0]))) < distance): # or (t_plasma_end < peaks[index, 0]) or (peaks[index, 0] < t_plasma_start):\n",
" l += 1\n",
" else:\n",
" bad_peaks_indexes = np.vstack((bad_peaks_indexes, index[0, 0]))\n",
" peaks_ref = np.delete(peaks_ref, index, 0)\n",
" k += 1\n",
" l += 1\n",
" return bad_peaks_indexes, peaks_ref"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func repair - creates the most plausible repair of the given damaged interferometer data (probably caused by the scatter of the probing wave from the plasma, plasma instability, ...)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def repair(dphase, peaks, peaks_ref):\n",
" global dphase_final, bad_peaks_indexes, best_smooth_factor, best_distance\n",
" smooth_factors = np.zeros((0, 1))\n",
" distances = np.zeros((0, 1))\n",
" varieties = np.zeros((0, 1))\n",
" for smooth_factor in range(1, 20):\n",
" deriv = dphase.copy()\n",
" deriv[:, 1] = np.gradient(dphase[:, 1])\n",
" deriv[:, 1] = smooth((deriv[:, 1]), smooth_factor)\n",
" deriv_sort = deriv.copy()\n",
" deriv_sort = deriv_sort[deriv_sort[:, 1].argsort()[::-1]]\n",
" for distance in range(1, 20):\n",
" bad_peaks_indexes, repaired_ref_peaks = optimizing_cycle(number_of_bad_peaks, deriv_sort, distance, peaks, peaks_ref)\n",
" dphase_final = calc_dphase(peaks, repaired_ref_peaks)\n",
" dphase_final[:, 1] = smooth(dphase_final[:, 1], 100)\n",
" if np.min(dphase_final[:, 1]) > -0.1:\n",
" smooth_factors = np.vstack((smooth_factors, smooth_factor))\n",
" distances = np.vstack((distances, distance))\n",
" varieties = np.vstack((varieties, calc_curve_length(dphase_final)))\n",
" all = np.hstack((smooth_factors, distances, varieties))\n",
" if len(varieties) == 0:\n",
" print(\"Cannot repair interferometric data.\")\n",
" else:\n",
" varieties_min = varieties.argmin()\n",
" best_smooth_factor = int(all[int(varieties_min), 0])\n",
" best_distance = int(all[int(varieties_min), 1])\n",
" deriv = dphase.copy()\n",
" deriv[:, 1] = np.gradient(dphase[:, 1])\n",
" deriv[:, 1] = smooth((deriv[:, 1]), best_smooth_factor)\n",
" deriv_sort = deriv.copy()\n",
" deriv_sort = deriv_sort[deriv_sort[:, 1].argsort()[::-1]]\n",
" bad_peaks_indexes, repaired_ref_peaks = optimizing_cycle(number_of_bad_peaks, deriv_sort, best_distance, peaks, peaks_ref)\n",
" dphase_final = calc_dphase(peaks, repaired_ref_peaks)\n",
" return dphase_final, bad_peaks_indexes, best_smooth_factor, best_distance"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func calc_dphase - calculates dphase"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def calc_dphase(mixer_peaks, repaired_ref_peaks):\n",
" x_peaks, y_peaks = mixer_peaks.shape\n",
" x_ref_peaks, y_ref_peaks = repaired_ref_peaks.shape\n",
" dphase_final = np.ones((min(x_peaks, x_ref_peaks), 2))\n",
" dphase_final[:, 0] = repaired_ref_peaks[0:len(dphase_final), 0]\n",
" dphase_final[:, 1] = (mixer_peaks[0:len(dphase_final), 0] - repaired_ref_peaks[0:len(dphase_final), 0]) * 2 * math.pi * f_base\n",
" # for i in range(0, int(len(dphase_final))):\n",
" # dphase_final[i, 0] = repaired_ref_peaks[i, 0]\n",
" # dphase_final[i, 1] = mixer_peaks[i, 0] - repaired_ref_peaks[i, 0]\n",
" # dphase_final[:, 1] *= 2 * math.pi * f_base\n",
" return dphase_final"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"func calc_curve_length - calculates the length of the dphase curve as a key parameter to decide, whether the repair is good enough"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def calc_curve_length(dphase_final):\n",
" x, y = dphase_final.shape\n",
" length = 0\n",
" part_length = np.sqrt(((dphase_final[1:-1, 0] - dphase_final[0:-2, 0]) ** 2) + (dphase_final[1:-1, 1] - dphase_final[0:-2, 1]) ** 2)\n",
" # for i in range(1, x):\n",
" # length += np.sqrt(((dphase_final[i, 0] - dphase_final[i - 1, 0]) ** 2) + (dphase_final[i, 1] - dphase_final[i - 1, 1]) ** 2)\n",
" # # distance = np.sqrt((xy_inside[:, 0] - xy_inside[int(((n_i) * (n_i) - 1) / 2), 0]) ** 2 + (xy_inside[:, 1] - xy_inside[int(((n_i) * (n_i) - 1) / 2), 1]) ** 2)\n",
" return np.sum(part_length)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Final signal processing cycle"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# prepare data without any correction\n",
"peaks_no_corr, peaks_ref_no_corr = without_correction(mixer_filtered, ref_saw_filtered)\n",
"# calculate lost phase and dphase_zero - dphase without any correction\n",
"lost_phase, dphase_zero = calc_lost_phase(peaks_no_corr, peaks_ref_no_corr)\n",
"repaired_discharge = False\n",
"unrepairable_discharge = False\n",
"number_of_bad_peaks = round(lost_phase / (2 * math.pi))\n",
"# make data repair, if it is necessary\n",
"if 1 <= number_of_bad_peaks:\n",
" try:\n",
" dphase_final, bad_peaks_indexes, best_smooth_factor, best_distance = repair(dphase_zero, peaks_no_corr, peaks_ref_no_corr)\n",
" dphase_final[:, 1] = smooth(dphase_final[:, 1], 100)\n",
" repaired_discharge = True\n",
" except:\n",
" unrepairable_discharge = True\n",
" print('Cannot repair data.')"
]
},
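  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "An optional diagnostic (an illustrative addition, not part of the original processing chain): print the estimated lost phase and the number of waveforms the repair step tries to remove."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# lost_phase and number_of_bad_peaks come from the processing cycle above\n",
    "print('Lost phase: {:.2f} rad, i.e. about {} damaged waveform(s).'.format(lost_phase, number_of_bad_peaks))"
   ]
  },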
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Estimation of the electron density
\n",
"\n",
"The ordinary wave (O-mode) with a carrier frequency $\\omega$ traveling through a collisionless plasma with the plasma frequency $\\omega_{p} = \\sqrt{\\frac{n_e e^2}{\\epsilon_0 m_e}}$ has a refractive index $$N_O=\\sqrt{1-\\frac{\\omega_p^2}{\\omega^2}}$$\n",
"Under the assumption that the carrier wave frequency is much larger than the plasma frequency $\\omega>>\\omega_p$ this formula can be expanded into a Taylor series as $$N_O\\approx 1-\\frac{\\omega_p^2}{2\\omega^2}$$\n",
"A wave traveling through a medium with a refractive index $N(l)$ accumulates a total phase shift $\\varphi = \\frac{\\omega}{c} \\int N(l) \\mathrm{d}l$. Therefore, in comparison to a wave traveling in vacuum (or clear air) with $N\\approx 1$, the wave traveling through the plasma over a chord with length $L$ accumulates a relative phase shift of $$\\Delta \\varphi = \\frac{e^2}{2\\omega c\\epsilon_0 m_e}\\int\\limits_L n_e(l) \\mathrm{d}l$$\n",
"Therefore, it is possible to estimate the line-averaged density $\\bar n_e = \\frac{1}{L} \\int\\limits_L n_e(l) \\mathrm{d}l$ from the detected phase shift between the reference and mixer signals."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"omega_carrier = 2 * np.pi * 71e9 # 71 GHz microwave generator\n",
"a = 0.085 # limiter radius [m]\n",
"L = 2 * a # central plasma chord estimate\n",
"prop_const = constants.elementary_charge ** 2 / (2 * omega_carrier * constants.speed_of_light * constants.epsilon_0 * constants.m_e)"
]
},
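  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a quick sanity check (an illustrative addition, not required by the rest of the notebook), the formula above gives the line-averaged density as the detected phase shift divided by the product of the constant prop_const and the chord length $L$; the cell below computes the density corresponding to one full interference fringe (a $2\\pi$ phase shift)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# line-averaged density corresponding to a 2*pi phase shift (one fringe)\n",
    "ne_per_fringe = 2 * np.pi / (prop_const * L)\n",
    "print('One fringe corresponds to a line-averaged density of {:.2e} m^-3.'.format(ne_per_fringe))"
   ]
  },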
{
"cell_type": "markdown",
"metadata": {},
"source": [
"ne_lav - final line-averaged electron density"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if repaired_discharge:\n",
" ne_lav = dphase_final.copy()\n",
" ne_lav[:, 1] = ne_lav[:, 1] * (1 / (prop_const * L))\n",
" ne_lav = ne_lav[(ne_lav[:, 0] >= 0) & (ne_lav[:, 0] <= (t_plasma_end + 5))]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"dphase_zero_raw = dphase_zero.copy()\n",
"dphase_zero[:, 1] = smooth(dphase_zero[:, 1], 100)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"ne_lav_zero - final line-averaged electron density without any correction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ne_lav_zero = dphase_zero.copy()\n",
"ne_lav_zero[:, 1] = ne_lav_zero[:, 1] * (1 / (prop_const * L))\n",
"ne_lav_zero = ne_lav_zero[(ne_lav_zero[:, 0] >= 0) & (ne_lav_zero[:, 0] <= (t_plasma_end + 5))]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"ne_lav_zero_raw - final line-averaged electron density without any correction and without any smoothing - used to see more clearly, where the data was damaged and repaired in the last figure"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ne_lav_zero_raw = dphase_zero_raw.copy()\n",
"ne_lav_zero_raw[:, 1] = ne_lav_zero_raw[:, 1] * (1 / (prop_const * L))\n",
"ne_lav_zero_raw = ne_lav_zero_raw[(ne_lav_zero_raw[:, 0] >= 0) & (ne_lav_zero_raw[:, 0] <= (t_plasma_end + 5))]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Comparison of the final line-averaged electron density between the damaged and repaired signal"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if repaired_discharge:\n",
" fig, ax = plt.subplots(dpi=200)\n",
" ax.plot(ne_lav_zero[:, 0], ne_lav_zero[:, 1] / 1e18, label='damaged signal', linestyle='dotted', color='red')\n",
" ax.plot(ne_lav[:, 0], ne_lav[:, 1] / 1e18, label='repaired signal', color='deepskyblue')\n",
" if b_plasma:\n",
" if (np.max(ne_lav_zero[:, 1] / 1e18) < 0.1):\n",
" ax.set_ylim([-0.1, 5])\n",
" else:\n",
" for t in (t_plasma_start, t_plasma_end):\n",
" plt.axvline(t, color='k', linestyle='--')\n",
" else:\n",
" ax.set_ylim([-0.1, 5])\n",
" ax.set(xlabel='time [ms]', ylabel='$\\\\bar n_e$ [10$^{18}$ m$^{-3}$]')\n",
" plt.title('Line-averaged electron density (repaired waveforms: {})'.format(number_of_bad_peaks))\n",
" plt.legend()\n",
" plt.grid()\n",
" plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Final figure of the temporal evolution of the line-averaged electron density"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig, ax = plt.subplots(dpi=200)\n",
"if repaired_discharge:\n",
" ax.plot(ne_lav[:, 0], ne_lav[:, 1] / 1e18, label='$\\\\bar n_e$', color='deepskyblue')\n",
"else:\n",
" ax.plot(ne_lav_zero[:, 0], ne_lav_zero[:, 1] / 1e18, label='$\\\\bar n_e$', color='deepskyblue')\n",
"if b_plasma:\n",
" if (np.max(ne_lav_zero[:, 1] / 1e18) < 0.1):\n",
" ax.set_ylim([-0.1, 5])\n",
" else:\n",
" for t in (t_plasma_start, t_plasma_end):\n",
" plt.axvline(t, color='k', linestyle='--')\n",
"else:\n",
" ax.set_ylim([-0.1, 5])\n",
"ax.set(xlabel='time [ms]', ylabel='$\\\\bar n_e$ [10$^{18}$ m$^{-3}$]')\n",
"if unrepairable_discharge:\n",
" plt.title('Line-averaged electron density (!!!UNREPAIRED waveforms: {}!!!)'.format(number_of_bad_peaks))\n",
"else:\n",
" plt.title('Line-averaged electron density (repaired waveforms: {})'.format(number_of_bad_peaks))\n",
"plt.legend()\n",
"plt.grid()\n",
"plt.savefig('icon-fig.png')\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### FOR TESTING (orange lines are repaired locations)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if repaired_discharge:\n",
" fig, ax = plt.subplots(dpi=200)\n",
" ax.plot(ne_lav_zero_raw[:, 0], ne_lav_zero_raw[:, 1] / 1e18, label='damaged signal', color='red')\n",
" ax.plot(ne_lav[:, 0], ne_lav[:, 1] / 1e18, label='$\\\\bar n_e$', color='deepskyblue')\n",
" x, y = bad_peaks_indexes.shape\n",
" for i in range(0, x):\n",
" plt.axvline(peaks_no_corr[int(bad_peaks_indexes[i, 0]), 0], color='orange', linestyle='--')\n",
" ax.set(xlabel='time [ms]', ylabel='$\\\\bar n_e$ [10$^{18}$ m$^{-3}$]')\n",
" plt.title('best smooth factor: {}, best distance: {}, repaired waveforms: {}'.format(best_smooth_factor, best_distance, number_of_bad_peaks))\n",
" plt.legend()\n",
" plt.grid()\n",
" plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Save data\n",
"\n",
"file ne_lav.csv - the final line-averaged electron density data\n",
"file ne_lav_max.txt - max value of the line-averaged electron density\n",
"file ne_lav_mean.txt - mean value of the line-averaged electron density"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if repaired_discharge:\n",
" np.savetxt('ne_lav.csv', ne_lav, delimiter=\",\") # repaired data\n",
" if b_plasma:\n",
" ne_lav_plasma = ne_lav[(t_plasma_start < ne_lav[:, 0]) & (ne_lav[:, 0] < t_plasma_end)]\n",
" for m in ('mean', 'max'):\n",
" v = getattr(ne_lav_plasma[:, 1], m)()\n",
" with open(f'ne_lav_{m}', 'w') as f: f.write(f'{v:.3e}')\n",
"else:\n",
" np.savetxt('ne_lav.csv', ne_lav_zero, delimiter=\",\")\n",
" if b_plasma:\n",
" if unrepairable_discharge:\n",
" for m in ('mean', 'max'):\n",
" v = float('NaN') # unrepaired data\n",
" with open(f'ne_lav_{m}', 'w') as f: f.write(f'{v:.3e}')\n",
" else:\n",
" ne_lav_plasma = ne_lav_zero[(t_plasma_start < ne_lav_zero[:, 0]) & (ne_lav_zero[:, 0] < t_plasma_end)]\n",
" for m in ('mean', 'max'):\n",
" v = getattr(ne_lav_plasma[:, 1], m)()\n",
" with open(f'ne_lav_{m}', 'w') as f: f.write(f'{v:.3e}')\n",
" else:\n",
" for m in ('mean', 'max'):\n",
" v = 0 # no plasma\n",
" with open(f'ne_lav_{m}', 'w') as f:f.write(f'{v:.3e}')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
}
},
"nbformat": 4,
"nbformat_minor": 2
}