# Source code for tofu.plugins.AUG.SXR.data._core

# -*- coding: utf-8 -*-
"""
Provide basic python routines for load AUG SXR data in a ToFu-compatible format

@author: didiervezinet
"""

from __future__ import print_function
import sys

import os
import itertools as itt
import string
import numpy as np
import warnings
# import scipy.interpolate as scpinterp

import matplotlib.pyplot as plt
import datetime as dtm

# ToFu specific
import tofu.defaults as tfd
import tofu.pathfile as tfpf
import tofu.helper as tfh
import tofu.treat as tft
from ... import _path as _tfaug_path
from .. import _helper as tfaugSXRh
from .. import geom as tfaugsxrg

# AUG specific
import dd

__author__ = "Didier Vezinet"
__all__ = ["load"]




############################################################################
############################################################################
############################################################################
#       Default path for PreData saving
############################################################################


def _get_defaultsSavePathsdtime(SavePathObj=None, SavePathInp=None, dtime=None, dtFormat=tfd.dtmFormat, Type='Object'):
    assert SavePathObj is None or type(SavePathObj) is str, "Arg SavePathObj must be a str !"
    assert SavePathInp is None or type(SavePathInp) is str, "Arg SavePathInp must be a str !"
    assert dtime is None or type(dtime) is str or isinstance(dtime,dtm.datetime), "Arg dtime must be a str or a dtm.datetime instance !"
    assert type(dtFormat) is str, "Arg dtFormat must be a str !"
    RP = tfpf.Find_Rootpath()
    if SavePathInp is None:
        SavePathInp = RP+'/Inputs_'+Exp+'/'
    if SavePathObj is None:
        SavePathObj = RP+'/Objects_'+Exp+'/' if Type=='Object' else RP+'/Outputs_'+Exp+'/'
    if dtime is None:
        dtime = dtm.datetime.now()
    elif type(dtime) is str:
        dtime = dtm.strptime(dtime,dtFormat)
    return SavePathObj, SavePathInp, dtime







############################################################################
############################################################################
############################################################################
#       Data loading
############################################################################


def load(shot=None, Names=None, Mode='SSX', Dt=None, Join=True, tRef='fmin', Method='interp', NoGeom=True, Tofu=True, Verb=True, Test=True):
    """ Load SXR data from the AUG database and return it either as numpy
    arrays or as a ToFu-compatible object with appropriate methods

    Part of the difficulty is that all channels do not have the same sampling
    frequency and time vector. Hence, there is an option to uniformize the
    time base.

    Parameters
    ----------
    shot :      int
        Shot number for which the SXR data should be loaded
    Names :     None / str / list
        If provided, data is loaded only for the specified channel(s)
    Mode :      str
        Flag indicating whether data should be loaded from a down-sampled
        database ('SSX', faster loading) or from the complete database
        ('SX', depending on the channel)
    Dt :        None / iterable
        If provided, a len()==2 iterable giving the time interval of interest
        (recommended to avoid very large data files)
    Join :      bool
        Flag, useful when Mode='SX', indicating whether the different time
        bases of the channels shall be uniformized
    tRef :      None / str / np.ndarray
        Flag or time vector, useful when Mode='SX' and Join=True, indicating
        which time basis should be used as a reference
            - 'fmin': the time vector of the channel presenting the smallest
              sampling frequency is used as a reference
            - 'fmax': the time vector of the channel presenting the highest
              sampling frequency is used as a reference
            - any channel name: the time vector of the specified channel is
              used as a reference
            - np.ndarray: the provided time vector is used as a reference
    Method :    str
        Flag, useful when Mode='SX' and Join=True, indicating how the time
        bases are unified (only 'interp' is implemented)
    NoGeom :    bool
        Flag indicating whether the data should be stripped of its
        geometrical calibration factor (thus being expressed in W instead of
        W/m2), to allow for:
            - Application of a tofu-computed etendue in case of a LOS approach
            - Direct use of the signal in case of a VOS approach
    Tofu :      bool
        Flag indicating whether the loaded data should be returned as
        (SXR, t, Names), where the first two are np.ndarrays and Names is a
        list, or as a tofu.data.PreData object
    Verb :      bool
        Flag indicating whether extra comments should be printed to give
        feedback on the progress of the routine
    Test :      bool
        Flag indicating whether the inputs should be tested for conformity

    Returns
    -------
    out : tuple / tofu.data.PreData
    """
    if Test:
        assert type(shot) is int, "Arg shot must be a int !"
        assert Names is None or type(Names) is str or (hasattr(Names,'__iter__') and all([type(ss) is str for ss in Names])), "Arg Names must be a str or an iterable of str (channel names) !"
        assert Mode in ['SSX','SX'], "Arg Mode must be in ['SSX','SX'] !"
        assert type(Join) is bool, "Arg Join must be a bool !"
        assert Dt is None or (hasattr(Dt,'__iter__') and len(Dt)==2 and all([type(dt) in [float,int,np.float64] for dt in Dt]) and Dt[0]<Dt[1]), "Arg Dt be an iterable of len()==2 with increasing values !"
        assert tRef is None or type(tRef) is str or (type(tRef) is np.ndarray and tRef.ndim==1), "Arg tRef must be a str or a 1D np.ndarray !"
        assert not (type(tRef) is np.ndarray and Dt is not None), "Conflicting time interval between Dt and tRef !"
        assert Method in ['interp'], "Arg Method must be in ['interp'] !"
        assert type(NoGeom) is bool, "Arg NoGeom must be a bool !"
        assert type(Tofu) is bool, "Arg ToFu must be a bool !"
        assert type(Verb) is bool, "Arg Verb must be a bool !"

    # Pre-formatting input
    # Dict (channel -> shotfile info) is needed whenever the geometry factor
    # must be removed (NoGeom) or the full 'SX' database is used
    if Mode=='SSX' and not NoGeom:
        Names = [Names] if type(Names) is str else Names
        Names = sorted(tfaugSXRh._CamHeads().keys()) if Names is None else sorted(Names)
    else:
        Dict = tfaugSXRh._WhichSX(shot=shot, Names=Names, Verb=Verb)
        Names = sorted(Dict.keys())
    NN = len(Names)
    assert NN>0, "None of the desired channels available for {0}".format(shot)
    if type(tRef) is np.ndarray:
        # A user-provided reference time vector fixes the time interval
        Dt = [np.nanmin(tRef), np.nanmax(tRef)]
    # Enlarged interval (10% margin on each side) used when joining time bases
    Dtlarge = [Dt[0]-0.1*np.diff(Dt), Dt[1]+0.1*np.diff(Dt)] if not Dt is None else None

    # Loading
    # Use diagnostic SSX (down-sampled data)
    if Mode=='SSX':
        try:
            SXR, t, Names = _load_SSX(shot, Names, NN, Dt)
        except Exception:
            print(" Could not load SSX for ", shot)
            SXR, t, Names = np.empty((0,0)), np.empty((0,0)), []
        assert len(Names)==SXR.shape[1], " There seems to be a mistake in the number of channels !"
        # NOTE(review): indt is computed but never applied to SXR, and
        # _load_SSX already restricted the data to Dt, so the re-filter of t
        # below looks redundant; also Dt[0] raises TypeError if Dt is None —
        # confirm callers always provide Dt in SSX mode
        indt = (t>=Dt[0]) & (t<=Dt[1])
        t = t[(t>=Dt[0]) & (t<=Dt[1])]
    # Use diagnostic SX.
    elif Mode=='SX':
        Ldiags = [Dict[nn]['Diag'] for nn in Names]
        # Discard channels for which no SX shotfile/diag could be identified
        ind = [Ldiags[ii] is None for ii in range(0,NN)]
        if any(ind):
            print(" Could not find "+str([Names[ii] for ii in range(0,NN) if ind[ii]])+" in the SX shotfiles for {0}".format(shot))
            Names = [Names[ii] for ii in range(0,NN) if not ind[ii]]
            Ldiags = [Ldiags[ii] for ii in range(0,NN) if not ind[ii]]
            NN = len(Names)
        Ldiagsu = sorted(list(set(Ldiags)))
        SXR, t = [None for ii in range(0,NN)], [None for ii in range(0,NN)]
        dtl = Dtlarge if Join else Dt    # To have margin (for boundary effects) for unifying the time basis
        if Verb:
            print("---------------------------------------------------------", end="\n")
        # Load one SX shotfile at a time, each filling its own channels
        for ii in range(0,len(Ldiagsu)):
            try:
                SXR, t = _load_SX(shot, Ldiagsu[ii], Names, NN, Ldiags, dtl, SXR, t)
            except Exception:
                print(" Could not load "+Ldiagsu[ii]+" for "+str(shot))
        # Keep only the channels that were actually loaded
        ind = [ii for ii in range(0,NN) if not SXR[ii] is None]
        t = [t[ii] for ii in ind]
        Names = [Names[ii] for ii in ind]
        SXR = [SXR[ii] for ii in ind]
        NN = len(Names)
        if Join:
            if Verb:
                print("---------------------------------------------------------", end="\n")
                print(" Unifying the time bases")
            tRef = _get_tRef(Names, t, tRef)
            # NOTE(review): Dt[0] raises TypeError here if Dt is None — confirm
            tRef = tRef[(tRef>=Dt[0]) & (tRef<=Dt[1])]
            SXR, t = _Unify(SXR, t, tRef, Method=Method)
        elif Tofu:
            # A PreData object requires a single common time base
            Tofu = False
            warnings.warn("ToFu output can only be issued if Join=True !")

    # Remove the geometrical calibration factor (W/m2 -> W) channel by channel
    if NoGeom and NN>0:
        FourPiOnEt = np.asarray([Dict[nn]['MULTIA02'] for nn in Names])
        if type(SXR) is list:
            SXR = [SXR[ii]/FourPiOnEt[ii] for ii in range(0,NN)]
        else:
            SXR = SXR.dot(np.diag(1./FourPiOnEt))

    # Output, either as plain arrays or as a tft.PreData object
    if not Tofu:
        out = (SXR, t, Names)
    else:
        out = _create_PreData(shot, SXR, t, Names, Mode, Dt, tRef, Method, NoGeom)
    return out
def _load_SSX(shot, Names, NN, Dt): sh = dd.shotfile('SSX',shot) sxr, t = [None for ii in range(0,NN)], [None for ii in range(0,NN)] for ii in range(0,NN): try: load = sh(Names[ii]) time = load.time indin = np.ones((time.size,),dtype=bool) if not Dt is None: indin = (time>=Dt[0]) & (time<=Dt[1]) if not np.any(indin): indin[np.argsort(np.abs(time-np.mean(Dt)))[0:3]] = True t[ii] = time[indin] sxr[ii] = load.data[indin] except Exception: print(" Could not load SSX "+Names[ii]+" for "+str(shot)) sh.close() ind = [ii for ii in range(0,NN) if not t[ii] is None] t = [t[ii] for ii in ind] assert all([np.all(tt==t[0]) for tt in t]), " SSX do not have same time base for all channels in "+str(shot) t = t[0] Names = [Names[ii] for ii in ind] SXR = np.asarray([sxr[ii] for ii in ind]).T return SXR, t, Names def _load_SX(shot, diag, Names, NN, Ldiags, Dt, SXR, t, Verb=True): inds = [jj for jj in range(0,NN) if Ldiags[jj]==diag] sh = dd.shotfile(diag, shot) for jj in range(0,len(inds)): try: load = sh(Names[inds[jj]]) if Verb: print(Names[inds[jj]], end=" ") sys.stdout.flush() time = load.time if not Dt is None: indin = (time>=Dt[0]) & (time<=Dt[1]) if not np.any(indin): indin[np.argsort(np.abs(time-np.mean(Dt)))[0:3]] = True t[inds[jj]] = time[indin] SXR[inds[jj]] = load.data[indin] except Exception: if Verb: print("",end="\n") print(" Could not load "+diag+" "+Names[inds[jj]]+" for {0}".format(shot), end='\n') if Verb: print("",end="\n") sh.close() return SXR, t ############################################################################ ############################################################################ ############################################################################ # Secondary routines ############################################################################ def _get_tRef(Names, t, tRef): if type(tRef) is np.ndarray: assert tRef==np.unique(tRef), "Arg tRef must be a vector of increasing values !" 
else: assert tRef in Names or tRef in ['fmin','fmax'], "Arg tRef must be a channel name or in ['fmin','fmax'] !" if tRef in Names: tRef = t[Names.index(tRef)] else: lf = [1./np.mean(np.diff(tt)) for tt in t] tRef = t[int(np.argmin(lf))] if tRef=='fmin' else t[int(np.argmax(lf))] return tRef def _Unify(SXR, t, tRef, Method='interp'): SXRref = np.nan*np.ones((tRef.size,len(SXR))) if Method=='interp': for ii in range(0,len(SXR)): SXRref[:,ii] = np.interp(tRef, t[ii], SXR[ii]) return SXRref, tRef ############################################################################ ############################################################################ ############################################################################ # --------- Objects creation ------- ############################################################################ def _create_PreData(shot, SXR, t, Names, Mode, Dt, tRef, Method, NoGeom, Name=None, SavePath=None, Root=_tfaug_path._Root, save=True, SXInfo=True, Filt=True, TorPos=True, NamesOut=[], CorrDef=True, NamesCorr=[], Test=True): """ Get the data from AUG shotfile for chosen shot and time interval, resampled with chosen frequency and using chosen diagnostic and return it as a TFT.PreData object Paramaters ---------- Name str, user-defined name to be given to the dataset for customization, advised to leave None (default) which automatically include useful information shot int, shot number to be used for shotfile loading Dt list, bounds of the time interval to which data should be restricted in seconds (default : [0,10]) MovMeanFreq float, frequency to used for resampling of the data in Hz (default 200.e3) Diag str, diagnostic name to be used for shotfile loading, for SXR on AUG one can choose between 'SSX' and 'SX' (default) Exp str, experiment name used shotfile loading (default: 'AUG') NamesOut list, names of the SXR channels to be left out of the data because they are thought to be corrupted or useless (default : []) OutChanDef bool, flag 
indicating whereas the default selection of corrupted channels shall be used (default : True) SXInfo bool, flag indicating whereas the default selection of corrupted channels shall use info available from SXinfo (default : True) Filt bool, flag indicating whether the default selection of corrupted channels shall use info regarding the filter type of each diode (default : True) TorPos bool, flag indicating whether the default selection of corrupted channels shall use info regarding the toroidal position of each channel (default : True) Resamp bool, flag indicating whether data shall be resampled (default : True) interpkind str, type of interpolation to be used for the resampling (default : 'linear') CorrDef bool, flag indicating whereas the default selection of channels needing correction shall be used (default : True) NamesCorr list, names of the SXR channels that are thought to be needing automated correction by retrofit (default : []) SavePathObj str, absolute path of the AUG ToFu objects (TFG.Tor and TFG.GDetect) to be used (default : None => automatic) SavePathInp str, absolute path of the ... dtimeObj dtm.datetime object indicating the kind of kind of SXR diagnostic geometry that should be used (default : None => automatic) dtFormat str, format to be used in the file names for the dtime indication (default : "D%Y%m%d_T%H%M%S") Test bool, flag indicating whether inputs shall be tested for consistency Returns ------- Pre : TFT.PreData Object containing the prepared data, additional treatments can be done using the object methods (channel selection, noise estimation...) """ if Test: assert type(SXR) is np.ndarray, "Arg SXR must be a np.ndarray !" assert type(t) is np.ndarray and t.ndim==1 and t.size==SXR.shape[0], "Arg t must be a time vector with size=SXR.shape[0] !" assert all([type(ss) is bool for ss in [SXInfo,Filt,TorPos]]), "Args [OutChanDef,SXInfo,Filt,TorPos] must be bools !" 
assert NamesOut is None or (type(NamesOut) is list and all([type(nn) is str for nn in NamesOut])), "Arg NamesOut must be a list of Names !" # Get pathfileext of geometry files to pass to PreData in case of data plotting with geometry CamHs = tfaugSXRh._CamHeads() CamHs = sorted(list(set([CamHs[nn] for nn in Names]))) LIdDet = tfaugsxrg.load(Cams=CamHs, shot=shot, out='Id') if len(CamHs)>1: LIdDet = list(itt.chain.from_iterable([idg.get_LObjasLId(Cls='Detect')['Detect'] for idg in LIdDet])) else: LIdDet = LIdDet[0].get_LObjasLId(Cls='Detect')['Detect'] LIdDet = [idd for idd in LIdDet if idd.Name in Names] # Path for saving PreData object if SavePath is None: SavePath = Root+'/tofu/plugins/AUG/SXR/data/Outputs/' # Loading data in tft.PreData object USRdict = {'Mode':Mode, 'Dt':Dt, 'tRef':tRef, 'Method':Method, 'NoGeom':NoGeom} # Creating the PreData object Pre = tft.PreData(SXR, t=t, Chans=Names, Id=Name, Exp='AUG', shot=shot, Diag='SXR', SavePath=SavePath, LIdDet=LIdDet, DtRef=Dt) Pre.Id.set_USRdict(USRdict) # Getting Default corrupted channels out if any([SXInfo,Filt,TorPos]): LOut = {'Cam':[], 'CamHead':[], 'Name':[]} Lout = _GetDefOutChannels(shot, Pre.Id.LObj['Detect'], LOut, SXInfo=SXInfo, Filt=Filt, TorPos=TorPos) NamesOut = NamesOut+Lout['Name']+LOut['Cam']+LOut['CamHead'] Pre.Out_add(Val=sorted(list(set(NamesOut))), LCrit=['Name','Cam','CamHead']) # Getting defaults Corr channels Corr = {'Cam':[], 'CamHead':[], 'Name':[]} if CorrDef: Corr = _GetCorrChannels(shot, Corr) NamesCorr = NamesCorr+Corr['Name']+Corr['Cam']+Corr['CamHead'] Pre.Corr_add(Val=sorted(list(set(NamesCorr))), LCrit=['Name','Cam','CamHead']) return Pre def _GetDefOutChannels(shot, LObjDet, LOut={'Cam':[],'CamHead':[],'Name':[]}, SXInfo=True, Filt=True, TorPos=True): if TorPos: LOut['Cam'] += ['F'] if Filt: Filt = [(usr['FiltMat'],usr['FiltThick']) for usr in LObjDet['USRdict']] Filtu = list(set(Filt)) FiltRef = [Filt.count(ff) for ff in Filtu] FiltRef = Filtu[FiltRef.index(max(FiltRef))] 
LOut['Name'] = [LObjDet['Name'][ii] for ii in range(0,len(LObjDet['Name'])) if not (LObjDet['USRdict'][ii]['FiltMat'],LObjDet['USRdict'][ii]['FiltThick'])==FiltRef] # The rest is taken from SXinfo (update regularly) if SXInfo: if shot >= 31679: LOut['Name'] += ['F_019', 'J_057','J_058','J_059','J_060', 'J_079','J_082'] if shot >= 25987: LOut['Name'] += ['K_020','K_049','K_058'] if shot >= 26663: LOut['Name'] += ['K_057'] if shot >= 27489 and shot <= 31621: LOut['Name'] += ['I_061'] if shot >= 30506 and shot <= 31549: LOut['Name'] += ['I_060'] if shot >= 25823 and shot <= 25890: LOut['Cam'] += ['L'] if shot >= 30161 and shot <= 30425: LOut['Cam'] += ['H'] return LOut def _GetCorrChannels(shot, Corr={'Name':[],'Cam':[],'CamHead':[]}): if shot >= 30446: Corr['CamHead'] += ['H2'] if shot == 25854: Corr['CamHead'] += ['J2'] if shot >= 31802: Corr['Cam'] += 'L' Corr['Cam'] += 'M' return Corr