Skip to content

Public API

All public packages, functions and classes are available in this module.

Functions:

Data classes:

Plotting:

IDSMapping(ids)

Bases: Mapping

Empty arrays are excluded from the mapping. You can still get/set these keys directly, but key in map returns False if map['key'] is an empty array.

Parameters:

  • ids (Any) –

    IMAS DB entry for the IDS.

Attributes:

  • exclude_empty (bool) –
Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
def __init__(self, ids: Any) -> None:
    """Map the IMASDB object.

    Empty arrays are excluded from the mapping.
    You can still get/set these keys directly,
    but `key in map` returns `False` if `map['key']` is an empty array.

    Parameters
    ----------
    ids :
        IMAS DB entry for the IDS.

    Attributes
    ----------
    exclude_empty : bool
    """
    self._ids = ids

    # Flat set of every available data field, plus a nested path tree.
    self._keys: set[str] = set()
    self._paths: dict[str, Any] = {}

    # Populate both containers by recursively walking the IDS object.
    self.dive(ids, [])

dive(val, path)

Recursively find the data fields.

Parameters:

  • val

    Current nested object being evaluated

  • path (list) –

    Current path

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
def dive(self, val, path: list):
    """Recursively collect the data fields below `val`.

    Parameters
    ----------
    val :
        Current nested object being evaluated
    path : list
        Current path
    """
    # Strings are leaves we never index into.
    if isinstance(val, str):
        return

    is_array = isinstance(val, (np.ndarray, np.generic))

    # Sequence-like containers (but not numpy arrays): recurse per index.
    if hasattr(val, '__getitem__') and not is_array:
        for idx in range(len(val)):
            self.dive(val[idx], path + [str(idx)])
        return

    # Struct-like objects: recurse per attribute.
    if hasattr(val, '__dict__'):
        for attr, child in val.__dict__.items():
            self.dive(child, path + [attr])
        return

    # Anything else is only stored if it is a non-empty numpy value.
    if not is_array or val.size == 0:
        return

    str_path = '/'.join(path)
    self._keys.add(str_path)

    # Register the leaf in the nested path tree as well.
    node = self._paths
    for part in path[:-1]:
        node = node.setdefault(part, {})
    node[path[-1]] = str_path

find_by_group(pattern)

Find keys matching regex pattern by group.

The dict key is defined by match.groups(). Dict entries will be overwritten if the groups are not unique.

Parameters:

  • pattern (str) –

    Regex pattern (must contain groups)

Returns:

  • dict

    New dict with all matching key/value pairs.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
def find_by_group(self, pattern: str) -> dict[tuple | str, Any]:
    """Find keys matching regex pattern by group.

    The dict key is defined by `match.groups()`.
    Entries with identical groups overwrite each other.

    Parameters
    ----------
    pattern : str
        Regex pattern (must contain groups)

    Returns
    -------
    dict
        New dict with all matching key/value pairs.
    """
    # Anchor the pattern and expand index placeholders before compiling.
    regex = re.compile(replace_index_str(insert_re_caret_dollar(pattern)))

    matches: dict[tuple | str, Any] = {}
    for key in self._keys:
        match = regex.match(key)
        if not match:
            continue
        groups = match.groups()
        # A single group keys the dict by that group alone.
        matches[groups[0] if len(groups) == 1 else groups] = self[key]

    return matches

findall(pattern)

Find keys matching regex pattern.

Parameters:

  • pattern (str) –

    Regex pattern

Returns:

  • dict

    New dict with all matching key/value pairs.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
def findall(self, pattern: str) -> dict[str, Any]:
    """Find keys matching regex pattern.

    Parameters
    ----------
    pattern : str
        Regex pattern

    Returns
    -------
    dict
        New dict with all matching key/value pairs.
    """
    # Anchor the pattern and expand index placeholders before compiling.
    regex = re.compile(replace_index_str(insert_re_caret_dollar(pattern)))

    return {key: self[key] for key in self._keys if regex.match(key)}

get_at_index(variable, index, **kwargs)

Grab key with index replacement.

Example: IDSMapping.get_at_index(var, index=0)

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
144
145
146
147
148
149
150
151
def get_at_index(self, variable: str | IDSVariableModel,
                 index: int | Sequence[int], **kwargs) -> Any:
    """Grab key with index replacement.

    Example: `IDSMapping.get_at_index(var, index=0)`
    """
    # Resolve the concrete path for `index`, then look it up.
    return self[self._path_at_index(variable, index)]

length_of_key(key)

length_of_key gives you the number of entries of a (partial) ids path, or None if the length does not exist.

Note: this is different from the length of an IDSMapping, which is defined as the number of keys

Note: calling len(map[key]) works as well

Example:
map.length_of_key('1d_profiles')

Parameters:

  • key (str) –

    key

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
def length_of_key(self, key: str):
    """Return the number of entries of a (partial) ids path, or None if
    the length does not exist.

    Note: this is different from the length of an IDSMapping, which is
    defined as the number of keys

    Note: calling `len(map[key])` works as well

    ## Example:


    ```python
    map.length_of_key('1d_profiles')
    ```

    Parameters
    ----------
    key : str
        key
    """
    try:
        return len(self[key])
    except Exception:
        # Best effort: missing keys or unsized values yield None.
        return None

set_at_index(variable, index, value, **kwargs)

Grab key with index replacement.

Example: IDSMapping.set_at_index(var, value, index=0)

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
153
154
155
156
157
158
159
160
def set_at_index(self, variable: str | IDSVariableModel,
                 index: int | Sequence[int], value: Any, **kwargs):
    """Set key with index replacement.

    Example: `IDSMapping.set_at_index(var, value, index=0)`
    """
    # Resolve the concrete path for `index`, then assign to it.
    self[self._path_at_index(variable, index)] = value

sync(target)

Synchronize updated data back to IMAS db entry.

Shortcut for 'put' command.

Parameters:

  • target (ImasHandle) –

    Points to an IMAS db entry of where the data should be written.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
def sync(self, target: ImasHandle):
    """Synchronize updated data back to IMAS db entry.

    Shortcut for 'put' command.

    Parameters
    ----------
    target : ImasHandle
        Points to an IMAS db entry of where the data should be written.
    """
    # Record provenance on the target before writing the data.
    add_provenance_info(handle=target)

    with target.open() as entry:
        self._ids.put(db_entry=entry)

to_xarray(variables, empty_var_ok=False, **kwargs)

Return dataset for given variables.

Parameters:

  • variables (Sequence[str | IDSVariableModel]]) –

    Dictionary of data variables

  • empty_var_ok (bool, default: False ) –

    If True, silently skip data that are missing from the mapping. If False (default), raise an error when data that are missing from the dataset are requested.

Returns:

  • ds ( Dataset ) –

    Return query as Dataset

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
def to_xarray(
    self,
    variables: Sequence[str | IDSVariableModel],
    empty_var_ok: bool = False,
    **kwargs,
) -> xr.Dataset:
    """Return dataset for given variables.

    Parameters
    ----------
    variables : Sequence[str | IDSVariableModel]
        Sequence of data variables (variable names or variable models).
    empty_var_ok : bool
        If True, silently skip data that are missing from the mapping.
        If False (default), raise an error when data that are missing
        from the dataset are requested.
    **kwargs
        Accepted for interface compatibility; unused in this body.

    Returns
    -------
    ds : xr.Dataset
        Return query as Dataset

    Raises
    ------
    EmptyVarError
        When a variable contains empty data and `empty_var_ok` is False.
    """

    # NOTE: the error message below closes over the loop variable `var`
    # from the enclosing `for` loop, so this helper must only be called
    # from inside that loop.
    def _contains_empty(arr):
        if isinstance(arr, list):
            # An empty list, or any nested empty element, counts as empty.
            if len(arr) == 0:
                return True
            return any(_contains_empty(sub_arr) for sub_arr in arr)
        elif isinstance(arr, np.ndarray):
            return arr.size == 0
        elif isinstance(arr, (float, int)):
            # Scalars are never considered empty.
            return False
        else:
            raise ValueError(
                f"Don't know how to deal with: {var.name}: {arr}")

    import xarray as xr

    xr_data_vars: dict[str, tuple[list[str], np.ndarray]] = {}

    variables = lookup_vars(variables)

    for var in variables:
        # Each '/*/' in the path marks a dimension to gather over.
        parts = var.path.split('/*/')

        if len(parts) == 1:
            # No wildcard: the path addresses the data directly.
            xr_data_vars[var.name] = (var.dims, self[var.path])
            continue

        arr = self._read_array_from_parts(*parts)

        if _contains_empty(arr):
            if empty_var_ok:
                continue
            else:
                raise EmptyVarError(
                    f'Variable {var.name!r} contains empty data.')
        xr_data_vars[var.name] = ([*var.dims], arr)

    ds = xr.Dataset(data_vars=xr_data_vars)  # type: ignore

    return ds

write_array_in_parts(variable_path, data)

Write data back to the IDS: given the data and the variable path, where * denotes the dimensions, this function will figure out how to write it back to the IDS.

Parameters:

  • variable_path (str) –

    path of the variable in the IDS, something like 'profiles_1d/*/zeff'

  • data (DataArray) –

    data of the variable, in the correct dimensions (every star in the variable_path is a dimension in this array)

Returns:

  • None
Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_mapping.py
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
def write_array_in_parts(self, variable_path: str,
                         data: xr.DataArray) -> None:
    """Write `data` back to the IDS at `variable_path`.

    Every `*` in `variable_path` denotes a dimension of `data`; this
    function figures out how to write the array back to the IDS.

    Parameters
    ----------
    variable_path : str
        path of the variable in the IDS, something like 'profiles_1d/*/zeff'
    data : xr.DataArray
        data of the variable, in the correct dimensions (every star in the
        `variable_path` is a dimension in this array)

    Returns
    -------
    None
    """
    # Split the path on the wildcards and delegate to the worker.
    self._write_array_in_parts(data.data, *variable_path.split('/*/'))

ImasHandle

Bases: ImasBaseModel

is_local_db property

Return True if the handle points to a local imas database.

copy_data_to(destination)

Copy ids entry to given destination.

Parameters:

  • destination (ImasHandle) –

    Copy data to a new location.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
168
169
170
171
172
173
174
175
176
177
178
179
180
181
def copy_data_to(self, destination: ImasHandle):
    """Copy ids entry to given destination.

    Parameters
    ----------
    destination : ImasHandle
        Copy data to a new location.

    Raises
    ------
    OSError
        When the underlying copy operation fails.
    """
    logger.debug('Copy %s to %s', self, destination)

    # Wrap any failure in an OSError that names the source handle.
    try:
        copy_ids_entry(self, destination)
    except Exception as err:
        raise OSError(f'Failed to copy {self}') from err

delete()

Remove data from entry.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
183
184
185
186
187
188
189
190
191
192
193
194
@add_to_op_queue('Removing ids', '{self}')
def delete(self):
    """Remove data from entry.

    Deletes every file belonging to this entry (one per known suffix).
    Files that are already missing are logged as a warning and skipped.
    """
    # ERASE_PULSE operation is not yet supported by IMAS as of June 2022,
    # so remove the pulse files from disk manually instead.
    path = self.path()
    for suffix in SUFFIXES:
        to_delete = path.with_suffix(suffix)
        logger.debug('Removing %s', to_delete)
        try:
            to_delete.unlink()
        except FileNotFoundError:
            logger.warning('%s does not exist', to_delete)

entry(backend=imasdef.MDSPLUS_BACKEND)

Return reference to imas.DBEntry.

Parameters:

  • backend (optional, default: MDSPLUS_BACKEND ) –

    Which IMAS backend to use

Returns:

  • entry ( `imas.DBEntry` ) –

    IMAS database entry

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
322
323
324
325
326
327
328
329
330
331
332
333
334
335
def entry(self, backend=imasdef.MDSPLUS_BACKEND):
    """Return reference to `imas.DBEntry`.

    Note: this only constructs the DBEntry object; it does not open it.

    Parameters
    ----------
    backend : optional
        Which IMAS backend to use

    Returns
    -------
    entry : `imas.DBEntry`
        IMAS database entry
    """
    return imas.DBEntry(backend, self.db, self.shot, self.run, self.user)

exists()

Return true if the directory exists.

Returns:

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
158
159
160
161
162
163
164
165
166
def exists(self) -> bool:
    """Return true if the directory exists.

    Returns
    -------
    bool
    """
    base = self.path()
    # Every known suffix must be present for the entry to count as existing.
    for suffix in SUFFIXES:
        if not base.with_suffix(suffix).exists():
            return False
    return True

from_string(string) classmethod

Return location from formatted string.

Format:

<user>/<db>/<shot>/<run>
<db>/<shot>/<run>

Default to the current user if the user is not specified.

For example:

g2user/jet/91234/555

Parameters:

  • string (str) –

    Input string containing imas db path

Returns:

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
@classmethod
def from_string(cls, string: str) -> ImasHandle:
    """Return location from formatted string.

    Format:

        <user>/<db>/<shot>/<run>
        <db>/<shot>/<run>

    Default to the current user if the user is not specified.

    For example:

        g2user/jet/91234/555

    Parameters
    ----------
    string : str
        Input string containing imas db path

    Returns
    -------
    ImasHandle

    Raises
    ------
    ValueError
        When the string does not match the expected format.
    """
    match = IMAS_PATTERN.match(string)

    # Guard clause: bail out early on malformed input.
    if match is None:
        raise ValueError(f'Could not match {string!r}')

    return cls(**match.groupdict())

get(ids='core_profiles')

Map the data to a dict-like structure.

Parameters:

  • ids (str, default: 'core_profiles' ) –

    Name of profiles to open

Returns:

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
218
219
220
221
222
223
224
225
226
227
228
229
230
231
def get(self, ids: str = 'core_profiles') -> IDSMapping:
    """Map the data to a dict-like structure.

    Parameters
    ----------
    ids : str, optional
        Name of profiles to open

    Returns
    -------
    IDSMapping
    """
    # Fetch the raw IDS object and wrap it in the mapping interface.
    return IDSMapping(self.get_raw_data(ids))

get_all_variables(extra_variables=[], squash=True, ids='core_profiles', **kwargs)

Get all variables that duqtools knows of from selected ids from the dataset.

This function looks up the data location from the duqtools.config.var_lookup table

Parameters:

  • variables (Sequence[IDSVariableModel]) –

    Extra variables to load in addition to the ones known by duqtools.

  • squash (bool, default: True ) –

    Squash placeholder variables

Returns:

  • ds ( xarray ) –

    The data in xarray format.

  • **kwargs

    These keyword arguments are passed to IDSMapping.to_xarray()

Raises:

  • ValueError

    When variables are from multiple IDSs.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
def get_all_variables(
    self,
    extra_variables: Sequence[IDSVariableModel] = (),
    squash: bool = True,
    ids: str = 'core_profiles',
    **kwargs,
) -> xr.Dataset:
    """Get all variables that duqtools knows of from selected ids from the
    dataset.

    This function looks up the data location from the
    `duqtools.config.var_lookup` table.

    Parameters
    ----------
    extra_variables : Sequence[IDSVariableModel], optional
        Extra variables to load in addition to the ones known by duqtools.
    squash : bool
        Squash placeholder variables
    ids : str, optional
        Name of the IDS to load variables from.
    **kwargs
        These keyword arguments are passed to `IDSMapping.to_xarray()`

    Returns
    -------
    ds : xarray
        The data in `xarray` format.

    Raises
    ------
    ValueError
        When variables are from multiple IDSs.
    """
    # Default is an immutable tuple rather than a mutable `[]`
    # (mutable default argument anti-pattern); behavior is unchanged
    # since the argument is only ever read.
    idsvar_lookup = var_lookup.filter_ids(ids)
    # Deduplicate the combined set of known + extra variables.
    variables = list(
        set(list(extra_variables) + list(idsvar_lookup.keys())))
    return self.get_variables(variables,
                              squash,
                              empty_var_ok=True,
                              **kwargs)

get_raw_data(ids='core_profiles', **kwargs)

Get data from IDS entry.

Parameters:

  • ids (str, default: 'core_profiles' ) –

    Name of profiles to open.

  • **kwargs

    These keyword parameters are passed to ImasHandle.open().

Returns:

  • data
Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
def get_raw_data(self, ids: str = 'core_profiles', **kwargs):
    """Get data from IDS entry.

    Parameters
    ----------
    ids : str, optional
        Name of profiles to open.
    **kwargs
        These keyword parameters are passed to `ImasHandle.open()`.

    Returns
    -------
    data
    """
    with self.open(**kwargs) as data_entry:
        raw = data_entry.get(ids)

    # Reset the string representation, because the default output is
    # extremely lengthy.
    _patch_str_repr(raw)

    return raw

get_variables(variables, squash=True, **kwargs)

Get variables from data set.

This function looks up the data location from the duqtools.config.var_lookup table, and returns the matching data as a dataset

Parameters:

  • variables (Sequence[Union[str, IDSVariableModel]]) –

    Variable names of the data to load.

  • squash (bool, default: True ) –

    Squash placeholder variables

Returns:

  • ds ( xarray ) –

    The data in xarray format.

  • **kwargs

    These keyword arguments are passed to IDSMapping.to_xarray()

Raises:

  • ValueError

    When variables are from multiple IDSs.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
def get_variables(
    self,
    variables: Sequence[str | IDSVariableModel],
    squash: bool = True,
    **kwargs,
) -> xr.Dataset:
    """Get variables from data set.

    This function looks up the data location from the
    `duqtools.config.var_lookup` table, and returns the matching data
    as an xarray dataset.

    Parameters
    ----------
    variables : Sequence[Union[str, IDSVariableModel]]
        Variable names of the data to load.
    squash : bool
        Squash placeholder variables
    **kwargs
        These keyword arguments are passed to `IDSMapping.to_xarray()`

    Returns
    -------
    ds : xarray
        The data in `xarray` format.

    Raises
    ------
    ValueError
        When no variables are given, or when variables are from
        multiple IDSs.
    """
    var_models = lookup_vars(variables)

    # Guard against an empty request: without this, `var_models[0]`
    # below would fail with an uninformative IndexError.
    if not var_models:
        raise ValueError('No variables given.')

    idss = {var.ids for var in var_models}

    if len(idss) > 1:
        raise ValueError(
            f'All variables must belong to the same IDS, got {idss}')

    ids = var_models[0].ids

    data_map = self.get(ids)

    ds = data_map.to_xarray(variables=var_models, **kwargs)

    if squash:
        ds = squash_placeholders(ds)

    return ds

imasdb_path()

Return path to imasdb.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
154
155
156
def imasdb_path(self) -> Path:
    """Return path to imasdb."""
    # The imasdb root is three directory levels above the pulse file.
    pulse_file = self.path()
    return pulse_file.parents[2]

open(backend=imasdef.MDSPLUS_BACKEND, create=False)

Context manager to open database entry.

Parameters:

  • backend (optional, default: MDSPLUS_BACKEND ) –

    Which IMAS backend to use

  • create (bool, default: False ) –

    Create empty database entry if it does not exist.

Yields:

  • entry ( `imas.DBEntry` ) –

    Opened IMAS database entry

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
@contextmanager
def open(self, backend=imasdef.MDSPLUS_BACKEND, create: bool = False):
    """Context manager to open database entry.

    Parameters
    ----------
    backend : optional
        Which IMAS backend to use
    create : bool, optional
        Create empty database entry if it does not exist.

    Yields
    ------
    entry : `imas.DBEntry`
        Opened IMAS database entry

    Raises
    ------
    OSError
        When the entry does not exist (and `create` is False), or when
        creating a new entry fails.
    """
    entry = self.entry(backend=backend)
    # A return code of 0 is treated as success for both open and create.
    opcode, _ = entry.open()

    if opcode == 0:
        logger.debug('Data entry opened: %s', self)
    elif create:
        cpcode, _ = entry.create()
        if cpcode == 0:
            logger.debug('Data entry created: %s', self)
        else:
            raise OSError(
                f'Cannot create data entry: {self}. '
                f'Create a new db first using `imasdb {self.db}`')
    else:
        raise OSError(f'Data entry does not exist: {self}')

    # Always close the entry, even if the caller's with-block raises.
    try:
        yield entry
    finally:
        entry.close()

path(suffix=SUFFIXES[0])

Return location as Path.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
def path(self, suffix=SUFFIXES[0]) -> Path:
    """Return location as Path."""
    imas_home = os.environ.get('IMAS_HOME')

    # Choose the path template based on where the entry lives.
    if self.is_local_db:
        template = LOCAL_PATH_TEMPLATE
    elif imas_home and self.user == 'public':
        template = imas_home + '/' + PUBLIC_PATH_TEMPLATE
    else:
        template = GLOBAL_PATH_TEMPLATE

    fields = {
        'user': self.user,
        'db': self.db,
        'shot': self.shot,
        'run': self.run,
        'suffix': suffix,
    }
    return Path(template.format(**fields))

paths()

Return location of all files as a list of Paths.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
150
151
152
def paths(self) -> List[Path]:
    """Return location of all files as a list of Paths."""
    # One path per known file suffix.
    return list(map(self.path, SUFFIXES))

to_string()

Generate string representation of Imas location.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
123
124
125
def to_string(self) -> str:
    """Generate string representation of Imas location."""
    # Joined as user/db/shot/run, the format `from_string` accepts.
    parts = (self.user, self.db, self.shot, self.run)
    return '/'.join(str(part) for part in parts)

validate()

Validate the user.

If the user is a path, then create it.

Raises:

  • ValueError:

    If the user is invalid.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_handle.py
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
def validate(self):
    """Validate the user.

    If the user is a path, then create it.

    Raises
    ------
    ValueError:
        If the user is invalid.
    """
    if self.is_local_db:
        # jintrac v220922: the user field is a path; make sure it exists.
        self.path().parent.mkdir(parents=True, exist_ok=True)
        return

    # jintrac v210921: only the current user and 'public' are accepted.
    if self.user not in (getuser(), 'public'):
        raise ValueError(f'Invalid user: {self.user}')

Job(path, *, cfg)

Parameters:

  • path (Path) –

    Directory for simulation or model run.

  • cfg (Optional[Config]) –

    Duqtools config, defaults to global config if unspecified.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/models/_job.py
46
47
48
49
50
51
52
53
54
55
56
57
def __init__(self, path: Path, *, cfg: Config):
    """Create a job bound to a run directory.

    Parameters
    ----------
    path : Path
        Directory for simulation or model run.
    cfg : Config
        Duqtools config for this job (required, keyword-only).
    """
    # Resolve to an absolute path so later comparisons are stable.
    self.path = Path(path).resolve()
    self.cfg = cfg

has_status: bool property

Return true if a status file exists.

has_submit_script: bool property

Return true if directory has submit script.

in_file: Path property

Return path to the input file for the job.

is_completed: bool property

Return true if the job has been completed successfully.

is_done: bool property

Return true if the job is done (completed or failed).

is_failed: bool property

Return true if the job has failed.

is_running: bool property

Return true if the job is running.

is_submitted: bool property

Return true if the job has been submitted.

lockfile: Path property

Return the path of the lockfile.

out_file: Path property

Return path to the output file for the job.

status_file: Path property

Return the path of the status file.

status_symbol property

One letter status symbol.

submit_script: Path property

Return the path of the submit script.

start()

Submit job and return a generator that raises StopIteration when done.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/models/_job.py
161
162
163
164
165
166
167
168
def start(self):
    """Submit job and return a generator that raises StopIteration when
    done."""
    # \033[K clears to the end of the terminal line.
    click.echo(f'Submitting {self}\033[K')
    self.submit()

    # Yield while the job is still active; the generator is exhausted
    # (StopIteration) once the status leaves RUNNING/NOSTATUS.
    while self.status() in (JobStatus.RUNNING, JobStatus.NOSTATUS):
        yield

status()

Return the status of the job.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/models/_job.py
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
def status(self) -> str:
    """Return the status of the job."""
    if not self.has_status:
        return JobStatus.NOSTATUS

    content = self.status_file.read_text()

    # Check the status markers in order of precedence.
    markers = (
        (self.cfg.system.msg_completed, JobStatus.COMPLETED),
        (self.cfg.system.msg_failed, JobStatus.FAILED),
        (self.cfg.system.msg_running, JobStatus.RUNNING),
    )
    for marker, state in markers:
        if marker in content:
            return state

    return JobStatus.SUBMITTED if self.is_submitted else JobStatus.UNKNOWN

status_symbol_help() staticmethod

Return help string for status codes.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/models/_job.py
63
64
65
66
@staticmethod
def status_symbol_help():
    """Return help string for status codes.

    Delegates to `JobStatus.symbol_help()`.
    """
    return JobStatus.symbol_help()

submit()

Submit job.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/models/_job.py
153
154
155
156
157
158
159
def submit(self):
    """Submit job.

    Creates the lockfile, then hands the job to the configured system
    for actual submission.
    """
    # NOTE(review): function-level import — presumably to avoid a
    # circular dependency between modules; confirm.
    from ..system import get_system
    debug(f'Put lockfile in place for {self.lockfile}')
    self.lockfile.touch()

    get_system(self.cfg).submit_job(self)

wait_until_done(time_step=1.0)

Submit task and wait until done.

Parameters:

  • time_step (float, default: 1.0 ) –

    Time in seconds step between status updates.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/models/_job.py
170
171
172
173
174
175
176
177
178
179
def wait_until_done(self, time_step: float = 1.0):
    """Poll the job status until it is no longer active.

    Parameters
    ----------
    time_step : float, optional
        Time in seconds step between status updates.
    """
    active_states = (JobStatus.RUNNING, JobStatus.NOSTATUS)
    while self.status() in active_states:
        time.sleep(time_step)

Variable

Bases: IDSPath

Variable for describing data within a IMAS database.

The variable can be given a name, which will be used in the rest of the config to reference the variable. It will also be used as the column labels or on plots.

The dimensions for each variable must be specified. This ensures that the data will be self-consistent. For example for 1D data, you can use [x] and for 2D data, [x, y].

The IDS path may contain indices. You can point to a single index, by simply giving the complete path (i.e. profiles_1d/0/t_i_ave for the 0th time slice). To retrieve all time slices, you can use profiles_1d/*/t_i_ave.

alt_errorband_chart(source, *, x, y, z='time')

Generate an altair errorband plot from a dataframe.

Parameters:

  • source (DataFrame) –

    Input dataframe

  • x (str) –

    X-value to plot, corresponds to a column in the source data

  • y (str) –

    Y-value to plot, corresponds to a column in the source data

  • z (str, default: 'time' ) –

    Slider variable (time), corresponds to a column in the source data

Returns:

  • Chart

    Return an altair chart.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/_plot_utils.py
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
def alt_errorband_chart(source: Union[pd.DataFrame, xr.Dataset],
                        *,
                        x: str,
                        y: str,
                        z: str = 'time') -> alt.Chart:
    """Generate an altair errorband plot from a dataframe.

    Draws the mean of `y` over `x` with a stdev errorband per time step,
    plus a dashed reference line; when more than one slider step exists,
    both get their own slider widget.

    Parameters
    ----------
    source : pd.DataFrame
        Input dataframe
    x : str
        X-value to plot, corresponds to a column in the source data
    y : str
        Y-value to plot, corresponds to a column in the source data
    z : str
        Slider variable (time), corresponds to a column in the source data

    Returns
    -------
    alt.Chart
        Return an altair chart.
    """
    source = _standardize_data(source, z=z)

    y_max = source[y].max()
    slider_max = source['slider'].max()

    # Mean of y per time step.
    mean_line = alt.Chart(source).mark_line().encode(
        x=f'{x}:Q',
        y=alt.Y(
            f'mean({y}):Q',
            scale=alt.Scale(domain=(0, y_max)),
            axis=alt.Axis(format='.4~g'),
        ),
        color=alt.Color('tstep:N'),
    )

    # altair-viz.github.io/user_guide/generated/core/altair.ErrorBandDef
    stdev_band = alt.Chart(source).mark_errorband(
        extent='stdev', interpolate='linear').encode(
            x=f'{x}:Q',
            y=f'{y}:Q',
            color=alt.Color('tstep:N'),
        )

    # Dashed reference line with its own (independent) time slider.
    reference = alt.Chart(source).mark_line(strokeDash=[5, 5]).encode(
        x=f'{x}:Q',
        y=f'mean({y}):Q',
        color=alt.Color('tstep:N'),
    )

    if slider_max != 0:
        time_slider = alt.binding_range(min=0, max=slider_max, step=1)
        time_select = alt.selection_point(name=z,
                                          fields=['slider'],
                                          bind=time_slider,
                                          value=0)

        mean_line = mean_line.add_params(time_select).transform_filter(
            time_select).interactive()
        stdev_band = stdev_band.add_params(time_select).transform_filter(
            time_select).interactive()

        ref_slider = alt.binding_range(name='Reference time index',
                                       min=0,
                                       max=slider_max,
                                       step=1)
        ref_select = alt.selection_point(name='reference',
                                         fields=['slider'],
                                         bind=ref_slider,
                                         value=0)

        reference = reference.add_params(ref_select).transform_filter(
            ref_select).interactive()

    return mean_line + stdev_band + reference

alt_line_chart(source, *, x, y, z='time', std=False)

Generate an altair line chart from a dataframe.

Parameters:

  • source (DataFrame) –

    Input dataframe

  • x (str) –

    X-value to plot, corresponds to a column in the source data

  • y (str) –

    Y-value to plot, corresponds to a column in the source data

  • z (str, default: 'time' ) –

    Slider variable (time), corresponds to a column in the source data

std : bool Plot the error bound from {y}_error_upper in the plot as well

Returns:

  • Chart

    Return an altair chart.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/_plot_utils.py
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
def alt_line_chart(source: Union[pd.DataFrame, xr.Dataset],
                   *,
                   x: str,
                   y: str,
                   z: str = 'time',
                   std: bool = False) -> alt.Chart:
    """Generate an altair line chart from a dataframe.

    Parameters
    ----------
    source : pd.DataFrame
        Input dataframe
    x : str
        X-value to plot, corresponds to a column in the source data
    y : str
        Y-value to plot, corresponds to a column in the source data
    z : str
        Slider variable (time), corresponds to a column in the source data

    std : bool
        Plot the error bound from {y}_error_upper in the plot as well

    Returns
    -------
    alt.Chart
        Return an altair chart.
    """
    source = _standardize_data(source, z=z)
    max_y = source[y].max()

    if std:
        # Symmetric bounds: the same `{y}_error_upper` column is used for
        # both the upper and the lower band edge.
        source[y + '_upper'] = source[y] + source[y + '_error_upper']
        source[y + '_lower'] = source[y] - source[y + '_error_upper']
        max_y = source[y + '_upper'].max()

    max_slider = source['slider'].max()

    if std:
        # Shaded area between the upper and lower error bounds, per run.
        band = alt.Chart(source).mark_area(opacity=0.3).encode(
            x=f'{x}:Q',
            y=alt.Y(f'{y}_upper:Q', title=y),
            y2=alt.Y2(f'{y}_lower:Q', title=y),
            color=alt.Color('run:N'),
        )

    line = alt.Chart(source).mark_line().encode(
        x=f'{x}:Q',
        y=alt.Y(
            f'{y}:Q',
            scale=alt.Scale(domain=(0, max_y)),
            axis=alt.Axis(format='.4~g'),
        ),
        color=alt.Color('run:N'),
        tooltip='run',
    )

    # Dashed reference line; below it is restricted to the first run and
    # given its own (independent) time slider.
    ref = alt.Chart(source).mark_line(strokeDash=[5, 5]).encode(
        x=f'{x}:Q', y=f'{y}:Q', color=alt.Color('run:N'), tooltip='run')

    if max_slider != 0:
        slider = alt.binding_range(name='Time index',
                                   min=0,
                                   max=max_slider,
                                   step=1)
        select_step = alt.selection_point(name=z,
                                          fields=['slider'],
                                          bind=slider,
                                          value=0)
        line = line.add_params(select_step).transform_filter(
            select_step).interactive()
        if std:
            band = band.transform_filter(select_step).interactive()

        first_run = source.iloc[0].run
        slider = alt.binding_range(name='Reference time index',
                                   min=0,
                                   max=max_slider,
                                   step=1)
        select_step = alt.selection_point(name='reference',
                                          fields=['slider'],
                                          bind=slider,
                                          value=0)

        ref = ref.add_params(select_step).transform_filter(
            select_step).transform_filter(
                alt.datum.run == first_run).interactive()

    if std:
        return line + ref + band
    else:
        return line + ref

create(config, **kwargs)

Wrapper around create for python api.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/create.py
302
303
304
305
306
307
308
309
310
311
312
313
def create_api(config: dict, **kwargs) -> dict[str, tuple[Job, Run]]:
    """Python-api wrapper around `create`.

    Builds a `Config` from the given dict, creates the runs and returns
    a mapping from run shortname to its `(Job, Run)` pair.
    """
    cfg = Config.from_dict(config)
    runs = create(cfg=cfg, **kwargs)

    if not runs:
        raise CreateError('No runs were created, check logs for errors.')

    out: dict[str, tuple[Job, Run]] = {}
    for run in runs:
        out[str(run.shortname)] = (Job(run.dirname, cfg=cfg), run)
    return out

duqmap(function, *, runs=None, **kwargs)

Duqmap is a mapping function which can be used to map a user defined function function over either the runs created by duqtools, or the runs specified by the user in runs.

An important gotcha is that when Paths are used to define the runs, duqtools does not know how to associate the corresponding ImasHandles, as that information is not available. So when using it in this way, it is not possible to provide a function which takes an ImasHandle as input.

Parameters:

  • function (Callable[[Run | ImasHandle], Any]) –

    function which is called for each run, specified either by runs, or implicitly by any available runs.yaml

  • runs (Optional[List[Run | Path]], default: None ) –

    A list of runs over which to operate the function

  • kwargs

    optional arguments that need to be passed to each function that you provide

Returns:

  • List[Any]:

    A list of anything that your function returns

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/duqmap.py
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
def duqmap(function: Callable[[Run | ImasHandle], Any],
           *,
           runs: Optional[List[Run | Path]] = None,
           **kwargs) -> List[Any]:
    """Duqmap is a mapping function which can be used to map a user defined
    function `function` over either the runs created by duqtools, or the runs
    specified by the user in `runs`.

    An important gotcha is that when `Paths` are used to define the runs, duqtools
    does not know how to associate the corresponding ImasHandles, as that information
    is not available.  So when using it in this way, it is not possible to provide a
    function which takes an `ImasHandle` as input.

    Parameters
    ----------
    function : Callable[[Run | ImasHandle], Any]
        function which is called for each run, specified either by `runs`, or implicitly
        by any available `runs.yaml`
    runs : Optional[List[Run | Path]]
        A list of runs over which to operate the function
    kwargs :
        optional arguments that need to be passed to each `function` that you provide

    Returns
    -------
    List[Any]:
        A list of anything that your function returns

    Raises
    ------
    NotImplementedError
        If `function` takes no arguments, or if the type annotation of its
        first argument is missing or is neither `Run` nor `ImasHandle`.
    """
    try:
        # Gets the type of the first argument to the function, if it exists
        argument = next(iter(signature(function).parameters.items()))[1]
    except Exception as err:
        raise NotImplementedError(
            f'Dont know how to map: {function}, which has no arguments') from err

    argument_type = argument.annotation

    # `issubclass` raises a bare TypeError when the annotation is absent
    # (`Parameter.empty`) or not a class (e.g. a string annotation under
    # `from __future__ import annotations`), so guard with `isinstance`
    # to fall through to the informative NotImplementedError instead.
    if isinstance(argument_type, type) and issubclass(argument_type, Run):
        map_fun: Callable[[Any], Any] = duqmap_run
    elif isinstance(argument_type, type) and issubclass(
            argument_type, ImasHandle):
        map_fun = duqmap_imas
    else:
        raise NotImplementedError('Dont know how to map function signature:'
                                  f' {function.__name__}{signature(function)}')

    return map_fun(function, runs=runs, **kwargs)  # type: ignore

get_status(config, **kwargs)

Wrapper around status for python api.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/status.py
218
219
220
221
def status_api(config: dict, **kwargs):
    """Python-api wrapper around `status`.

    Converts the config dict to a `Config` and forwards all keyword
    arguments to `status`.
    """
    return status(cfg=Config.from_dict(config), **kwargs)

rebase_all_coords(datasets, reference_dataset)

Rebase all coords, by applying rebase operations.

Parameters:

  • datasets (Sequence[Dataset]) –

    datasets

  • reference_dataset (Dataset) –

    reference_dataset

Returns:

  • tuple[Dataset, ...]
Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_rebase.py
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
def rebase_all_coords(
    datasets: Sequence[xr.Dataset],
    reference_dataset: xr.Dataset,
) -> tuple[xr.Dataset, ...]:
    """Rebase all coords, by applying rebase operations.

    Every dataset is interpolated (with extrapolation) onto all
    multi-valued coordinates of the reference dataset.

    Parameters
    ----------
    datasets : Sequence[xr.Dataset]
        datasets
    reference_dataset : xr.Dataset
        reference_dataset

    Returns
    -------
    tuple[xr.Dataset, ...]
    """
    # Single-valued coords cannot be interpolated on, so skip them.
    target_coords = {}
    for name, coord in reference_dataset.coords.items():
        if coord.size > 1:
            target_coords[name] = coord

    rebased = []
    for ds in datasets:
        rebased.append(
            ds.interp(coords=target_coords,
                      kwargs={'fill_value': 'extrapolate'}))

    return tuple(rebased)

rebase_on_grid(ds, *, coord_dim, new_coords)

Rebase (interpolate) the coordinate dimension to the new coordinates.

Thin wrapper around xarray.Dataset.interp.

Parameters:

  • ds (Dataset) –

    Source dataset

  • coord_dim (str) –

    Name of the grid dimension (i.e. grid variable).

  • new_coords (ndarray) –

    The coordinates to interpolate to

Returns:

  • Dataset

    Rebased dataset

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_rebase.py
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
def rebase_on_grid(ds: xr.Dataset, *, coord_dim: str,
                   new_coords: np.ndarray) -> xr.Dataset:
    """Rebase (interpolate) the coordinate dimension to the new coordinates.

    Thin wrapper around `xarray.Dataset.interp`.

    Parameters
    ----------
    ds : xr.Dataset
        Source dataset
    coord_dim : str
        Name of the grid dimension (i.e. grid variable).
    new_coords : np.ndarray
        The coordinates to interpolate to

    Returns
    -------
    xr.Dataset
        Rebased dataset
    """
    # Extrapolate so that points outside the original grid do not become NaN.
    interp_kwargs = {'fill_value': 'extrapolate'}
    return ds.interp(coords={coord_dim: new_coords}, kwargs=interp_kwargs)

rebase_on_time(ds, *, time_dim='time', new_coords)

Rebase (interpolate) the time dimension to the new coordinates.

Thin wrapper around xarray.Dataset.interp.

Parameters:

  • ds (Dataset) –

    Source dataset

  • time_dim (str, default: 'time' ) –

    Name of the time dimension (i.e. time variable).

  • new_coords (ndarray) –

    The coordinates to interpolate to

Returns:

  • Dataset

    Rebased dataset

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_rebase.py
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
def rebase_on_time(ds: xr.Dataset,
                   *,
                   time_dim='time',
                   new_coords: np.ndarray) -> xr.Dataset:
    """Rebase (interpolate) the time dimension to the new coordinates.

    Thin wrapper around `xarray.Dataset.interp`.

    Parameters
    ----------
    ds : xr.Dataset
        Source dataset
    time_dim : str
        Name of the time dimension (i.e. time variable).
    new_coords : np.ndarray
        The coordinates to interpolate to

    Returns
    -------
    xr.Dataset
        Rebased dataset
    """
    # With fewer than two time samples there is nothing to interpolate on.
    if len(ds[time_dim]) < 2:
        return ds
    return rebase_on_grid(ds, coord_dim=time_dim, new_coords=new_coords)

recreate(config, runs, **kwargs)

Wrapper around recreate for python api.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/create.py
354
355
356
357
358
359
360
361
362
363
364
365
366
def recreate_api(config: dict, runs: Sequence[Path],
                 **kwargs) -> dict[str, tuple[Job, Run]]:
    """Wrapper around recreate for python api.

    Parameters
    ----------
    config : dict
        Configuration dictionary, converted to a `Config`.
    runs : Sequence[Path]
        Run directories to recreate.
    **kwargs
        Passed through to `recreate`.

    Returns
    -------
    dict[str, tuple[Job, Run]]
        Mapping from run shortname to its `(Job, Run)` pair.

    Raises
    ------
    CreateError
        If no runs were recreated.
    """
    cfg = Config.from_dict(config)
    # Keep the `runs` argument (paths) distinct from the recreated Run
    # objects instead of shadowing it with a differently-typed value.
    recreated = recreate(cfg=cfg, runs=runs, **kwargs)

    if len(recreated) == 0:
        raise CreateError('No runs were recreated, check logs for errors.')

    return {
        str(run.shortname): (Job(run.dirname, cfg=cfg), run)
        for run in recreated
    }

standardize_grid_and_time(datasets, *, grid_var='rho_tor_norm', time_var='time', reference_dataset=0)

Standardize list of datasets by applying standard rebase operations.

Applies, in sequence: 1. rebase_on_grid 2. rebase_on_time

Parameters:

  • datasets (Sequence[Dataset]) –

    List of source datasets

  • grid_var (str, default: 'rho_tor_norm' ) –

    Name of the grid dimension (i.e. grid variable)

  • time_var (str, default: 'time' ) –

    Name of the time dimension (i.e. time variable)

  • reference_dataset (int, default: 0 ) –

    The dataset with this index will be used as the reference for rebasing. The grid and time coordinates of the other datasets will be rebased to the reference.

Returns:

  • tuple[Dataset]

    Tuple of output datasets

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/ids/_rebase.py
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
def standardize_grid_and_time(
    datasets: Sequence[xr.Dataset],
    *,
    grid_var: str = 'rho_tor_norm',
    time_var: str = 'time',
    reference_dataset: int = 0,
) -> tuple[xr.Dataset, ...]:
    """Standardize list of datasets by applying standard rebase operations.

    Applies, in sequence:
    1. `rebase_on_grid`
    2. `rebase_on_time`

    Parameters
    ----------
    datasets : Sequence[xr.Dataset]
        List of source datasets
    grid_var : str, optional
        Name of the grid dimension (i.e. grid variable)
    time_var : str, optional
        Name of the time dimension (i.e. time variable)
    reference_dataset : int, optional
        The dataset with this index will be used as the reference for rebasing.
        The grid and time coordinates of the other datasets will be rebased
        to the reference.

    Returns
    -------
    tuple[xr.Dataset]
        Tuple of output datasets
    """
    # Grid of the reference dataset; all datasets (including the reference
    # itself) are interpolated onto it.
    reference_grid = datasets[reference_dataset][grid_var].data

    datasets = tuple(
        rebase_on_grid(ds, coord_dim=grid_var, new_coords=reference_grid)
        for ds in datasets)

    # Time axis is taken from the already grid-rebased reference dataset.
    reference_time = datasets[reference_dataset][time_var].data

    datasets = tuple(
        rebase_on_time(ds, time_dim=time_var, new_coords=reference_time)
        for ds in datasets)

    return datasets

submit(config, **kwargs)

Wrapper around submit for python api.

Source code in /home/docs/checkouts/readthedocs.org/user_builds/duqtools/envs/latest/lib/python3.11/site-packages/duqtools/submit.py
271
272
273
274
def submit_api(config: dict, **kwargs):
    """Python-api wrapper around `submit`.

    Converts the config dict to a `Config` and forwards all keyword
    arguments to `submit`.
    """
    return submit(cfg=Config.from_dict(config), **kwargs)