SwarmRequest

class viresclient.SwarmRequest(url=None, username=None, password=None, token=None, config=None, logging_level='NO_LOGGING')

Bases: viresclient._client.ClientRequest

Handles the requests to and downloads from the server.

Example usage:

from viresclient import SwarmRequest
# Set up connection with server
request = SwarmRequest()
# Set collection to use
request.set_collection("SW_OPER_MAGA_LR_1B")
# Set mix of products to fetch:
#  measurements (variables from the given collection)
#  models (magnetic model predictions at spacecraft sampling points)
#  auxiliaries (variables available with any collection)
request.set_products(
    measurements=["F", "B_NEC"],
    models=["CHAOS-Core"],
    auxiliaries=["QDLat", "QDLon"],
    sampling_step="PT10S"
)
# Fetch data from a given time interval
data = request.get_between(
    start_time="2014-01-01T00:00",
    end_time="2014-01-01T01:00"
)
# Load the data as an xarray.Dataset
ds = data.as_xarray()

Check what data are available with:

request.available_collections(details=False)
request.available_measurements("MAG")
request.available_auxiliaries()
request.available_models(details=False)
Parameters:
  • url (str) –
  • username (str) –
  • password (str) –
  • token (str) –
  • config (str or ClientConfig) –
  • logging_level (str) –
AUXILIARY_VARIABLES = ['Timestamp', 'Latitude', 'Longitude', 'Radius', 'Spacecraft', 'OrbitDirection', 'QDOrbitDirection', 'SyncStatus', 'Kp10', 'Kp', 'Dst', 'F107', 'IMF_BY_GSM', 'IMF_BZ_GSM', 'IMF_V', 'F10_INDEX', 'OrbitSource', 'OrbitNumber', 'AscendingNodeTime', 'AscendingNodeLongitude', 'QDLat', 'QDLon', 'QDBasis', 'MLT', 'SunDeclination', 'SunHourAngle', 'SunRightAscension', 'SunAzimuthAngle', 'SunZenithAngle', 'SunLongitude', 'SunVector', 'DipoleAxisVector', 'NGPLatitude', 'NGPLongitude', 'DipoleTiltAngle']
COLLECTIONS = {'EEF': ['SW_OPER_EEFATMS_2F', 'SW_OPER_EEFBTMS_2F', 'SW_OPER_EEFCTMS_2F'], 'EFI': ['SW_OPER_EFIA_LP_1B', 'SW_OPER_EFIB_LP_1B', 'SW_OPER_EFIC_LP_1B'], 'FAC': ['SW_OPER_FACATMS_2F', 'SW_OPER_FACBTMS_2F', 'SW_OPER_FACCTMS_2F', 'SW_OPER_FAC_TMS_2F'], 'IBI': ['SW_OPER_IBIATMS_2F', 'SW_OPER_IBIBTMS_2F', 'SW_OPER_IBICTMS_2F'], 'IPD': ['SW_OPER_IPDAIRR_2F', 'SW_OPER_IPDBIRR_2F', 'SW_OPER_IPDCIRR_2F'], 'MAG': ['SW_OPER_MAGA_LR_1B', 'SW_OPER_MAGB_LR_1B', 'SW_OPER_MAGC_LR_1B'], 'MAG_HR': ['SW_OPER_MAGA_HR_1B', 'SW_OPER_MAGB_HR_1B', 'SW_OPER_MAGC_HR_1B'], 'TEC': ['SW_OPER_TECATMS_2F', 'SW_OPER_TECBTMS_2F', 'SW_OPER_TECCTMS_2F']}
COLLECTION_SAMPLING_STEPS = {'EEF': 'PT90M', 'EFI': 'PT0.5S', 'FAC': 'PT1S', 'IBI': 'PT1S', 'IPD': 'PT1S', 'MAG': 'PT1S', 'MAG_HR': 'PT0.019S', 'TEC': 'PT1S'}
MAGNETIC_MODELS = ['IGRF', 'IGRF12', 'LCS-1', 'MF7', 'CHAOS-Core', 'CHAOS-Static', 'CHAOS-MMA-Primary', 'CHAOS-MMA-Secondary', 'CHAOS-6-Core', 'CHAOS-6-Static', 'CHAOS-6-MMA-Primary', 'CHAOS-6-MMA-Secondary', 'MCO_SHA_2C', 'MCO_SHA_2D', 'MLI_SHA_2C', 'MLI_SHA_2D', 'MLI_SHA_2E', 'MMA_SHA_2C-Primary', 'MMA_SHA_2C-Secondary', 'MMA_SHA_2F-Primary', 'MMA_SHA_2F-Secondary', 'MIO_SHA_2C-Primary', 'MIO_SHA_2C-Secondary', 'MIO_SHA_2D-Primary', 'MIO_SHA_2D-Secondary', 'AMPS', 'MCO_SHA_2X', 'CHAOS', 'CHAOS-MMA', 'MMA_SHA_2C', 'MMA_SHA_2F', 'MIO_SHA_2C', 'MIO_SHA_2D', 'SwarmCI']
MAGNETIC_MODEL_VARIABLES = ['F', 'B_NEC']
PRODUCT_VARIABLES = {'EEF': ['EEF', 'EEJ', 'RelErr', 'Flags'], 'EFI': ['U_orbit', 'Ne', 'Ne_error', 'Te', 'Te_error', 'Vs', 'Vs_error', 'Flags_LP', 'Flags_Ne', 'Flags_Te', 'Flags_Vs'], 'FAC': ['IRC', 'IRC_Error', 'FAC', 'FAC_Error', 'Flags', 'Flags_F', 'Flags_B', 'Flags_q'], 'IBI': ['Bubble_Index', 'Bubble_Probability', 'Flags_Bubble', 'Flags_F', 'Flags_B', 'Flags_q'], 'IPD': ['Ne', 'Te', 'Background_Ne', 'Foreground_Ne', 'PCP_flag', 'Grad_Ne_at_100km', 'Grad_Ne_at_50km', 'Grad_Ne_at_20km', 'Grad_Ne_at_PCP_edge', 'ROD', 'RODI10s', 'RODI20s', 'delta_Ne10s', 'delta_Ne20s', 'delta_Ne40s', 'Num_GPS_satellites', 'mVTEC', 'mROT', 'mROTI10s', 'mROTI20s', 'IBI_flag', 'Ionosphere_region_flag', 'IPIR_index', 'Ne_quality_flag', 'TEC_STD'], 'MAG': ['F', 'dF_AOCS', 'dF_other', 'F_error', 'B_VFM', 'B_NEC', 'dB_Sun', 'dB_AOCS', 'dB_other', 'B_error', 'q_NEC_CRF', 'Att_error', 'Flags_F', 'Flags_B', 'Flags_q', 'Flags_Platform', 'ASM_Freq_Dev'], 'MAG_HR': ['F', 'B_VFM', 'B_NEC', 'dB_Sun', 'dB_AOCS', 'dB_other', 'B_error', 'q_NEC_CRF', 'Att_error', 'Flags_B', 'Flags_q', 'Flags_Platform'], 'TEC': ['GPS_Position', 'LEO_Position', 'PRN', 'L1', 'L2', 'P1', 'P2', 'S1', 'S2', 'Elevation_Angle', 'Absolute_VTEC', 'Absolute_STEC', 'Relative_STEC', 'Relative_STEC_RMS', 'DCB', 'DCB_Error']}
available_auxiliaries()

Returns a list of the available auxiliary parameters.

available_collections(groupname=None, details=True)

Show details of available collections.

Parameters:
  • groupname (str) – one of: (“MAG”, “EFI”, etc.)
  • details (bool) – If True then print a nice output. If False then return a dict of available collections.
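For example, a minimal sketch of retrieving the collection listing as a dict (assuming a request set up as in the class example above; the exact contents depend on the server):

# Get the available collections as a dict instead of printing them
collections = request.available_collections(details=False)
print(collections)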
available_measurements(collection=None)

Returns a list of the available measurements for the chosen collection.

Parameters: collection (str) – one of: (“MAG”, “EFI”, “IBI”, “TEC”, “FAC”, “EEF”)
available_models(param=None, details=True, nice_output=True)

Show details of available models.

If details is True, return a dictionary of model names and details. If nice_output is True, the dictionary is also printed nicely. If details is False, return a list of model names. If param is set, only return entries whose names include this string.

Note

F = Fast-Track Products
C = Comprehensive Inversion
D = Dedicated Chain
MCO = Core / main
MLI = Lithosphere
MMA = Magnetosphere
MIO = Ionosphere
Parameters:
  • param (str) – one of “F C D MCO MLI MMA MIO”
  • details (bool) – True for a dict of details, False for a brief list
  • nice_output (bool) – If True, just print the dict nicely
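For example, a minimal sketch of narrowing the model listing (assuming a request set up as above; the details returned depend on the server):

# Just the names of the core / main field models
core_models = request.available_models(param="MCO", details=False)
print(core_models)
# Full details of all models, printed nicely
request.available_models(details=True, nice_output=True)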
clear_range_filter()

Remove all applied filters.

get_between(start_time=None, end_time=None, filetype='cdf', asynchronous=True, show_progress=True, nrecords_limit=None, tmpdir=None)

Make the server request and download the data.

Parameters:
  • start_time (datetime / ISO_8601 string) –
  • end_time (datetime / ISO_8601 string) –
  • filetype (str) – one of (‘csv’, ‘cdf’)
  • asynchronous (bool) – True for asynchronous processing, False for synchronous
  • show_progress (bool) – Set to False to remove progress bars
  • nrecords_limit (int) – Override the default limit per request (e.g. nrecords_limit=3456000)
  • tmpdir (str) – Override the default temporary file directory
Returns:

The downloaded data.

Return type:

ReturnedData
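For example, a sketch of a small synchronous request (synchronous processing is suitable only for short intervals; the collection and products are assumed to be set as in the class example above):

data = request.get_between(
    start_time="2014-01-01T00:00",
    end_time="2014-01-01T00:10",
    asynchronous=False,   # process synchronously (small requests only)
    show_progress=False   # suppress the progress bars
)
ds = data.as_xarray()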

get_model_info(models=None, custom_model=None, original_response=False)

Get model info from server.

Handles the same models input as .set_products(), and returns a dict like:

{'IGRF12': {'expression': 'IGRF12(max_degree=13,min_degree=0)', 'validity': {'start': '1900-01-01T00:00:00Z', 'end': '2020-01-01T00:00:00Z'}}, ...}

If original_response=True, return the list of dicts like:

[{'expression': 'MCO_SHA_2C(max_degree=16,min_degree=0)', 'name': 'MCO_SHA_2C', 'validity': {'start': '2013-11-30T14:38:24Z', 'end': '2018-01-01T00:00:00Z'}}, ...]

Parameters:
  • models (list/dict) – as with set_products
  • custom_model (str) – as with set_products
  • original_response (bool) –
Returns:

dict or list
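For example (a sketch, assuming a request set up as above; the validity dates returned depend on the models currently provided by the server):

info = request.get_model_info(models=["CHAOS-Core", "IGRF"])
print(info["CHAOS-Core"]["validity"])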

get_orbit_number(spacecraft, input_time)

Translate a time to an orbit number.

Parameters:
  • spacecraft (str) – one of (‘A’,’B’,’C’) or (“Alpha”, “Bravo”, “Charlie”)
  • input_time (datetime) – a point in time
Returns:

The current orbit number at the input_time

Return type:

int
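For example, a minimal sketch (the returned number depends on the server's orbit counter data):

from datetime import datetime

# Orbit number of Swarm Alpha at the start of 2016
orbit_number = request.get_orbit_number("A", datetime(2016, 1, 1))
print(orbit_number)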

get_times_for_orbits(spacecraft, start_orbit, end_orbit)

Translate a pair of orbit numbers to a time interval.

Parameters:
  • spacecraft (str) – one of (‘A’,’B’,’C’) or (“Alpha”, “Bravo”, “Charlie”)
  • start_orbit (int) – a starting orbit number
  • end_orbit (int) – a later orbit number
Returns:

(start_time, end_time) The start time of the start_orbit and the ending time of the end_orbit. (Based on ascending nodes of the orbits)

Return type:

tuple (datetime)
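For example, a sketch of fetching data for a given orbit range (the orbit numbers here are arbitrary placeholders):

# Convert an orbit range to a time interval, then request data for it
start_time, end_time = request.get_times_for_orbits("A", 8000, 8010)
data = request.get_between(start_time=start_time, end_time=end_time)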

list_jobs()

Return job information from the server.

Returns: dict
set_collection(*args)

Set the collection(s) to use.

Parameters: *args (str) – one or several collection names from .available_collections()
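For example, to read from two spacecraft together (collection names from the COLLECTIONS attribute above):

request.set_collection("SW_OPER_MAGA_LR_1B", "SW_OPER_MAGB_LR_1B")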
set_products(measurements=None, models=None, custom_model=None, auxiliaries=None, residuals=False, sampling_step=None)

Set the combination of products to retrieve.

If residuals=True then just get the measurement-model residuals, otherwise get both measurement and model values.

Parameters:
  • measurements (list(str)) – from .available_measurements(collection_key)
  • models (list(str)/dict) – from .available_models() or definable with custom expressions
  • custom_model (str) – path to a custom model in .shc format
  • auxiliaries (list(str)) – from .available_auxiliaries()
  • residuals (bool) – True to return only the measurement-model residuals
  • sampling_step (str) – ISO_8601 duration, e.g. 10 seconds: PT10S, 1 minute: PT1M
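For example, a sketch of requesting residuals against a single model (variable and model names taken from the lists above):

request.set_products(
    measurements=["F", "B_NEC"],
    models=["CHAOS-Core"],
    residuals=True,        # return only the measurement-model residuals
    sampling_step="PT10S"
)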
set_range_filter(parameter=None, minimum=None, maximum=None)

Set a filter to apply.

Filters data for minimum ≤ parameter ≤ maximum

Note

Apply multiple filters with successive calls to set_range_filter()

Parameters:
  • parameter (str) –
  • minimum (float) –
  • maximum (float) –
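For example, a sketch of chaining two filters (parameter names taken from the variable lists above; whether a given parameter can be filtered depends on what is included in the request):

# Keep only the northern high latitudes...
request.set_range_filter(parameter="Latitude", minimum=60, maximum=90)
# ...during geomagnetically quiet times
request.set_range_filter(parameter="Kp", minimum=0, maximum=3)
# Filters can be removed again with:
request.clear_range_filter()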

ReturnedData

class viresclient.ReturnedData(filetype=None, N=1, tmpdir=None)

Bases: object

Flexible object for working with data returned from the server

Holds a list of ReturnedDataFile objects under self.contents

Example usage:

...
data = request.get_between(..., ...)
data.sources
data.range_filters
data.magnetic_models
data.as_xarray()
data.as_dataframe(expand=True)
data.to_file()
as_dataframe(expand=False)

Convert the data to a pandas DataFrame.

If expand is True, expand some columns, e.g.:

B_NEC -> B_NEC_N, B_NEC_E, B_NEC_C

B_VFM -> B_VFM_i, B_VFM_j, B_VFM_k

Parameters: expand (bool) –
Returns: pandas.DataFrame
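For example (assuming data returned from a MAG collection as in the SwarmRequest example):

df = data.as_dataframe(expand=True)
# Vector columns such as B_NEC appear as B_NEC_N, B_NEC_E, B_NEC_C
print(df.columns)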
as_xarray()

Convert the data to an xarray Dataset.

Returns: xarray.Dataset
contents

List of ReturnedDataFile objects

filetype

Filetype string

magnetic_models

Get list of magnetic models used.

range_filters

Get list of filters applied.

sources

Get list of source product identifiers.

to_file(path, overwrite=False)

Saves the data to the specified file, when the data is held in a single file.

Only write to file if it does not yet exist, or if overwrite=True. Currently handles CSV and CDF formats.

Note

This is currently only implemented for smaller data, when the request has not been split into multiple requests - the limit is the equivalent of 50 days of 1Hz measurements. When the data has been split, you can still load it as pandas/xarray objects (the contents of each file are automatically concatenated) and save them as a different file type, or use .to_files() to save the split data directly.

Parameters:
  • path (str) – path to the file to save as
  • overwrite (bool) – Will overwrite existing file if True
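For example (the path is a placeholder; the extension should match the requested filetype):

data.to_file("./testfile.cdf", overwrite=True)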
to_files(paths, overwrite=False)

Saves the data to the specified files.

Only write to file if it does not yet exist, or if overwrite=True. Currently handles CSV and CDF formats.

Parameters:
  • paths (list of str) – paths to the files to save as
  • overwrite (bool) – Will overwrite existing file if True
class viresclient.ReturnedDataFile(filetype=None, tmpdir=None)

Bases: object

For handling individual files returned from the server.

Holds the data returned from the server and the data type. Data is held in a NamedTemporaryFile, which is automatically closed and destroyed when it goes out of scope. Provides output to different file types and data objects.

as_dataframe(expand=False)

Convert the data to a pandas DataFrame.

Returns: pandas.DataFrame
as_xarray(group=None)

Convert the data to an xarray Dataset.

Note

Does not support csv

Only supports scalars and 3D vectors (currently)

Returns: xarray.Dataset
filetype

Filetype is one of (“csv”, “cdf”, “nc”)

magnetic_models

Get list of magnetic models used.
open_cdf()

Returns the opened file as cdflib.CDF

range_filters

Get list of filters applied.

sources

Get list of source product identifiers.
to_file(path, overwrite=False)

Saves the data to the specified file.

Only write to file if it does not yet exist, or if overwrite=True. Currently handles CSV and CDF formats.

Parameters:
  • path (str) – path to the file to save as
  • overwrite (bool) – Will overwrite existing file if True
to_netcdf(path, overwrite=False)

Saves the data as a netCDF4 file (this is compatible with HDF5)

Extension should be .nc

ClientConfig

class viresclient.ClientConfig(path=None)

Bases: object

Client configuration.

Example usage:

cc = ClientConfig()      # use default configuration file
cc = ClientConfig("./viresconf.ini")  # use custom configuration file

print(cc.path)           # print path
print(cc)                # print the whole configuration

cc.default_url = "https://foo.bar/ows"  # set default server

# access to credentials configuration ...
cc.set_site_config("https://foo.bar/ows", username="...", password="...")
cc.set_site_config("https://foo2.bar/ows", token="...")

cc.save()    # save configuration
default_url

Get default URL or None if not set.

get_site_config(url)

Get configuration for the given URL.

init(env_var_name='VIRES_ACCESS_CONFIG')

Initialize client configuration.

path

Get path of the configuration file.

save()

Save the configuration file.

set_site_config(url, **options)

Set configuration for the given URL.

set_token

viresclient.set_token(url='https://vires.services/ows', token=None, set_default=False)

Set the access token for a given URL, using user input.

Get an access token at https://vires.services/accounts/tokens/

See https://viresclient.readthedocs.io/en/latest/config_details.html

This will create a configuration file if not already present, and store a token configuration for the given URL, replacing any existing token. It sets the given URL as the default if one is not already set. It uses getpass to hide the token from view.

Example usage:

set_token()
# user prompted for input of token, for https://vires.services/ows

set_token(url="https://vires.services/ows")
# user prompted for input of token, for given url

set_token(url="https://vires.services/ows", token="...")
# set a given url and token (no prompting)

DataUpload

class viresclient.DataUpload(url, token, **kwargs)

Bases: object

VirES for Swarm data upload API proxy.

Example usage:

from viresclient import ClientConfig, DataUpload

du = DataUpload("https://vires.services", token="...")

cc = ClientConfig()
url = cc.default_url
du = DataUpload(url, **cc.get_site_config(url))

# upload file
info = du.post("example.csv")
print(info)

# get information about the uploaded files
info = du.get()
print(info)

# remove any uploaded files
du.clear()

# check if the upload is valid and get list of missing mandatory parameters
info = du.post("example.cdf")
is_valid = info.get('is_valid', True)
missing_fields = info.get('missing_fields', {}).keys()
print(is_valid, missing_fields)

# get constant parameters
id = info['identifier']
parameters = du.get_constant_parameters(id)
print(parameters)

# set new constant parameters
parameters = du.set_constant_parameters(id, {'Radius': 7000000, 'Latitude': 24.0})
print(parameters)

# clear all constant parameters
parameters = du.set_constant_parameters(id, {}, replace=True)
print(parameters)

For more information about the supported file format, see the file format specification.

exception Error

Bases: Exception

Data upload error exception.

args
with_traceback()

Exception.with_traceback(tb) – set self.__traceback__ to tb and return self.

PATH_OWS = '/ows'
PATH_UPLOAD = '/custom_data/'
clear()

Remove all uploaded items.

delete(identifier)

REST/API DELETE request. Delete the item with the given identifier.

get(identifier=None)

REST/API GET request. If an identifier is provided, get info about the uploaded item. If no identifier is provided, list all uploaded items.

classmethod get_api_url(url)

Translate WPS URL path to the upload REST/API URL path.

get_constant_parameters(identifier)

Get dictionary of the currently set constant parameters.

classmethod get_ows_url(url)

Translate REST/API URL path to the upload WPS URL path.

ids

Get list of identifiers.

patch(identifier, data)

REST/API PATCH request. Update metadata of the uploaded dataset.

post(file, filename=None)

HTTP POST multipart/form-data request. Upload a file to the server and get info about the uploaded file.

set_constant_parameters(identifier, parameters, replace=False)

Set constant parameters from the given key/value dictionary. Set replace to True to replace the already set parameters rather than update them.