signature
stringlengths
8
3.44k
body
stringlengths
0
1.41M
docstring
stringlengths
1
122k
id
stringlengths
5
17
def add_elasticache_replication_group(self, replication_group, region):
<EOL>if not self.all_elasticache_replication_groups and replication_group['<STR_LIT>'] != '<STR_LIT>':<EOL><INDENT>return<EOL><DEDENT>dest = replication_group['<STR_LIT>'][<NUM_LIT:0>]['<STR_LIT>']['<STR_LIT>']<EOL>if not dest:<EOL><INDENT>return<EOL><DEDENT>self.index[dest] = [region, replication_group['<STR_LIT>']]<E...
Adds an ElastiCache replication group to the inventory and index
f10185:c0:m20
def get_route53_records(self):
r53_conn = route53.Route53Connection()<EOL>all_zones = r53_conn.get_zones()<EOL>route53_zones = [ zone for zone in all_zones if zone.name[:-<NUM_LIT:1>]<EOL>not in self.route53_excluded_zones ]<EOL>self.route53_records = {}<EOL>for zone in route53_zones:<EOL><INDENT>rrsets = r53_conn.get_all_rrsets(zone.id)<EOL>for rec...
Get and store the map of resource records to domain names that point to them.
f10185:c0:m21
def get_instance_route53_names(self, instance):
instance_attributes = [ '<STR_LIT>', '<STR_LIT>',<EOL>'<STR_LIT>', '<STR_LIT>' ]<EOL>name_list = set()<EOL>for attrib in instance_attributes:<EOL><INDENT>try:<EOL><INDENT>value = getattr(instance, attrib)<EOL><DEDENT>except AttributeError:<EOL><INDENT>continue<EOL><DEDENT>if value in self.route53_records:<EOL><INDENT>n...
Check if an instance is referenced in the records we have from Route53. If it is, return the list of domain names pointing to said instance. If nothing points to it, return an empty list.
f10185:c0:m22
def get_host_info_dict_from_describe_dict(self, describe_dict):
<EOL>host_info = {}<EOL>for key in describe_dict:<EOL><INDENT>value = describe_dict[key]<EOL>key = self.to_safe('<STR_LIT>' + self.uncammelize(key))<EOL>if key == '<STR_LIT>' and value:<EOL><INDENT>host_info['<STR_LIT>'] = value['<STR_LIT>']<EOL>host_info['<STR_LIT>'] = value['<STR_LIT>']<EOL><DEDENT>if key == '<STR_LI...
Parses the dictionary returned by the API call into a flat list of parameters. This method should be used only when 'describe' is used directly because Boto doesn't provide specific classes.
f10185:c0:m24
def get_host(self, host):
if len(self.index) == <NUM_LIT:0>:<EOL><INDENT>self.load_index_from_cache()<EOL><DEDENT>if not host in self.index:<EOL><INDENT>self.do_api_calls_update_cache()<EOL>if not host in self.index:<EOL><INDENT>return {}<EOL><DEDENT><DEDENT>(region, instance_id) = self.index[host]<EOL>instance = self.get_instance(region, insta...
Get variables about a specific host
f10185:c0:m25
def push(self, my_dict, key, element):
group_info = my_dict.setdefault(key, [])<EOL>if isinstance(group_info, dict):<EOL><INDENT>host_list = group_info.setdefault('<STR_LIT>', [])<EOL>host_list.append(element)<EOL><DEDENT>else:<EOL><INDENT>group_info.append(element)<EOL><DEDENT>
Push an element onto an array that may not have been defined in the dict
f10185:c0:m26
def push_group(self, my_dict, key, element):
parent_group = my_dict.setdefault(key, {})<EOL>if not isinstance(parent_group, dict):<EOL><INDENT>parent_group = my_dict[key] = {'<STR_LIT>': parent_group}<EOL><DEDENT>child_groups = parent_group.setdefault('<STR_LIT>', [])<EOL>if element not in child_groups:<EOL><INDENT>child_groups.append(element)<EOL><DEDENT>
Push a group as a child of another group.
f10185:c0:m27
def load_inventory_from_cache(self):
cache = open(self.cache_path_cache, '<STR_LIT:r>')<EOL>json_inventory = cache.read()<EOL>self.inventory = json.loads(json_inventory)<EOL>
Reads the inventory from the cache file and returns it as a JSON object
f10185:c0:m28
def load_index_from_cache(self):
cache = open(self.cache_path_index, '<STR_LIT:r>')<EOL>json_index = cache.read()<EOL>self.index = json.loads(json_index)<EOL>
Reads the index from the cache file sets self.index
f10185:c0:m29
def write_to_cache(self, data, filename):
json_data = json.dumps(data, sort_keys=True, indent=<NUM_LIT:2>)<EOL>cache = open(filename, '<STR_LIT:w>')<EOL>cache.write(json_data)<EOL>cache.close()<EOL>
Writes data in JSON format to a file
f10185:c0:m30
def to_safe(self, word):
regex = "<STR_LIT>"<EOL>if not self.replace_dash_in_groups:<EOL><INDENT>regex += "<STR_LIT>"<EOL><DEDENT>return re.sub(regex + "<STR_LIT:]>", "<STR_LIT:_>", word)<EOL>
Converts 'bad' characters in a string to underscores so they can be used as Ansible groups
f10185:c0:m32
def read(fname):
return codecs.open(<EOL>os.path.join(os.path.dirname(__file__), fname),<EOL>encoding='<STR_LIT:utf-8>'<EOL>).read()<EOL>
Read a file
f10187:m0
def __str__(self):
return repr(self.error_message)<EOL>
r""" This just returns one of the error messages listed in the checkresponse() function
f10192:c0:m1
def __init__(self, token):
self.base_url = '<STR_LIT>'<EOL>self.token = token<EOL>self.geo_criteria = ['<STR_LIT>', '<STR_LIT:state>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',<EOL>'<STR_LIT>']<EOL>
r""" Instantiates an instance of MesoPy. Arguments: ---------- token: string, mandatory Your API token that authenticates you for requests against MesoWest.mes Returns: -------- None. Raises: ------- None.
f10192:c1:m0
@staticmethod<EOL><INDENT>def _checkresponse(response):<DEDENT>
results_error = '<STR_LIT>'<EOL>auth_error = '<STR_LIT>''<STR_LIT>'<EOL>rule_error = '<STR_LIT>''<STR_LIT>'<EOL>catch_error = '<STR_LIT>'<EOL>if response['<STR_LIT>']['<STR_LIT>'] == <NUM_LIT:1>:<EOL><INDENT>return response<EOL><DEDENT>elif response['<STR_LIT>']['<STR_LIT>'] == <NUM_LIT:2>:<EOL><INDENT>if response['<ST...
r""" Returns the data requested by the other methods assuming the response from the API is ok. If not, provides error handling for all possible API errors. HTTP errors are handled in the get_response() function. Arguments: ---------- None. Returns: -------- ...
f10192:c1:m1
def _get_response(self, endpoint, request_dict):
http_error = '<STR_LIT>''<STR_LIT>'<EOL>json_error = '<STR_LIT>'<EOL>try:<EOL><INDENT>qsp = urllib.parse.urlencode(request_dict, doseq=True)<EOL>resp = urllib.request.urlopen(self.base_url + endpoint + '<STR_LIT:?>' + qsp).read()<EOL><DEDENT>except AttributeError or NameError:<EOL><INDENT>try:<EOL><INDENT>qsp = urllib....
Returns a dictionary of data requested by each function. Arguments: ---------- endpoint: string, mandatory Set in all other methods, this is the API endpoint specific to each function. request_dict: string, mandatory A dictionary of parameters that are formatted ...
f10192:c1:m2
def _check_geo_param(self, arg_list):
geo_func = lambda a, b: any(i in b for i in a)<EOL>check = geo_func(self.geo_criteria, arg_list)<EOL>if check is False:<EOL><INDENT>raise MesoPyError('<STR_LIT>'<EOL>'<STR_LIT>')<EOL><DEDENT>
r""" Checks each function call to make sure that the user has provided at least one of the following geographic parameters: 'stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc'. Arguments: ---------- arg_list: list, mandatory A l...
f10192:c1:m3
def attime(self, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns a dictionary of latest observations at a user specified location for a specified time. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameter...
f10192:c1:m4
def precip(self, start, end, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT:start>'] = start<EOL>kwargs['<STR_LIT:end>'] = end<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns precipitation observations at a user specified location for a specified time. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may als...
f10192:c1:m6
def timeseries(self, start, end, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT:start>'] = start<EOL>kwargs['<STR_LIT:end>'] = end<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns a time series of observations at a user specified location for a specified time. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may ...
f10192:c1:m7
def climatology(self, startclim, endclim, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT>'] = startclim<EOL>kwargs['<STR_LIT>'] = endclim<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns a climatology of observations at a user specified location for a specified time. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may ...
f10192:c1:m8
def variables(self):
return self._get_response('<STR_LIT>', {'<STR_LIT>': self.token})<EOL>
Returns a dictionary of a list of variables that could be obtained from the 'vars' param in other functions. Some stations may not record all variables listed. Use the metadata() function to return metadata on each station. Arguments: ---------- None. Returns: ...
f10192:c1:m9
def climate_stats(self, startclim, endclim, type, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT:type>'] = type<EOL>kwargs['<STR_LIT>'] = startclim<EOL>kwargs['<STR_LIT>'] = endclim<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns a dictionary of aggregated yearly climate statistics (count, standard deviation, average, median, maximum, minimum, min time, and max time depending on user specified type) of a time series for a specified range of time at user specified location. Users must specify at least one geographic...
f10192:c1:m10
def time_stats(self, start, end, type, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT:type>'] = type<EOL>kwargs['<STR_LIT:start>'] = start<EOL>kwargs['<STR_LIT:end>'] = end<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns a dictionary of discrete time statistics (count, standard deviation, average, median, maximum, minimum, min time, and max time depending on user specified type) of a time series for a specified range of time at user specified location. Users must specify at least one geographic search param...
f10192:c1:m11
def metadata(self, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns the metadata for a station or stations. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may also be included. See below for optional ...
f10192:c1:m12
def latency(self, start, end, **kwargs):
self._check_geo_param(kwargs)<EOL>kwargs['<STR_LIT:start>'] = start<EOL>kwargs['<STR_LIT:end>'] = end<EOL>kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns data latency values for a station based on a start and end date/time. Users must specify at least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may also be inc...
f10192:c1:m13
def networks(self, **kwargs):
kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns the metadata associated with the MesoWest network ID(s) entered. Leaving this function blank will return all networks in MesoWest. Arguments: ---------- id: string, optional A single or comma-separated list of MesoNet network categories. e.g. ids='1,2,3' ...
f10192:c1:m14
def networktypes(self, **kwargs):
kwargs['<STR_LIT>'] = self.token<EOL>return self._get_response('<STR_LIT>', kwargs)<EOL>
r""" Returns the network type metadata depending on the ID specified. Can be left blank to return all network types. Arguments: ---------- id: string, optional A single or comma-separated list of MesoNet categories. e.g.: type_ids='1,2,3' Returns: -------- ...
f10192:c1:m15
def execute(action, io_loop=None):
if not io_loop:<EOL><INDENT>io_loop = IOLoop.current()<EOL><DEDENT>output = Future()<EOL>def call():<EOL><INDENT>try:<EOL><INDENT>result = _execute(_TornadoAction(action, io_loop))<EOL><DEDENT>except Exception:<EOL><INDENT>output.set_exc_info(sys.exc_info())<EOL><DEDENT>else:<EOL><INDENT>output.set_result(result)<EOL><...
Execute the given action and return a Future with the result. The ``forwards`` and/or ``backwards`` methods for the action may be synchronous or asynchronous. If asynchronous, that method must return a Future that will resolve to its result. See :py:func:`reversible.execute` for more details on the be...
f10197:m1
def _map_generator(f, generator):
item = next(generator)<EOL>while True:<EOL><INDENT>try:<EOL><INDENT>result = yield f(item)<EOL><DEDENT>except Exception:<EOL><INDENT>item = generator.throw(*sys.exc_info())<EOL><DEDENT>else:<EOL><INDENT>item = generator.send(result)<EOL><DEDENT><DEDENT>
Apply ``f`` to the results of the given bi-directional generator. Unfortunately, generator comprehension (``f(x) for x in gen``) does not work for as expected for bi-directional generators. It won't send exceptions and results back. This function implements a map function for generators that sends val...
f10198:m0
def gen(function, io_loop=None):
@functools.wraps(function) <EOL>def new_function(*args, **kwargs):<EOL><INDENT>try:<EOL><INDENT>value = function(*args, **kwargs)<EOL><DEDENT>except _RETURNS as result:<EOL><INDENT>return SimpleAction(<EOL>lambda ctx: ctx.value,<EOL>lambda _: None,<EOL>result,<EOL>)<EOL><DEDENT>else:<EOL><INDENT>if isinstance(value, t...
Allows using a generator to chain together reversible actions. This function is very similar to :py:func:`reversible.gen` except that it may be used with actions whose ``forwards`` and/or ``backwards`` methods are couroutines. Specifically, if either of those methods return futures the generated action...
f10198:m1
def lift(future):
return _Lift(future)<EOL>
Returns the result of a Tornado Future inside a generator-based action. Inside a :py:func:`reversible.tornado.gen` context, the meaning of ``yield`` changes to "execute this possibly asynchronous action and return the result." However sometimes it is necessary to execute a standard Tornado coroutine. T...
f10198:m2
def execute(action):
<EOL>try:<EOL><INDENT>return action.forwards()<EOL><DEDENT>except Exception:<EOL><INDENT>log.exception('<STR_LIT>', action)<EOL>try:<EOL><INDENT>action.backwards()<EOL><DEDENT>except Exception:<EOL><INDENT>log.exception('<STR_LIT>', action)<EOL>raise<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT>
Execute the given action. An action is any object with a ``forwards()`` and ``backwards()`` method. .. code-block:: python class CreateUser(object): def __init__(self, userinfo): self.userinfo = userinfo self.user_id = None def forwards(self): self.user_id =...
f10200:m0
def action(forwards=None, context_class=None):
context_class = context_class or dict<EOL>def decorator(_forwards):<EOL><INDENT>return ActionBuilder(_forwards, context_class)<EOL><DEDENT>if forwards is not None:<EOL><INDENT>return decorator(forwards)<EOL><DEDENT>else:<EOL><INDENT>return decorator<EOL><DEDENT>
Decorator to build functions. This decorator can be applied to a function to build actions. The decorated function becomes the ``forwards`` implementation of the action. The first argument of the ``forwards`` implementation is a context object that can be used to share state between the forwards and backwards implemen...
f10200:m1
def backwards(self, backwards):
if self._backwards is not None:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>self._backwards = backwards<EOL>return backwards<EOL>
Decorator to specify the ``backwards`` action.
f10200:c1:m2
def gen(function):
@functools.wraps(function) <EOL>def new_function(*args, **kwargs):<EOL><INDENT>try:<EOL><INDENT>value = function(*args, **kwargs)<EOL><DEDENT>except Return as result:<EOL><INDENT>return SimpleAction(<EOL>lambda ctx: ctx.value,<EOL>lambda _: None,<EOL>result,<EOL>)<EOL><DEDENT>else:<EOL><INDENT>if isinstance(value, typ...
Allows using a generator to chain together reversible actions. This decorator may be added to a generator that yields reversible actions (any object with a ``.forwards()`` and ``.backwards()`` method). These may be constructed manually or via :py:func:`reversible.action`. The decorated function, when called, will retu...
f10201:m0
@classmethod<EOL><INDENT>def tearDownClass(cls):<DEDENT>
writefiles = ['<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>',<EOL>'<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>',<EOL>'<STR_LIT>','<STR_LIT>','<STR_LIT>','<STR_LIT>',<EOL>'<STR_LIT>','<STR_LIT>','<STR_LIT>',<EOL>'<STR_LIT>','<STR_LIT>']<EOL>for file in writefiles:<EOL><INDENT>if os.path.isfile(file):<EOL><INDENT>os.r...
Clean up written files
f10205:c0:m15
def _rd_segment(file_name, dir_name, pb_dir, fmt, n_sig, sig_len, byte_offset,<EOL>samps_per_frame, skew, sampfrom, sampto, channels,<EOL>smooth_frames, ignore_skew):
<EOL>byte_offset = byte_offset[:]<EOL>samps_per_frame = samps_per_frame[:]<EOL>skew = skew[:]<EOL>for i in range(n_sig):<EOL><INDENT>if byte_offset[i] == None:<EOL><INDENT>byte_offset[i] = <NUM_LIT:0><EOL><DEDENT>if samps_per_frame[i] == None:<EOL><INDENT>samps_per_frame[i] = <NUM_LIT:1><EOL><DEDENT>if skew[i] == None:...
Read the digital samples from a single segment record's associated dat file(s). Parameters ---------- file_name : list The names of the dat files to be read. dir_name : str The full directory where the dat file(s) are located, if the dat file(s) are local. pb_dir : str The physiobank directory where th...
f10209:m0
def _rd_dat_signals(file_name, dir_name, pb_dir, fmt, n_sig, sig_len,<EOL>byte_offset, samps_per_frame, skew, sampfrom, sampto,<EOL>smooth_frames):
<EOL>tsamps_per_frame = sum(samps_per_frame)<EOL>read_len = sampto - sampfrom<EOL>(start_byte, n_read_samples, block_floor_samples,<EOL>extra_flat_samples, nan_replace) = _dat_read_params(fmt, sig_len,<EOL>byte_offset, skew,<EOL>tsamps_per_frame,<EOL>sampfrom, sampto)<EOL>total_read_bytes = _required_byte_num('<STR_LIT...
Read all signals from a WFDB dat file. Parameters ---------- file_name : str The name of the dat file * other params See docstring for `_rd_segment`. Returns ------- signals : numpy array, or list See docstring for `_rd_segment`. Notes ----- See docstring notes for `_rd_segment`.
f10209:m1
def _dat_read_params(fmt, sig_len, byte_offset, skew, tsamps_per_frame,<EOL>sampfrom, sampto):
<EOL>start_flat_sample = sampfrom * tsamps_per_frame<EOL>if (sampto + max(skew)) > sig_len:<EOL><INDENT>end_flat_sample = sig_len * tsamps_per_frame<EOL>extra_flat_samples = (sampto + max(skew) - sig_len) * tsamps_per_frame<EOL><DEDENT>else:<EOL><INDENT>end_flat_sample = (sampto + max(skew)) * tsamps_per_frame<EOL>extr...
Calculate the parameters used to read and process a dat file, given its layout, and the desired sample range. Parameters ---------- fmt : str The format of the dat file sig_len : int The signal length (per channel) of the dat file byte_offset : int The byte offset of the dat file skew : list The skew f...
f10209:m2
def _required_byte_num(mode, fmt, n_samp):
if fmt == '<STR_LIT>':<EOL><INDENT>n_bytes = math.ceil(n_samp*<NUM_LIT>)<EOL><DEDENT>elif fmt in ['<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>n_extra = n_samp % <NUM_LIT:3><EOL>if n_extra == <NUM_LIT:2>:<EOL><INDENT>if fmt == '<STR_LIT>':<EOL><INDENT>n_bytes = upround(n_samp * <NUM_LIT:4>/<NUM_LIT:3>, <NUM_LIT:4>)<EOL><DEDE...
Determine how many signal bytes are needed to read or write a number of desired samples from a dat file. Parameters ---------- mode : str Whether the file is to be read or written: 'read' or 'write'. fmt : str The wfdb dat format. n_samp : int The number of samples wanted. Returns ------- n_bytes : int ...
f10209:m3
def _rd_dat_file(file_name, dir_name, pb_dir, fmt, start_byte, n_samp):
<EOL>if fmt == '<STR_LIT>':<EOL><INDENT>byte_count = _required_byte_num('<STR_LIT>', '<STR_LIT>', n_samp)<EOL>element_count = byte_count<EOL><DEDENT>elif fmt in ['<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>byte_count = _required_byte_num('<STR_LIT>', fmt, n_samp)<EOL>element_count = byte_count<EOL><DEDENT>else:<EOL><INDENT>...
Read data from a dat file, either local or remote, into a 1d numpy array. This is the lowest level dat reading function (along with `_stream_dat` which this function may call), and is called by `_rd_dat_signals`. Parameters ---------- start_byte : int The starting byte number to read from. n_samp : int The to...
f10209:m4
def _blocks_to_samples(sig_data, n_samp, fmt):
if fmt == '<STR_LIT>':<EOL><INDENT>if n_samp % <NUM_LIT:2>:<EOL><INDENT>n_samp += <NUM_LIT:1><EOL>added_samps = <NUM_LIT:1><EOL>sig_data = np.append(sig_data, np.zeros(<NUM_LIT:1>, dtype='<STR_LIT>'))<EOL><DEDENT>else:<EOL><INDENT>added_samps = <NUM_LIT:0><EOL><DEDENT>sig_data = sig_data.astype('<STR_LIT>')<EOL>sig = n...
Convert uint8 blocks into signal samples for unaligned dat formats. Parameters ---------- sig_data : numpy array The uint8 data blocks. n_samp : int The number of samples contained in the bytes Returns ------- signal : numpy array The numpy array of digital samples
f10209:m5
def _skew_sig(sig, skew, n_sig, read_len, fmt, nan_replace, samps_per_frame=None):
if max(skew)><NUM_LIT:0>:<EOL><INDENT>if isinstance(sig, list):<EOL><INDENT>for ch in range(n_sig):<EOL><INDENT>if skew[ch]><NUM_LIT:0>:<EOL><INDENT>sig[ch][:read_len*samps_per_frame[ch]] = sig[ch][skew[ch]*samps_per_frame[ch]:]<EOL><DEDENT><DEDENT>for ch in range(n_sig):<EOL><INDENT>sig[ch] = sig[ch][:read_len*samps_p...
Skew the signal, insert nans and shave off end of array if needed. Parameters ---------- sig : numpy array The original signal skew : list List of samples to skew for each signal n_sig : int The number of signals Notes ----- `fmt` is just for the correct nan value. `samps_per_frame` is only used for skew...
f10209:m6
def _check_sig_dims(sig, read_len, n_sig, samps_per_frame):
if isinstance(sig, np.ndarray):<EOL><INDENT>if sig.shape != (read_len, n_sig):<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if len(sig) != n_sig:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>for ch in range(n_sig):<EOL><INDENT>if len(sig[ch]) != samps_per_frame[ch] * read_len...
Integrity check of a signal's shape after reading.
f10209:m7
def _digi_bounds(fmt):
if isinstance(fmt, list):<EOL><INDENT>return [_digi_bounds(f) for f in fmt]<EOL><DEDENT>if fmt == '<STR_LIT>':<EOL><INDENT>return (-<NUM_LIT>, <NUM_LIT>)<EOL><DEDENT>elif fmt == '<STR_LIT>':<EOL><INDENT>return (-<NUM_LIT>, <NUM_LIT>)<EOL><DEDENT>elif fmt == '<STR_LIT>':<EOL><INDENT>return (-<NUM_LIT>, <NUM_LIT>)<EOL><D...
Return min and max digital values for each format type. Accepts lists. Parmeters --------- fmt : str, or list The wfdb dat format, or a list of them.
f10209:m8
def _digi_nan(fmt):
if isinstance(fmt, list):<EOL><INDENT>return [_digi_nan(f) for f in fmt]<EOL><DEDENT>if fmt == '<STR_LIT>':<EOL><INDENT>return -<NUM_LIT><EOL><DEDENT>if fmt == '<STR_LIT>':<EOL><INDENT>return -<NUM_LIT><EOL><DEDENT>if fmt == '<STR_LIT>':<EOL><INDENT>return -<NUM_LIT><EOL><DEDENT>elif fmt == '<STR_LIT>':<EOL><INDENT>ret...
Return the wfdb digital value used to store nan for the format type. Parmeters --------- fmt : str, or list The wfdb dat format, or a list of them.
f10209:m9
def est_res(signals):
res_levels = np.power(<NUM_LIT:2>, np.arange(<NUM_LIT:0>, <NUM_LIT>))<EOL>if isinstance(signals, list):<EOL><INDENT>n_sig = len(signals)<EOL><DEDENT>else:<EOL><INDENT>if signals.ndim ==<NUM_LIT:1>:<EOL><INDENT>n_sig = <NUM_LIT:1><EOL><DEDENT>else:<EOL><INDENT>n_sig = signals.shape[<NUM_LIT:1>]<EOL><DEDENT><DEDENT>res =...
Estimate the resolution of each signal in a multi-channel signal in bits. Maximum of 32 bits. Parameters ---------- signals : numpy array, or list A 2d numpy array representing a uniform multichannel signal, or a list of 1d numpy arrays representing multiple channels of signals with different numbers of sa...
f10209:m10
def _wfdb_fmt(bit_res, single_fmt=True):
if isinstance(bit_res, list):<EOL><INDENT>if single_fmt:<EOL><INDENT>bit_res = [max(bit_res)] * len(bit_res)<EOL><DEDENT>return [wfdb_fmt(r) for r in bit_res]<EOL><DEDENT>if bit_res <= <NUM_LIT:8>:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>elif bit_res <= <NUM_LIT:12>:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>elif b...
Return the most suitable wfdb format(s) to use given signal resolutions. Parameters ---------- bit_res : int, or list The resolution of the signal, or a list of resolutions, in bits. single_fmt : bool, optional Whether to return the format for the maximum resolution signal. Returns ------- fmt : str or list ...
f10209:m11
def _fmt_res(fmt, max_res=False):
if isinstance(fmt, list):<EOL><INDENT>if max_res:<EOL><INDENT>bit_res = np.max([_fmt_res(f) for f in fmt if f is not None])<EOL><DEDENT>else:<EOL><INDENT>bit_res = [_fmt_res(f) for f in fmt]<EOL><DEDENT>return bit_res<EOL><DEDENT>return BIT_RES[fmt]<EOL>
Return the resolution of the WFDB dat format(s). Uses the BIT_RES dictionary, but accepts lists and other options. Parameters ---------- fmt : str The wfdb format. Can be a list of valid fmts. If it is a list, and `max_res` is True, the list may contain None. max_res : bool, optional If given a list of fmt...
f10209:m12
def _np_dtype(bit_res, discrete):
bit_res = min(bit_res, <NUM_LIT:64>)<EOL>for np_res in [<NUM_LIT:8>, <NUM_LIT:16>, <NUM_LIT:32>, <NUM_LIT:64>]:<EOL><INDENT>if bit_res <= np_res:<EOL><INDENT>break<EOL><DEDENT><DEDENT>if discrete is True:<EOL><INDENT>return '<STR_LIT:int>' + str(np_res)<EOL><DEDENT>else:<EOL><INDENT>return '<STR_LIT:float>' + str(max(n...
Given the bit resolution of a signal, return the minimum numpy dtype used to store it. Parameters ---------- bit_res : int The bit resolution. discrete : bool Whether the dtype is to be int or float. Returns ------- dtype : str String numpy dtype used to store the signal of the given resolution
f10209:m13
def wr_dat_file(file_name, fmt, d_signal, byte_offset, expanded=False,<EOL>e_d_signal=None, samps_per_frame=None, write_dir='<STR_LIT>'):
<EOL>if expanded:<EOL><INDENT>n_sig = len(e_d_signal)<EOL>sig_len = int(len(e_d_signal[<NUM_LIT:0>])/samps_per_frame[<NUM_LIT:0>])<EOL>d_signal = np.zeros((sig_len, sum(samps_per_frame)), dtype = '<STR_LIT>')<EOL>expand_ch = <NUM_LIT:0><EOL>for ch in range(n_sig):<EOL><INDENT>spf = samps_per_frame[ch]<EOL>for framenum ...
Write a dat file. All bytes are written one at a time to avoid endianness issues.
f10209:m14
def describe_list_indices(full_list):
unique_elements = []<EOL>element_indices = {}<EOL>for i in range(len(full_list)):<EOL><INDENT>item = full_list[i]<EOL>if item not in unique_elements:<EOL><INDENT>unique_elements.append(item)<EOL>element_indices[item] = [i]<EOL><DEDENT>else:<EOL><INDENT>element_indices[item].append(i)<EOL><DEDENT><DEDENT>return unique_e...
Parameters ---------- full_list : list The list of items to order and Returns ------- unique_elements : list A list of the unique elements of the list, in the order in which they first appear. element_indices : dict A dictionary of lists for each unique element, giving all the indices in which they...
f10209:m15
def _infer_sig_len(file_name, fmt, n_sig, dir_name, pb_dir=None):
if pb_dir is None:<EOL><INDENT>file_size = os.path.getsize(os.path.join(dir_name, file_name))<EOL><DEDENT>else:<EOL><INDENT>file_size = download._remote_file_size(file_name=file_name,<EOL>pb_dir=pb_dir)<EOL><DEDENT>sig_len = int(file_size / (BYTES_PER_SAMPLE[fmt] * n_sig))<EOL>return sig_len<EOL>
Infer the length of a signal from a dat file. Parameters ---------- file_name : str Name of the dat file fmt : str WFDB fmt of the dat file n_sig : int Number of signals contained in the dat file Notes ----- sig_len * n_sig * bytes_per_sample == file_size
f10209:m16
def downround(x, base):
return base * math.floor(float(x)/base)<EOL>
Round <x> down to nearest <base>
f10209:m17
def upround(x, base):
return base * math.ceil(float(x)/base)<EOL>
Round <x> up to nearest <base>
f10209:m18
def check_sig_cohesion(self, write_fields, expanded):
<EOL>if expanded:<EOL><INDENT>spf = self.samps_per_frame<EOL>for ch in range(len(spf)):<EOL><INDENT>if spf[ch] is None:<EOL><INDENT>spf[ch] = <NUM_LIT:1><EOL><DEDENT><DEDENT>if self.n_sig != len(self.e_d_signal):<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>for ch in range(self.n_sig):<EOL><INDENT>if len(self....
Check the cohesion of the d_signal/e_d_signal field with the other fields used to write the record
f10209:c0:m1
def set_p_features(self, do_dac=False, expanded=False):
if expanded:<EOL><INDENT>if do_dac:<EOL><INDENT>self.check_field('<STR_LIT>')<EOL>self.check_field('<STR_LIT>', '<STR_LIT:all>')<EOL>self.check_field('<STR_LIT>', '<STR_LIT:all>')<EOL>self.check_field('<STR_LIT>', '<STR_LIT:all>')<EOL>self.check_field('<STR_LIT>', '<STR_LIT:all>')<EOL>self.e_p_signal = self.dac(expande...
Use properties of the physical signal field to set the following features: n_sig, sig_len. Parameters ---------- do_dac : bool Whether to use the digital signal field to perform dac conversion to get the physical signal field beforehand. expanded : bool Whether to use the `e_p_signal` or `p_signal` field. ...
f10209:c0:m2
def set_d_features(self, do_adc=False, single_fmt=True, expanded=False):
if expanded:<EOL><INDENT>if do_adc:<EOL><INDENT>self.check_field('<STR_LIT>', channels='<STR_LIT:all>')<EOL>if self.fmt is None:<EOL><INDENT>if self.adc_gain is not None or self.baseline is not None:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>res = est_res(self.e_p_signal)<EOL>self.fmt = _wfdb_fmt(res, single...
Use properties of the digital signal field to set the following features: n_sig, sig_len, init_value, checksum, and possibly *(fmt, adc_gain, baseline). Parameters ---------- do_adc : bools Whether to use the physical signal field to perform adc conversion to get the digital signal field beforehand. single_fmt...
f10209:c0:m3
def adc(self, expanded=False, inplace=False):
<EOL>d_nans = _digi_nan(self.fmt)<EOL>intdtype = '<STR_LIT>'<EOL>if inplace:<EOL><INDENT>if expanded:<EOL><INDENT>for ch in range(self.n_sig):<EOL><INDENT>ch_nanlocs = np.isnan(self.e_p_signal[ch])<EOL>np.multiply(self.e_p_signal[ch], self.adc_gain[ch],<EOL>self.e_p_signal[ch])<EOL>np.add(e_p_signal[ch], self.baseline[...
Performs analogue to digital conversion of the physical signal stored in p_signal if expanded is False, or e_p_signal if expanded is True. The p_signal/e_p_signal, fmt, gain, and baseline fields must all be valid. If inplace is True, the adc will be performed inplace on the variable, the d_signal/e_d_signal attribute...
f10209:c0:m4
def dac(self, expanded=False, return_res=<NUM_LIT:64>, inplace=False):
<EOL>d_nans = _digi_nan(self.fmt)<EOL>if return_res == <NUM_LIT:64>:<EOL><INDENT>floatdtype = '<STR_LIT>'<EOL><DEDENT>elif return_res == <NUM_LIT:32>:<EOL><INDENT>floatdtype = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>floatdtype = '<STR_LIT>'<EOL><DEDENT>if inplace:<EOL><INDENT>if expanded:<EOL><INDENT>for ch in range(...
Performs the digital to analogue conversion of the signal stored in `d_signal` if expanded is False, or `e_d_signal` if expanded is True. The d_signal/e_d_signal, fmt, gain, and baseline fields must all be valid. If inplace is True, the dac will be performed inplace on the variable, the p_signal/e_p_signal attribute ...
f10209:c0:m5
def calc_adc_params(self):
adc_gains = []<EOL>baselines = []<EOL>if np.where(np.isinf(self.p_signal))[<NUM_LIT:0>].size:<EOL><INDENT>raise ValueError('<STR_LIT>')<EOL><DEDENT>minvals = np.nanmin(self.p_signal, axis=<NUM_LIT:0>)<EOL>maxvals = np.nanmax(self.p_signal, axis=<NUM_LIT:0>)<EOL>for ch in range(np.shape(self.p_signal)[<NUM_LIT:1>]):<EOL...
Compute appropriate adc_gain and baseline parameters for adc conversion, given the physical signal and the fmts. Returns ------- adc_gains : list List of calculated `adc_gain` values for each channel. baselines : list List of calculated `baseline` values for each channel. Notes ----- This is the mapping equat...
f10209:c0:m6
def calc_checksum(self, expanded=False):
if expanded:<EOL><INDENT>cs = [int(np.sum(self.e_d_signal[ch]) % <NUM_LIT>) for ch in range(self.n_sig)]<EOL><DEDENT>else:<EOL><INDENT>cs = np.sum(self.d_signal, <NUM_LIT:0>) % <NUM_LIT><EOL>cs = [int(c) for c in cs]<EOL><DEDENT>return cs<EOL>
Calculate the checksum(s) of the d_signal (expanded=False) or e_d_signal field (expanded=True)
f10209:c0:m8
def wr_dat_files(self, expanded=False, write_dir='<STR_LIT>'):
<EOL>file_names, dat_channels = describe_list_indices(self.file_name)<EOL>DAT_FMTS = {}<EOL>dat_offsets = {}<EOL>for fn in file_names:<EOL><INDENT>DAT_FMTS[fn] = self.fmt[dat_channels[fn][<NUM_LIT:0>]]<EOL>if self.byte_offset is None:<EOL><INDENT>dat_offsets[fn] = <NUM_LIT:0><EOL><DEDENT>else:<EOL><INDENT>dat_offsets[f...
Write each of the specified dat files
f10209:c0:m9
def smooth_frames(self, sigtype='<STR_LIT>'):
spf = self.samps_per_frame[:]<EOL>for ch in range(len(spf)):<EOL><INDENT>if spf[ch] is None:<EOL><INDENT>spf[ch] = <NUM_LIT:1><EOL><DEDENT><DEDENT>tspf = sum(spf)<EOL>if sigtype == '<STR_LIT>':<EOL><INDENT>n_sig = len(self.e_p_signal)<EOL>sig_len = int(len(self.e_p_signal[<NUM_LIT:0>])/spf[<NUM_LIT:0>])<EOL>signal = np...
Convert expanded signals with different samples/frame into a uniform numpy array. Input parameters - sigtype (default='physical'): Specifies whether to smooth the e_p_signal field ('physical'), or the e_d_signal field ('digital').
f10209:c0:m10
def set_db_index_url(db_index_url=PB_INDEX_URL):
config.db_index_url = db_index_url<EOL>
Set the database index url to a custom value, to stream remote files from another location. Parameters ---------- db_index_url : str, optional The desired new database index url. Leave as default to reset to the physiobank index url.
f10210:m0
def _remote_file_size(url=None, file_name=None, pb_dir=None):
<EOL>if file_name and pb_dir:<EOL><INDENT>url = posixpath.join(config.db_index_url, pb_dir, file_name)<EOL><DEDENT>response = requests.head(url, headers={'<STR_LIT>': '<STR_LIT>'})<EOL>response.raise_for_status()<EOL>remote_file_size = int(response.headers['<STR_LIT>'])<EOL>return remote_file_size<EOL>
Get the remote file size in bytes Parameters ---------- url : str, optional The full url of the file. Use this option to explicitly state the full url. file_name : str, optional The base file name. Use this argument along with pb_dir if you want the full url to be constructed. pb_dir : str, optional ...
f10210:m1
def _stream_header(file_name, pb_dir):
<EOL>url = posixpath.join(config.db_index_url, pb_dir, file_name)<EOL>response = requests.get(url)<EOL>response.raise_for_status()<EOL>filelines = response.content.decode('<STR_LIT>').splitlines()<EOL>header_lines = []<EOL>comment_lines = []<EOL>for line in filelines:<EOL><INDENT>line = str(line.strip())<EOL>if line.st...
Stream the lines of a remote header file. Parameters ---------- file_name : str pb_dir : str The Physiobank database directory from which to find the required header file. eg. For file '100.hea' in 'http://physionet.org/physiobank/database/mitdb', pb_dir='mitdb'.
f10210:m2
def _stream_dat(file_name, pb_dir, byte_count, start_byte, dtype):
<EOL>url = posixpath.join(config.db_index_url, pb_dir, file_name)<EOL>end_byte = start_byte + byte_count - <NUM_LIT:1><EOL>headers = {"<STR_LIT>":"<STR_LIT>" % (start_byte, end_byte),<EOL>'<STR_LIT>': '<STR_LIT:*>'}<EOL>response = requests.get(url, headers=headers, stream=True)<EOL>response.raise_for_status()<EOL>sig_d...
Stream data from a remote dat file, into a 1d numpy array. Parameters ---------- file_name : str The name of the dat file to be read. pb_dir : str The physiobank directory where the dat file is located. byte_count : int The number of bytes to be read. start_byte : int The starting byte number to read f...
f10210:m3
def _stream_annotation(file_name, pb_dir):
<EOL>url = posixpath.join(config.db_index_url, pb_dir, file_name)<EOL>response = requests.get(url)<EOL>response.raise_for_status()<EOL>ann_data = np.fromstring(response.content, dtype=np.dtype('<STR_LIT>'))<EOL>return ann_data<EOL>
Stream an entire remote annotation file from physiobank Parameters ---------- file_name : str The name of the annotation file to be read. pb_dir : str The physiobank directory where the annotation file is located.
f10210:m4
def get_dbs():
url = posixpath.join(config.db_index_url, '<STR_LIT>')<EOL>response = requests.get(url)<EOL>dbs = response.content.decode('<STR_LIT:ascii>').splitlines()<EOL>dbs = [re.sub('<STR_LIT>', '<STR_LIT:\t>', line).split('<STR_LIT:\t>') for line in dbs]<EOL>return dbs<EOL>
Get a list of all the Physiobank databases available. Examples -------- >>> dbs = get_dbs()
f10210:m5
def get_record_list(db_dir, records='<STR_LIT:all>'):
<EOL>db_url = posixpath.join(config.db_index_url, db_dir)<EOL>if records == '<STR_LIT:all>':<EOL><INDENT>response = requests.get(posixpath.join(db_url, '<STR_LIT>'))<EOL>if response.status_code == <NUM_LIT>:<EOL><INDENT>raise ValueError('<STR_LIT>' % db_url)<EOL><DEDENT>record_list = response.content.decode('<STR_LIT:a...
Get a list of records belonging to a database. Parameters ---------- db_dir : str The database directory, usually the same as the database slug. The location to look for a RECORDS file. records : list, optional A Option used when this function acts as a helper function. Leave as default 'all' to get al...
f10210:m6
def make_local_dirs(dl_dir, dl_inputs, keep_subdirs):
<EOL>if not os.path.isdir(dl_dir):<EOL><INDENT>os.makedirs(dl_dir)<EOL>print('<STR_LIT>' % dl_dir)<EOL><DEDENT>if keep_subdirs:<EOL><INDENT>dl_dirs = set([os.path.join(dl_dir, d[<NUM_LIT:1>]) for d in dl_inputs])<EOL>for d in dl_dirs:<EOL><INDENT>if not os.path.isdir(d):<EOL><INDENT>os.makedirs(d)<EOL><DEDENT><DEDENT><...
Make any required local directories to prepare for downloading
f10210:m8
def dl_pb_file(inputs):
basefile, subdir, db, dl_dir, keep_subdirs, overwrite = inputs<EOL>url = posixpath.join(config.db_index_url, db, subdir, basefile)<EOL>remote_file_size = _remote_file_size(url)<EOL>if keep_subdirs:<EOL><INDENT>dldir = os.path.join(dl_dir, subdir)<EOL><DEDENT>else:<EOL><INDENT>dldir = dl_dir<EOL><DEDENT>local_file = os....
Download a file from physiobank. The input args are to be unpacked for the use of multiprocessing map, because python2 doesn't have starmap...
f10210:m9
def dl_full_file(url, save_file_name):
response = requests.get(url)<EOL>with open(save_file_name, '<STR_LIT:wb>') as writefile:<EOL><INDENT>writefile.write(response.content)<EOL><DEDENT>return<EOL>
Download a file. No checks are performed. Parameters ---------- url : str The url of the file to download save_file_name : str The name to save the file as
f10210:m10
def dl_files(db, dl_dir, files, keep_subdirs=True, overwrite=False):
<EOL>db_url = posixpath.join(config.db_index_url, db)<EOL>response = requests.get(db_url)<EOL>response.raise_for_status()<EOL>dl_inputs = [(os.path.split(file)[<NUM_LIT:1>], os.path.split(file)[<NUM_LIT:0>], db, dl_dir, keep_subdirs, overwrite) for file in files]<EOL>make_local_dirs(dl_dir, dl_inputs, keep_subdirs)<EOL...
Download specified files from a Physiobank database. Parameters ---------- db : str The Physiobank database directory to download. eg. For database: 'http://physionet.org/physiobank/database/mitdb', db='mitdb'. dl_dir : str The full local directory path in which to download the files. files : list A li...
f10210:m11
def _check_item_type(item, field_name, allowed_types, expect_list=False,<EOL>required_channels='<STR_LIT:all>'):
if expect_list:<EOL><INDENT>if not isinstance(item, list):<EOL><INDENT>raise TypeError('<STR_LIT>' % field_name)<EOL><DEDENT>if required_channels == '<STR_LIT:all>':<EOL><INDENT>required_channels = list(range(len(item)))<EOL><DEDENT>for ch in range(len(item)):<EOL><INDENT>if ch in required_channels:<EOL><INDENT>allowed...
Check the item's type against a set of allowed types. Vary the print message regarding whether the item can be None. Helper to `BaseRecord.check_field`. Parameters ---------- item : any The item to check. field_name : str The field name. allowed_types : iterable Iterable of types the item is allowed to be....
f10211:m0
def check_np_array(item, field_name, ndim, parent_class, channel_num=None):
<EOL>if item.ndim != ndim:<EOL><INDENT>error_msg = '<STR_LIT>' % (field_name, ndim)<EOL>if channel_num is not None:<EOL><INDENT>error_msg = ('<STR_LIT>' % channel_num) + error_msg[<NUM_LIT:1>:]<EOL><DEDENT>raise TypeError(error_msg)<EOL><DEDENT>if not np.issubdtype(item.dtype, parent_class):<EOL><INDENT>error_msg = '<S...
Check a numpy array's shape and dtype against required specifications. Parameters ---------- item : numpy array The numpy array to check field_name : str The name of the field to check ndim : int The required number of dimensions parent_class : type The parent class of the dtype. ie. np.integer, np.flo...
f10211:m1
def rdheader(record_name, pb_dir=None, rd_segments=False):
dir_name, base_record_name = os.path.split(record_name)<EOL>dir_name = os.path.abspath(dir_name)<EOL>header_lines, comment_lines = _header._read_header_lines(base_record_name,<EOL>dir_name, pb_dir)<EOL>record_fields = _header._parse_record_line(header_lines[<NUM_LIT:0>])<EOL>if record_fields['<STR_LIT>'] is None:<EOL><...
Read a WFDB header file and return a `Record` or `MultiRecord` object with the record descriptors as attributes. Parameters ---------- record_name : str The name of the WFDB record to be read, without any file extensions. If the argument contains any path delimiter characters, the argument will be interpre...
f10211:m2
def rdrecord(record_name, sampfrom=<NUM_LIT:0>, sampto=None, channels=None,<EOL>physical=True, pb_dir=None, m2s=True, smooth_frames=True,<EOL>ignore_skew=False, return_res=<NUM_LIT:64>, force_channels=True,<EOL>channel_names=None, warn_empty=False):
dir_name, base_record_name = os.path.split(record_name)<EOL>dir_name = os.path.abspath(dir_name)<EOL>record = rdheader(record_name, pb_dir=pb_dir, rd_segments=False)<EOL>if sampto is None:<EOL><INDENT>if record.sig_len is None:<EOL><INDENT>if record.n_sig == <NUM_LIT:0>:<EOL><INDENT>record.sig_len = <NUM_LIT:0><EOL><DE...
Read a WFDB record and return the signal and record descriptors as attributes in a Record or MultiRecord object. Parameters ---------- record_name : str The name of the WFDB record to be read, without any file extensions. If the argument contains any path delimiter characters, the argument will be interpre...
f10211:m3
def rdsamp(record_name, sampfrom=<NUM_LIT:0>, sampto=None, channels=None, pb_dir=None,<EOL>channel_names=None, warn_empty=False):
record = rdrecord(record_name=record_name, sampfrom=sampfrom,<EOL>sampto=sampto, channels=channels, physical=True,<EOL>pb_dir=pb_dir, m2s=True, channel_names=channel_names,<EOL>warn_empty=warn_empty)<EOL>signals = record.p_signal<EOL>fields = {}<EOL>for field in ['<STR_LIT>','<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR...
Read a WFDB record, and return the physical signals and a few important descriptor fields. Parameters ---------- record_name : str The name of the WFDB record to be read (without any file extensions). If the argument contains any path delimiter characters, the argument will be interpreted as PATH/baserecor...
f10211:m4
def _get_wanted_channels(wanted_sig_names, record_sig_names, pad=False):
if pad:<EOL><INDENT>return [record_sig_names.index(s) if s in record_sig_names else None for s in wanted_sig_names]<EOL><DEDENT>else:<EOL><INDENT>return [record_sig_names.index(s) for s in wanted_sig_names if s in record_sig_names]<EOL><DEDENT>
Given some wanted signal names, and the signal names contained in a record, return the indices of the record channels that intersect. Parameters ---------- wanted_sig_names : list List of desired signal name strings record_sig_names : list List of signal names for a single record pad : bool, optional Wheth...
f10211:m5
def wrsamp(record_name, fs, units, sig_name, p_signal=None, d_signal=None,<EOL>fmt=None, adc_gain=None, baseline=None, comments=None,<EOL>base_time=None, base_date=None, write_dir='<STR_LIT>'):
<EOL>if p_signal is not None and d_signal is not None:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>if d_signal is not None:<EOL><INDENT>if fmt is None or adc_gain is None or baseline is None:<EOL><INDENT>raise Exception("<STR_LIT>")<EOL><DEDENT><DEDENT>if p_signal is not None:<EOL><INDENT>record = Record(recor...
Write a single segment WFDB record, creating a WFDB header file and any associated dat files. Parameters ---------- record_name : str The string name of the WFDB record to be written (without any file extensions). fs : int, or float The sampling frequency of the record. units : list A list of strings g...
f10211:m6
def is_monotonic(full_list):
prev_elements = set({full_list[<NUM_LIT:0>]})<EOL>prev_item = full_list[<NUM_LIT:0>]<EOL>for item in full_list:<EOL><INDENT>if item != prev_item:<EOL><INDENT>if item in prev_elements:<EOL><INDENT>return False<EOL><DEDENT>prev_item = item<EOL>prev_elements.add(item)<EOL><DEDENT><DEDENT>return True<EOL>
Determine whether elements in a list are monotonic, i.e. identical elements are clustered together. e.g. [5,5,3,4] is monotonic, [5,3,5] is not.
f10211:m7
def dl_database(db_dir, dl_dir, records='<STR_LIT:all>', annotators='<STR_LIT:all>',<EOL>keep_subdirs=True, overwrite=False):
<EOL>db_url = posixpath.join(download.config.db_index_url, db_dir)<EOL>r = requests.get(db_url)<EOL>r.raise_for_status()<EOL>recordlist = download.get_record_list(db_dir, records)<EOL>annotators = download.get_annotators(db_dir, annotators)<EOL>allfiles = []<EOL>for rec in recordlist:<EOL><INDENT>if rec.endswith('<STR_...
Download WFDB record (and optionally annotation) files from a Physiobank database. The database must contain a 'RECORDS' file in its base directory which lists its WFDB records. Parameters ---------- db_dir : str The Physiobank database directory to download. eg. For database: 'http://physionet.org/physiobank/...
f10211:m8
def check_field(self, field, required_channels='<STR_LIT:all>'):
item = getattr(self, field)<EOL>if item is None:<EOL><INDENT>raise Exception('<STR_LIT>' % field)<EOL><DEDENT>expect_list = True if field in LIST_FIELDS else False<EOL>_check_item_type(item, field_name=field,<EOL>allowed_types=ALLOWED_TYPES[field],<EOL>expect_list=expect_list,<EOL>required_channels=required_channels)<E...
Check whether a single field is valid in its basic form. Does not check compatibility with other fields. Parameters ---------- field : str The field name required_channels : list, optional Used for signal specification fields. All channels are checked for their integrity if present, but channels that do ...
f10211:c0:m1
def check_read_inputs(self, sampfrom, sampto, channels, physical,<EOL>smooth_frames, return_res):
<EOL>if not hasattr(sampfrom, '<STR_LIT>'):<EOL><INDENT>raise TypeError('<STR_LIT>')<EOL><DEDENT>if not hasattr(sampto, '<STR_LIT>'):<EOL><INDENT>raise TypeError('<STR_LIT>')<EOL><DEDENT>if not isinstance(channels, list):<EOL><INDENT>raise TypeError('<STR_LIT>')<EOL><DEDENT>if sampfrom < <NUM_LIT:0>:<EOL><INDENT>raise ...
Ensure that input read parameters (from rdsamp) are valid for the record
f10211:c0:m2
def _adjust_datetime(self, sampfrom):
if sampfrom:<EOL><INDENT>dt_seconds = sampfrom / self.fs<EOL>if self.base_date and self.base_time:<EOL><INDENT>self.base_datetime = datetime.datetime.combine(self.base_date,<EOL>self.base_time)<EOL>self.base_datetime += datetime.timedelta(seconds=dt_seconds)<EOL>self.base_date = self.base_datetime.date()<EOL>self.base_...
Adjust date and time fields to reflect user input if possible. Helper function for the `_arrange_fields` of both Record and MultiRecord objects.
f10211:c0:m3
def wrsamp(self, expanded=False, write_dir='<STR_LIT>'):
<EOL>self.wrheader(write_dir=write_dir)<EOL>if self.n_sig > <NUM_LIT:0>:<EOL><INDENT>self.wr_dats(expanded=expanded, write_dir=write_dir)<EOL><DEDENT>
Write a wfdb header file and any associated dat files from this object. Parameters ---------- expanded : bool, optional Whether to write the expanded signal (e_d_signal) instead of the uniform signal (d_signal). write_dir : str, optional The directory in which to write the files.
f10211:c1:m2
def _arrange_fields(self, channels, sampfrom=<NUM_LIT:0>, expanded=False):
<EOL>for field in _header.SIGNAL_SPECS.index:<EOL><INDENT>item = getattr(self, field)<EOL>setattr(self, field, [item[c] for c in channels])<EOL><DEDENT>if expanded:<EOL><INDENT>if self.sig_len != int(len(self.e_d_signal[<NUM_LIT:0>]) / self.samps_per_frame[<NUM_LIT:0>]):<EOL><INDENT>self.checksum = self.calc_checksum(e...
Arrange/edit object fields to reflect user channel and/or signal range input. Parameters ---------- channels : list List of channel numbers specified. sampfrom : int, optional Starting sample number read. expanded : bool, optional Whether the record was read in expanded mode.
f10211:c1:m3
def wrsamp(self, write_dir='<STR_LIT>'):
<EOL>self.wrheader(write_dir=write_dir)<EOL>for seg in self.segments:<EOL><INDENT>seg.wrsamp(write_dir=write_dir)<EOL><DEDENT>
Write a multi-segment header, along with headers and dat files for all segments, from this object.
f10211:c2:m1
def _check_segment_cohesion(self):
if self.n_seg != len(self.segments):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>for i in range(n_seg):<EOL><INDENT>s = self.segments[i]<EOL>if i == <NUM_LIT:0> and self.seg_len[<NUM_LIT:0>] == <NUM_LIT:0>:<EOL><INDENT>for file_name in s.file_name:<EOL><INDENT>if file_name != '<STR_LIT>':<EOL><INDENT>raise Va...
Check the cohesion of the segments field with other fields used to write the record
f10211:c2:m2
def _required_segments(self, sampfrom, sampto):
<EOL>if self.layout == '<STR_LIT>':<EOL><INDENT>startseg = <NUM_LIT:0><EOL><DEDENT>else:<EOL><INDENT>startseg = <NUM_LIT:1><EOL><DEDENT>cumsumlengths = list(np.cumsum(self.seg_len[startseg:]))<EOL>seg_numbers = [[sampfrom < cs for cs in cumsumlengths].index(True)]<EOL>if sampto == cumsumlengths[len(cumsumlengths) - <NU...
Determine the segments and the samples within each segment in a multi-segment record, that lie within a sample range. Parameters ---------- sampfrom : int The starting sample number to read for each channel. sampto : int The sample number at which to stop reading for each channel.
f10211:c2:m3
def _required_channels(self, seg_numbers, channels, dir_name, pb_dir):
<EOL>if self.layout == '<STR_LIT>':<EOL><INDENT>required_channels = [channels] * len(seg_numbers)<EOL><DEDENT>else:<EOL><INDENT>required_channels = []<EOL>l_sig_names = self.segments[<NUM_LIT:0>].sig_name<EOL>w_sig_names = [l_sig_names[c] for c in channels]<EOL>for i in range(len(seg_numbers)):<EOL><INDENT>if self.seg_...
Get the channel numbers to be read from each specified segment, given the channel numbers specified for the entire record. Parameters ---------- seg_numbers : list List of segment numbers to read. channels : list The channel indices to read for the whole record. Same one specified by user input. Returns -...
f10211:c2:m4
def _arrange_fields(self, seg_numbers, seg_ranges, channels,<EOL>sampfrom=<NUM_LIT:0>, force_channels=True):
<EOL>for i in range(len(seg_numbers)):<EOL><INDENT>self.seg_len[seg_numbers[i]] = seg_ranges[i][<NUM_LIT:1>] - seg_ranges[i][<NUM_LIT:0>]<EOL><DEDENT>if self.layout == '<STR_LIT>':<EOL><INDENT>self.n_sig = len(channels)<EOL>self.segments = self.segments[seg_numbers[<NUM_LIT:0>]:seg_numbers[-<NUM_LIT:1>]+<NUM_LIT:1>]<EO...
Arrange/edit object fields to reflect user channel and/or signal range inputs. Updates layout specification header if necessary. Parameters ---------- seg_numbers : list List of integer segment numbers read. seg_ranges: list List of integer pairs, giving the sample ranges for each segment number read. chan...
f10211:c2:m5
def multi_to_single(self, physical, return_res=<NUM_LIT:64>):
<EOL>fields = self.__dict__.copy()<EOL>for attr in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>del(fields[attr])<EOL><DEDENT>if self.layout == '<STR_LIT>':<EOL><INDENT>for attr in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:<EOL><INDENT>fields[attr] = getattr(self.segments[<N...
Create a Record object from the MultiRecord object. All signal segments will be combined into the new object's `p_signal` or `d_signal` field. For digital format, the signals must have the same storage format, baseline, and adc_gain in all segments. Parameters ---------- physical : bool Whether to convert the phys...
f10211:c2:m6
def wfdb_strptime(time_string):
n_colons = time_string.count('<STR_LIT::>')<EOL>if n_colons == <NUM_LIT:0>:<EOL><INDENT>time_fmt = '<STR_LIT>'<EOL><DEDENT>elif n_colons == <NUM_LIT:1>:<EOL><INDENT>time_fmt = '<STR_LIT>'<EOL><DEDENT>elif n_colons == <NUM_LIT:2>:<EOL><INDENT>time_fmt = '<STR_LIT>'<EOL><DEDENT>if '<STR_LIT:.>' in time_string:<EOL><INDEN...
Given a time string in an acceptable wfdb format, return a datetime.time object. Valid formats: SS, MM:SS, HH:MM:SS, all with and without microsec.
f10212:m0
def _read_header_lines(base_record_name, dir_name, pb_dir):
file_name = base_record_name + '<STR_LIT>'<EOL>if pb_dir is None:<EOL><INDENT>with open(os.path.join(dir_name, file_name), '<STR_LIT:r>') as fp:<EOL><INDENT>header_lines = []<EOL>comment_lines = []<EOL>for line in fp:<EOL><INDENT>line = line.strip()<EOL>if line.startswith('<STR_LIT:#>'):<EOL><INDENT>comment_lines.appen...
Read the lines in a local or remote header file. Parameters ---------- base_record_name : str The base name of the WFDB record to be read, without any file extensions. dir_name : str The local directory location of the header file. This parameter is ignored if `pb_dir` is set. pb_dir : str Option u...
f10212:m1
def _parse_record_line(record_line):
<EOL>record_fields = {}<EOL>(record_fields['<STR_LIT>'], record_fields['<STR_LIT>'],<EOL>record_fields['<STR_LIT>'], record_fields['<STR_LIT>'],<EOL>record_fields['<STR_LIT>'], record_fields['<STR_LIT>'],<EOL>record_fields['<STR_LIT>'], record_fields['<STR_LIT>'],<EOL>record_fields['<STR_LIT>']) = re.findall(_rx_record...
Extract fields from a record line string into a dictionary
f10212:m2
def _parse_signal_lines(signal_lines):
n_sig = len(signal_lines)<EOL>signal_fields = {}<EOL>for field in SIGNAL_SPECS.index:<EOL><INDENT>signal_fields[field] = n_sig * [None]<EOL><DEDENT>for ch in range(n_sig):<EOL><INDENT>(signal_fields['<STR_LIT>'][ch], signal_fields['<STR_LIT>'][ch],<EOL>signal_fields['<STR_LIT>'][ch], signal_fields['<STR_LIT>'][ch],<EOL...
Extract fields from a list of signal line strings into a dictionary.
f10212:m3