docstring stringlengths 52 499 | function stringlengths 67 35.2k | __index_level_0__ int64 52.6k 1.16M |
|---|---|---|
def log(self, msg):
    """Log a text message, prefixed with the current time stamp.

    Args:
        msg: the text message to be logged
    """
    stamped = "{:s}\t {:s}".format(self.get_time(), msg)
    self.history.append(stamped)
    # Newest entries go on top of the history view.
    self.history_model.insertRow(0, QtGui.QStandardItem(stamped))
def plot_script(self, script):
    """Call the plot function of the script and redraw both plot widgets.

    Args:
        script: script to be plotted
    """
    figures = [self.matplotlibwidget_1.figure, self.matplotlibwidget_2.figure]
    script.plot(figures)
    for widget in (self.matplotlibwidget_1, self.matplotlibwidget_2):
        widget.draw()
waits for a signal emitted from a thread and updates the gui
Args:
progress:
Returns: | def update_status(self, progress):
# interval at which the gui will be updated, if requests come in faster than they will be ignored
update_interval = 0.2
now = datetime.datetime.now()
if not self._last_progress_update is None and now-self._last_progress_update < datetime.tim... | 850,365 |
def plot_script_validate(self, script):
    """Check the plottype of the script and plot it accordingly.

    Args:
        script: script to be plotted
    """
    figures = [self.matplotlibwidget_1.figure, self.matplotlibwidget_2.figure]
    script.plot_validate(figures)
    for widget in (self.matplotlibwidget_1, self.matplotlibwidget_2):
        widget.draw()
updates the script based on the information provided in item
Args:
script: script to be updated
item: B26QTreeItem that contains the new settings of the script | def update_script_from_item(self, item):
script, path_to_script, script_item = item.get_script()
# build dictionary
# get full information from script
dictator = list(script_item.to_dict().values())[0] # there is only one item in the dictionary
for instrument in list... | 850,369 |
fills a QTreeWidget with nested parameters, in future replace QTreeWidget with QTreeView and call fill_treeview
Args:
tree: QtWidgets.QTreeWidget
parameters: dictionary or Parameter object
show_all: boolean if true show all parameters, if false only selected ones
Retu... | def fill_treewidget(self, tree, parameters):
tree.clear()
assert isinstance(parameters, (dict, Parameter))
for key, value in parameters.items():
if isinstance(value, Parameter):
B26QTreeItem(tree, key, value, parameters.valid_values[key], parameters.info[ke... | 850,370 |
fills a treeview with nested parameters
Args:
tree: QtWidgets.QTreeView
parameters: dictionary or Parameter object
Returns: | def fill_treeview(self, tree, input_dict):
tree.model().removeRows(0, tree.model().rowCount())
def add_element(item, key, value):
child_name = QtWidgets.QStandardItem(key)
if isinstance(value, dict):
for key_child, value_child in value.items():
... | 850,371 |
refresh trees with current settings
Args:
tree: a QtWidgets.QTreeWidget object or a QtWidgets.QTreeView object
items: dictionary or Parameter items with which to populate the tree
show_all: boolean if true show all parameters, if false only selected ones | def refresh_tree(self, tree, items):
if tree == self.tree_scripts or tree == self.tree_settings:
tree.itemChanged.disconnect()
self.fill_treewidget(tree, items)
tree.itemChanged.connect(lambda: self.update_parameters(tree))
elif tree == self.tree_gui_setting... | 850,373 |
fills the tree with data sets where datasets is a dictionary of the form
Args:
tree:
data_sets: a dataset
Returns: | def fill_dataset_tree(self, tree, data_sets):
tree.model().removeRows(0, tree.model().rowCount())
for index, (time, script) in enumerate(data_sets.items()):
name = script.settings['tag']
type = script.name
item_time = QtGui.QStandardItem(str(time))
... | 850,374 |
checks if the file is a valid config file
Args:
filepath: | def load_config(self, filepath=None):
# load config or default if invalid
def load_settings(filepath):
instruments_loaded = {}
probes_loaded = {}
scripts_loaded = {}
if filepath and os.path.isfile(filepath):
in_dat... | 850,375 |
hide the parameters that had been hidden
Args:
file_name: config file that has the information about which parameters are hidden | def _hide_parameters(self, file_name):
try:
in_data = load_b26_file(file_name)
except:
in_data = {}
def set_item_visible(item, is_visible):
if isinstance(is_visible, dict):
for child_id in range(item.childCount()):
... | 850,376 |
saves gui configuration to out_file_name
Args:
filepath: name of file | def save_config(self, filepath):
def get_hidden_parameter(item):
num_sub_elements = item.childCount()
if num_sub_elements == 0:
dictator = {item.name : item.visible}
else:
dictator = {item.name:{}}
for child_id in ran... | 850,377 |
def save_dataset(self, out_file_name):
    """Save every loaded dataset into the directory out_file_name.

    Args:
        out_file_name: name of target directory; each dataset is written
            as '<time_tag>.b26s' inside it.
    """
    for time_tag, script in self.data_sets.items():
        target = os.path.join(out_file_name, '{:s}.b26s'.format(time_tag))
        script.save(target)
If there is not currently a selected NV within self.settings[patch_size] of pt, adds it to the selected list. If
there is, removes that point from the selected list.
Args:
pt: the point to add or remove from the selected list
Poststate: updates selected list | def toggle_NV(self, pt):
if not self.data['nv_locations']: #if self.data is empty so this is the first point
self.data['nv_locations'].append(pt)
self.data['image_data'] = None # clear image data
else:
# use KDTree to find NV closest to mouse click
... | 850,479 |
Compress dinf flow direction to D8 direction with weight follows ArcGIS D8 codes.
Args:
angle: D-inf flow direction angle
nodata: NoData value
Returns:
1. Updated Dinf values
2. Compressed flow direction follows ArcGIS D8 codes rule
3. Weight ... | def compress_dinf(angle, nodata):
if MathClass.floatequal(angle, nodata):
return DEFAULT_NODATA, DEFAULT_NODATA, DEFAULT_NODATA
taud, d = DinfUtil.check_orthogonal(angle)
if d != -1:
return taud, d, 1
if angle < FlowModelConst.ne:
a1 = angle
... | 850,573 |
Output compressed Dinf flow direction and weight to raster file
Args:
dinfflowang: Dinf flow direction raster file
compdinffile: Compressed D8 flow code
weightfile: The correspond weight | def output_compressed_dinf(dinfflowang, compdinffile, weightfile):
dinf_r = RasterUtilClass.read_raster(dinfflowang)
data = dinf_r.data
xsize = dinf_r.nCols
ysize = dinf_r.nRows
nodata_value = dinf_r.noDataValue
cal_dir_code = frompyfunc(DinfUtil.compress_dinf, ... | 850,574 |
Get the downslope directions of an dinf direction value
Args:
a: Dinf value
Returns:
downslope directions | def dinf_downslope_direction(a):
taud, d = DinfUtil.check_orthogonal(a)
if d != -1:
down = [d]
return down
else:
if a < FlowModelConst.ne: # 129 = 1+128
down = [1, 2]
elif a < FlowModelConst.n: # 192 = 128+64
... | 850,575 |
def downstream_index_dinf(dinfdir_value, i, j):
    """Find downslope coordinates for a Dinf (TauDEM) direction value.

    Args:
        dinfdir_value: dinf direction value
        i: current row
        j: current col

    Returns:
        list of downstream [row, col] pairs
    """
    codes = DinfUtil.dinf_downslope_direction(dinfdir_value)
    return [list(D8Util.downstream_index(code, i, j)) for code in codes]
Eliminate reach with zero length and return the reach ID map.
Args:
streamnet_file: original stream net ESRI shapefile
output_reach_file: serialized stream net, ESRI shapefile
Returns:
id pairs {origin: newly assigned} | def serialize_streamnet(streamnet_file, output_reach_file):
FileClass.copy_files(streamnet_file, output_reach_file)
ds_reach = ogr_Open(output_reach_file, update=True)
layer_reach = ds_reach.GetLayer(0)
layer_def = layer_reach.GetLayerDefn()
i_link = layer_def.GetFieldIn... | 850,577 |
Assign stream link ID according to subbasin ID.
Args:
stream_file: input stream raster file
subbasin_file: subbasin raster file
out_stream_file: output stream raster file | def assign_stream_id_raster(stream_file, subbasin_file, out_stream_file):
stream_raster = RasterUtilClass.read_raster(stream_file)
stream_data = stream_raster.data
nrows = stream_raster.nRows
ncols = stream_raster.nCols
nodata = stream_raster.noDataValue
subbain_... | 850,578 |
def update(self, settings):
    """Update the internal dictionary and send changed values to the instrument.

    Args:
        settings: parameters to be set
    """
    Instrument.update(self, settings)
    # Only the 'test1' parameter is mirrored into the internal state.
    if 'test1' in settings:
        self._internal_state = settings['test1']
requestes value from the instrument and returns it
Args:
key: name of requested value
Returns: reads values from instrument | def read_probes(self, key):
assert key in list(self._PROBES.keys())
import random
if key == 'value1':
value = random.random()
elif key == 'value2':
value = self.settings['output probe2']
elif key == 'internal':
value = self._internal_... | 850,581 |
def read_probes(self, key):
    """Request a value from the instrument and return it.

    Args:
        key: name of requested value

    Returns:
        the value read from the instrument

    NOTE(review): if _PROBES ever contains a key other than 'output',
    this raises NameError because no value is bound on that path --
    TODO confirm _PROBES only holds 'output'.
    """
    assert key in list(self._PROBES.keys())
    if key == 'output':
        probe_value = self._output
    return probe_value
if value is valid sets the data to value
Args:
column: column of item
role: role of item (see Qt doc)
value: value to be set | def setData(self, column, role, value):
assert isinstance(column, int)
assert isinstance(role, int)
# make sure that the right row is selected, this is not always the case for checkboxes and
# combo boxes because they are items on top of the tree structure
if isinstance... | 850,612 |
cast the value into the type typ
if type is not provided it is set to self.valid_values
Args:
var: variable to be cast
type: target type
Returns: the variable var csat into type typ | def cast_type(self, var, cast_type=None):
if cast_type is None:
cast_type = self.valid_values
try:
if cast_type == int:
return int(var)
elif cast_type == float:
return float(var)
elif type == str:
... | 850,613 |
finds the item that contains the sub_script with name sub_script_name
Args:
sub_script_name: name of subscript
Returns: B26QTreeItem in QTreeWidget which is a script | def get_subscript(self, sub_script_name):
# get tree of item
tree = self.treeWidget()
items = tree.findItems(sub_script_name, QtCore.Qt.MatchExactly | QtCore.Qt.MatchRecursive)
if len(items) >= 1:
# identify correct script by checking that it is a sub_element of t... | 850,616 |
def is_point(self):
    """Determine whether this item represents a point, i.e. it has exactly
    two sub-elements and both are of type float.

    Returns:
        bool: True if the item is a point, False otherwise.
    """
    # Bug fix: the original returned None (implicitly) when childCount()
    # was not 2; return an explicit bool on every path.
    if self.childCount() != 2:
        return False
    return (self.child(0).valid_values == float
            and self.child(1).valid_values == float)
Get raster value by (row, col).
Args:
row: row number.
col: col number.
Returns:
raster value, None if the input are invalid. | def get_value_by_row_col(self, row, col):
if row < 0 or row >= self.nRows or col < 0 or col >= self.nCols:
raise ValueError("The row or col must be >=0 and less than "
"nRows (%d) or nCols (%d)!" % (self.nRows, self.nCols))
else:
value = self... | 850,697 |
Get raster value by xy coordinates.
Args:
x: X Coordinate.
y: Y Coordinate.
Returns:
raster value, None if the input are invalid. | def get_value_by_xy(self, x, y):
if x < self.xMin or x > self.xMax or y < self.yMin or y > self.yMax:
return None
# raise ValueError("The x or y value must be within the Min and Max!")
else:
row = self.nRows - int(numpy.ceil((y - self.yMin) / self.dx))
... | 850,698 |
Get the coordinates of central grid.
Args:
row: row number, range from 0 to (nRows - 1).
col: col number, range from 0 to (nCols - 1).
Returns:
XY coordinates. If the row or col are invalid, raise ValueError. | def get_central_coors(self, row, col):
if row < 0 or row >= self.nRows or col < 0 or col >= self.nCols:
raise ValueError("The row (%d) or col (%d) must be >=0 and less than "
"nRows (%d) or nCols (%d)!" % (row, col, self.nRows, self.nCols))
else:
... | 850,699 |
Read raster by GDAL.
Args:
raster_file: raster file path.
Returns:
Raster object. | def read_raster(raster_file):
ds = gdal_Open(raster_file)
band = ds.GetRasterBand(1)
data = band.ReadAsArray()
xsize = band.XSize
ysize = band.YSize
nodata_value = band.GetNoDataValue()
geotrans = ds.GetGeoTransform()
dttype = band.DataType
... | 850,700 |
Generate mask data from a given raster data.
Args:
rasterfile: raster file path.
outmaskfile: output mask file path.
Returns:
Raster object of mask data. | def get_mask_from_raster(rasterfile, outmaskfile, keep_nodata=False):
raster_r = RasterUtilClass.read_raster(rasterfile)
xsize = raster_r.nCols
ysize = raster_r.nRows
nodata_value = raster_r.noDataValue
srs = raster_r.srs
x_min = raster_r.xMin
y_max = ras... | 850,701 |
Reclassify raster by given classifier dict.
Args:
srcfile: source raster file.
v_dict: classifier dict.
dstfile: destination file path.
gdaltype (:obj:`pygeoc.raster.GDALDataType`): GDT_Float32 as default. | def raster_reclassify(srcfile, v_dict, dstfile, gdaltype=GDT_Float32):
src_r = RasterUtilClass.read_raster(srcfile)
src_data = src_r.data
dst_data = numpy.copy(src_data)
if gdaltype == GDT_Float32 and src_r.dataType != GDT_Float32:
gdaltype = src_r.dataType
n... | 850,702 |
Output Raster to GeoTiff format file.
Args:
f_name: output gtiff file name.
n_rows: Row count.
n_cols: Col count.
data: 2D array data.
geotransform: geographic transformation.
srs: coordinate system.
nodata_value: nodata value.... | def write_gtiff_file(f_name, n_rows, n_cols, data, geotransform, srs, nodata_value,
gdal_type=GDT_Float32):
UtilClass.mkdir(os.path.dirname(FileClass.get_file_fullpath(f_name)))
driver = gdal_GetDriverByName(str('GTiff'))
try:
ds = driver.Create(f_na... | 850,703 |
Output Raster to ASCII file.
Args:
filename: output ASCII filename.
data: 2D array data.
xsize: Col count.
ysize: Row count.
geotransform: geographic transformation.
nodata_value: nodata_flow value. | def write_asc_file(filename, data, xsize, ysize, geotransform, nodata_value):
UtilClass.mkdir(os.path.dirname(FileClass.get_file_fullpath(filename)))
header = 'NCOLS %d\n' \
'NROWS %d\n' \
'XLLCENTER %f\n' \
'YLLCENTER %f\n' \
... | 850,704 |
Converting Raster format to GeoTIFF.
Args:
tif: source raster file path.
geotif: output raster file path.
change_nodata: change NoDataValue to -9999 or not.
gdal_type (:obj:`pygeoc.raster.GDALDataType`): GDT_Float32 as default.
change_gdal_type: If Tr... | def raster_to_gtiff(tif, geotif, change_nodata=False, change_gdal_type=False):
rst_file = RasterUtilClass.read_raster(tif)
nodata = rst_file.noDataValue
if change_nodata:
if not MathClass.floatequal(rst_file.noDataValue, DEFAULT_NODATA):
nodata = DEFAULT_NODA... | 850,705 |
def raster_to_asc(raster_f, asc_f):
    """Convert a raster file into ASCII raster format.

    Args:
        raster_f: raster file.
        asc_f: output ASCII file.
    """
    rst = RasterUtilClass.read_raster(raster_f)
    RasterUtilClass.write_asc_file(asc_f, rst.data, rst.nCols, rst.nRows,
                                   rst.geotrans, rst.noDataValue)
def raster_statistics(raster_file):
    """Get basic statistics of raster data.

    Args:
        raster_file: raster file path.

    Returns:
        tuple: (min, max, mean, std) of the first raster band.
    """
    dataset = gdal_Open(raster_file)
    first_band = dataset.GetRasterBand(1)
    minv, maxv, meanv, std = first_band.ComputeStatistics(False)
    return minv, maxv, meanv, std
Split raster by given shapefile and field name.
Args:
rs: origin raster file.
split_shp: boundary (ESRI Shapefile) used to spilt raster.
field_name: field name identify the spilt value.
temp_dir: directory to store the spilt rasters. | def split_raster(rs, split_shp, field_name, temp_dir):
UtilClass.rmmkdir(temp_dir)
ds = ogr_Open(split_shp)
lyr = ds.GetLayer(0)
lyr.ResetReading()
ft = lyr.GetNextFeature()
while ft:
cur_field_name = ft.GetFieldAsString(field_name)
for r ... | 850,708 |
Mask raster data.
Args:
in_raster: list or one raster
mask: Mask raster data
out_raster: list or one raster | def mask_raster(in_raster, mask, out_raster):
if is_string(in_raster) and is_string(out_raster):
in_raster = [str(in_raster)]
out_raster = [str(out_raster)]
if len(in_raster) != len(out_raster):
raise RuntimeError('input raster and output raster must have the... | 850,710 |
def raster_binarization(given_value, rasterfilename):
    """Binarize a raster for morphological operations.

    Pixels equal to given_value become 1, all other pixels become 0.

    Args:
        given_value: pixel value mapped to 1.
        rasterfilename: raster file to binarize.

    Returns:
        numpy.ndarray: binary raster array.
    """
    source = RasterUtilClass.read_raster(rasterfilename)
    hit_mask = source.data == given_value
    return numpy.where(hit_mask, 1, 0)
Erode the raster image.
Find the min pixel's value in 8-neighborhood. Then change the compute
pixel's value into the min pixel's value.
Args:
rasterfile: input original raster image, type can be filename(string,
like "test1.tif"), rasterfile(class Raster) or numpy.nda... | def raster_erosion(rasterfile):
if is_string(rasterfile):
origin_raster = RasterUtilClass.read_raster(str(rasterfile))
elif isinstance(rasterfile, Raster):
origin_raster = rasterfile.data
elif isinstance(rasterfile, numpy.ndarray):
origin_raster = ras... | 850,712 |
Dilate the raster image.
Find the max pixel's value in 8-neighborhood. Then change the compute
pixel's value into the max pixel's value.
Args:
rasterfile: input original raster image, type can be filename(string,
like "test1.tif"), rasterfile(class Raster) or numpy.nd... | def raster_dilation(rasterfile):
if is_string(rasterfile):
origin_raster = RasterUtilClass.read_raster(str(rasterfile))
elif isinstance(rasterfile, Raster):
origin_raster = rasterfile.data
elif isinstance(rasterfile, numpy.ndarray):
origin_raster = ra... | 850,713 |
Do openning.
Openning: Erode firstly, then Dilate.
Args:
input_rasterfilename: input original raster image filename.
times: Erode and Dilate times.
Returns:
openning_raster: raster image after open. | def openning(input_rasterfilename, times):
input_raster = RasterUtilClass.read_raster(input_rasterfilename)
openning_raster = input_raster
for i in range(times):
openning_raster = RasterUtilClass.raster_erosion(openning_raster)
for i in range(times):
open... | 850,714 |
Do closing.
Closing: Dilate firstly, then Erode.
Args:
input_rasterfilename: input original raster image filename.
times: Erode and Dilate times.
Returns:
closing_raster: raster image after close. | def closing(input_rasterfilename, times):
input_raster = RasterUtilClass.read_raster(input_rasterfilename)
closing_raster = input_raster
for i in range(times):
closing_raster = RasterUtilClass.raster_dilation(closing_raster)
for i in range(times):
closing... | 850,715 |
When each subscript is called, uses its standard plotting
Args:
figure_list: list of figures passed from the guit | def plot(self, figure_list):
#TODO: be smarter about how we plot ScriptIterator
if self._current_subscript_stage is not None:
if self._current_subscript_stage['current_subscript'] is not None:
self._current_subscript_stage['current_subscript'].plot(figure_list)
... | 850,760 |
assigning the actual script settings depending on the iterator type
this might be overwritten by classes that inherit form ScriptIterator
Args:
sub_scripts: dictionary with the subscripts
script_order: execution order of subscripts
script_execution_freq: execution f... | def get_default_settings(sub_scripts, script_order, script_execution_freq, iterator_type):
def populate_sweep_param(scripts, parameter_list, trace=''):
def get_parameter_from_dict(trace, dic, parameter_list, valid_values=None):
if valid_values ... | 850,761 |
Initializes the log file in the proper format.
Arguments:
filename (str): Path to a file. Or None if logging is to
be disabled.
loglevel (str): Determines the level of the log output. | def init_log(logger, filename=None, loglevel=None):
formatter = logging.Formatter(
'[%(asctime)s] %(levelname)s: %(name)s: %(message)s'
)
if loglevel:
logger.setLevel(getattr(logging, loglevel))
# We will allways print warnings and higher to stderr... | 850,854 |
gets activated when the user clicks on a plot
Args:
mouse_event: | def plot_clicked(self, mouse_event):
if isinstance(self.current_script, SelectPoints) and self.current_script.is_running:
if (not (mouse_event.xdata == None)):
if (mouse_event.button == 1):
pt = np.array([mouse_event.xdata, mouse_event.ydata])
... | 850,858 |
checks if the file is a valid config file
Args:
file_name: | def load_config(self, file_name):
# load config or default if invalid
def load_settings(file_name):
instruments_loaded = {}
probes_loaded = {}
scripts_loaded = {}
if os.path.isfile(file_name):
in_data = load_b26_fi... | 850,861 |
saves gui configuration to out_file_name
Args:
out_file_name: name of file | def save_config(self, out_file_name):
def get_hidden_parameter(item):
numer_of_sub_elements = item.childCount()
if numer_of_sub_elements == 0:
dictator = {item.name : item.visible}
else:
dictator = {item.name:{}}
for... | 850,862 |
returns the *single sided* power spectral density of the time trace x which is sampled at intervals time_step
Args:
x (array): timetrace
time_step (float): sampling interval of x
freq_range (array or tuple): frequency range in the form [f_min, f_max] to return only the spectrum within this ... | def power_spectral_density(x, time_step, freq_range = None):
N = len(x)
P = 2 * np.abs(np.fft.rfft(x))**2 / N * time_step
F = np.fft.rfftfreq(len(x), time_step)
if freq_range is not None:
brange = np.all([F >= freq_range[0], F <= freq_range[1]], axis=0)
P = P[brange]
F = F[... | 850,911 |
creates a probe...
Args:
name (optinal): name of probe, if not provided take name of function
settings (optinal): a Parameter object that contains all the information needed in the script | def __init__(self, instrument, probe_name, name = None, info = None, buffer_length = 100):
assert isinstance(instrument, Instrument)
assert isinstance(probe_name, str)
assert probe_name in instrument._PROBES
if name is None:
name = probe_name
assert isins... | 850,917 |
tries to instantiate all the instruments that are imported in /instruments/__init__.py
and saves instruments that could be instantiate into a .b2 file in the folder path
Args:
target_folder: target path for .b26 files | def export_default_instruments(target_folder, source_folder = None, raise_errors = False, verbose=True):
print('export_def_instr called')
instruments_to_load = get_classes_in_folder(source_folder, Instrument, verbose = True)
print('instruments to load:')
print(instruments_to_load)
if verbose:
... | 850,971 |
Add arguments to the parser for collection in app.args.
Args:
parser:
`argparse.ArgumentParser`. Parser.
Arguments added here are server on
self.args. | def add_arguments(cls, parser):
parser.add_argument(
'-t', '--title',
action='store',
nargs='?',
const='',
dest='title',
help="[issue] task/issue title.",
)
parser.add_argument(
'-b', '--body',
... | 851,105 |
Determine the format of an audio file.
Parameters:
data (bytes-like object, str, os.PathLike, or file-like object):
A bytes-like object, filepath, path-like object
or file-like object of an audio file.
extension (str): The file extension of the file.
Used as a tie-breaker for formats that can
be used ... | def determine_format(data, extension=None):
if isinstance(data, (os.PathLike, str)):
data = open(data, 'rb')
data_reader = DataReader(data)
data_reader.seek(0, os.SEEK_SET)
d = data_reader.read(4)
if d.startswith((b'ID3', b'\xFF\xFB')): # TODO: Catch all MP3 possibilities.
if extension is None or extensi... | 851,305 |
Load audio metadata from filepath or file-like object.
Parameters:
f (str, os.PathLike, or file-like object):
A filepath, path-like object or file-like object of an audio file.
Returns:
Format: An audio format object.
Raises:
UnsupportedFormat: If file is not of a supported format.
ValueError: If filep... | def load(f):
if isinstance(f, (os.PathLike, str)):
fileobj = open(f, 'rb')
else:
try:
f.read(0)
except AttributeError:
raise ValueError("Not a valid file-like object.")
except Exception:
raise ValueError("Can't read from file-like object.")
fileobj = f
parser_cls = determine_format(fileobj, o... | 851,306 |
def loads(b):
    """Load audio metadata from a bytes-like object.

    Parameters:
        b (bytes-like object): A bytes-like object of an audio file.

    Returns:
        Format: An audio format object.

    Raises:
        UnsupportedFormat: If file is not of a supported format.
    """
    parser = determine_format(b)
    if parser is not None:
        return parser.load(b)
    raise UnsupportedFormat("Supported format signature not found.")
Add arguments to the parser for collection in app.args.
Args:
parser:
`argparse.ArgumentParser`. Parser.
Arguments added here are server on
self.args. | def add_arguments(cls, parser):
parser.add_argument(
'-as-api', '--asana-api',
action='store',
nargs='?',
const='',
dest='asana_api',
help="[setting] asana api key.",
)
parser.add_argument(
'-gh-ap... | 851,472 |
def _get_bit(self, n, hash_bytes):
    """Determine whether the n-th bit of the passed bytes is 1 or 0.

    Bits are counted from the most significant bit of byte 0.

    Arguments:
        n - index of the bit to check.
        hash_bytes - list of hash byte values; each element should be an
            integer from 0 to 255.

    Returns:
        True if the bit is 1, False if the bit is 0.
    """
    byte = hash_bytes[n // 8]
    shift = 7 - (n % 8)
    return (byte >> shift) & 1 == 1
Generates matrix that describes which blocks should be coloured.
Arguments:
hash_bytes - List of hash byte values for which the identicon is being
generated. Each element of the list should be an integer from 0 to
255.
Returns:
List of rows, where each element i... | def _generate_matrix(self, hash_bytes):
# Since the identicon needs to be symmetric, we'll need to work on half
# the columns (rounded-up), and reflect where necessary.
half_columns = self.columns // 2 + self.columns % 2
cells = self.rows * half_columns
# Initialise th... | 851,617 |
Saves a issue data (tasks, etc.) to local data.
Args:
issue:
`int`. Github issue number.
task:
`int`. Asana task ID.
namespace:
`str`. Namespace for storing this issue. | def save_issue_data_task(self, issue, task_id, namespace='open'):
issue_data = self.get_saved_issue_data(issue, namespace)
if not issue_data.has_key('tasks'):
issue_data['tasks'] = [task_id]
elif task_id not in issue_data['tasks']:
issue_data['tasks'].append(ta... | 851,631 |
Returns issue data from local data.
Args:
issue:
`int`. Github issue number.
namespace:
`str`. Namespace for storing this issue. | def get_saved_issue_data(self, issue, namespace='open'):
if isinstance(issue, int):
issue_number = str(issue)
elif isinstance(issue, basestring):
issue_number = issue
else:
issue_number = issue.number
issue_data_key = self._issue_data_key(na... | 851,633 |
Returns task data from local data.
Args:
task:
`int`. Asana task number. | def get_saved_task_data(self, task):
if isinstance(task, int):
task_number = str(task)
elif isinstance(task, basestring):
task_number = task
else:
task_number = task['id']
task_data_key = self._task_data_key()
task_data = self.data.g... | 851,636 |
def __init__(self, filename, args, version):
    """Initialise the store, loading existing JSON data when available.

    Args:
        filename:
            Filename for database.
        args:
            Program arguments.
        version:
            Version of file.
    """
    self.args = args
    self.version = version
    self.filename = filename
    try:
        with open(self.filename, 'rb') as fp:
            self.data = json.load(fp)
    except IOError:
        # No database file yet -- start with an empty store.
        self.data = {}
Add arguments to the parser for collection in app.args.
Args:
parser:
`argparse.ArgumentParser`. Parser.
Arguments added here are server on
self.args. | def add_arguments(cls, parser):
parser.add_argument(
'-i', '--issue',
action='store',
nargs='?',
const='',
dest='issue',
help="[pr] issue #",
)
parser.add_argument(
'-br', '--branch',
a... | 851,669 |
def ekm_log(logstr, priority=3):
    """Send string to module level log.

    Args:
        logstr (str): string to print.
        priority (int): priority, supports 3 (default) and 4 (special).
    """
    # Messages above the module log level are dropped silently.
    if priority <= ekmmeters_log_level:
        # Removed unused local alias `dt` and dead trailing `pass`.
        stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M.%f")
        ekmmeters_log_func("[EKM Meter Debug Message: " + stamp + "] -> " + logstr)
def write(self, output):
    """Passthrough for pyserial Serial.write().

    Args:
        output (str): Block to write to port.
    """
    # Non-ASCII characters are dropped before hitting the wire.
    view_str = output.encode('ascii', 'ignore')
    if view_str:  # idiomatic truthiness instead of len() > 0; dead `pass` removed
        self.m_ser.write(view_str)
        self.m_ser.flush()
        # Discard anything that arrived before this command was issued.
        self.m_ser.reset_input_buffer()
        # Give the meter time to process before the next command.
        time.sleep(self.m_force_wait)
def setPollingValues(self, max_waits, wait_sleep):
    """Optional polling loop control.

    Args:
        max_waits (int): maximum number of wait intervals per poll.
        wait_sleep (int): ms slept per wait interval.
    """
    self.m_max_waits, self.m_wait_sleep = max_waits, wait_sleep
Poll for finished block or first byte ACK.
Args:
context (str): internal serial call context.
Returns:
string: Response, implict cast from byte array. | def getResponse(self, context=""):
waits = 0 # allowed interval counter
response_str = "" # returned bytes in string default
try:
waits = 0 # allowed interval counter
while (waits < self.m_max_waits):
bytes_to_read = self.m_ser.inWaiting()
... | 851,865 |
Translate FieldType to portable SQL Type. Override if needful.
Args:
fld_type (int): :class:`~ekmmeters.FieldType` in serial block.
fld_len (int): Binary length in serial block
Returns:
string: Portable SQL type and length where appropriate. | def mapTypeToSql(fld_type=FieldType.NoType, fld_len=0):
if fld_type == FieldType.Float:
return "FLOAT"
elif fld_type == FieldType.String:
return "VARCHAR(" + str(fld_len) + ")"
elif fld_type == FieldType.Int:
return "INT"
elif fld_type == Fiel... | 851,868 |
Return query portion below CREATE.
Args:
qry_str (str): String as built.
Returns:
string: Passed string with fields appended. | def fillCreate(self, qry_str):
count = 0
for fld in self.m_all_fields:
fld_type = self.m_all_fields[fld][MeterData.TypeValue]
fld_len = self.m_all_fields[fld][MeterData.SizeValue]
qry_spec = self.mapTypeToSql(fld_type, fld_len)
if count > 0:
... | 851,869 |
Reasonably portable SQL INSERT for from combined read buffer.
Args:
def_buf (SerialBlock): Database only serial block of all fields.
raw_a (str): Raw A read as hex string.
raw_b (str): Raw B read (if exists, otherwise empty) as hex string.
Returns:
str: S... | def sqlInsert(def_buf, raw_a, raw_b):
count = 0
qry_str = "INSERT INTO Meter_Reads ( \n\t"
for fld in def_buf:
if count > 0:
qry_str += ", \n\t"
qry_str = qry_str + fld
count += 1
qry_str += (",\n\t" + Field.Time_Stamp + ", \n... | 851,871 |
def dbInsert(self, def_buf, raw_a, raw_b):
    """Build the INSERT statement for one read and run it via dbExec().

    Args:
        def_buf (SerialBlock): Block of read buffer fields to write.
        raw_a (str): Hex string of raw A read.
        raw_b (str): Hex string of raw B read or empty.
    """
    insert_qry = self.sqlInsert(def_buf, raw_a, raw_b)
    self.dbExec(insert_qry)
Required override of dbExec() from MeterDB(), run query.
Args:
query_str (str): query to run | def dbExec(self, query_str):
try:
connection = sqlite3.connect(self.m_connection_string)
cursor = connection.cursor()
cursor.execute(query_str)
connection.commit()
cursor.close()
connection.close()
return True
e... | 851,873 |
Sqlite callback accepting the cursor and the original row as a tuple.
Simple return of JSON safe types.
Args:
cursor (sqlite cursor): Original cursory
row (sqlite row tuple): Original row.
Returns:
dict: modified row. | def dict_factory(self, cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
val = row[idx]
name = col[0]
if name == Field.Time_Stamp:
d[col[0]] = str(val)
continue
if name == "Raw_A" or name == "Raw_B... | 851,874 |
Sqlite callback accepting the cursor and the original row as a tuple.
Simple return of JSON safe types, including raw read hex strings.
Args:
cursor (sqlite cursor): Original cursory
row (sqlite row tuple): Original row.
Returns:
dict: modified row. | def raw_dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
val = row[idx]
name = col[0]
if name == Field.Time_Stamp or name == Field.Meter_Address:
d[name] = str(val)
continue
if name =... | 851,875 |
Simple since Time_Stamp query returned as JSON records.
Args:
timestamp (int): Epoch time in seconds.
meter (str): 12 character meter address to query
Returns:
str: JSON rendered read records. | def renderJsonReadsSince(self, timestamp, meter):
result = ""
try:
connection = sqlite3.connect(self.m_connection_string)
connection.row_factory = self.dict_factory
select_cursor = connection.cursor()
select_cursor.execute("select * from Meter_Rea... | 851,876 |
Set context string for serial command. Private setter.
Args:
def setContext(self, context_str):
    """Set context string for serial command. Private setter.

    Logs the new context once (on first assignment) unless it is a
    plain "request" command; always stores the passed value.

    Args:
        context_str (str): Command specific string.
    """
    no_context_yet = len(self.m_context) == 0
    if no_context_yet and len(context_str) >= 7:
        if not context_str.startswith("request"):
            ekm_log("Context: " + context_str)
    self.m_context = context_str
Drop in pure python replacement for ekmcrc.c extension.
Args:
buf (bytes): String or byte array (implicit Python 2.7 cast)
Returns:
str: 16 bit CRC per EKM Omnimeters formatted as hex string. | def calc_crc16(buf):
crc_table = [0x0000, 0xc0c1, 0xc181, 0x0140, 0xc301, 0x03c0, 0x0280, 0xc241,
0xc601, 0x06c0, 0x0780, 0xc741, 0x0500, 0xc5c1, 0xc481, 0x0440,
0xcc01, 0x0cc0, 0x0d80, 0xcd41, 0x0f00, 0xcfc1, 0xce81, 0x0e40,
0x0a00, 0xcac1... | 851,879 |
Simple wrap to calc legacy PF value
Args:
pf: meter power factor reading
Returns:
def calcPF(pf):
    """Simple wrap to calc legacy PF value.

    Args:
        pf (str): Meter power factor reading, quadrant char + digits.

    Returns:
        int: Legacy push pf; 100 when the quadrant char is unrecognized.
    """
    quadrant, magnitude = pf[:1], pf[1:]
    if quadrant == CosTheta.CapacitiveLead:
        return 200 - int(magnitude)
    if quadrant == CosTheta.InductiveLag:
        return int(magnitude)
    return 100
Serial call to set max demand period.
Args:
period (int): Max demand period setting (1 = 15 minute, 2 = 30 minute, 3 = hour) as int.
password (str): Optional password.
Returns:
bool: True on completion with ACK. | def setMaxDemandPeriod(self, period, password="00000000"):
result = False
self.setContext("setMaxDemandPeriod")
try:
if period < 1 or period > 3:
self.writeCmdMsg("Correct parameter: 1 = 15 minute, 2 = 30 minute, 3 = hour")
self.setContext("")... | 851,881 |
Serial Call to set meter password. USE WITH CAUTION.
Args:
new_pwd (str): 8 digit numeric password to set
pwd (str): Old 8 digit numeric password.
Returns:
bool: True on completion with ACK. | def setMeterPassword(self, new_pwd, pwd="00000000"):
result = False
self.setContext("setMeterPassword")
try:
if len(new_pwd) != 8 or len(pwd) != 8:
self.writeCmdMsg("Passwords must be exactly eight characters.")
self.setContext("")
... | 851,882 |
Wrapper for struct.unpack with SerialBlock buffer definitions.
Args:
data (str): Implicit cast bytes to str, serial port return.
def_buf (SerialBlock): Block object holding field lengths.
Returns:
tuple: parsed result of struct.unpack() with field definitions. | def unpackStruct(self, data, def_buf):
struct_str = "="
for fld in def_buf:
if not def_buf[fld][MeterData.CalculatedFlag]:
struct_str = struct_str + str(def_buf[fld][MeterData.SizeValue]) + "s"
if len(data) == 255:
contents = struct.unpack(struct_... | 851,883 |
Move data from raw tuple into scaled and converted values.
Args:
contents (tuple): Breakout of passed block from unpackStruct().
def_buf (): Read buffer destination.
kwh_scale (int): :class:`~ekmmeters.ScaleKWH` as int, from Field.kWhScale`
Returns:
bool... | def convertData(self, contents, def_buf, kwh_scale=ScaleKWH.EmptyScale):
log_str = ""
count = 0
# getting scale does not require a full read. It does require that the
# reads have the scale value in the first block read. This requirement
# is filled by default... | 851,884 |
Translate the passed serial block into string only JSON.
Args:
def_buf (SerialBlock): Any :class:`~ekmmeters.SerialBlock` object.
Returns:
str: JSON rendering of meter record. | def jsonRender(self, def_buf):
try:
ret_dict = SerialBlock()
ret_dict[Field.Meter_Address] = self.getMeterAddress()
for fld in def_buf:
compare_fld = fld.upper()
if not "RESERVED" in compare_fld and not "CRC" in compare_fld:
... | 851,885 |
Internal read CRC wrapper.
Args:
raw_read (str): Bytes with implicit string cast from serial read
def_buf (SerialBlock): Populated read buffer.
Returns:
bool: True if passed CRC equals calculated CRC. | def crcMeterRead(self, raw_read, def_buf):
try:
if len(raw_read) == 0:
ekm_log("(" + self.m_context + ") Empty return read.")
return False
sent_crc = self.calc_crc16(raw_read[1:-2])
logstr = "(" + self.m_context + ")CRC sent = " + str(... | 851,886 |
Remove an observer from the meter update() chain.
Args:
def unregisterObserver(self, observer):
    """Remove an observer from the meter update() chain.

    Silently ignores observers that were never registered.

    Args:
        observer (MeterObserver): Subclassed MeterObserver.
    """
    # Guard first: list.remove() raises ValueError on a missing element.
    # (Dead trailing `pass` from the original removed.)
    if observer in self.m_observers:
        self.m_observers.remove(observer)
Return the requested tariff schedule :class:`~ekmmeters.SerialBlock` for meter.
Args:
period_group (int): A :class:`~ekmmeters.ReadSchedules` value.
Returns:
SerialBlock: The requested tariff schedules for meter. | def getSchedulesBuffer(self, period_group):
empty_return = SerialBlock()
if period_group == ReadSchedules.Schedules_1_To_4:
return self.m_schd_1_to_4
elif period_group == ReadSchedules.Schedules_5_To_6:
return self.m_schd_5_to_6
else:
return e... | 851,891 |
Get the months tariff SerialBlock for meter.
Args:
direction (int): A :class:`~ekmmeters.ReadMonths` value.
Returns:
def getMonthsBuffer(self, direction):
    """Get the months tariff SerialBlock for meter.

    Args:
        direction (int): A ReadMonths value.

    Returns:
        SerialBlock: Requested months tariffs buffer
        (reverse buffer for kWhReverse, forward buffer otherwise).
    """
    reverse_requested = direction == ReadMonths.kWhReverse
    return self.m_rev_mons if reverse_requested else self.m_mons
Serial set time with day of week calculation.
Args:
yy (int): Last two digits of year.
mm (int): Month 1-12.
dd (int): Day 1-31
hh (int): Hour 0 to 23.
minutes (int): Minutes 0 to 59.
ss (int): Seconds 0 to 59.
password (str): ... | def setTime(self, yy, mm, dd, hh, minutes, ss, password="00000000"):
result = False
self.setContext("setTime")
try:
if mm < 1 or mm > 12:
self.writeCmdMsg("Month must be between 1 and 12")
self.setContext("")
return result
... | 851,896 |
Serial call to set CT ratio for attached inductive pickup.
Args:
new_ct (int): A :class:`~ekmmeters.CTRatio` value, a legal amperage setting.
password (str): Optional password.
Returns:
bool: True on completion with ACK. | def setCTRatio(self, new_ct, password="00000000"):
ret = False
self.setContext("setCTRatio")
try:
self.clearCmdMsg()
if ((new_ct != CTRatio.Amps_100) and (new_ct != CTRatio.Amps_200) and
(new_ct != CTRatio.Amps_400) and (new_ct != CTRatio.Amps... | 851,897 |
Assign one schedule tariff period to meter buffer.
Args:
schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extents.Schedules).
tariff (int): :class:`~ekmmeters.Tariffs` value or in range(Extents.Tariffs).
hour (int): Hour from 0-23.
minute (int):... | def assignSchedule(self, schedule, period, hour, minute, tariff):
if ((schedule not in range(Extents.Schedules)) or
(period not in range(Extents.Tariffs)) or
(hour < 0) or (hour > 23) or (minute < 0) or
(minute > 59) or (tariff < 0)):
ekm_log(... | 851,898 |
Define a single season and assign a schedule
Args:
season (int): A :class:`~ekmmeters.Seasons` value or in range(Extent.Seasons).
month (int): Month 1-12.
day (int): Day 1-31.
schedule (int): A :class:`~ekmmeters.LCDItems` value or in range(Extent.Schedules).
... | def assignSeasonSchedule(self, season, month, day, schedule):
season += 1
schedule += 1
if ((season < 1) or (season > Extents.Seasons) or (schedule < 1) or
(schedule > Extents.Schedules) or (month > 12) or (month < 0) or
(day < 0) or (day > 31)):
... | 851,899 |
Serial command to set seasons table.
If no dictionary is passed, the meter object buffer is used.
Args:
cmd_dict (dict): Optional dictionary of season schedules.
password (str): Optional password
Returns:
bool: True on completion and ACK. | def setSeasonSchedules(self, cmd_dict=None, password="00000000"):
result = False
self.setContext("setSeasonSchedules")
if not cmd_dict:
cmd_dict = self.m_seasons_sched_params
try:
if not self.request(False):
self.writeCmdMsg("Bad read CR... | 851,900 |
Set a singe holiday day and month in object buffer.
There is no class style enum for holidays.
Args:
holiday (int): 0-19 or range(Extents.Holidays).
month (int): Month 1-12.
day (int): Day 1-31
Returns:
bool: True on completion. | def assignHolidayDate(self, holiday, month, day):
holiday += 1
if (month > 12) or (month < 0) or (day > 31) or (day < 0) or (holiday < 1) or (holiday > Extents.Holidays):
ekm_log("Out of bounds: month " + str(month) + " day " + str(day) + " holiday " + str(holiday))
retu... | 851,901 |
Serial call to set holiday list.
If a buffer dictionary is not supplied, the method will use
the class object buffer populated with assignHolidayDate.
Args:
cmd_dict (dict): Optional dictionary of holidays.
password (str): Optional password.
Returns:
... | def setHolidayDates(self, cmd_dict=None, password="00000000"):
result = False
self.setContext("setHolidayDates")
if not cmd_dict:
cmd_dict = self.m_holiday_date_params
try:
if not self.request(False):
self.writeCmdMsg("Bad read CRC on set... | 851,902 |
Serial call to set weekend and holiday :class:`~ekmmeters.Schedules`.
Args:
new_wknd (int): :class:`~ekmmeters.Schedules` value to assign.
new_hldy (int): :class:`~ekmmeters.Schedules` value to assign.
password (str): Optional password..
Returns:
bool: T... | def setWeekendHolidaySchedules(self, new_wknd, new_hldy, password="00000000"):
result = False
self.setContext("setWeekendHolidaySchedules")
try:
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not s... | 851,903 |
Serial call to read schedule tariffs buffer
Args:
tableset (int): :class:`~ekmmeters.ReadSchedules` buffer to return.
Returns:
bool: True on completion and ACK. | def readSchedules(self, tableset):
self.setContext("readSchedules")
try:
req_table = binascii.hexlify(str(tableset).zfill(1))
req_str = "01523102303037" + req_table + "282903"
self.request(False)
req_crc = self.calc_crc16(req_str[2:].decode("hex"... | 851,904 |
Read a single schedule tariff from meter object buffer.
Args:
schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extent.Schedules).
tariff (int): A :class:`~ekmmeters.Tariffs` value or in range(Extent.Tariffs).
Returns:
bool: True on completion. | def extractSchedule(self, schedule, period):
ret = namedtuple("ret", ["Hour", "Min", "Tariff", "Period", "Schedule"])
work_table = self.m_schd_1_to_4
if Schedules.Schedule_5 <= schedule <= Schedules.Schedule_6:
work_table = self.m_schd_5_to_6
period += 1
sche... | 851,905 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.