code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def power_on(self):
    """Send the POWER_ON command to the receiver.

    Returns True when the command was sent and local state updated,
    False on a connection failure.
    """
    try:
        self.send_command("POWER_ON")
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: power on command not sent.")
        return False
    self._power = POWER_ON
    self._state = STATE_ON
    return True
def _get_version_for_class_from_state(state, klass):
names = [_importable_name(klass)]
from .util import class_rename_registry
names.extend(class_rename_registry.old_handled_by(klass))
for n in names:
try:
return state['class_tree_versions'][n]
exc... | retrieves the version of the current klass from the state mapping from old locations to new ones. |
def pairwise_compare(afa, leven, threads, print_list, ignore_gaps):
seqs = {seq[0]: seq for seq in nr_fasta([afa], append_index = True)}
num_seqs = len(seqs)
pairs = ((i[0], i[1], ignore_gaps) for i in itertools.combinations(list(seqs.values()), 2))
pool = multithread(threads)
if leven is True:
... | make pairwise sequence comparisons between aligned sequences |
def markLoadingStarted(self):
if self.isThreadEnabled():
XLoaderWidget.start(self)
if self.showTreePopup():
tree = self.treePopupWidget()
tree.setCursor(Qt.WaitCursor)
tree.clear()
tree.setUpdatesEnabled(False)
tree.blockSig... | Marks this widget as loading records. |
def _assert_safe_casting(cls, data, subarr):
if not issubclass(data.dtype.type, np.signedinteger):
if not np.array_equal(data, subarr):
raise TypeError('Unsafe NumPy casting, you must '
'explicitly cast') | Ensure incoming data can be represented as ints. |
def toner_status(self, filter_supported: bool = True) -> Dict[str, Any]:
toner_status = {}
for color in self.COLOR_NAMES:
try:
toner_stat = self.data.get(
'{}_{}'.format(SyncThru.TONER, color), {})
if filter_supported and toner_stat.g... | Return the state of all toners cartridges. |
def __get_last_update_time():
now = datetime.datetime.utcnow()
first_tuesday = __get_first_tuesday(now)
if first_tuesday < now:
return first_tuesday
else:
first_of_month = datetime.datetime(now.year, now.month, 1)
last_month = first_of_month + datetime.timedelta(days=-1)
... | Returns last FTP site update time |
def individuals(self, ind_ids=None):
    """Fetch individuals from the database, optionally restricted to *ind_ids*."""
    base_query = self.query(Individual)
    if not ind_ids:
        return base_query
    return base_query.filter(Individual.ind_id.in_(ind_ids))
def loadBestScore(self):
    """Load the local best score from ``self.scores_file``.

    Returns True on success; False when the file is missing, unreadable,
    or does not contain a valid base-10 integer on its first line.
    """
    try:
        with open(self.scores_file, 'r') as f:
            self.best_score = int(f.readline(), 10)
    except (OSError, ValueError):
        # Narrowed from a bare ``except``: only I/O and parse failures
        # are expected here; anything else should surface.
        return False
    return True
def panic(self, *args):
    """Report a fatal error, then exit with status 1 (unless in test mode)."""
    self._err("fatal", *args)
    # Note: only an explicit False (not merely falsy) triggers the exit.
    should_exit = self.test_errs_mode is False
    if should_exit:
        sys.exit(1)
def diff(self, other):
    """Return {key: (self_value, other_value)} for every differing default."""
    triples = ((k, getattr(self, k), getattr(other, k))
               for k in self.__class__.defaults)
    return {k: (mine, theirs) for k, mine, theirs in triples if mine != theirs}
def point_arrays(self):
pdata = self.GetPointData()
narr = pdata.GetNumberOfArrays()
if hasattr(self, '_point_arrays'):
keys = list(self._point_arrays.keys())
if narr == len(keys):
if keys:
if self._point_arrays[keys[0]].size == self.n_... | Returns the all point arrays |
def toggleCollapseBefore(self):
    """Toggle this handle: uncollapse if collapsed, otherwise collapse before."""
    if not self.isCollapsed():
        self.collapse(XSplitterHandle.CollapseDirection.Before)
    else:
        self.uncollapse()
def __fetch_data(self, url):
    """Fetch and parse the XML response for *url* (API key is appended).

    On an HTTP error, the error body is parsed and its 'message'
    attribute is re-raised as a ValueError.
    """
    full_url = url + '&api_key=' + self.api_key
    try:
        root = ET.fromstring(urlopen(full_url).read())
    except HTTPError as exc:
        error_root = ET.fromstring(exc.read())
        raise ValueError(error_root.get('message'))
    return root
def convert_level(self, record):
level = record.levelno
if level >= logging.CRITICAL:
return levels.CRITICAL
if level >= logging.ERROR:
return levels.ERROR
if level >= logging.WARNING:
return levels.WARNING
if level >= logging.INFO:
... | Converts a logging level into a logbook level. |
def auth(self):
self.send(nsq.auth(self.auth_secret))
frame, data = self.read_response()
if frame == nsq.FRAME_TYPE_ERROR:
raise data
try:
response = json.loads(data.decode('utf-8'))
except ValueError:
self.close_stream()
raise erro... | Send authorization secret to nsqd. |
def plot_all(self, show=True, **kwargs):
    """Call every plot method provided by the parser; return the figures."""
    return [
        self.plot_stacked_hist(show=show),
        self.plot_efficiency(show=show),
        self.plot_pie(show=show),
    ]
def _tag_net_direction(data):
src = data['packet']['src_domain']
dst = data['packet']['dst_domain']
if src == 'internal':
if dst == 'internal' or 'multicast' in dst or 'broadcast' in dst:
return 'internal'
else:
return 'outgoing'
el... | Create a tag based on the direction of the traffic |
def graph_format(new_mem, old_mem, is_firstiteration=True):
if is_firstiteration:
output = " n/a "
elif new_mem - old_mem > 50000000:
output = " +++++"
elif new_mem - old_mem > 20000000:
output = " ++++ "
elif new_mem - old_mem > 5000000:
output = " +++ "
el... | Show changes graphically in memory consumption |
def _select_ftdi_channel(channel):
if channel < 0 or channel > 8:
raise ArgumentError("FTDI-selected multiplexer only has channels 0-7 valid, "
"make sure you specify channel with -c channel=number", channel=channel)
from pylibftdi import BitBangDevice
bb = BitBangDevice(... | Select multiplexer channel. Currently uses a FTDI chip via pylibftdi |
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
response = self.adv_filters_handle(request,
extra_context=extra_context)
if response:
return response
return super(Admin... | Add advanced_filters form to changelist context |
def retrace(self, rewards, dones, q_values, state_values, rho, final_values):
rho_bar = torch.min(torch.ones_like(rho) * self.retrace_rho_cap, rho)
q_retraced_buffer = torch.zeros_like(rewards)
next_value = final_values
for i in reversed(range(rewards.size(0))):
q_retraced = ... | Calculate Q retraced targets |
def valuePasses(self, value):
    """Return whether *value* satisfies this filter's comparison."""
    compare = self._conditional_cmp[self.op]
    return compare(value, self.value)
def event_dispatcher(nameko_config, **kwargs):
amqp_uri = nameko_config[AMQP_URI_CONFIG_KEY]
serializer, _ = serialization.setup(nameko_config)
serializer = kwargs.pop('serializer', serializer)
ssl = nameko_config.get(AMQP_SSL_CONFIG_KEY)
publisher = Publisher(amqp_uri, serializer=serializer, ssl=ss... | Return a function that dispatches nameko events. |
def filter_belief():
if request.method == 'OPTIONS':
return {}
response = request.body.read().decode('utf-8')
body = json.loads(response)
stmts_json = body.get('statements')
belief_cutoff = body.get('belief_cutoff')
if belief_cutoff is not None:
belief_cutoff = float(belief_cutof... | Filter to beliefs above a given threshold. |
def build_vec(self):
for item in all_calls:
self.__dict__[item] = []
for dev in self.devices:
for item in all_calls:
if self.system.__dict__[dev].n == 0:
val = False
else:
val = self.system.__dict__[dev].call... | build call validity vector for each device |
def process_shells_ordered(self, shells):
output = []
for shell in shells:
entry = shell['entry']
config = ShellConfig(script=entry['script'], title=entry['title'] if 'title' in entry else '',
model=shell['model'], env=shell['env'], item=shell['it... | Processing a list of shells one after the other. |
def chunk(seq, n):
    """Yield successive n-sized chunks from *seq*."""
    offsets = range(0, len(seq), n)
    return (seq[off:off + n] for off in offsets)
def minver_error(pkg_name):
    """Report a missing minimum version constraint for *pkg_name* and exit(1)."""
    message = ('ERROR: specify minimal version of "{}" using '
               '">=" or "=="'.format(pkg_name))
    print(message, file=sys.stderr)
    sys.exit(1)
def load_defense_output(filename):
result = {}
with open(filename) as f:
for row in csv.reader(f):
try:
image_filename = row[0]
if image_filename.endswith('.png') or image_filename.endswith('.jpg'):
image_filename = image_filename[:image_filename.rfind('.')]
label = int(r... | Loads output of defense from given file. |
def cli(env, identifier, credential_id):
    """Delete the credential of an Object Storage account and print it."""
    manager = SoftLayer.ObjectStorageManager(env.client)
    deleted = manager.delete_credential(identifier, credential_id=credential_id)
    env.fout(deleted)
def add_to_js(self, name, var):
    """Expose *var* to JavaScript under *name* on the page's main frame."""
    self.page().mainFrame().addToJavaScriptWindowObject(name, var)
def stderr(msg, silent=False):
    """Write *msg* to stderr unless *silent* is truthy."""
    if silent:
        return
    print(msg, file=sys.stderr)
def not_next(e):
    """Create a PEG matcher implementing negative lookahead over *e*."""
    def match_not_next(s, grm=None, pos=0):
        try:
            e(s, grm, pos)
        except PegreError:
            # Inner failure means the lookahead succeeds; consume nothing.
            return PegreResult(s, Ignore, (pos, pos))
        raise PegreError('Negative lookahead failed', pos)
    return match_not_next
def _string_parser(strip_whitespace):
def _parse_string_value(element_text, _state):
if element_text is None:
value = ''
elif strip_whitespace:
value = element_text.strip()
else:
value = element_text
return value
return _parse_string_value | Return a parser function for parsing string values. |
def _validate_handler(column_name, value, predicate_refs):
if value is not None:
for predicate_ref in predicate_refs:
predicate, predicate_name, predicate_args = _decode_predicate_ref(predicate_ref)
validate_result = predicate(value, *predicate_args)
if isinstance(validat... | handle predicate's return value |
def _get_key(cls, device_id):
var_name = "USER_KEY_{0:08X}".format(device_id)
if var_name not in os.environ:
raise NotFoundError("No user key could be found for devices", device_id=device_id,
expected_variable_name=var_name)
key_var = os.environ[var_na... | Attempt to get a user key from an environment variable |
def on_episode_begin(self, episode, logs):
    """Reset per-episode metric storage and record the episode start time."""
    assert episode not in self.metrics and episode not in self.starts
    self.metrics[episode] = []
    self.starts[episode] = timeit.default_timer()
def find_close_value(self, LIST, value):
    """Return the element of *LIST* nearest to *value*.

    Ties keep the earlier element, matching the original strict-``<``
    linear scan. Raises ValueError on an empty *LIST* (the manual loop
    previously crashed with an UnboundLocalError on ``result``).
    """
    return min(LIST, key=lambda a: abs(value - a))
async def refresh(self):
while True:
await asyncio.sleep(5/6 * self.lifetime)
request = stun.Message(message_method=stun.Method.REFRESH,
message_class=stun.Class.REQUEST)
request.attributes['LIFETIME'] = self.lifetime
await self.... | Periodically refresh the TURN allocation. |
def _calc_strain_max(self, loc_input, loc_layer, motion, *args):
    """Compute the peak effective strain at the center of a layer.

    Applies the strain transfer function between *loc_input* and
    *loc_layer* to *motion* and returns its peak value.
    ``*args`` is accepted but unused here — presumably for signature
    compatibility with sibling strain calculators; TODO confirm.
    """
    return motion.calc_peak(
        self.calc_strain_tf(loc_input, loc_layer))
def send(self):
    """Publish a periodic hello message and advance the counter."""
    count = self.counter
    self.log.info("Saying hello (%d)." % count)
    frame = stomper.Frame()
    frame.unpack(stomper.send(DESTINATION, 'hello there (%d)' % count))
    self.counter = count + 1
    self.transport.write(frame.pack())
def draw(self):
indices = np.arange(len(self.classes_))
prev = np.zeros(len(self.classes_))
colors = resolve_colors(
colors=self.colors,
n_colors=len(self.classes_))
for idx, row in enumerate(self.predictions_):
self.ax.bar(indices, row, label=self.cla... | Renders the class prediction error across the axis. |
def write_json_to_file(self, net_type, filename, indent='no-indent'):
    """Save 'dat' or 'viz' network data as JSON to *filename*.

    Thin delegate to ``export_data.write_json_to_file``. ``indent``
    defaults to the sentinel string 'no-indent' (compact output
    presumably — confirm in export_data).
    """
    export_data.write_json_to_file(self, net_type, filename, indent)
def rel_path(self, other):
try:
memo_dict = self._memo['rel_path']
except KeyError:
memo_dict = {}
self._memo['rel_path'] = memo_dict
else:
try:
return memo_dict[other]
except KeyError:
pass
if se... | Return a path to "other" relative to this directory. |
def csv_writer(csvfile):
    """Return a CSV writer appropriate for the running Python version."""
    # Python 2's csv module expects byte-string delimiters.
    delimiter = ',' if sys.version_info >= (3,) else b','
    return csv.writer(csvfile, delimiter=delimiter, lineterminator='\n')
def chi_eff(self):
    """Return the effective spin computed from the masses and z-spins."""
    args = (self.mass1, self.mass2, self.spin1z, self.spin2z)
    return conversions.chi_eff(*args)
def rotate_point(self, p):
    """Rotate point *p* (a 3-sequence) by this quaternion; return (x, y, z)."""
    pure = Quaternion(0, p[0], p[1], p[2], False)
    # q * p * q^-1, with q normalized first.
    rotated = (self.normalize() * pure) * self.inverse()
    return rotated.x, rotated.y, rotated.z
def _set_subset_indices(self, y_min, y_max, x_min, x_max):
y_coords, x_coords = self.xd.lsm.coords
dx = self.xd.lsm.dx
dy = self.xd.lsm.dy
lsm_y_indices_from_y, lsm_x_indices_from_y = \
np.where((y_coords >= (y_min - 2*dy)) &
(y_coords <= (y_max + 2*dy)))... | load subset based on extent |
def validate(self):
extents_valid = (0 <= self.lower_extent <= self.upper_extent
<= self.global_size)
if not extents_valid:
raise ValueError("Dimension '{d}' fails 0 <= {el} <= {eu} <= {gs}"
.format(d=self.name, gs=self.global_size,
el=self.low... | Validate the contents of a dimension data dictionary |
def config_cred(config, providers):
expected = ['aws', 'azure', 'gcp', 'alicloud']
cred = {}
to_remove = []
for item in providers:
if any(item.startswith(itemb) for itemb in expected):
try:
cred[item] = dict(list(config[item].items()))
except KeyError as e... | Read credentials from configfile. |
def sech(x, context=None):
    """Return the hyperbolic secant of x.

    ``x`` is implicitly converted to BigFloat; the optional ``context``
    overrides the current mpfr context for this computation.
    """
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_sech,
        (BigFloat._implicit_convert(x),),
        context,
    )
def filter_users(self, users):
    """Return LeaderboardInstance wrappers for rows whose 'user_id' is in
    *users* (an iterable of user-id ints)."""
    return [LeaderboardInstance(x) for x in self._leaderboard if x['user_id'] in users]
def precesion(date):
    """Return the precession rotation matrix for *date*.

    (Name keeps the historical misspelling for API compatibility.)
    """
    angles = np.deg2rad(_precesion(date))
    zeta, theta, z = angles
    return rot3(zeta) @ rot2(-theta) @ rot3(z)
def yaml(self):
    """Return this mapping serialized as YAML.

    Key order is preserved via OrderedDict and block style is forced
    with ``default_flow_style=False``. Note: ``yaml`` here refers to
    the module at global scope, not this method.
    """
    return ordered_dump(OrderedDict(self),
                        Dumper=yaml.SafeDumper,
                        default_flow_style=False)
def addItem(self, item):
    """Add *item* to the underlying tree.

    Raises VersionError when the tree is immutable (its addItem
    attribute is missing).
    """
    try:
        self.tree.addItem(item)
    except AttributeError as exc:
        # Fixed Python-2-only ``except AttributeError, e`` syntax and
        # chain the original error for debuggability.
        raise VersionError('Saved versions are immutable') from exc
def nonparabolicity(self, **kwargs):
    """Return the Kane band nonparabolicity parameter for the Gamma valley."""
    Eg = self.Eg_Gamma(**kwargs)
    meff = self.meff_e_Gamma(**kwargs)
    T = kwargs.get('T', 300.)
    thermal_over_gap = k * T / Eg
    return thermal_over_gap * (1 - meff) ** 2
def load_model_from_package(name, **overrides):
    """Import package *name* and call its ``load`` with *overrides*."""
    package = importlib.import_module(name)
    return package.load(**overrides)
def _init_get_dict():
    """Build the name -> filter-method dispatch table for PandasPdb."""
    return {
        'main chain': PandasPdb._get_mainchain,
        'hydrogen': PandasPdb._get_hydrogen,
        'c-alpha': PandasPdb._get_calpha,
        'carbon': PandasPdb._get_carbon,
        'heavy': PandasPdb._get_heavy,
    }
def build_progress_message(total=None,
running=None,
finished=None,
failed=None,
cached=None):
progress_message = {}
if total:
progress_message['total'] = total
if running:
progress_me... | Build the progress message with correct formatting. |
def execute(self, eopatch):
    """Apply ``process`` to the configured feature of *eopatch*, in place."""
    ftype, fname = next(self.feature(eopatch))
    eopatch[ftype][fname] = self.process(eopatch[ftype][fname])
    return eopatch
def dumped(text, level, indent=2):
    """Put curly brackets around *text*, indented to *level*.

    Bug fix: the ``or "None"`` fallback was attached to the whole
    ``"{...}" % ...`` expression, which is always truthy, so it could
    never fire. Parenthesized so that empty indented text renders as
    the literal "None" inside the braces.
    """
    body = indented(text, level + 1, indent) or "None"
    return indented("{\n%s\n}" % body, level, indent) + "\n"
def get(self, key, *, encoding=_NOTSET):
    """Get the value of a key.

    ``encoding`` defaults to the module-level ``_NOTSET`` sentinel
    rather than None — presumably so an explicit ``encoding=None`` can
    be distinguished downstream; confirm in ``execute``.
    """
    return self.execute(b'GET', key, encoding=encoding)
def availablePageSizes(self):
    """List the names of available page sizes on QPagedPaintDevice."""
    names = []
    for attr in dir(QPagedPaintDevice):
        if type(getattr(QPagedPaintDevice, attr)) == QPagedPaintDevice.PageSize:
            names.append(attr)
    return names
def add_list_opt(self, opt, values):
    """Add option *opt* followed by each value in *values*."""
    self.add_opt(opt)
    for entry in values:
        self.add_opt(entry)
def multipartite(corpus, featureset_names, min_weight=1, filters={}):
pairs = Counter()
node_type = {corpus._generate_index(p): {'type': 'paper'}
for p in corpus.papers}
for featureset_name in featureset_names:
ftypes = {}
featureset = _get_featureset(corpus, featureset_name... | A network of papers and one or more featuresets. |
def _combine_ngrams(ngrams, joiner) -> str:
if isinstance(ngrams, str):
return ngrams
else:
combined = joiner.join(ngrams)
return combined | Construct keys for checking in trie |
def match(self, expression=None, xpath=None, namespaces=None):
class MatchObject(Dict):
pass
def _match(function):
self.matches.append(
MatchObject(expression=expression, xpath=xpath, function=function, namespaces=namespaces))
def wrapper(self, *args,... | decorator that allows us to match by expression or by xpath for each transformation method |
def _DecodeKey(self, key):
if self.dict.attrindex.HasBackward(key):
return self.dict.attrindex.GetBackward(key)
return key | Turn a key into a string if possible |
def from_entity(cls, entity: Entity) -> 'DictModel':
    """Convert *entity* into a dict keyed by its declared attributes."""
    return {name: getattr(entity, name) for name in entity.meta_.attributes}
def _on_ready_read(self):
while self.bytesAvailable():
if not self._header_complete:
self._read_header()
else:
self._read_payload() | Read bytes when ready read |
def _add_res(line):
    """Parse one local-resource line of ``drbdadm status`` output.

    Flushes the previously accumulated module-level ``resource`` dict
    into the module-level ``ret`` list, then starts a fresh resource
    dict from the whitespace-separated fields of *line*.
    """
    global resource
    fields = line.strip().split()
    # Flush the previous resource before starting a new one.
    if resource:
        ret.append(resource)
    resource = {}
    resource["resource name"] = fields[0]
    # Second field looks like "role:Primary" -> keep the part after ':'.
    resource["local role"] = fields[1].split(":")[1]
    resource["local volumes"] = []
    resource["peer nodes"] = []
def extra_reading_spec(self):
field_names = ("frame_number", "action", "reward", "done")
data_fields = {
name: tf.FixedLenFeature([1], tf.int64) for name in field_names
}
decoders = {
name: tf.contrib.slim.tfexample_decoder.Tensor(tensor_key=name)
for name in field_names
}
... | Additional data fields to store on disk and their decoders. |
def add_peer(self, peerAddr, networks=None):
if _debug: BTR._debug("add_peer %r networks=%r", peerAddr, networks)
if peerAddr in self.peers:
if not networks:
networks = []
else:
self.peers[peerAddr].extend(networks)
else:
if not... | Add a peer and optionally provide a list of the reachable networks. |
def _remove_pending_return(self, job, pending_returns):
tpls_to_remove = [ ]
call_stack_copy = job.call_stack_copy()
while call_stack_copy.current_return_target is not None:
ret_target = call_stack_copy.current_return_target
call_stack_copy = call_stack_copy.ret(ret_targe... | Remove all pending returns that are related to the current job. |
def ensure_dir_does_not_exist(*args):
    """Remove the directory formed by joining *args*, if it exists."""
    target = os.path.join(*args)
    if os.path.isdir(target):
        shutil.rmtree(target)
def autolink(self, raw_url, is_email):
if self.check_url(raw_url):
url = self.rewrite_url(('mailto:' if is_email else '') + raw_url)
url = escape_html(url)
return '<a href="%s">%s</a>' % (url, escape_html(raw_url))
else:
return escape_html('<%s>' % raw_url... | Filters links generated by the ``autolink`` extension. |
def _unparse_changetype(self, mod_len):
if mod_len == 2:
changetype = 'add'
elif mod_len == 3:
changetype = 'modify'
else:
raise ValueError("modlist item of wrong length")
self._unparse_attr('changetype', changetype) | Detect and write the changetype. |
def vectorize_raw(audio: np.ndarray) -> np.ndarray:
    """Turn audio into feature vectors, without clipping for length."""
    if len(audio) == 0:
        raise InvalidAudio('Cannot vectorize empty audio!')
    vectorizer = vectorizers[pr.vectorizer]
    return vectorizer(audio)
def prettify(amount, separator=','):
    """Insert *separator* as a thousands separator into *amount*.

    Works recursively: each call splits off one group of three digits
    until the string stops changing.

    Fixes: regex patterns are raw strings (the old "\\d" escapes emit
    DeprecationWarning/SyntaxWarning on modern Python), and the custom
    *separator* is now propagated through the recursion (previously the
    recursive call silently reverted to ',').
    """
    orig = str(amount)
    new = re.sub(r"^(-?\d+)(\d{3})", r"\g<1>{0}\g<2>".format(separator), str(amount))
    if orig == new:
        return new
    return prettify(new, separator)
def to_json(value, **kwargs):
if isinstance(value, HasProperties):
return value.serialize(**kwargs)
try:
return json.loads(json.dumps(value))
except TypeError:
raise TypeError(
"Cannot convert type {} to JSON without calling 'serialize' "
... | Convert instance to JSON |
def _get(self):
user = self.USER
try:
uid = pwd.getpwnam(user).pw_uid
except KeyError:
log.info('User does not exist')
return False
cmd = self.gsetting_command + ['get', str(self.SCHEMA), str(self.KEY)]
environ = {}
environ['XDG_RUNTIME... | get the value for user in gsettings |
def _adapt_response(self, response):
    """Convert an error response into ``(error_details, meta)``.

    Only the first element of the parsed errors list is returned —
    presumably server errors carry exactly one entry; confirm against
    the API's error contract.
    """
    errors, meta = super(ServerError, self)._adapt_response(response)
    return errors[0], meta
async def _send_rtcp_pli(self, media_ssrc):
    """Send an RTCP PLI (picture loss indication) for *media_ssrc*."""
    if self.__rtcp_ssrc is None:
        # No local SSRC assigned yet; nothing to report.
        return
    packet = RtcpPsfbPacket(fmt=RTCP_PSFB_PLI, ssrc=self.__rtcp_ssrc, media_ssrc=media_ssrc)
    await self._send_rtcp(packet)
def pseudo_core_density(self):
    """Return the pseudized radial core density as a RadialFunction."""
    mesh, values, _attrib = self._parse_radfunc("pseudo_core_density")
    return RadialFunction(mesh, values)
def disable_reporting(self):
    """Turn off reporting for this digital port and notify the board."""
    self.reporting = False
    command = bytearray([REPORT_DIGITAL + self.port_number, 0])
    self.board.sp.write(command)
def _batch_entry(self):
    """Entry point for the batcher thread.

    Loops forever running batches; on any failure records ``exc_info``
    and signals the owning process via SIGUSR1 so the main thread can
    re-raise.
    """
    try:
        while True:
            self._batch_entry_run()
    except BaseException:
        # Was a bare ``except:`` — made explicit. BaseException is
        # intentional: even KeyboardInterrupt/SystemExit in this thread
        # must be reported to the main process.
        self.exc_info = sys.exc_info()
        os.kill(self.pid, signal.SIGUSR1)
def run(self, bin, *args, **kwargs):
bin = self._bin(bin)
cmd = [bin] + list(args)
shell = kwargs.get("shell", False)
call = kwargs.pop("call", False)
input_ = kwargs.pop("input_", None)
if shell:
cmd = list_to_shell_command(cmd)
try:
if se... | Run a command inside the Python environment. |
def find_validation_workspaces(name, rounds=None):
workspaces = []
if rounds is not None:
rounds = indices_from_str(rounds)
else:
rounds = itertools.count(1)
for round in rounds:
workspace = pipeline.ValidatedDesigns(name, round)
if not workspace.exists(): break
w... | Find all the workspaces containing validated designs. |
def _document_root(self, fully_qualified=True):
nsmap = {"xsi": utils.NAMESPACES["xsi"], "xlink": utils.NAMESPACES["xlink"]}
if fully_qualified:
nsmap["mets"] = utils.NAMESPACES["mets"]
else:
nsmap[None] = utils.NAMESPACES["mets"]
attrib = {
"{}schemaL... | Return the mets Element for the document root. |
def as_enum(enum):
if isinstance(enum, string_types):
try:
enum = getattr(gl, 'GL_' + enum.upper())
except AttributeError:
try:
enum = _internalformats['GL_' + enum.upper()]
except KeyError:
raise ValueError('Could not find int valu... | Turn a possibly string enum into an integer enum. |
def close(self):
if not (yield from super().close()):
return False
nio = self._ethernet_adapter.get_nio(0)
if isinstance(nio, NIOUDP):
self.manager.port_manager.release_udp_port(nio.lport, self._project)
if self._local_udp_tunnel:
self.manager.port_man... | Closes this VPCS VM. |
def next(self):
base_depth = self.__root.count(os.path.sep)
for root, subFolders, files in os.walk(self.__root):
if not self.filter_folder(root):
continue
if self.__depth_limit is not None:
curr_depth = root.count(os.path.sep)
if cu... | Return all files in folder. |
def tail(self, fname, encoding, window, position=None):
if window <= 0:
raise ValueError('invalid window %r' % window)
encodings = ENCODINGS
if encoding:
encodings = [encoding] + ENCODINGS
for enc in encodings:
try:
f = self.open(encodi... | Read last N lines from file fname. |
def getAllNodes(self):
    """Return a TagCollection of every node here plus all its descendants."""
    collected = TagCollection()
    for node in self:
        collected.append(node)
        collected += node.getAllChildNodes()
    return collected
def extract_energy(rate, sig):
    """Extract per-frame energy (first MFCC coefficient) as a column vector."""
    coeffs = python_speech_features.mfcc(sig, rate, appendEnergy=True)
    energies = coeffs[:, 0]
    return energies[:, np.newaxis]
def autodocs():
"create Sphinx docs locally, and start a watchdog"
build_dir = path('docs/_build')
index_html = build_dir / 'html/index.html'
if build_dir.exists():
build_dir.rmtree()
with pushd("docs"):
print "\n*** Generating API doc ***\n"
sh("sphinx-apidoc -o apidoc -f -T... | create Sphinx docs locally, and start a watchdog |
def _fixed_width_info(self, lines):
    """Print each string in *lines* as info, wrapped at 80 characters."""
    for text in lines:
        for start in range(0, len(text), 80):
            msg.info(text[start:start + 80])
    msg.blank()
def _enum_member_error(err, eid, name, value, bitmask):
exception, msg = ENUM_ERROR_MAP[err]
enum_name = idaapi.get_enum_name(eid)
return exception(('add_enum_member(enum="{}", member="{}", value={}, bitmask=0x{:08X}) '
'failed: {}').format(
enum_name,
name,
val... | Format enum member error. |
def rosen(self, x, alpha=1e2):
    """Rosenbrock test objective; accepts a single point or a list of points."""
    points = [x] if isscalar(x[0]) else x
    values = [sum(alpha * (p[:-1] ** 2 - p[1:]) ** 2 + (1. - p[:-1]) ** 2)
              for p in points]
    return values if len(values) > 1 else values[0]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.