code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def _update_volume_stats(self): <NEW_LINE> <INDENT> LOG.debug(_("Updating volume stats.")) <NEW_LINE> self._stats = self._stats or {} <NEW_LINE> netapp_backend = 'NetApp_ESeries' <NEW_LINE> backend_name = self.configuration.safe_get('volume_backend_name') <NEW_LINE> self._stats["volume_backend_name"] = ( backend_name or netapp_backend) <NEW_LINE> self._stats["vendor_name"] = 'NetApp' <NEW_LINE> self._stats["driver_version"] = '1.0' <NEW_LINE> self._stats["storage_protocol"] = 'iSCSI' <NEW_LINE> self._stats["total_capacity_gb"] = 0 <NEW_LINE> self._stats["free_capacity_gb"] = 0 <NEW_LINE> self._stats["reserved_percentage"] = 0 <NEW_LINE> self._stats["QoS_support"] = False <NEW_LINE> self._update_capacity() <NEW_LINE> self._garbage_collect_tmp_vols()
Update volume statistics.
625941c11d351010ab855a8d
def solution01(a, b): <NEW_LINE> <INDENT> return (a+b)
Args: a: number b: number Returns: a + b
625941c163f4b57ef0001090
def make_xml(self): <NEW_LINE> <INDENT> param_list = ['name', 'version', 'description'] <NEW_LINE> f = open('../genkernel/templates/package.xml') <NEW_LINE> o = open(self.dir_list[0] + '/package.xml', 'a') <NEW_LINE> while 1: <NEW_LINE> <INDENT> line = f.readline() <NEW_LINE> if not line: break <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> line = line.replace('[{0}]'.format(i), self.pkg_dict[param_list[i]]) <NEW_LINE> <DEDENT> if line.find('[3]') != -1: <NEW_LINE> <INDENT> o.write(' <maintainer email="{1}">{0}</maintainer>\n'.format(self.pkg_dict['maintainer']['name'], self.pkg_dict['maintainer']['email'])) <NEW_LINE> <DEDENT> elif line.find('[4]') != -1: <NEW_LINE> <INDENT> for depend in self.pkg_dict['depend']: <NEW_LINE> <INDENT> o.write(' <build_depend>{0}</build_depend>\n'.format(depend)) <NEW_LINE> <DEDENT> <DEDENT> elif line.find('[5]') != -1: <NEW_LINE> <INDENT> for depend in self.pkg_dict['depend']: <NEW_LINE> <INDENT> o.write(' <exec_depend>{0}</exec_depend>\n'.format(depend)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> o.write(line) <NEW_LINE> <DEDENT> <DEDENT> o.close() <NEW_LINE> f.close()
Function make xml file of a package
625941c12ae34c7f2600d0a2
def distance_SH(liste_of_interactions, atom1, atom2, dico_dist, type_inter): <NEW_LINE> <INDENT> dist_x=abs(atom1.xpos-atom2.xpos) <NEW_LINE> dist_y=abs(atom1.ypos-atom2.ypos) <NEW_LINE> dist_z=abs(atom1.zpos-atom2.zpos) <NEW_LINE> if (dist_x<=dico_dist[type_inter] or dist_y<=dico_dist[type_inter] or dist_z<=dico_dist[type_inter]) : <NEW_LINE> <INDENT> dist=distance(atom1, atom2) <NEW_LINE> if dist>0 and dist<=dico_dist[type_inter] : <NEW_LINE> <INDENT> if (type_inter=="SHBOND" and atom1.atome_name=="SG" and atom2.atome_name=="SG" and dist<dico_dist["SSBOND"]): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> liste_of_interactions.append(Interaction(atom1, atom2, dist, type_inter)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return liste_of_interactions
Verifies the distance between two Atome instances provided (atom1 and atom2), based on the interaction type specified dico_dist is a dictionary containing distance cut-off for each interaction type type_inter is the interaction type (string) liste_of_interactions is the list of Interaction isntances the user wants to add an new interaction Returns the list of Interaction instances found
625941c1ab23a570cc2500f1
def get_request_factory(self): <NEW_LINE> <INDENT> return requests.get
Return request factory to perform actual HTTP request.
625941c1097d151d1a222dcc
def mac_access_list_standard_hide_mac_acl_std_seq_action(self, **kwargs): <NEW_LINE> <INDENT> config = ET.Element("config") <NEW_LINE> mac = ET.SubElement(config, "mac", xmlns="urn:brocade.com:mgmt:brocade-mac-access-list") <NEW_LINE> if kwargs.pop('delete_mac', False) is True: <NEW_LINE> <INDENT> delete_mac = config.find('.//*mac') <NEW_LINE> delete_mac.set('operation', 'delete') <NEW_LINE> <DEDENT> access_list = ET.SubElement(mac, "access-list") <NEW_LINE> if kwargs.pop('delete_access_list', False) is True: <NEW_LINE> <INDENT> delete_access_list = config.find('.//*access-list') <NEW_LINE> delete_access_list.set('operation', 'delete') <NEW_LINE> <DEDENT> standard = ET.SubElement(access_list, "standard") <NEW_LINE> if kwargs.pop('delete_standard', False) is True: <NEW_LINE> <INDENT> delete_standard = config.find('.//*standard') <NEW_LINE> delete_standard.set('operation', 'delete') <NEW_LINE> <DEDENT> name_key = ET.SubElement(standard, "name") <NEW_LINE> name_key.text = kwargs.pop('name') <NEW_LINE> if kwargs.pop('delete_name', False) is True: <NEW_LINE> <INDENT> delete_name = config.find('.//*name') <NEW_LINE> delete_name.set('operation', 'delete') <NEW_LINE> <DEDENT> hide_mac_acl_std = ET.SubElement(standard, "hide-mac-acl-std") <NEW_LINE> if kwargs.pop('delete_hide_mac_acl_std', False) is True: <NEW_LINE> <INDENT> delete_hide_mac_acl_std = config.find('.//*hide-mac-acl-std') <NEW_LINE> delete_hide_mac_acl_std.set('operation', 'delete') <NEW_LINE> <DEDENT> seq = ET.SubElement(hide_mac_acl_std, "seq") <NEW_LINE> if kwargs.pop('delete_seq', False) is True: <NEW_LINE> <INDENT> delete_seq = config.find('.//*seq') <NEW_LINE> delete_seq.set('operation', 'delete') <NEW_LINE> <DEDENT> seq_id_key = ET.SubElement(seq, "seq-id") <NEW_LINE> seq_id_key.text = kwargs.pop('seq_id') <NEW_LINE> if kwargs.pop('delete_seq_id', False) is True: <NEW_LINE> <INDENT> delete_seq_id = config.find('.//*seq-id') <NEW_LINE> delete_seq_id.set('operation', 'delete') <NEW_LINE> <DEDENT> action = 
ET.SubElement(seq, "action") <NEW_LINE> if kwargs.pop('delete_action', False) is True: <NEW_LINE> <INDENT> delete_action = config.find('.//*action') <NEW_LINE> delete_action.set('operation', 'delete') <NEW_LINE> <DEDENT> action.text = kwargs.pop('action') <NEW_LINE> callback = kwargs.pop('callback', self._callback) <NEW_LINE> return callback(config)
Auto Generated Code
625941c1a79ad161976cc0b6
def __init__(self, pid, pathname): <NEW_LINE> <INDENT> if not pid: <NEW_LINE> <INDENT> raise ValueError("The process id must be set") <NEW_LINE> <DEDENT> self.pid=pid <NEW_LINE> if not pathname: <NEW_LINE> <INDENT> raise ValueError("The pathname must be set") <NEW_LINE> <DEDENT> self.file=pathname <NEW_LINE> print(self.file)
Constructor for the dataFile Class :param pid: process locking the file :param filename: file name of the file being locked by the current application
625941c123849d37ff7b3001
def _adunit_test_exec(conn, test_session, test_set_path, result_obj): <NEW_LINE> <INDENT> global result_buffer <NEW_LINE> result_buffer = result_obj <NEW_LINE> result_obj.set_status(0) <NEW_LINE> checked = False <NEW_LINE> i = 0 <NEW_LINE> for tc in test_set_path['cases']: <NEW_LINE> <INDENT> LOGGER.info('[ android unit test, entry: %s ]' % tc['entry']) <NEW_LINE> inst_pack = conn.get_installed_package(tc['entry'][:tc['entry'].rindex('.')]) <NEW_LINE> if not checked and i == 0: <NEW_LINE> <INDENT> if len(inst_pack) > 0: <NEW_LINE> <INDENT> checked = True <NEW_LINE> test_cmd = ANDROID_UNIT_START % (tc['entry'], '.'.join(tc['entry'].split('.')[:-1])) <NEW_LINE> _code, _out, _error = conn.shell_cmd_ext(cmd=test_cmd, timeout=None, boutput=True, callbk=_adunit_lines_handler) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> <DEDENT> elif checked: <NEW_LINE> <INDENT> test_cmd = ANDROID_UNIT_START % (tc['entry'], '.'.join(tc['entry'].split('.')[:-1])) <NEW_LINE> _code, _out, _error = conn.shell_cmd_ext(cmd=test_cmd, timeout=None, boutput=True, callbk=_adunit_lines_handler) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> result_obj.set_status(1)
function for running core tests
625941c14d74a7450ccd4134
def as_date(value): <NEW_LINE> <INDENT> return datetime.strptime(value.strip(), '%Y%m%d').date()
Converte uma sequência string para um objeto :class:`datetime.date`. Os espaços em branco das bordas da sequência serão removidos antes da conversão. :param str value: String contendo uma data ANSI (``yyyymmdd``) :rtype: datetime.date
625941c182261d6c526ab40d
def play(self, reward, new_state): <NEW_LINE> <INDENT> maxQprime = np.max(self.Q[new_state]) <NEW_LINE> self.Q[self.s, self.a] = (1-self.alpha)*self.Q[self.s, self.a] + self.alpha*(reward + self.gamma * maxQprime) <NEW_LINE> self.T[self.s, self.a, new_state] += 1 <NEW_LINE> self.R[self.s, self.a] = (1-self.alpha)*self.R[self.s, self.a] + self.alpha * reward <NEW_LINE> self.set_history() <NEW_LINE> self.hallucinate(new_state) <NEW_LINE> action = self.choose_action(new_state) <NEW_LINE> self.random_actions_rate *= self.random_actions_decrease <NEW_LINE> self.s = new_state <NEW_LINE> self.a = action <NEW_LINE> self.QExplore[new_state, action] += 1.0 <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("s = {} a = {} reward = {}".format(new_state, action, reward)) <NEW_LINE> <DEDENT> return action
Given a new state, and a reward for the previous action, chooses an action, updating the Q table in the process. :param new_state: The resulting state for the previous action. :param reward: The reward for the previous action. :returns: The chosen action.
625941c1ab23a570cc2500f2
def __init__(self, index): <NEW_LINE> <INDENT> self.index = index
Store index of images
625941c10383005118ecf555
def msg_console(self,msg): <NEW_LINE> <INDENT> self._update_tools()
Message handler for console tool messages: CONSOLE_SWITCHED CONSOLE_ENGINE_CONNECTED
625941c1cad5886f8bd26f4b
def get_empty_mixin(self): <NEW_LINE> <INDENT> class Struct: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> mixin = Struct() <NEW_LINE> mixin.messages = {} <NEW_LINE> mixin.count = Counter() <NEW_LINE> mixin.tracked_errors = 0 <NEW_LINE> mixin.unique_data_names = set() <NEW_LINE> mixin.all_data_names = set() <NEW_LINE> mixin.data_names_by_key = defaultdict(list) <NEW_LINE> mixin.announce_suppressed = Counter() <NEW_LINE> return mixin
Return a bundle of freshly initialized counters and tracking information. The bundle is used to isolate mixin parameters from subclass parameters to prevent accidental overrides.
625941c1236d856c2ad44748
def var(self, ddof: int = 1) -> Union[DataFrame, Series]: <NEW_LINE> <INDENT> assert ddof in (0, 1) <NEW_LINE> return self._reduce_for_stat_function( F.var_pop if ddof == 0 else F.var_samp, only_numeric=True )
Compute variance of groups, excluding missing values. Parameters ---------- ddof : int, default 1 Delta Degrees of Freedom. The divisor used in calculations is N - ddof, where N represents the number of elements. See Also -------- pyspark.pandas.Series.groupby pyspark.pandas.DataFrame.groupby
625941c199cbb53fe6792b58
def parse(self, mapfile, toolchain): <NEW_LINE> <INDENT> self.tc_name = toolchain.title() <NEW_LINE> if toolchain in ("ARM", "ARM_STD", "ARM_MICRO", "ARMC6"): <NEW_LINE> <INDENT> parser = _ArmccParser <NEW_LINE> <DEDENT> elif toolchain == "GCC_ARM": <NEW_LINE> <INDENT> parser = _GccParser <NEW_LINE> <DEDENT> elif toolchain == "IAR": <NEW_LINE> <INDENT> parser = _IarParser <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with open(mapfile, 'r') as file_input: <NEW_LINE> <INDENT> self.modules = parser().parse_mapfile(file_input) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with open("%s.old" % mapfile, 'r') as old_input: <NEW_LINE> <INDENT> self.old_modules = parser().parse_mapfile(old_input) <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> self.old_modules = None <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> except IOError as error: <NEW_LINE> <INDENT> print("I/O error({0}): {1}".format(error.errno, error.strerror)) <NEW_LINE> return False
Parse and decode map file depending on the toolchain Positional arguments: mapfile - the file name of the memory map file toolchain - the toolchain used to create the file
625941c1d18da76e23532445
def suite_names(self): <NEW_LINE> <INDENT> return sorted(list(self.res_table.keys()))
List of suite names in alphabetical order.
625941c1656771135c3eb7de
def testMaskErrorIncompatibleRank4(self): <NEW_LINE> <INDENT> np_mask = np.ones((3, 3, 4, 5)) <NEW_LINE> x = tf.constant(0.0, shape=(2, 8, 8, 6)) <NEW_LINE> for mask in (np_mask, tf.convert_to_tensor(np_mask)): <NEW_LINE> <INDENT> with self.assertRaises(snt.Error) as cm: <NEW_LINE> <INDENT> snt.Conv2D(output_channels=4, kernel_shape=5, mask=mask)(x) <NEW_LINE> <DEDENT> self.assertTrue(str(cm.exception).startswith( "Invalid mask shape: {}".format(np_mask.shape)))
Errors are thrown for incompatible rank 4 mask.
625941c14f6381625f1149ae
def move_poles(self): <NEW_LINE> <INDENT> for pole in self.poles: <NEW_LINE> <INDENT> pole.move_pole(self.speed_pole_moving) <NEW_LINE> <DEDENT> x_max_bird = 0 <NEW_LINE> for bird in self.birds: <NEW_LINE> <INDENT> x_max_bird = max(x_max_bird, bird.position[0]) <NEW_LINE> <DEDENT> if self.poles[0].position[1] < x_max_bird: <NEW_LINE> <INDENT> self.poles = self.poles[1:]
This method make the poles move and disappear when they are behind the bird
625941c1ac7a0e7691ed4042
def write(self, c): <NEW_LINE> <INDENT> c = c.encode('ASCII') <NEW_LINE> return self._call(0x21, '>B', c[0])
Print a single character to the LCD.
625941c11d351010ab855a8e
def dump(self, filename): <NEW_LINE> <INDENT> f = file(filename, "w") <NEW_LINE> for name in sorted(self._fields): <NEW_LINE> <INDENT> self._fields[name].dump(f, name) <NEW_LINE> <DEDENT> f.close()
Dump the registered fields to a file Argument: | ``filename`` -- the file to write to
625941c1a17c0f6771cbdfc4
def definitions(self, nameFilter=None): <NEW_LINE> <INDENT> if nameFilter: <NEW_LINE> <INDENT> payload = {'name': nameFilter} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> payload = None <NEW_LINE> <DEDENT> return self.get_tfs_resource('build/definitions', underProject=True, payload=payload)
List of build definitions :param nameFilter: Filters to definitions whose names equal this value. Use ``*`` as a wildcard, ex: 'Release_11.*' or 'Release_*_11.0' :return: list of :class:`Definition` object
625941c1dd821e528d63b11c
def sim_loop(self): <NEW_LINE> <INDENT> frame_delay = self._renderer.get_frame_delay() <NEW_LINE> for i in range(0, self._generation_count): <NEW_LINE> <INDENT> self._sim.advance() <NEW_LINE> print("\ngeneration %d - births: %d - deaths: %d" % (i, self._sim.births, self._sim.deaths)) <NEW_LINE> self._renderer.render(self._universe) <NEW_LINE> if frame_delay > 0.0: <NEW_LINE> <INDENT> time.sleep(frame_delay) <NEW_LINE> <DEDENT> <DEDENT> cnt = self._universe.get_transition_count() <NEW_LINE> print("total transitions: ", cnt)
Runs the simulation.
625941c1507cdc57c6306c47
def print_nginx_logs_stats(): <NEW_LINE> <INDENT> print(f"{ log({}) } logs") <NEW_LINE> print("Methods:") <NEW_LINE> print(f"\tmethod GET: { log({'method': 'GET'}) }") <NEW_LINE> print(f"\tmethod POST: { log({'method': 'POST'}) }") <NEW_LINE> print(f"\tmethod PUT: {log({'method': 'PUT'})}") <NEW_LINE> print(f"\tmethod PATCH: {log({'method': 'PATCH'})}") <NEW_LINE> print(f"\tmethod DELETE: {log({'method': 'DELETE'})}") <NEW_LINE> print(f"{log({'method': 'GET', 'path': '/status'})} status check")
Provide statistics for stored Nginx logs
625941c1fff4ab517eb2f3ac
def testACL(t, env): <NEW_LINE> <INDENT> c = env.c1 <NEW_LINE> c.init_connection() <NEW_LINE> fh, stateid = c.create_confirm(t.code) <NEW_LINE> ops = c.use_obj(fh) <NEW_LINE> acl = [nfsace4(0, 0, 0,"123")] <NEW_LINE> ops += [c.setattr({FATTR4_ACL: acl})] <NEW_LINE> res = c.compound(ops) <NEW_LINE> check(res) <NEW_LINE> ops = c.use_obj(fh) <NEW_LINE> ops += [c.getattr([FATTR4_ACL])] <NEW_LINE> res = c.compound(ops) <NEW_LINE> check(res)
SETATTR/GETATTR of a simple ACL FLAGS: acl all DEPEND: LOOKFILE CODE: ACL5
625941c11f037a2d8b946170
def test_can_score_all_strike_score_sheet(self): <NEW_LINE> <INDENT> self.assertEqual(Game('X X X X X X X X X X X X').score(), 300)
Can parse score sheet with all strikes
625941c1d6c5a10208143fba
def GetComparitiveStats(self, for_input): <NEW_LINE> <INDENT> left_stats = self.left_stats[for_input] <NEW_LINE> right_stats = self.right_stats[for_input] <NEW_LINE> memoized = self.memoized_stats[for_input] <NEW_LINE> if memoized: <NEW_LINE> <INDENT> left_count, right_count, codelet_stats, success_stats = memoized <NEW_LINE> if left_count == left_stats.count and right_count == right_stats.count: <NEW_LINE> <INDENT> return (codelet_stats, success_stats) <NEW_LINE> <DEDENT> <DEDENT> left_successful_codelets = ( left_stats.stats_per_state[b'SuccessfulCompletion'].codelet_counts) <NEW_LINE> right_successful_codelets = ( right_stats.stats_per_state[b'SuccessfulCompletion'].codelet_counts) <NEW_LINE> codelet_count_stats = GetTStatsDict(left_successful_codelets, right_successful_codelets) <NEW_LINE> descriptor = ( Descriptor(t=codelet_count_stats['t'], df=codelet_count_stats['df'], less='Faster', more='Slower')) <NEW_LINE> codelet_count_stats['descriptor'] = descriptor <NEW_LINE> success_stats = GetTStatsDict([1 if x < len(left_successful_codelets) else 0 for x in range(left_stats.count)], [1 if x < len(right_successful_codelets) else 0 for x in range(right_stats.count)]) <NEW_LINE> descriptor = ( Descriptor(t=success_stats['t'], df=success_stats['df'], more='More Success', less='Less Success')) <NEW_LINE> success_stats['descriptor'] = descriptor <NEW_LINE> self.memoized_stats[for_input] = (left_stats.count, right_stats.count, codelet_count_stats, success_stats) <NEW_LINE> return (codelet_count_stats, success_stats)
For a given input name, returns comparitive stats along two dimensions. Specifically, it returns two dicts (see output of :py:func:`GetTStatsDict` for details), one comparing running times when there was success, and one comparing how frequently each was successful. Memoizes the result, and recalculates only if more data is available. Args: for_input: The string representation of the input (for indexing into left_stats and right_stats) Returns: A 2-tuple (codelet_count_stats, success_stats).
625941c1d53ae8145f87a1e5
def plot_eschelle(fit_dict, settings, enns = [], ells = [], freqs = []): <NEW_LINE> <INDENT> if len(freqs) == 0: <NEW_LINE> <INDENT> f = fit_dict['fit']['mode_freqs']['best_fit'] <NEW_LINE> fl = f - fit_dict['fit']['mode_freqs']['16th'] <NEW_LINE> fu = fit_dict['fit']['mode_freqs']['84th'] - f <NEW_LINE> ells = fit_dict['fit']['ells'] <NEW_LINE> enns = fit_dict['fit']['enns'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f = freqs <NEW_LINE> fl = np.zeros_like(f) <NEW_LINE> fu = np.zeros_like(f) <NEW_LINE> if len(ells) == 0: ells = np.zeros_like(f, dtype = int) - 1 <NEW_LINE> if len(enns) == 0: enns = np.zeros_like(f, dtype = int) - 1 <NEW_LINE> <DEDENT> idxl0 = fit_dict['fit']['ells'] == 0 <NEW_LINE> fit_dict['star']['Del_nu'] = np.median(np.diff(fit_dict['fit']['mode_freqs']['init_guess'][idxl0])) <NEW_LINE> dnu = fit_dict['star']['Del_nu'] <NEW_LINE> colors = ['k', 'b', 'g', 'r', 'm'] <NEW_LINE> ell_colors = np.zeros_like(ells, dtype = str) <NEW_LINE> for i, n in enumerate(range(-1, max(ells.astype(int))+1)): <NEW_LINE> <INDENT> ell_colors[ells == n] = colors[i%len(colors)] <NEW_LINE> <DEDENT> eschfig = plt.figure(figsize = (15, 15)) <NEW_LINE> eschax = eschfig.add_subplot(111) <NEW_LINE> eschax.set_xlabel('Frequency modulo %.2f [$\mu$Hz]' % dnu) <NEW_LINE> eschax.set_ylabel(r'Frequency [$\mu$Hz]') <NEW_LINE> for i in range(len(f)): <NEW_LINE> <INDENT> plt.plot(f[i]%dnu, f[i], 'o', color = ell_colors[i], markersize = 10) <NEW_LINE> plt.errorbar(f[i]%dnu, f[i], xerr = [[fl[i]],[fu[i]]],ls = 'dotted', color = ell_colors[i]) <NEW_LINE> <DEDENT> eschfig.tight_layout()
Plots the eschelle diagram using either the best_fit key from the fit dictionary or the frequencies from the *initial_guesses.txt. Only the former has proper errors associated with them. The frequencies are color coded by ell
625941c150485f2cf553cd0a
def do_search_results(self, arg): <NEW_LINE> <INDENT> self.controller.set_search_results(arg)
Set the number of results to display: SEARCH_RESULTS 10
625941c15fcc89381b1e162f
def run_training(datasets, tensorboard_path='/tmp/data', checkpoint_path='./models', num_classes=10, image_size=28, max_steps=10, batch_size=100, learning_rate=1e-4, channel=3): <NEW_LINE> <INDENT> if not os.path.isdir(tensorboard_path): <NEW_LINE> <INDENT> os.makedirs(tensorboard_path) <NEW_LINE> <DEDENT> if not os.path.isdir(checkpoint_path): <NEW_LINE> <INDENT> os.makedirs(checkpoint_path) <NEW_LINE> <DEDENT> train_images, test_images, train_labels, test_labels = datasets <NEW_LINE> with tf.Graph().as_default(): <NEW_LINE> <INDENT> feature_size = image_size * image_size * channel <NEW_LINE> images_placeholder = tf.placeholder("float", shape=(None, feature_size)) <NEW_LINE> labels_placeholder = tf.placeholder("float", shape=(None, num_classes)) <NEW_LINE> keep_prob = tf.placeholder("float") <NEW_LINE> logits = inference(images_placeholder, keep_prob, num_classes=num_classes, image_size=image_size, channel=channel) <NEW_LINE> loss_value = calculate_loss(logits, labels_placeholder) <NEW_LINE> train_op = training(loss_value, learning_rate) <NEW_LINE> accuracy = calculate_accuracy(logits, labels_placeholder) <NEW_LINE> saver = tf.train.Saver() <NEW_LINE> sess = tf.Session() <NEW_LINE> if checkpoint_exists(checkpoint_path): <NEW_LINE> <INDENT> saver.restore(sess, checkpoint_path + '/model.ckpt') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sess.run(tf.global_variables_initializer()) <NEW_LINE> <DEDENT> summary_op = tf.summary.merge_all() <NEW_LINE> summary_writer = tf.summary.FileWriter(tensorboard_path, graph=sess.graph) <NEW_LINE> for step in range(max_steps): <NEW_LINE> <INDENT> for i in range(int(len(train_images) / batch_size)): <NEW_LINE> <INDENT> batch = batch_size * i <NEW_LINE> sess.run(train_op, feed_dict={ images_placeholder: train_images[batch:batch+batch_size], labels_placeholder: train_labels[batch:batch+batch_size], keep_prob: 0.5}) <NEW_LINE> <DEDENT> train_accuracy = sess.run(accuracy, feed_dict={ images_placeholder: train_images, labels_placeholder: 
train_labels, keep_prob: 1.0}) <NEW_LINE> test_accuracy = sess.run(accuracy, feed_dict={ images_placeholder: test_images, labels_placeholder: test_labels, keep_prob: 1.0}) <NEW_LINE> print('step {}, training accuracy {}, test accuracy {}'.format( step, train_accuracy, test_accuracy )) <NEW_LINE> summary_str = sess.run(summary_op, feed_dict={ images_placeholder: train_images, labels_placeholder: train_labels, keep_prob: 1.0}) <NEW_LINE> summary_writer.add_summary(summary_str, step) <NEW_LINE> save_path = saver.save(sess, checkpoint_path + '/model.ckpt') <NEW_LINE> <DEDENT> summary_writer.close()
トレーニングの実行 @param datasets データセットタプル(train_images, test_images, train_labels, test_labels) num_classes 分類数 image_size 画像の1辺のpixel数 max_steps トレーニング実行回数 batch_size 1回のトレーニングに使用する画像枚数 learning_rate 学習率
625941c1236d856c2ad44749
def load_beam_profile(beam_spec, frequencies, params): <NEW_LINE> <INDENT> path = resource_filename(hide.__name__, PROFILE_PATH) <NEW_LINE> gain_sun = np.genfromtxt(path, skip_header=True) <NEW_LINE> sun_freq = gain_sun[:,0] <NEW_LINE> sun_Ae = np.radians(gain_sun[:,2]) <NEW_LINE> sigmas = np.interp(frequencies, sun_freq, sun_Ae) <NEW_LINE> fwhms = sigma2fwhm(sigmas) <NEW_LINE> beam_profiles = [airy_wrapper(fwhm) for fwhm in fwhms] <NEW_LINE> beam_norms = [normalization(fwhm, params.beam_nside) for fwhm in fwhms] <NEW_LINE> return beam_profiles, beam_norms
Creates a 2d airy beam profile using the given gain template :param params: The params instance with the paramterization :returns profile: A list of callable beam profiles
625941c18c3a873295158329
def authenticate_application(self, api_token, admin_token, override=False, fetch=True): <NEW_LINE> <INDENT> if (self.context.has_auth_params('Gem-Application') and not override): <NEW_LINE> <INDENT> raise OverrideError('Gem-Application') <NEW_LINE> <DEDENT> if (not api_token or not admin_token or not self.context.authorize('Gem-Application', api_token=api_token, admin_token=admin_token)): <NEW_LINE> <INDENT> raise AuthUsageError(self.context, 'Gem-Application') <NEW_LINE> <DEDENT> return self.application if fetch else True
Set credentials for Application authentication. Important Note: Do not use Application auth on any end-user device. Application auth provides read-access to all Users who have authorized an Application. Use on a secure application server only. Args: api_token (str): Token issued to your Application through the Gem Developer Console. admin_token (str): Token issued to run an instance of your App THIS IS A SECRET. TREAT IT LIKE A SECRET. override (boolean): Replace existing Application credentials. fetch (boolean): Return the authenticated Application. Returns: An Application object if `fetch` is True.
625941c130bbd722463cbd35
def apply_settings( device_list: Union[List[Device], Iterator[Device]], roles: Optional[List[str]] = None ) -> None: <NEW_LINE> <INDENT> raise NotImplementedError
Apply settings to devices
625941c1b57a9660fec337f3
def _viewNameChanged(self, joystickType, origViewName, newViewName): <NEW_LINE> <INDENT> if not self._emittingSignal: <NEW_LINE> <INDENT> i = self._views.get_iter_first() <NEW_LINE> while i is not None: <NEW_LINE> <INDENT> if self._views.get_value(i, 0)==origViewName: <NEW_LINE> <INDENT> self._views.set_value(i, 0, newViewName) <NEW_LINE> break <NEW_LINE> <DEDENT> i = self._views.iter_next(i)
Called when the view with the given name has been renamed.
625941c1fbf16365ca6f6131
def test_unregister_machine_sat6(self): <NEW_LINE> <INDENT> unregister = self.session.delete(self.base_url + system_api + '/' + self.system_id) <NEW_LINE> Util.log_assert(unregister.status_code == 204, "Unregister machine status code is not 204") <NEW_LINE> check_if_unregistered = self.session.get(self.base_url + system_api + '/' + self.system_id) <NEW_LINE> response = check_if_unregistered.json() <NEW_LINE> LOGGER.info(response) <NEW_LINE> Util.log_assert(response['isCheckingIn'] == False, "Incorrect value of isCheckingIn") <NEW_LINE> Util.log_assert(response['unregistered_at'] is not None, "Unregistered at field is None") <NEW_LINE> reports = self.session.get(self.base_url + report_api + '?system_id=' + self.system_id) <NEW_LINE> LOGGER.info(reports.json()) <NEW_LINE> LOGGER.info(reports.status_code)
Test if the above registered system has been unregistered and not checking in.[sat 6]
625941c196565a6dacc8f63e
def __add__(self, other): <NEW_LINE> <INDENT> if not len(self): <NEW_LINE> <INDENT> if isinstance(other, Points): <NEW_LINE> <INDENT> return other.__copy__() <NEW_LINE> <DEDENT> return self.wrap(other) <NEW_LINE> <DEDENT> if len(other): <NEW_LINE> <INDENT> self.positions.update({axis: np.append(self.positions[axis], other.positions[axis]) for axis in other.positions}) <NEW_LINE> self.lower.update({axis: np.append(self.lower[axis], other.lower[axis]) for axis in other.lower}) <NEW_LINE> self.upper.update({axis: np.append(self.upper[axis], other.upper[axis]) for axis in other.upper}) <NEW_LINE> if isinstance(other, Point): <NEW_LINE> <INDENT> self.indexes = np.vstack((self.indexes, other.indexes)) <NEW_LINE> <DEDENT> elif len(other): <NEW_LINE> <INDENT> self.indexes = np.concatenate((self.indexes, other.indexes), axis=0) <NEW_LINE> <DEDENT> self.duration = np.append(self.duration, other.duration) <NEW_LINE> self.delay_after = np.append(self.delay_after, other.delay_after) <NEW_LINE> <DEDENT> return self
input: other (Point or Points) Appends the positions, bounds, indices, duration, delay of another Points or Point to self Assumes that dimensions are shared, or that either self or other have no positions. returns: self
625941c18da39b475bd64ee3
def timestamp_from_date(date): <NEW_LINE> <INDENT> return int(time.mktime(date.timetuple())) * 1000000
:param date: дата, которую нужно преобразовать в timestamp в микросекундах :return:
625941c15510c4643540f35b
def audiofile_to_input_vector(audio_filename, numcep, numcontext): <NEW_LINE> <INDENT> fs, audio = wav.read(audio_filename) <NEW_LINE> features = mfcc(audio, samplerate=fs, numcep=numcep, winlen=0.032, winstep=0.02, winfunc=np.hamming) <NEW_LINE> empty_context = np.zeros((numcontext, numcep), dtype=features.dtype) <NEW_LINE> features = np.concatenate((empty_context, features, empty_context)) <NEW_LINE> return features
Given a WAV audio file at ``audio_filename``, calculates ``numcep`` MFCC features at every 0.01s time step with a window length of 0.025s. Appends ``numcontext`` context frames to the left and right of each time step, and returns this data in a numpy array.
625941c1d10714528d5ffc53
def Sleep(self, throttle_name=None): <NEW_LINE> <INDENT> if throttle_name is None: <NEW_LINE> <INDENT> for throttle_name in self.throttles: <NEW_LINE> <INDENT> self.Sleep(throttle_name=throttle_name) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> self.VerifyThrottleName(throttle_name) <NEW_LINE> thread = threading.currentThread() <NEW_LINE> while True: <NEW_LINE> <INDENT> duration = self.get_time() - self.last_rotate[throttle_name] <NEW_LINE> total = 0 <NEW_LINE> for count in self.prior_block[throttle_name].values(): <NEW_LINE> <INDENT> total += count <NEW_LINE> <DEDENT> if total: <NEW_LINE> <INDENT> duration += self.ROTATE_PERIOD <NEW_LINE> <DEDENT> for count in self.transferred[throttle_name].values(): <NEW_LINE> <INDENT> total += count <NEW_LINE> <DEDENT> sleep_time = self._SleepTime(total, self.throttles[throttle_name], duration) <NEW_LINE> if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> logger.debug('[%s] Throttling on %s. Sleeping for %.1f ms ' '(duration=%.1f ms, total=%d)', thread.getName(), throttle_name, sleep_time * 1000, duration * 1000, total) <NEW_LINE> self.thread_sleep(sleep_time) <NEW_LINE> if thread.exit_flag: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self._RotateCounts(throttle_name)
Possibly sleep in order to limit the transfer rate. Note that we sleep based on *prior* transfers rather than what we may be about to transfer. The next transfer could put us under/over and that will be rectified *after* that transfer. Net result is that the average transfer rate will remain within bounds. Spiky behavior or uneven rates among the threads could possibly bring the transfer rate above the requested limit for short durations. Args: throttle_name: The name of the throttle to sleep on. If None or omitted, then sleep on all throttles.
625941c107f4c71912b113f2
def new_init(self, *args, **kwargs): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> warnings.warn('Using positional arguments with the GObject constructor has been deprecated. ' 'Please specify keyword(s) for "%s" or use a class specific constructor. ' 'See: https://wiki.gnome.org/PyGObject/InitializerDeprecations' % ', '.join(arg_names[:len(args)]), category, stacklevel=stacklevel) <NEW_LINE> new_kwargs = dict(zip(arg_names, args)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_kwargs = {} <NEW_LINE> <DEDENT> new_kwargs.update(kwargs) <NEW_LINE> aliases_used = [] <NEW_LINE> for key, alias in deprecated_aliases.items(): <NEW_LINE> <INDENT> if alias in new_kwargs: <NEW_LINE> <INDENT> new_kwargs[key] = new_kwargs.pop(alias) <NEW_LINE> aliases_used.append(key) <NEW_LINE> <DEDENT> <DEDENT> if aliases_used: <NEW_LINE> <INDENT> warnings.warn('The keyword(s) "%s" have been deprecated in favor of "%s" respectively. ' 'See: https://wiki.gnome.org/PyGObject/InitializerDeprecations' % (', '.join(deprecated_aliases[k] for k in sorted(aliases_used)), ', '.join(sorted(aliases_used))), category, stacklevel=stacklevel) <NEW_LINE> <DEDENT> defaults_used = [] <NEW_LINE> for key, value in deprecated_defaults.items(): <NEW_LINE> <INDENT> if key not in new_kwargs: <NEW_LINE> <INDENT> new_kwargs[key] = deprecated_defaults[key] <NEW_LINE> defaults_used.append(key) <NEW_LINE> <DEDENT> <DEDENT> if defaults_used: <NEW_LINE> <INDENT> warnings.warn('Initializer is relying on deprecated non-standard ' 'defaults. Please update to explicitly use: %s ' 'See: https://wiki.gnome.org/PyGObject/InitializerDeprecations' % ', '.join('%s=%s' % (k, deprecated_defaults[k]) for k in sorted(defaults_used)), category, stacklevel=stacklevel) <NEW_LINE> <DEDENT> for key in ignore: <NEW_LINE> <INDENT> if key in new_kwargs: <NEW_LINE> <INDENT> new_kwargs.pop(key) <NEW_LINE> <DEDENT> <DEDENT> return super_init_func(self, **new_kwargs)
Initializer for a GObject based classes with support for property sets through the use of explicit keyword arguments.
625941c18c3a87329515832a
def proba_fail(origin, next_ids, graph): <NEW_LINE> <INDENT> prod = 1 <NEW_LINE> for next_id in next_ids: <NEW_LINE> <INDENT> if next_id in graph[origin]: <NEW_LINE> <INDENT> prod *= graph[origin][next_id] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prod *= 1-graph[next_id][origin] <NEW_LINE> <DEDENT> <DEDENT> return prod
Return the probability that the origin should be predicted before all the next_ids (supposing independency)
625941c1adb09d7d5db6c703
def test_partially_qualified_NS1_code(self): <NEW_LINE> <INDENT> link = Link('wikipedia:Talk:Main Page') <NEW_LINE> link.parse() <NEW_LINE> self.assertEqual(link.site, self.get_site()) <NEW_LINE> self.assertEqual(link.title, 'Talk:Main Page') <NEW_LINE> self.assertEqual(link.namespace, 4)
Test 'wikipedia:Talk:Main Page' on enwp is namespace 4.
625941c130dc7b76659018da
def _get_period(self, cr, uid, context={}): <NEW_LINE> <INDENT> account_period_obj = self.pool.get('account.period') <NEW_LINE> ids = account_period_obj.find(cr, uid, context=context) <NEW_LINE> period_id = False <NEW_LINE> if ids: <NEW_LINE> <INDENT> period_id = ids[0] <NEW_LINE> <DEDENT> return period_id
Return default account period value
625941c1f9cc0f698b14056f
def change_progress(self, received_value): <NEW_LINE> <INDENT> self.current_value += received_value <NEW_LINE> if self.current_value >= self.target_value: <NEW_LINE> <INDENT> return self._give_achievement()
>>> progress = Progress(Progress.EXPERIENCE_ID, 'Experience') >>> progress.change_progress(50) >>> progress.current_value 50
625941c126238365f5f0eddd
def test_param_dict_propagation1(): <NEW_LINE> <INDENT> default_model = Zheng07Cens() <NEW_LINE> defocc_lowmass = default_model.mean_occupation(prim_haloprop=lowmass) <NEW_LINE> alt_model = Zheng07Cens() <NEW_LINE> alt_model.param_dict['sigma_logM'] *= 2. <NEW_LINE> updated_defocc_lowmass = alt_model.mean_occupation(prim_haloprop=lowmass) <NEW_LINE> assert updated_defocc_lowmass > defocc_lowmass
Verify that directly changing model parameters without a new instantiation also behaves properly
625941c13eb6a72ae02ec449
def auto(self, minutes, steps=2000, seed=None): <NEW_LINE> <INDENT> if seed: <NEW_LINE> <INDENT> np.random.seed(seed) <NEW_LINE> <DEDENT> def run(T, steps): <NEW_LINE> <INDENT> E = self.energy() <NEW_LINE> prevState = self.copy_state(self.state) <NEW_LINE> prevEnergy = E <NEW_LINE> accepts, improves = 0, 0 <NEW_LINE> for _ in range(steps): <NEW_LINE> <INDENT> self.move() <NEW_LINE> E = self.energy() <NEW_LINE> dE = E - prevEnergy <NEW_LINE> if dE > 0.0 and math.exp(-dE / T) < np.random.random(): <NEW_LINE> <INDENT> self.state = self.copy_state(prevState) <NEW_LINE> E = prevEnergy <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> accepts += 1 <NEW_LINE> if dE < 0.0: <NEW_LINE> <INDENT> improves += 1 <NEW_LINE> <DEDENT> prevState = self.copy_state(self.state) <NEW_LINE> prevEnergy = E <NEW_LINE> <DEDENT> <DEDENT> return E, float(accepts) / steps, float(improves) / steps <NEW_LINE> <DEDENT> step = 0 <NEW_LINE> self.start = time.time() <NEW_LINE> T = 0.0 <NEW_LINE> E = self.energy() <NEW_LINE> self.update(step, T, E, None, None) <NEW_LINE> while T == 0.0: <NEW_LINE> <INDENT> step += 1 <NEW_LINE> self.move() <NEW_LINE> T = abs(self.energy() - E) <NEW_LINE> <DEDENT> E, acceptance, improvement = run(T, steps) <NEW_LINE> step += steps <NEW_LINE> while acceptance > 0.98: <NEW_LINE> <INDENT> T = round_figures(T / 1.5, 2) <NEW_LINE> E, acceptance, improvement = run(T, steps) <NEW_LINE> step += steps <NEW_LINE> self.update(step, T, E, acceptance, improvement) <NEW_LINE> <DEDENT> while acceptance < 0.98: <NEW_LINE> <INDENT> T = round_figures(T * 1.5, 2) <NEW_LINE> E, acceptance, improvement = run(T, steps) <NEW_LINE> step += steps <NEW_LINE> self.update(step, T, E, acceptance, improvement) <NEW_LINE> <DEDENT> Tmax = T <NEW_LINE> while improvement > 0.0: <NEW_LINE> <INDENT> T = round_figures(T / 1.5, 2) <NEW_LINE> E, acceptance, improvement = run(T, steps) <NEW_LINE> step += steps <NEW_LINE> self.update(step, T, E, acceptance, improvement) <NEW_LINE> <DEDENT> Tmin = T <NEW_LINE> 
elapsed = time.time() - self.start <NEW_LINE> duration = round_figures(int(60.0 * minutes * step / elapsed), 2) <NEW_LINE> return {'tmax': Tmax, 'tmin': Tmin, 'steps': duration, 'updates': self.updates}
Explores the annealing landscape and estimates optimal temperature settings. Returns a dictionary suitable for the `set_schedule` method.
625941c1e8904600ed9f1e9c
def _join_host_port(host, port): <NEW_LINE> <INDENT> template = "%s:%s" <NEW_LINE> host_requires_bracketing = ':' in host or '%' in host <NEW_LINE> if host_requires_bracketing: <NEW_LINE> <INDENT> template = "[%s]:%s" <NEW_LINE> <DEDENT> return template % (host, port)
Adapted golang's net.JoinHostPort
625941c16fb2d068a760f00d
def find_duplicates(parentFolder): <NEW_LINE> <INDENT> dups = {} <NEW_LINE> for dir_name, subdirs, fileList in os.walk(parentFolder): <NEW_LINE> <INDENT> print('Scanning %s...' % dir_name) <NEW_LINE> for filename in fileList: <NEW_LINE> <INDENT> path = os.path.join(dir_name, filename) <NEW_LINE> print(path) <NEW_LINE> file_hash = hashfile(path) <NEW_LINE> if file_hash in dups: <NEW_LINE> <INDENT> dups[file_hash].append(path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dups[file_hash] = [path] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dups
Build and return an object with a key for each unique hash, and a list of all matching files as it's value: {hash:[names]}
625941c13317a56b86939bcf
@raises(ValueError) <NEW_LINE> def test_hue_space(): <NEW_LINE> <INDENT> utils._hue_space_params("cielab")
Test that desaturation space choise is constrained.
625941c10fa83653e4656f2e
def is_working_hour(dt=datetime.datetime.now()): <NEW_LINE> <INDENT> if dt.time().hour <= 9 and dt.time().minute < 15: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif (dt.time().hour >= 11 and dt.time().minute > 30) and (dt.time().hour < 13): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif dt.time().hour >= 15 and dt.time().minute > 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
检查今天是否是交易时间段 :param dt: :return:
625941c130bbd722463cbd36
def generate_base_uri(self, uri): <NEW_LINE> <INDENT> base_uri = uri.rsplit("/", 1)[0] <NEW_LINE> return base_uri
Return dataset base URI given a uri.
625941c10383005118ecf556
def searchForEntryChanged(self, widget): <NEW_LINE> <INDENT> self.searchString = widget.get_text() <NEW_LINE> findButton = self.get_widget("findButton") <NEW_LINE> if len(self.searchString) > 0: <NEW_LINE> <INDENT> findButton.set_sensitive(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> findButton.set_sensitive(False)
Signal handler for the "changed" signal for the searchForEntry GtkEntry widget. The user has changed the string to be searched for. Arguments: - widget: the component that generated the signal.
625941c18e7ae83300e4af3e
def insert_character(_str: str, _char: str, index: int) -> str: <NEW_LINE> <INDENT> return _str[:index] + _char + _str[index:]
Insert a character at the specified point of a string
625941c15510c4643540f35c
def get_login_result_form(tel_num, user_password, validate_code): <NEW_LINE> <INDENT> post_data_dict = dict() <NEW_LINE> post_data_dict['mobileNum'] = des_encode(tel_num) <NEW_LINE> post_data_dict['servicePWD'] = des_encode(user_password) <NEW_LINE> post_data_dict['randCode'] = validate_code <NEW_LINE> post_data_dict['smsRandomCode'] = '' <NEW_LINE> post_data_dict['submitMode'] = '2' <NEW_LINE> post_data_dict['logonMode'] = '1' <NEW_LINE> post_data_dict['FieldID'] = '1' <NEW_LINE> post_data_dict['ReturnURL'] = 'www.sd.10086.cn/eMobile/jsp/common/prior.jsp' <NEW_LINE> post_data_dict['ErrorUrl'] = '../mainLogon.do' <NEW_LINE> post_data_dict['entrance'] = 'IndexBrief' <NEW_LINE> post_data_dict['codeFlag'] = '0' <NEW_LINE> post_data_dict['openFlag'] = '1' <NEW_LINE> return post_data_dict
Assemble form for get_login_result :return: form in dict
625941c1091ae35668666ed4
def finalise_episode(self): <NEW_LINE> <INDENT> pass
Do things that need to be done when an episode was finished
625941c1377c676e9127211b
@main.command(help="Delete base containers for the current research tool.") <NEW_LINE> @click.option( "-i", "--image", "images", type=EnumChoice(ImageBase), multiple=True, help="Only delete the given image. Can be given multiple times." ) <NEW_LINE> def delete(images: tp.List[ImageBase]) -> None: <NEW_LINE> <INDENT> if images: <NEW_LINE> <INDENT> delete_base_images(images) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> delete_base_images()
Delete base containers for the current research tool. Args: images: the images to delete; delete all if empty
625941c129b78933be1e5622
def __init__(self, stat_names, root): <NEW_LINE> <INDENT> self.stat_vals = {} <NEW_LINE> self.make_rows(stat_names, root) <NEW_LINE> self.make_fault_button(len(stat_names), root) <NEW_LINE> root.update_idletasks() <NEW_LINE> root.update()
makes grid entries for can msg segment names and their values
625941c1cdde0d52a9e52fa3
def base36decode(str_in): <NEW_LINE> <INDENT> return long(_codec(str_in, 36, 10))
Get a base10 number for a base36 number.
625941c1566aa707497f44df
def testFunc2(*args): <NEW_LINE> <INDENT> print(args)
Usage: param1 <int>,...... param2 <str>,...... param3 <list>,......
625941c1e1aae11d1e749c28
def get_step_from_latest_checkpoint(dir): <NEW_LINE> <INDENT> import re <NEW_LINE> import tensorflow as tf <NEW_LINE> path = tf.train.latest_checkpoint(dir) <NEW_LINE> if not path: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> found = re.search("(\d+)$", path) <NEW_LINE> return int(found.group()) if found else None
get global_step from checkpoint_path when outside of graph
625941c115baa723493c3ee6
def get_aliases(aliases_path=None): <NEW_LINE> <INDENT> global _EXECUTABLES <NEW_LINE> if _EXECUTABLES: <NEW_LINE> <INDENT> return _EXECUTABLES <NEW_LINE> <DEDENT> if not aliases_path: <NEW_LINE> <INDENT> aliases_path = ALIASES_PATH <NEW_LINE> <DEDENT> assert aliases_path is not None <NEW_LINE> _LOGGER.debug('Loading aliases path: %s', aliases_path) <NEW_LINE> exes = {} <NEW_LINE> for name in aliases_path.split(':'): <NEW_LINE> <INDENT> alias_mod = plugin_manager.load('treadmill.bootstrap', name) <NEW_LINE> exes.update(getattr(alias_mod, 'ALIASES')) <NEW_LINE> <DEDENT> tm = os.environ.get('TREADMILL') <NEW_LINE> if tm is not None: <NEW_LINE> <INDENT> exes['treadmill'] = tm <NEW_LINE> <DEDENT> _EXECUTABLES = exes <NEW_LINE> return _EXECUTABLES
Load aliases of external binaries that can invoked.
625941c13346ee7daa2b2cdd
def get_play_queue(self, queue_limit=100): <NEW_LINE> <INDENT> self.info('Getting play queue') <NEW_LINE> entries = [] <NEW_LINE> self.__loop_query_model(func=entries.append, query_model=self.__get_play_queue_model(), limit=queue_limit) <NEW_LINE> return entries
Returns the play queue, limited to 100 entries by default
625941c1cad5886f8bd26f4c
def test_Model_stochastic_train(): <NEW_LINE> <INDENT> from learning import transfer, error, validation, MLP <NEW_LINE> dataset = datasets.get_iris() <NEW_LINE> model = MLP( (len(dataset[0][0]), 3, len(dataset[1][0])), transfers=transfer.SoftmaxTransfer(), error_func=error.CrossEntropyError()) <NEW_LINE> model.stochastic_train( *dataset, error_break=0.02, pattern_selection_func=lambda X, Y: base.select_sample(X, Y, size=30), train_kwargs={'iterations': 5, 'error_break': 0.1}) <NEW_LINE> assert validation.get_error(model, *dataset) <= 0.03
Train with stochastic gradient descent.
625941c1d6c5a10208143fbb
def p_arglist18(self, p): <NEW_LINE> <INDENT> args, kwargs = self._validate_arglist_and_kwlist(p, p[1], p[5] + [p[6]]) <NEW_LINE> p[0] = Arguments(args=args, keywords=kwargs, starargs=p[3], kwargs=p[9])
arglist : arglist_list STAR test COMMA arglist_list argument COMMA DOUBLESTAR test
625941c1d268445f265b4de1
@numba.njit <NEW_LINE> def jump_process_transition(x_range, dx, jump_prob, x_bar_jump, zeta_jump, upper_edge, lower_edge): <NEW_LINE> <INDENT> dim_mu = x_range.shape[0] <NEW_LINE> transition_prob = ((1. - jump_prob) * np.identity(dim_mu) + (jump_prob * np.exp(-.5 * (x_range - x_bar_jump) ** 2 / zeta_jump ** 2) / np.sqrt(2. * np.pi * zeta_jump ** 2) * dx)).T <NEW_LINE> transition_prob[0] += upper_edge <NEW_LINE> transition_prob[-1] += lower_edge <NEW_LINE> return transition_prob
Computes the (discretized) transition probability for the MJP. :param x_range: np.array Discretization of the common process x. :param dx: float Bin size of discretization. :param jump_prob: float Probability of jumping int the interval. :param x_bar_jump: float Depreciated, is always 0. :param zeta_jump: float Depreciated, is always 1. :param upper_edge: np.array Probability mass that would run out at the upper end of discretization. :param lower_edge: np.array Probability mass that would run out at the lower end of discretization. :return: np.array Transition matrix.
625941c1f548e778e58cd4ef
def punctuations_space(text: str) -> str: <NEW_LINE> <INDENT> return SPACE_AFTER_PUNCTUATIONS_RE.sub(' ', text)
Add spaces after punctuations used in urdu writing Args: text (str): text Returns: str
625941c11b99ca400220aa23
def getBranches(): <NEW_LINE> <INDENT> repo = getRepoAuth() <NEW_LINE> branches = repo.get_branches() <NEW_LINE> branches_list = [] <NEW_LINE> for branch in branches: <NEW_LINE> <INDENT> branches_list.append(branch.name) <NEW_LINE> <DEDENT> return {"branches": branches_list}
Obtener las branches del repositorio obtenido
625941c1e5267d203edcdc12
def multiples_figaspect(n_rows, n_cols, x_range, y_range, fig_width=None, max_height=None): <NEW_LINE> <INDENT> if fig_width is None: <NEW_LINE> <INDENT> fig_width, fig_height = matplotlib.rcParams['figure.figsize'][0:1] <NEW_LINE> <DEDENT> w = float(fig_width) <NEW_LINE> n_rows, n_cols, x_range, y_range = (float(i) for i in (n_rows, n_cols, x_range, y_range)) <NEW_LINE> h = (y_range * n_rows * w) / (x_range * n_cols) <NEW_LINE> if max_height is not None: <NEW_LINE> <INDENT> max_height = float(max_height) <NEW_LINE> y_box = h / n_row <NEW_LINE> n_rows_perpage = floor(max_height / y_box) <NEW_LINE> n_pages = ceil(n_rows/n_rows_perpage) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> n_rows_perpage = n_rows <NEW_LINE> n_pages = 1 <NEW_LINE> <DEDENT> return w, h, n_rows_perpage, n_pages
DIMENSIONS, assuming no margins anywhere. Plottable area: w, h (inches) Data units: x_range, y_range (km, array shape, etc.) Subaxis count: n_cols, n_rows (laid out in a uniformly spaced grid) Subaxis size: x_box, y_box (inches) y_box = h / n_row x_box = w / n_col Assume we're working with a plottable area of fig_width (no margins). Also assume we want squared-up units (e.g, square grid pixels if x_range, y_range are nx and ny for an array, or physical units, so 1 km north = 1 km east). In that case, we want: y_range / y_box = x_range / x_box (e.g., km/in) So, given w = fig_width (taken from rcParams by default): (y_range * n_rows) / h = (x_range * n_col) / w or h = (y_range * n_rows * w) / (x_range * n_col) If we want to calculate the number of rows that can fit on a page with max_height, y_box = h / n_rows n_rows_perpage = floor(max_height / y_box) Returns: w, h, n_rows_perpage, n_pages
625941c1091ae35668666ed5
def get_ip_for_tracker(self): <NEW_LINE> <INDENT> return self.sessconfig['ip']
Returns the IP address being reported to the tracker. @return String
625941c1a05bb46b383ec796
def json_fields_required(*fields): <NEW_LINE> <INDENT> def dec(f): <NEW_LINE> <INDENT> async def res_func(request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> r = await request.json() <NEW_LINE> list_of_missing_fields = [field for field in fields if field not in r] <NEW_LINE> if list_of_missing_fields: <NEW_LINE> <INDENT> return web.json_response({'error': True, 'error_text': 'Missing fields', 'fields': list_of_missing_fields}) <NEW_LINE> <DEDENT> return await f(request) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return web.json_response({'error': True, 'error_text': 'Wrong request'}) <NEW_LINE> <DEDENT> res_func.__doc__ = f.__doc__ <NEW_LINE> return res_func <NEW_LINE> <DEDENT> return dec
decorator for required fields :param *fields: list of required fields (str type)
625941c17b25080760e393cd
def is_resource_url(url_string): <NEW_LINE> <INDENT> return isinstance(url_string, string_types) and urlparse.urlparse(url_string).scheme in ('http', 'https')
Checks if the given URL string is a resource URL. Currently, this check only looks if the URL scheme is either "http" or "https".
625941c1bd1bec0571d905a1
def get_committer_date(self): <NEW_LINE> <INDENT> return ''
There seems to be no way to create a commit with a given timestamp set for Bazar.
625941c16e29344779a62587
def do_GET(self): <NEW_LINE> <INDENT> args = self.path[1:].split("?") <NEW_LINE> if self.path != "/" and args[0] not in self.server.dispatcher.methods.keys(): <NEW_LINE> <INDENT> self.send_error(404, "Method not found: %s" % args[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.path == "/": <NEW_LINE> <INDENT> response = self.server.dispatcher.wsdl() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> req, res, doc = self.server.dispatcher.help(args[0]) <NEW_LINE> if len(args) == 1 or args[1] == "request": <NEW_LINE> <INDENT> response = req <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = res <NEW_LINE> <DEDENT> <DEDENT> self.send_response(200) <NEW_LINE> self.send_header("Content-type", "text/xml") <NEW_LINE> self.end_headers() <NEW_LINE> self.wfile.write(response)
User viewable help information and wsdl
625941c1b830903b967e9880
def bulk_update(self, tuples): <NEW_LINE> <INDENT> moves = [] <NEW_LINE> del_errors = [] <NEW_LINE> for color, x, y in tuples: <NEW_LINE> <INDENT> if color is E and not self.is_empty(x, y): <NEW_LINE> <INDENT> moves.append(golib.model.Move('np', (color, x, y))) <NEW_LINE> <DEDENT> elif color in (B, W): <NEW_LINE> <INDENT> if not self.is_empty(x, y): <NEW_LINE> <INDENT> existing_mv = self.vmanager.controller.locate(y, x) <NEW_LINE> if color is not existing_mv.color: <NEW_LINE> <INDENT> moves.append(golib.model.Move('np', (E, x, y))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> self._check_dels(x, y) <NEW_LINE> moves.append(golib.model.Move('np', (color, x, y))) <NEW_LINE> <DEDENT> except camkifu.core.DeletedError as de: <NEW_LINE> <INDENT> del_errors.append(de) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(moves): <NEW_LINE> <INDENT> self.vmanager.controller.pipe("bulk", moves) <NEW_LINE> self.vmanager.controller.pipe("auto_save") <NEW_LINE> <DEDENT> if len(del_errors): <NEW_LINE> <INDENT> msg = "Bulk_update:warning: All non-conflicting locations have been sent." <NEW_LINE> raise camkifu.core.DeletedError(del_errors, message=msg)
Indicate to the controller a series of updates have happened on the goban. May be processed asynchronously. Note: if a move points to an already occupied location but with a different color, the previous stone is removed, then the new one is added. The color can't simply be changed due to consistency issues, notably some previous kills may have to be invalidated and the history reworked accordingly. Args: tuples: [ (color1, r1, c1), (color2, r2, c2), ... ] The list of update moves. A move with color E is interpreted as a removed stone. r and c, the intersection row and column, are in numpy coord system.
625941c1090684286d50ec56
def filter_color_n(string): <NEW_LINE> <INDENT> return ANSI_ESCAPE_REGEX.subn('', string)
Filter ANSI escape codes out of |string|, returns tuple of (new string, # of ANSI codes removed)
625941c194891a1f4081ba1b
def __init__(self, file_path, parsed): <NEW_LINE> <INDENT> self.file = file_path <NEW_LINE> self.parsed = parsed
Creates a new _YamlInfo. Args: file_path: str, The full path the file that was parsed. parsed: The parsed yaml data as one of the *_info objects.
625941c192d797404e3040fc
def search_file_passwords(): <NEW_LINE> <INDENT> pwdfiles = { "LOGPWDS": {"cmd": "find /var/log -name '*.log' 2>/dev/null | xargs -l10 egrep 'pwd|password' 2>/dev/null", "msg": "Logs containing keyword 'password'", "results": []}, "CONFPWDS": {"cmd": "find /etc -name '*.c*' 2>/dev/null | xargs -l10 egrep 'pwd|password' 2>/dev/null", "msg": "Config files containing keyword 'password'", "results": []}, "SHADOW": {"cmd": "cat /etc/shadow 2>/dev/null", "msg": "Shadow File (Privileged)", "results": []} } <NEW_LINE> pwdfiles = execute_cmd(pwdfiles) <NEW_LINE> print_results(pwdfiles)
Search File for passwords (search_file_passwords) Search the identified file systems for files with potential credentials :return: None :TODO: Add searches for common cred files like ssh keys and access tokens
625941c176e4537e8c3515e3
def todate(self): <NEW_LINE> <INDENT> return self._date
Return the PartialDate as a datetime.date object with unspecified fields set to 1.
625941c163d6d428bbe44462
def UseDHCP(): <NEW_LINE> <INDENT> LCD.clear() <NEW_LINE> LCD.message('Are you sure?\nPress Sel for Y') <NEW_LINE> while 1: <NEW_LINE> <INDENT> if LCD.buttonPressed(LCD.LEFT): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if LCD.buttonPressed(LCD.SELECT): <NEW_LINE> <INDENT> LCD.clear() <NEW_LINE> LCD.message('Please reboot') <NEW_LINE> sleep(1.5) <NEW_LINE> break <NEW_LINE> <DEDENT> sleep(0.25)
Allows you to switch to a network config that uses DHCP
625941c1009cb60464c63326
def linkbdfs(filename, scandict=None, bdfdir=default_bdfdir): <NEW_LINE> <INDENT> if not scandict: <NEW_LINE> <INDENT> scandict = ps.read_scans(filename, bdfdir=bdfdir) <NEW_LINE> sourcedict = ps.read_sources(filename, bdfdir=bdfdir) <NEW_LINE> <DEDENT> ASDMBinarydir = os.path.join(os.path.basename(filename.rstrip('/')), 'ASDMBinary') <NEW_LINE> if not os.path.exists(ASDMBinarydir): <NEW_LINE> <INDENT> os.makedirs(ASDMBinarydir) <NEW_LINE> <DEDENT> for scan in scandict.keys(): <NEW_LINE> <INDENT> bdfORIG = scandict[scan]['bdfstr'].rstrip('/') <NEW_LINE> bdfLINK = os.path.join(ASDMBinarydir, os.path.basename(bdfORIG)) <NEW_LINE> logger.debug('Creating softlink %s to BDF %s' % (bdfLINK,bdfORIG) ) <NEW_LINE> if not os.path.exists(bdfLINK): <NEW_LINE> <INDENT> os.symlink(bdfORIG, bdfLINK)
Takes proto-sdm filename and makes soft links to create true sdm. scandict is optional dictionary from parsesdm that defines scans to link (and the bdf location).
625941c163f4b57ef0001092
def L2S(lunarD, lunarM, lunarY, lunarLeap, tZ=7): <NEW_LINE> <INDENT> if (lunarM < 11): <NEW_LINE> <INDENT> a11 = getLunarMonth11(lunarY - 1, tZ) <NEW_LINE> b11 = getLunarMonth11(lunarY, tZ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> a11 = getLunarMonth11(lunarY, tZ) <NEW_LINE> b11 = getLunarMonth11(lunarY + 1, tZ) <NEW_LINE> <DEDENT> k = int(0.5 + (a11 - 2415021.076998695) / 29.530588853) <NEW_LINE> off = lunarM - 11 <NEW_LINE> if (off < 0): <NEW_LINE> <INDENT> off += 12 <NEW_LINE> <DEDENT> if (b11 - a11 > 365): <NEW_LINE> <INDENT> leapOff = getLeapMonthOffset(a11, tZ) <NEW_LINE> leapM = leapOff - 2 <NEW_LINE> if (leapM < 0): <NEW_LINE> <INDENT> leapM += 12 <NEW_LINE> <DEDENT> if (lunarLeap != 0 and lunarM != leapM): <NEW_LINE> <INDENT> return [0, 0, 0] <NEW_LINE> <DEDENT> elif (lunarLeap != 0 or off >= leapOff): <NEW_LINE> <INDENT> off += 1 <NEW_LINE> <DEDENT> <DEDENT> monthStart = getNewMoonDay(k + off, tZ) <NEW_LINE> return jdToDate(monthStart + lunarD - 1)
def L2S(lunarD, lunarM, lunarY, lunarLeap, tZ = 7): Convert a lunar date to the corresponding solar date.
625941c1be8e80087fb20bb9
def xtestClassifiersA(self): <NEW_LINE> <INDENT> import orange, orngTree <NEW_LINE> data1 = orange.ExampleTable('/Volumes/xdisc/_sync/_x/src/music21Ext/mlDataSets/chinaMitteleuropa-b/chinaMitteleuropa-b1.tab') <NEW_LINE> data2 = orange.ExampleTable('/Volumes/xdisc/_sync/_x/src/music21Ext/mlDataSets/chinaMitteleuropa-b/chinaMitteleuropa-b2.tab') <NEW_LINE> majority = orange.MajorityLearner <NEW_LINE> bayes = orange.BayesLearner <NEW_LINE> tree = orngTree.TreeLearner <NEW_LINE> knn = orange.kNNLearner <NEW_LINE> for classifierType in [majority, bayes, tree, knn]: <NEW_LINE> <INDENT> print('') <NEW_LINE> for classifierData, classifierStr, matchData, matchStr in [ (data1, 'data1', data1, 'data1'), (data1, 'data1', data2, 'data2'), (data2, 'data2', data2, 'data2'), (data2, 'data2', data1, 'data1'), ]: <NEW_LINE> <INDENT> classifier = classifierType(classifierData) <NEW_LINE> mismatch = 0 <NEW_LINE> for i in range(len(matchData)): <NEW_LINE> <INDENT> c = classifier(matchData[i]) <NEW_LINE> if c != matchData[i].getclass(): <NEW_LINE> <INDENT> mismatch += 1 <NEW_LINE> <DEDENT> <DEDENT> print('%s %s: misclassified %s/%s of %s' % (classifierStr, classifierType, mismatch, len(matchData), matchStr))
Using an already created test file with a BayesLearner.
625941c1ab23a570cc2500f3
def mock_public_registry(adapter, request, *args, **kwargs): <NEW_LINE> <INDENT> resp = requests.Response() <NEW_LINE> resp.status_code = 200 <NEW_LINE> resp._content_consumed = True <NEW_LINE> if request.headers and request.headers.get('X-Docker-Token') == 'true': <NEW_LINE> <INDENT> resp.headers['x-docker-token'] = 'foobar' <NEW_LINE> <DEDENT> if request.url.endswith('deadbeef76543210/layer'): <NEW_LINE> <INDENT> resp._content = "abcdef0123456789xxxxxx=-//" <NEW_LINE> <DEDENT> elif request.url.endswith('deadbeef76543210/json'): <NEW_LINE> <INDENT> resp._content = ('{"id": "deadbeef76543210", ' '"created": "2014-02-03T16:47:06.615279788Z"}') <NEW_LINE> <DEDENT> elif request.url.endswith('deadbeef76543210/ancestry'): <NEW_LINE> <INDENT> resp._content = '["deadbeef76543210"]' <NEW_LINE> <DEDENT> elif request.url.endswith('test/tags'): <NEW_LINE> <INDENT> resp._content = ('[' '{"layer": "deadbeef76543210", "name": "latest"},' '{"layer": "deadbeef76543210", "name": "0.1.2"}' ']') <NEW_LINE> <DEDENT> elif request.url.endswith('test/tags/latest'): <NEW_LINE> <INDENT> resp._content = '[{"pk": 1234567890, "id": "deadbeef76543210"}]' <NEW_LINE> <DEDENT> elif request.url.endswith('test/tags/0.1.2'): <NEW_LINE> <INDENT> resp._content = '[{"pk": 1234567890, "id": "deadbeef76543210"}]' <NEW_LINE> <DEDENT> elif request.url.endswith('test/images'): <NEW_LINE> <INDENT> resp._content = '[{"checksum": "", "id": "deadbeef76543210"}]' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resp.status_code = 404 <NEW_LINE> <DEDENT> return resp
branch logic for DockerHub, as their endpoints are not the same.
625941c123849d37ff7b3003
def __add__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Coordinate): <NEW_LINE> <INDENT> return Surface.translate(self,other) <NEW_LINE> <DEDENT> raise NotImplementedError
Add a Surface object and a Coordinate object
625941c17047854f462a137f
def isValid_v0(self, s): <NEW_LINE> <INDENT> right2left = {")": "(", "]": "[", "}": "{"} <NEW_LINE> stack = [] <NEW_LINE> for ch in s: <NEW_LINE> <INDENT> if ch in right2left: <NEW_LINE> <INDENT> if stack and stack[-1] == right2left[ch]: <NEW_LINE> <INDENT> stack.pop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> stack.append(ch) <NEW_LINE> <DEDENT> <DEDENT> return not stack
:type s: str :rtype: bool
625941c1aad79263cf3909b1
def SetThreadDesktop(self,) -> 'None': <NEW_LINE> <INDENT> pass
Assigns this desktop to the calling thread Args: Returns: None
625941c19f2886367277a802
def get_review(self, id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._request('/rest-service/reviews-v1/%s' % id, http_handlers=CrucibleRest.http_handlers_none_on_errors) <NEW_LINE> <DEDENT> except StandardError: <NEW_LINE> <INDENT> return None
A ReviewData json if the id represents an existing review, None otherwise
625941c1ab23a570cc2500f4
def move_east(self, grid): <NEW_LINE> <INDENT> if self.dx < grid.available_width: <NEW_LINE> <INDENT> self.__move(dx=1, dy=0) <NEW_LINE> <DEDENT> sleep(0.25) <NEW_LINE> self.location = self.dx, self.dy <NEW_LINE> grid.update(self)
Method to move the player east one position Params: grid: object
625941c1cad5886f8bd26f4d
def get_size_in_bytes(self, handle): <NEW_LINE> <INDENT> fpath = self._fpath_from_handle(handle) <NEW_LINE> return self.conn.getAttributes(self.service_name, fpath).file_size
Return the size in bytes.
625941c10383005118ecf557
def print_to_stdout(level, str_out): <NEW_LINE> <INDENT> if level == NOTICE: <NEW_LINE> <INDENT> col = Fore.GREEN <NEW_LINE> <DEDENT> elif level == WARNING: <NEW_LINE> <INDENT> col = Fore.RED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> col = Fore.YELLOW <NEW_LINE> <DEDENT> if not is_py3: <NEW_LINE> <INDENT> str_out = str_out.encode(encoding, 'replace') <NEW_LINE> <DEDENT> print((col + str_out + Fore.RESET))
The default debug function
625941c132920d7e50b28141
def get_speed(self): <NEW_LINE> <INDENT> speed = self.send_command('speed?') <NEW_LINE> try: <NEW_LINE> <INDENT> speed = float(speed) <NEW_LINE> if self.imperial is True: <NEW_LINE> <INDENT> speed = round((speed / 44.704), 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> speed = round((speed / 27.7778), 1) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return speed
現在のスピードを返す Returns: int: 現在スピード, km/h または Mile/h
625941c1b5575c28eb68df72
def refpath(self, name): <NEW_LINE> <INDENT> if os.path.sep != "/": <NEW_LINE> <INDENT> name = name.replace("/", os.path.sep) <NEW_LINE> <DEDENT> return os.path.join(self.path, name)
Return the disk path of a ref.
625941c155399d3f05588626
def _determine_content_type(self): <NEW_LINE> <INDENT> ending = self._url.split(".")[-1] <NEW_LINE> if ending == 'jpg' or ending == 'png': <NEW_LINE> <INDENT> content_type = 'image/{0}'.format(ending) <NEW_LINE> <DEDENT> elif ending == 'js': <NEW_LINE> <INDENT> content_type = 'application/javascript' <NEW_LINE> <DEDENT> elif ending == 'css': <NEW_LINE> <INDENT> content_type = 'text/css' <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> content_type = 'text/html' <NEW_LINE> <DEDENT> return content_type
Determines the content type of the object's url :return: content type
625941c18c0ade5d55d3e92c
def get_gpu_memory_map():
    """Get the current gpu usage.

    From https://discuss.pytorch.org/t/access-gpu-memory-usage-in-pytorch/3192/4

    Returns
    -------
    usage: dict
        Keys are device ids as integers. Values are memory usage as
        integers in MB.
    """
    query = [
        'nvidia-smi',
        '--query-gpu=memory.used',
        '--format=csv,nounits,noheader',
    ]
    # One line of output per device, each a bare integer (MB).
    output = subprocess.check_output(query, encoding='utf-8')
    used = (int(line) for line in output.strip().split('\n'))
    return {device: mem for device, mem in enumerate(used)}
Get the current gpu usage. From https://discuss.pytorch.org/t/access-gpu-memory-usage-in-pytorch/3192/4 Returns ------- usage: dict Keys are device ids as integers. Values are memory usage as integers in MB.
625941c1bde94217f3682d66
def do_prediccion(self):
    """Copy the AEMET forecasts stored in Athena into the database."""
    sql = read_file("sql/athena/prediccion.sql").strip()
    newest = self.db.one("select max(cast(elaborado as date)) from aemet.prediccion")
    if newest is not None:
        # Only re-read a 10-day window before the newest stored forecast.
        since = newest - timedelta(days=10)
        sql += " and\n elaborado>'{:%Y-%m-%dT00:00:00}'".format(since)
    self.copy(sql, "prediccion", key="elaborado, municipio, fecha",
              overwrite=False)
Copy the AEMET forecasts stored in Athena into the database
625941c15166f23b2e1a50cc
def update_positions(self):
    """When the line needs to be redrawn, set the GraphicsEdge's new
    starting and end socket locations."""
    def _scene_pos(socket):
        # Socket positions are node-local; offset by the node's position
        # to get scene coordinates.
        pos = socket.get_socket_position()
        node_pos = socket.node.graphics_node.pos()
        pos[0] += node_pos.x()
        pos[1] += node_pos.y()
        return pos

    start = _scene_pos(self.start_socket)
    self.graphics_edge.set_source(*start)
    if self.end_socket is None:
        # Edge is still being dragged: collapse destination onto source.
        self.graphics_edge.set_destination(*start)
    else:
        self.graphics_edge.set_destination(*_scene_pos(self.end_socket))
    self.graphics_edge.update()
When the line needs to be redraw set the GraphicsEdge's new starting and end socket locations.
625941c11f037a2d8b946171
def state_definition_to_dict(state_definition):
    """Convert a state definition to the canonical dict form.

    State can either be defined in the canonical mapping form, or as a
    sequence of sub-states that are merged into one. Sub-states can be
    given as dictionaries themselves, or as tuples whose last element is
    the value and the preceding elements the keys for this value in the
    nested state dictionary.

    Raises:
        ValidationError: if a state item is defined multiple times, or
            an account contains fields other than
            balance/nonce/storage/code.
        TypeError: if *state_definition* is neither a mapping nor a
            sequence.
    """
    if isinstance(state_definition, Mapping):
        state_dict = state_definition
    elif isinstance(state_definition, Iterable):
        # Tuple items become nested single-entry dicts; mapping items
        # pass through unchanged.
        state_dicts = [
            assoc_in(
                {},
                state_item[:-1],
                state_item[-1]
            ) if not isinstance(state_item, Mapping) else state_item
            for state_item in state_definition
        ]
        if not is_cleanly_mergable(*state_dicts):
            raise ValidationError("Some state item is defined multiple times")
        state_dict = deep_merge(*state_dicts)
    else:
        # BUG FIX: this was `assert TypeError(...)`, which always passes
        # (an exception instance is truthy) and let execution fall
        # through to a NameError on `state_dict` below. Raise instead.
        raise TypeError("State definition must either be a mapping or a sequence")

    seen_keys = set(concat(d.keys() for d in state_dict.values()))
    bad_keys = seen_keys - {"balance", "nonce", "storage", "code"}
    if bad_keys:
        raise ValidationError(
            "State definition contains the following invalid account fields: {}".format(
                ", ".join(bad_keys)
            )
        )
    return state_dict
Convert a state definition to the canonical dict form. State can either be defined in the canonical form, or as a list of sub states that are then merged to one. Sub states can either be given as dictionaries themselves, or as tuples where the last element is the value and all others the keys for this value in the nested state dictionary. Example: ``` [ ("0xaabb", "balance", 3), ("0xaabb", "storage", { 4: 5, }), "0xbbcc", { "balance": 6, "nonce": 7 } ] ```
625941c138b623060ff0ad61
def read_one_file(self, data_set, f, ids_read):
    """Read data from a single file into *data_set*.

    Abstract hook: subclasses that split a DataSet across multiple data
    files override this method to handle one file at a time.

    Args:
        data_set (DataSet): the data we are reading into.
        f (file-like): open file object to read from.
        ids_read (set): array_ids that have already been read;
            implementations should skip these and add newly read ids.

    Raises:
        NotImplementedError: always, in this base class.
    """
    raise NotImplementedError
Read data from a single file into a ``DataSet``. Formatter subclasses that break a DataSet into multiple data files may choose to override either this method, which handles one file at a time, or ``read`` which finds matching files on its own. Args: data_set (DataSet): the data we are reading into. f (file-like): a file-like object to read from, as provided by ``io_manager.open``. ids_read (set): ``array_id``s that we have already read. When you read an array, check that it's not in this set (except setpoints, which can be in several files with different inner loops) then add it to the set so other files know it should not be read again. Raises: ValueError: if a duplicate array_id of measured data is found
625941c150812a4eaa59c297
def GetParentBlipId(self):
    """Returns the id of this blip's parent, or None if it is the root."""
    parent_id = self._data.parent_blip_id
    return parent_id
Returns the id of this blips parent or None if it is the root.
625941c14f6381625f1149af
def __init__(self):
    """Initialize the log wrapper with the default ("standard") format.

    The standard format is:
    '%(levelname)s: %(asctime)s: %(filename)s:%(lineno)d * %(thread)d %(message)s'
    e.g. 'INFO: 12-09 18:02:42: log.py:40 * 139814749787872 HELLO WORLD'
    """
    # Format selection; other formats may be chosen later by name.
    self.formatter = "standard"
init format - format of the log default format: %(levelname)s: %(asctime)s: %(filename)s:%(lineno)d * %(thread)d %(message)s 'INFO: 12-09 18:02:42: log.py:40 * 139814749787872 HELLO WORLD'
625941c115fb5d323cde0a80
def point_to_square(x, y):
    """Map a point (x, y) to the name of the TicTacToe grid square it
    falls in, or None if it lies outside every square (boundary lines
    between squares are excluded)."""
    names = [
        ["NorthWest", "North", "NorthEast"],
        ["West", "Center", "East"],
        ["SouthWest", "South", "SouthEast"],
    ]
    # The grid's top-left corner is (startX, startY); rows grow downward
    # (decreasing y), columns grow rightward (increasing x).
    for row in range(3):
        for col in range(3):
            left = startX + col * gridSize
            top = startY - row * gridSize
            # Strict inequalities: points exactly on a grid line match
            # no square, same as the original chain of comparisons.
            if left < x < left + gridSize and top - gridSize < y < top:
                return names[row][col]
Each logic statement checks the x, y values passed to the function against the range of x, y values for each square in the TicTacToe grid
625941c1ac7a0e7691ed4044