code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def run(self, method, *args, **kwargs): <NEW_LINE> <INDENT> event = utils.EventData() <NEW_LINE> AppHelper.callAfter(self.__ui_runner, event, method, args, kwargs) <NEW_LINE> return event.wait()
Runs the given method in the main thread :param method: The method to call :param args: Method arguments :param kwargs: Method keyword arguments :return: The result of the method
625941c230dc7b7665901918
def execute(self, args): <NEW_LINE> <INDENT> from qt4i.driver.tools.dt import DT <NEW_LINE> import pprint <NEW_LINE> for device in DT().get_devices(): <NEW_LINE> <INDENT> pprint.pprint(device)
执行过程
625941c2293b9510aa2c3248
def testStsize(self): <NEW_LINE> <INDENT> self.assertEqual(None, stsizefmt(None), "An invalid size") <NEW_LINE> self.assertEqual(None, stsizefmt("."), "An invalid size") <NEW_LINE> self.assertEqual("N/A", stsizefmt("N/A"), "Not a valid stone size") <NEW_LINE> self.assertEqual("N/A", stsizefmt("n/a"), "Not a valid stone size") <NEW_LINE> self.assertEqual("0300", stsizefmt("3tk"), "Not a valid stone size") <NEW_LINE> self.assertEqual("0500X0400X0300", stsizefmt("3x4x5mm"), "Size format") <NEW_LINE> self.assertEqual("0500X0400X0300", stsizefmt("3x4x5"), "Size format") <NEW_LINE> self.assertEqual("0530X0400X0350", stsizefmt("3.5x4.0x5.3"), "Size format") <NEW_LINE> self.assertEqual("0400", stsizefmt("4"), "Size format") <NEW_LINE> self.assertEqual("0530X0400X0350", stsizefmt("053004000350"), "Size format") <NEW_LINE> self.assertEqual("0530X0400X0350", stsizefmt("040005300350"), "Size format") <NEW_LINE> self.assertEqual("0530X0400X0350", stsizefmt("0400X0530X0350"), "Size format") <NEW_LINE> self.assertEqual("0400", stsizefmt("4m"), "Size format") <NEW_LINE> self.assertEqual("0400-0350", stsizefmt("4m-3.5m"), "Size format") <NEW_LINE> self.assertEqual("5X4X3", stsizefmt("3x4x5", True), "Size format") <NEW_LINE> self.assertEqual("5X4X3", stsizefmt("0500X0400X0300", True), "Size format") <NEW_LINE> self.assertEqual("5X4X3", stsizefmt("0300X0500X0400", True), "Size format") <NEW_LINE> self.assertEqual("1-0", stsizefmt("0~1", True)) <NEW_LINE> self.assertEqual("1", stsizefmt("1.0", True)) <NEW_LINE> self.assertEqual("1", stsizefmt("1", True)) <NEW_LINE> self.assertEqual("1.5", stsizefmt("1.5", True)) <NEW_LINE> self.assertEqual("00", stsizefmt("00", True)) <NEW_LINE> self.assertEqual("00-000", stsizefmt("000-00", True)) <NEW_LINE> self.assertEqual("0-0000", stsizefmt("0000-0", True))
test for stone size parser
625941c255399d3f05588663
@app.route('/disconnect') <NEW_LINE> def disconnect(): <NEW_LINE> <INDENT> if 'username' in login_session: <NEW_LINE> <INDENT> gdisconnect() <NEW_LINE> del login_session['gplus_id'] <NEW_LINE> del login_session['access_token'] <NEW_LINE> del login_session['username'] <NEW_LINE> del login_session['email'] <NEW_LINE> del login_session['picture'] <NEW_LINE> del login_session['user_id'] <NEW_LINE> del login_session['provider'] <NEW_LINE> flash("You have successfully been logged out.") <NEW_LINE> return redirect(url_for('carMenu')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flash("You were not logged in") <NEW_LINE> return redirect(url_for('carMenu'))
Logout the user from session and delete all information
625941c2ec188e330fd5a753
def setUp(self): <NEW_LINE> <INDENT> self.origdir = os.getcwd() <NEW_LINE> self.dirname = tempfile.mkdtemp("test_dir") <NEW_LINE> os.chdir(self.dirname)
initialize a new temporary directory.
625941c25510c4643540f399
def linear_step(self): <NEW_LINE> <INDENT> x = self.x <NEW_LINE> dt = self.dt <NEW_LINE> real = self.wf.real <NEW_LINE> imag = self.wf.imag <NEW_LINE> prev = self.wf.prev <NEW_LINE> T = self.T <NEW_LINE> V = self.V <NEW_LINE> real += dt * (T.fast(imag) + V(x) * imag) <NEW_LINE> prev[:] = imag <NEW_LINE> imag -= dt * (T.fast(real) + V(x) * real)
Make one linear step dt forward in time
625941c20a366e3fb873e7c9
@pulsar <NEW_LINE> @sdc_min_version('4.4.0') <NEW_LINE> def test_pulsar_consumer_topic_header(sdc_builder, sdc_executor, pulsar): <NEW_LINE> <INDENT> topic_name = get_random_string() <NEW_LINE> input_text = 'Hello World!' <NEW_LINE> builder = sdc_builder.get_pipeline_builder() <NEW_LINE> pulsar_consumer = builder.add_stage('Pulsar Consumer').set_attributes(subscription_name=get_random_string(), consumer_name=get_random_string(), topic=topic_name, data_format='TEXT') <NEW_LINE> wiretap = builder.add_wiretap() <NEW_LINE> pulsar_consumer >> wiretap.destination <NEW_LINE> consumer_origin_pipeline = builder.build().configure_for_environment(pulsar) <NEW_LINE> sdc_executor.add_pipeline(consumer_origin_pipeline) <NEW_LINE> client = pulsar.client <NEW_LINE> admin = pulsar.admin <NEW_LINE> try: <NEW_LINE> <INDENT> sdc_executor.start_pipeline(consumer_origin_pipeline) <NEW_LINE> producer = client.create_producer(topic_name) <NEW_LINE> producer.send(input_text.encode()) <NEW_LINE> sdc_executor.wait_for_pipeline_metric(consumer_origin_pipeline, 'input_record_count', 1) <NEW_LINE> sdc_executor.stop_pipeline(consumer_origin_pipeline) <NEW_LINE> output_records = [record.field['text'] for record in wiretap.output_records] <NEW_LINE> assert 1 == len(output_records) <NEW_LINE> assert output_records == [input_text] <NEW_LINE> assert topic_name in str(wiretap.output_records[0].header.values) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> producer.close() <NEW_LINE> client.close() <NEW_LINE> admin.delete_topic(producer.topic())
Test for Pulsar consumer origin stage. We verify that the output records contains a header with the topic Pulsar Consumer pipeline: pulsar_consumer >> wiretap
625941c2956e5f7376d70e1e
def cleanSkyline_(self, skyline): <NEW_LINE> <INDENT> if len(skyline) == 0: <NEW_LINE> <INDENT> return list() <NEW_LINE> <DEDENT> clean = [skyline[0]] <NEW_LINE> for i in range(1, len(skyline)): <NEW_LINE> <INDENT> if skyline[i][0] == clean[-1][0]: <NEW_LINE> <INDENT> y = max(skyline[i][1], clean[-1][1]) <NEW_LINE> clean[-1] = (clean[-1][0], y) <NEW_LINE> <DEDENT> elif skyline[i][1] == clean[-1][1]: <NEW_LINE> <INDENT> x = min(skyline[i][0], clean[-1][0]) <NEW_LINE> clean[-1] = (x, clean[-1][1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> clean.append(skyline[i]) <NEW_LINE> <DEDENT> <DEDENT> return clean
Cleans the skyline by removing redundant points. :type skyline: List[(int, int)] :rtype: List[(int, int)]
625941c221a7993f00bc7c9d
def test_login_route_can_be_seen(testapp): <NEW_LINE> <INDENT> response = testapp.get("/login", status=200) <NEW_LINE> html = response.html <NEW_LINE> assert len(html.find_all("input")) == 3
Can send a GET request to the login route and see three input fields.
625941c273bcbd0ca4b2c027
def edit_operator(self, event): <NEW_LINE> <INDENT> item = self.tree.item(self.tree.focus()) <NEW_LINE> item_name = item['values'][0] <NEW_LINE> print('FINISH: Load operator with name {}'.format(item_name)) <NEW_LINE> operator = self.operators.get(item_name) <NEW_LINE> self.view_operator(operator, self._update_operator)
Retrieves the operator and shows it in the editor.
625941c28c0ade5d55d3e969
def save_model(self, request, obj, form, change): <NEW_LINE> <INDENT> if not change: <NEW_LINE> <INDENT> obj.created_by = request.user <NEW_LINE> <DEDENT> obj.save()
Sets the transaction creator and saves the transaction.
625941c2be8e80087fb20bf6
def add_mkdir_exclude(self, index, dirName): <NEW_LINE> <INDENT> self.__in_excludes[index][dirName] = None
Adds a mkdir exclude. @type index: integer @param index: Index of the dump file. @type dirName: string @param dirName: Name of the directory.
625941c2d8ef3951e32434ed
def corr2d_all_spixel_vid_seq(spixel_seqs): <NEW_LINE> <INDENT> corr_patches_superpixels = [] <NEW_LINE> for i in range(spixel_seqs.shape[0]): <NEW_LINE> <INDENT> corr_patches_superpixels.append(corr2d_spixel_vid_seq(spixel_seqs[i])) <NEW_LINE> <DEDENT> corr_patches_superpixels = np.vstack(corr_patches_superpixels) <NEW_LINE> return corr_patches_superpixels
computes the 2d image correlation between successive patches along a superpixel track for each superpixel track Parameters ---------- spixel_seqs : numpy array n_spixels x n_frames number of image patch sequences. (assumes all image patches are successive temporally per superpixel) Returns ------- corr_patches_superpixels : numpy array (n_spixels x n_frames) 2d correlation values for each frame for each superpixel track.
625941c2d8ef3951e32434ee
def test_neg_metagraph_split_and_merge(): <NEW_LINE> <INDENT> from ibeis.algo.graph import demo <NEW_LINE> from ibeis.algo.graph.state import (POSTV, NEGTV, INCMP, UNREV, UNKWN) <NEW_LINE> infr = demo.demodata_infr(num_pccs=4, pcc_size=5, pos_redun=3, ignore_pair=True, infer=True) <NEW_LINE> cc_a, cc_b, cc_c, cc_d = infr.positive_components() <NEW_LINE> a1, a2, a3, a4, a5 = cc_a <NEW_LINE> b1, b2, b3, b4, b5 = cc_b <NEW_LINE> c1, c2, c3, c4, c5 = cc_c <NEW_LINE> d1, d2, d3, d4, d5 = cc_d <NEW_LINE> nmg = infr.neg_metagraph <NEW_LINE> A, B, C, D = infr.node_labels(a1, b1, c1, d1) <NEW_LINE> infr.add_feedback((a1, b1), NEGTV) <NEW_LINE> infr.add_feedback((a2, b2), NEGTV) <NEW_LINE> infr.add_feedback((a3, b3), NEGTV) <NEW_LINE> infr.add_feedback((a4, c4), NEGTV) <NEW_LINE> infr.add_feedback((b4, d4), NEGTV) <NEW_LINE> infr.add_feedback((c1, d1), NEGTV) <NEW_LINE> infr.add_feedback((a4, d4), NEGTV) <NEW_LINE> assert nmg.edges[(A, B)]['weight'] == 3 <NEW_LINE> assert nmg.edges[(A, C)]['weight'] == 1 <NEW_LINE> assert (B, C) not in nmg.edges <NEW_LINE> assert nmg.edges[(A, D)]['weight'] == 1 <NEW_LINE> assert nmg.edges[(B, D)]['weight'] == 1 <NEW_LINE> assert nmg.number_of_edges() == 5 <NEW_LINE> assert nmg.number_of_nodes() == 4 <NEW_LINE> infr.add_feedback((a1, b1), POSTV) <NEW_LINE> assert nmg.number_of_edges() == 4 <NEW_LINE> assert nmg.number_of_nodes() == 3 <NEW_LINE> AB = infr.node_label(a1) <NEW_LINE> assert nmg.edges[(AB, AB)]['weight'] == 2 <NEW_LINE> infr.add_feedback((a1, b1), INCMP) <NEW_LINE> assert nmg.number_of_edges() == 5 <NEW_LINE> assert nmg.number_of_nodes() == 4 <NEW_LINE> assert nmg.edges[(A, B)]['weight'] == 2 <NEW_LINE> infr.assert_neg_metagraph() <NEW_LINE> for edge in list(infr.pos_graph.edges()): <NEW_LINE> <INDENT> infr.add_feedback(edge, INCMP) <NEW_LINE> <DEDENT> assert nmg.number_of_nodes() == infr.neg_graph.number_of_nodes() <NEW_LINE> assert nmg.number_of_edges() == infr.neg_graph.number_of_edges() <NEW_LINE> infr.assert_neg_metagraph()
Test that the negative metagraph tracks the number of negative edges between PCCs through label-changing split and merge operations
625941c28c0ade5d55d3e96a
def __init__(self, nature: str, party: str) -> None: <NEW_LINE> <INDENT> self.nature = nature <NEW_LINE> self.party = party
Initialize a Label object. :param str nature: The identified `nature` of the element. :param str party: The identified `party` of the element.
625941c23539df3088e2e2fc
def controlled_state_transition_graph(self, driver_nodes=[]): <NEW_LINE> <INDENT> self._check_compute_variables(attractors=True) <NEW_LINE> if self.keep_constants: <NEW_LINE> <INDENT> for dv in driver_nodes: <NEW_LINE> <INDENT> if dv in self.get_constants(): <NEW_LINE> <INDENT> warnings.warn("Cannot control a constant variable '%s'! Skipping" % self.nodes[dv].name ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> cstg = copy.deepcopy(self._stg) <NEW_LINE> cstg.name = 'C-' + cstg.name + ' (' + ','.join(map(str, [self.nodes[dv].name for dv in driver_nodes])) + ')' <NEW_LINE> for statenum in range(self.Nstates): <NEW_LINE> <INDENT> binstate = self.num2bin(statenum) <NEW_LINE> controlled_states = flip_binstate_bit_set(binstate, copy.copy(driver_nodes)) <NEW_LINE> controlled_states.remove(binstate) <NEW_LINE> for constate in controlled_states: <NEW_LINE> <INDENT> cstg.add_edge(statenum, self.bin2num(constate)) <NEW_LINE> <DEDENT> <DEDENT> return cstg
Returns the Controlled State-Transition-Graph (CSTG). In practice, it copies the original STG, flips driver nodes (variables), and updates the CSTG. Args: driver_nodes (list) : The list of driver nodes. Returns: (networkx.DiGraph) : The Controlled State-Transition-Graph. See also: :func:`attractor_driver_nodes`, :func:`controlled_attractor_graph`.
625941c29c8ee82313fbb725
def __del__(self): <NEW_LINE> <INDENT> if self._owner==True: <NEW_LINE> <INDENT> f=self._link.o2scl.o2scl_free_uniform_grid_log_width_ <NEW_LINE> f.argtypes=[ctypes.c_void_p] <NEW_LINE> f(self._ptr) <NEW_LINE> self._owner=False <NEW_LINE> self._ptr=0 <NEW_LINE> <DEDENT> return
Delete function for class uniform_grid_log_width
625941c2462c4b4f79d1d681
def _toggle_show_mip(self): <NEW_LINE> <INDENT> if self._toggle_show_mip_button.text() == 'Show Max Intensity Proj': <NEW_LINE> <INDENT> self._toggle_show_mip_button.setText('Hide Max Intensity Proj') <NEW_LINE> self._images['mip'] = list() <NEW_LINE> self._images['mip_chs'] = list() <NEW_LINE> ct_min, ct_max = np.nanmin(self._ct_data), np.nanmax(self._ct_data) <NEW_LINE> for axis in range(3): <NEW_LINE> <INDENT> ct_mip_data = np.max(self._ct_data, axis=axis).T <NEW_LINE> self._images['mip'].append( self._figs[axis].axes[0].imshow( ct_mip_data, cmap='gray', aspect='auto', vmin=ct_min, vmax=ct_max, zorder=5)) <NEW_LINE> xs, ys, colors = list(), list(), list() <NEW_LINE> for name, ras in self._chs.items(): <NEW_LINE> <INDENT> xyz = self._vox <NEW_LINE> xs.append(xyz[self._xy_idx[axis][0]]) <NEW_LINE> ys.append(xyz[self._xy_idx[axis][1]]) <NEW_LINE> colors.append(_CMAP(self._groups[name])) <NEW_LINE> <DEDENT> self._images['mip_chs'].append( self._figs[axis].axes[0].imshow( self._make_ch_image(axis, proj=True), aspect='auto', extent=self._ch_extents[axis], zorder=6, cmap=_CMAP, alpha=1, vmin=0, vmax=_N_COLORS)) <NEW_LINE> <DEDENT> for group in set(self._groups.values()): <NEW_LINE> <INDENT> self._update_lines(group, only_2D=True) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for img in self._images['mip'] + self._images['mip_chs']: <NEW_LINE> <INDENT> img.remove() <NEW_LINE> <DEDENT> self._images.pop('mip') <NEW_LINE> self._images.pop('mip_chs') <NEW_LINE> self._toggle_show_mip_button.setText('Show Max Intensity Proj') <NEW_LINE> for group in set(self._groups.values()): <NEW_LINE> <INDENT> self._update_lines(group, only_2D=True) <NEW_LINE> <DEDENT> <DEDENT> self._draw()
Toggle whether the maximum-intensity projection is shown.
625941c2e1aae11d1e749c66
def _rav_getval_ ( self ) : <NEW_LINE> <INDENT> return self.getVal()
Get the value, associated with the variable >>> var = ... >>> print var.value
625941c20fa83653e4656f6d
def testBuildRev(self): <NEW_LINE> <INDENT> self.build_config['uprev'] = True <NEW_LINE> commands.UprevPackages(self.build_root, self._boards, [], enter_chroot=True) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.RunStage() <NEW_LINE> self.mox.VerifyAll()
Uprevving the build without uprevving chrome.
625941c276e4537e8c351621
def test_instances_pagination_and_filtration(self): <NEW_LINE> <INDENT> items_per_page = 1 <NEW_LINE> instance_count = 2 <NEW_LINE> instance_list = ["{0}-{1}".format(self.INSTANCE_NAME, item) for item in range(1, instance_count + 1)] <NEW_LINE> first_page_definition = {'Next': True, 'Prev': False, 'Count': items_per_page, 'Names': [instance_list[1]]} <NEW_LINE> second_page_definition = {'Next': False, 'Prev': False, 'Count': items_per_page, 'Names': [instance_list[0]]} <NEW_LINE> filter_first_page_definition = {'Next': False, 'Prev': False, 'Count': items_per_page, 'Names': [instance_list[1]]} <NEW_LINE> settings_page = self.home_pg.go_to_settings_usersettingspage() <NEW_LINE> settings_page.change_pagesize(items_per_page) <NEW_LINE> self.assertTrue( settings_page.find_message_and_dismiss(messages.SUCCESS)) <NEW_LINE> instances_page = self.home_pg.go_to_project_compute_instancespage() <NEW_LINE> instances_page.create_instance(self.INSTANCE_NAME, instance_count=instance_count) <NEW_LINE> self.assertTrue( instances_page.find_message_and_dismiss(messages.INFO)) <NEW_LINE> self.assertTrue(instances_page.is_instance_active(instance_list[1])) <NEW_LINE> instances_page = self.instances_page <NEW_LINE> instances_page.instances_table.set_filter_value('name') <NEW_LINE> instances_page.instances_table.filter(instance_list[1]) <NEW_LINE> instances_page.instances_table.assert_definition( filter_first_page_definition, sorting=True) <NEW_LINE> instances_page.instances_table.filter(instance_list[0]) <NEW_LINE> instances_page.instances_table.assert_definition( second_page_definition, sorting=True) <NEW_LINE> instances_page.instances_table.filter(self.INSTANCE_NAME) <NEW_LINE> instances_page.instances_table.assert_definition( first_page_definition, sorting=True) <NEW_LINE> instances_page.instances_table.filter('') <NEW_LINE> settings_page = self.home_pg.go_to_settings_usersettingspage() <NEW_LINE> settings_page.change_pagesize() <NEW_LINE> self.assertTrue( 
settings_page.find_message_and_dismiss(messages.SUCCESS)) <NEW_LINE> instances_page = self.instances_page <NEW_LINE> instances_page.delete_instances(instance_list) <NEW_LINE> self.assertTrue( instances_page.find_message_and_dismiss(messages.INFO)) <NEW_LINE> self.assertTrue(instances_page.are_instances_deleted(instance_list))
This test checks instance pagination and filtration Steps: 1) Login to Horizon Dashboard as admin 2) Go to to user settings page 3) Change 'Items Per Page' value to 1 4) Go to Project > Compute > Instances page 5) Create 2 instances 6) Go to Admin > Compute > Instances page 7) Check filter by Name of the first and the second instance in order to have one instance in the list (and it should have correct name) and no 'Next' link is available 8) Check filter by common part of Name of in order to have one instance in the list (and it should have correct name) and 'Next' link is available on the first page and is not available on the second page 9) Go to user settings page and restore 'Items Per Page' 10) Delete created instances via proper page
625941c2e64d504609d747f1
def _handle_mouse_press(self, event_data): <NEW_LINE> <INDENT> if not self._handler_enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for mouse_press_handler in self._mouse_press_handlers: <NEW_LINE> <INDENT> if mouse_press_handler(event_data): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> if event_data.inaxes == self._render_handler.get_slider_axes(): <NEW_LINE> <INDENT> self._animation_handler.stop()
Handle Matplotlib button_press_event for WindowManager Instance Parameters: * event_data - An object representing the key press event data
625941c26fece00bbac2d6ee
def setup_lrn_rate(self, global_step): <NEW_LINE> <INDENT> batch_size = FLAGS.batch_size * (1 if not FLAGS.enbl_multi_gpu else mgw.size()) <NEW_LINE> if FLAGS.mobilenet_version == 1: <NEW_LINE> <INDENT> nb_epochs = 100 <NEW_LINE> idxs_epoch = [30, 60, 80, 90] <NEW_LINE> decay_rates = [1.0, 0.1, 0.01, 0.001, 0.0001] <NEW_LINE> lrn_rate = setup_lrn_rate_piecewise_constant(global_step, batch_size, idxs_epoch, decay_rates) <NEW_LINE> nb_iters = int(FLAGS.nb_smpls_train * nb_epochs * FLAGS.nb_epochs_rat / batch_size) <NEW_LINE> <DEDENT> elif FLAGS.mobilenet_version == 2: <NEW_LINE> <INDENT> nb_epochs = 412 <NEW_LINE> epoch_step = 2.5 <NEW_LINE> decay_rate = 0.98 ** epoch_step <NEW_LINE> lrn_rate = setup_lrn_rate_exponential_decay(global_step, batch_size, epoch_step, decay_rate) <NEW_LINE> nb_iters = int(FLAGS.nb_smpls_train * nb_epochs * FLAGS.nb_epochs_rat / batch_size) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('invalid MobileNet version: {}'.format(FLAGS.mobilenet_version)) <NEW_LINE> <DEDENT> return lrn_rate, nb_iters
Setup the learning rate (and number of training iterations).
625941c2d10714528d5ffc92
def get_invoke_query(self, **kwargs): <NEW_LINE> <INDENT> operation = kwargs.get('operation', None) <NEW_LINE> if operation.lower() == "preapproval": <NEW_LINE> <INDENT> return self.get_invoke_preapproval_query(**kwargs) <NEW_LINE> <DEDENT> if operation.lower() == "preapprovaldetails": <NEW_LINE> <INDENT> return self.get_invoke_preapproval_details_query(**kwargs)
docstring for api payload
625941c260cbc95b062c64f3
def MP(candidate, references, n): <NEW_LINE> <INDENT> counts = Counter(ngrams(candidate, n)) <NEW_LINE> if not counts: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> max_counts = {} <NEW_LINE> for reference in references: <NEW_LINE> <INDENT> reference_counts = Counter(ngrams(reference, n)) <NEW_LINE> for ngram in counts: <NEW_LINE> <INDENT> max_counts[ngram] = max(max_counts.get(ngram, 0), reference_counts[ngram]) <NEW_LINE> <DEDENT> <DEDENT> clipped_counts = dict((ngram, min(count, max_counts[ngram])) for ngram, count in counts.items()) <NEW_LINE> return sum(clipped_counts.values()) / sum(counts.values())
calculate modified precision
625941c25fcc89381b1e166e
def starts_with_ignore_case(string: str, substring: str) -> bool: <NEW_LINE> <INDENT> return re.match(substring, string, re.I)
Checks if 'string' starts with 'substring' ignoring case Args: string: full string substring: sub string Returns: True if 'string' starts with 'substring' ignoring case
625941c2097d151d1a222e0c
def get(self, keylist): <NEW_LINE> <INDENT> return {}
Retreive a list of values as a python dict
625941c2656771135c3eb81d
def entropy_of_vector(vector): <NEW_LINE> <INDENT> entropy = 0 <NEW_LINE> for p in vector: <NEW_LINE> <INDENT> if p != 0: <NEW_LINE> <INDENT> entropy += p * math.log2(p) <NEW_LINE> <DEDENT> <DEDENT> return -entropy
Compute the entropy of a probability vector :param vector: the probability vector (example: [.1, .5, .4]) :return: the entropy of the vector (log base 2)
625941c250485f2cf553cd4a
def syntax(retCode): <NEW_LINE> <INDENT> print("arcgiscsv_report outputs VISTA table files in the current directory for an ArcGIS CSV ") <NEW_LINE> print("track.") <NEW_LINE> print() <NEW_LINE> print("Usage:") <NEW_LINE> print(" python arcgiscsv_report.py dbServer network user password arcgiscsvFile arcgiscsvPathMatch") <NEW_LINE> print(" [-p] [-s sourceID] -t refDateTime [-e endTime]") <NEW_LINE> print() <NEW_LINE> print("where:") <NEW_LINE> print(" -p outputs a problem report (suppresses other output)") <NEW_LINE> print(" -s is the sourceID to report in the travel_time output (0 by default)") <NEW_LINE> print(" -t is the zero-reference time that all arrival time outputs are related to.") <NEW_LINE> print(" (Note that the day is ignored.) Use the format HH:MM:SS.") <NEW_LINE> print(" -e is the end time in seconds (86400 by default)") <NEW_LINE> sys.exit(retCode)
Print usage information
625941c24d74a7450ccd4174
def R1bars(simulations, data): <NEW_LINE> <INDENT> denom = np.sum([np.sum(np.sqrt(1/dataset.error)*np.sqrt(dataset.y)) for dataset in data if dataset.use]) <NEW_LINE> return [1.0/denom*np.sqrt(1/dataset.error)* (np.sqrt(dataset.y) - np.sqrt(sim)) for (dataset, sim) in zip(data,simulations)]
Weighted crystallographic R-factor (R1)
625941c27d43ff24873a2c50
def insertImageFile(self, parentWindow, imageContext): <NEW_LINE> <INDENT> file = None <NEW_LINE> wildcard = u"Image files|*.gif;*.jpg;*.png;*.jpeg" <NEW_LINE> dialog = wx.FileDialog(parentWindow, u"Choose an image file.", u"", u"", wildcard, wx.OPEN) <NEW_LINE> if dialog.ShowModal() == wx.ID_OK: <NEW_LINE> <INDENT> file = getNoneString(dialog.GetPath()) <NEW_LINE> <DEDENT> dialog.Destroy() <NEW_LINE> if file: <NEW_LINE> <INDENT> (shortName, absPath, size, schemaDate) = getFileMetaData(file) <NEW_LINE> if shortName: <NEW_LINE> <INDENT> shortName = convertToUnicode(shortName) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shortName = u"" <NEW_LINE> <DEDENT> url = getUriFromFilePath(absPath) <NEW_LINE> attrs = { u"src" : url, u"alt" : shortName} <NEW_LINE> imageContext.insertImage(attrs)
insertImageFile(wxWindow, IZXHTMLEditControlImageContext) -> void Shows the File open dialog to display an image.
625941c2fbf16365ca6f6171
def calVolatilityRatio(macroEcoData, stockData, N=10): <NEW_LINE> <INDENT> result = [] <NEW_LINE> dates = [] <NEW_LINE> for dtt in macroEcoData.index[:]: <NEW_LINE> <INDENT> if dtt in stockData.index: <NEW_LINE> <INDENT> indexToUse = stockData.index.get_loc(dtt) <NEW_LINE> newStock = stockData[indexToUse - N:indexToUse] <NEW_LINE> todaysRange = stockData.iloc[indexToUse]["High"] - stockData.iloc[indexToUse]["Low"] <NEW_LINE> nPastDaysTrueRange = newStock["High"].max() - newStock["Low"].min() <NEW_LINE> volatilityRatio = todaysRange / nPastDaysTrueRange <NEW_LINE> result.append(volatilityRatio) <NEW_LINE> dates.append(dtt) <NEW_LINE> <DEDENT> <DEDENT> newEstDict = pd.DataFrame(index=dates) <NEW_LINE> newEstDict["Volatility_Ratio"] = result <NEW_LINE> newEstDict.dropna(inplace=True) <NEW_LINE> return newEstDict
Function to Calculate Volatility Ratio. Args: param1: MacroEconomic Data, Stock Data and N-Days Returns: A DataFrame of Volatility Ratio for the MacroEconomic Data Selected.
625941c20383005118ecf595
@pytest.mark.parametrize('table_data,expected_w,expected_h', [ ([[]], [], [0]), ([['']], [0], [0]), ([['', '']], [0, 0], [0]), ([[], []], [], [0, 0]), ([[''], ['']], [0], [0, 0]), ([['', ''], ['', '']], [0, 0], [0, 0]), ]) <NEW_LINE> def test_zero_length(table_data, expected_w, expected_h): <NEW_LINE> <INDENT> actual = max_dimensions(table_data) <NEW_LINE> assert actual == (expected_w, expected_h, expected_w, expected_h)
Test zero-length or empty tables. :param list table_data: Input table data to test. :param list expected_w: Expected widths. :param list expected_h: Expected heights.
625941c2a79ad161976cc0f6
def delete(self): <NEW_LINE> <INDENT> l, r = self._input_index('输入删除区间, 如:10 20\n') <NEW_LINE> confirm = input('是否删除记录%d-%d\n确认(y),任意键取消.\n' % (l, r)) <NEW_LINE> if confirm != 'y': <NEW_LINE> <INDENT> print('已放弃删除!') <NEW_LINE> return <NEW_LINE> <DEDENT> for i in range(r-l+1): <NEW_LINE> <INDENT> print('成功删除:', l+i, self._get_log(l-1)) <NEW_LINE> self._logs.pop(l-1) <NEW_LINE> self._count -= 1 <NEW_LINE> <DEDENT> self._save()
删除日志
625941c26e29344779a625c5
def delete_pet_by_id(self, data_json): <NEW_LINE> <INDENT> response = requests.delete(self.url + '/pet/' + str(json.loads(data_json)['id'])) <NEW_LINE> return response
method for deleting pet by id :param data_json: :return:
625941c2046cf37aa974ccfa
def generateParenthesis(self, n): <NEW_LINE> <INDENT> res = self.generateParenthesisHelper(n) <NEW_LINE> if n == 0: <NEW_LINE> <INDENT> return [''] <NEW_LINE> <DEDENT> return list(res)
:type n: int :rtype: List[str]
625941c250812a4eaa59c2d4
def GetTargetCompileTool(name, cross_compile=None): <NEW_LINE> <INDENT> env = dict(os.environ) <NEW_LINE> target_name = None <NEW_LINE> extra_args = [] <NEW_LINE> if name in ('as', 'ld', 'cc', 'cpp', 'ar', 'nm', 'ldr', 'strip', 'objcopy', 'objdump', 'dtc'): <NEW_LINE> <INDENT> target_name, *extra_args = env.get(name.upper(), '').split(' ') <NEW_LINE> <DEDENT> elif name == 'c++': <NEW_LINE> <INDENT> target_name, *extra_args = env.get('CXX', '').split(' ') <NEW_LINE> <DEDENT> if target_name: <NEW_LINE> <INDENT> return target_name, extra_args <NEW_LINE> <DEDENT> if cross_compile is None: <NEW_LINE> <INDENT> cross_compile = env.get('CROSS_COMPILE', '') <NEW_LINE> <DEDENT> if not cross_compile: <NEW_LINE> <INDENT> return name, [] <NEW_LINE> <DEDENT> if name in ('as', 'ar', 'nm', 'ldr', 'strip', 'objcopy', 'objdump'): <NEW_LINE> <INDENT> target_name = cross_compile + name <NEW_LINE> <DEDENT> elif name == 'ld': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if Run(cross_compile + 'ld.bfd', '-v'): <NEW_LINE> <INDENT> target_name = cross_compile + 'ld.bfd' <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> target_name = cross_compile + 'ld' <NEW_LINE> <DEDENT> <DEDENT> elif name == 'cc': <NEW_LINE> <INDENT> target_name = cross_compile + 'gcc' <NEW_LINE> <DEDENT> elif name == 'cpp': <NEW_LINE> <INDENT> target_name = cross_compile + 'gcc' <NEW_LINE> extra_args = ['-E'] <NEW_LINE> <DEDENT> elif name == 'c++': <NEW_LINE> <INDENT> target_name = cross_compile + 'g++' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target_name = name <NEW_LINE> <DEDENT> return target_name, extra_args
Get the target-specific version for a compile tool This first checks the environment variables that specify which version of the tool should be used (e.g. ${CC}). If those aren't specified, it checks the CROSS_COMPILE variable as a prefix for the tool with some substitutions (e.g. "${CROSS_COMPILE}gcc" for cc). The following table lists the target-specific versions of the tools this function resolves to: Compile Tool | First choice | Second choice --------------+----------------+---------------------------- as | ${AS} | ${CROSS_COMPILE}as ld | ${LD} | ${CROSS_COMPILE}ld.bfd | | or ${CROSS_COMPILE}ld cc | ${CC} | ${CROSS_COMPILE}gcc cpp | ${CPP} | ${CROSS_COMPILE}gcc -E c++ | ${CXX} | ${CROSS_COMPILE}g++ ar | ${AR} | ${CROSS_COMPILE}ar nm | ${NM} | ${CROSS_COMPILE}nm ldr | ${LDR} | ${CROSS_COMPILE}ldr strip | ${STRIP} | ${CROSS_COMPILE}strip objcopy | ${OBJCOPY} | ${CROSS_COMPILE}objcopy objdump | ${OBJDUMP} | ${CROSS_COMPILE}objdump dtc | ${DTC} | (no CROSS_COMPILE version) Args: name: Command name to run Returns: target_name: Exact command name to run instead extra_args: List of extra arguments to pass
625941c299cbb53fe6792b98
def test_traverse_partial_match(self): <NEW_LINE> <INDENT> status, node, path = self.rootpub.traverse(u'/nodeX') <NEW_LINE> self.assertEqual(status, TRAVERSE_STATUS.PARTIAL) <NEW_LINE> self.assertEqual(node, self.root) <NEW_LINE> self.assertEqual(path, '/nodeX') <NEW_LINE> status, node, path = self.rootpub.traverse(u'/node3/node4') <NEW_LINE> self.assertEqual(status, TRAVERSE_STATUS.PARTIAL) <NEW_LINE> self.assertEqual(node, self.root) <NEW_LINE> self.assertEqual(path, '/node3/node4') <NEW_LINE> status, node, path = self.rootpub.traverse(u'/node2/node4') <NEW_LINE> self.assertEqual(status, TRAVERSE_STATUS.PARTIAL) <NEW_LINE> self.assertEqual(node, self.node2) <NEW_LINE> self.assertEqual(path, '/node4')
Test for partial path matching.
625941c2596a897236089a74
def create_outname(outdir, infile): <NEW_LINE> <INDENT> basename = os.path.basename(infile) <NEW_LINE> outname = basename + '.out' <NEW_LINE> return os.path.join(outdir, outname)
Create the output file's name.
625941c2d486a94d0b98e0f6
def setup_synth(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self._libfluidsynth_path is not None: <NEW_LINE> <INDENT> log.info("Using libfluidsynth: " + self._libfluidsynth_path) <NEW_LINE> <DEDENT> self._fluidhandle = fluidhandle.FluidHandle(self._libfluidsynth_path) <NEW_LINE> self._fluidsettings = fluidsettings.FluidSettings(self._fluidhandle) <NEW_LINE> self._fluidsettings['synth.gain'] = 0.2 <NEW_LINE> self._fluidsynth = fluidsynth.FluidSynth(self._fluidhandle, self._fluidsettings) <NEW_LINE> self._load_soundfont() <NEW_LINE> self._fluidsettings['audio.driver'] = 'alsa' <NEW_LINE> self._driver = fluidaudiodriver.FluidAudioDriver(self._fluidhandle, self._fluidsynth, self._fluidsettings) <NEW_LINE> <DEDENT> except (AttributeError, FluidError) as e: <NEW_LINE> <INDENT> message = 'Failed to setup fluidsynth: ' + str(e) + '. Audio output will be disabled.' <NEW_LINE> log.warning(message) <NEW_LINE> log.debug(traceback.format_exc()) <NEW_LINE> raise PlayerException(message) <NEW_LINE> <DEDENT> self._no_sound = False <NEW_LINE> self.select_instrument(self._instrument)
Setup the synth so that it can produce sound. Raises: PlayerException: an error occured during the synth setup. Most common errors: fluidsynth library not found, soundfont not found.
625941c2ad47b63b2c509f31
def leerArchivo(pnombreArchivo): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> print("Se va a abrir el archivo: ", pnombreArchivo) <NEW_LINE> f=open(pnombreArchivo,"rb") <NEW_LINE> print("Se va a leer el archivo: ", pnombreArchivo) <NEW_LINE> pickle.load(f) <NEW_LINE> print("Se va a cerrar el archivo: ", pnombreArchivo) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return{} <NEW_LINE> <DEDENT> return
Función: Cargar y leer datos del archivo seleccionado Entradas: Nombre del archivo (binario) Salidas: NA
625941c294891a1f4081ba59
def __DoShowEditProgress(self): <NEW_LINE> <INDENT> song = viewmgr.Get()._selectedSong <NEW_LINE> if song: <NEW_LINE> <INDENT> viewmgr.signalSongEditProgress(song)
The edit progress panel needs to be visible, and shown. We do this through the view mgr because on this level we do not have access to that frame
625941c2d4950a0f3b08c302
def test_obtener_cantidad_personas_en_hotel_desde_ciudad_en_rango_anios_mensualmente(self): <NEW_LINE> <INDENT> query_string = [('AnioInicio', 2002), ('AnioFin', 2004)] <NEW_LINE> response = self.client.open('/server/INE/Operaciones/ObtenerCantidadPersonasEnHotelDesdeCiudadEnRangoAniosMensualmente/{CiudadOrigen}'.format(CiudadOrigen='CiudadOrigen_example'), method='GET', query_string=query_string) <NEW_LINE> self.assert200(response, "Response body is : " + response.data.decode('utf-8'))
Test case for obtener_cantidad_personas_en_hotel_desde_ciudad_en_rango_anios_mensualmente Dado una ciudad y un rango de años obtiene la cantidad total de personas que van a hoteles desde esa ciudad de forma mensual
625941c2287bf620b61d3a16
def drawXLine(name): <NEW_LINE> <INDENT> print(len(name) * "X")
Draws a line of X
625941c2bf627c535bc1317f
def get_batch(self, partition): <NEW_LINE> <INDENT> if partition == 'train': <NEW_LINE> <INDENT> audio_paths = self.train_audio_paths <NEW_LINE> cur_index = self.cur_train_index <NEW_LINE> texts = self.train_texts <NEW_LINE> <DEDENT> elif partition == 'valid': <NEW_LINE> <INDENT> audio_paths = self.valid_audio_paths <NEW_LINE> cur_index = self.cur_valid_index <NEW_LINE> texts = self.valid_texts <NEW_LINE> <DEDENT> elif partition == 'test': <NEW_LINE> <INDENT> audio_paths = self.test_audio_paths <NEW_LINE> cur_index = self.cur_test_index <NEW_LINE> texts = self.test_texts <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Invalid partition. " "Must be train/validation") <NEW_LINE> <DEDENT> features = [self.normalize(self.featurize(a)) for a in audio_paths[cur_index:cur_index+self.minibatch_size]] <NEW_LINE> max_length = max([features[i].shape[0] for i in range(0, self.minibatch_size)]) <NEW_LINE> max_string_length = max([len(texts[cur_index+i]) for i in range(0, self.minibatch_size)]) <NEW_LINE> X_data = np.zeros([self.minibatch_size, max_length, self.feat_dim*self.spectrogram + self.mfcc_dim*(not self.spectrogram)]) <NEW_LINE> labels = np.ones([self.minibatch_size, max_string_length]) * 28 <NEW_LINE> input_length = np.zeros([self.minibatch_size, 1]) <NEW_LINE> label_length = np.zeros([self.minibatch_size, 1]) <NEW_LINE> for i in range(0, self.minibatch_size): <NEW_LINE> <INDENT> feat = features[i] <NEW_LINE> input_length[i] = feat.shape[0] <NEW_LINE> X_data[i, :feat.shape[0], :] = feat <NEW_LINE> label = np.array(text_to_int_sequence(texts[cur_index+i])) <NEW_LINE> labels[i, :len(label)] = label <NEW_LINE> label_length[i] = len(label) <NEW_LINE> <DEDENT> outputs = {'ctc': np.zeros([self.minibatch_size])} <NEW_LINE> inputs = {'the_input': X_data, 'the_labels': labels, 'input_length': input_length, 'label_length': label_length } <NEW_LINE> return (inputs, outputs)
Obtain a batch of train, validation, or test data
625941c2b545ff76a8913dc7
def attempt_recovery(self, task, args=None, kwargs=None, einfo=None, retval=None): <NEW_LINE> <INDENT> pass
Here you can try to recover from errors that Celery couldn't deal with. Example: if isinstance(retval, SoftTimeLimitExceeded): # half the number of processed objects first_half, second_half = args[0][0:len(args[0])/2], args[0][len(args[2]/2):] # resubmit args[0] = first_half task.apply_async(args=args, kwargs=kwargs) args[0] = second_half task.apply_async(args=args, kwargs=kwargs) Returns: are ignored
625941c2d58c6744b4257c11
def get_channel_info(channel_id: str) -> Dict[str, str]: <NEW_LINE> <INDENT> url = get_channel_video_url(channel_id) <NEW_LINE> soup = get_soup(url) <NEW_LINE> if soup is None: <NEW_LINE> <INDENT> logger.error("Got invalid response for channel: {url}") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> profile = soup.find(id="channelsProfile") <NEW_LINE> if profile is None: <NEW_LINE> <INDENT> logger.info(f"Couldn't get info for channel: {url}") <NEW_LINE> check_logged_out(soup) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> header = profile.find("div", {"class": "header"}) <NEW_LINE> wrapper = profile.find("div", {"class": "bottomExtendedWrapper"}) <NEW_LINE> title = profile.find("div", {"class": "title"}) <NEW_LINE> name = title.find("h1").text.strip() <NEW_LINE> name = name.replace(" ", "_") <NEW_LINE> name = re.sub(r"[\W]+", "_", name) <NEW_LINE> return {"name": name}
Get meta information from channel website.
625941c21f037a2d8b9461af
def pad(data, pad_width, pad_value=0, pad_mode="constant"): <NEW_LINE> <INDENT> if isinstance(pad_width, Expr) or (isinstance(pad_value, Expr)): <NEW_LINE> <INDENT> if not isinstance(pad_width, Expr): <NEW_LINE> <INDENT> pad_width = const(list(pad_width)) <NEW_LINE> <DEDENT> if not isinstance(pad_value, Expr): <NEW_LINE> <INDENT> pad_value = const(pad_value) <NEW_LINE> <DEDENT> return _dyn_make.pad(data, pad_width, pad_value, pad_mode) <NEW_LINE> <DEDENT> return _make.pad(data, pad_width, pad_value, pad_mode)
Padding This operator takes in a tensor and pads each axis by the specified widths using the specified value. Parameters ---------- data: tvm.relay.Expr The input data to the operator pad_width: tuple of <tuple of <int>>, or tvm.relay.Expr, required Number of values padded to the edges of each axis, in the format of ((before_1, after_1), ..., (before_N, after_N)) pad_value: float, or tvm.relay.Expr, optional, default=0 The value used for padding pad_mode: 'constant', 'edge', 'reflect' 'constant' pads with constant_value pad_value 'edge' pads using the edge values of the input array 'reflect' pads by reflecting values with respect to the edge Returns ------- result : tvm.relay.Expr The computed result.
625941c2283ffb24f3c558b4
def get_window(self): <NEW_LINE> <INDENT> return [self.x_lower , self.x_upper , self.y_lower , self.y_upper]
This function get the coordinates of the window as a list
625941c27b25080760e3940b
def uncondition(self) -> Query: <NEW_LINE> <INDENT> return Query( outcomes=self.outcomes | self.conditions, treatments=self.treatments, conditions=None, )
Move the conditions to outcomes.
625941c2a17c0f6771cbe003
def kickstart(ks, uploaded, path=settings.KS_DIR): <NEW_LINE> <INDENT> if not uploaded: <NEW_LINE> <INDENT> ks = "%s%s" % (path, ks) <NEW_LINE> <DEDENT> ksparser = DecoratedKickstartParser(makeVersion()) <NEW_LINE> ksparser.readKickstart(ks) <NEW_LINE> return ksparser
return parsed pykickstart object
625941c245492302aab5e273
@require(gribid=int, fileobj=file) <NEW_LINE> def grib_write(gribid, fileobj): <NEW_LINE> <INDENT> GRIB_CHECK(_internal.grib_c_write(gribid, fileobj))
@brief Write a message to a file.  Examples: ef set.py "set.py" @param gribid id of the grib loaded in memory @param fileobj python file object @exception GribInternalError
625941c2046cf37aa974ccfb
def check_win(self): <NEW_LINE> <INDENT> arr = self.arr <NEW_LINE> if arr[0][0] == arr[0][1] == arr[0][2] != '.': <NEW_LINE> <INDENT> return arr[0][0] <NEW_LINE> <DEDENT> elif arr[1][0] == arr[1][1] == arr[1][2] != '.': <NEW_LINE> <INDENT> return arr[1][0] <NEW_LINE> <DEDENT> elif arr[2][0] == arr[2][1] == arr[2][2] != '.': <NEW_LINE> <INDENT> return arr[2][0] <NEW_LINE> <DEDENT> elif arr[0][0] == arr[1][0] == arr[2][0] != '.': <NEW_LINE> <INDENT> return arr[0][0] <NEW_LINE> <DEDENT> elif arr[0][1] == arr[1][1] == arr[2][1] != '.': <NEW_LINE> <INDENT> return arr[0][1] <NEW_LINE> <DEDENT> elif arr[0][2] == arr[1][2] == arr[2][2] != '.': <NEW_LINE> <INDENT> return arr[0][2] <NEW_LINE> <DEDENT> elif arr[0][0] == arr[1][1] == arr[2][2] != '.': <NEW_LINE> <INDENT> return arr[0][0] <NEW_LINE> <DEDENT> elif arr[0][2] == arr[1][1] == arr[2][0] != '.': <NEW_LINE> <INDENT> return arr[0][2] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '.'
Checks if there is a win!
625941c20a50d4780f666e42
def getTables(self): <NEW_LINE> <INDENT> return self.tables
Returns container tables.
625941c28c3a87329515836a
def test_nested_forall(): <NEW_LINE> <INDENT> l1 = Variable('l1',Variable.Real) <NEW_LINE> l2 = Variable('l2',Variable.Real) <NEW_LINE> ox = Variable('ox',Variable.Real) <NEW_LINE> oy = Variable('oy',Variable.Real) <NEW_LINE> param_bounds = logical_and(l1 > 0, l1 < 20, l2 > 0, l2 < 20, ox > -20, ox < 20, oy > -20, oy < 20) <NEW_LINE> px = Variable('px',Variable.Real) <NEW_LINE> py = Variable('py',Variable.Real) <NEW_LINE> ivar_bounds = logical_and(px > 20, px < 30, py > 20, py < 30) <NEW_LINE> ivar_assum = sqrt((25 - px)**2 + (25 - py)**2) < 4 <NEW_LINE> t1 = Variable('t1',Variable.Real) <NEW_LINE> t2 = Variable('t2',Variable.Real) <NEW_LINE> dvar_bounds = logical_and(t1 >= -3.14, t1 <= 3.14, t2 >= -3.14, t2 <= 3.14, t1 >= t2) <NEW_LINE> req = logical_and(px == l1*sin(t1) + l2*sin(t2) + ox, py == l1*cos(t1) + l2*cos(t2) + oy) <NEW_LINE> def exists(vs, fun): <NEW_LINE> <INDENT> return logical_not(forall(vs, logical_not(fun))) <NEW_LINE> <DEDENT> fun = logical_and(param_bounds, forall([px,py], logical_imply(logical_and(ivar_bounds, ivar_assum), exists([t1,t2], logical_and(dvar_bounds,req))))) <NEW_LINE> result = CheckSatisfiability(fun, 0.01) <NEW_LINE> print(result) <NEW_LINE> assert False
Test whether we can choose parameters for a system, such that there is a satisfying solution over an entire range of values. Basically, choose an origin (ox,oy) and the lengths of two arms (l1 & l2) so that there is an angle (t1 & t2) for each arm, that allows it to reach any point within a circle centered at (25,25) with radius 4. We want to make sure that the lengths and origin the tool chooses allows for > there should: > exists. l1 in (0,20) , l2 (0,20), ox (-20,20), oy (-20,20) > > such that: > for all. px in (20, 30), py in (20,30) > > given assumptions: > sqrt((25 - px)**2 + (25 - py)**2) <= 4 > > there should: > exists. t1 in (-pi, pi), t2 in (-pi,pi) > > with constraints: > t2 > t1 > > that meets the requirements: > (px == l2*sin(t1) + l2*sin(t2) + ox) > && (py == l1*cos(t1) + l2*cos(t2) + oy)
625941c28e05c05ec3eea324
def run(self): <NEW_LINE> <INDENT> print("Programme starts!") <NEW_LINE> self.root.mainloop()
Make everything begin
625941c2c432627299f04bf6
def not_null(self): <NEW_LINE> <INDENT> self._not_null = Clause('NOT NULL') <NEW_LINE> return self
Adds a |NOT NULL| constraint to the column
625941c2f548e778e58cd52e
def _sample_cauchy_process_at(self, times): <NEW_LINE> <INDENT> if times[0] != 0: <NEW_LINE> <INDENT> zero = False <NEW_LINE> times = np.insert(times, 0, [0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> zero = True <NEW_LINE> <DEDENT> deltas = np.diff(times) <NEW_LINE> levys = [levy.rvs(loc=0, scale=d ** 2 / 2, size=1, random_state=self.rng) for d in deltas] <NEW_LINE> ts = np.cumsum(levys) <NEW_LINE> if zero: <NEW_LINE> <INDENT> ts = np.insert(ts, 0, [0]) <NEW_LINE> <DEDENT> return self._sample_brownian_motion_at(ts)
Generate a realization of a Cauchy process.
625941c28e05c05ec3eea325
def get(self, index): <NEW_LINE> <INDENT> if index < 0: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> i = 0 <NEW_LINE> current = self.root.next <NEW_LINE> while i < index and current != self.root: <NEW_LINE> <INDENT> current = current.next <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> return current.val
Get the value of the index-th node in the linked list. If the index is invalid, return -1. :type index: int :rtype: int
625941c297e22403b379cf4b
def evaluate(self, *inputs): <NEW_LINE> <INDENT> inputs = np.broadcast_arrays(*inputs) <NEW_LINE> if isinstance(inputs, u.Quantity): <NEW_LINE> <INDENT> inputs = inputs.value <NEW_LINE> <DEDENT> shape = inputs[0].shape <NEW_LINE> inputs = [inp.flatten() for inp in inputs[: self.n_inputs]] <NEW_LINE> inputs = np.array(inputs).T <NEW_LINE> if not has_scipy: <NEW_LINE> <INDENT> raise ImportError("Tabular model requires scipy.") <NEW_LINE> <DEDENT> result = interpn(self.points, self.lookup_table, inputs, method=self.method, bounds_error=self.bounds_error, fill_value=self.fill_value) <NEW_LINE> if (isinstance(self.lookup_table, u.Quantity) and not isinstance(self.points[0], u.Quantity)): <NEW_LINE> <INDENT> result = result * self.lookup_table.unit <NEW_LINE> <DEDENT> if self.n_outputs == 1: <NEW_LINE> <INDENT> result = result.reshape(shape) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = [r.reshape(shape) for r in result] <NEW_LINE> <DEDENT> return result
Return the interpolated values at the input coordinates. Parameters ---------- inputs : list of scalar or list of ndarray Input coordinates. The number of inputs must be equal to the dimensions of the lookup table.
625941c2a05bb46b383ec7d5
def test_cube_oplst(): <NEW_LINE> <INDENT> oplst = [['Y', 'Y'], ['R', 'G'], ['B', 'G']] <NEW_LINE> cube = Cube(oplst) <NEW_LINE> assert cube.oplst() == oplst
Test opposite pairs method
625941c263f4b57ef00010cf
def test_user_tags(self): <NEW_LINE> <INDENT> nt = self.client.create_tag("明日之星") <NEW_LINE> print(nt) <NEW_LINE> tags = self.client.get_all_tags() <NEW_LINE> for t in tags: <NEW_LINE> <INDENT> print(t) <NEW_LINE> <DEDENT> self.client.remove_tag(nt.id)
测试用户标签
625941c210dbd63aa1bd2b56
def make_read_flag_selector (self,namespace,**kw): <NEW_LINE> <INDENT> sel = MSReadFlagSelector(namespace=namespace,**kw); <NEW_LINE> if self.flagsets: <NEW_LINE> <INDENT> sel.update_flagsets(self.flagsets); <NEW_LINE> <DEDENT> self._flag_selectors.append(sel); <NEW_LINE> return sel;
Makes an MSReadFlagSelector object connected to this MS selector.
625941c2aad79263cf3909f0
def _checkInputData(workload, sitewhitelist=None): <NEW_LINE> <INDENT> sitewhitelist = sitewhitelist or [] <NEW_LINE> self.assertEqual(workload.listPileupDatasets().values(), [{testArguments['Step2']['MCPileup']}]) <NEW_LINE> task = workload.getTaskByName(testArguments['Step1']['StepName']) <NEW_LINE> self.assertEqual(task.taskType(), "Production") <NEW_LINE> self.assertEqual(task.totalEvents(), testArguments['Step1']['RequestNumEvents']) <NEW_LINE> self.assertItemsEqual(task.listChildNames(), childNames) <NEW_LINE> self.assertEqual(task.getInputStep(), None) <NEW_LINE> self.assertDictEqual(task.getLumiMask(), {}) <NEW_LINE> self.assertEqual(task.getFirstEvent(), testArguments['Step1'].get('FirstEvent', 1)) <NEW_LINE> self.assertEqual(task.getFirstLumi(), testArguments['Step1'].get('FirstLumi', 1)) <NEW_LINE> self.assertEqual(task.parentProcessingFlag(), testArguments['Step1'].get('IncludeParents', False)) <NEW_LINE> self.assertEqual(task.inputDataset(), testArguments['Step1'].get('InputDataset')) <NEW_LINE> self.assertEqual(task.dbsUrl(), None) <NEW_LINE> self.assertEqual(task.inputBlockWhitelist(), testArguments['Step1'].get('inputBlockWhitelist')) <NEW_LINE> self.assertEqual(task.inputBlockBlacklist(), testArguments['Step1'].get('inputBlockBlacklist')) <NEW_LINE> self.assertEqual(task.inputRunWhitelist(), testArguments['Step1'].get('inputRunWhitelist')) <NEW_LINE> self.assertEqual(task.inputRunBlacklist(), testArguments['Step1'].get('inputRunBlacklist')) <NEW_LINE> self.assertItemsEqual(task.siteWhitelist(), sitewhitelist) <NEW_LINE> self.assertItemsEqual(task.siteBlacklist(), testArguments['Step1'].get('siteBlacklist', [])) <NEW_LINE> self.assertDictEqual(task.getTrustSitelists(), {'trustPUlists': False, 'trustlists': False}) <NEW_LINE> self.assertItemsEqual(task.getIgnoredOutputModulesForTask(), testArguments['Step1'].get('IgnoredOutputModules', [])) <NEW_LINE> splitParams = task.jobSplittingParameters() <NEW_LINE> 
self.assertTrue(splitParams['deterministicPileup']) <NEW_LINE> task = workload.getTaskByName('GENSIMMergeRAWSIMoutput') <NEW_LINE> self.assertEqual(task.getInputStep(), '/TestWorkload/GENSIM/cmsRun1') <NEW_LINE> task = workload.getTaskByName('RECOMergeAODSIMoutput') <NEW_LINE> self.assertEqual(task.getInputStep(), '/TestWorkload/GENSIM/cmsRun3') <NEW_LINE> task = workload.getTaskByName('RECOMergeRECOSIMoutput') <NEW_LINE> self.assertEqual(task.getInputStep(), '/TestWorkload/GENSIM/cmsRun3') <NEW_LINE> task = workload.getTaskByName('RECOMergeDQMoutput') <NEW_LINE> self.assertEqual(task.getInputStep(), '/TestWorkload/GENSIM/cmsRun3') <NEW_LINE> return
Validate input data/block/run/step/PU for the 4-tasks request
625941c24f88993c3716c01a
def testConceptNetworkGetLinksLabeledOrTo(self): <NEW_LINE> <INDENT> cn = ConceptNetwork() <NEW_LINE> nodeFrom = Node("From") <NEW_LINE> nodeTo1 = Node("To1") <NEW_LINE> nodeTo2 = Node("To2") <NEW_LINE> nodeLabel= Node("Label") <NEW_LINE> cn.addLink(nodeFrom, nodeTo1) <NEW_LINE> cn.addLink(nodeFrom, nodeLabel) <NEW_LINE> cn.addLink(nodeFrom, nodeTo2, nodeLabel) <NEW_LINE> links = cn.getLinksLabeledOrTo(nodeLabel) <NEW_LINE> self.assertEqual(2,len(links))
Get links with a label or to that label
625941c2d53ae8145f87a224
def parse_json(self, req, name, field): <NEW_LINE> <INDENT> json_data = self._cache.get('json') <NEW_LINE> if json_data is None: <NEW_LINE> <INDENT> if not req.is_json: <NEW_LINE> <INDENT> raise ApiError('Mimetype application/json expected but {} found.'.format(req.mimetype), 400) <NEW_LINE> <DEDENT> <DEDENT> return super().parse_json(req, name, field)
Pull a json value from the request.
625941c27cff6e4e81117937
def vlan_access_positive(dut, hs1, hs2, step): <NEW_LINE> <INDENT> dut_port1 = dut.ports["3"] <NEW_LINE> dut_port2 = dut.ports["4"] <NEW_LINE> configure_hosts(hs1, hs2, step) <NEW_LINE> step("Configure vlan 2 in switch") <NEW_LINE> with dut.libs.vtysh.ConfigVlan('2') as ctx: <NEW_LINE> <INDENT> ctx.no_shutdown() <NEW_LINE> <DEDENT> step("Configure switch interfaces with vlan access configuration") <NEW_LINE> with dut.libs.vtysh.ConfigInterface(dut_port1) as ctx: <NEW_LINE> <INDENT> ctx.no_routing() <NEW_LINE> ctx.no_shutdown() <NEW_LINE> ctx.vlan_access('2') <NEW_LINE> <DEDENT> with dut.libs.vtysh.ConfigInterface(dut_port2) as ctx: <NEW_LINE> <INDENT> ctx.no_routing() <NEW_LINE> ctx.no_shutdown() <NEW_LINE> ctx.vlan_access('2') <NEW_LINE> <DEDENT> dut.libs.vtysh.show_running_config() <NEW_LINE> for switch, portlbl in [(dut, dut_port1), (dut, dut_port2)]: <NEW_LINE> <INDENT> wait_until_interface_up(switch, portlbl) <NEW_LINE> <DEDENT> sleep(5) <NEW_LINE> step("Ping should succeed as the switch's interfaces will allow untagged" " packets to pass between " + dut_port1 + " and " + dut_port2) <NEW_LINE> ping = hs1.libs.ping.ping(10, '2.2.2.3') <NEW_LINE> assert ping['received'] >= 5, "Ping should have " "worked when vlan access 2 is enabled on both the interfaces" <NEW_LINE> unconfigure_hosts(hs1, hs2, step)
In this testcase reachability is tested when vlan access mode is configured on the interfaces of a switch.
625941c2b5575c28eb68dfb0
def __len__(self): <NEW_LINE> <INDENT> return self.nrows // self.bs
Denotes the number of batches per epoch
625941c2711fe17d82542321
def get_user(self): <NEW_LINE> <INDENT> return self.user
Get the user who created this changeset.
625941c2bde94217f3682da4
def process_request(self, request): <NEW_LINE> <INDENT> current_url = request.path_info <NEW_LINE> for reg in settings.VALID_URL: <NEW_LINE> <INDENT> if re.match(reg,current_url): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> permission_dict = request.session.get(settings.PERMISSION_SESSION_KEY) <NEW_LINE> if not permission_dict: <NEW_LINE> <INDENT> return redirect('/login/') <NEW_LINE> <DEDENT> request.breadcrumb_list = [ {'title': '首页', 'url': '/'}, ] <NEW_LINE> flag = False <NEW_LINE> for item in permission_dict.values(): <NEW_LINE> <INDENT> id = item.get('id') <NEW_LINE> pid = item.get('pid') <NEW_LINE> pname = item.get('pname') <NEW_LINE> reg = "^%s$" % item.get('url') <NEW_LINE> if re.match(reg, current_url): <NEW_LINE> <INDENT> flag = True <NEW_LINE> if pid: <NEW_LINE> <INDENT> request.current_menu_id = pid <NEW_LINE> request.breadcrumb_list.extend([ {'title': permission_dict[pname]['title'], 'url': permission_dict[pname]['url']}, {'title': item['title'], 'url': item['url']}, ]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request.current_menu_id = id <NEW_LINE> request.breadcrumb_list.extend([ {'title': item['title'], 'url': item['url']}, ]) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> if not flag: <NEW_LINE> <INDENT> return HttpResponse('无权访问')
权限控制 :param request: :return:
625941c2d486a94d0b98e0f7
def linprogParams(ssd_matrix, ij, ji, graphix, graphmap, s, t): <NEW_LINE> <INDENT> L = ssd_matrix.shape[0] <NEW_LINE> A = np.zeros((L,len(graphmap)), dtype=np.int) <NEW_LINE> for n in range(L): <NEW_LINE> <INDENT> for i in ij[n]: A[n,graphmap['%d_%d' % (n,i)]] += 1 <NEW_LINE> for j in ji[n]: A[n,graphmap['%d_%d' % (j,n)]] -= 1 <NEW_LINE> <DEDENT> c = np.zeros((len(graphix),)) <NEW_LINE> for n in range(len(graphix)): <NEW_LINE> <INDENT> x,y = graphix[n].split('_') <NEW_LINE> x = int(x) <NEW_LINE> y = int(y) <NEW_LINE> c[n] = ssd_matrix[x,y] <NEW_LINE> <DEDENT> b = np.zeros((L,)) <NEW_LINE> b[s] = 1 <NEW_LINE> b[t] = -1 <NEW_LINE> return c, A, b
Compute the linear programming parameters for the given problem
625941c256b00c62f0f1460a
def tf_rotate_perturb_point_cloud(): <NEW_LINE> <INDENT> raise NotImplementedError
Tensorflow op: perturbation by slightly rotating a point cloud
625941c2adb09d7d5db6c742
def _get_original_port_for_delete(original_ip, original_port): <NEW_LINE> <INDENT> runtime_properties = ctx.target.instance.runtime_properties <NEW_LINE> if PORT_REPLACEMENT not in runtime_properties: <NEW_LINE> <INDENT> return original_port <NEW_LINE> <DEDENT> key = '{}:{}'.format(original_ip, original_port) <NEW_LINE> port = runtime_properties[PORT_REPLACEMENT].get(key) <NEW_LINE> return port if port else original_port
check may be we already replaced port by some new free port
625941c27d43ff24873a2c51
def play_sound(sound_file): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if platform.system() == "Windows": <NEW_LINE> <INDENT> winsound.PlaySound(sound_file, winsound.SND_FILENAME) <NEW_LINE> <DEDENT> elif platform.system() == "Darwin": <NEW_LINE> <INDENT> os.system("afplay " + sound_file + "&") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Sounds not supported") <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print("Error playing sound")
Plays a given sound on Windows and Mac OSX :param sound_file: **string** name of the sound to play :return:
625941c2b7558d58953c4ec9
def test_registration_validates_email_registration_ban(self): <NEW_LINE> <INDENT> Ban.objects.create( check_type=Ban.EMAIL, banned_value='lorem*', user_message="You can't register account like this.", registration_only=True, ) <NEW_LINE> response = self.client.post( self.api_link, data={ 'username': 'totallyNew', 'email': 'loremipsum@dolor.met', 'password': 'LoremP4ssword', }, ) <NEW_LINE> self.assertEqual(response.status_code, 400) <NEW_LINE> self.assertEqual(response.json(), { 'email': ["You can't register account like this."], })
api validates email registration-only ban
625941c25fcc89381b1e166f
def find(self, _limit=None, _offset=0, _step=5000, order_by='id', **_filter): <NEW_LINE> <INDENT> self._check_dropped() <NEW_LINE> if not isinstance(order_by, (list, tuple)): <NEW_LINE> <INDENT> order_by = [order_by] <NEW_LINE> <DEDENT> order_by = [o for o in order_by if o in self.table.columns] <NEW_LINE> order_by = [self._args_to_order_by(o) for o in order_by] <NEW_LINE> args = self._args_to_clause(_filter) <NEW_LINE> count_query = alias(self.table.select(whereclause=args, limit=_limit, offset=_offset), name='count_query_alias').count() <NEW_LINE> rp = self.database.executable.execute(count_query) <NEW_LINE> total_row_count = rp.fetchone()[0] <NEW_LINE> if _step is None or _step is False or _step == 0: <NEW_LINE> <INDENT> _step = total_row_count <NEW_LINE> <DEDENT> if total_row_count > _step and not order_by: <NEW_LINE> <INDENT> _step = total_row_count <NEW_LINE> log.warn("query cannot be broken into smaller sections because it is unordered") <NEW_LINE> <DEDENT> queries = [] <NEW_LINE> for i in count(): <NEW_LINE> <INDENT> qoffset = _offset + (_step * i) <NEW_LINE> qlimit = _step <NEW_LINE> if _limit is not None: <NEW_LINE> <INDENT> qlimit = min(_limit - (_step * i), _step) <NEW_LINE> <DEDENT> if qlimit <= 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> queries.append(self.table.select(whereclause=args, limit=qlimit, offset=qoffset, order_by=order_by)) <NEW_LINE> <DEDENT> return ResultIter((self.database.executable.execute(q) for q in queries))
Performs a simple search on the table. Simply pass keyword arguments as ``filter``. :: results = table.find(country='France') results = table.find(country='France', year=1980) Using ``_limit``:: # just return the first 10 rows results = table.find(country='France', _limit=10) You can sort the results by single or multiple columns. Append a minus sign to the column name for descending order:: # sort results by a column 'year' results = table.find(country='France', order_by='year') # return all rows sorted by multiple columns (by year in descending order) results = table.find(order_by=['country', '-year']) By default :py:meth:`find() <dataset.Table.find>` will break the query into chunks of ``_step`` rows to prevent huge tables from being loaded into memory at once. For more complex queries, please use :py:meth:`db.query() <dataset.Database.query>` instead.
625941c2009cb60464c63365
def assign(self, n, x): <NEW_LINE> <INDENT> return _math_linear.std_vector_vector_double_assign(self, n, x)
assign(std_vector_vector_double self, std::vector< std::vector< double > >::size_type n, std_vector_double x)
625941c221bff66bcd684906
def test_write_header_fields(self): <NEW_LINE> <INDENT> control_file = os.path.join(REGRESSION_TESTS_DIR, 'MSVBVM60.DLL') <NEW_LINE> pe = pefile.PE(control_file, fast_load=True) <NEW_LINE> pe.parse_data_directories( directories=[ pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_RESOURCE']]) <NEW_LINE> original_data = pe.write() <NEW_LINE> str1 = b'string1' <NEW_LINE> str2 = b'str2' <NEW_LINE> str3 = b'string3' <NEW_LINE> pe.FileInfo[0].StringTable[0].entries[b'FileDescription'] = str1 <NEW_LINE> pe.FileInfo[0].StringTable[0].entries[b'FileVersion'] = str2 <NEW_LINE> pe.FileInfo[0].StringTable[0].entries[b'InternalName'] = str3 <NEW_LINE> new_data = pe.write() <NEW_LINE> diff, differences = 0, list() <NEW_LINE> for idx in range( len(original_data) ): <NEW_LINE> <INDENT> if original_data[idx] != new_data[idx]: <NEW_LINE> <INDENT> diff += 1 <NEW_LINE> if new_data[idx] != 0: <NEW_LINE> <INDENT> differences.append(chr(new_data[idx])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.assertEqual(''.join(differences).encode('utf-8', 'backslashreplace'), str1+str2+str3) <NEW_LINE> pe.close()
Verify correct field data modification.
625941c2e8904600ed9f1edc
def get_headers(data): <NEW_LINE> <INDENT> return {k for row in data for k, _ in row.iteritems()} if data is not None else None
Loop through the data and collect any headers. :param data list[dict]: :rtype: set[str] :returns: set of headers
625941c28e71fb1e9831d75c
def show_config(jail): <NEW_LINE> <INDENT> ret = {} <NEW_LINE> if subprocess.call(["jls", "-nq", "-j", jail]) == 0: <NEW_LINE> <INDENT> jls = subprocess.check_output( ["jls", "-nq", "-j", jail] ) <NEW_LINE> jailopts = salt.utils.args.shlex_split(salt.utils.stringutils.to_unicode(jls)) <NEW_LINE> for jailopt in jailopts: <NEW_LINE> <INDENT> if "=" not in jailopt: <NEW_LINE> <INDENT> ret[jailopt.strip().rstrip(";")] = "1" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = jailopt.split("=")[0].strip() <NEW_LINE> value = jailopt.split("=")[-1].strip().strip('"') <NEW_LINE> ret[key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for rconf in ("/etc/rc.conf", "/etc/rc.conf.local"): <NEW_LINE> <INDENT> if os.access(rconf, os.R_OK): <NEW_LINE> <INDENT> with salt.utils.files.fopen(rconf, "r") as _fp: <NEW_LINE> <INDENT> for line in _fp: <NEW_LINE> <INDENT> line = salt.utils.stringutils.to_unicode(line) <NEW_LINE> if not line.strip(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not line.startswith("jail_{0}_".format(jail)): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> key, value = line.split("=") <NEW_LINE> ret[key.split("_", 2)[2]] = value.split('"')[1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> for jconf in ("/etc/jail.conf", "/usr/local/etc/jail.conf"): <NEW_LINE> <INDENT> if os.access(jconf, os.R_OK): <NEW_LINE> <INDENT> with salt.utils.files.fopen(jconf, "r") as _fp: <NEW_LINE> <INDENT> for line in _fp: <NEW_LINE> <INDENT> line = salt.utils.stringutils.to_unicode(line) <NEW_LINE> line = line.partition("#")[0].strip() <NEW_LINE> if line: <NEW_LINE> <INDENT> if line.split()[-1] == "{": <NEW_LINE> <INDENT> if line.split()[0] != jail and line.split()[0] != "*": <NEW_LINE> <INDENT> while line.split()[-1] != "}": <NEW_LINE> <INDENT> line = next(_fp) <NEW_LINE> line = line.partition("#")[0].strip() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> if line.split()[-1] == "}": <NEW_LINE> 
<INDENT> continue <NEW_LINE> <DEDENT> if "=" not in line: <NEW_LINE> <INDENT> ret[line.strip().rstrip(";")] = "1" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = line.split("=")[0].strip() <NEW_LINE> value = line.split("=")[-1].strip().strip(";'\"") <NEW_LINE> ret[key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return ret
Display specified jail's configuration CLI Example: .. code-block:: bash salt '*' jail.show_config <jail name>
625941c230c21e258bdfa44d
def cylinder(draw, v1_, v2_, r, **options): <NEW_LINE> <INDENT> options = {"fill": "#fff", **options} <NEW_LINE> draw.line([int(x) for x in [v1_[0], v1_[1], v2_[0], v2_[1]]], width=int(r * 2), fill=options["fill"])
draw a 3D cylinder
625941c2e5267d203edcdc51
def ouvrePortes(mat): <NEW_LINE> <INDENT> for i in range(len(mat)): <NEW_LINE> <INDENT> for j in range(len(mat[0])): <NEW_LINE> <INDENT> pos = mat[i][j] <NEW_LINE> if pos == "E": <NEW_LINE> <INDENT> mat[i][j] = "S" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return copy.deepcopy(mat)
Cette fonction permet de sortir une fois qu'il n'y a plus de diamant dans le jeu
625941c291af0d3eaac9b9c9
def isEmptyString(inp): <NEW_LINE> <INDENT> return (inp == '' or inp is None)
Returns true if the input is None or is an Empty string.
625941c226238365f5f0ee1e
@application.route('/semanticize/<langcode>', methods=['GET', 'POST']) <NEW_LINE> def _semanticize_handler(langcode): <NEW_LINE> <INDENT> text = _get_text_from_request() <NEW_LINE> settings = {"langcode": langcode} <NEW_LINE> for key, value in request.values.iteritems(): <NEW_LINE> <INDENT> assert key not in settings <NEW_LINE> settings[key] = value <NEW_LINE> <DEDENT> sem_result = _semanticize(langcode, settings, text) <NEW_LINE> json = _json_dumps(sem_result, "pretty" in settings) <NEW_LINE> return Response(json, mimetype=APPLICATION_JSON)
The function handling the /semanticize/<langcode> namespace. It uses the chain-of-command pattern to run all processors, using the corresponding preprocess, process, and postprocess steps. @param langcode: The language to use in the semanticizing @return: The body of the response, in this case a json formatted list of links and their relevance
625941c25fc7496912cc3930
def main(args_list=None): <NEW_LINE> <INDENT> if args_list is None: <NEW_LINE> <INDENT> args_list = sys.argv[1:] <NEW_LINE> <DEDENT> args = arg_parser.parse_args(args_list) <NEW_LINE> predictor = mhc_binding_predictor_from_args(args) <NEW_LINE> input_dictionary = { ("sequence%d" % i): seq for (i, seq) in enumerate(args.sequence) } <NEW_LINE> epitope_collection = predictor.predict(input_dictionary) <NEW_LINE> df = epitope_collection.to_dataframe() <NEW_LINE> print(df) <NEW_LINE> if args.output_csv: <NEW_LINE> <INDENT> df.to_csv(args.output_csv)
Script to make pMHC binding predictions from amino acid sequences. Usage example: mhctools --sequence SFFPIQQQQQAAALLLI --sequence SILQQQAQAQQAQAASSSC --mhc-predictor netmhc --mhc-alleles HLA-A0201 H2-Db --output-csv epitope.csv
625941c27047854f462a13be
def bar(self, serie, rescale=False): <NEW_LINE> <INDENT> serie_node = self.svg.serie(serie) <NEW_LINE> bars = self.svg.node(serie_node['plot'], class_="bars") <NEW_LINE> if rescale and self.secondary_series: <NEW_LINE> <INDENT> points = self._rescale(serie.points) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> points = serie.points <NEW_LINE> <DEDENT> for i, (x, y) in enumerate(points): <NEW_LINE> <INDENT> if None in (x, y) or (self.logarithmic and y <= 0): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> metadata = serie.metadata.get(i) <NEW_LINE> val = self._format(serie.values[i]) <NEW_LINE> bar = decorate( self.svg, self.svg.node(bars, class_='bar'), metadata) <NEW_LINE> x_, y_, width, height = self._bar( serie, bar, x, y, i, self.zero, secondary=rescale) <NEW_LINE> self._confidence_interval( serie_node['overlay'], x_ + width / 2, y_, serie.values[i], metadata) <NEW_LINE> self._tooltip_and_print_values( serie_node, serie, bar, i, val, metadata, x_, y_, width, height)
Draw a bar graph for a serie
625941c2283ffb24f3c558b5
@require_POST <NEW_LINE> @login_required <NEW_LINE> def unwatch_locale(request): <NEW_LINE> <INDENT> ReviewableRevisionInLocaleEvent.stop_notifying(request.user, locale=request.LANGUAGE_CODE) <NEW_LINE> return HttpResponse()
Stop watching a locale for revisions ready for review.
625941c230c21e258bdfa44e
def test_socket_server_init_success(self): <NEW_LINE> <INDENT> socket_object_keys = ['accept', 'bind', 'close', 'connect', 'recv', 'send', 'shutdown'] <NEW_LINE> server_socket = BaseSocket() <NEW_LINE> [self.assertTrue(x in server_socket.__dir__()) for x in socket_object_keys] <NEW_LINE> server_socket.close_unix_socket()
Checking if socket object has all needed properties
625941c24f6381625f1149ee
def test_html_meta_title(self): <NEW_LINE> <INDENT> input_fname = "../../test-plans/HTML/Meta-title/input.txt" <NEW_LINE> input_type = "html" <NEW_LINE> expected_output_fname = "../../test-plans/HTML/Meta-title/output.txt" <NEW_LINE> actual_output_fname = "../../test-plans/HTML/Meta-title/actual.txt" <NEW_LINE> self.run_test_check_output(input_fname, input_type, expected_output_fname, actual_output_fname)
System test for the meta-title HTML test plan. throws AssertionError: If the test case fails.
625941c2462c4b4f79d1d682
def save_state(self, fname: str): <NEW_LINE> <INDENT> with open(fname, "wb") as fp: <NEW_LINE> <INDENT> pickle.dump(self.batch_indices, fp) <NEW_LINE> pickle.dump(self.curr_batch_index, fp) <NEW_LINE> np.save(fp, [a.asnumpy() for a in self.inverse_data_permutations]) <NEW_LINE> np.save(fp, [a.asnumpy() for a in self.data_permutations])
Saves the current state of iterator to a file, so that iteration can be continued. Note that the data is not saved, i.e. the iterator must be initialized with the same parameters as in the first call. :param fname: File name to save the information to.
625941c2ec188e330fd5a754
def test_negative_take(self): <NEW_LINE> <INDENT> assert_raises(ValueError, take, -3, xrange(10))
Make sure taking negative items results in a ValueError
625941c2b7558d58953c4eca
def test_ecdf_simple(self): <NEW_LINE> <INDENT> test_case = unittest.TestCase() <NEW_LINE> test_data = numpy.array([2,1,4,5]) <NEW_LINE> test_x, test_y = ecdf(test_data) <NEW_LINE> test_case.assertListEqual(test_x.tolist(),[1,2,4,5]) <NEW_LINE> test_case.assertListEqual(test_y.tolist(),[0.25,0.5,0.75,1.0])
should have the property that f(x) = P(X ≤ x), i.e. f(x) = (count of values t in the data with t ≤ x) / (total number of values in the data)
625941c37b25080760e3940c
def update_data(self, event=None): <NEW_LINE> <INDENT> self.server_append_caris_file.SetSelection( self.server_append_caris_file.FindString(str(self.settings_db.server_append_caris_file))) <NEW_LINE> self.server_append_caris_file.SetOwnBackgroundColour(wx.WHITE) <NEW_LINE> self.auto_export_on_server_send.SetSelection( self.auto_export_on_server_send.FindString(str(self.settings_db.auto_export_on_server_send))) <NEW_LINE> self.auto_export_on_server_send.SetOwnBackgroundColour(wx.WHITE) <NEW_LINE> self.server_apply_surface_sound_speed.SetSelection( self.server_apply_surface_sound_speed.FindString(str(self.settings_db.server_apply_surface_sound_speed))) <NEW_LINE> self.server_apply_surface_sound_speed.SetOwnBackgroundColour(wx.WHITE)
Update the data from the database
625941c367a9b606de4a7e6d
def reduce(self, f, check=True, degree_bound=None, coefficients=None, valuations=None): <NEW_LINE> <INDENT> f = self.domain().coerce(f) <NEW_LINE> if check: <NEW_LINE> <INDENT> v = self(f) <NEW_LINE> if v < 0: <NEW_LINE> <INDENT> raise ValueError("f must have non-negative valuation") <NEW_LINE> <DEDENT> elif v > 0: <NEW_LINE> <INDENT> return self.residue_ring().zero() <NEW_LINE> <DEDENT> <DEDENT> if coefficients is None: <NEW_LINE> <INDENT> constant_term = next(self.coefficients(f)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> constant_term = coefficients[0] <NEW_LINE> <DEDENT> constant_term_reduced = self._base_valuation.reduce(constant_term) <NEW_LINE> return constant_term_reduced(self._residue_field_generator())
Reduce ``f`` module this valuation. INPUT: - ``f`` -- an element in the domain of this valuation - ``check`` -- whether or not to check whether ``f`` has non-negative valuation (default: ``True``) - ``degree_bound`` -- an a-priori known bound on the degree of the result which can speed up the computation (default: not set) - ``coefficients`` -- the coefficients of ``f`` as produced by :meth:`~sage.rings.valuation.developing_valuation.DevelopingValuation.coefficients` or ``None`` (default: ``None``); this can be used to speed up the computation when the expansion of ``f`` is already known from a previous computation. - ``valuations`` -- the valuations of ``coefficients`` or ``None`` (default: ``None``); ignored OUTPUT: an element of the :meth:`residue_ring` of this valuation, the reduction modulo the ideal of elements of positive valuation EXAMPLES:: sage: R.<x> = QQ[] sage: v = GaussValuation(R, valuations.TrivialValuation(QQ)) sage: w = v.augmentation(x, 1) sage: w.reduce(x^2 + x + 1) 1 sage: w = v.augmentation(x^2 + x + 1, infinity) sage: w.reduce(x) u1 TESTS: Cases with non-trivial base valuation:: sage: R.<u> = Qq(4, 10) sage: S.<x> = R[] sage: v = GaussValuation(S) sage: v.reduce(x) x sage: v.reduce(S(u)) u0 sage: w = v.augmentation(x^2 + x + u, 1/2) sage: w.reduce(S.one()) 1 sage: w.reduce(S(2)) 0 sage: w.reduce(S(u)) u0 sage: w.reduce(x) # this gives the generator of the residue field extension of w over v u1 sage: f = (x^2 + x + u)^2 / 2 sage: w.reduce(f) x sage: w.reduce(f + x + 1) x + u1 + 1 sage: ww = w.augmentation((x^2 + x + u)^2 + 2, 5/3) sage: g = ((x^2 + x + u)^2 + 2)^3 / 2^5 sage: ww.reduce(g) x sage: ww.reduce(f) 1 sage: ww.is_equivalent(f, 1) True sage: ww.reduce(f * g) x sage: ww.reduce(f + g) x + 1
625941c28a349b6b435e8125
def get_last(fi_id): <NEW_LINE> <INDENT> db = DB() <NEW_LINE> query_tuple = ("SELECT * FROM Summaries WHERE FI_ID=%s ORDER BY PK DESC LIMIT 1",fi_id) <NEW_LINE> cur = db.execute(query_tuple) <NEW_LINE> summary = TwitterSummary() <NEW_LINE> summary.fi_id = fi_id <NEW_LINE> if cur.rowcount: <NEW_LINE> <INDENT> row = cur.fetchone() <NEW_LINE> summary.period_start = row['PERIOD_START'] <NEW_LINE> summary.period_end = row['PERIOD_END'] <NEW_LINE> summary.total_spend = row['TOTAL_SPEND'] <NEW_LINE> summary.total_impressions = row['TOTAL_IMPRESSIONS'] <NEW_LINE> summary.total_engagements = row['TOTAL_ENGAGEMENTS'] <NEW_LINE> summary.new_spend = row['NEW_SPEND'] <NEW_LINE> summary.new_impressions = row['NEW_IMPRESSIONS'] <NEW_LINE> summary.new_engagements = row['NEW_ENGAGEMENTS'] <NEW_LINE> <DEDENT> return summary
Return the latest Summary object of the give fi_id account.
625941c36aa9bd52df036d55
def query_popen(filename, chrom, start, end): <NEW_LINE> <INDENT> query = '{}:{}-{}'.format(chrom, start, end) <NEW_LINE> process = subprocess.Popen(['tabix', filename, query],stdout=subprocess.PIPE) <NEW_LINE> for line in process.stdout: <NEW_LINE> <INDENT> yield line.strip().split()
Call tabix and generate an array of strings for each line it returns.
625941c3ec188e330fd5a755
def _getUnderlyingVmDevicesInfo(self): <NEW_LINE> <INDENT> self._getUnderlyingNetworkInterfaceInfo() <NEW_LINE> self._getUnderlyingDriveInfo() <NEW_LINE> self._getUnderlyingSoundDeviceInfo() <NEW_LINE> self._getUnderlyingVideoDeviceInfo() <NEW_LINE> self._getUnderlyingGraphicsDeviceInfo() <NEW_LINE> self._getUnderlyingControllerDeviceInfo() <NEW_LINE> self._getUnderlyingBalloonDeviceInfo() <NEW_LINE> self._getUnderlyingWatchdogDeviceInfo() <NEW_LINE> self._getUnderlyingSmartcardDeviceInfo() <NEW_LINE> self._getUnderlyingRngDeviceInfo() <NEW_LINE> self._getUnderlyingConsoleDeviceInfo() <NEW_LINE> self._getUnderlyingHostDeviceInfo() <NEW_LINE> self._getUnderlyingMemoryDeviceInfo() <NEW_LINE> self._getUnderlyingUnknownDeviceInfo()
Obtain underlying vm's devices info from libvirt.
625941c307f4c71912b11433
def getRandom(self): <NEW_LINE> <INDENT> choice=self.list.val <NEW_LINE> head,i=self.list.next,1 <NEW_LINE> while head != None: <NEW_LINE> <INDENT> i+=1 <NEW_LINE> k=random.random() <NEW_LINE> if k <= 1.0/i: <NEW_LINE> <INDENT> choice=head.val <NEW_LINE> <DEDENT> head=head.next <NEW_LINE> <DEDENT> return choice
Returns a random node's value. :rtype: int
625941c37b180e01f3dc47b3
def reformat_json_data_into_csv(json_file: str, local_path_raw: str, local_path_csv: str) -> any: <NEW_LINE> <INDENT> filename_only = os.path.relpath(json_file, local_path_raw) <NEW_LINE> self.log.debug(f"json_file: {json_file}, local_path_raw: {local_path_raw}, local_path_csv: {local_path_csv}") <NEW_LINE> filename_wo_suffix = os.path.splitext(os.path.splitext(filename_only)[0])[0] <NEW_LINE> full_local_filename = os.path.join(local_path_csv, filename_wo_suffix)+'.csv' <NEW_LINE> if not os.path.exists(self.local_path_csv): <NEW_LINE> <INDENT> os.makedirs(self.local_path_csv) <NEW_LINE> <DEDENT> self.log.debug(f"Writing json output to '{full_local_filename}'") <NEW_LINE> with open(f'{full_local_filename}', 'w') as f_out: <NEW_LINE> <INDENT> if self.add_header: <NEW_LINE> <INDENT> header = self.delimiter.join(['date_utc', 'date_local', 'parameter', 'value', 'unit', 'location', 'city', 'country', 'latitude', 'longitude', 'source_name', 'source_type', 'mobile', 'averaging_unit', 'averaging_value']) <NEW_LINE> f_out.write(f"{header}\n") <NEW_LINE> <DEDENT> if self.gzipped: <NEW_LINE> <INDENT> with gzip.open(os.path.join(self.local_path_raw,json_file), 'rb') as f_in: <NEW_LINE> <INDENT> for line in f_in: <NEW_LINE> <INDENT> f_str = convert_json_line_to_str(line) <NEW_LINE> f_out.write(f"{f_str}\n") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> with open(os.path.join(self.local_path_raw,json_file), 'r') as f_in: <NEW_LINE> <INDENT> for line in f_in: <NEW_LINE> <INDENT> f_str = convert_json_line_to_str(line) <NEW_LINE> f_out.write(f"{f_str}\n") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return True
Extract OpenAQ data fields from gzipped json file and write them into csv file
625941c3796e427e537b0576
def on_expr(self, node): <NEW_LINE> <INDENT> return self.interp(node.value)
expression
625941c373bcbd0ca4b2c029