code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def test_empty_json_serialization_deserialization_fails(self): <NEW_LINE> <INDENT> json_string = EMPTY_JSON <NEW_LINE> with self.assertRaises(TypeError, msg="Person should not be instantiable with empty JSON!"): <NEW_LINE> <INDENT> JsonHelper.to_instance(json_string, Person) | It is an error to try deserializing an empty JSON back to an object that needs construction-time parameters | 625941c3a17c0f6771cbe013 |
def format_data(self, box_ids): <NEW_LINE> <INDENT> boxes = [Box.get(face_id) for face_id in box_ids] <NEW_LINE> videos = defaultdict(dict) <NEW_LINE> for box in boxes: <NEW_LINE> <INDENT> video_id = int(box.video_id) <NEW_LINE> box_id = int(box.id) <NEW_LINE> videos[box_id]['video_id'] = video_id <NEW_LINE> videos[box_id]['thumbnail'] = map( int, [box.timestamp, box.x, box.y, box.width, box.height]) <NEW_LINE> <DEDENT> data = { 'question': self.question, 'image_bucket': config.get('affine.s3.bucket'), } <NEW_LINE> data['data'] = { "evaluator_id": str(self.id), "videos": dict(videos), "reference_image": self.reference_image_url if self.reference_image_url is not None else '', } <NEW_LINE> return data | ideal number of box ids is 18, but the template will not break if there are more or less | 625941c3cc0a2c11143dce51 |
def set_grey_lower(self, val): <NEW_LINE> <INDENT> self.grey_lb = val | Use sliders to set GREY lower bound. | 625941c3baa26c4b54cb10e2 |
def get_ranksum(self, log): <NEW_LINE> <INDENT> values_per_group = {col_name: col for col_name, col in self.stratified_ilash.groupby('Population_x')['sum']} <NEW_LINE> populations = list(values_per_group.keys()) <NEW_LINE> populations_list = list(set([tuple(sorted([i, j])) for i in populations for j in populations])) <NEW_LINE> for pop1, pop2 in populations_list: <NEW_LINE> <INDENT> if pop1 != pop2: <NEW_LINE> <INDENT> result = stats.ranksums(values_per_group[pop1], values_per_group[pop2]) <NEW_LINE> log.logger.info("Population 1: " + pop1 + " Population 2: " + pop2 + " Statistic: " + str(result.statistic) + " P-value: " + str(result.pvalue)) | Args:
self object(:obj:`self`): self object with attributes
Returns:
For each population a paired Wilcoxen-rank sum tests: a test statistic a two-sided p-value of the test | 625941c3be7bc26dc91cd5c4 |
def add_like(obj, user): <NEW_LINE> <INDENT> obj_type = get_obj_type_for_model(obj) <NEW_LINE> with atomic(): <NEW_LINE> <INDENT> like, created = Like.objects.get_or_create(content_type=obj_type, object_id=obj.id, user=user) <NEW_LINE> if not created: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> likes, _ = Likes.objects.get_or_create(content_type=obj_type, object_id=obj.id) <NEW_LINE> likes.count = F('count') + 1 <NEW_LINE> params = { 'obj_type': obj_type, 'obj_id': obj.id, 'user_id': user.id } <NEW_LINE> cache.set(cache_key('object_like', params), True) <NEW_LINE> likes.save() <NEW_LINE> cache.delete(cache_key('object_like_count', params)) <NEW_LINE> <DEDENT> return like | Add a like to an object.
If the user has already liked the object nothing happends, so this function can be considered
idempotent.
:param obj: Any Django model instance.
:param user: User adding the like. :class:`~hashup.users.models.User` instance. | 625941c35f7d997b87174a57 |
def deserialize_numpy(self, str, numpy): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> end = 0 <NEW_LINE> start = end <NEW_LINE> end += 2 <NEW_LINE> (self.requestedID,) = _struct_h.unpack(str[start:end]) <NEW_LINE> return self <NEW_LINE> <DEDENT> except struct.error as e: <NEW_LINE> <INDENT> raise genpy.DeserializationError(e) | unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module | 625941c3f548e778e58cd53e |
def test_checkForExistingReaction_keeps_identical_reactions_with_duplicate_flag(self): <NEW_LINE> <INDENT> cerm = CoreEdgeReactionModel() <NEW_LINE> spcA = Species().fromSMILES('[H]') <NEW_LINE> spcB = Species().fromSMILES('C=C[CH2]C') <NEW_LINE> spcC = Species().fromSMILES('C=C=CC') <NEW_LINE> spcD = Species().fromSMILES('[H][H]') <NEW_LINE> spcA.label = '[H]' <NEW_LINE> spcB.label = 'C=C[CH2]C' <NEW_LINE> spcC.label = 'C=C=CC' <NEW_LINE> spcD.label = '[H][H]' <NEW_LINE> spcB.generate_resonance_structures() <NEW_LINE> cerm.addSpeciesToCore(spcA) <NEW_LINE> cerm.addSpeciesToCore(spcB) <NEW_LINE> cerm.addSpeciesToCore(spcC) <NEW_LINE> cerm.addSpeciesToCore(spcD) <NEW_LINE> reaction_in_model = TemplateReaction(reactants=[spcA, spcB], products=[spcC, spcD], family='H_Abstraction', template=['Csd', 'H'], duplicate=True) <NEW_LINE> reaction_in_model.reactants.sort() <NEW_LINE> reaction_in_model.products.sort() <NEW_LINE> reaction_to_add = TemplateReaction(reactants=[spcA, spcB], products=[spcC, spcD], family='H_Abstraction', template=['Cs12345', 'H'], duplicate=True) <NEW_LINE> cerm.addReactionToCore(reaction_in_model) <NEW_LINE> cerm.registerReaction(reaction_in_model) <NEW_LINE> found, rxn = cerm.checkForExistingReaction(reaction_to_add) <NEW_LINE> self.assertFalse(found, 'checkForExistingReaction failed to identify duplicate template reactions') | Test that checkForExistingReaction keeps reactions with different templates and duplicate=True. | 625941c3e1aae11d1e749c77 |
def response_cb(response): <NEW_LINE> <INDENT> if response == stock.ACCEPT: <NEW_LINE> <INDENT> use_http = c_use_http.get_active() <NEW_LINE> use_ipv6 = c_use_ipv6.get_active() <NEW_LINE> use_proxy = c_use_proxy.get_active() <NEW_LINE> use_auth = c_use_auth.get_active() <NEW_LINE> proxy_host = t_proxy_host.get_text() <NEW_LINE> proxy_port = t_proxy_port.get_text() <NEW_LINE> server_host = t_server_host.get_text() <NEW_LINE> server_port = t_server_port.get_text() <NEW_LINE> user = t_user.get_text() <NEW_LINE> passwd = t_passwd.get_text() <NEW_LINE> proxy = e3.Proxy(use_proxy, proxy_host, proxy_port, use_auth, user, passwd) <NEW_LINE> callback(use_http, use_ipv6, proxy, service, server_host, server_port, True) <NEW_LINE> <DEDENT> for widget in proxy_settings: <NEW_LINE> <INDENT> widget.destroy() <NEW_LINE> <DEDENT> window.destroy() | called on any response (close, accept, cancel) if accept
get the new values and call callback with those values | 625941c36fb2d068a760f05d |
def grouper(iterable, n, fillvalue=None): <NEW_LINE> <INDENT> args = [iter(iterable)] * n <NEW_LINE> return itertools.zip_longest(*args, fillvalue=fillvalue) | Collect data into fixed-length chunks or blocks
Example: grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" | 625941c3442bda511e8be3dc |
def register_window(): <NEW_LINE> <INDENT> global register_screen <NEW_LINE> global username <NEW_LINE> global password <NEW_LINE> global erase_username <NEW_LINE> global erase_pass <NEW_LINE> username = tk.StringVar() <NEW_LINE> password = tk.StringVar() <NEW_LINE> register_screen = tk.Toplevel(origin_screen) <NEW_LINE> register_screen.title("Register") <NEW_LINE> register_screen.geometry("300x250") <NEW_LINE> tk.Label(register_screen, text="Please enter details below").pack() <NEW_LINE> tk.Label(register_screen, text="").pack() <NEW_LINE> tk.Label(register_screen, text="Username *").pack() <NEW_LINE> erase_username = tk.Entry(register_screen, textvariable=username) <NEW_LINE> erase_username.pack() <NEW_LINE> tk.Label(register_screen, text="Password *").pack() <NEW_LINE> erase_pass = tk.Entry(register_screen, textvariable=password, show="*") <NEW_LINE> erase_pass.pack() <NEW_LINE> tk.Label(register_screen, text="").pack() <NEW_LINE> tk.Button( register_screen, text="Register", height="2", width="30", command=confirm_register, ).pack() | Design of Registration Window
- allows a user with no account to create one | 625941c3dd821e528d63b16c |
def block(nest=Nesting.POST, sub=None, opts=''): <NEW_LINE> <INDENT> def block_fn(parser:Callable) -> Block: <NEW_LINE> <INDENT> return Block(parser, nest, sub or ['all'], opts) <NEW_LINE> <DEDENT> return block_fn | Decorator for block style elements, to be used on a parser function.
eg:
```python
@block(...)
def BlockName(text):
""" docs for BlockName element go here """
return ['div', text]
```
The name of the function becomes the name of the block. There is automatic
sanitization/converstion that happens in the process.
So `BlockName` would have `name='block-name'` so that it's easier to type
in the plain-text format. | 625941c307d97122c4178849 |
def remove(self, obj): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> if obj not in self._objects: <NEW_LINE> <INDENT> return self.remove_blacklist(obj) <NEW_LINE> <DEDENT> self._objects.remove(obj) <NEW_LINE> return True | Remove an object from the list.
:param obj: vmmLibvirtObject to remove
:returns: True if object removed, False if object was not found | 625941c399fddb7c1c9de353 |
def build_cfg(self): <NEW_LINE> <INDENT> pass | Called during the objects instantiation.
Override to set the requests `cfg` property. | 625941c34428ac0f6e5ba7b3 |
def get_sentiment(service, sentence): <NEW_LINE> <INDENT> body = get_request_body( sentence, syntax=False, entities=True, sentiment=True) <NEW_LINE> docs = service.documents() <NEW_LINE> request = docs.annotateText(body=body) <NEW_LINE> response = request.execute(num_retries=3) <NEW_LINE> sentiment = response.get('documentSentiment') <NEW_LINE> if sentiment is None: <NEW_LINE> <INDENT> return (None, None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pol = sentiment.get('polarity') <NEW_LINE> mag = sentiment.get('magnitude') <NEW_LINE> <DEDENT> if pol is None and mag is not None: <NEW_LINE> <INDENT> pol = 0 <NEW_LINE> <DEDENT> return (pol, mag) | Get the sentence-level sentiment. | 625941c3a17c0f6771cbe014 |
def login_user(client, user_id, password): <NEW_LINE> <INDENT> return client.post( '/api/auth/login', content_type='application/json', data=json.dumps( { 'user_id': user_id, 'password': password } ) ) | Helper function for user login | 625941c3bde94217f3682db4 |
def removeItem(self, p_int): <NEW_LINE> <INDENT> pass | QComboBox.removeItem(int) | 625941c31d351010ab855ade |
def get_h_all_text(doc): <NEW_LINE> <INDENT> start = [] <NEW_LINE> names = [] <NEW_LINE> regex = re.compile(r"<h[1-6].+\n.*") <NEW_LINE> for match in regex.finditer(doc): <NEW_LINE> <INDENT> h_name = re.findall(r">\n.*", match.group())[0][2:].strip() <NEW_LINE> names.append(h_name) <NEW_LINE> start.append(match.start()) <NEW_LINE> <DEDENT> s = start[2] <NEW_LINE> paragraphs = {} <NEW_LINE> for i in range(3,len(start)): <NEW_LINE> <INDENT> e = start[i] -1 <NEW_LINE> if names[i-1] not in ['Practice', 'Questions', 'Review', 'Explore More', 'Explore More I','Explore More II','Explore More III' 'References']: <NEW_LINE> <INDENT> paragraphs[names[0] + "_" + str(i)] = (BeautifulSoup(doc[s:e], 'html.parser').get_text()) <NEW_LINE> <DEDENT> s = e + 1 <NEW_LINE> <DEDENT> return paragraphs | get text between h tags, except:
practice', 'questions', 'review', 'explore more', 'references | 625941c34e4d5625662d439c |
def high_kick(self, game): <NEW_LINE> <INDENT> ball_pos = game.get_ball_position() <NEW_LINE> if game.is_team_side(game.get_ball_position(), self.my_team) and game.get_player_at(game.get_ball_position()) is None: <NEW_LINE> <INDENT> for player in game.get_players_on_pitch(self.my_team, up=True): <NEW_LINE> <INDENT> if Skill.BLOCK in player.get_skills(): <NEW_LINE> <INDENT> return Action(ActionType.SELECT_PLAYER, player=player, position=ball_pos) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return Action(ActionType.SELECT_NONE) | Select player to move under the ball. | 625941c310dbd63aa1bd2b66 |
def _worker_container(task_q, result_q, func): <NEW_LINE> <INDENT> _th_name = threading.current_thread().name <NEW_LINE> logger.debug('[W++] mpms worker %s starting'% _th_name) <NEW_LINE> while True: <NEW_LINE> <INDENT> taskid, args, kwargs = task_q.get() <NEW_LINE> if taskid is StopIteration: <NEW_LINE> <INDENT> logger.debug("[W++] mpms worker %s got stop signal"%_th_name) <NEW_LINE> break <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> result = func(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error("[W++] Unhandled error %s in worker thread, taskid: %s"%(e, taskid)) <NEW_LINE> if result_q is not None: <NEW_LINE> <INDENT> result_q.put_nowait((taskid, e)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if result_q is not None: <NEW_LINE> <INDENT> result_q.put_nowait((taskid, result)) | Args:
result_q (Queue|None) | 625941c32c8b7c6e89b35783 |
def __init__(self, url, **kwargs): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.url = url <NEW_LINE> self.id = kwargs.pop("id", "") <NEW_LINE> self.title = kwargs.pop("title", "") <NEW_LINE> self.text = kwargs.pop("text", "") <NEW_LINE> self.language = kwargs.pop("language", "") <NEW_LINE> self.author = kwargs.pop("author", "") <NEW_LINE> self.date = kwargs.pop("date", "") <NEW_LINE> self.votes = kwargs.pop("votes", 0) <NEW_LINE> self.shares = kwargs.pop("shares", 0) <NEW_LINE> self.comments = kwargs.pop("comments", 0) <NEW_LINE> for k, v in list(kwargs.items()): <NEW_LINE> <INDENT> self[k] = v | An item in a list of results returned by SearchEngine.search(). All
dictionary keys are available as Unicode string attributes.
- id : unique identifier,
- url : the URL of the referred web content,
- title : the title of the content at the URL,
- text : the content text,
- language : the content language,
- author : for news items and posts, the author,
- date : for news items and posts, the publication date. | 625941c3be7bc26dc91cd5c5 |
def set_gradient_error_factor(self, value): <NEW_LINE> <INDENT> self._data['gradient_error'] = value <NEW_LINE> return | Set the error factor for steepness changes.
:param value:
:type value: | 625941c3091ae35668666f23 |
def extract_from_headers(mail): <NEW_LINE> <INDENT> extracted = {} <NEW_LINE> log.debug("Extracting values from custom headers") <NEW_LINE> branch = mail[X_GIT_BRANCH_HEADER] or None <NEW_LINE> version = mail[X_KERNEL_VERSION_HEADER] or None <NEW_LINE> tree = mail[X_TREE_HEADER] or None <NEW_LINE> patches = mail[X_PATCHES_HEADER] or None <NEW_LINE> if branch: <NEW_LINE> <INDENT> extracted["branch"] = branch <NEW_LINE> <DEDENT> if version: <NEW_LINE> <INDENT> extracted["version"] = fix_kernel_version(version) <NEW_LINE> <DEDENT> if tree: <NEW_LINE> <INDENT> extracted["tree"] = extract_tree_name(tree) <NEW_LINE> <DEDENT> if patches is not None: <NEW_LINE> <INDENT> extracted["patches"] = hack_patches_count(patches) <NEW_LINE> <DEDENT> return extracted | Extract the kerormations from mail headers.
:param mail: The email to parse.
:return dict A dictionary with tree, branch, version and patches. | 625941c391f36d47f21ac4b2 |
def extractROIsFromPCAICA(spcomps, numSTD=4, gaussiansigmax=2 , gaussiansigmay=2,thresh=None): <NEW_LINE> <INDENT> numcomps, width, height=spcomps.shape <NEW_LINE> rowcols=int(np.ceil(np.sqrt(numcomps))); <NEW_LINE> allMasks=[]; <NEW_LINE> maskgrouped=[]; <NEW_LINE> for k in xrange(0,numcomps): <NEW_LINE> <INDENT> comp=spcomps[k] <NEW_LINE> comp=gaussian_filter(comp,[gaussiansigmay,gaussiansigmax]) <NEW_LINE> maxc=np.percentile(comp,99); <NEW_LINE> minc=np.percentile(comp,1); <NEW_LINE> q75, q25 = np.percentile(comp, [75 ,25]) <NEW_LINE> iqr = q75 - q25 <NEW_LINE> minCompValuePos=np.median(comp)+numSTD*iqr/1.35; <NEW_LINE> minCompValueNeg=np.median(comp)-numSTD*iqr/1.35; <NEW_LINE> if thresh is None: <NEW_LINE> <INDENT> compabspos=comp*(comp>minCompValuePos)-comp*(comp<minCompValueNeg); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> compabspos=comp*(comp>thresh)-comp*(comp<-thresh); <NEW_LINE> <DEDENT> labeledpos, n = label(compabspos>0, np.ones((3,3))) <NEW_LINE> maskgrouped.append(labeledpos) <NEW_LINE> for jj in range(1,n+1): <NEW_LINE> <INDENT> tmp_mask=np.asarray(labeledpos==jj) <NEW_LINE> allMasks.append(tmp_mask) <NEW_LINE> <DEDENT> <DEDENT> return allMasks,maskgrouped | Given the spatial components output of the IPCA_stICA function extract possible regions of interest
The algorithm estimates the significance of a components by thresholding the components after gaussian smoothing
Parameters
-----------
spcompomps, 3d array containing the spatial components
numSTD: number of standard deviation above the mean of the spatial component to be considered signiificant | 625941c321bff66bcd684916 |
def check(file, text=""): <NEW_LINE> <INDENT> return __check(file, text) | Private function to check one Python source file for whitespace related
problems.
@param file source filename (string)
@param text source text (string)
@return A tuple indicating status (True = an error was found), the
filename, the linenumber and the error message
(boolean, string, string, string). The values are only
valid, if the status is True. | 625941c363d6d428bbe444b1 |
def _sorteditems(d, orderby): <NEW_LINE> <INDENT> s = sorted([(i[orderby], k) for k, i in d.items()]) <NEW_LINE> return [(k, d[k]) for i, k in s] | return items from a dict of dict, sorted by the orderby item of the dict | 625941c323e79379d52ee527 |
def report(request): <NEW_LINE> <INDENT> if request.method != 'POST': <NEW_LINE> <INDENT> return render_to_response('assassins/report.html', context_instance=RequestContext(request)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> reporting_player = Player.objects.get(name=request.POST['playername'], key=request.POST['playerkey'].upper()) <NEW_LINE> <DEDENT> except (KeyError, Player.DoesNotExist): <NEW_LINE> <INDENT> return render_to_response('assassins/report.html', { 'system_message': "Your name or key is incorrect.", }, context_instance=RequestContext(request)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if reporting_player.target.key != request.POST['targetkey'].upper(): <NEW_LINE> <INDENT> return render_to_response('assassins/report.html', { 'system_message': "Incorrect key for your target.", }, context_instance=RequestContext(request)) <NEW_LINE> <DEDENT> newsmessage = str(reporting_player) + ' killed ' + str(reporting_player.target) + '.' <NEW_LINE> news = NewsReport(report_type='KILL', message=newsmessage) <NEW_LINE> news.save() <NEW_LINE> newtarget = reporting_player.target.target <NEW_LINE> reporting_player.target.alive = False <NEW_LINE> reporting_player.target.target = None <NEW_LINE> reporting_player.target.save() <NEW_LINE> reporting_player.increment_killcount() <NEW_LINE> reporting_player.target = newtarget <NEW_LINE> reporting_player.save() <NEW_LINE> system_message = "Kill Confirmed. Your new target is: " <NEW_LINE> system_message += str(reporting_player.target) <NEW_LINE> return render_to_response('assassins/report.html', { 'system_message': system_message, }, context_instance=RequestContext(request)) | provides a form for logging kills | 625941c3283ffb24f3c558c5 |
def slicenames_prefixes(): <NEW_LINE> <INDENT> prefixes = ''.join([string.digits, string.ascii_uppercase, string.ascii_lowercase]) <NEW_LINE> for l in prefixes: <NEW_LINE> <INDENT> yield l + '_' | Prefixes for video slices names in sort order for builtin sorted() function | 625941c3d99f1b3c44c67553 |
def addForeignKeySql(self, c_name, o_pkg, o_tbl, o_fld, m_pkg, m_tbl, m_fld, on_up, on_del, init_deferred): <NEW_LINE> <INDENT> return '' | Sqlite cannot add foreign keys, only define them in CREATE TABLE. However they are not enforced. | 625941c39f2886367277a850 |
@permission_required(add_user_perm) <NEW_LINE> def create(request): <NEW_LINE> <INDENT> is_admin = False <NEW_LINE> is_superuser = request.user.is_superuser <NEW_LINE> if 'wagtailadmin.union_admin' in request.user.get_all_permissions(): <NEW_LINE> <INDENT> is_admin = True <NEW_LINE> <DEDENT> for fn in hooks.get_hooks('before_create_user'): <NEW_LINE> <INDENT> result = fn(request) <NEW_LINE> if hasattr(result, 'status_code'): <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> <DEDENT> if request.method == 'POST': <NEW_LINE> <INDENT> form = get_user_creation_form()(request.POST, request.FILES) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> print('form valid') <NEW_LINE> user = form.save() <NEW_LINE> for group in request.user.groups.all(): <NEW_LINE> <INDENT> if group.permissions.filter(codename='union_admin'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user.groups.add(Group.objects.get(name=group.name.replace("Admin", "Editor"))) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> messages.error(request, "Cannot add the new user to the editor group of your union") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> messages.success(request, _("User '{0}' created.").format(user), buttons=[ messages.button(reverse('wagtailusers_users:edit', args=(user.pk,)), _('Edit')) ]) <NEW_LINE> for fn in hooks.get_hooks('after_create_user'): <NEW_LINE> <INDENT> result = fn(request, user) <NEW_LINE> if hasattr(result, 'status_code'): <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> <DEDENT> return redirect('wagtailusers_users:index') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.error(request, _("The user could not be created due to errors.")) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> form = get_user_creation_form()() <NEW_LINE> <DEDENT> return render(request, 'wagtailusers/users/create.html', {'form': form, 'is_admin': is_admin, 'is_superuser': is_superuser}) | Return context to create.html | 625941c301c39578d7e74dfd |
def peak_widths(x, peaks, rel_height=0.5, prominence_data=None, wlen=None): <NEW_LINE> <INDENT> x = np.asarray(x, order='C', dtype=np.float64) <NEW_LINE> if x.ndim != 1: <NEW_LINE> <INDENT> raise ValueError('`x` must have exactly one dimension') <NEW_LINE> <DEDENT> peaks = np.asarray(peaks) <NEW_LINE> if peaks.size == 0: <NEW_LINE> <INDENT> peaks = np.array([], dtype=np.intp) <NEW_LINE> <DEDENT> if peaks.ndim != 1: <NEW_LINE> <INDENT> print(peaks.ndim) <NEW_LINE> raise ValueError('`peaks` must have exactly one dimension') <NEW_LINE> <DEDENT> if rel_height < 0.0: <NEW_LINE> <INDENT> raise ValueError('`rel_height` must be greater or equal to 0.0') <NEW_LINE> <DEDENT> if prominence_data is None: <NEW_LINE> <INDENT> prominence_data = peak_prominences(x, peaks, wlen) <NEW_LINE> <DEDENT> return _peak_widths(x, peaks, rel_height, *prominence_data) | Calculate the width of each peak in a signal.
.. versionadded:: 1.1.0 | 625941c3711fe17d82542331 |
def pickling_func2(folder, engine, delta_seq): <NEW_LINE> <INDENT> engine = create_engine('postgresql+psycopg2://ganlan:88a002d51c1@localhost/dataAUV_117', echo=False) <NEW_LINE> """ Creation of a rosbag to postgresql instance """ <NEW_LINE> rtp = rosbag_to_postgresql(pathfile=folder, engine=engine, init_db=True) <NEW_LINE> """ Creation of all the tables (If it wasn't created at the init with init_db = True) """ <NEW_LINE> """ Connection to the database """ <NEW_LINE> Session = sessionmaker(bind=engine) <NEW_LINE> session = Session() <NEW_LINE> """ Extracting messages from rosbag to database """ <NEW_LINE> topicstoext = ["/zodiac_auto/fix"] <NEW_LINE> return rtp.update_database(session, topicstoext, delta_seq) | Connection parameters to the database | 625941c338b623060ff0adb0 |
def within_bounds(self, item: Item) -> bool: <NEW_LINE> <INDENT> for header, header_range in self.ranges.items(): <NEW_LINE> <INDENT> if not header_range.within_bounds(item[header]): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Checks whether a tuple is within all the ranges of the this
cluster, eg. would cause no information loss on being entered.
Args:
item: The tuple to perform bounds checking on
Returns: Whether or not the tuple is within the bounds of the cluster | 625941c330c21e258bdfa45e |
def load_pdfreader(self, reader): <NEW_LINE> <INDENT> outlines=reader.getOutlines() <NEW_LINE> self.add_children_from_pdfreader_outlines(outlines) | read outlines from from PyPDF2 reader | 625941c39b70327d1c4e0d96 |
def add_deleted_file(self, path): <NEW_LINE> <INDENT> self.DeletedFiles += 1 <NEW_LINE> self.DeltaEntries += 1 <NEW_LINE> self.add_delta_entries_file(path, b'deleted') | Add stats of file no longer in source directory | 625941c30a366e3fb873e7db |
def test_is_1_prime(): <NEW_LINE> <INDENT> assert not is_prime(1), "1 is not prime!" | Is 1 prime? | 625941c38e71fb1e9831d76c |
def sensor2float(int_sensor): <NEW_LINE> <INDENT> float_time = float(int_sensor / 1000000) <NEW_LINE> return float_time | Convert sensor data from int to float. | 625941c35fc7496912cc3940 |
def qryAccount(self): <NEW_LINE> <INDENT> self.qryApi.qryAccount() | 查询账户资金 | 625941c331939e2706e4ce2e |
def get_value(self, default=None): <NEW_LINE> <INDENT> ret = self.get("value", default) <NEW_LINE> if ret is None: <NEW_LINE> <INDENT> return self.get_default(default) <NEW_LINE> <DEDENT> return ret | Get field value
@param default:
@return: | 625941c36aa9bd52df036d65 |
def delete_element_in_dictlist(dic, key): <NEW_LINE> <INDENT> for i in dic: <NEW_LINE> <INDENT> if key in i: <NEW_LINE> <INDENT> del i[key] | Удалить ключ из списка словарей | 625941c38a349b6b435e8135 |
def container_exec(self, name, detach, args): <NEW_LINE> <INDENT> d = util.Decompose(name) <NEW_LINE> if d.registry != "" or ':' in name: <NEW_LINE> <INDENT> repo = self._get_ostree_repo() <NEW_LINE> if not repo: <NEW_LINE> <INDENT> raise ValueError("Cannot find a configured OSTree repo") <NEW_LINE> <DEDENT> name = self._pull_image_to_ostree(repo, name, False) <NEW_LINE> return self._run_once(name, "run-once-{}".format(os.getpid()), args=args) <NEW_LINE> <DEDENT> is_container_running = self._is_service_active(name) <NEW_LINE> tty = os.isatty(0) <NEW_LINE> if is_container_running: <NEW_LINE> <INDENT> cmd = [self._get_oci_runtime(), "exec"] <NEW_LINE> if tty: <NEW_LINE> <INDENT> cmd.extend(["--tty"]) <NEW_LINE> <DEDENT> if detach: <NEW_LINE> <INDENT> cmd.extend(["--detach"]) <NEW_LINE> <DEDENT> cmd.extend([name]) <NEW_LINE> cmd.extend(args) <NEW_LINE> return util.check_call(cmd, stdin=sys.stdin, stderr=sys.stderr, stdout=sys.stdout) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> checkout = self._canonicalize_location(self.get_checkout(name)) <NEW_LINE> if checkout is None: <NEW_LINE> <INDENT> raise ValueError("The container '{}' doesn't exist".format(name)) <NEW_LINE> <DEDENT> if detach: <NEW_LINE> <INDENT> raise ValueError("Cannot use --detach with not running containers") <NEW_LINE> <DEDENT> temp_dir = tempfile.mkdtemp() <NEW_LINE> try: <NEW_LINE> <INDENT> orig_config = os.path.sep.join([checkout, 'config.json']) <NEW_LINE> dest_config = os.path.sep.join([temp_dir, 'config.json']) <NEW_LINE> config = self._rewrite_config_args(orig_config, dest_config, args, tty=tty, checkout=checkout) <NEW_LINE> for i in config['mounts']: <NEW_LINE> <INDENT> if 'type' not in i or i['type'] != 'bind' or 'source' not in i: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> src = i['source'] <NEW_LINE> is_runtime = src.startswith('/run/') or src.startswith('/var/run/') <NEW_LINE> if is_runtime and not os.path.exists(src): <NEW_LINE> <INDENT> os.makedirs(src) <NEW_LINE> <DEDENT> <DEDENT> 
cmd = [self._get_oci_runtime(), "run"] <NEW_LINE> cmd.extend([name]) <NEW_LINE> subp = subprocess.Popen(cmd, cwd=temp_dir, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr, close_fds=True, universal_newlines=False, env=os.environ) <NEW_LINE> return subp.wait() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> shutil.rmtree(temp_dir) | Run exec on a container.
:param name: The name of the container.
:type name: str
:param detach: If detach should be used.
:type detatch: bool
:param args: Arguments to pass to exec
:type args: list
:returns: The call result
:rtype: int
:raises: ValueError, OSError, subprocess.CalledProcessError | 625941c3b830903b967e98cf |
def next_state(present, picum, tmean, kA, opamp): <NEW_LINE> <INDENT> possible = np.nonzero(picum[present] >= random.random())[0] <NEW_LINE> next = np.delete(possible, np.where(possible == present))[0] <NEW_LINE> t = random.expovariate(1 / tmean[next]) <NEW_LINE> a = opamp if next < kA else 0 <NEW_LINE> return next, t, a | Get next state, its lifetime and amplitude. | 625941c3a934411ee3751655 |
def qinclude(context, path, start_at=None, end_at=None): <NEW_LINE> <INDENT> truncate_count = 0 <NEW_LINE> truncate_char = '' <NEW_LINE> fp = StringIO() <NEW_LINE> is_content = start_at is None <NEW_LINE> with open(path) as source: <NEW_LINE> <INDENT> for ii, line in enumerate(source): <NEW_LINE> <INDENT> if not is_content and start_at in line: <NEW_LINE> <INDENT> is_content = True <NEW_LINE> if line != '' and line[0] in (' ', '\t'): <NEW_LINE> <INDENT> truncate_char = line[0] <NEW_LINE> truncate_count = len(line) - len(line.lstrip(truncate_char)) <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> elif is_content and end_at is not None and end_at in line: <NEW_LINE> <INDENT> is_content = False <NEW_LINE> break <NEW_LINE> <DEDENT> if is_content: <NEW_LINE> <INDENT> fp.write(line) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> fp.seek(0) <NEW_LINE> try: <NEW_LINE> <INDENT> dp = StringIO() <NEW_LINE> for line in fp: <NEW_LINE> <INDENT> truncate_line = line[:truncate_count] if truncate_char else line <NEW_LINE> writable_line = line[truncate_count:] <NEW_LINE> if truncate_char: <NEW_LINE> <INDENT> if line.strip() == '': <NEW_LINE> <INDENT> writable_line = '\n' <NEW_LINE> <DEDENT> elif len(truncate_line) != truncate_line.count(truncate_char): <NEW_LINE> <INDENT> raise TruncateError('Truncate failed: {}'.format(repr(truncate_line))) <NEW_LINE> <DEDENT> <DEDENT> dp.write(writable_line) <NEW_LINE> <DEDENT> <DEDENT> except TruncateError as err: <NEW_LINE> <INDENT> logger.warning('Cannnot truncate: %s', err) <NEW_LINE> fp.seek(0) <NEW_LINE> dp = StringIO() <NEW_LINE> dp.write(fp.read()) <NEW_LINE> <DEDENT> dp.seek(0) <NEW_LINE> context.write(dp.read()) <NEW_LINE> return '' | start_at: None -> ファイルの先頭から
start_at: 'TAG' -> TAGの次の行から
end_at: None -> ファイルの最後まで
end_at: 'TAG' -> TAGの手前の行まで | 625941c373bcbd0ca4b2c039 |
def check_grad_file_status(grad_file: str) -> dwipy.constants.GRADSTATUS: <NEW_LINE> <INDENT> if os.path.exists(grad_file): <NEW_LINE> <INDENT> return check_grads_status(dwipy.IO.gradient_table.load_mrtrix_grads(grad_file)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return dwipy.constants.GRADSTATUS.GRADS_NOT_FOUND | Check the status of the gradients from a gradient file.
Args:
grad_file (str): Path to gradient file
Returns:
dwipy.constants.GRADSTATUS: The gradient status | 625941c3e76e3b2f99f3a7d1 |
def compute_novelty(sentences, corpus_file, opt, idx_to_word): <NEW_LINE> <INDENT> ref = sentences[0].split("\n") <NEW_LINE> sentences = [s.split(" ") for s in sentences[1].split("\n")] <NEW_LINE> with open(corpus_file, 'r') as f: <NEW_LINE> <INDENT> corpus = [s.rstrip().split(" ") for s in f.readlines()] <NEW_LINE> <DEDENT> corpus = [s for s in corpus if len(s) < opt.sample_len + 5] <NEW_LINE> novelty = [] <NEW_LINE> closest = [] <NEW_LINE> for i, sen in enumerate(sentences): <NEW_LINE> <INDENT> print("Computing novelty for sentence {}/{}.\n".format(i, len(sentences))) <NEW_LINE> mindex = np.argmin(np.array([ter(sen, s) for s in corpus])) <NEW_LINE> novelty.append(ter(sen, corpus[mindex])) <NEW_LINE> closest.append(" ".join([idx_to_word[int(idx)] for idx in corpus[mindex]])) <NEW_LINE> print("Novelty: {}, Sentence: {}, Closest: {}\n".format(novelty[i], ref[i], closest[i])) <NEW_LINE> <DEDENT> return sum(novelty) / float(len(novelty)), sorted(zip(novelty, ref, closest)) | Computes the novelty of a batch of sentences given a corpus. | 625941c34a966d76dd550fd0 |
def pers_ref_lst(self, media_handle): <NEW_LINE> <INDENT> lst = list() <NEW_LINE> backrefs = self._db.find_backlink_handles(media_handle) <NEW_LINE> for (reftype, ref) in backrefs: <NEW_LINE> <INDENT> if reftype == "Person": <NEW_LINE> <INDENT> person = self._db.get_person_from_handle(ref) <NEW_LINE> gallery = person.get_media_list() <NEW_LINE> for mediaref in gallery: <NEW_LINE> <INDENT> referenced_handles = mediaref.get_referenced_handles() <NEW_LINE> if len(referenced_handles) == 1: <NEW_LINE> <INDENT> handle_type, handle = referenced_handles[0] <NEW_LINE> if handle_type == "Media" and handle == media_handle: <NEW_LINE> <INDENT> rect = mediaref.get_rectangle() <NEW_LINE> if rect is None: <NEW_LINE> <INDENT> rect = (0, 0, 100, 100) <NEW_LINE> <DEDENT> lst.append((person, rect)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return lst | Get a person reference list with image rectangle information.
:param media_handle: handle of the media file
:type media_handle: string
:return lst: list of reference tuples
:rtype lst: list
:example lst: (:class Person: object, (0, 0, 100, 100)) | 625941c3fb3f5b602dac3654 |
def load(fo): <NEW_LINE> <INDENT> return json.load(fo, object_hook=object_hook) | Load from `fo`, convert date/datetime from string representation to
values. | 625941c37b180e01f3dc47c3 |
def __add__(self, other): <NEW_LINE> <INDENT> return DimensionAddition(self, other) | Return the sum of this dimension and `other`. | 625941c321bff66bcd684917 |
def form_post(self, form): <NEW_LINE> <INDENT> pass | Override this method to implement your form processing | 625941c39f2886367277a851 |
def run(self, edit): <NEW_LINE> <INDENT> g = gislacks(self, edit) <NEW_LINE> res = g.Get({ "command": "slack", "options": { "filelistasjson": True, "cfgdirectory": g.settings.get("gislack_cfgpath") } }) <NEW_LINE> if str.strip(res) == "No files.": <NEW_LINE> <INDENT> sublime.message_dialog("No files.") <NEW_LINE> return <NEW_LINE> <DEDENT> elif "Error" in res: <NEW_LINE> <INDENT> sublime.message_dialog(res) <NEW_LINE> return <NEW_LINE> <DEDENT> elif res is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> ar = [] <NEW_LINE> ids = [] <NEW_LINE> dat = json.loads(res) <NEW_LINE> for e in dat["files"]: <NEW_LINE> <INDENT> dat = e["name"] + " - " + str(datetime.strptime(e["createdtime"], '%Y-%m-%dT%H:%M:%S+09:00')) + " - " + e["title"] <NEW_LINE> ar.append(dat) <NEW_LINE> ids.append(e["id"]) <NEW_LINE> <DEDENT> def selected(idx): <NEW_LINE> <INDENT> if idx > -1: <NEW_LINE> <INDENT> self.disp(g, edit, ids[idx]) <NEW_LINE> <DEDENT> <DEDENT> self.list_items = ar <NEW_LINE> sublime.active_window().show_quick_panel(self.list_items, selected, sublime.MONOSPACE_FONT) | Function to get file list of Slack
@param edit sublime.Edit | 625941c3f548e778e58cd53f |
def groups(request): <NEW_LINE> <INDENT> if request.user.is_staff: <NEW_LINE> <INDENT> context = {'userGroup': Group.objects.all()} <NEW_LINE> return TemplateResponse(request, 'teacher/groups.html', context) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Http404('<h1>Page not found</h1>') | Учительская админка.
Выдает список групп.
:param request:
:return: 404
:return: шаблон teacher/students.html | 625941c329b78933be1e5671 |
def write_menu(Dish_list: list, s : str): <NEW_LINE> <INDENT> infile1 = open(s,'w') <NEW_LINE> for Dish in Dish_list: <NEW_LINE> <INDENT> infile1.write(str(Dish) + '\n') | Writes a txt document of dishes in list | 625941c3a4f1c619b28b0000 |
def add_sg_pt_tab_detail(self,file_path): <NEW_LINE> <INDENT> sg_pt_tab_detail = list() <NEW_LINE> with open(file_path, 'r') as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> line = line.split('\t') <NEW_LINE> if str(line[1]) == "chr3" and str(line[2]) == '18370866': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr17' and str(line[2]) == '7676154': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr22' and str(line[2]) == '42688607': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr20' and str(line[2]) == '50314010': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr21' and str(line[2]) == '39445145': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr12' and str(line[2]) == '8945306': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr1' and str(line[2]) == '21616107': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr8' and str(line[2]) == '6867054': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif str(line[1]) == 'chr19' and str(line[2]) == '58387815': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> insert_data = { "sample_id": line[0], "chrom":line[1], "pos": line[2], "ref": line[3], "alt": line[4], "dp": line[5], "ref_dp": line[6], "alt_dp": line[7], } <NEW_LINE> sg_pt_tab_detail.append(insert_data) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> collection = self.database['sg_pt_ref'] <NEW_LINE> collection.insert_many(sg_pt_tab_detail) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.bind_object.logger.error('导入tab表格出错:{}'.format(e)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bind_object.logger.info("导入tab表格成功") | 导入样本的tab文件
:param file_path:tab文件
:return: | 625941c356b00c62f0f1461b |
def argvu(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return [s.decode(self.encoding) for s in self.argv] <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return [s for s in self.argv] | Return the decoded arguments from argv. | 625941c3ec188e330fd5a765 |
def get_scaled_cutout2(self, p1, p2, scales, method='basic', logger=None): <NEW_LINE> <INDENT> if logger is None: <NEW_LINE> <INDENT> logger = self.logger <NEW_LINE> <DEDENT> data = self._get_data() <NEW_LINE> newdata, oscales = trcalc.get_scaled_cutout_basic2(data, p1, p2, scales, interpolation=method, logger=logger) <NEW_LINE> scale_x, scale_y = oscales[:2] <NEW_LINE> res = Bunch.Bunch(data=newdata, scale_x=scale_x, scale_y=scale_y) <NEW_LINE> if len(scales) > 2: <NEW_LINE> <INDENT> res.scale_z = oscales[2] <NEW_LINE> <DEDENT> return res | Extract a region of the image defined by points `p1` and `p2`
and scale it by scale factors `scales`.
`method` describes the method of interpolation used, where the
default "basic" is nearest neighbor. | 625941c330bbd722463cbd87 |
def maxAncestorDiff(self, root): <NEW_LINE> <INDENT> if not root: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> self.maxdiff=0 <NEW_LINE> self.dfs(root,root.val,root.val) <NEW_LINE> return self.maxdiff | :type root: TreeNode
:rtype: int | 625941c3236d856c2ad4479a |
def run(self): <NEW_LINE> <INDENT> self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self.sock.connect((self.network, self.port)) <NEW_LINE> self.sock.settimeout(0.3) <NEW_LINE> self.recv() <NEW_LINE> self.send_message('NICK', params=[self.nick]) <NEW_LINE> self.send_message('USER', params=[self.username, 0, '*', self.realname]) <NEW_LINE> online = True <NEW_LINE> while online: <NEW_LINE> <INDENT> messages = self.recv() <NEW_LINE> for message in messages: <NEW_LINE> <INDENT> if message.verb == '001': <NEW_LINE> <INDENT> self.rpl_welcome() <NEW_LINE> online = False <NEW_LINE> <DEDENT> elif message.verb.lower() == 'ping': <NEW_LINE> <INDENT> self.send_message('PONG', params=message.params) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.recv() <NEW_LINE> sleep(1.5) <NEW_LINE> self.recv() <NEW_LINE> self.quit() | Connect, execute commands, and then disconnect. | 625941c3442bda511e8be3dd |
def lonlatradius_2_cartesian(positionVectorLonLatRad): <NEW_LINE> <INDENT> [radius, theta, phi] = lonlatradius_2_sphericalPolar(positionVectorLonLatRad) <NEW_LINE> [x, y, z] = sphericalPolar_2_cartesian([radius, theta, phi]) <NEW_LINE> return [x, y, z] | Convert longitude-latitude-radial coordinates to Cartesian coordinates.
Longitude and latitude must be in degrees. The origin of the Cartesian
frame of reference is located at the centre of the sphere, the positive
half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis
goes through 90 deg E, 0 deg N and the positive half of the z-axis goes
through the North Pole equivalent. | 625941c3091ae35668666f24 |
def test_postgres_initialise(self): <NEW_LINE> <INDENT> if self.postgres is None: <NEW_LINE> <INDENT> self.skipTest("Postgres is not available. Test is skipping") <NEW_LINE> <DEDENT> data_store_postgres = DataStore( db_username="postgres", db_password="", db_host="localhost", db_port=55527, db_name="test", ) <NEW_LINE> inspector = inspect(data_store_postgres.engine) <NEW_LINE> table_names = inspector.get_table_names() <NEW_LINE> schema_names = inspector.get_schema_names() <NEW_LINE> self.assertEqual(len(table_names), 0) <NEW_LINE> self.assertNotIn("datastore_schema", schema_names) <NEW_LINE> data_store_postgres.initialise() <NEW_LINE> inspector = inspect(data_store_postgres.engine) <NEW_LINE> table_names = inspector.get_table_names() <NEW_LINE> schema_names = inspector.get_schema_names() <NEW_LINE> self.assertEqual(len(table_names), 11) <NEW_LINE> self.assertIn("Entry", table_names) <NEW_LINE> self.assertIn("Platforms", table_names) <NEW_LINE> self.assertIn("State", table_names) <NEW_LINE> self.assertIn("Datafiles", table_names) <NEW_LINE> self.assertIn("Nationalities", table_names) <NEW_LINE> self.assertIn("datastore_schema", schema_names) | Test whether schemas created successfully on PostgresSQL | 625941c31f5feb6acb0c4b15 |
def handle_metadata_verify_json(self, environ, start_response, qs): <NEW_LINE> <INDENT> ok = False <NEW_LINE> services = "[]" <NEW_LINE> try: <NEW_LINE> <INDENT> if CONST_BODY in qs: <NEW_LINE> <INDENT> json_message = json.loads(qs[CONST_BODY]) <NEW_LINE> if "xml" in json_message: <NEW_LINE> <INDENT> xml = json_message["xml"] <NEW_LINE> xml = xml.strip() <NEW_LINE> metadata_ok = False <NEW_LINE> ci = None <NEW_LINE> mds = MetadataStore( CONST_ONTS.values(), CONST_ATTRCONV, self.xmlsec_path, disable_ssl_certificate_validation=True) <NEW_LINE> _md = MetaData(CONST_ONTS.values(), CONST_ATTRCONV, metadata=xml) <NEW_LINE> try: <NEW_LINE> <INDENT> _md.load() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> _logger.info( 'Could not parse the metadata file in handleMetadataVerifyJSON.', exc_info=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> entity_id = _md.entity.keys()[0] <NEW_LINE> mds.metadata[entity_id] = _md <NEW_LINE> args = {"metad": mds, "dkeys": [self.key]} <NEW_LINE> ci = utils.ConsumerInfo(['metadata'], **args) <NEW_LINE> metadata_ok = True <NEW_LINE> <DEDENT> services = "[" <NEW_LINE> first = True <NEW_LINE> if ci is not None: <NEW_LINE> <INDENT> for item in ci.info: <NEW_LINE> <INDENT> if item.ava is not None and entity_id in item.ava: <NEW_LINE> <INDENT> for social in item.ava[entity_id]: <NEW_LINE> <INDENT> if not first: <NEW_LINE> <INDENT> services += "," <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> first = False <NEW_LINE> <DEDENT> services += '"' + social + '"' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> services += "]" <NEW_LINE> if metadata_ok: <NEW_LINE> <INDENT> ok = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> _logger.fatal('Unknown error in handleMetadataVerifyJSON.', exc_info=True) <NEW_LINE> <DEDENT> resp = Response('{"ok":"' + str(ok) + '", "services":' + services + '}', headers=[('Content-Type', CONST_TYPEJSON)]) <NEW_LINE> return resp(environ, start_response) | Handles JSON metadata verifications.
The post body must contains a JSON message like
{ 'xml' : 'a metadata file'}
:param environ: wsgi enviroment
:param start_response: wsgi start respons
:param qs: Query parameters in a dictionary.
:return: wsgi response contaning a JSON response. The JSON message will
contain the parameter ok and services.
ok will contain true if the metadata file can be parsed, otherwise
false.
services will contain a list of all the service names contained in
the metadata file. | 625941c37b180e01f3dc47c4 |
def get_root(self): <NEW_LINE> <INDENT> ancestors = self.get_ancestors() <NEW_LINE> if ancestors: <NEW_LINE> <INDENT> return ancestors[0] <NEW_LINE> <DEDENT> return self | :returns: the root node for the current node object. | 625941c399cbb53fe6792ba9 |
def __init__(self, model_field_name=None, dump=True, dump_key=None, dump_type=None): <NEW_LINE> <INDENT> self._model_field_name = model_field_name <NEW_LINE> self.dump = dump <NEW_LINE> self._dump_key = dump_key <NEW_LINE> self._dump_type = dump_type | Initialize the field.
:param model_field_name: Name of field on the database model.
:param dump: Set to false to not dump the field.
:param dump_key: The dictionary key to use in dumps.
:param dump_type: The data type used to determine how to serialize the
model field. | 625941c350485f2cf553cd5b |
def coerce(cls, value): <NEW_LINE> <INDENT> if cls.check(value): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> if isinstance(value, unicode): <NEW_LINE> <INDENT> value = value.encode() <NEW_LINE> <DEDENT> if isinstance(value, str): <NEW_LINE> <INDENT> return DateTimeFrom(value) <NEW_LINE> <DEDENT> elif isinstance(value, float): <NEW_LINE> <INDENT> return DateTimeFromTicks(value) <NEW_LINE> <DEDENT> elif isinstance(value, time.struct_time): <NEW_LINE> <INDENT> return mktime(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError( """Could not convert %r (type %s) to mxDateTime type"""%(value, type(value)) ) | Coerce value to the appropriate data type
Will accept:
DateTimeType
string or Unicode representations (DateTimeFrom)
float (DateTimeFromTicks)
time.struct_time (mktime) | 625941c3d10714528d5ffca4 |
def calculate_bmi(weight, height, system='metric'): <NEW_LINE> <INDENT> if system == 'metric': <NEW_LINE> <INDENT> bmi = (weight / (height ** 2)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bmi = 703 * (weight / (height ** 2)) <NEW_LINE> <DEDENT> return bmi | Return the Body Mass Idex (BMI) for the
given weight, height, and measurment system | 625941c355399d3f05588676 |
def update_model(model_spec, **actions): <NEW_LINE> <INDENT> def todo(model): <NEW_LINE> <INDENT> for k, v in actions.items(): <NEW_LINE> <INDENT> if not hasattr(model, k): <NEW_LINE> <INDENT> raise Exception( "%s has no attribute %s to update." % (model, k)) <NEW_LINE> <DEDENT> setattr(model, k, v) <NEW_LINE> <DEDENT> <DEDENT> return do_when_prepared(todo, model_spec) | Replace the specified attributes in the specified model.q | 625941c3e1aae11d1e749c78 |
def read_csv(filepath): <NEW_LINE> <INDENT> events = pd.read_csv(filepath + 'events.csv') <NEW_LINE> mortality = pd.read_csv(filepath + 'mortality_events.csv') <NEW_LINE> return events, mortality | TODO : This function needs to be completed.
Read the events.csv and mortality_events.csv files.
Variables returned from this function are passed as input to the metric functions. | 625941c37d847024c06be27c |
def _convert_remove_append(options): <NEW_LINE> <INDENT> converted = [] <NEW_LINE> for key in options: <NEW_LINE> <INDENT> v = options[key] <NEW_LINE> if isinstance(v, list): <NEW_LINE> <INDENT> v = ','.join(v) <NEW_LINE> <DEDENT> converted.append(key + "=" + v) <NEW_LINE> <DEDENT> return converted | Convert append and remove dicts read from .yaml file.
:param options: {'section.key': 'value1,value2', ...}
:type options: dict
:return: ['section.key=value1,value2', ...]
:rtype: list | 625941c33317a56b86939c1f |
@app.errorhandler(HTTP_BAD_REQUEST) <NEW_LINE> def invalid_request(error): <NEW_LINE> <INDENT> return make_response(jsonify(INVALID_REQUEST), HTTP_BAD_REQUEST) | Return a 400 with the INVALID_REQUEST message. | 625941c37d847024c06be27d |
def search(self, nums, target): <NEW_LINE> <INDENT> def BS (nums, first, last): <NEW_LINE> <INDENT> if first ==last: <NEW_LINE> <INDENT> if nums[first] == target: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if nums[(first+last)/2] < target: <NEW_LINE> <INDENT> return BS(nums, (first+last+1)/2, last) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return BS(nums, first, (first+last)/2) <NEW_LINE> <DEDENT> <DEDENT> def search_rec(nums,target,first,last): <NEW_LINE> <INDENT> if first == last: <NEW_LINE> <INDENT> if nums[first] == target: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> elif (nums[first]==nums[last]): <NEW_LINE> <INDENT> last-=1 <NEW_LINE> return search_rec(nums,target,first,last) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if (nums[first]<=nums[(first+last)/2]): <NEW_LINE> <INDENT> if (target<=nums[(first+last)/2])&(target>=nums[first]): <NEW_LINE> <INDENT> return search_rec(nums,target, first, (first+last)/2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return search_rec(nums,target,(first+last+1)/2,last) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if (target>=nums[(first+last)/2])&(target<=nums[last]): <NEW_LINE> <INDENT> return search_rec(nums,target,(first+last)/2,last) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return search_rec (nums,target, first, (first+last)/2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return search_rec(nums,target,0,len(nums)-1) | :type nums: List[int]
:type target: int
:rtype: int | 625941c3a219f33f3462892f |
def square(n): <NEW_LINE> <INDENT> return n * n | Returns the square of n, which is n * n. | 625941c3d6c5a1020814400c |
def change_user_password(user: User, password: str) -> None: <NEW_LINE> <INDENT> user.set_password(password) <NEW_LINE> user.save() | Change password field in user object | 625941c38da39b475bd64f34 |
def _add_hidden_group(self) -> None: <NEW_LINE> <INDENT> self[0] = Group('0', self.url, **self._get("/groups/0")) | Add the hidden "all known lights" Group ID 0 since its not returned.
Group #0 contains all known lights to the bridge. It can be pretty helpful to
have for making "entire home" changes. I am leaving it here so it is explicitly
known to exist. | 625941c3236d856c2ad4479b |
@pytest.mark.parametrize( "constructor_name, container_type", [ ("list", list), ("tuple", tuple), ("array", np.ndarray), ("sparse", sparse.csr_matrix), ("sparse_csr", sparse.csr_matrix), ("sparse_csc", sparse.csc_matrix), ("dataframe", lambda: pytest.importorskip("pandas").DataFrame), ("series", lambda: pytest.importorskip("pandas").Series), ("index", lambda: pytest.importorskip("pandas").Index), ("slice", slice), ], ) <NEW_LINE> @pytest.mark.parametrize( "dtype, superdtype", [ (np.int32, np.integer), (np.int64, np.integer), (np.float32, np.floating), (np.float64, np.floating), ], ) <NEW_LINE> def test_convert_container( constructor_name, container_type, dtype, superdtype, ): <NEW_LINE> <INDENT> if constructor_name in ("dataframe", "series", "index"): <NEW_LINE> <INDENT> container_type = container_type() <NEW_LINE> <DEDENT> container = [0, 1] <NEW_LINE> container_converted = _convert_container( container, constructor_name, dtype=dtype, ) <NEW_LINE> assert isinstance(container_converted, container_type) <NEW_LINE> if constructor_name in ("list", "tuple", "index"): <NEW_LINE> <INDENT> assert np.issubdtype(type(container_converted[0]), superdtype) <NEW_LINE> <DEDENT> elif hasattr(container_converted, "dtype"): <NEW_LINE> <INDENT> assert container_converted.dtype == dtype <NEW_LINE> <DEDENT> elif hasattr(container_converted, "dtypes"): <NEW_LINE> <INDENT> assert container_converted.dtypes[0] == dtype | Check that we convert the container to the right type of array with the
right data type. | 625941c382261d6c526ab45f |
def pretty_print_info(text): <NEW_LINE> <INDENT> pretty_print(text, Fore.CYAN) | Print info | 625941c3aad79263cf390a01 |
def test_get_context_with_invalid_choice(self): <NEW_LINE> <INDENT> class MyOperator1(BaseConditionOperator): <NEW_LINE> <INDENT> operator_id = 'my-op-1' <NEW_LINE> name = 'My Op 1' <NEW_LINE> value_field = ConditionValueIntegerField() <NEW_LINE> <DEDENT> class MyOperator2(BaseConditionOperator): <NEW_LINE> <INDENT> operator_id = 'my-op-2' <NEW_LINE> name = 'My Op 2' <NEW_LINE> value_field = ConditionValueCharField() <NEW_LINE> <DEDENT> class MyChoice1(BaseConditionChoice): <NEW_LINE> <INDENT> choice_id = 'my-choice-1' <NEW_LINE> name = 'My Choice 1' <NEW_LINE> operators = ConditionOperators([MyOperator1, MyOperator2]) <NEW_LINE> <DEDENT> choices = ConditionChoices([MyChoice1]) <NEW_LINE> field = ConditionsField(choices=choices) <NEW_LINE> result = field.widget.get_context( 'my_conditions', { 'mode': 'any', 'conditions': [ { 'choice': 'invalid-choice', 'op': 'my-op-1', 'value': 'my-value-1', }, ], }, { 'id': 'my-conditions', }) <NEW_LINE> rendered_rows = result['rendered_rows'] <NEW_LINE> self.assertEqual( rendered_rows, [{ 'choice': ( '<select disabled="disabled" id="my-conditions_choice_0"' ' name="my_conditions_choice[0]">\n' '<option value="my-choice-1">My Choice 1</option>\n' '<option value="invalid-choice" selected="selected">' 'invalid-choice</option>\n' '</select>' '<input name="my_conditions_choice[0]" type="hidden"' ' value="invalid-choice" />' ), 'operator': ( '<select disabled="disabled" id="my-conditions_operator_0"' ' name="my_conditions_operator[0]">\n' '<option value="my-op-1" selected="selected">' 'my-op-1</option>\n' '</select>' '<input name="my_conditions_operator[0]" type="hidden"' ' value="my-op-1" />' ), 'error': ('This choice no longer exists. 
You will need to ' 'delete the condition in order to make changes.'), }]) <NEW_LINE> serialized_choices = result['serialized_choices'] <NEW_LINE> self.assertEqual(len(serialized_choices), 1) <NEW_LINE> self.assertEqual(serialized_choices[0]['id'], 'my-choice-1') <NEW_LINE> serialized_rows = result['serialized_rows'] <NEW_LINE> self.assertEqual( serialized_rows, [{ 'choiceID': 'invalid-choice', 'operatorID': 'my-op-1', 'valid': False, 'value': 'my-value-1', 'error': ('This choice no longer exists. You will need to ' 'delete the condition in order to make changes.'), }]) | Testing ConditionsWidget.get_context with invalid choice | 625941c3ad47b63b2c509f42 |
def forward(self, kernel): <NEW_LINE> <INDENT> kernel_center = torch.sum(kernel.squeeze() * self.locs) <NEW_LINE> loss = F.mse_loss(kernel_center, self.center) <NEW_LINE> return loss | Calculates the loss for this kernel. | 625941c316aa5153ce36243b |
def calc_sharpe_ratio(returns, periods=250): <NEW_LINE> <INDENT> return np.sqrt(periods) * (np.mean(returns)) / np.std(returns) | 计算夏普比率 | 625941c3b545ff76a8913dd9 |
def test_read_selection(): <NEW_LINE> <INDENT> ch_names = ['MEG 2211', 'MEG 0223', 'MEG 1312', 'MEG 0412', 'MEG 1043', 'MEG 2042', 'MEG 2032', 'MEG 0522', 'MEG 1031'] <NEW_LINE> sel_names = ['Vertex', 'Left-temporal', 'Right-temporal', 'Left-parietal', 'Right-parietal', 'Left-occipital', 'Right-occipital', 'Left-frontal', 'Right-frontal'] <NEW_LINE> for i, name in enumerate(sel_names): <NEW_LINE> <INDENT> sel = read_selection(name) <NEW_LINE> assert(ch_names[i] in sel) <NEW_LINE> <DEDENT> all_ch = read_selection(['L', 'R']) <NEW_LINE> left = read_selection('L') <NEW_LINE> right = read_selection('R') <NEW_LINE> assert(len(all_ch) == len(left) + len(right)) <NEW_LINE> assert(len(set(left).intersection(set(right))) == 0) <NEW_LINE> frontal = read_selection('frontal') <NEW_LINE> occipital = read_selection('Right-occipital') <NEW_LINE> assert(len(set(frontal).intersection(set(occipital))) == 0) | Test reading of selections | 625941c3498bea3a759b9a73 |
def get_drive(self, drive_id): <NEW_LINE> <INDENT> return self.client.drives[drive_id].get() | Get metadata for a drive
:param drive_id:
:return: | 625941c3d268445f265b4e31 |
def str_to_key(value, key_table = wx, accel_format = 'ACCEL_%s', key_format = 'WXK_%s', key_transpositions = {}): <NEW_LINE> <INDENT> logger.debug('Converting "%s" to integers.', value) <NEW_LINE> modifiers = 0 <NEW_LINE> key = 0 <NEW_LINE> split = value.split('+') <NEW_LINE> for v in split: <NEW_LINE> <INDENT> v = v.upper() <NEW_LINE> a = accel_format % key_transpositions.get(v, v) <NEW_LINE> logger.debug('Accelerator format = %s.', a) <NEW_LINE> k = key_format % key_transpositions.get(v, v) <NEW_LINE> logger.debug('Key format = %s.', k) <NEW_LINE> if hasattr(key_table, a): <NEW_LINE> <INDENT> logger.debug('Found accelerator on %r.', key_table) <NEW_LINE> modifiers = modifiers | getattr(key_table, a) <NEW_LINE> <DEDENT> elif hasattr(key_table, k): <NEW_LINE> <INDENT> logger.debug('Found key on %r.', key_table) <NEW_LINE> if key: <NEW_LINE> <INDENT> raise ValueError('Multiple keys specified.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = getattr(key_table, k) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not key: <NEW_LINE> <INDENT> logger.debug('No key yet, falling back to ord.') <NEW_LINE> key = ord(split[-1]) <NEW_LINE> <DEDENT> logger.debug('modifiers = %d, key = %d.', modifiers, key) <NEW_LINE> return (modifiers, key) | Turns a string like "CTRL_ALT+K" into (3, 75).
To get a global hotkey, try passing:
key_table = win32con, accel_format = 'MOD_%s', key_format = 'VK_%s', key_transpositions = {'CTRL': 'CONTROL'} | 625941c376d4e153a657eaf3 |
def testNotOffline(self): <NEW_LINE> <INDENT> def fetcher(title, db='database'): <NEW_LINE> <INDENT> return SeqRecord(None) <NEW_LINE> <DEDENT> featureList = FeatureList('title', 'database', set(), sequenceFetcher=fetcher) <NEW_LINE> self.assertEqual(False, featureList.offline) | If the sequence fetcher does not return C{None} we must be marked as
being online. | 625941c3f8510a7c17cf96be |
def _osx_gpudata(): <NEW_LINE> <INDENT> gpus = [] <NEW_LINE> try: <NEW_LINE> <INDENT> pcictl_out = __salt__['cmd.run']('system_profiler SPDisplaysDataType') <NEW_LINE> for line in pcictl_out.splitlines(): <NEW_LINE> <INDENT> fieldname, _, fieldval = line.partition(': ') <NEW_LINE> if fieldname.strip() == "Chipset Model": <NEW_LINE> <INDENT> vendor, _, model = fieldval.partition(' ') <NEW_LINE> vendor = vendor.lower() <NEW_LINE> gpus.append({'vendor': vendor, 'model': model}) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> grains = {} <NEW_LINE> grains['num_gpus'] = len(gpus) <NEW_LINE> grains['gpus'] = gpus <NEW_LINE> return grains | num_gpus: int
gpus:
- vendor: nvidia|amd|ati|...
model: string | 625941c3be383301e01b544c |
def redo(self) -> None: <NEW_LINE> <INDENT> self.__logger.debug("Redo") <NEW_LINE> if self.canRedo: <NEW_LINE> <INDENT> self._currentStep += 1 <NEW_LINE> self.restore() <NEW_LINE> self.scene.isModified = True | Perform the redo operation | 625941c350812a4eaa59c2e6 |
def unset_session_cookie(self, resp): <NEW_LINE> <INDENT> resp.set_cookie( name=self.session_cookie_name, value='', domain=self.session_cookie_domain, path=self.session_cookie_path, secure=self.session_cookie_secure, http_only=self.session_cookie_http_only, expires=datetime.utcnow() - timedelta(1), max_age=-1 ) | Disables session cookies. | 625941c3ad47b63b2c509f43 |
def GetDirectedHausdorffDistance(self): <NEW_LINE> <INDENT> return _itkDirectedHausdorffDistanceImageFilterPython.itkDirectedHausdorffDistanceImageFilterIUC3IUC3_GetDirectedHausdorffDistance(self) | GetDirectedHausdorffDistance(self) -> double | 625941c35e10d32532c5eeea |
def test_delete_alias_creator(self): <NEW_LINE> <INDENT> self._create_alias(self.test_user) <NEW_LINE> result = self.get_success( self.handler.delete_association( create_requester(self.test_user), self.room_alias ) ) <NEW_LINE> self.assertEqual(self.room_id, result) <NEW_LINE> self.get_failure( self.handler.get_association(self.room_alias), synapse.api.errors.SynapseError, ) | An alias creator can delete their own alias. | 625941c3090684286d50eca7 |
def render_border(xmin: int = 0, xmax: int = 0, ymin: int = 0, ymax: int = 0): <NEW_LINE> <INDENT> pass | Set the boundaries of the border render and enable border render
:param xmin: X Min
:type xmin: int
:param xmax: X Max
:type xmax: int
:param ymin: Y Min
:type ymin: int
:param ymax: Y Max
:type ymax: int | 625941c38c3a87329515837c |
def get_webhook_logs(self, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> return self.get_webhook_logs_with_http_info(**kwargs) | List Webhook Log Entries # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_webhook_logs(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int page_size: The number of items to include in this response. When omitted, the maximum value of 1000 will be used.
:param int skip: Skips the given number of items when paging through large result sets.
:param str sort: The field by which results should be sorted. Sorting defaults to ascending order, prefix the field name with `-` to sort in descending order.
:param str status: Filter results by HTTP status codes.
:param float webhook_id: Filter results by Webhook.
:param float application_id:
:param float campaign_id: Filter results by campaign.
:param str request_uuid: Filter results by request UUID.
:param datetime created_before: Filter results where request and response times to return entries before parameter value, expected to be an RFC3339 timestamp string. You can use any timezone. Talon.One will convert to UTC internally.
:param datetime created_after: Filter results where request and response times to return entries after parameter value, expected to be an RFC3339 timestamp string. You can use any timezone. Talon.One will convert to UTC internally.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse20025
If the method is called asynchronously,
returns the request thread. | 625941c34e4d5625662d439d |
def _get_defined_exports(exports_file): <NEW_LINE> <INDENT> with io.open(exports_file, 'rt') as exports_file: <NEW_LINE> <INDENT> exports = ExportTable.deserialize(exports_file) <NEW_LINE> <DEDENT> return exports | Retrieve all defined exports from the nfs exports config file.
:param exports_file: path to nfs exports file
:type exports_file: string (unicode)
:returns: py:class:`scality_manila_utils.exports.ExportTable`
with the exports read from file | 625941c38e7ae83300e4af8f |
def query(self, query): <NEW_LINE> <INDENT> assert isinstance(query, str) <NEW_LINE> self.query = query | @param query: Set the search query
@param type: str | 625941c3d164cc6175782d11 |
def R4g(): <NEW_LINE> <INDENT> A = Ugde(b,t1+t2+t3)*Uedg(b,t1+t2)*Ugde(a,t1) <NEW_LINE> return evaluate_cumulant(A, positive_times=(t1, t2, t3), leading_index=a, arrays=["gg"]) | 625941c3de87d2750b85fd54 | |
def test_blank_logoUrl(self): <NEW_LINE> <INDENT> super().create_user(admin_user) <NEW_LINE> login = super().login_user(admin_user_login) <NEW_LINE> login_content = json.loads(login.data.decode('utf-8')) <NEW_LINE> token = [d['token'] for d in login_content['data']][0] <NEW_LINE> response = super().create_party(party_blank_logoUrl, token) <NEW_LINE> response_content = json.loads(response.data.decode()) <NEW_LINE> self.assertTrue( response_content['message'] == 'logoUrl cannot be blank') | Test that endpoint cannot accept a logoUrl | 625941c3d58c6744b4257c23 |
@users_route.route('/api/users/count', methods=['GET']) <NEW_LINE> def getUserCount(): <NEW_LINE> <INDENT> if request.method == 'GET': <NEW_LINE> <INDENT> return UsersHandler().count() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return jsonify(Error="Method not allowed. "), 405 | Returns the ammount of users | 625941c38e05c05ec3eea336 |
def run(self):
    """Set up widgets and run the dialog.

    Populates the clip-properties widgets from the discovered audio and
    video streams, hides the sections that do not apply, then runs the
    dialog (blocking).
    """
    # Only the first audio stream is displayed: `break` after one pass.
    for stream in self.audio_streams:
        self.channels.set_text(
            get_value_from_model(audio_channels, stream.get_channels()))
        self.sample_rate.set_text(
            get_value_from_model(audio_rates, stream.get_sample_rate()))
        self.has_audio = True
        break
    # Likewise, only the first video stream is considered.
    for stream in self.video_streams:
        self.size_width.set_text(str(stream.get_width()))
        self.size_height.set_text(str(stream.get_height()))
        self.is_image = stream.is_image()
        if not self.is_image:
            framerate_num = stream.get_framerate_num()
            framerate_denom = stream.get_framerate_denom()
            if framerate_num != 0 and framerate_denom != 0:
                self.frame_rate.set_text(
                    get_value_from_model(frame_rates,
                                         Gst.Fraction(framerate_num,
                                                      framerate_denom)))
                # Implausibly high rates are treated as unreliable and
                # the framerate override is switched off.
                # NOTE(review): `/` is true division on Python 3 but
                # floor division on Python 2 ints — confirm target.
                if (framerate_num / framerate_denom) > 500:
                    self.framerate_checkbutton.set_active(False)
            else:
                # Invalid framerate: show it and disable the controls.
                foo = str(framerate_num) + "/" + str(framerate_denom)
                # NOTE(review): the string is %-formatted *before* the
                # gettext `_()` lookup, so translation will likely miss;
                # the conventional form is `_("invalid (%s fps)") % foo`.
                self.frame_rate.set_text(_("invalid (%s fps)" % foo))
                self.framerate_checkbutton.set_active(False)
                self.framerate_checkbutton.set_sensitive(False)
                self.frame_rate.set_sensitive(False)
            self.aspect_ratio.set_text(
                get_value_from_model(pixel_aspect_ratios,
                                     Gst.Fraction(stream.get_par_num(),
                                                  stream.get_par_denom())))
        self.has_video = True
        break
    # Hide whole sections when the clip lacks that stream type.
    if not self.has_video:
        self.frame1.hide()
    if not self.has_audio:
        self.frame2.hide()
    if self.is_image:
        # Still images: hide framerate/aspect rows, relabel the header.
        self.hbox2.hide()
        self.hbox3.hide()
        self.video_header_label.set_markup("<b>" + _("Image:") + "</b>")
    self.dialog.connect("key-press-event", self._keyPressCb)
    self.dialog.run()
    # _id: 625941c3283ffb24f3c558c6
def state(self):
    """Return the keyboard LED state byte, or 0 if no tty is open.

    Issues the ``KDGETLED`` ioctl against the open console tty and
    returns the first byte written into the result buffer.
    """
    if self._tty_fd:
        # mutate_flag=True lets the kernel write the LED byte into _buf.
        fcntl.ioctl(self._tty_fd, KDGETLED, self._buf, True)
        # NOTE(review): `ord(...)` of the first list element suggests
        # `_buf` is a char-typed array (Python 2 style); with an
        # array('b') on Python 3 the element would already be an int —
        # confirm the buffer's typecode.
        return ord(self._buf.tolist()[0])
    else:
        return 0
    # _id: 625941c3baa26c4b54cb10e4
def filter(self, pred: ty.Callable[[T], bool]) -> IndexedNullableField: <NEW_LINE> <INDENT> ... | unindex each element for which `pred` is False (in new Series) | 625941c338b623060ff0adb1 |
def get_jobID(cmd_args):
    """Get the job id from the psub_cmd's directory path or PBS_JOBID
    provided by the user on the command line.

    :param cmd_args: parsed arguments with a ``psub_cmd_or_PBS_JOBID``
        attribute (either a path to a ``psub_cmd`` file or a numeric id)
    :returns: the job id as a string
    """
    if os.path.isfile(cmd_args.psub_cmd_or_PBS_JOBID):
        # Extract the numeric id embedded in the run directory path.
        m = re.match(r'.*run-p.(\d+).balza.\d+/psub_cmd',
                     cmd_args.psub_cmd_or_PBS_JOBID)
        if not m:
            fatal_error("Error with regular expression {}".format(
                cmd_args.psub_cmd_or_PBS_JOBID))
        return m.group(1)
    # Anchor the pattern to the whole string with \Z: the original
    # `re.match(r'\d+', ...)` only anchored at the start, so malformed
    # ids like "123abc" slipped through validation.
    if not re.match(r'\d+\Z', cmd_args.psub_cmd_or_PBS_JOBID):
        fatal_error("Unrecognized job id format {}".format(
            cmd_args.psub_cmd_or_PBS_JOBID))
    return cmd_args.psub_cmd_or_PBS_JOBID
    # _id: 625941c3e1aae11d1e749c79
@task(venv) <NEW_LINE> def unused(c): <NEW_LINE> <INDENT> c.run("venv/bin/vulture --sort-by-size {}".format(META["name"])) | Format the source code of the project. | 625941c3cb5e8a47e48b7a70 |
def estimate_omega(q1, q2):
    """Estimate omega: the mean of the squared allele-frequency
    differences between the two populations, each normalised by the
    pop2 term ``q2 * (1 - q2)``.

    :param q1: array of alt allele freq in pop1
    :param q2: array of alt allele freq in pop2
    :return: scalar estimate (numpy float)
    """
    squared_diff = np.square(q1 - q2)
    norm = q2 * (1.0 - q2)
    return np.mean(squared_diff / norm)
    # _id: 625941c37c178a314d6ef420
def __init__(self, label, ipmi_host, ipmi_user, ipmi_pass):
    """Register the given node.

    ipmi_* must be supplied to allow the HaaS to do things like reboot
    the node.

    The node is initially registered with no nics; see the Nic class.

    :param label: human-readable identifier for the node
    :param ipmi_host: hostname/address of the node's IPMI controller
    :param ipmi_user: IPMI username
    :param ipmi_pass: IPMI password
    """
    self.label = label
    self.ipmi_host = ipmi_host
    self.ipmi_user = ipmi_user
    self.ipmi_pass = ipmi_pass
    # _id: 625941c3b830903b967e98d0
def _add_readout_trig(self): <NEW_LINE> <INDENT> trig = np.zeros_like(self.readout_iq) <NEW_LINE> start = (np.abs(self.readout_iq) > 0.0).nonzero()[0][0] <NEW_LINE> end = int( np.min((start + self.readout_trig_duration * self.sample_rate, self.n_pts_readout))) <NEW_LINE> trig[start:end] = self.readout_trig_amplitude <NEW_LINE> trig[0] = 0.0 <NEW_LINE> trig[-1] = 0.0 <NEW_LINE> self.readout_trig = trig | Create waveform for readout trigger. | 625941c3f548e778e58cd540 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.