def build_kitchen_sink():
    """All settings set"""
    from sendgrid.helpers.mail import (
        Mail, From, To, Cc, Bcc, Subject, PlainTextContent,
        HtmlContent, SendGridException, Substitution,
        Header, CustomArg, SendAt, Content, MimeType, Attachment,
        FileName, FileContent, FileType, Disposition, ContentId,
        TemplateId, Section, ReplyTo, Category, BatchId, Asm,
        GroupId, GroupsToDisplay, IpPoolName, MailSettings,
        BccSettings, BccSettingsEmail, BypassListManagement,
        FooterSettings, FooterText, FooterHtml, SandBoxMode,
        SpamCheck, SpamThreshold, SpamUrl, TrackingSettings,
        ClickTracking, SubscriptionTracking, SubscriptionText,
        SubscriptionHtml, SubscriptionSubstitutionTag,
        OpenTracking, OpenTrackingSubstitutionTag, Ganalytics,
        UtmSource, UtmMedium, UtmTerm, UtmContent, UtmCampaign)

    message = Mail()

    # Define Personalizations
    message.to = To('test1@sendgrid.com', 'Example User1', p=0)
    message.to = [
        To('test2@sendgrid.com', 'Example User2', p=0),
        To('test3@sendgrid.com', 'Example User3', p=0)
    ]
    message.cc = Cc('test4@example.com', 'Example User4', p=0)
    message.cc = [
        Cc('test5@example.com', 'Example User5', p=0),
        Cc('test6@example.com', 'Example User6', p=0)
    ]
    message.bcc = Bcc('test7@example.com', 'Example User7', p=0)
    message.bcc = [
        Bcc('test8@example.com', 'Example User8', p=0),
        Bcc('test9@example.com', 'Example User9', p=0)
    ]
    message.subject = Subject('Sending with SendGrid is Fun 0', p=0)
    message.header = Header('X-Test1', 'Test1', p=0)
    message.header = Header('X-Test2', 'Test2', p=0)
    message.header = [
        Header('X-Test3', 'Test3', p=0),
        Header('X-Test4', 'Test4', p=0)
    ]
    message.substitution = Substitution('%name1%', 'Example Name 1', p=0)
    message.substitution = Substitution('%city1%', 'Example City 1', p=0)
    message.substitution = [
        Substitution('%name2%', 'Example Name 2', p=0),
        Substitution('%city2%', 'Example City 2', p=0)
    ]
    message.custom_arg = CustomArg('marketing1', 'true', p=0)
    message.custom_arg = CustomArg('transactional1', 'false', p=0)
    message.custom_arg = [
        CustomArg('marketing2', 'false', p=0),
        CustomArg('transactional2', 'true', p=0)
    ]
    message.send_at = SendAt(1461775051, p=0)

    message.to = To('test10@example.com', 'Example User10', p=1)
    message.to = [
        To('test11@example.com', 'Example User11', p=1),
        To('test12@example.com', 'Example User12', p=1)
    ]
    message.cc = Cc('test13@example.com', 'Example User13', p=1)
    message.cc = [
        Cc('test14@example.com', 'Example User14', p=1),
        Cc('test15@example.com', 'Example User15', p=1)
    ]
    message.bcc = Bcc('test16@example.com', 'Example User16', p=1)
    message.bcc = [
        Bcc('test17@example.com', 'Example User17', p=1),
        Bcc('test18@example.com', 'Example User18', p=1)
    ]
    message.header = Header('X-Test5', 'Test5', p=1)
    message.header = Header('X-Test6', 'Test6', p=1)
    message.header = [
        Header('X-Test7', 'Test7', p=1),
        Header('X-Test8', 'Test8', p=1)
    ]
    message.substitution = Substitution('%name3%', 'Example Name 3', p=1)
    message.substitution = Substitution('%city3%', 'Example City 3', p=1)
    message.substitution = [
        Substitution('%name4%', 'Example Name 4', p=1),
        Substitution('%city4%', 'Example City 4', p=1)
    ]
    message.custom_arg = CustomArg('marketing3', 'true', p=1)
    message.custom_arg = CustomArg('transactional3', 'false', p=1)
    message.custom_arg = [
        CustomArg('marketing4', 'false', p=1),
        CustomArg('transactional4', 'true', p=1)
    ]
    message.send_at = SendAt(1461775052, p=1)
    message.subject = Subject('Sending with SendGrid is Fun 1', p=1)

    # The values below this comment are global to the entire message
    message.from_email = From('dx@sendgrid.com', 'DX')
    message.reply_to = ReplyTo('dx_reply@sendgrid.com', 'DX Reply')
    message.subject = Subject('Sending with SendGrid is Fun 2')
    message.content = Content(
        MimeType.text, 'and easy to do anywhere, even with Python')
    message.content = Content(
        MimeType.html,
        '<strong>and easy to do anywhere, even with Python</strong>')
    message.content = [
        Content('text/calendar', 'Party Time!!'),
        Content('text/custom', 'Party Time 2!!')
    ]
    message.attachment = Attachment(FileContent('base64 encoded content 1'),
                                    FileType('application/pdf'),
                                    FileName('balance_001.pdf'),
                                    Disposition('attachment'),
                                    ContentId('Content ID 1'))
    message.attachment = [
        Attachment(FileContent('base64 encoded content 2'),
                   FileType('image/png'),
                   FileName('banner.png'),
                   Disposition('inline'),
                   ContentId('Content ID 2')),
        Attachment(FileContent('base64 encoded content 3'),
                   FileType('image/png'),
                   FileName('banner2.png'),
                   Disposition('inline'),
                   ContentId('Content ID 3'))
    ]
    message.template_id = TemplateId('13b8f94f-bcae-4ec6-b752-70d6cb59f932')
    message.section = Section('%section1%', 'Substitution for Section 1 Tag')
    message.section = [
        Section('%section2%', 'Substitution for Section 2 Tag'),
        Section('%section3%', 'Substitution for Section 3 Tag')
    ]
    message.header = Header('X-Test9', 'Test9')
    message.header = Header('X-Test10', 'Test10')
    message.header = [
        Header('X-Test11', 'Test11'),
        Header('X-Test12', 'Test12')
    ]
    message.category = Category('Category 1')
    message.category = Category('Category 2')
    message.category = [
        Category('Category 1'),
        Category('Category 2')
    ]
    message.custom_arg = CustomArg('marketing5', 'false')
    message.custom_arg = CustomArg('transactional5', 'true')
    message.custom_arg = [
        CustomArg('marketing6', 'true'),
        CustomArg('transactional6', 'false')
    ]
    message.send_at = SendAt(1461775053)
    message.batch_id = BatchId("HkJ5yLYULb7Rj8GKSx7u025ouWVlMgAi")
    message.asm = Asm(GroupId(1), GroupsToDisplay([1, 2, 3, 4]))
    message.ip_pool_name = IpPoolName("IP Pool Name")

    mail_settings = MailSettings()
    mail_settings.bcc_settings = BccSettings(
        False, BccSettingsEmail("bcc@twilio.com"))
    mail_settings.bypass_list_management = BypassListManagement(False)
    mail_settings.footer_settings = FooterSettings(
        True, FooterText("w00t"), FooterHtml("<strong>w00t!</strong>"))
    mail_settings.sandbox_mode = SandBoxMode(True)
    mail_settings.spam_check = SpamCheck(
        True, SpamThreshold(5), SpamUrl("https://example.com"))
    message.mail_settings = mail_settings

    tracking_settings = TrackingSettings()
    tracking_settings.click_tracking = ClickTracking(True, False)
    tracking_settings.open_tracking = OpenTracking(
        True, OpenTrackingSubstitutionTag("open_tracking"))
    tracking_settings.subscription_tracking = SubscriptionTracking(
        True,
        SubscriptionText("Goodbye"),
        SubscriptionHtml("<strong>Goodbye!</strong>"),
        SubscriptionSubstitutionTag("unsubscribe"))
    tracking_settings.ganalytics = Ganalytics(
        True,
        UtmSource("utm_source"),
        UtmMedium("utm_medium"),
        UtmTerm("utm_term"),
        UtmContent("utm_content"),
        UtmCampaign("utm_campaign"))
    message.tracking_settings = tracking_settings
    return message.get()
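A minimal sketch of actually sending the message built above, assuming the sendgrid v6 Python client and a SENDGRID_API_KEY environment variable (not part of the original sample):

import os
from sendgrid import SendGridAPIClient

message = build_kitchen_sink()  # the request-body dict produced by Mail.get()
sg = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
response = sg.send(message)
print(response.status_code)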
def ageostrophic_wind(heights, f, dx, dy, u, v, dim_order='yx'):
    r"""Calculate the ageostrophic wind given the heights or geopotential.

    Parameters
    ----------
    heights : (M, N) ndarray
        The height field.
    f : array_like
        The Coriolis parameter. This can be a scalar to be applied
        everywhere or an array of values.
    dx : float or ndarray
        The grid spacing(s) in the x-direction. If an array, there should
        be one item less than the size of `heights` along the applicable
        axis.
    dy : float or ndarray
        The grid spacing(s) in the y-direction. If an array, there should
        be one item less than the size of `heights` along the applicable
        axis.
    u : (M, N) ndarray
        The u wind field.
    v : (M, N) ndarray
        The v wind field.

    Returns
    -------
    A 2-item tuple of arrays
        A tuple of the u-component and v-component of the ageostrophic
        wind.

    Notes
    -----
    If inputs have more than two dimensions, they are assumed to have
    either leading dimensions of (x, y) or trailing dimensions of (y, x),
    depending on the value of ``dim_order``.

    """
    u_geostrophic, v_geostrophic = geostrophic_wind(heights, f, dx, dy,
                                                    dim_order=dim_order)
    return u - u_geostrophic, v - v_geostrophic
def build_class(name, basenames=(), doc=None):
    """create and initialize an astroid ClassDef node"""
    node = nodes.ClassDef(name, doc)
    for base in basenames:
        basenode = nodes.Name()
        basenode.name = base
        node.bases.append(basenode)
        basenode.parent = node
    return node
def emitCurrentRecordChanged(self, item):
    """
    Emits the record changed signal for the given item, provided the
    signals are not currently blocked.

    :param      item | <QTreeWidgetItem>
    """
    if self.signalsBlocked():
        return

    # emit that the current record has changed
    if isinstance(item, XOrbRecordItem):
        self.currentRecordChanged.emit(item.record())
    else:
        self.currentRecordChanged.emit(None)
def create_like(self, repository_id, pull_request_id, thread_id, comment_id, project=None):
    """CreateLike.
    [Preview API] Add a like on a comment.
    :param str repository_id: The repository ID of the pull request's target branch.
    :param int pull_request_id: ID of the pull request.
    :param int thread_id: The ID of the thread that contains the comment.
    :param int comment_id: The ID of the comment.
    :param str project: Project ID or project name
    """
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if repository_id is not None:
        route_values['repositoryId'] = self._serialize.url('repository_id', repository_id, 'str')
    if pull_request_id is not None:
        route_values['pullRequestId'] = self._serialize.url('pull_request_id', pull_request_id, 'int')
    if thread_id is not None:
        route_values['threadId'] = self._serialize.url('thread_id', thread_id, 'int')
    if comment_id is not None:
        route_values['commentId'] = self._serialize.url('comment_id', comment_id, 'int')
    self._send(http_method='POST',
               location_id='5f2e2851-1389-425b-a00b-fb2adb3ef31b',
               version='5.1-preview.1',
               route_values=route_values)
def compile_dependencies(self, sourcepath, include_self=False):
    """
    Apply compile on all dependencies

    Args:
        sourcepath (string): Sass source path to compile to its
            destination using project settings.

    Keyword Arguments:
        include_self (bool): If ``True`` the given sourcepath is added to
            the items to compile, else only its dependencies are compiled.
    """
    items = self.inspector.parents(sourcepath)

    # Also add the current event related path
    if include_self:
        items.add(sourcepath)

    return filter(None, [self.compile_source(item) for item in items])
def _get_sts_token(self):
    """
    Assume a role via STS and return the credentials.

    First connect to STS via :py:func:`boto3.client`, then
    assume a role using `boto3.STS.Client.assume_role
    <https://boto3.readthedocs.org/en/latest/reference/services/sts.html#STS.Client.assume_role>`_
    using ``self.account_id`` and ``self.account_role`` (and optionally
    ``self.external_id``, ``self.mfa_serial_number``, ``self.mfa_token``).
    Return the resulting :py:class:`~.ConnectableCredentials` object.

    :returns: STS assumed role credentials
    :rtype: :py:class:`~.ConnectableCredentials`
    """
    logger.debug("Connecting to STS in region %s", self.region)
    sts = boto3.client('sts', region_name=self.region)
    arn = "arn:aws:iam::%s:role/%s" % (self.account_id, self.account_role)
    logger.debug("STS assume role for %s", arn)
    assume_kwargs = {
        'RoleArn': arn,
        'RoleSessionName': 'awslimitchecker'
    }
    if self.external_id is not None:
        assume_kwargs['ExternalId'] = self.external_id
    if self.mfa_serial_number is not None:
        assume_kwargs['SerialNumber'] = self.mfa_serial_number
    if self.mfa_token is not None:
        assume_kwargs['TokenCode'] = self.mfa_token
    role = sts.assume_role(**assume_kwargs)
    creds = ConnectableCredentials(role)
    creds.account_id = self.account_id
    logger.debug("Got STS credentials for role; access_key_id=%s "
                 "(account_id=%s)", creds.access_key, creds.account_id)
    return creds
def check_existing_vr_tag(self):
    """
    Checks if the version-release tag (primary, not floating, tag) already
    exists, and fails the plugin if it does.
    """
    primary_images = get_primary_images(self.workflow)
    if not primary_images:
        return

    vr_image = None
    for image in primary_images:
        if '-' in image.tag:
            vr_image = image
            break
    if not vr_image:
        return

    should_fail = False
    for registry_name, registry in self.registries.items():
        pullspec = vr_image.copy()
        pullspec.registry = registry_name
        insecure = registry.get('insecure', False)
        secret = registry.get('secret', None)

        manifest_list = get_manifest_list(pullspec, registry_name, insecure, secret)
        if manifest_list:
            self.log.error("Primary tag already exists in registry: %s", pullspec)
            should_fail = True
    if should_fail:
        raise RuntimeError("Primary tag already exists in registry")
def validate_instance(cls, opts):
    """Validates an instance of global options for cases that are not prohibited via
    registration.

    For example: mutually exclusive options may be registered by passing a
    `mutually_exclusive_group`, but when multiple flags must be specified together, it can be
    necessary to specify post-parse checks.

    Raises pants.option.errors.OptionsError on validation failure.
    """
    if opts.loop and (not opts.v2 or opts.v1):
        raise OptionsError('The --loop option only works with @console_rules, and thus requires '
                           '`--v2 --no-v1` to function as expected.')
    if opts.loop and not opts.enable_pantsd:
        raise OptionsError('The --loop option requires `--enable-pantsd`, in order to watch files.')

    if opts.v2_ui and not opts.v2:
        raise OptionsError('The --v2-ui option requires --v2 to be enabled together.')
def addStencilBranch(self, disp, weight):
    """
    Set or overwrite the stencil weight for the given direction
    @param disp displacement vector
    @param weight stencil weight
    """
    self.stencil[tuple(disp)] = weight
    self.__setPartionLogic(disp)
def set_ontime(self, ontime):
    """Set the duration the switch stays on when toggled."""
    try:
        ontime = float(ontime)
    except Exception as err:
        LOG.debug("SwitchPowermeter.set_ontime: Exception %s" % (err,))
        return False
    self.actionNodeData("ON_TIME", ontime)
def close(self):
    """Close the channel to the queue."""
    self.cancel()
    self.backend.close()
    self._closed = True
def delete_local_file(file_name):
    """
    Deletes the file associated with the file_name passed from local storage.

    :param str file_name: Filename of the file to be deleted
    :return str: Filename of the file that was just deleted
    """
    try:
        os.remove(file_name)
        log.info(f"Deletion for {file_name} has finished")
        return file_name
    except OSError:
        # file was already gone (or not removable); implicitly returns None
        pass
def integrate_data(xdata, ydata, xmin=None, xmax=None, autozero=0):
    """
    Numerically integrates up the ydata using the trapezoid approximation.
    Returns (xdata, integrated ydata).

    autozero is the number of data points to use as an estimate of the
    background (then subtracted before integrating).
    """
    # sort the arrays and make sure they're numpy arrays
    [xdata, ydata] = sort_matrix([xdata, ydata], 0)
    xdata = _n.array(xdata)
    ydata = _n.array(ydata)

    if xmin is None: xmin = min(xdata)
    if xmax is None: xmax = max(xdata)

    # find the index range
    imin = xdata.searchsorted(xmin)
    imax = xdata.searchsorted(xmax)

    xint = [xdata[imin]]
    yint = [0]

    # get the autozero
    if autozero >= 1:
        zero = _n.average(ydata[imin:imin + int(autozero)])
        ydata = ydata - zero

    # accumulate trapezoid areas
    for n in range(imin + 1, imax):
        xint.append(xdata[n])
        yint.append(yint[-1] + 0.5 * (xdata[n] - xdata[n - 1]) * (ydata[n] + ydata[n - 1]))

    return _n.array(xint), _n.array(yint)
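A quick numeric check of the same trapezoid accumulation, as a self-contained sketch with numpy standing in for the module's _n alias: integrating y = 2x from 0 to 1 should give 1.

import numpy as np

x = np.linspace(0, 1, 101)
y = 2 * x
# same accumulation rule as integrate_data's loop, vectorized
yint = np.concatenate(([0], np.cumsum(0.5 * np.diff(x) * (y[1:] + y[:-1]))))
print(yint[-1])  # ~1.0 (exact for a linear integrand)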
def delete(self, ids):
    """
    Method to delete ipv6's by their ids

    :param ids: Identifiers of ipv6's
    :return: None
    """
    url = build_uri_with_ids('api/v4/ipv6/%s/', ids)
    return super(ApiV4IPv6, self).delete(url)
def query(self, query=None):
    """
    If query is given, modify the URL correspondingly, return the current
    query otherwise.
    """
    if query is None:
        return self.url.query
    self.url.query = query
def sitemap(self):
    """Return the sitemap URI based on maps or explicit settings."""
    if self.sitemap_name is not None:
        return self.sitemap_name
    return self.sitemap_uri(self.resource_list_name)
def Append(self, component=None, **kwarg):
    """Append a new pathspec component to this pathspec."""
    if component is None:
        component = self.__class__(**kwarg)

    if self.HasField("pathtype"):
        self.last.nested_path = component
    else:
        for k, v in iteritems(kwarg):
            setattr(self, k, v)
        self.SetRawData(component.GetRawData())
    return self
def getPlayAreaRect(self):
    """
    Returns the 4 corner positions of the Play Area (formerly named Soft
    Bounds). Corners are in counter-clockwise order. Standing center
    (0,0,0) is the center of the Play Area. It's a rectangle. 2 sides are
    parallel to the X axis and 2 sides are parallel to the Z axis. Height
    of every corner is 0Y (on the floor).
    """
    fn = self.function_table.getPlayAreaRect
    rect = HmdQuad_t()
    result = fn(byref(rect))
    return result, rect
def list_vpnservices(retrieve_all=True, profile=None, **kwargs):
    '''
    Fetches a list of all configured VPN services for a tenant

    CLI Example:

    .. code-block:: bash

        salt '*' neutron.list_vpnservices

    :param retrieve_all: True or False, default: True (Optional)
    :param profile: Profile to build on (Optional)
    :return: List of VPN services
    '''
    conn = _auth(profile)
    return conn.list_vpnservices(retrieve_all, **kwargs)
def join_tags(tags):
    """
    Given list of ``Tag`` instances, creates a string representation of
    the list suitable for editing by the user, such that submitting the
    given string representation back without changing it will give the
    same list of tags.

    Tag names which contain DELIMITER will be double quoted.

    Adapted from Taggit's _edit_string_for_tags()
    Ported from Jonathan Buchanan's `django-tagging
    <http://django-tagging.googlecode.com/>`_
    """
    names = []
    delimiter = settings.TAGGIT_SELECTIZE['DELIMITER']
    for tag in tags:
        name = tag.name
        if delimiter in name or ' ' in name:
            names.append('"%s"' % name)
        else:
            names.append(name)
    return delimiter.join(sorted(names))
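A hedged sketch of the quoting behavior, with a namedtuple standing in for Taggit's Tag model and ',' standing in for the configured DELIMITER (both assumptions, not the project's real objects):

from collections import namedtuple

Tag = namedtuple('Tag', 'name')  # stand-in for taggit.models.Tag

def join_tags_demo(tags, delimiter=','):
    # quote names containing the delimiter or a space, as join_tags does
    names = ['"%s"' % t.name if delimiter in t.name or ' ' in t.name
             else t.name for t in tags]
    return delimiter.join(sorted(names))

print(join_tags_demo([Tag('red'), Tag('dark blue'), Tag('a,b')]))
# "a,b","dark blue",red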
def variations(word):
    """Create variations of the word based on letter combinations like oo, sh, etc."""
    if len(word) == 1:
        return [[word[0]]]
    elif word == 'aa':
        return [['A']]
    elif word == 'ee':
        return [['i']]
    elif word == 'ei':
        return [['ei']]
    elif word in ['oo', 'ou']:
        return [['u']]
    elif word == 'kha':
        return [['kha'], ['kh', 'a']]
    elif word in ['kh', 'gh', 'ch', 'sh', 'zh', 'ck']:
        return [[word]]
    elif word in ["'ee", "'ei"]:
        return [["'i"]]
    elif word in ["'oo", "'ou"]:
        return [["'u"]]
    elif word in ["a'", "e'", "o'", "i'", "u'", "A'"]:
        return [[word[0] + "'"]]
    elif word in ["'a", "'e", "'o", "'i", "'u", "'A"]:
        return [["'" + word[1]]]
    elif len(word) == 2 and word[0] == word[1]:
        return [[word[0]]]

    if word[:2] == 'aa':
        return [['A'] + i for i in variations(word[2:])]
    elif word[:2] == 'ee':
        return [['i'] + i for i in variations(word[2:])]
    elif word[:2] in ['oo', 'ou']:
        return [['u'] + i for i in variations(word[2:])]
    elif word[:3] == 'kha':
        return \
            [['kha'] + i for i in variations(word[3:])] + \
            [['kh', 'a'] + i for i in variations(word[3:])] + \
            [['k', 'h', 'a'] + i for i in variations(word[3:])]
    elif word[:2] in ['kh', 'gh', 'ch', 'sh', 'zh', 'ck']:
        return \
            [[word[:2]] + i for i in variations(word[2:])] + \
            [[word[0]] + i for i in variations(word[1:])]
    elif word[:2] in ["a'", "e'", "o'", "i'", "u'", "A'"]:
        return [[word[:2]] + i for i in variations(word[2:])]
    elif word[:3] in ["'ee", "'ei"]:
        return [["'i"] + i for i in variations(word[3:])]
    elif word[:3] in ["'oo", "'ou"]:
        return [["'u"] + i for i in variations(word[3:])]
    elif word[:2] in ["'a", "'e", "'o", "'i", "'u", "'A"]:
        return [[word[:2]] + i for i in variations(word[2:])]
    elif len(word) >= 2 and word[0] == word[1]:
        return [[word[0]] + i for i in variations(word[2:])]
    else:
        return [[word[0]] + i for i in variations(word[1:])]
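A quick illustration of the recursion, assuming the function is callable as-is: digraphs such as 'sh' branch into both a combined and a letter-by-letter reading.

print(variations('oo'))    # [['u']]
print(variations('shir'))  # [['sh', 'i', 'r'], ['s', 'h', 'i', 'r']]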
def get_canvas_image(self):
    """Get canvas image object.

    Returns
    -------
    imgobj : `~ginga.canvas.types.image.NormImage`
        Normalized image sitting on the canvas.

    """
    if self._imgobj is not None:
        return self._imgobj

    try:
        # See if there is an image on the canvas
        self._imgobj = self.canvas.get_object_by_tag(self._canvas_img_tag)
        self._imgobj.add_callback('image-set', self._image_set_cb)

    except KeyError:
        # add a normalized image item to this canvas if we don't
        # have one already--then just keep reusing it
        NormImage = self.canvas.getDrawClass('normimage')

        interp = self.t_.get('interpolation', 'basic')
        # previous choice might not be available if preferences
        # were saved when opencv was being used (and not used now)
        # --if so, default to "basic"
        if interp not in trcalc.interpolation_methods:
            interp = 'basic'

        self._imgobj = NormImage(0, 0, None, alpha=1.0,
                                 interpolation=interp)
        self._imgobj.add_callback('image-set', self._image_set_cb)

    return self._imgobj
def returns(self) -> T.Optional[DocstringReturns]:
    """Return return information indicated in docstring."""
    try:
        return next(
            DocstringReturns.from_meta(meta)
            for meta in self.meta
            if meta.args[0] in {"return", "returns", "yield", "yields"}
        )
    except StopIteration:
        return None
async def get_next_opponent(self):
    """ Get the opponent of the potential next match. See :func:`get_next_match`

    |methcoro|

    Raises:
        APIException

    """
    next_match = await self.get_next_match()
    if next_match is not None:
        opponent_id = (next_match.player1_id
                       if next_match.player2_id == self._id
                       else next_match.player2_id)
        return await self._tournament.get_participant(opponent_id)
    return None
def print_fields(bf, *args, **kwargs):
    """
    Print all the fields of a Bitfield object to stdout. This is
    primarily a diagnostic aid during debugging.
    """
    vals = {k: hex(v) for k, v in bf.items()}
    print(bf.base, vals, *args, **kwargs)
def ensure_ndarray(ndarray_or_adjusted_array):
    """
    Return the input as a numpy ndarray.

    This is a no-op if the input is already an ndarray.  If the input is an
    adjusted_array, this extracts a read-only view of its internal data
    buffer.

    Parameters
    ----------
    ndarray_or_adjusted_array : numpy.ndarray | zipline.data.adjusted_array

    Returns
    -------
    out : The input, converted to an ndarray.
    """
    if isinstance(ndarray_or_adjusted_array, ndarray):
        return ndarray_or_adjusted_array
    elif isinstance(ndarray_or_adjusted_array, AdjustedArray):
        return ndarray_or_adjusted_array.data
    else:
        raise TypeError(
            "Can't convert %s to ndarray" %
            type(ndarray_or_adjusted_array).__name__
        )
def get_darker_image(self):
    """Returns a copy of the icon darkened to 80% brightness"""
    icon_pressed = self.icon.copy()
    for x in range(self.w):
        for y in range(self.h):
            r, g, b, *_ = tuple(self.icon.get_at((x, y)))
            const = 0.8
            r = int(const * r)
            g = int(const * g)
            b = int(const * b)
            icon_pressed.set_at((x, y), (r, g, b))
    return icon_pressed
def get_ISI_ratio(sorting, sampling_frequency, unit_ids=None, save_as_property=True):
    '''This function calculates the ratio between the frequency of spikes
    present within 0- to 2-ms (refractory period) interspike interval (ISI)
    and those at 0- to 20-ms interval. It then returns the ratios and also
    adds a property, ISI_ratio, for the passed in sorting extractor. Taken
    from:

    "Large-scale, high-density (up to 512 channels) recording of local
    circuits in behaving animals" - Antal Berényi, et al.

    Parameters
    ----------
    sorting: SortingExtractor
        SortingExtractor for the results file being analyzed
    sampling_frequency: float
        The sampling frequency of recording
    unit_ids: list
        List of unit ids for which to get ISIratios
    save_as_property: boolean
        If True, this will save the ISI_ratio as a property in the given
        sorting extractor.

    Returns
    ----------
    ISI_ratios: list of floats
        A list of ratios for each unit passed into this function. Each
        ratio is the ratio between the frequency of spikes present within
        0- to 2-ms ISI and those at 0- to 20-ms interval for the
        corresponding spike train.
    '''
    ISI_ratios = []
    if unit_ids is None:
        unit_ids = sorting.get_unit_ids()
    for unit_id in unit_ids:
        unit_spike_train = sorting.get_unit_spike_train(unit_id)
        ref_frame_period = sampling_frequency * 0.002
        long_interval = sampling_frequency * 0.02

        ISIs = np.diff(unit_spike_train)
        num_ref_violations = float(sum(ISIs < ref_frame_period))
        num_longer_interval = float(sum(ISIs < long_interval))

        ISI_ratio = num_ref_violations / num_longer_interval
        if save_as_property:
            sorting.set_unit_property(unit_id, 'ISI_ratio', ISI_ratio)
        ISI_ratios.append(ISI_ratio)
    return ISI_ratios
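A hedged standalone rendering of the ratio on a synthetic spike train (frame times at an assumed 30 kHz sampling rate), mirroring the loop body above:

import numpy as np

fs = 30000.0  # sampling frequency, frames per second
spike_frames = np.array([0, 30, 330, 930, 3000])  # gaps of 1, 10, 20, 69 ms
isis = np.diff(spike_frames)
num_ref = float(np.sum(isis < fs * 0.002))   # ISIs shorter than 2 ms -> 1
num_long = float(np.sum(isis < fs * 0.02))   # ISIs shorter than 20 ms -> 2
print(num_ref / num_long)  # 0.5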
def execution_context(self):
    """
    Access the execution_context

    :returns: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextList
    :rtype: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextList
    """
    if self._execution_context is None:
        self._execution_context = ExecutionContextList(
            self._version,
            flow_sid=self._solution['flow_sid'],
            execution_sid=self._solution['sid'],
        )
    return self._execution_context
def audio(audio, sample_rate, name=None, out=None, subdir='', timeout=5,
          **kwargs):
    """summary audio files to listen on a browser.

    A sampled array is converted to a WAV audio file, saved to the output
    directory, and reported to the ChainerUI server. The audio file is
    saved every time this function is called. The audio files are shown
    on the `assets` endpoint vertically. If you need to aggregate audio
    files in a row, use :func:`~chainerui.summary.reporter`.

    Example of how to set arguments::

        >>> from chainerui import summary
        >>> summary.set_out('/path/to/output')
        >>> rate = 44100
        >>>
        >>> summary.audio(sampled_array, rate, name='test')
        >>> # sampled_array can be listened to on a browser.

    Add a description about the audio file::

        >>> summary.audio(
        >>>     sampled_array, rate, name='test', epoch=1, iteration=100)
        >>> # 'epoch' and 'iteration' columns will be shown.

    Args:
        audio (:class:`numpy.ndarray` or :class:`cupy.ndarray` or \
            :class:`chainer.Variable`): sampled wave array.
        sample_rate (int): sampling rate.
        name (str): name of the audio. set as column name. when not
            setting, assigned ``'audio'``.
        out (str): directory path of output.
        subdir (str): sub-directory path of output.
        **kwargs (dict): key-value pair to show as description. regardless
            of empty or not, a timestamp of when the audio was created is
            added.
    """
    from chainerui.report.audio_report import check_available
    if not check_available():
        return
    from chainerui.report.audio_report import report as _audio

    out_root = _chainerui_asset_observer.get_outpath(out)
    out_path = os.path.join(out_root, subdir)
    if not os.path.isdir(out_path):
        os.makedirs(out_path)

    col_name = name
    if col_name is None:
        col_name = 'audio'
    filename, created_at = _audio(audio, sample_rate, out_path, col_name)

    value = kwargs
    value['timestamp'] = created_at.isoformat()
    value['audios'] = {col_name: os.path.join(subdir, filename)}
    _chainerui_asset_observer.add(value)
    _chainerui_asset_observer.save(out_root, timeout)
def size():
    """Determines the height and width of the console window

        Returns:
            tuple of int: The height in lines, then width in characters
    """
    try:
        # stty is not available on Windows, so bail out to the fallback there
        assert os.name != 'nt' and sys.stdout.isatty()
        rows, columns = os.popen('stty size', 'r').read().split()
    except (AssertionError, AttributeError, ValueError):
        # in case of failure, use dimensions of a full screen 13" laptop
        rows, columns = DEFAULT_HEIGHT, DEFAULT_WIDTH

    return int(rows), int(columns)
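On Python 3.3+, shutil.get_terminal_size provides the same information portably; note it returns (columns, lines), the reverse of size() above:

import shutil

columns, rows = shutil.get_terminal_size(fallback=(80, 24))
print(rows, columns)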
def start_element(self, name, attrs):
    """Set tag status for start element."""
    self.in_tag = (name == self.tag)
    self.url = u""
def iter_commit_activity(self, number=-1, etag=None):
    """Iterate over last year of commit activity by week.

    See: http://developer.github.com/v3/repos/statistics/

    :param int number: (optional), number of weeks to return. Default -1
        will return all of the weeks.
    :param str etag: (optional), ETag from a previous request to the same
        endpoint
    :returns: generator of dictionaries

    .. note:: All statistics methods may return a 202. On those
        occasions, you will not receive any objects. You should store
        your iterator and check the new ``last_status`` attribute. If it
        is a 202 you should wait before re-requesting.

    .. versionadded:: 0.7

    """
    url = self._build_url('stats', 'commit_activity', base_url=self._api)
    return self._iter(int(number), url, dict, etag=etag)
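A hedged usage sketch, with repo standing in for an already-fetched github3 repository handle; the shape of each weekly dict is assumed to follow the GitHub statistics payload:

# repo = gh.repository('owner', 'name')  # hypothetical, fetched elsewhere
for week in repo.iter_commit_activity(number=4):
    # each item is expected to look like {'week': <ts>, 'total': <int>, 'days': [...]}
    print(week['week'], week['total'])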
def find_closest_match(target_track, tracks):
    """ Return closest match to target track """
    track = None

    # Get a list of (track, artist match ratio, name match ratio)
    tracks_with_match_ratio = [(
        track,
        get_similarity(target_track.artist, track.artist),
        get_similarity(target_track.name, track.name),
    ) for track in tracks]

    # Sort by artist then by title
    sorted_tracks = sorted(
        tracks_with_match_ratio,
        key=lambda t: (t[1], t[2]),
        reverse=True  # Descending, highest match ratio first
    )

    if sorted_tracks:
        track = sorted_tracks[0][0]  # Closest match to query

    return track
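get_similarity is defined elsewhere in the project; a plausible stand-in (an assumption, not the project's actual helper) is difflib's sequence ratio:

from difflib import SequenceMatcher

def get_similarity(a, b):
    # case-insensitive similarity score in [0, 1]
    return SequenceMatcher(None, a.lower(), b.lower()).ratio()

print(get_similarity('Radiohead', 'radiohead'))  # 1.0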
def tap_hold(self, x, y, duration=1.0):
    """
    Tap and hold for a moment

    Args:
        - x, y(int): position
        - duration(float): seconds of hold time

    [[FBRoute POST:@"/wda/touchAndHold"] respondWithTarget:self action:@selector(handleTouchAndHoldCoordinate:)],
    """
    data = {'x': x, 'y': y, 'duration': duration}
    return self.http.post('/wda/touchAndHold', data=data)
def save():
    '''save is a view to save data. We might want to adjust this to allow
    for updating saved data, but given a single file it's just one POST
    for now.
    '''
    if request.method == 'POST':
        exp_id = session.get('exp_id')
        app.logger.debug('Saving data for %s' % exp_id)
        fields = get_post_fields(request)
        result_file = app.save_data(session=session,
                                    content=fields,
                                    exp_id=exp_id)
        experiments = app.finish_experiment(session, exp_id)
        app.logger.info('Finished %s, %s remaining.' % (exp_id, len(experiments)))
        # Note, this doesn't seem to be enough to trigger ajax success
        return json.dumps({'success': True}), 200, {'ContentType': 'application/json'}
    return json.dumps({'success': False}), 403, {'ContentType': 'application/json'}
def write(self) -> None:
    """Call method |NetCDFFile.write| of all handled |NetCDFFile| objects."""
    if self.folders:
        init = hydpy.pub.timegrids.init
        timeunits = init.firstdate.to_cfunits('hours')
        timepoints = init.to_timepoints('hours')
        for folder in self.folders.values():
            for file_ in folder.values():
                file_.write(timeunits, timepoints)
def getAnalogType(self, num):
    """
    Returns the type of the channel 'num' based on its unit stored in the
    Comtrade header file. Returns 'V' for a voltage channel and 'I' for a
    current channel.
    """
    listidx = self.An.index(num)
    unit = self.uu[listidx]

    if unit == 'kV' or unit == 'V':
        return 'V'
    elif unit == 'A' or unit == 'kA':
        return 'I'
    else:
        print('Unknown channel type')
        return 0
def create_helper_trans_node(op_name, input_node, node_name):
    """create extra transpose node for dot operator"""
    node_name = op_name + "_" + node_name
    trans_node = onnx.helper.make_node(
        'Transpose',
        inputs=[input_node],
        outputs=[node_name],
        name=node_name
    )
    return trans_node
def normalize(seq):
    """
    Scales each number in the sequence so that the sum of all numbers
    equals 1.
    """
    s = float(sum(seq))
    return [v / s for v in seq]
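For example:

print(normalize([1, 2, 3, 4]))   # [0.1, 0.2, 0.3, 0.4]
print(sum(normalize([5, 5])))    # 1.0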
def iter_links(self, file, encoding=None, context=False):
    '''Return the links.

    This function is a convenience function for calling :meth:`iter_text`
    and returning only the links.
    '''
    if context:
        return [item for item in self.iter_text(file, encoding) if item[1]]
    else:
        return [item[0] for item in self.iter_text(file, encoding) if item[1]]
def get(self, sid):
    """
    Constructs a DocumentContext

    :param sid: The sid

    :returns: twilio.rest.preview.sync.service.document.DocumentContext
    :rtype: twilio.rest.preview.sync.service.document.DocumentContext
    """
    return DocumentContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def make_phase_space_list():
    """
    Extract all the phase space information (due to ``EMIT`` commands in
    the input file), and create a list of PhaseSpace objects.  The primary
    purpose of this is for interactive explorations of the data produced
    during Pynac simulations.
    """
    with open('dynac.short') as f:
        data_str = f.read()
    data_str_array = data_str.split('beam (emit card)')[1:]
    data_str_matrix = [[j.strip().split() for j in i]
                       for i in [chunk.split('\n')[1:8] for chunk in data_str_array]]
    return [PhaseSpace(data) for data in data_str_matrix]
def Bier(P, Pc, Te=None, q=None):
    r'''Calculates heat transfer coefficient for an evaporator operating
    in the nucleate boiling regime according to [1]_ .

    Either heat flux or excess temperature is required.

    With `Te` specified:

    .. math::
        h = \left(0.00417P_c^{0.69} \Delta Te^{0.7}\left[0.7 + 2P_r\left(4
        + \frac{1}{1-P_r}\right) \right]\right)^{1/0.3}

    With `q` specified:

    .. math::
        h = 0.00417P_c^{0.69} \Delta q^{0.7}\left[0.7 + 2P_r\left(4
        + \frac{1}{1-P_r}\right) \right]

    Parameters
    ----------
    P : float
        Saturation pressure of fluid, [Pa]
    Pc : float
        Critical pressure of fluid, [Pa]
    Te : float, optional
        Excess wall temperature, [K]
    q : float, optional
        Heat flux, [W/m^2]

    Returns
    -------
    h : float
        Heat transfer coefficient [W/m^2/K]

    Notes
    -----
    No examples of this are known. Seems to give very different results
    than other correlations.

    Examples
    --------
    Water boiling at 1 atm, with excess temperature of 4.3 K from [1]_.

    >>> Bier(101325., 22048321.0, Te=4.3)
    1290.5349471503353

    References
    ----------
    .. [1] Rohsenow, Warren and James Hartnett and Young Cho. Handbook of
       Heat Transfer, 3E. New York: McGraw-Hill, 1998.
    '''
    Pr = P/Pc
    if Te:
        return (0.00417*(Pc/1000.)**0.69*Te**0.7*(
            0.7 + 2.*Pr*(4. + 1./(1. - Pr))))**(1./0.3)
    elif q:
        return 0.00417*(Pc/1000.)**0.69*q**0.7*(0.7 + 2.*Pr*(4. + 1./(1. - Pr)))
    else:
        raise Exception('Either q or Te is needed for this correlation')
def report_exception(self, filename, exc):
    """
    This method is used when self.parser raises an Exception so that
    we can report a customized :class:`EventReport` object with info
    about the exception.
    """
    # Build fake event.
    event = AbinitError(src_file="Unknown", src_line=0, message=str(exc))
    return EventReport(filename, events=[event])
def set_global_permissions(self, global_permissions):
    """SetGlobalPermissions.
    [Preview API] Set service-wide permissions that govern feed creation.
    :param [GlobalPermission] global_permissions: New permissions for the organization.
    :rtype: [GlobalPermission]
    """
    content = self._serialize.body(global_permissions, '[GlobalPermission]')
    response = self._send(http_method='PATCH',
                          location_id='a74419ef-b477-43df-8758-3cd1cd5f56c6',
                          version='5.0-preview.1',
                          content=content)
    return self._deserialize('[GlobalPermission]', self._unwrap_collection(response))
def _run_services(self, pants_services):
    """Service runner main loop."""
    if not pants_services.services:
        self._logger.critical('no services to run, bailing!')
        return

    service_thread_map = {service: self._make_thread(service)
                          for service in pants_services.services}

    # Start services.
    for service, service_thread in service_thread_map.items():
        self._logger.info('starting service {}'.format(service))
        try:
            service_thread.start()
        except (RuntimeError, FSEventService.ServiceError):
            self.shutdown(service_thread_map)
            raise PantsDaemon.StartupFailure(
                'service {} failed to start, shutting down!'.format(service))

    # Once all services are started, write our pid.
    self.write_pid()
    self.write_metadata_by_name('pantsd', self.FINGERPRINT_KEY,
                                ensure_text(self.options_fingerprint))

    # Monitor services.
    while not self.is_killed:
        for service, service_thread in service_thread_map.items():
            if not service_thread.is_alive():
                self.shutdown(service_thread_map)
                raise PantsDaemon.RuntimeFailure(
                    'service failure for {}, shutting down!'.format(service))
            else:
                # Avoid excessive CPU utilization.
                service_thread.join(self.JOIN_TIMEOUT_SECONDS)
def fit_points_in_bounding_box(df_points, bounding_box, padding_fraction=0):
    '''
    Return data frame with ``x``, ``y`` columns scaled to fit points from
    :data:`df_points` to fill :data:`bounding_box` while maintaining
    aspect ratio.

    Arguments
    ---------
    df_points : pandas.DataFrame
        A frame with at least the columns ``x`` and ``y``, containing one
        row per point.
    bounding_box: pandas.Series
        A `pandas.Series` containing numeric `width` and `height` values.
    padding_fraction : float
        Fraction of padding to add around points.

    Returns
    -------
    pandas.DataFrame
        Input frame with the points with ``x`` and ``y`` values scaled to
        fill :data:`bounding_box` while maintaining aspect ratio.
    '''
    df_scaled_points = df_points.copy()
    offset, padded_scale = fit_points_in_bounding_box_params(df_points,
                                                             bounding_box,
                                                             padding_fraction)
    df_scaled_points[['x', 'y']] *= padded_scale
    df_scaled_points[['x', 'y']] += offset
    return df_scaled_points
def _load_file(self):
    """
    Reads the configured todo.txt file and loads it into the todo list
    instance.
    """
    self.todolist.erase()
    self.todolist.add_list(self.todofile.read())
    self.completer = PromptCompleter(self.todolist)
def execute_cmd(self, *args, **kwargs):
    """Execute a given hpssacli/ssacli command on the controller.

    This method executes a given command on the controller.

    :param args: a tuple consisting of sub-commands to be appended
        after specifying the controller in hpssacli/ssacli command.
    :param kwargs: kwargs to be passed to execute() in processutils
    :raises: HPSSAOperationError, if hpssacli/ssacli operation failed.
    """
    slot = self.properties['Slot']
    base_cmd = ("controller", "slot=%s" % slot)
    cmd = base_cmd + args
    return _ssacli(*cmd, **kwargs)
def _to_viewitem(self, prog_var):
    """
    Convert a ProgramVariable instance to a DDGViewItem object.

    :param ProgramVariable prog_var: The ProgramVariable object to convert.
    :return: The converted DDGViewItem object.
    :rtype: DDGViewItem
    """
    return DDGViewItem(self._ddg, prog_var, simplified=self._simplified)
def get_InsideConvexPoly(self, RelOff=_def.TorRelOff, ZLim='Def',
                         Spline=True, Splprms=_def.TorSplprms,
                         NP=_def.TorInsideNP, Plot=False, Test=True):
    """ Return a polygon that is a smaller and smoothed approximation of
    Ves.Poly, useful for excluding the divertor region in a Tokamak

    For some uses, it can be practical to approximate the polygon defining
    the Ves object (which can be non-convex, like with a divertor), by a
    simpler, slightly smaller and convex polygon.
    This method provides a fast solution for computing such a proxy.

    Parameters
    ----------
    RelOff : float
        Fraction by which an homothetic polygon should be reduced
        (1.-RelOff)*(Poly-BaryS)
    ZLim : None / str / tuple
        Flag indicating what limits shall be put to the height of the
        polygon (used for excluding divertor)
    Spline : bool
        Flag indicating whether the reduced and truncated polygon shall be
        smoothed by 2D b-spline curves
    Splprms : list
        List of 3 parameters to be used for the smoothing
        [weights, smoothness, b-spline order], fed to
        scipy.interpolate.splprep()
    NP : int
        Number of points to be used to define the smoothed polygon
    Plot : bool
        Flag indicating whether the result shall be plotted for visual
        inspection
    Test : bool
        Flag indicating whether the inputs should be tested for conformity

    Returns
    -------
    Poly : np.ndarray
        (2,N) polygon resulting from homothetic transform, truncating and
        optional smoothing
    """
    return _comp._Ves_get_InsideConvexPoly(self.Poly_closed,
                                           self.dgeom['P2Min'],
                                           self.dgeom['P2Max'],
                                           self.dgeom['BaryS'],
                                           RelOff=RelOff, ZLim=ZLim,
                                           Spline=Spline, Splprms=Splprms,
                                           NP=NP, Plot=Plot, Test=Test)
def get_system_uptime_output_cmd_error(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    get_system_uptime = ET.Element("get_system_uptime")
    config = get_system_uptime
    output = ET.SubElement(get_system_uptime, "output")
    cmd_error = ET.SubElement(output, "cmd-error")
    cmd_error.text = kwargs.pop('cmd_error')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def status(self):
    '''Allow custom status messages'''
    message = self.status_message
    if message is None:
        message = STATUS[self.status_code]
    return '%s %s' % (self.status_code, message)
def _getDocstringLineno(self, node_type, node):
    """
    Get line number of the docstring.

    @param node_type: type of the node currently being checked
    @param node: the node currently being checked
    @return: line number
    """
    docstringStriped = node.as_string().strip()
    linenoDocstring = (node.lineno + docstringStriped
                       .count("\n", 0, docstringStriped.index('"""')))
    if node_type == "module":
        # Module starts from line 0.
        linenoDocstring += 1
    return linenoDocstring
def reset_generation(self):
    """Reset the generation and memberId because we have fallen out of the group."""
    with self._lock:
        self._generation = Generation.NO_GENERATION
        self.rejoin_needed = True
        self.state = MemberState.UNJOINED
def handleEvent(self, eventObj):
    """This method should be called every time through the main loop.

    It handles showing the up, over, and down states of the button.

    Parameters:
        | eventObj - the event object obtained by calling pygame.event.get()

    Returns:
        | False most of the time
        | True when the user has toggled the checkbox.

    """
    if eventObj.type not in (MOUSEMOTION, MOUSEBUTTONUP, MOUSEBUTTONDOWN) or not self.visible:
        # The checkBox only cares about mouse-related events (or no events, if it is invisible)
        return False

    if not self.isEnabled:
        return False

    clicked = False

    if (not self.mouseOverButton) and self.rect.collidepoint(eventObj.pos):
        # if mouse has entered the checkBox:
        self.mouseOverButton = True

    elif self.mouseOverButton and (not self.rect.collidepoint(eventObj.pos)):
        # if mouse has exited the checkBox:
        self.mouseOverButton = False

    if self.rect.collidepoint(eventObj.pos):
        if eventObj.type == MOUSEBUTTONDOWN:
            self.buttonDown = True
            self.lastMouseDownOverButton = True
    else:
        if eventObj.type in (MOUSEBUTTONUP, MOUSEBUTTONDOWN):
            # if an up/down happens off the checkBox, then the next up won't cause mouseClick()
            self.lastMouseDownOverButton = False

    if eventObj.type == MOUSEBUTTONDOWN:
        self.mouseIsDown = True

    # mouse up is handled whether or not it was over the checkBox
    doMouseClick = False
    if eventObj.type == MOUSEBUTTONUP:
        self.mouseIsDown = False
        if self.lastMouseDownOverButton:
            doMouseClick = True
        self.lastMouseDownOverButton = False

        if self.buttonDown:
            self.buttonDown = False

        if doMouseClick:
            self.buttonDown = False
            clicked = True

            if self.playSoundOnClick:
                self.soundOnClick.play()

            # switch state:
            self.value = not self.value

    return clicked
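A hedged sketch of driving this handler from a pygame main loop, with checkbox standing in for an already-constructed instance of this widget class (construction details are assumptions):

import pygame

# ... pygame.init(), window setup, and checkbox construction elided ...
running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        elif checkbox.handleEvent(event):  # True only on a completed click
            print('checkbox toggled; value is now', checkbox.value)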
def create(vm_, call=None):
    '''Create an lxc Container.
    This function is idempotent and will try to
    either provision or finish the provision of an lxc container.

    NOTE: Most of the initialization code has been moved and merged with
    the lxc runner and lxc.init functions
    '''
    prov = get_configured_provider(vm_)
    if not prov:
        return
    # we can't use profile as a configuration key as it conflicts
    # with salt cloud internals
    profile = vm_.get(
        'lxc_profile',
        vm_.get('container_profile', None))

    event_data = vm_.copy()
    event_data['profile'] = profile

    __utils__['cloud.fire_event'](
        'event',
        'starting create',
        'salt/cloud/{0}/creating'.format(vm_['name']),
        args=__utils__['cloud.filter_event'](
            'creating', event_data, ['name', 'profile', 'provider', 'driver']),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    ret = {'name': vm_['name'],
           'changes': {},
           'result': True,
           'comment': ''}
    if 'pub_key' not in vm_ and 'priv_key' not in vm_:
        log.debug('Generating minion keys for %s', vm_['name'])
        vm_['priv_key'], vm_['pub_key'] = salt.utils.cloud.gen_keys(
            salt.config.get_cloud_config_value(
                'keysize', vm_, __opts__))
    # get the minion key pair to distribute back to the container
    kwarg = copy.deepcopy(vm_)
    kwarg['host'] = prov['target']
    kwarg['profile'] = profile

    __utils__['cloud.fire_event'](
        'event',
        'requesting instance',
        'salt/cloud/{0}/requesting'.format(vm_['name']),
        args=__utils__['cloud.filter_event'](
            'requesting', vm_, ['name', 'profile', 'provider', 'driver']),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    cret = _runner().cmd('lxc.cloud_init', [vm_['name']], kwarg=kwarg)
    ret['runner_return'] = cret
    ret['result'] = cret['result']
    if not ret['result']:
        ret['Error'] = 'Error while creating {0},'.format(vm_['name'])
    else:
        ret['changes']['created'] = 'created'

    # When using cloud states to manage LXC containers
    # __opts__['profile'] is not implicitly reset between operations
    # on different containers. However list_nodes will hide a container
    # if profile is set in opts, assuming that it has to be created.
    # But in a cloud state we do want to check first whether it really
    # exists, hence the need to remove profile from global opts once
    # the current container is created.
    if 'profile' in __opts__:
        __opts__['internal_lxc_profile'] = __opts__['profile']
        del __opts__['profile']

    __utils__['cloud.fire_event'](
        'event',
        'created instance',
        'salt/cloud/{0}/created'.format(vm_['name']),
        args=__utils__['cloud.filter_event'](
            'created', vm_, ['name', 'profile', 'provider', 'driver']),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )
    return ret
Create an lxc Container. This function is idempotent and will try to either provision or finish the provision of an lxc container. NOTE: Most of the initialization code has been moved and merged with the lxc runner and lxc.init functions
def check_stat(self, path): """ Checks logfile stat information for excluding files not in the datetime period. On Linux it's possible to check only the modification time, because file creation info is not available, so only older files can be excluded. On BSD Unix systems and Windows, information about file creation date and time is available, so newer files can be excluded too. """ statinfo = os.stat(path) st_mtime = datetime.fromtimestamp(statinfo.st_mtime) if platform.system() == 'Linux': check = st_mtime >= self.start_dt else: st_ctime = datetime.fromtimestamp(statinfo.st_ctime) check = st_mtime >= self.start_dt and st_ctime <= self.end_dt if not check: logger.info("file %r not in datetime period!", path) return check
Checks logfile stat information for excluding files not in the datetime period. On Linux it's possible to check only the modification time, because file creation info is not available, so only older files can be excluded. On BSD Unix systems and Windows, information about file creation date and time is available, so newer files can be excluded too.
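A standalone sketch of the same check, with the period bounds passed explicitly instead of read from self:

import os
import platform
from datetime import datetime

def in_period(path, start_dt, end_dt):
    # Exclude files modified before the period start; on non-Linux
    # platforms also exclude files created after the period end.
    st = os.stat(path)
    mtime = datetime.fromtimestamp(st.st_mtime)
    if platform.system() == 'Linux':
        return mtime >= start_dt
    ctime = datetime.fromtimestamp(st.st_ctime)
    return mtime >= start_dt and ctime <= end_dt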
def create_object(self, name, experiment_id, model_id, argument_defs, arguments=None, properties=None): """Create a model run object with the given list of arguments. The initial state of the object is IDLE. Raises ValueError if given arguments are invalid. Parameters ---------- name : string User-provided name for the model run experiment_id : string Unique identifier of associated experiment object model_id : string Unique model identifier argument_defs : list(attribute.AttributeDefinition) Definition of valid arguments for the given model arguments : list(dict('name':...,'value:...')), optional List of attribute instances properties : Dictionary, optional Set of model run properties. Returns ------- ModelRunHandle Object handle for created model run """ # Create a new object identifier. identifier = str(uuid.uuid4()).replace('-','') # Directory for successful model run resource files. Directories are # simply named by object identifier directory = os.path.join(self.directory, identifier) # Create the directory if it doesn't exist if not os.access(directory, os.F_OK): os.makedirs(directory) # By default all model runs are in IDLE state at creation state = ModelRunIdle() # Create the initial set of properties. run_properties = { datastore.PROPERTY_NAME: name, datastore.PROPERTY_STATE: str(state), datastore.PROPERTY_MODEL: model_id } if not properties is None: for prop in properties: if not prop in run_properties: run_properties[prop] = properties[prop] # If argument list is not given then the initial set of arguments is # empty. Here we do not validate the given arguments. Definitions of # valid argument sets are maintained in the model registry and are not # accessible by the model run manager at this point. run_arguments = {} if not arguments is None: # Convert arguments to dictionary of Attribute instances. Will # raise an exception if values are of invalid type. run_arguments = attribute.to_dict(arguments, argument_defs) # Create the model run handle and store it in the database before # returning it. obj = ModelRunHandle( identifier, run_properties, directory, state, experiment_id, model_id, run_arguments ) self.insert_object(obj) return obj
Create a model run object with the given list of arguments. The initial state of the object is IDLE. Raises ValueError if given arguments are invalid. Parameters ---------- name : string User-provided name for the model run experiment_id : string Unique identifier of associated experiment object model_id : string Unique model identifier argument_defs : list(attribute.AttributeDefinition) Definition of valid arguments for the given model arguments : list(dict('name':...,'value:...')), optional List of attribute instances properties : Dictionary, optional Set of model run properties. Returns ------- ModelRunHandle Object handle for created model run
def RegexLookup(fieldVal, db, fieldName, lookupType, histObj={}): """ Return a new field value based on match against regex queried from MongoDB :param string fieldVal: input value to lookup :param MongoClient db: MongoClient instance connected to MongoDB :param string fieldName: Field name to query against :param string lookupType: Type of lookup to perform/MongoDB collection name. One of 'genericRegex', 'fieldSpecificRegex', 'normRegex' :param dict histObj: History object to which changes should be appended """ if lookupType == 'genericRegex': lookup_dict = {} elif lookupType in ['fieldSpecificRegex', 'normRegex']: lookup_dict = {"fieldName": fieldName} else: raise ValueError("Invalid type") field_val_new = fieldVal pattern = '' coll = db[lookupType] re_val = coll.find(lookup_dict, ['pattern', 'replace']) for row in re_val: try: match = re.match(row['pattern'], _DataClean_(field_val_new), flags=re.IGNORECASE) if match: if 'replace' in row: field_val_new = re.sub(row['pattern'], row['replace'], _DataClean_(field_val_new), flags=re.IGNORECASE) else: field_val_new = re.sub(row['pattern'], '', _DataClean_(field_val_new), flags=re.IGNORECASE) pattern = row['pattern'] break except KeyError as Key_error_obj: warnings.warn('schema error', Key_error_obj) if re_val: re_val.close() change = _CollectHistory_(lookupType=lookupType, fromVal=fieldVal, toVal=field_val_new, pattern=pattern) histObjUpd = _CollectHistoryAgg_(contactHist=histObj, fieldHistObj=change, fieldName=fieldName) return field_val_new, histObjUpd
Return a new field value based on match against regex queried from MongoDB :param string fieldVal: input value to lookup :param MongoClient db: MongoClient instance connected to MongoDB :param string fieldName: Field name to query against :param string lookupType: Type of lookup to perform/MongoDB collection name. One of 'genericRegex', 'fieldSpecificRegex', 'normRegex' :param dict histObj: History object to which changes should be appended
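A hedged usage sketch; the database name and the regex document are hypothetical, and it assumes _DataClean_ leaves plain digit strings essentially unchanged:

from pymongo import MongoClient

db = MongoClient('localhost', 27017)['dwm']  # hypothetical database name
db['normRegex'].insert_one({
    'fieldName': 'phone',
    'pattern': r'[^0-9]',   # strip any non-digit characters
    'replace': '',
})
new_val, hist = RegexLookup('(555) 123-4567', db, 'phone', 'normRegex')
# new_val should come back as '5551234567'; hist records the applied pattern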
def _cim_keybinding(key, value): """ Return a keybinding value, from dict item input (key+value). Key may be None (for unnamed keys). The returned value will be a CIM-typed value, except if it was provided as Python number type (in which case it will remain that type). Invalid types or values cause TypeError or ValueError to be raised. """ if key is not None and isinstance(value, CIMProperty): if value.name.lower() != key.lower(): raise ValueError( _format("Invalid keybinding name: CIMProperty.name must be " "dictionary key {0!A}, but is {1!A}", key, value.name)) return copy_.copy(value.value) if value is None: return None if isinstance(value, six.text_type): return value if isinstance(value, six.binary_type): return _to_unicode(value) if isinstance(value, (bool, CIMInstanceName, CIMType)): return value # pylint: disable=unidiomatic-typecheck if builtin_type(value) in number_types: # Note: The CIM data types are derived from the built-in types, so we # cannot use isinstance() for this test. # Ideally, pywbem won't accept keybinding values specified as Python # number typed values, but require a CIM data type (e.g. Uint32 or # Real32). # However, there are two reasons for continuing to allow that: # * It was allowed in earlier versions of pywbem. # * Parsing the (untyped) WBEM URI of an instance path, results in # int or float values without size, and the size information # to automatically convert that into numeric CIM data types is # not available. return value if isinstance(value, (CIMClass, CIMInstance)): raise TypeError( _format("Value of keybinding {0!A} cannot be an embedded object: " "{1}", key, type(value))) if isinstance(value, list): raise TypeError( _format("Value of keybinding {0!A} cannot be a list", key)) raise TypeError( _format("Value of keybinding {0!A} has an invalid type: {1}", key, type(value)))
Return a keybinding value, from dict item input (key+value). Key may be None (for unnamed keys). The returned value will be a CIM-typed value, except if it was provided as Python number type (in which case it will remain that type). Invalid types or values cause TypeError or ValueError to be raised.
def set_frameworkcontroller_config(experiment_config, port, config_file_name): '''set frameworkcontroller configuration''' frameworkcontroller_config_data = dict() frameworkcontroller_config_data['frameworkcontroller_config'] = experiment_config['frameworkcontrollerConfig'] response = rest_put(cluster_metadata_url(port), json.dumps(frameworkcontroller_config_data), REST_TIME_OUT) err_message = None if not response or not response.status_code == 200: if response is not None: err_message = response.text _, stderr_full_path = get_log_path(config_file_name) with open(stderr_full_path, 'a+') as fout: fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':'))) return False, err_message result, message = setNNIManagerIp(experiment_config, port, config_file_name) if not result: return result, message #set trial_config return set_trial_config(experiment_config, port, config_file_name), err_message
set frameworkcontroller configuration
def from_file(cls, fname, form=None): """ Read an orthography profile from a metadata file or a default tab-separated profile file. """ try: tg = TableGroup.from_file(fname) opfname = None except JSONDecodeError: tg = TableGroup.fromvalue(cls.MD) opfname = fname if len(tg.tables) != 1: raise ValueError('profile description must contain exactly one table') metadata = tg.common_props metadata.update(fname=Path(fname), form=form) return cls( *[{k: None if (k != cls.GRAPHEME_COL and v == cls.NULL) else v for k, v in d.items()} for d in tg.tables[0].iterdicts(fname=opfname)], **metadata)
Read an orthography profile from a metadata file or a default tab-separated profile file.
def evaluatePotentials(Pot,R,z,phi=None,t=0.,dR=0,dphi=0): """ NAME: evaluatePotentials PURPOSE: convenience function to evaluate a possible sum of potentials INPUT: Pot - potential or list of potentials (dissipative forces in such a list are ignored) R - cylindrical Galactocentric distance (can be Quantity) z - distance above the plane (can be Quantity) phi - azimuth (can be Quantity) t - time (can be Quantity) dR=, dphi= - if set to non-zero integers, return the dR-th, dphi-th derivative instead OUTPUT: Phi(R,z) HISTORY: 2010-04-16 - Written - Bovy (NYU) """ return _evaluatePotentials(Pot,R,z,phi=phi,t=t,dR=dR,dphi=dphi)
NAME: evaluatePotentials PURPOSE: convenience function to evaluate a possible sum of potentials INPUT: Pot - potential or list of potentials (dissipative forces in such a list are ignored) R - cylindrical Galactocentric distance (can be Quantity) z - distance above the plane (can be Quantity) phi - azimuth (can be Quantity) t - time (can be Quantity) dR=, dphi= - if set to non-zero integers, return the dR-th, dphi-th derivative instead OUTPUT: Phi(R,z) HISTORY: 2010-04-16 - Written - Bovy (NYU)
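A short usage sketch with galpy's bundled Milky-Way model (natural units, so R=1 is the solar circle):

from galpy.potential import MWPotential2014, evaluatePotentials

phi_plane = evaluatePotentials(MWPotential2014, 1.0, 0.0)   # Phi(R=1, z=0)
phi_above = evaluatePotentials(MWPotential2014, 1.0, 0.1)   # slightly above the plane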
def _get_future_devices(self, context): """Return a generator yielding new devices.""" monitor = Monitor.from_netlink(context) monitor.filter_by("hidraw") monitor.start() self._scanning_log_message() for device in iter(monitor.poll, None): if device.action == "add": # Sometimes udev rules have not been applied at this point, # causing a permission denied error if we are running in user # mode. With this sleep that will hopefully not happen. sleep(1) yield device self._scanning_log_message()
Return a generator yielding new devices.
def yearly(self): """ Access the yearly :returns: twilio.rest.api.v2010.account.usage.record.yearly.YearlyList :rtype: twilio.rest.api.v2010.account.usage.record.yearly.YearlyList """ if self._yearly is None: self._yearly = YearlyList(self._version, account_sid=self._solution['account_sid'], ) return self._yearly
Access the yearly :returns: twilio.rest.api.v2010.account.usage.record.yearly.YearlyList :rtype: twilio.rest.api.v2010.account.usage.record.yearly.YearlyList
def apply_driver_hacks(self, app, info, options): """ Set custom SQLAlchemy engine options: - Teach it to encode and decode our node objects - Enable pre-ping (i.e., test the DB connection before trying to use it) """ options.update(dict( json_serializer=lambda data: json.dumps(data, default=encode_node), json_deserializer=lambda data: json.loads(data, object_hook=decode_node), pool_pre_ping=True, )) super(QuiltSQLAlchemy, self).apply_driver_hacks(app, info, options)
Set custom SQLAlchemy engine options: - Teach it to encode and decode our node objects - Enable pre-ping (i.e., test the DB connection before trying to use it)
def decrypt_report(self, device_id, root, data, **kwargs): """Decrypt a buffer of report data on behalf of a device. Args: device_id (int): The id of the device that we should decrypt for root (int): The root key type that should be used to generate the report data (bytearray): The data that we should decrypt **kwargs: There are additional specific keyword args that are required depending on the root key used. Typically, you must specify - report_id (int): The report id - sent_timestamp (int): The sent timestamp of the report These two bits of information are used to construct the per report signing and encryption key from the specific root key type. Returns: dict: The decrypted data and any associated metadata about the data. The data itself must always be a bytearray stored under the 'data' key, however additional keys may be present depending on the encryption method used. Raises: NotFoundError: If the auth provider is not able to decrypt the data. """ report_key = self._verify_derive_key(device_id, root, **kwargs) try: from Crypto.Cipher import AES import Crypto.Util.Counter except ImportError: raise NotFoundError ctr = Crypto.Util.Counter.new(128) # We use AES-128 in CTR mode cipher = AES.new(bytes(report_key[:16]), AES.MODE_CTR, counter=ctr) decrypted = cipher.decrypt(bytes(data)) return {'data': decrypted}
Decrypt a buffer of report data on behalf of a device. Args: device_id (int): The id of the device that we should decrypt for root (int): The root key type that should be used to generate the report data (bytearray): The data that we should decrypt **kwargs: There are additional specific keyword args that are required depending on the root key used. Typically, you must specify - report_id (int): The report id - sent_timestamp (int): The sent timestamp of the report These two bits of information are used to construct the per report signing and encryption key from the specific root key type. Returns: dict: The decrypted data and any associated metadata about the data. The data itself must always be a bytearray stored under the 'data' key, however additional keys may be present depending on the encryption method used. Raises: NotFoundError: If the auth provider is not able to decrypt the data.
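The CTR-mode round trip at the heart of this method, as a standalone PyCryptodome sketch; the all-zero key is a placeholder for the derived report_key[:16]:

from Crypto.Cipher import AES
import Crypto.Util.Counter

key = bytes(16)  # placeholder for the derived per-report key
ctr = Crypto.Util.Counter.new(128)
ciphertext = AES.new(key, AES.MODE_CTR, counter=ctr).encrypt(b'report payload!!')
ctr = Crypto.Util.Counter.new(128)  # the counter must be recreated for decryption
plaintext = AES.new(key, AES.MODE_CTR, counter=ctr).decrypt(ciphertext)
assert plaintext == b'report payload!!'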
def prompt(text, default=None, hide_input=False, confirmation_prompt=False, type=None, value_proc=None, prompt_suffix=': ', show_default=True, err=False): """Prompts a user for input. This is a convenience function that can be used to prompt a user for input later. If the user aborts the input by sending an interrupt signal, this function will catch it and raise a :exc:`Abort` exception. .. versionadded:: 6.0 Added unicode support for cmd.exe on Windows. .. versionadded:: 4.0 Added the `err` parameter. :param text: the text to show for the prompt. :param default: the default value to use if no input happens. If this is not given it will prompt until it's aborted. :param hide_input: if this is set to true then the input value will be hidden. :param confirmation_prompt: asks for confirmation for the value. :param type: the type to use to check the value against. :param value_proc: if this parameter is provided it's a function that is invoked instead of the type conversion to convert a value. :param prompt_suffix: a suffix that should be added to the prompt. :param show_default: shows or hides the default value in the prompt. :param err: if set to true the file defaults to ``stderr`` instead of ``stdout``, the same as with echo. """ result = None def prompt_func(text): f = hide_input and hidden_prompt_func or visible_prompt_func try: # Write the prompt separately so that we get nice # coloring through colorama on Windows echo(text, nl=False, err=err) return f('') except (KeyboardInterrupt, EOFError): # getpass doesn't print a newline if the user aborts input with ^C. # Allegedly this behavior is inherited from getpass(3). # A doc bug has been filed at https://bugs.python.org/issue24711 if hide_input: echo(None, err=err) raise Abort() if value_proc is None: value_proc = convert_type(type, default) prompt = _build_prompt(text, prompt_suffix, show_default, default) while 1: while 1: value = prompt_func(prompt) if value: break # If a default is set and used, then the confirmation # prompt is always skipped because that's the only thing # that really makes sense. elif default is not None: return default try: result = value_proc(value) except UsageError as e: echo('Error: %s' % e.message, err=err) continue if not confirmation_prompt: return result while 1: value2 = prompt_func('Repeat for confirmation: ') if value2: break if value == value2: return result echo('Error: the two entered values do not match', err=err)
Prompts a user for input. This is a convenience function that can be used to prompt a user for input later. If the user aborts the input by sending an interrupt signal, this function will catch it and raise a :exc:`Abort` exception. .. versionadded:: 6.0 Added unicode support for cmd.exe on Windows. .. versionadded:: 4.0 Added the `err` parameter. :param text: the text to show for the prompt. :param default: the default value to use if no input happens. If this is not given it will prompt until it's aborted. :param hide_input: if this is set to true then the input value will be hidden. :param confirmation_prompt: asks for confirmation for the value. :param type: the type to use to check the value against. :param value_proc: if this parameter is provided it's a function that is invoked instead of the type conversion to convert a value. :param prompt_suffix: a suffix that should be added to the prompt. :param show_default: shows or hides the default value in the prompt. :param err: if set to true the file defaults to ``stderr`` instead of ``stdout``, the same as with echo.
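Typical calls through the public click API:

import click

name = click.prompt('Your name', default='World')  # uses the default on empty input
password = click.prompt('Password', hide_input=True, confirmation_prompt=True)
port = click.prompt('Port', type=int)              # re-prompts until an int parses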
def grid_edges(shape, inds=None, return_directions=True): """ Get list of grid edges :param shape: grid shape (2D or 3D) :param inds: optional array of node indices with the given shape; defaults to np.arange(np.prod(shape)).reshape(shape) :param return_directions: if True, also return the axis index of each edge :return: edges, or (edges, edge_dir) if return_directions is True """ if inds is None: inds = np.arange(np.prod(shape)).reshape(shape) # if not self.segparams['use_boundary_penalties'] and \ # boundary_penalties_fcn is None : if len(shape) == 2: edgx = np.c_[inds[:, :-1].ravel(), inds[:, 1:].ravel()] edgy = np.c_[inds[:-1, :].ravel(), inds[1:, :].ravel()] edges = [edgx, edgy] directions = [ np.ones([edgx.shape[0]], dtype=np.int8) * 0, np.ones([edgy.shape[0]], dtype=np.int8) * 1, ] elif len(shape) == 3: # This is faster for some specific format edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()] edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()] edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()] edges = [edgx, edgy, edgz] else: logger.error("Expected 2D or 3D data") # for all edges along the first direction put 0, for the second direction put 1, for the third direction put 2 if return_directions: directions = [] for idirection in range(len(shape)): directions.append( np.ones([edges[idirection].shape[0]], dtype=np.int8) * idirection ) edges = np.concatenate(edges) if return_directions: edge_dir = np.concatenate(directions) return edges, edge_dir else: return edges
Get list of grid edges :param shape: grid shape (2D or 3D) :param inds: optional array of node indices with the given shape; defaults to np.arange(np.prod(shape)).reshape(shape) :param return_directions: if True, also return the axis index of each edge :return: edges, or (edges, edge_dir) if return_directions is True
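Usage sketch on a small 2-D grid (nodes numbered row-major by np.arange):

import numpy as np

edges, directions = grid_edges((2, 3))
# inds is [[0, 1, 2], [3, 4, 5]], so edges pairs horizontal neighbours such as
# [0, 1] and vertical neighbours such as [0, 3]; directions holds 0 for edges
# along the second axis and 1 for edges along the first.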
def change_event_params(self, handler, **kwargs): """ This allows the client to change the parameters for an event when slightly different behavior is desired, such as reassigning keys. handler - the handler object that the desired changes are made to. kwargs - the variable number of keyword arguments for the parameters that must match the properties of the corresponding event. """ if not isinstance(handler, Handler): raise TypeError("given object must be of type Handler.") if not self.remove_handler(handler): raise ValueError("You must pass in a valid handler that already exists.") self.add_handler(handler.type, handler.actions, **kwargs) self.event = handler.event
This allows the client to change the parameters for an event when slightly different behavior is desired, such as reassigning keys. handler - the handler object that the desired changes are made to. kwargs - the variable number of keyword arguments for the parameters that must match the properties of the corresponding event.
def geohash(self, key, member, *members, **kwargs): """Returns members of a geospatial index as standard geohash strings. :rtype: list[str or bytes or None] """ return self.execute( b'GEOHASH', key, member, *members, **kwargs )
Returns members of a geospatial index as standard geohash strings. :rtype: list[str or bytes or None]
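A hedged usage sketch in the aioredis 1.x style this method appears to come from:

import asyncio
import aioredis

async def main():
    redis = await aioredis.create_redis_pool('redis://localhost')
    await redis.geoadd('Sicily', 13.361389, 38.115556, 'Palermo')
    print(await redis.geohash('Sicily', 'Palermo'))  # e.g. [b'sqc8b49rny0']
    redis.close()
    await redis.wait_closed()

asyncio.run(main())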
def _value_encode(cls, member, value): """ Internal method used to encode values into the hash. :param member: str :param value: multi :return: bytes """ try: field_validator = cls.fields[member] except KeyError: return cls.valueparse.encode(value) return field_validator.encode(value)
Internal method used to encode values into the hash. :param member: str :param value: multi :return: bytes
def replace_event_annotations(event, newanns): """Replace event annotations with the provided ones.""" _humilis = event.get("_humilis", {}) if not _humilis: event["_humilis"] = {"annotation": newanns} else: event["_humilis"]["annotation"] = newanns
Replace event annotations with the provided ones.
def get_client(self, service, region, public=True, cached=True, client_class=None): """ Returns the client object for the specified service and region. By default the public endpoint is used. If you wish to work with a service's internal endpoints, specify `public=False`. By default, if a client has already been created for the given service, region, and public values, that will be returned. To force a new client to be created, pass 'cached=False'. """ if not self.authenticated: raise exc.NotAuthenticated("You must authenticate before trying " "to create clients.") clt = ep = None mapped_service = self.service_mapping.get(service) or service svc = self.services.get(mapped_service) if svc: ep = svc.endpoints.get(region) if ep: clt = ep._get_client(public=public, cached=cached, client_class=client_class) if not clt: raise exc.NoSuchClient("There is no client available for the " "service '%s' in the region '%s'." % (service, region)) return clt
Returns the client object for the specified service and region. By default the public endpoint is used. If you wish to work with a service's internal endpoints, specify `public=False`. By default, if a client has already been created for the given service, region, and public values, that will be returned. To force a new client to be created, pass 'cached=False'.
def extract_alzip (archive, compression, cmd, verbosity, interactive, outdir): """Extract an ALZIP archive.""" return [cmd, '-d', outdir, archive]
Extract an ALZIP archive.
def node_type(node: astroid.node_classes.NodeNG) -> Optional[type]: """Return the inferred type for `node` If there is more than one possible type, or if inferred type is Uninferable or None, return None """ # check there is only one possible type for the assign node. Else we # don't handle it for now types = set() try: for var_type in node.infer(): if var_type == astroid.Uninferable or is_none(var_type): continue types.add(var_type) if len(types) > 1: return None except astroid.InferenceError: return None return types.pop() if types else None
Return the inferred type for `node` If there is more than one possible type, or if inferred type is Uninferable or None, return None
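Usage sketch with astroid's single-node helper:

import astroid

assign = astroid.extract_node('x = 1')
inferred = node_type(assign.value)  # the node inferred for the literal 1
# When inference yields more than one candidate type, or nothing usable,
# node_type returns None instead.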
def maybe_download_and_extract(data_root: str, url: str) -> None: """ Maybe download the specified file to ``data_root`` and try to unpack it with ``shutil.unpack_archive``. :param data_root: data root to download the files to :param url: url to download from """ # make sure data_root exists os.makedirs(data_root, exist_ok=True) # create sanitized filename from url filename = sanitize_url(url) # check whether the archive already exists filepath = os.path.join(data_root, filename) if os.path.exists(filepath): logging.info('\t`%s` already exists; skipping', filepath) return # download with progressbar try: logging.info('\tdownloading %s', filepath) req = requests.get(url, stream=True) req.raise_for_status() except requests.exceptions.RequestException as ex: logging.error('File `%s` could not be downloaded, %s', filepath, ex) return expected_size = int(req.headers.get('content-length')) chunk_size = 1024 with open(filepath, 'wb') as f_out,\ click.progressbar(req.iter_content(chunk_size=chunk_size), length=expected_size // chunk_size) as bar: for chunk in bar: if chunk: f_out.write(chunk) f_out.flush() # extract try: shutil.unpack_archive(filepath, data_root) except (shutil.ReadError, ValueError): logging.info('File `%s` could not be extracted by `shutil.unpack_archive`. Please process it manually.', filepath)
Maybe download the specified file to ``data_root`` and try to unpack it with ``shutil.unpack_archive``. :param data_root: data root to download the files to :param url: url to download from
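Usage sketch; the URL is hypothetical:

maybe_download_and_extract('./data', 'https://example.com/dataset.tar.gz')
# logs progress while downloading, then unpacks the archive into ./data
# via shutil.unpack_archive; a second call is a no-op if the file exists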
def _get_domain(conn, *vms, **kwargs): ''' Return a domain object for the named VM or return domain object for all VMs. :param conn: libvirt connection object :param vms: list of domain names to look for :param iterable: True to return an array in all cases ''' ret = list() lookup_vms = list() all_vms = [] if kwargs.get('active', True): for id_ in conn.listDomainsID(): all_vms.append(conn.lookupByID(id_).name()) if kwargs.get('inactive', True): for id_ in conn.listDefinedDomains(): all_vms.append(id_) if not all_vms: raise CommandExecutionError('No virtual machines found.') if vms: for name in vms: if name not in all_vms: raise CommandExecutionError('The VM "{name}" is not present'.format(name=name)) else: lookup_vms.append(name) else: lookup_vms = list(all_vms) for name in lookup_vms: ret.append(conn.lookupByName(name)) return len(ret) == 1 and not kwargs.get('iterable') and ret[0] or ret
Return a domain object for the named VM or return domain object for all VMs. :param conn: libvirt connection object :param vms: list of domain names to look for :param iterable: True to return an array in all cases
def _get_envelopes_centroid(envelopes): """ Returns the centroid of an inputted geometry column. Not currently in use, as this is now handled by this library's CRS wrapper directly. Light wrapper over ``_get_envelopes_min_maxes``. Parameters ---------- envelopes : GeoSeries The envelopes of the given geometries, as would be returned by e.g. ``data.geometry.envelope``. Returns ------- (mean_x, mean_y) : tuple The data centroid. """ xmin, xmax, ymin, ymax = _get_envelopes_min_maxes(envelopes) return np.mean([xmin, xmax]), np.mean([ymin, ymax])
Returns the centroid of an inputted geometry column. Not currently in use, as this is now handled by this library's CRS wrapper directly. Light wrapper over ``_get_envelopes_min_maxes``. Parameters ---------- envelopes : GeoSeries The envelopes of the given geometries, as would be returned by e.g. ``data.geometry.envelope``. Returns ------- (mean_x, mean_y) : tuple The data centroid.
def _determine_slot(self, *args): """ figure out what slot based on command and args """ if len(args) <= 1: raise RedisClusterException("No way to dispatch this command to Redis Cluster. Missing key.") command = args[0] if command in ['EVAL', 'EVALSHA']: numkeys = args[2] keys = args[3: 3 + numkeys] slots = {self.connection_pool.nodes.keyslot(key) for key in keys} if len(slots) != 1: raise RedisClusterException("{0} - all keys must map to the same key slot".format(command)) return slots.pop() key = args[1] return self.connection_pool.nodes.keyslot(key)
figure out what slot based on command and args
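For reference, the slot rule that keyslot() implements: hash the key (or its {hash tag}, if one is present) with Redis CRC16 and take the result modulo 16384. A sketch, assuming the crc16 implementation shipped with redis-py-cluster:

from rediscluster.crc import crc16  # assumed helper; any Redis CRC16 works

def keyslot(key: bytes) -> int:
    start = key.find(b'{')
    if start >= 0:
        end = key.find(b'}', start + 1)
        if end > start + 1:  # non-empty hash tag: only the tag is hashed
            key = key[start + 1:end]
    return crc16(key) % 16384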
def process_byte(self, tag): """Process byte type tags""" tag.set_address(self.normal_register.current_address) # each address needs 1 byte self.normal_register.move_to_next_address(1)
Process byte type tags
def add_task(self, task, func=None, **kwargs): ''' Add a task parser ''' if not self.__tasks: raise Exception("Tasks subparsers is disabled") if 'help' not in kwargs and func is not None and func.__doc__: kwargs['help'] = func.__doc__ task_parser = self.__tasks.add_parser(task, **kwargs) if self.__add_vq: self.add_vq(task_parser) if func is not None: task_parser.set_defaults(func=func) return task_parser
Add a task parser
def get_states(self, dump_optimizer=False): """Gets updater states. Parameters ---------- dump_optimizer : bool, default False Whether to also save the optimizer itself. This would also save optimizer information such as learning rate and weight decay schedules. """ return pickle.dumps((self.states, self.optimizer) if dump_optimizer else self.states)
Gets updater states. Parameters ---------- dump_optimizer : bool, default False Whether to also save the optimizer itself. This would also save optimizer information such as learning rate and weight decay schedules.
def parse_250_row(row: list) -> BasicMeterData: """ Parse basic meter data record (250) """ return BasicMeterData(row[1], row[2], row[3], row[4], row[5], row[6], row[7], float(row[8]), parse_datetime(row[9]), row[10], row[11], row[12], float(row[13]), parse_datetime( row[14]), row[15], row[16], row[17], float(row[18]), row[19], row[20], parse_datetime(row[21]), parse_datetime(row[22]))
Parse basic meter data record (250)
def _thread_loop(self): """Background thread used when Sender is in asynchronous/interval mode.""" last_check_time = time.time() messages = [] while True: # Get first message from queue, blocking until the next time we # should be sending time_since_last_check = time.time() - last_check_time time_till_next_check = max(0, self.interval - time_since_last_check) try: message = self._queue.get(timeout=time_till_next_check) except queue.Empty: pass else: if message is None: # None is the signal to stop this background thread break messages.append(message) # Get any other messages currently on queue without blocking, # paying attention to None ("stop thread" signal) should_stop = False while True: try: message = self._queue.get_nowait() except queue.Empty: break if message is None: should_stop = True break messages.append(message) if should_stop: break # If it's time to send, send what we've collected current_time = time.time() if current_time - last_check_time >= self.interval: last_check_time = current_time for i in range(0, len(messages), self.batch_size): batch = messages[i:i + self.batch_size] self.send_socket(b''.join(batch)) messages = [] # Send any final messages before exiting thread for i in range(0, len(messages), self.batch_size): batch = messages[i:i + self.batch_size] self.send_socket(b''.join(batch))
Background thread used when Sender is in asynchronous/interval mode.
def __get_dynamic_attr(self, attname, arg, default=None): """ Gets "something" from self, which could be an attribute or a callable with either 0 or 1 arguments (besides self). Stolen from django.contrib.syndication.feeds.Feed. """ try: attr = getattr(self, attname) except AttributeError: return default if callable(attr): # Check func_code.co_argcount rather than try/excepting the # function and catching the TypeError, because something inside # the function may raise the TypeError. This technique is more # accurate. if hasattr(attr, 'func_code'): argcount = attr.func_code.co_argcount else: argcount = attr.__call__.func_code.co_argcount if argcount == 2: # one argument is 'self' return attr(arg) else: return attr() return attr
Gets "something" from self, which could be an attribute or a callable with either 0 or 1 arguments (besides self). Stolen from django.contrib.syntication.feeds.Feed.
def on_message(self, headers, message): """ Event method that gets called when this listener has received a JMS message (representing an HMC notification). Parameters: headers (dict): JMS message headers, as described for `headers` tuple item returned by the :meth:`~zhmcclient.NotificationReceiver.notifications` method. message (string): JMS message body as a string, which contains a serialized JSON object. The JSON object is described in the `message` tuple item returned by the :meth:`~zhmcclient.NotificationReceiver.notifications` method). """ with self._handover_cond: # Wait until receiver has processed the previous notification while len(self._handover_dict) > 0: self._handover_cond.wait(self._wait_timeout) # Indicate to receiver that there is a new notification self._handover_dict['headers'] = headers try: msg_obj = json.loads(message) except Exception: raise # TODO: Find better exception for this case self._handover_dict['message'] = msg_obj self._handover_cond.notifyAll()
Event method that gets called when this listener has received a JMS message (representing an HMC notification). Parameters: headers (dict): JMS message headers, as described for `headers` tuple item returned by the :meth:`~zhmcclient.NotificationReceiver.notifications` method. message (string): JMS message body as a string, which contains a serialized JSON object. The JSON object is described in the `message` tuple item returned by the :meth:`~zhmcclient.NotificationReceiver.notifications` method).
def remove_role(self, databaseName, roleName, collectionName=None): """Remove one role Args: databaseName (str): Database Name roleName (RoleSpecs): role Keyword Args: collectionName (str): Collection """ role = {"databaseName" : databaseName, "roleName" : roleName} if collectionName: role["collectionName"] = collectionName if role in self.roles: self.roles.remove(role)
Remove one role Args: databaseName (str): Database Name roleName (RoleSpecs): role Keyword Args: collectionName (str): Collection
def has_elem(elem_ref): """ Has element? :param elem_ref: element reference triple (tag, container, key) :return: True if the referenced element exists """ if not is_elem_ref(elem_ref): return False elif elem_ref[0] == ElemRefObj: return hasattr(elem_ref[1], elem_ref[2]) elif elem_ref[0] == ElemRefArr: return elem_ref[2] in elem_ref[1]
Has element? :param elem_ref: element reference triple (tag, container, key) :return: True if the referenced element exists
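A hedged usage sketch; ElemRefObj/ElemRefArr are the tag constants referenced above, and elem_ref triples are assumed to be (tag, container, key):

data = {'a': 1}
has_elem((ElemRefArr, data, 'a'))   # True: 'a' is a key of the dict

class Box:
    pass

has_elem((ElemRefObj, Box(), 'x'))  # False: the instance has no attribute 'x'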
def salm2map(salm, s, lmax, Ntheta, Nphi): """Convert mode weights of spin-weighted function to values on a grid Parameters ---------- salm : array_like, complex, shape (..., (lmax+1)**2) Input array representing mode weights of the spin-weighted function. This array may be multi-dimensional, where initial dimensions may represent different times, for example, or separate functions on the sphere. The final dimension should give the values of the mode weights, in the order described below in the 'Notes' section. s : int or array, int, shape (...) Spin weight of the function. If `salm` is multidimensional and this is an array, its dimensions must match the first dimensions of `salm`, and the different values are the spin weights of the different functions represented by those dimensions. Otherwise, if `salm` is multidimensional and `s` is a single integer, all functions are assumed to have the same spin weight. lmax : int The largest `ell` value present in the input array. Ntheta : int Number of points in the output grid along the polar angle. Nphi : int Number of points in the output grid along the azimuthal angle. Returns ------- map : ndarray, complex, shape (..., Ntheta, Nphi) Values of the spin-weighted function on grid points of the sphere. This array is shaped like the input `salm` array, but has one extra dimension. The final two dimensions describe the values of the function on the sphere. See also -------- spinsfast.map2salm : Roughly the inverse of this function. Notes ----- The input `salm` data should be given in increasing order of `ell` value, always starting with (ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc. Explicitly, the ordering should match this: [f_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)] The input is converted to a contiguous complex numpy array if necessary. The output data are presented on this grid of spherical coordinates: np.array([[f(theta, phi) for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)] for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)]) Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons. First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input to `salm2map`. It is also possible to define a `map` function that violates this assumption -- for example, having a nonzero average value over the sphere, if the function has nonzero spin `s`, this is impossible. Also, it is possible to define a map of a function with so much angular dependence that it cannot be captured with the given `lmax` value. For example, a discontinuous function will never be perfectly resolved. Example ------- >>> s = -2 >>> lmax = 8 >>> Ntheta = Nphi = 2*lmax + 1 >>> modes = np.zeros(spinsfast.N_lm(lmax), dtype=np.complex128) >>> modes[spinsfast.lm_ind(2, 2, 8)] = 1.0 >>> values = spinsfast.salm2map(modes, s, lmax, Ntheta, Nphi) """ if Ntheta < 2 or Nphi < 1: raise ValueError("Input values of Ntheta={0} and Nphi={1} ".format(Ntheta, Nphi) + "are not allowed; they must be greater than 1 and 0, respectively.") if lmax < 1: raise ValueError("Input value of lmax={0} ".format(lmax) + "is not allowed; it must be greater than 0 and should be greater " + "than |s|={0}.".format(abs(s))) import numpy as np salm = np.ascontiguousarray(salm, dtype=np.complex128) if salm.shape[-1] < N_lm(lmax): raise ValueError("The input `salm` array of shape {0} is too small for the stated `lmax` of {1}. ".format(salm.shape, lmax) + "Perhaps you forgot to include the (zero) modes with ell<|s|.") map = np.empty(salm.shape[:-1]+(Ntheta, Nphi), dtype=np.complex128) if salm.ndim>1: s = np.ascontiguousarray(s, dtype=np.intc) if s.ndim != salm.ndim-1 or np.product(s.shape) != np.product(salm.shape[:-1]): s = s*np.ones(salm.shape[:-1], dtype=np.intc) _multi_salm2map(salm, map, s, lmax, Ntheta, Nphi) else: _salm2map(salm, map, s, lmax, Ntheta, Nphi) return map
Convert mode weights of spin-weighted function to values on a grid Parameters ---------- salm : array_like, complex, shape (..., (lmax+1)**2) Input array representing mode weights of the spin-weighted function. This array may be multi-dimensional, where initial dimensions may represent different times, for example, or separate functions on the sphere. The final dimension should give the values of the mode weights, in the order described below in the 'Notes' section. s : int or array, int, shape (...) Spin weight of the function. If `salm` is multidimensional and this is an array, its dimensions must match the first dimensions of `salm`, and the different values are the spin weights of the different functions represented by those dimensions. Otherwise, if `salm` is multidimensional and `s` is a single integer, all functions are assumed to have the same spin weight. lmax : int The largest `ell` value present in the input array. Ntheta : int Number of points in the output grid along the polar angle. Nphi : int Number of points in the output grid along the azimuthal angle. Returns ------- map : ndarray, complex, shape (..., Ntheta, Nphi) Values of the spin-weighted function on grid points of the sphere. This array is shaped like the input `salm` array, but has one extra dimension. The final two dimensions describe the values of the function on the sphere. See also -------- spinsfast.map2salm : Roughly the inverse of this function. Notes ----- The input `salm` data should be given in increasing order of `ell` value, always starting with (ell, m) = (0, 0) even if `s` is nonzero, proceeding to (1, -1), (1, 0), (1, 1), etc. Explicitly, the ordering should match this: [f_lm(ell, m) for ell in range(lmax+1) for m in range(-ell, ell+1)] The input is converted to a contiguous complex numpy array if necessary. The output data are presented on this grid of spherical coordinates: np.array([[f(theta, phi) for phi in np.linspace(0.0, 2*np.pi, num=2*lmax+1, endpoint=False)] for theta in np.linspace(0.0, np.pi, num=2*lmax+1, endpoint=True)]) Note that `map2salm` and `salm2map` are not true inverses of each other for several reasons. First, modes with `ell < |s|` should always be zero; they are simply assumed to be zero on input to `salm2map`. It is also possible to define a `map` function that violates this assumption -- for example, having a nonzero average value over the sphere, if the function has nonzero spin `s`, this is impossible. Also, it is possible to define a map of a function with so much angular dependence that it cannot be captured with the given `lmax` value. For example, a discontinuous function will never be perfectly resolved. Example ------- >>> s = -2 >>> lmax = 8 >>> Ntheta = Nphi = 2*lmax + 1 >>> modes = np.zeros(spinsfast.N_lm(lmax), dtype=np.complex128) >>> modes[spinsfast.lm_ind(2, 2, 8)] = 1.0 >>> values = spinsfast.salm2map(modes, s, lmax, Ntheta, Nphi)
def _get_streams(self): """ Finds the streams from ustvnow.com. """ token = self.login(self.get_option("username"), self.get_option("password")) m = self._url_re.match(self.url) scode = m and m.group("scode") or self.get_option("station_code") res = self.session.http.get(self._guide_url, params=dict(token=token)) channels = OrderedDict() for t in itertags(res.text, "a"): if t.attributes.get('cs'): channels[t.attributes.get('cs').lower()] = t.attributes.get('title').replace("Watch ", "").strip() if not scode: log.error("Station code not provided, use --ustvnow-station-code.") log.info("Available stations are: \n{0} ".format('\n'.join(' {0} ({1})'.format(c, n) for c, n in channels.items()))) return if scode in channels: log.debug("Finding streams for: {0}", channels.get(scode)) r = self.session.http.get(self._stream_url, params={"scode": scode, "token": token, "br_n": "Firefox", "br_v": "52", "br_d": "desktop"}, headers={"User-Agent": useragents.FIREFOX}) data = self.session.http.json(r) return HLSStream.parse_variant_playlist(self.session, data["stream"]) else: log.error("Invalid station-code: {0}", scode)
Finds the streams from ustvnow.com.
def dependency_context(package_names, aggressively_remove=False): """ Install the supplied packages and yield. Finally, remove all packages that were installed. Currently assumes 'aptitude' is available. """ installed_packages = [] log = logging.getLogger(__name__) try: if not package_names: logging.debug('No packages requested') if package_names: lock = yg.lockfile.FileLock( '/tmp/.pkg-context-lock', timeout=30 * 60) log.info('Acquiring lock to perform install') lock.acquire() log.info('Installing ' + ', '.join(package_names)) output = subprocess.check_output( ['sudo', 'aptitude', 'install', '-y'] + package_names, stderr=subprocess.STDOUT, ) log.debug('Aptitude output:\n%s', output) installed_packages = jaraco.apt.parse_new_packages( output, include_automatic=aggressively_remove) if not installed_packages: lock.release() log.info('Installed ' + ', '.join(installed_packages)) yield installed_packages except subprocess.CalledProcessError: log.error("Error occurred installing packages") raise finally: if installed_packages: log.info('Removing ' + ','.join(installed_packages)) subprocess.check_call( ['sudo', 'aptitude', 'remove', '-y'] + installed_packages, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) lock.release()
Install the supplied packages and yield. Finally, remove all packages that were installed. Currently assumes 'aptitude' is available.
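A hedged usage sketch; since the function yields, it is presumably wrapped with contextlib.contextmanager at its definition site:

with dependency_context(['build-essential'], aggressively_remove=True) as pkgs:
    print('temporarily installed:', pkgs)
# on exit, any packages that were newly installed are removed again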
def are_you_sure(msg=''): r""" Prompts user to accept or checks command line for -y Args: msg (str): Returns: bool: accept or not """ print(msg) from utool import util_arg from utool import util_str override = util_arg.get_argflag(('--yes', '--y', '-y')) if override: print('accepting based on command line flag') return True valid_ans = ['yes', 'y'] valid_prompt = util_str.conj_phrase(valid_ans, 'or') ans = input('Are you sure?\n Enter %s to accept\n' % valid_prompt) return ans.lower() in valid_ans
r""" Prompts user to accept or checks command line for -y Args: msg (str): Returns: bool: accept or not
def EnumMissingModules(): """Enumerate all modules which match the patterns MODULE_PATTERNS. PyInstaller often fails to locate all dlls which are required at runtime. We import all the client modules here, then simply introspect all the modules we have loaded in our current running process; all the ones matching the patterns are copied into the client package. Yields: a source file for a linked dll. """ module_handle = ctypes.c_ulong() count = ctypes.c_ulong() process_handle = ctypes.windll.kernel32.OpenProcess( PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, 0, os.getpid()) ctypes.windll.psapi.EnumProcessModules(process_handle, ctypes.byref(module_handle), ctypes.sizeof(module_handle), ctypes.byref(count)) # The size of a handle is pointer size (i.e. 64 bit on amd64 and 32 bit on # i386). if sys.maxsize > 2**32: handle_type = ctypes.c_ulonglong else: handle_type = ctypes.c_ulong module_list = (handle_type * (count.value // ctypes.sizeof(handle_type)))() ctypes.windll.psapi.EnumProcessModulesEx(process_handle, ctypes.byref(module_list), ctypes.sizeof(module_list), ctypes.byref(count), 2) for x in module_list: module_filename = win32process.GetModuleFileNameEx(process_handle, x) for pattern in MODULE_PATTERNS: if pattern.match(os.path.basename(module_filename)): yield module_filename for venv_file in FILES_FROM_VIRTUALENV: yield os.path.join(sys.prefix, venv_file)
Enumerate all modules which match the patterns MODULE_PATTERNS. PyInstaller often fails to locate all dlls which are required at runtime. We import all the client modules here, then simply introspect all the modules we have loaded in our current running process; all the ones matching the patterns are copied into the client package. Yields: a source file for a linked dll.
def move_wheel_files( name, # type: str req, # type: Requirement wheeldir, # type: str user=False, # type: bool home=None, # type: Optional[str] root=None, # type: Optional[str] pycompile=True, # type: bool scheme=None, # type: Optional[Mapping[str, str]] isolated=False, # type: bool prefix=None, # type: Optional[str] warn_script_location=True # type: bool ): # type: (...) -> None """Install a wheel""" # TODO: Investigate and break this up. # TODO: Look into moving this into a dedicated class for representing an # installation. if not scheme: scheme = distutils_scheme( name, user=user, home=home, root=root, isolated=isolated, prefix=prefix, ) if root_is_purelib(name, wheeldir): lib_dir = scheme['purelib'] else: lib_dir = scheme['platlib'] info_dir = [] # type: List[str] data_dirs = [] source = wheeldir.rstrip(os.path.sep) + os.path.sep # Record details of the files moved # installed = files copied from the wheel to the destination # changed = files changed while installing (scripts #! line typically) # generated = files newly generated during the install (script wrappers) installed = {} # type: Dict[str, str] changed = set() generated = [] # type: List[str] # Compile all of the pyc files that we're going to be installing if pycompile: with captured_stdout() as stdout: with warnings.catch_warnings(): warnings.filterwarnings('ignore') compileall.compile_dir(source, force=True, quiet=True) logger.debug(stdout.getvalue()) def record_installed(srcfile, destfile, modified=False): """Map archive RECORD paths to installation RECORD paths.""" oldpath = normpath(srcfile, wheeldir) newpath = normpath(destfile, lib_dir) installed[oldpath] = newpath if modified: changed.add(destfile) def clobber(source, dest, is_base, fixer=None, filter=None): ensure_dir(dest) # common for the 'include' path for dir, subdirs, files in os.walk(source): basedir = dir[len(source):].lstrip(os.path.sep) destdir = os.path.join(dest, basedir) if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): continue for s in subdirs: destsubdir = os.path.join(dest, basedir, s) if is_base and basedir == '' and destsubdir.endswith('.data'): data_dirs.append(s) continue elif (is_base and s.endswith('.dist-info') and canonicalize_name(s).startswith( canonicalize_name(req.name))): assert not info_dir, ('Multiple .dist-info directories: ' + destsubdir + ', ' + ', '.join(info_dir)) info_dir.append(destsubdir) for f in files: # Skip unwanted files if filter and filter(f): continue srcfile = os.path.join(dir, f) destfile = os.path.join(dest, basedir, f) # directory creation is lazy and after the file filtering above # to ensure we don't install empty dirs; empty dirs can't be # uninstalled. ensure_dir(destdir) # copyfile (called below) truncates the destination if it # exists and then writes the new contents. This is fine in most # cases, but can cause a segfault if pip has loaded a shared # object (e.g. from pyopenssl through its vendored urllib3) # Since the shared object is mmap'd an attempt to call a # symbol in it will then cause a segfault. Unlinking the file # allows writing of new contents while allowing the process to # continue to use the old copy. if os.path.exists(destfile): os.unlink(destfile) # We use copyfile (not move, copy, or copy2) to be extra sure # that we are not moving directories over (copyfile fails for # directories) as well as to ensure that we are not copying # over any metadata because we want more control over what # metadata we actually copy over. shutil.copyfile(srcfile, destfile) # Copy over the metadata for the file, currently this only # includes the atime and mtime. st = os.stat(srcfile) if hasattr(os, "utime"): os.utime(destfile, (st.st_atime, st.st_mtime)) # If our file is executable, then make our destination file # executable. if os.access(srcfile, os.X_OK): st = os.stat(srcfile) permissions = ( st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH ) os.chmod(destfile, permissions) changed = False if fixer: changed = fixer(destfile) record_installed(srcfile, destfile, changed) clobber(source, lib_dir, True) assert info_dir, "%s .dist-info directory not found" % req # Get the defined entry points ep_file = os.path.join(info_dir[0], 'entry_points.txt') console, gui = get_entrypoints(ep_file) def is_entrypoint_wrapper(name): # EP, EP.exe and EP-script.py are scripts generated for # entry point EP by setuptools if name.lower().endswith('.exe'): matchname = name[:-4] elif name.lower().endswith('-script.py'): matchname = name[:-10] elif name.lower().endswith(".pya"): matchname = name[:-4] else: matchname = name # Ignore setuptools-generated scripts return (matchname in console or matchname in gui) for datadir in data_dirs: fixer = None filter = None for subdir in os.listdir(os.path.join(wheeldir, datadir)): fixer = None if subdir == 'scripts': fixer = fix_script filter = is_entrypoint_wrapper source = os.path.join(wheeldir, datadir, subdir) dest = scheme[subdir] clobber(source, dest, False, fixer=fixer, filter=filter) maker = ScriptMaker(None, scheme['scripts']) # Ensure old scripts are overwritten. # See https://github.com/pypa/pip/issues/1800 maker.clobber = True # Ensure we don't generate any variants for scripts because this is almost # never what somebody wants. # See https://bitbucket.org/pypa/distlib/issue/35/ maker.variants = {''} # This is required because otherwise distlib creates scripts that are not # executable. # See https://bitbucket.org/pypa/distlib/issue/32/ maker.set_mode = True # Simplify the script and fix the fact that the default script swallows # every single stack trace. # See https://bitbucket.org/pypa/distlib/issue/34/ # See https://bitbucket.org/pypa/distlib/issue/33/ def _get_script_text(entry): if entry.suffix is None: raise InstallationError( "Invalid script entry point: %s for req: %s - A callable " "suffix is required. Cf https://packaging.python.org/en/" "latest/distributing.html#console-scripts for more " "information." % (entry, req) ) return maker.script_template % { "module": entry.prefix, "import_name": entry.suffix.split(".")[0], "func": entry.suffix, } # ignore type, because mypy disallows assigning to a method, # see https://github.com/python/mypy/issues/2427 maker._get_script_text = _get_script_text # type: ignore maker.script_template = r"""# -*- coding: utf-8 -*- import re import sys from %(module)s import %(import_name)s if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit(%(func)s()) """ # Special case pip and setuptools to generate versioned wrappers # # The issue is that some projects (specifically, pip and setuptools) use # code in setup.py to create "versioned" entry points - pip2.7 on Python # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into # the wheel metadata at build time, and so if the wheel is installed with # a *different* version of Python the entry points will be wrong. The # correct fix for this is to enhance the metadata to be able to describe # such versioned entry points, but that won't happen till Metadata 2.0 is # available. # In the meantime, projects using versioned entry points will either have # incorrect versioned entry points, or they will not be able to distribute # "universal" wheels (i.e., they will need a wheel per Python version). # # Because setuptools and pip are bundled with _ensurepip and virtualenv, # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we # override the versioned entry points in the wheel and generate the # correct ones. This code is purely a short-term measure until Metadata 2.0 # is available. # # To add the level of hack in this section of code, in order to support # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment # variable which will control which version scripts get installed. # # ENSUREPIP_OPTIONS=altinstall # - Only pipX.Y and easy_install-X.Y will be generated and installed # ENSUREPIP_OPTIONS=install # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note # that this option is technically if ENSUREPIP_OPTIONS is set and is # not altinstall # DEFAULT # - The default behavior is to install pip, pipX, pipX.Y, easy_install # and easy_install-X.Y. pip_script = console.pop('pip', None) if pip_script: if "ENSUREPIP_OPTIONS" not in os.environ: spec = 'pip = ' + pip_script generated.extend(maker.make(spec)) if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": spec = 'pip%s = %s' % (sys.version[:1], pip_script) generated.extend(maker.make(spec)) spec = 'pip%s = %s' % (sys.version[:3], pip_script) generated.extend(maker.make(spec)) # Delete any other versioned pip entry points pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] for k in pip_ep: del console[k] easy_install_script = console.pop('easy_install', None) if easy_install_script: if "ENSUREPIP_OPTIONS" not in os.environ: spec = 'easy_install = ' + easy_install_script generated.extend(maker.make(spec)) spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) generated.extend(maker.make(spec)) # Delete any other versioned easy_install entry points easy_install_ep = [ k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) ] for k in easy_install_ep: del console[k] # Generate the console and GUI entry points specified in the wheel if len(console) > 0: generated_console_scripts = maker.make_multiple( ['%s = %s' % kv for kv in console.items()] ) generated.extend(generated_console_scripts) if warn_script_location: msg = message_about_scripts_not_on_PATH(generated_console_scripts) if msg is not None: logger.warning(msg) if len(gui) > 0: generated.extend( maker.make_multiple( ['%s = %s' % kv for kv in gui.items()], {'gui': True} ) ) # Record pip as the installer installer = os.path.join(info_dir[0], 'INSTALLER') temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') with open(temp_installer, 'wb') as installer_file: installer_file.write(b'pip\n') shutil.move(temp_installer, installer) generated.append(installer) # Record details of all files installed record = os.path.join(info_dir[0], 'RECORD') temp_record = os.path.join(info_dir[0], 'RECORD.pip') with open_for_csv(record, 'r') as record_in: with open_for_csv(temp_record, 'w+') as record_out: reader = csv.reader(record_in) outrows = get_csv_rows_for_installed( reader, installed=installed, changed=changed, generated=generated, lib_dir=lib_dir, ) writer = csv.writer(record_out) # Sort to simplify testing. for row in sorted_outrows(outrows): writer.writerow(row) shutil.move(temp_record, record)
Install a wheel
def run(self, concurrency=0, outline=False, tail=False, dump=False, *args, **kwargs): """Kicks off the build/update of the stacks in the stack_definitions. This is the main entry point for the Builder. """ plan = self._generate_plan(tail=tail) if not plan.keys(): logger.warn('WARNING: No stacks detected (error in config?)') if not outline and not dump: plan.outline(logging.DEBUG) logger.debug("Launching stacks: %s", ", ".join(plan.keys())) walker = build_walker(concurrency) plan.execute(walker) else: if outline: plan.outline() if dump: plan.dump(directory=dump, context=self.context, provider=self.provider)
Kicks off the build/update of the stacks in the stack_definitions. This is the main entry point for the Builder.
def enable_network(self, *hostnames): """ Enables real networking mode, optionally passing one or multiple hostnames that will be used as filters. If at least one hostname matches with the outgoing traffic, the request will be executed via the real network. Arguments: *hostnames: optional list of host names to enable real network against them. hostname value can be a regular expression. """ def hostname_filter(hostname, req): if isregex(hostname): return hostname.match(req.url.hostname) return req.url.hostname == hostname for hostname in hostnames: self.use_network_filter(partial(hostname_filter, hostname)) self.networking = True
Enables real networking mode, optionally passing one or multiple hostnames that will be used as filters. If at least one hostname matches with the outgoing traffic, the request will be executed via the real network. Arguments: *hostnames: optional list of host names to enable real network against them. hostname value can be a regular expression.
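A hedged usage sketch in the pook style this engine method suggests:

import pook

pook.on()
pook.enable_network('api.example.com')  # hypothetical host; regexes also work
# requests to api.example.com now pass through to the real network;
# everything else must still match a registered mock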