hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acfb73f77f0eaebde32adc9105d04427be2a507e
| 57,573
|
py
|
Python
|
osf/models/user.py
|
fabmiz/osf.io
|
8d86af3f0a6e5388bd5b18383e68e27b65a66247
|
[
"Apache-2.0"
] | null | null | null |
osf/models/user.py
|
fabmiz/osf.io
|
8d86af3f0a6e5388bd5b18383e68e27b65a66247
|
[
"Apache-2.0"
] | null | null | null |
osf/models/user.py
|
fabmiz/osf.io
|
8d86af3f0a6e5388bd5b18383e68e27b65a66247
|
[
"Apache-2.0"
] | null | null | null |
import datetime as dt
import logging
import re
import urllib
import urlparse
import uuid
from copy import deepcopy
from os.path import splitext
from flask import Request as FlaskRequest
from framework import analytics
# OSF imports
import itsdangerous
import pytz
from dirtyfields import DirtyFieldsMixin
from django.conf import settings
from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager
from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import PermissionsMixin
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.db import models
from django.utils import timezone
from django_extensions.db.models import TimeStampedModel
from framework.auth import Auth, signals, utils
from framework.auth.core import generate_verification_key
from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError,
InvalidTokenError,
MergeConfirmedRequiredError,
MergeConflictError)
from framework.exceptions import PermissionsError
from framework.sessions.utils import remove_sessions_for_user
from osf.utils.requests import get_current_request
from osf.exceptions import reraise_django_validation_errors, MaxRetriesError
from osf.models.base import BaseModel, GuidMixin, GuidMixinQuerySet
from osf.models.contributor import RecentlyAddedContributor
from osf.models.institution import Institution
from osf.models.mixins import AddonModelMixin
from osf.models.session import Session
from osf.models.tag import Tag
from osf.models.validators import validate_email, validate_social, validate_history_item
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
from osf.utils.fields import NonNaiveDateTimeField, LowercaseEmailField
from osf.utils.names import impute_names
from website import settings as website_settings
from website import filters, mails
from website.project import new_bookmark_collection
# Module-level logger for user-model operations.
logger = logging.getLogger(__name__)

# Cap on rename retries for conflicting quickfiles names during a user merge
# (see OSFUser.merge_user).
MAX_QUICKFILES_MERGE_RENAME_ATTEMPTS = 1000
def get_default_mailing_lists():
    """Default value for ``OSFUser.osf_mailing_lists``: opted in to the help list."""
    defaults = {'Open Science Framework Help': True}
    return defaults
# Display-name renderers keyed by style. Each callable takes a user-like
# object exposing fullname / family_name / given_name_initial.
name_formatters = {
    'long': lambda user: user.fullname,
    'surname': lambda user: user.family_name if user.family_name else user.fullname,
    'initials': lambda user: u'{surname}, {initial}.'.format(
        surname=user.family_name,
        initial=user.given_name_initial,
    ),
}
class OSFUserManager(BaseUserManager):
    """Default manager for OSFUser.

    Provides the factory methods Django's auth framework requires
    (``create_user`` / ``create_superuser``) plus GUID-aware queryset helpers.
    """

    def create_user(self, username, password=None):
        """Create and save an active user; ``username`` doubles as the email address."""
        if not username:
            raise ValueError('Users must have a username')

        user = self.model(
            username=self.normalize_email(username),
            is_active=True,
            date_registered=timezone.now()
        )

        user.set_password(password)
        user.save(using=self._db)
        return user

    # Queryset class used by this manager; adds GUID annotation support.
    # NOTE(review): declared mid-class between methods — functional, but
    # conventionally this attribute belongs at the top of the class body.
    _queryset_class = GuidMixinQuerySet

    def all(self):
        # Annotate rows with their GUIDs so `_id` access avoids extra queries.
        qs = super(OSFUserManager, self).all()
        qs.annotate_query_with_guids()
        return qs

    def eager(self, *fields):
        """Return a queryset eagerly loading the given FK and M2M fields."""
        fk_fields = set(self.model.get_fk_field_names()) & set(fields)
        m2m_fields = set(self.model.get_m2m_field_names()) & set(fields)
        return self.select_related(*fk_fields).prefetch_related(*m2m_fields)

    def create_superuser(self, username, password):
        """Create a user and grant superuser/staff status."""
        user = self.create_user(username, password=password)
        user.is_superuser = True
        user.is_staff = True
        user.is_active = True
        user.save(using=self._db)
        return user
class Email(BaseModel, TimeStampedModel):
    """A confirmed email address belonging to an OSFUser (one user, many emails)."""
    # Stored lowercased; validate_email rejects invalid/blacklisted addresses.
    address = LowercaseEmailField(unique=True, db_index=True, validators=[validate_email])
    user = models.ForeignKey('OSFUser', related_name='emails', on_delete=models.CASCADE)

    def __unicode__(self):
        return self.address
class OSFUser(DirtyFieldsMixin, GuidMixin, BaseModel, AbstractBaseUser, PermissionsMixin, AddonModelMixin):
    """The OSF user model.

    Combines Django's auth user (AbstractBaseUser / PermissionsMixin) with
    OSF GUID support (GuidMixin), dirty-field tracking used to trigger
    search re-indexing (DirtyFieldsMixin), and addon settings
    (AddonModelMixin).
    """
    # Queryset filter aliases for v1 compatibility: e.g. filter(_id=...)
    # resolves through the related Guid row.
    FIELD_ALIASES = {
        '_id': 'guids___id',
        'system_tags': 'tags',
    }
    settings_type = 'user'  # Needed for addons

    USERNAME_FIELD = 'username'

    # Node fields that trigger an update to the search engine on save
    SEARCH_UPDATE_FIELDS = {
        'fullname',
        'given_name',
        'middle_names',
        'family_name',
        'suffix',
        'merged_by',
        'date_disabled',
        'date_confirmed',
        'jobs',
        'schools',
        'social',
    }

    # All fields tracked for change detection: the search fields plus
    # auth-sensitive ones.
    TRACK_FIELDS = SEARCH_UPDATE_FIELDS.copy()
    TRACK_FIELDS.update({'password', 'last_login'})

    # TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
    #   search update for all nodes to which the user is a contributor.

    # URL templates used by `social_links` to expand stored handles/ids.
    # 'profileWebsites' is a pass-through list rather than a template.
    SOCIAL_FIELDS = {
        'orcid': u'http://orcid.org/{}',
        'github': u'http://github.com/{}',
        'scholar': u'http://scholar.google.com/citations?user={}',
        'twitter': u'http://twitter.com/{}',
        'profileWebsites': [],
        'linkedIn': u'https://www.linkedin.com/{}',
        'impactStory': u'https://impactstory.org/u/{}',
        'researcherId': u'http://researcherid.com/rid/{}',
        'researchGate': u'https://researchgate.net/profile/{}',
        'academiaInstitution': u'https://{}',
        'academiaProfileID': u'.academia.edu/{}',
        'baiduScholar': u'http://xueshu.baidu.com/scholarID/{}',
        'ssrn': u'http://papers.ssrn.com/sol3/cf_dev/AbsByAuth.cfm?per_id={}'
    }

    # The primary email address for the account.
    # This value is unique, but multiple "None" records exist for:
    # * unregistered contributors where an email address was not provided.
    # TODO: Update mailchimp subscription on username change in user.save()
    # TODO: Consider making this a FK to Email with to_field='address'
    #   Django supports this (https://docs.djangoproject.com/en/1.11/topics/auth/customizing/#django.contrib.auth.models.CustomUser.USERNAME_FIELD)
    #   but some third-party apps may not.
    username = models.CharField(max_length=255, db_index=True, unique=True)

    # Hashed. Use `User.set_password` and `User.check_password`
    # password = models.CharField(max_length=255)

    fullname = models.CharField(max_length=255)

    # user has taken action to register the account
    is_registered = models.BooleanField(db_index=True, default=False)

    # user has claimed the account
    # TODO: This should be retired - it always reflects is_registered.
    #   While a few entries exist where this is not the case, they appear to be
    #   the result of a bug, as they were all created over a small time span.
    is_claimed = models.BooleanField(default=False, db_index=True)

    # for internal use
    tags = models.ManyToManyField('Tag', blank=True)

    # security emails that have been sent
    # TODO: This should be removed and/or merged with system_tags
    security_messages = DateTimeAwareJSONField(default=dict, blank=True)
    # Format: {
    #   <message label>: <datetime>
    #   ...
    # }

    # user was invited (as opposed to registered unprompted)
    is_invited = models.BooleanField(default=False, db_index=True)

    # Per-project unclaimed user data:
    # TODO: add validation
    unclaimed_records = DateTimeAwareJSONField(default=dict, blank=True)
    # Format: {
    #   <project_id>: {
    #       'name': <name that referrer provided>,
    #       'referrer_id': <user ID of referrer>,
    #       'token': <token used for verification urls>,
    #       'email': <email the referrer provided or None>,
    #       'claimer_email': <email the claimer entered or None>,
    #       'last_sent': <timestamp of last email sent to referrer or None>
    #   }
    #   ...
    # }

    # Time of last sent notification email to newly added contributors
    # Format : {
    #   <project_id>: {
    #       'last_sent': time.time()
    #   }
    #   ...
    # }
    contributor_added_email_records = DateTimeAwareJSONField(default=dict, blank=True)

    # The user into which this account was merged
    merged_by = models.ForeignKey('self', null=True, blank=True, related_name='merger')

    # verification key v1: only the token string, no expiration time
    # used for cas login with username and verification key
    verification_key = models.CharField(max_length=255, null=True, blank=True)

    # verification key v2: token, and expiration time
    # used for password reset, confirm account/email, claim account/contributor-ship
    verification_key_v2 = DateTimeAwareJSONField(default=dict, blank=True, null=True)
    # Format: {
    #   'token': <verification token>
    #   'expires': <verification expiration time>
    # }

    email_last_sent = NonNaiveDateTimeField(null=True, blank=True)

    # email verification tokens
    # see also ``unconfirmed_emails``
    email_verifications = DateTimeAwareJSONField(default=dict, blank=True)
    # Format: {
    #   <token> : {'email': <email address>,
    #              'expiration': <datetime>}
    # }

    # email lists to which the user has chosen a subscription setting
    mailchimp_mailing_lists = DateTimeAwareJSONField(default=dict, blank=True)
    # Format: {
    #   'list1': True,
    #   'list2: False,
    #   ...
    # }

    # email lists to which the user has chosen a subscription setting,
    # being sent from osf, rather than mailchimp
    osf_mailing_lists = DateTimeAwareJSONField(default=get_default_mailing_lists, blank=True)
    # Format: {
    #   'list1': True,
    #   'list2: False,
    #   ...
    # }

    # the date this user was registered
    date_registered = NonNaiveDateTimeField(db_index=True, auto_now_add=True)

    # list of collaborators that this user recently added to nodes as a contributor
    # recently_added = fields.ForeignField("user", list=True)
    recently_added = models.ManyToManyField('self',
                                            through=RecentlyAddedContributor,
                                            through_fields=('user', 'contributor'),
                                            symmetrical=False)

    # Attached external accounts (OAuth)
    # external_accounts = fields.ForeignField("externalaccount", list=True)
    external_accounts = models.ManyToManyField('ExternalAccount', blank=True)

    # CSL names
    given_name = models.CharField(max_length=255, blank=True)
    middle_names = models.CharField(max_length=255, blank=True)
    family_name = models.CharField(max_length=255, blank=True)
    suffix = models.CharField(max_length=255, blank=True)

    # identity for user logged in through external idp
    external_identity = DateTimeAwareJSONField(default=dict, blank=True)
    # Format: {
    #   <external_id_provider>: {
    #       <external_id>: <status from ('VERIFIED, 'CREATE', 'LINK')>,
    #       ...
    #   },
    #   ...
    # }

    # Employment history
    jobs = DateTimeAwareJSONField(default=list, blank=True, validators=[validate_history_item])
    # Format: list of {
    #     'title': <position or job title>,
    #     'institution': <institution or organization>,
    #     'department': <department>,
    #     'location': <location>,
    #     'startMonth': <start month>,
    #     'startYear': <start year>,
    #     'endMonth': <end month>,
    #     'endYear': <end year>,
    #     'ongoing: <boolean>
    # }

    # Educational history
    schools = DateTimeAwareJSONField(default=list, blank=True, validators=[validate_history_item])
    # Format: list of {
    #     'degree': <position or job title>,
    #     'institution': <institution or organization>,
    #     'department': <department>,
    #     'location': <location>,
    #     'startMonth': <start month>,
    #     'startYear': <start year>,
    #     'endMonth': <end month>,
    #     'endYear': <end year>,
    #     'ongoing: <boolean>
    # }

    # Social links
    social = DateTimeAwareJSONField(default=dict, blank=True, validators=[validate_social])
    # Format: {
    #     'profileWebsites': <list of profile websites>
    #     'twitter': <twitter id>,
    # }

    # date the user last sent a request
    date_last_login = NonNaiveDateTimeField(null=True, blank=True)

    # date the user first successfully confirmed an email address
    date_confirmed = NonNaiveDateTimeField(db_index=True, null=True, blank=True)

    # When the user was disabled.
    date_disabled = NonNaiveDateTimeField(db_index=True, null=True, blank=True)

    # when comments were last viewed
    comments_viewed_timestamp = DateTimeAwareJSONField(default=dict, blank=True)
    # Format: {
    #   'Comment.root_target._id': 'timestamp',
    #   ...
    # }

    # timezone for user's locale (e.g. 'America/New_York')
    timezone = models.CharField(blank=True, default='Etc/UTC', max_length=255)

    # user language and locale data (e.g. 'en_US')
    locale = models.CharField(blank=True, max_length=255, default='en_US')

    # whether the user has requested to deactivate their account
    requested_deactivation = models.BooleanField(default=False)

    affiliated_institutions = models.ManyToManyField('Institution', blank=True)

    notifications_configured = DateTimeAwareJSONField(default=dict, blank=True)

    objects = OSFUserManager()

    # Derived in update_is_active(); kept consistent on every save().
    is_active = models.BooleanField(default=False)
    is_staff = models.BooleanField(default=False)
def __repr__(self):
    # Debug representation: username plus GUID.
    return '<OSFUser(%r) with guid %r>' % (self.username, self._id)
@property
def deep_url(self):
    """Used for GUID resolution."""
    return '/profile/{}/'.format(self._primary_key)

@property
def url(self):
    """Relative URL of this user's profile page (by GUID)."""
    return '/{}/'.format(self._id)

@property
def absolute_url(self):
    """Fully-qualified profile URL on the configured OSF domain."""
    return urlparse.urljoin(website_settings.DOMAIN, self.url)

@property
def absolute_api_v2_url(self):
    """Absolute APIv2 URL for this user resource."""
    # Local import — presumably avoids a circular import at module load; confirm.
    from website import util
    return util.api_v2_url('users/{}/'.format(self._id))

@property
def api_url(self):
    """Relative APIv1 profile endpoint."""
    return '/api/v1/profile/{}/'.format(self._id)

@property
def profile_url(self):
    """Relative profile URL (same value as ``url``)."""
    return '/{}/'.format(self._id)
@property
def is_disabled(self):
    """Whether the account is disabled; derived from ``date_disabled``."""
    return self.date_disabled is not None

@is_disabled.setter
def is_disabled(self, val):
    """Set whether or not this account has been disabled."""
    # Setting True stamps the disable time once; re-setting True keeps the
    # original timestamp. Only an explicit False clears it.
    if val and not self.date_disabled:
        self.date_disabled = timezone.now()
    elif val is False:
        self.date_disabled = None

@property
def is_confirmed(self):
    # True once the user has confirmed at least one email address.
    return bool(self.date_confirmed)

@property
def is_merged(self):
    """Whether or not this account has been merged into another account.
    """
    return self.merged_by is not None
@property
def unconfirmed_emails(self):
    """Email addresses that have a pending verification token."""
    # The field may be stored as None rather than {}.
    verifications = self.email_verifications or {}
    return [record['email'] for record in verifications.values()]
@property
def social_links(self):
    """Map each populated social field to a full URL via SOCIAL_FIELDS templates."""
    links = {}
    for field, value in self.social.items():
        # Skip empty values and unrecognized keys.
        if not value or field not in self.SOCIAL_FIELDS:
            continue
        if isinstance(value, basestring):
            links[field] = self.SOCIAL_FIELDS[field].format(value)
        else:
            # Non-string values (e.g. the 'profileWebsites' list) pass through.
            links[field] = value
    return links
@property
def given_name_initial(self):
    """
    The user's preferred initialization of their given name.

    Some users with common names may choose to distinguish themselves from
    their colleagues in this way. For instance, there could be two
    well-known researchers in a single field named "Robert Walker".
    "Walker, R" could then refer to either of them. "Walker, R.H." could
    provide easy disambiguation.

    NOTE: The internal representation for this should never end with a
          period. "R" and "R.H" would be correct in the prior case, but
          "R.H." would not.
    """
    # NOTE(review): raises IndexError when given_name is '' — callers (e.g.
    # name_formatters['initials']) appear to assume a non-empty given name;
    # confirm before relying on this with unparsed names.
    return self.given_name[0]
@property
def email(self):
    """The primary (username) email address, or None when the username is a placeholder."""
    return self.username if self.has_usable_username() else None
@property
def all_tags(self):
    """Return a queryset containing all of this user's tags (incl. system tags)."""
    # Tag's default manager only returns non-system tags, so we can't use self.tags
    return Tag.all_tags.filter(osfuser=self)

@property
def system_tags(self):
    """The system tags associated with this node. This currently returns a list of string
    names for the tags, for compatibility with v1. Eventually, we can just return the
    QuerySet.
    """
    return self.all_tags.filter(system=True).values_list('name', flat=True)

@property
def csl_given_name(self):
    """CSL-formatted given name (see framework.auth.utils.generate_csl_given_name)."""
    return utils.generate_csl_given_name(self.given_name, self.middle_names, self.suffix)
def csl_name(self, node_id=None):
    """Return a CSL name dict ({'family': ..., 'given': ...}) for citations.

    Unregistered users fall back to the name recorded in their unclaimed
    record for ``node_id``.
    """
    if self.is_registered:
        name = self.fullname
    else:
        name = self.get_unclaimed_record(node_id)['name']

    # Prefer the explicitly-entered CSL name parts when both are present.
    if self.family_name and self.given_name:
        return {
            'family': self.family_name,
            'given': self.csl_given_name,
        }

    # Otherwise impute the parts from the display name.
    parsed = utils.impute_names(name)
    imputed_given = utils.generate_csl_given_name(
        parsed['given'], parsed['middle'], parsed['suffix'])
    return {
        'family': parsed['family'],
        'given': imputed_given,
    }
@property
def contributor_to(self):
    """Non-deleted nodes and registrations this user contributes to."""
    return self.nodes.filter(is_deleted=False, type__in=['osf.node', 'osf.registration'])

@property
def visible_contributor_to(self):
    """Subset of ``contributor_to`` where this user's contributorship is visible."""
    return self.nodes.filter(is_deleted=False, contributor__visible=True, type__in=['osf.node', 'osf.registration'])
def set_unusable_username(self):
    """Replace the username with a non-email placeholder (GUID or random UUID).

    Used e.g. for invited contributors and merged users. Needed because
    Django does not allow the username column to be nullable.

    :returns: the placeholder that was assigned
    """
    self.username = self._id if self._id else str(uuid.uuid4())
    return self.username
def has_usable_username(self):
    # A usable username looks like an email; placeholders (GUIDs/UUIDs) do not.
    return self.username.find('@') != -1
@property
def is_authenticated(self):  # Needed for django compat
    # Model instances always represent authenticated users.
    return True

@property
def is_anonymous(self):
    return False

def get_absolute_url(self):
    # Django convention: canonical URL for this object (APIv2 here).
    return self.absolute_api_v2_url

def get_addon_names(self):
    # Overridden hook from AddonModelMixin; users declare no addon names here.
    return []
# django methods
def get_full_name(self):
    """Django auth hook: the user's display name."""
    return self.fullname

def get_short_name(self):
    """Django auth hook: the username (primary email or placeholder)."""
    return self.username

def __unicode__(self):
    return self.get_short_name()

def __str__(self):
    return self.get_short_name()
@property
def contributed(self):
    """All nodes with a contributor record for this user (unfiltered)."""
    return self.nodes.all()

@property
def can_be_merged(self):
    """The ability of the `merge_user` method to fully merge the user"""
    return all((addon.can_be_merged for addon in self.get_addons()))
def merge_user(self, user):
    """Merge a registered user into this account. This user will be
    a contributor on any project. if the registered user and this account
    are both contributors of the same project. Then it will remove the
    registered user and set this account to the highest permission of the two
    and set this account to be visible if either of the two are visible on
    the project.

    NOTE(review): ``self`` is mutated throughout but never saved here —
    presumably the caller is responsible for saving; confirm.

    :param user: A User object to be merged.
    """
    # Fail if the other user has conflicts.
    if not user.can_be_merged:
        raise MergeConflictError('Users cannot be merged')
    # Move over the other user's attributes
    # TODO: confirm
    for system_tag in user.system_tags.all():
        self.add_system_tag(system_tag)

    self.is_claimed = self.is_claimed or user.is_claimed
    self.is_invited = self.is_invited or user.is_invited

    # copy over profile only if this user has no profile info
    if user.jobs and not self.jobs:
        self.jobs = user.jobs

    if user.schools and not self.schools:
        self.schools = user.schools

    if user.social and not self.social:
        self.social = user.social

    # Merge unclaimed records; on conflict this user's record wins
    # (self's entries overwrite the merged user's).
    unclaimed = user.unclaimed_records.copy()
    unclaimed.update(self.unclaimed_records)
    self.unclaimed_records = unclaimed
    # - unclaimed records should be connected to only one user
    user.unclaimed_records = {}

    security_messages = user.security_messages.copy()
    security_messages.update(self.security_messages)
    self.security_messages = security_messages

    notifications_configured = user.notifications_configured.copy()
    notifications_configured.update(self.notifications_configured)
    self.notifications_configured = notifications_configured

    if not website_settings.RUNNING_MIGRATION:
        for key, value in user.mailchimp_mailing_lists.iteritems():
            # subscribe to each list if either user was subscribed
            subscription = value or self.mailchimp_mailing_lists.get(key)
            signals.user_merged.send(self, list_name=key, subscription=subscription)

            # clear subscriptions for merged user
            signals.user_merged.send(user, list_name=key, subscription=False, send_goodbye=False)

    # Keep the most recent view timestamp per comment target.
    for target_id, timestamp in user.comments_viewed_timestamp.iteritems():
        if not self.comments_viewed_timestamp.get(target_id):
            self.comments_viewed_timestamp[target_id] = timestamp
        elif timestamp > self.comments_viewed_timestamp[target_id]:
            self.comments_viewed_timestamp[target_id] = timestamp

    # Give old user's emails to self
    user.emails.update(user=self)

    # Transfer pending email verifications, except the merged user's own
    # username and tokens self already has.
    for k, v in user.email_verifications.iteritems():
        email_to_confirm = v['email']
        if k not in self.email_verifications and email_to_confirm != user.username:
            self.email_verifications[k] = v
    user.email_verifications = {}

    self.affiliated_institutions.add(*user.affiliated_institutions.values_list('pk', flat=True))

    for service in user.external_identity:
        for service_id in user.external_identity[service].iterkeys():
            # Only copy if self does not already hold a VERIFIED entry
            # for this service_id.
            if not (
                service_id in self.external_identity.get(service, '') and
                self.external_identity[service][service_id] == 'VERIFIED'
            ):
                # Prevent 'CREATE', merging user has already been created.
                external = user.external_identity[service][service_id]
                status = 'VERIFIED' if external == 'VERIFIED' else 'LINK'
                if self.external_identity.get(service):
                    self.external_identity[service].update(
                        {service_id: status}
                    )
                else:
                    self.external_identity[service] = {
                        service_id: status
                    }
    user.external_identity = {}

    # FOREIGN FIELDS
    self.external_accounts.add(*user.external_accounts.values_list('pk', flat=True))

    # - addons
    # Note: This must occur before the merged user is removed as a
    #   contributor on the nodes, as an event hook is otherwise fired
    #   which removes the credentials.
    for addon in user.get_addons():
        user_settings = self.get_or_add_addon(addon.config.short_name)
        user_settings.merge(addon)
        user_settings.save()

    # - projects where the user was a contributor
    for node in user.contributed:
        # Skip bookmark collection node
        if node.is_bookmark_collection:
            continue
        # if both accounts are contributor of the same project
        if node.is_contributor(self) and node.is_contributor(user):
            user_permissions = node.get_permissions(user)
            self_permissions = node.get_permissions(self)
            permissions = max([user_permissions, self_permissions])
            node.set_permissions(user=self, permissions=permissions)

            visible1 = self._id in node.visible_contributor_ids
            visible2 = user._id in node.visible_contributor_ids
            if visible1 != visible2:
                node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self))

            node.contributor_set.filter(user=user).delete()
        else:
            # Only the merged user was a contributor: reassign the record.
            node.contributor_set.filter(user=user).update(user=self)
        node.save()

    # Local imports — presumably avoid circular imports; confirm.
    from osf.models import QuickFilesNode
    from osf.models import BaseFileNode

    # - projects where the user was the creator
    user.created.filter(is_bookmark_collection=False).exclude(type=QuickFilesNode._typedmodels_type).update(creator=self)

    # - file that the user has checked_out, import done here to prevent import error
    for file_node in BaseFileNode.files_checked_out(user=user):
        file_node.checkout = self
        file_node.save()

    # - move files in the merged user's quickfiles node, checking for name conflicts
    from addons.osfstorage.models import OsfStorageFileNode
    primary_quickfiles = QuickFilesNode.objects.get(creator=self)
    merging_user_quickfiles = QuickFilesNode.objects.get(creator=user)

    files_in_merging_user_quickfiles = merging_user_quickfiles.files.filter(type='osf.osfstoragefile')
    for merging_user_file in files_in_merging_user_quickfiles:
        if OsfStorageFileNode.objects.filter(node=primary_quickfiles, name=merging_user_file.name).exists():
            digit = 1
            split_filename = splitext(merging_user_file.name)
            name_without_extension = split_filename[0]
            extension = split_filename[1]
            # If the name already ends in "(<digit>)", continue that sequence.
            found_digit_in_parens = re.findall('(?<=\()(\d)(?=\))', name_without_extension)
            if found_digit_in_parens:
                found_digit = int(found_digit_in_parens[0])
                digit = found_digit + 1
                name_without_extension = name_without_extension.replace('({})'.format(found_digit), '').strip()
            new_name_format = '{} ({}){}'
            new_name = new_name_format.format(name_without_extension, digit, extension)

            # check if new name conflicts, update til it does not (try up to 1000 times)
            rename_count = 0
            while OsfStorageFileNode.objects.filter(node=primary_quickfiles, name=new_name).exists():
                digit += 1
                new_name = new_name_format.format(name_without_extension, digit, extension)
                rename_count += 1
                if rename_count >= MAX_QUICKFILES_MERGE_RENAME_ATTEMPTS:
                    raise MaxRetriesError('Maximum number of rename attempts has been reached')

            merging_user_file.name = new_name
            merging_user_file.save()

        merging_user_file.node = primary_quickfiles
        merging_user_file.save()

    # finalize the merge
    remove_sessions_for_user(user)

    # - username is set to the GUID so the merging user can set it primary
    #   in the future (note: it cannot be set to None due to non-null constraint)
    user.set_unusable_username()
    user.set_unusable_password()
    user.verification_key = None
    user.osf_mailing_lists = {}
    user.merged_by = self
    user.save()
def disable_account(self):
    """
    Disables user account, making is_disabled true, while also unsubscribing user
    from mailchimp emails, remove any existing sessions.

    Ported from framework/auth/core.py
    """
    # Local imports — presumably avoid circular imports; confirm.
    from website import mailchimp_utils
    from framework.auth import logout

    try:
        mailchimp_utils.unsubscribe_mailchimp(
            list_name=website_settings.MAILCHIMP_GENERAL_LIST,
            user_id=self._id,
            username=self.username
        )
    except mailchimp_utils.mailchimp.ListNotSubscribedError:
        # Nothing to unsubscribe — best-effort, continue disabling.
        pass
    except mailchimp_utils.mailchimp.InvalidApiKeyError:
        if not website_settings.ENABLE_EMAIL_SUBSCRIPTIONS:
            # Subscriptions disabled (e.g. dev environments): bad key expected.
            pass
        else:
            raise
    except mailchimp_utils.mailchimp.EmailNotExistsError:
        pass
    # Call to `unsubscribe` above saves, and can lead to stale data
    self.reload()
    self.is_disabled = True

    # we must call both methods to ensure the current session is cleared and all existing
    # sessions are revoked.
    req = get_current_request()
    if isinstance(req, FlaskRequest):
        logout()
    remove_sessions_for_user(self)
def update_is_active(self):
    """Update ``is_active`` to be consistent with the fields that
    it depends on.
    """
    # The user can log in if they have set a password OR hold at least one
    # VERIFIED external identity (e.g. an ORCID).
    has_verified_external_id = any(
        'VERIFIED' in statuses.values()
        for statuses in self.external_identity.values()
    )
    can_login = self.has_usable_password() or has_verified_external_id
    self.is_active = (
        self.is_registered and
        self.is_confirmed and
        can_login and
        not self.is_merged and
        not self.is_disabled
    )
# Overrides BaseModel
def save(self, *args, **kwargs):
    # Keep is_active in sync with registration/confirmation/disable state.
    self.update_is_active()
    # Usernames are stored lowercased and trimmed (email addresses).
    self.username = self.username.lower().strip() if self.username else None
    # Capture dirty fields BEFORE the super save resets dirty-state tracking.
    dirty_fields = set(self.get_dirty_fields(check_relationship=True))
    ret = super(OSFUser, self).save(*args, **kwargs)
    # Re-index in search when a search-relevant field changed on a
    # confirmed account, and keep node contributor listings up to date.
    if self.SEARCH_UPDATE_FIELDS.intersection(dirty_fields) and self.is_confirmed:
        self.update_search()
        self.update_search_nodes_contributors()
    if 'fullname' in dirty_fields:
        # Local import — presumably avoids a circular import; confirm.
        from osf.models.quickfiles import get_quickfiles_project_title, QuickFilesNode

        # The user's quickfiles project title embeds their name; rename it.
        quickfiles = QuickFilesNode.objects.filter(creator=self).first()
        if quickfiles:
            quickfiles.title = get_quickfiles_project_title(self)
            quickfiles.save()
    return ret
# Legacy methods

@classmethod
def create(cls, username, password, fullname):
    """Build (without saving) a user with imputed CSL names and a hashed password.

    :raises ValidationError: if ``username`` is an invalid or blacklisted address.
    """
    validate_email(username)  # Raises ValidationError if spam address

    user = cls(
        username=username,
        fullname=fullname,
    )
    user.update_guessed_names()
    user.set_password(password)
    return user
def set_password(self, raw_password, notify=True):
    """Set the password for this user to the hash of ``raw_password``.
    If this is a new user, we're done. If this is a password change,
    then email the user about the change and clear all the old sessions
    so that users will have to log in again with the new password.

    :param raw_password: the plaintext value of the new password
    :param notify: Only meant for unit tests to keep extra notifications from being sent
    :raises ChangePasswordError: if the new password equals the username
    """
    # Only notify and expire sessions when a confirmed account with a usable
    # password is changing it (i.e. not on initial signup).
    had_existing_password = bool(self.has_usable_password() and self.is_confirmed)
    if self.username == raw_password:
        raise ChangePasswordError(['Password cannot be the same as your email address'])
    super(OSFUser, self).set_password(raw_password)

    if had_existing_password and notify:
        mails.send_mail(
            to_addr=self.username,
            mail=mails.PASSWORD_RESET,
            mimetype='plain',
            user=self
        )
        remove_sessions_for_user(self)
@classmethod
def create_unconfirmed(cls, username, password, fullname, external_identity=None,
                       do_confirm=True, campaign=None):
    """Create a new user who has begun registration but needs to verify
    their primary email address (username).
    """
    # NOTE(review): ``do_confirm`` is accepted but never used in this method —
    # confirm whether callers rely on it before removing.
    user = cls.create(username, password, fullname)
    user.add_unconfirmed_email(username, external_identity=external_identity)
    user.is_registered = False
    if external_identity:
        user.external_identity.update(external_identity)
    if campaign:
        # needed to prevent circular import
        from framework.auth.campaigns import system_tag_for_campaign  # skipci
        # User needs to be saved before adding system tags (due to m2m relationship)
        user.save()
        user.add_system_tag(system_tag_for_campaign(campaign))
    return user
@classmethod
def create_confirmed(cls, username, password, fullname):
    """Create and save a fully registered, already-confirmed user."""
    user = cls.create(username, password, fullname)
    user.is_registered = True
    user.is_claimed = True
    user.save()  # Must save before using auto_now_add field
    # date_registered is populated by auto_now_add on the first save above.
    user.date_confirmed = user.date_registered
    user.emails.create(address=username.lower().strip())
    return user
def get_unconfirmed_email_for_token(self, token):
    """Return the email address associated with ``token`` if the token is valid.

    :rtype: str
    :raises: ExpiredTokenError if trying to access a token that is expired.
    :raises: InvalidTokenError if trying to access a token that is invalid.
    """
    if token not in self.email_verifications:
        raise InvalidTokenError

    verification = self.email_verifications[token]
    # Not all tokens are guaranteed to have expiration dates
    # NOTE(review): replace(tzinfo=pytz.utc) assumes the stored expiration is
    # a naive UTC datetime — confirm against how tokens are written.
    if (
        'expiration' in verification and
        verification['expiration'].replace(tzinfo=pytz.utc) < timezone.now()
    ):
        raise ExpiredTokenError

    return verification['email']
def get_unconfirmed_emails_exclude_external_identity(self):
    """Return a list of unconfirmed emails that are not related to external identity."""
    if not self.email_verifications:
        return []
    return [
        record.get('email')
        for record in self.email_verifications.itervalues()
        if not record.get('external_identity')
    ]
@property
def unconfirmed_email_info(self):
    """Return a list of dictionaries containing information about each of this
    user's unconfirmed emails.
    """
    unconfirmed_emails = []
    email_verifications = self.email_verifications or []
    # Only include tokens the user has clicked through ('confirmed': True);
    # these still need approval before becoming Email records.
    for token in email_verifications:
        if self.email_verifications[token].get('confirmed', False):
            try:
                # If the address already belongs to another account, surface
                # that account so the confirm flow can offer a merge.
                user_merge = OSFUser.objects.get(emails__address__iexact=self.email_verifications[token]['email'])
            except OSFUser.DoesNotExist:
                user_merge = False

            unconfirmed_emails.append({'address': self.email_verifications[token]['email'],
                                       'token': token,
                                       'confirmed': self.email_verifications[token]['confirmed'],
                                       'user_merge': user_merge.email if user_merge else False})
    return unconfirmed_emails
def clean_email_verifications(self, given_token=None):
    """Drop expired/invalid verification tokens, plus ``given_token`` if supplied.

    Reassigns ``email_verifications`` with the cleaned copy; does not save.
    """
    # Work on a copy so the dict is not mutated while being iterated.
    email_verifications = deepcopy(self.email_verifications or {})
    for token in self.email_verifications or {}:
        try:
            self.get_unconfirmed_email_for_token(token)
        except (KeyError, ExpiredTokenError):
            email_verifications.pop(token)
            continue
        if token == given_token:
            email_verifications.pop(token)
    self.email_verifications = email_verifications
def verify_password_token(self, token):
    """
    Verify that the password reset token for this user is valid.

    :param token: the token in verification key
    :return `True` if valid, otherwise `False`
    """
    if token and self.verification_key_v2:
        try:
            return (self.verification_key_v2['token'] == token and
                    self.verification_key_v2['expires'] > timezone.now())
        except (AttributeError, KeyError, TypeError):
            # AttributeError/TypeError: verification_key_v2 is not a dict
            # (or 'expires' is not comparable to a datetime);
            # KeyError: a malformed record missing 'token' or 'expires'.
            # Previously only AttributeError was caught, so a partial record
            # crashed the reset flow instead of failing verification.
            return False
    return False
def verify_claim_token(self, token, project_id):
"""Return whether or not a claim token is valid for this user for
a given node which they were added as a unregistered contributor for.
"""
try:
record = self.get_unclaimed_record(project_id)
except ValueError: # No unclaimed record for given pid
return False
return record['token'] == token
    @classmethod
    def create_unregistered(cls, fullname, email=None):
        """Create a new unregistered user.

        :param str fullname: Display name for the new user.
        :param str email: Optional email; when omitted, username and password
            are both marked unusable.
        :returns: The new (unsaved) user instance.
        """
        user = cls(
            username=email,
            fullname=fullname,
            is_invited=True,
            is_registered=False,
        )
        if not email:
            user.set_unusable_username()
        user.set_unusable_password()
        # Populate given/middle/family/suffix name parts from the full name.
        user.update_guessed_names()
        return user
def update_guessed_names(self):
"""Updates the CSL name fields inferred from the the full name.
"""
parsed = impute_names(self.fullname)
self.given_name = parsed['given']
self.middle_names = parsed['middle']
self.family_name = parsed['family']
self.suffix = parsed['suffix']
    def add_unconfirmed_email(self, email, expiration=None, external_identity=None):
        """
        Add an email verification token for a given email.

        :param email: the email to confirm
        :param expiration: overwrite default expiration time
        :param external_identity: the user's external identity
        :return: a token
        :raises: ValueError if email already confirmed, except for login through external idp.
        """

        # Note: This is technically not compliant with RFC 822, which requires
        #       that case be preserved in the "local-part" of an address. From
        #       a practical standpoint, the vast majority of email servers do
        #       not preserve case.
        #       ref: https://tools.ietf.org/html/rfc822#section-6
        email = email.lower().strip()

        # External-identity logins may re-confirm an address they already own.
        if not external_identity and self.emails.filter(address=email).exists():
            raise ValueError('Email already confirmed to this user.')

        # Django's validator raises framework ValidationError; reraise in a
        # form callers of this model expect.
        with reraise_django_validation_errors():
            validate_email(email)

        # If the unconfirmed email is already present, refresh the token
        if email in self.unconfirmed_emails:
            self.remove_unconfirmed_email(email)

        verification_key = generate_verification_key(verification_type='confirm')

        # handle when email_verifications is None
        if not self.email_verifications:
            self.email_verifications = {}

        # Record keyed by token so multiple pending addresses can coexist.
        self.email_verifications[verification_key['token']] = {
            'email': email,
            'confirmed': False,
            'expiration': expiration if expiration else verification_key['expires'],
            'external_identity': external_identity,
        }

        return verification_key['token']
def remove_unconfirmed_email(self, email):
"""Remove an unconfirmed email addresses and their tokens."""
for token, value in self.email_verifications.iteritems():
if value.get('email') == email:
del self.email_verifications[token]
return True
return False
    def remove_email(self, email):
        """Remove a confirmed email.

        :raises PermissionsError: if ``email`` is the primary (username) address.
        """
        if email == self.username:
            raise PermissionsError("Can't remove primary email")

        if self.emails.filter(address=email):
            self.emails.filter(address=email).delete()
            # Notify listeners (e.g. notification/mailing subsystems) that the
            # address was removed.
            signals.user_email_removed.send(self, email=email)
def get_confirmation_token(self, email, force=False, renew=False):
"""Return the confirmation token for a given email.
:param str email: The email to get the token for.
:param bool force: If an expired token exists for the given email, generate a new one and return it.
:param bool renew: Generate a new token and return it.
:return Return the confirmation token.
:raises: ExpiredTokenError if trying to access a token that is expired and force=False.
:raises: KeyError if there no token for the email.
"""
# TODO: Refactor "force" flag into User.get_or_add_confirmation_token
for token, info in self.email_verifications.items():
if info['email'].lower() == email.lower():
# Old records will not have an expiration key. If it's missing,
# assume the token is expired
expiration = info.get('expiration')
if renew:
new_token = self.add_unconfirmed_email(email)
self.save()
return new_token
if not expiration or (expiration and expiration < timezone.now()):
if not force:
raise ExpiredTokenError('Token for email "{0}" is expired'.format(email))
else:
new_token = self.add_unconfirmed_email(email)
self.save()
return new_token
return token
raise KeyError('No confirmation token for email "{0}"'.format(email))
    def get_confirmation_url(self, email,
                             external=True,
                             force=False,
                             renew=False,
                             external_id_provider=None,
                             destination=None):
        """Return the confirmation url for a given email.

        :param email: The email to confirm.
        :param external: Use absolute or relative url.
        :param force: If an expired token exists for the given email, generate a new one and return it.
        :param renew: Generate a new token and return it.
        :param external_id_provider: The external identity provider that authenticates the user.
        :param destination: The destination page to redirect after confirmation
        :return: Return the confirmation url.
        :raises: ExpiredTokenError if trying to access a token that is expired.
        :raises: KeyError if there is no token for the email.
        """
        base = website_settings.DOMAIN if external else '/'
        token = self.get_confirmation_token(email, force=force, renew=renew)
        # External-identity confirmations live under a distinct URL segment.
        external = 'external/' if external_id_provider else ''
        # Percent-encode the redirect target so it survives as a query param.
        destination = '?{}'.format(urllib.urlencode({'destination': destination})) if destination else ''
        return '{0}confirm/{1}{2}/{3}/{4}'.format(base, external, self._primary_key, token, destination)
    def register(self, username, password=None):
        """Registers the user.

        :param username: the address to register under (also stored as a
            confirmed email if not already present).
        :param password: optional raw password; hashed via ``set_password``.
        :returns: self, to allow chaining.
        """
        self.username = username
        if password:
            self.set_password(password)
        if not self.emails.filter(address=username):
            self.emails.create(address=username)
        self.is_registered = True
        self.is_claimed = True
        self.date_confirmed = timezone.now()
        # Keep the search index in sync with the new registration state.
        self.update_search()
        self.update_search_nodes()

        # Emit signal that a user has confirmed
        signals.user_confirmed.send(self)

        return self
    def confirm_email(self, token, merge=False):
        """Confirm the email address associated with the token.

        :param token: an unexpired verification token held by this user.
        :param merge: if the address is confirmed on another account, merge
            that account into this one instead of raising.
        :raises MergeConfirmedRequiredError: address belongs to another
            account and ``merge`` is False.
        :returns: True on success.
        """
        email = self.get_unconfirmed_email_for_token(token)

        # If this email is confirmed on another account, abort
        try:
            user_to_merge = OSFUser.objects.get(emails__address=email)
        except OSFUser.DoesNotExist:
            user_to_merge = None

        if user_to_merge and merge:
            self.merge_user(user_to_merge)
        elif user_to_merge:
            raise MergeConfirmedRequiredError(
                'Merge requires confirmation',
                user=self,
                user_to_merge=user_to_merge,
            )

        # If another user has this email as its username, get it
        try:
            unregistered_user = OSFUser.objects.exclude(guids___id=self._id).get(username=email)
        except OSFUser.DoesNotExist:
            unregistered_user = None

        if unregistered_user:
            # Absorb the unregistered account; clear its username so the
            # unique constraint is released before we take the address.
            self.merge_user(unregistered_user)
            self.save()
            unregistered_user.username = None

        if not self.emails.filter(address=email).exists():
            self.emails.create(address=email)

        # Complete registration if primary email
        if email.lower() == self.username.lower():
            self.register(self.username)
            self.date_confirmed = timezone.now()
        # Revoke token
        del self.email_verifications[token]

        # TODO: We can't assume that all unclaimed records are now claimed.
        # Clear unclaimed records, so user's name shows up correctly on
        # all projects
        self.unclaimed_records = {}
        self.save()

        self.update_search_nodes()

        return True
    def update_search(self):
        """Push this user's current data into the search index."""
        # Imported lazily to avoid a circular import at module load time.
        from website.search.search import update_user
        update_user(self)
    def update_search_nodes_contributors(self):
        """
        Bulk update contributor name on all nodes on which the user is
        a contributor.

        :return:
        """
        from website.search import search
        # Async task: reindexes contributor entries for every affected node.
        search.update_contributors_async(self.id)
    def update_search_nodes(self):
        """Call `update_search` on all nodes on which the user is a
        contributor. Needed to add self to contributor lists in search upon
        registration or claiming.
        """
        for node in self.contributor_to:
            node.update_search()
    def update_date_last_login(self):
        """Stamp the current time as the user's last login (not saved here)."""
        self.date_last_login = timezone.now()
def get_summary(self, formatter='long'):
return {
'user_fullname': self.fullname,
'user_profile_url': self.profile_url,
'user_display_name': name_formatters[formatter](self),
'user_is_claimed': self.is_claimed
}
    def check_password(self, raw_password):
        """
        Return a boolean of whether the raw_password was correct. Handles
        hashing formats behind the scenes.

        Source: https://github.com/django/django/blob/master/django/contrib/auth/base_user.py#L104
        """
        def setter(raw_password):
            # Called by Django when the stored hash needs upgrading to a
            # newer algorithm; re-hash with the same raw password.
            self.set_password(raw_password, notify=False)
            # Password hash upgrades shouldn't be considered password changes.
            self._password = None
            self.save(update_fields=['password'])
        return check_password(raw_password, self.password, setter)
def change_password(self, raw_old_password, raw_new_password, raw_confirm_password):
"""Change the password for this user to the hash of ``raw_new_password``."""
raw_old_password = (raw_old_password or '').strip()
raw_new_password = (raw_new_password or '').strip()
raw_confirm_password = (raw_confirm_password or '').strip()
# TODO: Move validation to set_password
issues = []
if not self.check_password(raw_old_password):
issues.append('Old password is invalid')
elif raw_old_password == raw_new_password:
issues.append('Password cannot be the same')
elif raw_new_password == self.username:
issues.append('Password cannot be the same as your email address')
if not raw_old_password or not raw_new_password or not raw_confirm_password:
issues.append('Passwords cannot be blank')
elif len(raw_new_password) < 8:
issues.append('Password should be at least eight characters')
elif len(raw_new_password) > 256:
issues.append('Password should not be longer than 256 characters')
if raw_new_password != raw_confirm_password:
issues.append('Password does not match the confirmation')
if issues:
raise ChangePasswordError(issues)
self.set_password(raw_new_password)
    def profile_image_url(self, size=None):
        """A generalized method for getting a user's profile picture urls.
        We may choose to use some service other than gravatar in the future,
        and should not commit ourselves to using a specific service (mostly
        an API concern).

        As long as we use gravatar, this is just a proxy to User.gravatar_url

        :param size: optional pixel size forwarded to the image provider.
        """
        return self._gravatar_url(size)
    def _gravatar_url(self, size):
        # Delegate to the website-level gravatar filter; always request SSL.
        return filters.gravatar(
            self,
            use_ssl=True,
            size=size
        )
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
def display_full_name(self, node=None):
"""Return the full name , as it would display in a contributor list for a
given node.
NOTE: Unclaimed users may have a different name for different nodes.
"""
if node:
unclaimed_data = self.unclaimed_records.get(str(node._id), None)
if unclaimed_data:
return unclaimed_data['name']
return self.fullname
def add_system_tag(self, tag):
if not isinstance(tag, Tag):
tag_instance, created = Tag.all_tags.get_or_create(name=tag.lower(), system=True)
else:
tag_instance = tag
if not tag_instance.system:
raise ValueError('Non-system tag passed to add_system_tag')
if not self.all_tags.filter(id=tag_instance.id).exists():
self.tags.add(tag_instance)
return tag_instance
    def get_recently_added(self):
        """Yield contributors this user recently added, newest first."""
        return (
            each.contributor
            for each in self.recentlyaddedcontributor_set.order_by('-date_added')
        )
    def _projects_in_common_query(self, other_user):
        # QuerySet of non-deleted, non-collection nodes that both this user
        # and ``other_user`` contribute to.
        return (self.nodes
                .filter(is_deleted=False)
                .exclude(type='osf.collection')
                .filter(_contributors=other_user)
                .distinct())
def get_projects_in_common(self, other_user):
"""Returns either a collection of "shared projects" (projects that both users are contributors for)
or just their primary keys
"""
query = self._projects_in_common_query(other_user)
return set(query.all())
    def n_projects_in_common(self, other_user):
        """Returns number of "shared projects" (projects that both users are contributors for)

        :param other_user: the user to intersect contributorships with.
        """
        return self._projects_in_common_query(other_user).count()
def add_unclaimed_record(self, node, referrer, given_name, email=None):
"""Add a new project entry in the unclaimed records dictionary.
:param Node node: Node this unclaimed user was added to.
:param User referrer: User who referred this user.
:param str given_name: The full name that the referrer gave for this user.
:param str email: The given email address.
:returns: The added record
"""
if not node.can_edit(user=referrer):
raise PermissionsError(
'Referrer does not have permission to add a contributor to project {0}'.format(node._primary_key)
)
project_id = str(node._id)
referrer_id = str(referrer._id)
if email:
clean_email = email.lower().strip()
else:
clean_email = None
verification_key = generate_verification_key(verification_type='claim')
try:
record = self.unclaimed_records[node._id]
except KeyError:
record = None
if record:
del record
record = {
'name': given_name,
'referrer_id': referrer_id,
'token': verification_key['token'],
'expires': verification_key['expires'],
'email': clean_email,
}
self.unclaimed_records[project_id] = record
return record
def get_unclaimed_record(self, project_id):
"""Get an unclaimed record for a given project_id.
:raises: ValueError if there is no record for the given project.
"""
try:
return self.unclaimed_records[project_id]
except KeyError: # reraise as ValueError
raise ValueError('No unclaimed record for user {self._id} on node {project_id}'
.format(**locals()))
def get_claim_url(self, project_id, external=False):
"""Return the URL that an unclaimed user should use to claim their
account. Return ``None`` if there is no unclaimed_record for the given
project ID.
:param project_id: The project ID for the unclaimed record
:raises: ValueError if a record doesn't exist for the given project ID
:rtype: dict
:returns: The unclaimed record for the project
"""
uid = self._primary_key
base_url = website_settings.DOMAIN if external else '/'
unclaimed_record = self.get_unclaimed_record(project_id)
token = unclaimed_record['token']
return '{base_url}user/{uid}/{project_id}/claim/?token={token}'\
.format(**locals())
    def is_affiliated_with_institution(self, institution):
        """Return if this user is affiliated with ``institution``.

        :rtype: bool
        """
        return self.affiliated_institutions.filter(id=institution.id).exists()
def update_affiliated_institutions_by_email_domain(self):
"""
Append affiliated_institutions by email domain.
:return:
"""
try:
email_domains = [email.split('@')[1].lower() for email in self.emails.values_list('address', flat=True)]
insts = Institution.objects.filter(email_domains__overlap=email_domains)
if insts.exists():
self.affiliated_institutions.add(*insts)
except IndexError:
pass
def remove_institution(self, inst_id):
try:
inst = self.affiliated_institutions.get(_id=inst_id)
except Institution.DoesNotExist:
return False
else:
self.affiliated_institutions.remove(inst)
return True
    def get_activity_points(self):
        """Return this user's total activity count from the analytics store."""
        return analytics.get_total_activity_count(self._id)
    def get_or_create_cookie(self, secret=None):
        """Find the cookie for the given user
        Create a new session if no cookie is found

        :param str secret: The key to sign the cookie with
        :returns: The signed cookie
        """
        secret = secret or settings.SECRET_KEY
        # Reuse the most recently modified session for this user, if any.
        user_session = Session.objects.filter(
            data__auth_user_id=self._id
        ).order_by(
            '-date_modified'
        ).first()

        if not user_session:
            user_session = Session(data={
                'auth_user_id': self._id,
                'auth_user_username': self.username,
                'auth_user_fullname': self.fullname,
            })
            user_session.save()

        # Sign the session id so it can be verified in ``from_cookie``.
        signer = itsdangerous.Signer(secret)
        return signer.sign(user_session._id)
    @classmethod
    def from_cookie(cls, cookie, secret=None):
        """Attempt to load a user from their signed cookie

        :returns: None if a user cannot be loaded else User
        """
        if not cookie:
            return None

        secret = secret or settings.SECRET_KEY

        try:
            token = itsdangerous.Signer(secret).unsign(cookie)
        except itsdangerous.BadSignature:
            # Signature mismatch: tampered cookie or wrong secret.
            return None

        user_session = Session.load(token)

        if user_session is None:
            return None

        return cls.load(user_session.data.get('auth_user_id'))
def get_node_comment_timestamps(self, target_id):
""" Returns the timestamp for when comments were last viewed on a node, file or wiki.
"""
default_timestamp = dt.datetime(1970, 1, 1, 12, 0, 0, tzinfo=pytz.utc)
return self.comments_viewed_timestamp.get(target_id, default_timestamp)
    class Meta:
        # custom permissions for use in the OSF Admin App
        # (Django's default add/change/delete permissions are still generated.)
        permissions = (
            ('view_osfuser', 'Can view user details'),
        )
@receiver(post_save, sender=OSFUser)
def create_bookmark_collection(sender, instance, created, **kwargs):
    """post_save hook: give every newly created user a bookmark collection."""
    if created:
        new_bookmark_collection(instance)
@receiver(post_save, sender=OSFUser)
def create_quickfiles_project(sender, instance, created, **kwargs):
    """post_save hook: give every newly created user a QuickFiles node."""
    # Imported here to avoid a circular import at module load time.
    from osf.models.quickfiles import QuickFilesNode

    if created:
        QuickFilesNode.objects.create_for_user(instance)
| 38.717552
| 147
| 0.638337
|
acfb788724c912f4feabbd0baa716e5fb780fba6
| 765
|
py
|
Python
|
modules/sequence_modeling.py
|
khoroo/deep-text-recognition-benchmark
|
6089c4035c5b8136c2f055126e5dd43a121501d9
|
[
"Apache-2.0"
] | 4
|
2021-12-27T14:37:33.000Z
|
2022-03-30T10:56:57.000Z
|
deep-text-recognition-benchmark/modules/sequence_modeling.py
|
JasonHippo/Scene_text_detection_and_recognition
|
c0da141d71b7b888d560296b201aecbbd735b565
|
[
"MIT"
] | null | null | null |
deep-text-recognition-benchmark/modules/sequence_modeling.py
|
JasonHippo/Scene_text_detection_and_recognition
|
c0da141d71b7b888d560296b201aecbbd735b565
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
class BidirectionalLSTM(nn.Module):
    """One bidirectional LSTM layer followed by a linear projection.

    Maps a visual feature sequence of shape [batch, T, input_size] to a
    contextual feature sequence of shape [batch, T, output_size].
    """

    def __init__(self, input_size, hidden_size, output_size):
        super(BidirectionalLSTM, self).__init__()
        self.rnn = nn.LSTM(input_size, hidden_size, bidirectional=True, batch_first=True)
        # Both directions are concatenated, hence 2 * hidden_size inputs.
        self.linear = nn.Linear(hidden_size * 2, output_size)

    def forward(self, input):
        """
        input : visual feature [batch_size x T x input_size]
        output : contextual feature [batch_size x T x output_size]
        """
        # Compact the (possibly fragmented) weight tensors for cuDNN.
        self.rnn.flatten_parameters()
        contextual, _ = self.rnn(input)   # [B, T, 2 * hidden_size]
        return self.linear(contextual)    # [B, T, output_size]
| 38.25
| 106
| 0.65098
|
acfb78ca8057da4333e6f4df3690c3f8fad0d10a
| 165
|
py
|
Python
|
contact_angle/__init__.py
|
tcmoore3/contact_angle
|
b6d20d69158f5ec3ed35163e4bb8da849e4d1f29
|
[
"MIT"
] | 3
|
2015-11-07T03:54:36.000Z
|
2020-05-26T09:40:47.000Z
|
contact_angle/__init__.py
|
tcmoore3/contact_angle
|
b6d20d69158f5ec3ed35163e4bb8da849e4d1f29
|
[
"MIT"
] | 3
|
2015-11-07T01:01:45.000Z
|
2019-08-01T13:19:42.000Z
|
contact_angle/__init__.py
|
tcmoore3/contact_angle
|
b6d20d69158f5ec3ed35163e4bb8da849e4d1f29
|
[
"MIT"
] | 1
|
2018-10-23T18:05:48.000Z
|
2018-10-23T18:05:48.000Z
|
from contact_angle.core import calc_contact_angle
from contact_angle.core import print_contact_angle_results
from contact_angle.core import print_contact_angle_fits
| 41.25
| 58
| 0.909091
|
acfb7972e4e7f97fb923fd390212a9eae022a9bb
| 2,783
|
py
|
Python
|
webstore/users/views.py
|
dmusial98/WebStorePython
|
ed98764a40dd82db2b57e030ff9bf0bc777075a7
|
[
"Unlicense"
] | null | null | null |
webstore/users/views.py
|
dmusial98/WebStorePython
|
ed98764a40dd82db2b57e030ff9bf0bc777075a7
|
[
"Unlicense"
] | null | null | null |
webstore/users/views.py
|
dmusial98/WebStorePython
|
ed98764a40dd82db2b57e030ff9bf0bc777075a7
|
[
"Unlicense"
] | null | null | null |
from cgitb import lookup
from urllib import request
from .models import User
from . import serializers
from rest_framework.permissions import AllowAny
# from . import permissions
from rest_framework import generics, status
from rest_framework.response import Response
from django_filters.rest_framework import DjangoFilterBackend
class UserListView(generics.ListAPIView):
    """Read-only list of all users, filterable, ordered by name."""
    queryset = User.objects.all()
    serializer_class = serializers.UserSerializer
    filter_backends = [DjangoFilterBackend]
    ordering = ['name']

    def get_permissions(self):
        # Listing users is open to everyone.
        return [permission() for permission in [AllowAny]]
class UserCreateView(generics.CreateAPIView):
    """Create a new user, wrapping the result in a uniform response envelope."""
    queryset = User.objects.all()
    serializer_class = serializers.UserSerializer

    def get_permissions(self):
        # Registration must be open to anonymous users.
        permission_classes = [AllowAny]
        return [permission() for permission in permission_classes]

    def create(self, request, *args, **kwargs):
        # Bug fix: the parent expects *args/**kwargs unpacked; the previous
        # code passed the tuple and dict as two positional arguments,
        # corrupting the call signature of super().create().
        super(UserCreateView, self).create(request, *args, **kwargs)
        response = {"status_code": status.HTTP_200_OK,
                    "message": "Successfully created",
                    "result": request.data}
        return Response(response)
class UserDetailView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a single user addressed by ``name``, wrapping
    each result in a uniform response envelope."""
    serializer_class = serializers.UserSerializer
    lookup_field = 'name'

    def get_queryset(self):
        # Restrict the queryset to the user named in the URL.
        name = self.kwargs['name']
        return User.objects.filter(name=name)

    def get_permissions(self):
        permission_classes = [AllowAny]
        return [permission() for permission in permission_classes]

    def _envelope(self, message, instance=None):
        """Build the common {status_code, message[, result]} payload."""
        payload = {"status_code": status.HTTP_200_OK,
                   "message": message}
        if instance is not None:
            payload["result"] = self.get_serializer(instance).data
        return Response(payload)

    def retrieve(self, request, *args, **kwargs):
        # Bug fix (here and below): forward *args/**kwargs unpacked — the
        # previous code passed the tuple and dict positionally to super().
        super(UserDetailView, self).retrieve(request, *args, **kwargs)
        return self._envelope("Successfully retrieved", self.get_object())

    def patch(self, request, *args, **kwargs):
        super(UserDetailView, self).patch(request, *args, **kwargs)
        # get_object() re-fetches, so the serialized result reflects the update.
        return self._envelope("Successfully updated", self.get_object())

    def delete(self, request, *args, **kwargs):
        super(UserDetailView, self).delete(request, *args, **kwargs)
        return self._envelope("Successfully deleted")
| 37.608108
| 67
| 0.674093
|
acfb7bb5a80c0a32ce47ca1a9e0ff97bdee2db7e
| 24,472
|
py
|
Python
|
third_party/chromite/scripts/cros_gdb.py
|
zipated/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
third_party/chromite/scripts/cros_gdb.py
|
cangulcan/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
third_party/chromite/scripts/cros_gdb.py
|
cangulcan/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper for running gdb.
This handles the fun details like running against the right sysroot, via
qemu, bind mounts, etc...
"""
from __future__ import print_function
import argparse
import contextlib
import errno
import os
import pipes
import sys
import tempfile
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import namespaces
from chromite.lib import osutils
from chromite.lib import qemu
from chromite.lib import remote_access
from chromite.lib import retry_util
from chromite.lib import toolchain
# Exception hierarchy: every cros_gdb failure derives from GdbException so
# callers can catch the whole family with a single except clause.
class GdbException(Exception):
  """Base exception for this module."""


class GdbBadRemoteDeviceError(GdbException):
  """Raised when remote device does not exist or is not responding."""


class GdbMissingSysrootError(GdbException):
  """Raised when path to sysroot cannot be found in chroot."""


class GdbMissingInferiorError(GdbException):
  """Raised when the binary to be debugged cannot be found."""


class GdbMissingDebuggerError(GdbException):
  """Raised when cannot find correct version of debugger."""


class GdbCannotFindRemoteProcessError(GdbException):
  """Raised when cannot find requested executing process on remote device."""


class GdbUnableToStartGdbserverError(GdbException):
  """Raised when error occurs trying to start gdbserver on remote device."""


class GdbTooManyPidsError(GdbException):
  """Raised when more than one matching pid is found running on device."""


class GdbEarlyExitError(GdbException):
  """Raised when user requests to exit early."""


class GdbCannotDetectBoardError(GdbException):
  """Raised when board isn't specified and can't be automatically determined."""


class GdbSimpleChromeBinaryError(GdbException):
  """Raised when none or multiple chrome binaries are under out_${board} dir."""
class BoardSpecificGdb(object):
"""Framework for running gdb."""
_BIND_MOUNT_PATHS = ('dev', 'dev/pts', 'proc', 'mnt/host/source', 'sys')
_GDB = '/usr/bin/gdb'
_EXTRA_SSH_SETTINGS = {
'CheckHostIP': 'no',
'BatchMode': 'yes',
'LogLevel': 'QUIET'
}
_MISSING_DEBUG_INFO_MSG = """
%(inf_cmd)s is stripped and %(debug_file)s does not exist on your local machine.
The debug symbols for that package may not be installed. To install the debug
symbols for %(package)s only, run:
cros_install_debug_syms --board=%(board)s %(package)s
To install the debug symbols for all available packages, run:
cros_install_debug_syms --board=%(board)s --all"""
  def __init__(self, board, gdb_args, inf_cmd, inf_args, remote, pid,
               remote_process_name, cgdb_flag, ping, binary):
    """Initialize debugger state; no external work happens here.

    Args:
      board: board the inferior was built for (may be None; detected later).
      gdb_args: extra arguments to pass through to gdb.
      inf_cmd: path of the inferior (the program being debugged).
      inf_args: arguments for the inferior.
      remote: remote device object, or None for local debugging.
      pid: pid of the remote process to attach to, if any.
      remote_process_name: name of the remote process to find, if any.
      cgdb_flag: whether to front gdb with cgdb.
      ping: whether to ping the remote device before connecting.
      binary: explicit path of the binary to debug (simple-chrome mode).
    """
    self.board = board
    self.sysroot = None
    self.prompt = '(gdb) '
    self.inf_cmd = inf_cmd
    self.run_as_root = False
    self.gdb_args = gdb_args
    self.inf_args = inf_args
    self.remote = remote.hostname if remote else None
    self.pid = pid
    self.remote_process_name = remote_process_name
    # Port used for sending ssh commands to DUT.
    self.remote_port = remote.port if remote else None
    # Port for communicating between gdb & gdbserver.
    self.gdbserver_port = remote_access.GetUnusedPort()
    self.ssh_settings = remote_access.CompileSSHConnectSettings(
        **self._EXTRA_SSH_SETTINGS)
    self.cgdb = cgdb_flag
    self.framework = 'auto'
    self.qemu = None
    self.device = None
    self.cross_gdb = None
    self.ping = ping
    self.binary = binary
    self.in_chroot = None
    self.chrome_path = None
    self.sdk_path = None
def IsInChroot(self):
"""Decide whether we are in chroot or chrome-sdk."""
return os.path.exists("/mnt/host/source/chromite/")
def SimpleChromeGdb(self):
"""Get the name of the cross gdb based on board name."""
bin_path = self.board + '+' + os.environ['SDK_VERSION'] + '+' + \
'target_toolchain'
bin_path = os.path.join(self.sdk_path, bin_path, 'bin')
for f in os.listdir(bin_path):
if f.endswith('gdb'):
return os.path.join(bin_path, f)
raise GdbMissingDebuggerError('Cannot find cros gdb for %s.'
% self.board)
def SimpleChromeSysroot(self):
"""Get the sysroot in simple chrome."""
sysroot = self.board + '+' + os.environ['SDK_VERSION'] + \
'+' + 'sysroot_chromeos-base_chromeos-chrome.tar.xz'
sysroot = os.path.join(self.sdk_path, sysroot)
if not os.path.isdir(sysroot):
raise GdbMissingSysrootError('Cannot find sysroot for %s at.'
' %s' % self.board, sysroot)
return sysroot
def GetSimpleChromeBinary(self):
"""Get path to the binary in simple chrome."""
if self.binary:
return self.binary
output_dir = os.path.join(self.chrome_path, 'src',
'out_{}'.format(self.board))
target_binary = None
binary_name = os.path.basename(self.inf_cmd)
for root, _, files in os.walk(output_dir):
for f in files:
if f == binary_name:
if target_binary == None:
target_binary = os.path.join(root, f)
else:
raise GdbSimpleChromeBinaryError(
'There are multiple %s under %s. Please specify the path to '
'the binary via --binary'% binary_name, output_dir)
if target_binary == None:
raise GdbSimpleChromeBinaryError('There is no %s under %s.'
% binary_name, output_dir)
return target_binary
  def VerifyAndFinishInitialization(self, device):
    """Verify files/processes exist and flags are correct.

    Resolves board/sysroot/cross-gdb (chroot vs simple-chrome), locates the
    remote process if requested, validates the inferior and its debug info,
    and selects the qemu/ldso execution framework.
    """
    if not self.board:
      if self.remote:
        self.board = cros_build_lib.GetBoard(device_board=device.board,
                                             override_board=self.board)
      else:
        # NOTE(review): message has a typo ("the with") — fix upstream.
        raise GdbCannotDetectBoardError('Cannot determine which board to use. '
                                        'Please specify the with --board flag.')

    self.in_chroot = self.IsInChroot()
    self.prompt = '(%s-gdb) ' % self.board
    if self.in_chroot:
      self.sysroot = cros_build_lib.GetSysroot(board=self.board)
      self.inf_cmd = self.RemoveSysrootPrefix(self.inf_cmd)
      self.cross_gdb = self.GetCrossGdb()
    else:
      # Simple-chrome flow: everything comes from the .cros_cache SDK.
      self.chrome_path = os.path.realpath(os.path.join(os.path.dirname(
          os.path.realpath(__file__)), "../../../.."))
      self.sdk_path = os.path.join(self.chrome_path,
                                   '.cros_cache/chrome-sdk/tarballs/')
      self.sysroot = self.SimpleChromeSysroot()
      self.cross_gdb = self.SimpleChromeGdb()

    if self.remote:
      # If given remote process name, find pid & inf_cmd on remote device.
      if self.remote_process_name or self.pid:
        self._FindRemoteProcess(device)

      # Verify that sysroot is valid (exists).
      if not os.path.isdir(self.sysroot):
        raise GdbMissingSysrootError('Sysroot does not exist: %s' %
                                     self.sysroot)

    self.device = device
    if not self.in_chroot:
      return

    sysroot_inf_cmd = ''
    if self.inf_cmd:
      sysroot_inf_cmd = os.path.join(self.sysroot,
                                     self.inf_cmd.lstrip('/'))

    # Verify that inf_cmd, if given, exists.
    if sysroot_inf_cmd and not os.path.exists(sysroot_inf_cmd):
      raise GdbMissingInferiorError('Cannot find file %s (in sysroot).' %
                                    sysroot_inf_cmd)

    # Check to see if inf_cmd is stripped, and if so, check to see if debug file
    # exists. If not, tell user and give them the option of quitting & getting
    # the debug info.
    if sysroot_inf_cmd:
      stripped_info = cros_build_lib.RunCommand(['file', sysroot_inf_cmd],
                                                capture_output=True).output
      if not ' not stripped' in stripped_info:
        debug_file = os.path.join(self.sysroot, 'usr/lib/debug',
                                  self.inf_cmd.lstrip('/'))
        debug_file += '.debug'
        if not os.path.exists(debug_file):
          # Ask equery which package owns the binary so we can tell the user
          # exactly what to install, then let them bail out.
          equery = 'equery-%s' % self.board
          package = cros_build_lib.RunCommand([equery, '-q', 'b',
                                               self.inf_cmd],
                                              capture_output=True).output
          logging.info(self._MISSING_DEBUG_INFO_MSG % {
              'board': self.board,
              'inf_cmd': self.inf_cmd,
              'package': package,
              'debug_file': debug_file})
          answer = cros_build_lib.BooleanPrompt()
          if not answer:
            raise GdbEarlyExitError('Exiting early, at user request.')

    # Set up qemu, if appropriate.
    qemu_arch = qemu.Qemu.DetectArch(self._GDB, self.sysroot)
    if qemu_arch is None:
      self.framework = 'ldso'
    else:
      self.framework = 'qemu'
      self.qemu = qemu.Qemu(self.sysroot, arch=qemu_arch)

    if self.remote:
      # Verify cgdb flag info.
      if self.cgdb:
        if osutils.Which('cgdb') is None:
          raise GdbMissingDebuggerError('Cannot find cgdb. Please install '
                                        'cgdb first.')
def RemoveSysrootPrefix(self, path):
"""Returns the given path with any sysroot prefix removed."""
# If the sysroot is /, then the paths are already normalized.
if self.sysroot != '/' and path.startswith(self.sysroot):
path = path.replace(self.sysroot, '', 1)
return path
  @staticmethod
  def GetNonRootAccount():
    """Return details about the non-root account we want to use.

    Falls back to the conventional "nobody" account (uid/gid 65534) when
    not running under sudo.

    Returns:
      A tuple of (username, uid, gid, home).
    """
    return (
        os.environ.get('SUDO_USER', 'nobody'),
        int(os.environ.get('SUDO_UID', '65534')),
        int(os.environ.get('SUDO_GID', '65534')),
        # Should we find a better home?
        '/tmp/portage',
    )
  @staticmethod
  @contextlib.contextmanager
  def LockDb(db):
    """Lock an account database.

    We use the same algorithm as shadow/user.eclass. This way we don't race
    and corrupt things in parallel.
    """
    lock = '%s.lock' % db
    _, tmplock = tempfile.mkstemp(prefix='%s.platform.' % lock)

    # First try forever to grab the lock.
    retry = lambda e: e.errno == errno.EEXIST
    # Retry quickly at first, but slow down over time.
    try:
      # os.link fails with EEXIST while another holder owns the lock; winning
      # the link race means we own it.
      retry_util.GenericRetry(retry, 60, os.link, tmplock, lock, sleep=0.1)
    except Exception as e:
      raise Exception('Could not grab lock %s. %s' % (lock, e))

    # Yield while holding the lock, but try to clean it no matter what.
    try:
      os.unlink(tmplock)
      yield lock
    finally:
      os.unlink(lock)
  def SetupUser(self):
    """Propogate the user name<->id mapping from outside the chroot.

    Some unittests use getpwnam($USER), as does bash. If the account
    is not registered in the sysroot, they get back errors.
    """
    MAGIC_GECOS = 'Added by your friendly platform test helper; do not modify'
    # This is kept in sync with what sdk_lib/make_chroot.sh generates.
    SDK_GECOS = 'ChromeOS Developer'

    user, uid, gid, home = self.GetNonRootAccount()
    if user == 'nobody':
      return

    passwd_db = os.path.join(self.sysroot, 'etc', 'passwd')
    with self.LockDb(passwd_db):
      data = osutils.ReadFile(passwd_db)
      accts = data.splitlines()
      for acct in accts:
        passwd = acct.split(':')
        if passwd[0] == user:
          # Did the sdk make this account?
          if passwd[4] == SDK_GECOS:
            # Don't modify it (see below) since we didn't create it.
            return

          # Did we make this account?
          if passwd[4] != MAGIC_GECOS:
            raise RuntimeError('your passwd db (%s) has unmanaged acct %s' %
                               (passwd_db, user))

          # Maybe we should see if it needs to be updated? Like if they
          # changed UIDs? But we don't really check that elsewhere ...
          return

      # No entry found: append one, tagged with MAGIC_GECOS so we can
      # recognize it as ours on the next run.
      acct = '%(name)s:x:%(uid)s:%(gid)s:%(gecos)s:%(homedir)s:%(shell)s' % {
          'name': user,
          'uid': uid,
          'gid': gid,
          'gecos': MAGIC_GECOS,
          'homedir': home,
          'shell': '/bin/bash',
      }
      with open(passwd_db, 'a') as f:
        if data[-1] != '\n':
          f.write('\n')
        f.write('%s\n' % acct)
def _FindRemoteProcess(self, device):
    """Find a named process (or a pid) running on a remote device.

    Resolves ``self.remote_process_name`` to ``self.pid`` when needed, then
    fills ``self.inf_cmd`` with the executable path behind that pid.

    Args:
        device: Remote device handle providing GetRunningPids/RunCommand.

    Raises:
        GdbTooManyPidsError: If the process name matches more than one pid.
        GdbCannotFindRemoteProcessError: If no pid can be found, or the pid
            does not map to a running executable.
    """
    if not self.remote_process_name and not self.pid:
        return

    if self.remote_process_name:
        # Look for a process with the specified name on the remote device; if
        # found, get its pid.
        pname = self.remote_process_name
        if pname == 'browser':
            # The main browser is the chrome binary that is neither the
            # sandbox helper nor a 'type=...' child (renderer, gpu, ...).
            all_chrome_pids = set(device.GetRunningPids(
                '/opt/google/chrome/chrome'))
            sandbox_pids = set(device.GetRunningPids(
                '/opt/google/chrome/chrome-sandbox'))
            non_main_chrome_pids = set(device.GetRunningPids('type='))
            pids = list(all_chrome_pids - sandbox_pids - non_main_chrome_pids)
        elif pname == 'renderer' or pname == 'gpu-process':
            # Chrome children advertise their role via a 'type=' flag.
            pids = device.GetRunningPids('type=%s' % pname)
        else:
            pids = device.GetRunningPids(pname)

        if pids:
            if len(pids) == 1:
                self.pid = pids[0]
            else:
                raise GdbTooManyPidsError('Multiple pids found for %s process: %s. '
                                          'You must specify the correct pid.'
                                          % (pname, repr(pids)))
        else:
            raise GdbCannotFindRemoteProcessError('Cannot find pid for "%s" on %s' %
                                                  (pname, self.remote))

    # Find full path for process, from pid (and verify pid).
    command = [
        'readlink',
        '-e', '/proc/%s/exe' % self.pid,
    ]
    try:
        res = device.RunCommand(command, capture_output=True)
        if res.returncode == 0:
            self.inf_cmd = res.output.rstrip('\n')
    except cros_build_lib.RunCommandError:
        raise GdbCannotFindRemoteProcessError('Unable to find name of process '
                                              'with pid %s on %s' %
                                              (self.pid, self.remote))
def GetCrossGdb(self):
    """Find the appropriate cross-version of gdb for the board.

    Returns:
        The name of the cross gdb binary (e.g. '<tuple>-gdb').

    Raises:
        GdbMissingDebuggerError: If the cross gdb is not on $PATH.
    """
    board_toolchains = toolchain.GetToolchainsForBoard(self.board)
    default_tc = toolchain.FilterToolchains(board_toolchains, 'default', True).keys()
    cross_gdb = '%s-gdb' % default_tc[0]
    if osutils.Which(cross_gdb):
        return cross_gdb
    raise GdbMissingDebuggerError('Cannot find %s; do you need to run '
                                  'setup_board?' % cross_gdb)
def GetGdbInitCommands(self, inferior_cmd, device=None):
    """Generate list of commands with which to initialize the gdb session.

    Args:
        inferior_cmd: Path of the program to debug ('' when attaching only).
        device: Optional remote device handle; when given, the commands set up
            a gdbserver connection tunneled over ssh.

    Returns:
        A list of gdb command strings (fed via --eval-command).
    """
    gdb_init_commands = []

    if self.remote:
        sysroot_var = self.sysroot
    else:
        sysroot_var = '/'

    gdb_init_commands = [
        'set sysroot %s' % sysroot_var,
        'set prompt %s' % self.prompt,
    ]
    if self.in_chroot:
        gdb_init_commands += [
            'set solib-absolute-prefix %s' % sysroot_var,
            'set solib-search-path %s' % sysroot_var,
            'set debug-file-directory %s/usr/lib/debug' % sysroot_var,
        ]

    if device:
        # Tunnel gdbserver's stdio over the ssh connection.
        ssh_cmd = device.GetAgent().GetSSHCommand(self.ssh_settings)
        ssh_cmd.extend(['--', 'gdbserver'])

        if self.pid:
            # Attach gdbserver to an already-running pid.
            ssh_cmd.extend(['--attach', 'stdio', str(self.pid)])
            target_type = 'remote'
        elif inferior_cmd:
            # Launch the inferior under gdbserver.
            # NOTE(review): '-' looks suspicious -- the branch above uses '--';
            # confirm against the gdbserver invocation syntax.
            ssh_cmd.extend(['-', inferior_cmd])
            ssh_cmd.extend(self.inf_args)
            target_type = 'remote'
        else:
            # No program yet: start a multi-process server and use
            # extended-remote so the user can pick a target later.
            ssh_cmd.extend(['--multi', 'stdio'])
            target_type = 'extended-remote'

        ssh_cmd = ' '.join(map(pipes.quote, ssh_cmd))

        if self.in_chroot:
            if inferior_cmd:
                gdb_init_commands.append(
                    'file %s' % os.path.join(sysroot_var,
                                             inferior_cmd.lstrip(os.sep)))
            else:
                gdb_init_commands.append('file %s' % self.GetSimpleChromeBinary())
        # 'target ... | cmd' makes gdb talk to gdbserver through the pipe.
        gdb_init_commands.append('target %s | %s' % (target_type, ssh_cmd))
    else:
        if inferior_cmd:
            gdb_init_commands.append('file %s ' % inferior_cmd)
            gdb_init_commands.append('set args %s' % ' '.join(self.inf_args))

    return gdb_init_commands
def RunRemote(self):
    """Handle remote debugging, via gdbserver & cross debugger.

    Connects to the remote device, builds the gdb init commands, then
    replaces the current process with (c)gdb.  Does not return on success.

    Raises:
        GdbBadRemoteDeviceError: If the remote device does not answer pings.
    """
    device = None
    try:
        device = remote_access.ChromiumOSDeviceHandler(
            self.remote,
            port=self.remote_port,
            connect_settings=self.ssh_settings,
            ping=self.ping).device
    except remote_access.DeviceNotPingableError:
        raise GdbBadRemoteDeviceError('Remote device %s is not responding to '
                                      'ping.' % self.remote)

    self.VerifyAndFinishInitialization(device)
    gdb_cmd = self.cross_gdb

    gdb_commands = self.GetGdbInitCommands(self.inf_cmd, device)
    gdb_args = ['--quiet'] + ['--eval-command=%s' % x for x in gdb_commands]
    gdb_args += self.gdb_args

    if self.cgdb:
        # cgdb wraps the cross gdb: `cgdb -d <gdb> -- <gdb args>`.
        gdb_args = ['-d', gdb_cmd, '--'] + gdb_args
        gdb_cmd = 'cgdb'

    logging.debug('Running: %s', [gdb_cmd] + gdb_args)
    os.chdir(self.sysroot)
    # execvp replaces this process; sys.exit only matters if exec fails.
    sys.exit(os.execvp(gdb_cmd, gdb_args))
def Run(self):
    """Runs the debugger in a proper environment (e.g. qemu).

    Sets up the sysroot (user account, qemu, bind mounts), chroots into it,
    optionally drops root privileges, and execs gdb.  Does not return.
    """
    self.VerifyAndFinishInitialization(None)
    self.SetupUser()
    if self.framework == 'qemu':
        self.qemu.Install(self.sysroot)
        self.qemu.RegisterBinfmt()

    # Bind-mount host paths the debugged program expects (e.g. /proc, /dev).
    for mount in self._BIND_MOUNT_PATHS:
        path = os.path.join(self.sysroot, mount)
        osutils.SafeMakedirs(path)
        osutils.Mount('/' + mount, path, 'none', osutils.MS_BIND)

    gdb_cmd = self._GDB
    inferior_cmd = self.inf_cmd

    # NOTE(review): gdb_argv is computed but never used below (gdb_args is
    # built from self.gdb_args directly) -- looks like dead code; confirm.
    gdb_argv = self.gdb_args[:]
    if gdb_argv:
        gdb_argv[0] = self.RemoveSysrootPrefix(gdb_argv[0])
    # Some programs expect to find data files via $CWD, so doing a chroot
    # and dropping them into / would make them fail.
    cwd = self.RemoveSysrootPrefix(os.getcwd())

    os.chroot(self.sysroot)
    os.chdir(cwd)
    # The TERM the user is leveraging might not exist in the sysroot.
    # Force a sane default that supports standard color sequences.
    os.environ['TERM'] = 'ansi'
    # Some progs want this like bash else they get super confused.
    os.environ['PWD'] = cwd
    if not self.run_as_root:
        # Drop privileges: group first, then user (order matters).
        _, uid, gid, home = self.GetNonRootAccount()
        os.setgid(gid)
        os.setuid(uid)
        os.environ['HOME'] = home

    gdb_commands = self.GetGdbInitCommands(inferior_cmd)

    gdb_args = [gdb_cmd, '--quiet'] + ['--eval-command=%s' % x
                                       for x in gdb_commands]
    gdb_args += self.gdb_args

    # execvp replaces this process; sys.exit only matters if exec fails.
    sys.exit(os.execvp(gdb_cmd, gdb_args))
def _ReExecuteIfNeeded(argv, ns_net=False, ns_pid=False):
    """Re-execute gdb as root.

    We often need to do things as root, so make sure we're that.  Like chroot
    for proper library environment or do bind mounts.

    Also unshare the mount namespace so as to ensure that doing bind mounts
    for tests don't leak out to the normal chroot.  Also unshare the UTS
    namespace so changes to `hostname` do not impact the host.

    Args:
        argv: Original command line to re-run under sudo if not yet root.
        ns_net: Whether to also unshare the network namespace.
        ns_pid: Whether to also unshare the pid namespace.
    """
    if os.geteuid() == 0:
        # Already root: just detach the namespaces and carry on.
        namespaces.SimpleUnshare(net=ns_net, pid=ns_pid)
        return
    # Not root: replace ourselves with a sudo'd copy (preserving environment).
    sudo_cmd = ['sudo', '-E', '--'] + argv
    os.execvp(sudo_cmd[0], sudo_cmd)
def FindInferior(arg_list):
    """Look for the name of the inferior (to be debugged) in arg list.

    Splits |arg_list| into the single non-flag argument (the program to
    debug) and the remaining flag arguments.

    Args:
        arg_list: List of command-line argument strings.

    Returns:
        A (program_name, flag_list) tuple; program_name is '' when no
        non-flag argument is present.

    Raises:
        RuntimeError: If more than one non-flag argument is found.
    """
    program_name = ''
    new_list = []
    for item in arg_list:
        # startswith instead of item[0] so an empty-string argument cannot
        # raise IndexError.
        if item.startswith('-'):
            new_list.append(item)
        elif not program_name:
            program_name = item
        else:
            raise RuntimeError('Found multiple program names: %s %s'
                               % (program_name, item))
    return program_name, new_list
def main(argv):
    """Parse and sanity-check arguments, then launch the debugger.

    Fixes: help-text typos 'debuged' -> 'debugged' and the example
    '--bianry' -> '--binary' (user-facing strings only; no behavior change).
    """
    parser = commandline.ArgumentParser(description=__doc__)
    parser.add_argument('--board', default=None,
                        help='board to debug for')
    parser.add_argument('-g', '--gdb_args', action='append', default=[],
                        help='Arguments to gdb itself. If multiple arguments are'
                        ' passed, each argument needs a separate \'-g\' flag.')
    parser.add_argument(
        '--remote', default=None,
        type=commandline.DeviceParser(commandline.DEVICE_SCHEME_SSH),
        help='Remote device on which to run the binary. Use'
        ' "--remote=localhost:9222" to debug in a ChromeOS image in an'
        ' already running local virtual machine.')
    parser.add_argument('--pid', default='',
                        help='Process ID of the (already) running process on the'
                        ' remote device to which to attach.')
    parser.add_argument('--remote_pid', dest='pid', default='',
                        help='Deprecated alias for --pid.')
    parser.add_argument('--no-ping', dest='ping', default=True,
                        action='store_false',
                        help='Do not ping remote before attempting to connect.')
    parser.add_argument('--attach', dest='attach_name', default='',
                        help='Name of existing process to which to attach, on'
                        ' remote device (remote debugging only). "--attach'
                        ' browser" will find the main chrome browser process;'
                        ' "--attach renderer" will find a chrome renderer'
                        ' process; "--attach gpu-process" will find the chrome'
                        ' gpu process.')
    parser.add_argument('--cgdb', default=False,
                        action='store_true',
                        help='Use cgdb curses interface rather than plain gdb.'
                        'This option is only valid for remote debugging.')
    parser.add_argument('inf_args', nargs=argparse.REMAINDER,
                        help='Arguments for gdb to pass to the program being'
                        ' debugged. These are positional and must come at the end'
                        ' of the command line. This will not work if attaching'
                        ' to an already running program.')
    parser.add_argument('--binary', default='',
                        help='full path to the binary being debugged.'
                        ' This is only useful for simple chrome.'
                        ' An example is --binary /home/out_falco/chrome.')

    options = parser.parse_args(argv)
    options.Freeze()

    gdb_args = []
    inf_args = []
    inf_cmd = ''

    if options.inf_args:
        # First positional is the program; the rest are its arguments.
        inf_cmd = options.inf_args[0]
        inf_args = options.inf_args[1:]

    if options.gdb_args:
        gdb_args = options.gdb_args

    if inf_cmd:
        # The program must exist inside the board sysroot.
        fname = os.path.join(cros_build_lib.GetSysroot(options.board),
                             inf_cmd.lstrip('/'))
        if not os.path.exists(fname):
            cros_build_lib.Die('Cannot find program %s.' % fname)
    else:
        if inf_args:
            parser.error('Cannot specify arguments without a program.')

    if inf_args and (options.pid or options.attach_name):
        parser.error('Cannot pass arguments to an already'
                     ' running process (--remote-pid or --attach).')

    if options.remote:
        if options.attach_name and options.attach_name == 'browser':
            inf_cmd = '/opt/google/chrome/chrome'
    else:
        # These options only make sense for remote debugging.
        if options.cgdb:
            parser.error('--cgdb option can only be used with remote debugging.')
        if options.pid:
            parser.error('Must specify a remote device (--remote) if you want '
                         'to attach to a remote pid.')
        if options.attach_name:
            parser.error('Must specify remote device (--remote) when using'
                         ' --attach option.')
    if options.binary:
        if not os.path.exists(options.binary):
            parser.error('%s does not exist.' % options.binary)

    # Once we've finished sanity checking args, make sure we're root.
    if not options.remote:
        _ReExecuteIfNeeded([sys.argv[0]] + argv)

    gdb = BoardSpecificGdb(options.board, gdb_args, inf_cmd, inf_args,
                           options.remote, options.pid, options.attach_name,
                           options.cgdb, options.ping, options.binary)

    try:
        if options.remote:
            gdb.RunRemote()
        else:
            gdb.Run()
    except GdbException as e:
        # NOTE(review): no --debug flag is declared above; presumably
        # chromite's commandline.ArgumentParser adds it -- confirm.
        if options.debug:
            raise
        else:
            raise cros_build_lib.Die(str(e))
| 35.988235
| 80
| 0.621118
|
acfb7c8311154848af3401c83ca8258a481dc72b
| 305
|
py
|
Python
|
tbproject/talkback/version.py
|
amjad2000in/Amjad-talkback-lambda
|
18b4cf8b2b6a503783fe6ad9ce1b0b1e1b30c8b4
|
[
"Apache-2.0"
] | null | null | null |
tbproject/talkback/version.py
|
amjad2000in/Amjad-talkback-lambda
|
18b4cf8b2b6a503783fe6ad9ce1b0b1e1b30c8b4
|
[
"Apache-2.0"
] | 2
|
2021-06-02T00:19:10.000Z
|
2021-06-02T00:19:25.000Z
|
tbproject/talkback/version.py
|
amjad2000in/Amjad-talkback-lambda
|
18b4cf8b2b6a503783fe6ad9ce1b0b1e1b30c8b4
|
[
"Apache-2.0"
] | null | null | null |
# https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.0'
| 43.571429
| 94
| 0.757377
|
acfb7ce8c606377051eab5fb8d9154a75d7290d1
| 2,388
|
py
|
Python
|
tests/parsers/safari_cookies.py
|
ir4n6/plaso
|
010f9cbdfc82e21ed6658657fd09a7b44115c464
|
[
"Apache-2.0"
] | null | null | null |
tests/parsers/safari_cookies.py
|
ir4n6/plaso
|
010f9cbdfc82e21ed6658657fd09a7b44115c464
|
[
"Apache-2.0"
] | null | null | null |
tests/parsers/safari_cookies.py
|
ir4n6/plaso
|
010f9cbdfc82e21ed6658657fd09a7b44115c464
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the Safari cookie parser."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import safari_cookies as _ # pylint: disable=unused-import
from plaso.lib import definitions
from plaso.parsers import safari_cookies
from tests import test_lib as shared_test_lib
from tests.parsers import test_lib
class SafariCookieParserTest(test_lib.ParserTestCase):
  """Tests for the Safari cookie parser."""

  @shared_test_lib.skipUnlessHasTestFile(['Cookies.binarycookies'])
  def testParseFile(self):
    """Tests the Parse function on a Safari binary cookies file."""
    parser = safari_cookies.BinaryCookieParser()
    storage_writer = self._ParseFile(
        ['Cookies.binarycookies'], parser)

    # Collect only cookie-entry events; the remainder come from cookie plugins.
    cookie_events = []
    for event in storage_writer.GetEvents():
      if event.data_type == 'safari:cookie:entry':
        cookie_events.append(event)

    # There should be:
    # * 207 events in total
    # * 182 events from the safari cookie parser
    # * 25 events from the cookie plugins
    self.assertEqual(storage_writer.number_of_events, 207)
    self.assertEqual(len(cookie_events), 182)

    # Spot-check a cookie with non-default flags.
    event = cookie_events[3]
    self.assertEqual(event.flags, 5)
    self.assertEqual(event.url, 'accounts.google.com')
    self.assertEqual(event.cookie_name, 'GAPS')

    # Spot-check timestamp and message formatting for a persistent cookie.
    event = cookie_events[48]
    self.CheckTimestamp(event.timestamp, '2013-07-08 20:54:50.000000')
    self.assertEqual(
        event.timestamp_desc, definitions.TIME_DESCRIPTION_CREATION)

    self.assertEqual(event.flags, 0)
    self.assertEqual(event.cookie_name, 'nonsession')
    self.assertEqual(event.path, '/')

    expected_message = '.ebay.com </> (nonsession)'
    expected_short_message = '.ebay.com (nonsession)'
    self._TestGetMessageStrings(
        event, expected_message, expected_short_message)

    # Spot-check a cookie with a long opaque value.
    event = cookie_events[52]
    self.assertEqual(event.cookie_name, 'fpc')
    value = (
        'd=0dTg3Ou32s3MrAJ2iHjFph100Tw3E1HTfDOTly0GfJ2g4W.mXpy54F9fjBFfXMw4YyW'
        'AG2cT2FVSqOvGGi_Y1OPrngmNvpKPPyz5gIUP6x_EQeM7bR3jsrg_F1UXVOgu6JgkFwqO'
        '5uHrv4HiL05qb.85Bl.V__HZI5wpAGOGPz1XHhY5mOMH.g.pkVDLli36W2iuYwA-&v=2')
    self.assertEqual(event.cookie_value, value)
    self.assertEqual(event.path, '/')
    self.assertEqual(event.url, '.www.yahoo.com')
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| 32.27027
| 81
| 0.731156
|
acfb7eefdf587a92bc761d151bd47f6eaeb78390
| 10,747
|
py
|
Python
|
python_modules/dagster/dagster_tests/core_tests/types_tests/test_types.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | 1
|
2019-07-15T17:34:04.000Z
|
2019-07-15T17:34:04.000Z
|
python_modules/dagster/dagster_tests/core_tests/types_tests/test_types.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster/dagster_tests/core_tests/types_tests/test_types.py
|
jake-billings/dagster
|
7a1548a1f246c48189f3d8109e831b744bceb7d4
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from dagster import (
RunConfig,
InProcessExecutorConfig,
DagsterInvariantViolationError,
DagsterTypeCheckError,
Failure,
InputDefinition,
OutputDefinition,
RuntimeType,
TypeCheck,
execute_pipeline,
lambda_solid,
pipeline,
)
from dagster.core.types import Int, Optional, List, PythonObjectType
from dagster.core.types.runtime import resolve_to_runtime_type
class BarObj(object):
    """Plain value object wrapped by the Bar runtime type below."""
    pass
class Bar(PythonObjectType):
    """Runtime type that accepts only BarObj instances."""

    def __init__(self):
        super(Bar, self).__init__(BarObj, description='A bar.')
def test_python_object_type():
    """PythonObjectType accepts its wrapped class and rejects everything else."""
    type_bar = Bar.inst()

    assert type_bar.name == 'Bar'
    assert type_bar.description == 'A bar.'
    assert_type_check(type_bar.type_check(BarObj()))

    with pytest.raises(Failure):
        assert type_bar.type_check(None)

    with pytest.raises(Failure):
        type_bar.type_check('not_a_bar')
def test_nullable_python_object_type():
    """Optional[Bar] additionally accepts None."""
    nullable_type_bar = resolve_to_runtime_type(Optional[Bar])

    assert_type_check(nullable_type_bar.type_check(BarObj()))
    assert_type_check(nullable_type_bar.type_check(None))

    with pytest.raises(Failure):
        nullable_type_bar.type_check('not_a_bar')
def test_nullable_int_coercion():
    """Int rejects None; Optional[Int] accepts both int and None."""
    int_type = resolve_to_runtime_type(Int)

    assert_type_check(int_type.type_check(1))

    with pytest.raises(Failure):
        int_type.type_check(None)

    nullable_int_type = resolve_to_runtime_type(Optional[Int])
    assert_type_check(nullable_int_type.type_check(1))
    assert_type_check(nullable_int_type.type_check(None))
def assert_type_check(type_check):
    """A passing type check returns either None or a TypeCheck instance."""
    assert type_check is None or isinstance(type_check, TypeCheck)
def assert_success(runtime_type, value):
    """Assert that value passes runtime_type's type check."""
    type_check_result = runtime_type.type_check(value)
    assert_type_check(type_check_result)
def assert_failure(runtime_type, value):
    """Assert that value fails runtime_type's type check with a Failure."""
    with pytest.raises(Failure):
        runtime_type.type_check(value)
# NOTE: 'coerciion' typo kept -- renaming would change the collected test id.
def test_nullable_list_combos_coerciion():
    """Exercise all four Optional/List nesting combinations."""
    # List[Int]: both the list and its elements are required.
    list_of_int = resolve_to_runtime_type(List[Int])

    assert_failure(list_of_int, None)
    assert_success(list_of_int, [])
    assert_success(list_of_int, [1])
    assert_failure(list_of_int, [None])

    # Optional[List[Int]]: list may be None, elements may not.
    nullable_int_of_list = resolve_to_runtime_type(Optional[List[Int]])

    assert_success(nullable_int_of_list, None)
    assert_success(nullable_int_of_list, [])
    assert_success(nullable_int_of_list, [1])
    assert_failure(nullable_int_of_list, [None])

    # List[Optional[Int]]: list required, elements may be None.
    list_of_nullable_int = resolve_to_runtime_type(List[Optional[Int]])

    assert_failure(list_of_nullable_int, None)
    assert_success(list_of_nullable_int, [])
    assert_success(list_of_nullable_int, [1])
    assert_success(list_of_nullable_int, [None])

    # Optional[List[Optional[Int]]]: everything may be None.
    nullable_list_of_nullable_int = resolve_to_runtime_type(Optional[List[Optional[Int]]])

    assert_success(nullable_list_of_nullable_int, None)
    assert_success(nullable_list_of_nullable_int, [])
    assert_success(nullable_list_of_nullable_int, [1])
    assert_success(nullable_list_of_nullable_int, [None])
def execute_no_throw(pipeline_def):
    """Execute a pipeline with raise_on_error=False so failures surface as events."""
    return execute_pipeline(
        pipeline_def,
        run_config=RunConfig(executor_config=InProcessExecutorConfig(raise_on_error=False)),
    )
def _type_check_data_for_input(solid_result, input_name):
    """Pull the type-check event data recorded for one input of a solid."""
    return solid_result.compute_input_event_dict[input_name].event_specific_data.type_check_data
def test_input_types_succeed_in_pipeline():
    """A matching int input passes its type check during execution."""
    @lambda_solid
    def return_one():
        return 1

    @lambda_solid(input_defs=[InputDefinition('num', int)])
    def take_num(num):
        return num

    @pipeline
    def pipe():
        return take_num(return_one())

    pipeline_result = execute_pipeline(pipe)
    assert pipeline_result.success

    solid_result = pipeline_result.result_for_solid('take_num')
    assert solid_result.success

    type_check_data = _type_check_data_for_input(solid_result, 'num')
    assert type_check_data.success
def test_output_types_succeed_in_pipeline():
    """A matching int output passes its type check during execution."""
    @lambda_solid(output_def=OutputDefinition(int))
    def return_one():
        return 1

    @pipeline
    def pipe():
        return return_one()

    pipeline_result = execute_pipeline(pipe)
    assert pipeline_result.success

    solid_result = pipeline_result.result_for_solid('return_one')
    assert solid_result.success

    output_event = solid_result.get_output_event_for_compute()
    type_check_data = output_event.event_specific_data.type_check_data
    assert type_check_data.success
def test_input_types_fail_in_pipeline():
    """An int flowing into a str input raises; the failure is recorded as
    events when executing with raise_on_error=False."""
    @lambda_solid
    def return_one():
        return 1

    @lambda_solid(input_defs=[InputDefinition('string', str)])
    def take_string(string):
        return string

    @pipeline
    def pipe():
        return take_string(return_one())

    with pytest.raises(DagsterTypeCheckError) as exc_info:
        execute_pipeline(pipe)

    assert 'In solid "take_string" the input "string" received value 1 of Python ' in str(
        exc_info.value
    )

    # now check events in no throw case
    pipeline_result = execute_no_throw(pipe)
    assert not pipeline_result.success

    solid_result = pipeline_result.result_for_solid('take_string')
    type_check_data = _type_check_data_for_input(solid_result, 'string')
    assert not type_check_data.success
    assert type_check_data.description == 'Value "1" of python type "int" must be a string.'

    step_failure_event = solid_result.compute_step_failure_event
    assert step_failure_event.event_specific_data.error.cls_name == 'Failure'
def test_output_types_fail_in_pipeline():
    """An int returned from a str-typed output raises; the failure is
    recorded as events when executing with raise_on_error=False."""
    @lambda_solid(output_def=OutputDefinition(str))
    def return_int_fails():
        return 1

    @pipeline
    def pipe():
        return return_int_fails()

    with pytest.raises(DagsterTypeCheckError) as exc_info:
        execute_pipeline(pipe)

    assert (
        'In solid "return_int_fails" the output "result" received value 1 of Python type'
    ) in str(exc_info.value)

    pipeline_result = execute_no_throw(pipe)
    assert not pipeline_result.success

    solid_result = pipeline_result.result_for_solid('return_int_fails')
    assert not solid_result.success

    output_event = solid_result.get_output_event_for_compute()
    type_check_data = output_event.event_specific_data.type_check_data
    assert not type_check_data.success
    assert type_check_data.description == 'Value "1" of python type "int" must be a string.'

    step_failure_event = solid_result.compute_step_failure_event
    assert step_failure_event.event_specific_data.error.cls_name == 'Failure'


# TODO add more step output use cases
class ThrowsExceptionType(RuntimeType):
    """Runtime type whose type_check always raises a plain Exception."""

    def __init__(self):
        super(ThrowsExceptionType, self).__init__(
            key='ThrowsExceptionType', name='ThrowsExceptionType'
        )

    def type_check(self, value):
        # Deliberately a non-Failure exception, to test error wrapping.
        raise Exception('kdjfkjd')
class BadType(RuntimeType):
    """Runtime type whose type_check returns an illegal value (a string)."""

    def __init__(self):
        super(BadType, self).__init__(key='BadType', name='BadType')

    def type_check(self, value):
        # Illegal: type_check must return None or a TypeCheck.
        return 'kdjfkjd'
def test_input_type_returns_wrong_thing():
    """A type_check returning a non-None/TypeCheck value on an input is an
    invariant violation."""
    @lambda_solid
    def return_one():
        return 1

    @lambda_solid(input_defs=[InputDefinition('value', BadType)])
    def take_bad_thing(value):
        return value

    @pipeline
    def pipe():
        return take_bad_thing(return_one())

    with pytest.raises(DagsterInvariantViolationError):
        execute_pipeline(pipe)

    pipeline_result = execute_no_throw(pipe)
    assert not pipeline_result.success

    solid_result = pipeline_result.result_for_solid('take_bad_thing')
    type_check_data = _type_check_data_for_input(solid_result, 'value')
    assert not type_check_data.success
    assert (
        type_check_data.description
        == "Type checks can only return None or TypeCheck. Type BadType returned 'kdjfkjd'."
    )
    assert not type_check_data.metadata_entries

    step_failure_event = solid_result.compute_step_failure_event
    assert step_failure_event.event_specific_data.error.cls_name == 'DagsterInvariantViolationError'
def test_output_type_returns_wrong_thing():
    """A type_check returning a non-None/TypeCheck value on an output is an
    invariant violation."""
    @lambda_solid(output_def=OutputDefinition(BadType))
    def return_one_bad_thing():
        return 1

    @pipeline
    def pipe():
        return return_one_bad_thing()

    with pytest.raises(DagsterInvariantViolationError):
        execute_pipeline(pipe)

    pipeline_result = execute_no_throw(pipe)
    assert not pipeline_result.success

    solid_result = pipeline_result.result_for_solid('return_one_bad_thing')
    output_event = solid_result.get_output_event_for_compute()
    type_check_data = output_event.event_specific_data.type_check_data
    assert not type_check_data.success
    assert (
        type_check_data.description
        == "Type checks can only return None or TypeCheck. Type BadType returned 'kdjfkjd'."
    )

    step_failure_event = solid_result.compute_step_failure_event
    assert step_failure_event.event_specific_data.error.cls_name == 'DagsterInvariantViolationError'
def test_input_type_throw_arbitrary_exception():
    """An arbitrary exception raised inside an input type_check is surfaced
    as a step failure."""
    @lambda_solid
    def return_one():
        return 1

    @lambda_solid(input_defs=[InputDefinition('value', ThrowsExceptionType)])
    def take_throws(value):
        return value

    @pipeline
    def pipe():
        return take_throws(return_one())

    with pytest.raises(DagsterTypeCheckError):
        execute_pipeline(pipe)

    pipeline_result = execute_no_throw(pipe)
    assert not pipeline_result.success

    solid_result = pipeline_result.result_for_solid('take_throws')
    type_check_data = _type_check_data_for_input(solid_result, 'value')
    assert not type_check_data.success

    step_failure_event = solid_result.compute_step_failure_event
    assert step_failure_event.event_specific_data.error.cls_name == 'Exception'
def test_output_type_throw_arbitrary_exception():
    """An arbitrary exception raised inside an output type_check is surfaced
    as a step failure."""
    @lambda_solid(output_def=OutputDefinition(ThrowsExceptionType))
    def return_one_throws():
        return 1

    @pipeline
    def pipe():
        return return_one_throws()

    with pytest.raises(DagsterTypeCheckError):
        execute_pipeline(pipe)

    pipeline_result = execute_no_throw(pipe)
    assert not pipeline_result.success

    solid_result = pipeline_result.result_for_solid('return_one_throws')
    output_event = solid_result.get_output_event_for_compute()
    type_check_data = output_event.event_specific_data.type_check_data
    assert not type_check_data.success
    assert (
        'In solid "return_one_throws" the output "result" received value'
        in type_check_data.description
    )

    step_failure_event = solid_result.compute_step_failure_event
    assert step_failure_event.event_specific_data.error.cls_name == 'Exception'
| 29.203804
| 100
| 0.748116
|
acfb7f384ca7dca402cf6f9789c4b6d4f5c781c1
| 9,549
|
py
|
Python
|
gobigger/utils/collision_detection.py
|
luanshaotong/GoBigger
|
00c347a89a660134677d633f39c39123c5ab3deb
|
[
"Apache-2.0"
] | 189
|
2021-10-08T07:55:10.000Z
|
2022-03-31T23:49:43.000Z
|
gobigger/utils/collision_detection.py
|
luanshaotong/GoBigger
|
00c347a89a660134677d633f39c39123c5ab3deb
|
[
"Apache-2.0"
] | 25
|
2021-11-01T06:59:30.000Z
|
2022-03-22T11:22:27.000Z
|
gobigger/utils/collision_detection.py
|
luanshaotong/GoBigger
|
00c347a89a660134677d633f39c39123c5ab3deb
|
[
"Apache-2.0"
] | 28
|
2021-10-14T12:23:14.000Z
|
2022-03-31T23:49:45.000Z
|
import numpy as np
import logging
from .structures import Border, QuadNode
class BaseCollisionDetection:
    """Abstract base for collision-detection strategies over a bounded map."""

    def __init__(self, border: Border) -> None:
        # Border describing the playable area; subclasses use it for clamping.
        self.border = border

    def solve(self, query_list: list, gallery_list: list):
        # Subclasses return {query index: [gallery balls covered]}.
        raise NotImplementedError
class ExhaustiveCollisionDetection(BaseCollisionDetection):
    '''
    Overview:
        Exhaustive Algorithm: checks every (query, gallery) pair directly,
        O(len(query_list) * len(gallery_list)).
    '''
    def __init__(self, border: Border) -> None:
        super(ExhaustiveCollisionDetection, self).__init__(border=border)

    def solve(self, query_list: list, gallery_list: list):
        '''
        Overview:
            For each ball in the query, test every ball in the gallery for
            a collision.
        Parameters:
            query_list <List[BaseBall]>: List of balls that need to be queried for collision
            gallery_list <List[BaseBall]>: List of all balls
        Returns:
            results <Dict[int: List[BaseBall]>: maps each query's index in
            query_list to the list of gallery balls it collided with
        '''
        return {
            idx: [ball for ball in gallery_list if query.judge_cover(ball)]
            for idx, query in enumerate(query_list)
        }
class PrecisionCollisionDetection(BaseCollisionDetection):
    '''
    Overview:
        Precision Approximation Algorithm
        Divide the map into several rows according to the accuracy that has
        been set, dynamically maintain the row information in each frame,
        and search by row.
    '''
    def __init__(self, border: Border, precision: int = 50) -> None:
        '''
        Parameter:
            precision <int>: the number of rows the map is divided into
        '''
        super(PrecisionCollisionDetection, self).__init__(border=border)
        self.precision = precision

    def get_row(self, x) -> int:
        '''
        Overview:
            Get the row index for an x coordinate.
        Parameter:
            x <float>: x coordinate of a ball's position
        '''
        # NOTE(review): normalizes x by border.height -- correct only if the
        # border's height spans the x axis; confirm against Border.
        return int((x - self.border.minx) / self.border.height * self.precision)

    def solve(self, query_list: list, gallery_list: list):
        '''
        Overview:
            First, you need to sort the balls in each row according to the ordinate.
            For the balls in query_list, first abstract the boundary of the ball into
            a rectangle, then traverse each row in the rectangle, and find the first
            ball covered by the query through dichotomy in each row, and then
            enumerate the balls in sequence until the ordinate exceeds the boundary
            of the query rectangle.
        Parameters:
            query_list <List[BaseBall]>: List of balls that need to be queried for collision
            gallery_list <List[BaseBall]>: List of all balls
        Returns:
            results <Dict[int: List[BaseBall]>: maps each query's index in
            query_list to the list of gallery balls it collided with
        '''
        # Bucket gallery balls by row; entries are (gallery index, y).
        vec = {}
        for id, node in enumerate(gallery_list):
            row_id = self.get_row(node.position.x)
            if row_id not in vec:
                vec[row_id] = []
            vec[row_id].append((id, node.position.y))
        # Sort each row by y so we can binary-search within it.
        for val in vec.values():
            val.sort(key=lambda x: x[1])
        results = {}
        for id, query in enumerate(query_list):
            results[id] = []
            # Bounding box of the query ball.
            left = query.position.y - query.radius
            right = query.position.y + query.radius
            top = self.get_row(query.position.x - query.radius)
            bottom = self.get_row(query.position.x + query.radius)
            for i in range(top, bottom + 1):
                if i not in vec:
                    continue
                l = len(vec[i])
                start_pos = 0
                # Binary lifting: advance to the last entry with y < left
                # (supports rows of up to 2^16 entries).
                for j in range(15, -1, -1):
                    if start_pos + (2 ** j) < l and vec[i][start_pos + (2 ** j)][1] < left:
                        start_pos += 2 ** j
                # Linear scan until the y coordinate leaves the bounding box.
                for j in range(start_pos, l):
                    if vec[i][j][1] > right:
                        break
                    if query.judge_cover(gallery_list[vec[i][j][0]]):
                        results[id].append(gallery_list[vec[i][j][0]])
        return results
class RebuildQuadTreeCollisionDetection(BaseCollisionDetection):
    '''
    Overview:
        Build a quadtree on a two-dimensional plane in every frame, and
        query collisions in the quadtree.
    '''
    def __init__(self, border: Border, node_capacity=64, tree_depth=32) -> None:
        '''
        Parameter:
            node_capacity <int>: The capacity of each point in the quadtree
            tree_depth <int>: The max depth of the quadtree
        '''
        super(RebuildQuadTreeCollisionDetection, self).__init__(border=border)
        self.node_capacity = node_capacity
        self.tree_depth = tree_depth
        self.border = border

    def solve(self, query_list: list, gallery_list: list):
        '''
        Overview:
            Construct a quadtree from scratch based on gallery_list and
            complete the query.
        Parameters:
            query_list <List[BaseBall]>: List of balls that need to be queried for collision
            gallery_list <List[BaseBall]>: List of all balls
        Returns:
            results <Dict[int: List[BaseBall]>: maps each query's index in
            query_list to the list of gallery balls it collided with
        '''
        # Fresh tree each call: insertion cost is paid every frame.
        quadTree = QuadNode(border=self.border, max_depth=self.tree_depth, max_num=self.node_capacity)
        for node in gallery_list:
            quadTree.insert(node)
        results = {}
        for i, query in enumerate(query_list):
            results[i] = []
            # Query with the ball's bounding box clamped to the map border,
            # then confirm candidates with the exact cover test.
            quadTree_results = quadTree.find(Border(max(query.position.x - query.radius, self.border.minx),
                                                    max(query.position.y - query.radius, self.border.miny),
                                                    min(query.position.x + query.radius, self.border.maxx),
                                                    min(query.position.y + query.radius, self.border.maxy)))
            for result in quadTree_results:
                if query.judge_cover(result):
                    results[i].append(result)
        return results
class RemoveQuadTreeCollisionDetection(BaseCollisionDetection):
    '''
    Overview:
        Add delete operations for the quadtree, and dynamically maintain a
        quadtree across frames instead of rebuilding it.
    '''
    def __init__(self, border: Border, node_capacity=64, tree_depth=32) -> None:
        '''
        Parameter:
            node_capacity <int>: The capacity of each point in the quadtree
            tree_depth <int>: The max depth of the quadtree
        '''
        super(RemoveQuadTreeCollisionDetection, self).__init__(border=border)
        self.node_capacity = node_capacity
        self.tree_depth = tree_depth
        self.border = border
        # Persistent tree, updated incrementally by solve().
        self.quadTree = QuadNode(border=border, max_depth=tree_depth, max_num=node_capacity, parent=None)

    def solve(self, query_list: list, changed_node_list: list):
        '''
        Overview:
            Update the points in the quadtree according to the
            changed_node_list and complete the query.
        Parameters:
            query_list <List[BaseBall]>: List of balls that need to be queried for collision
            changed_node_list <List[BaseBall]>: balls that moved/were removed this frame
        Returns:
            results <Dict[int: List[BaseBall]>: maps each query's index in
            query_list to the list of gallery balls it collided with
        '''
        for node in changed_node_list:
            # Fix: identity test with None (was `not node.quad_node == None`,
            # which is fragile against custom __eq__ and non-idiomatic).
            if node.quad_node is not None:
                node.quad_node.remove(node)
            # Re-insert unless the ball was removed from play.
            if not node.is_remove:
                self.quadTree.insert(node)
        results = {}
        for i, query in enumerate(query_list):
            results[i] = []
            # Query with the ball's bounding box clamped to the map border,
            # then confirm candidates with the exact cover test.
            quadTree_results = self.quadTree.find(Border(max(query.position.x - query.radius, self.border.minx),
                                                         max(query.position.y - query.radius, self.border.miny),
                                                         min(query.position.x + query.radius, self.border.maxx),
                                                         min(query.position.y + query.radius, self.border.maxy)))
            for result in quadTree_results:
                if query.judge_cover(result):
                    results[i].append(result)
        return results
def create_collision_detection(cd_type, **cd_kwargs):
    """Factory: instantiate the collision-detection strategy named by cd_type.

    Raises NotImplementedError for an unknown cd_type.
    """
    registry = {
        'exhaustive': ExhaustiveCollisionDetection,
        'precision': PrecisionCollisionDetection,
        'rebuild_quadtree': RebuildQuadTreeCollisionDetection,
        'remove_quadtree': RemoveQuadTreeCollisionDetection,
    }
    if cd_type not in registry:
        raise NotImplementedError
    return registry[cd_type](**cd_kwargs)
| 42.252212
| 159
| 0.589276
|
acfb80b379c6676e2acdbb67e709382a49cff707
| 183
|
py
|
Python
|
planfood/common/apps.py
|
vsventy/planfood-server
|
e7db1930705f15b70c989205b4b876649695cc94
|
[
"MIT"
] | null | null | null |
planfood/common/apps.py
|
vsventy/planfood-server
|
e7db1930705f15b70c989205b4b876649695cc94
|
[
"MIT"
] | null | null | null |
planfood/common/apps.py
|
vsventy/planfood-server
|
e7db1930705f15b70c989205b4b876649695cc94
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class CommonConfig(AppConfig):
    """Django application configuration for the ``planfood.common`` app."""
    name = 'planfood.common'
    # Translatable label shown wherever Django displays the app name.
    verbose_name = _('Common')
| 22.875
| 55
| 0.765027
|
acfb80cbee3a03fe0b26c4e389b376880ff81bec
| 8,425
|
py
|
Python
|
tensorflow_probability/python/distributions/pareto.py
|
nadheesh/probability
|
919adeda68c0a1403303646d1f7f508747fbfaa4
|
[
"Apache-2.0"
] | 1
|
2019-10-13T19:52:59.000Z
|
2019-10-13T19:52:59.000Z
|
tensorflow_probability/python/distributions/pareto.py
|
snehil03july/probability
|
5f576230f1e261a823e20a49c442ff38c8f381d3
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/distributions/pareto.py
|
snehil03july/probability
|
5f576230f1e261a823e20a49c442ff38c8f381d3
|
[
"Apache-2.0"
] | 1
|
2019-10-13T19:52:57.000Z
|
2019-10-13T19:52:57.000Z
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Pareto distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops.distributions import util as distribution_util
class Pareto(tf.distributions.Distribution):
  """Pareto distribution.

  The Pareto distribution is parameterized by a `scale` and a
  `concentration` parameter.

  #### Mathematical Details

  The probability density function (pdf) is,

  ```none
  pdf(x; alpha, scale, x >= scale) = alpha * scale ** alpha / x ** (alpha + 1)
  ```

  where `concentration = alpha`.

  Note that `scale` acts as a scaling parameter, since
  `Pareto(c, scale).pdf(x) == Pareto(c, 1.).pdf(x / scale)`.

  The support of the distribution is defined on `[scale, infinity)`.
  """

  def __init__(self,
               concentration,
               scale=1.,
               validate_args=False,
               allow_nan_stats=True,
               name="Pareto"):
    """Construct Pareto distribution with `concentration` and `scale`.

    Args:
      concentration: Floating point tensor. Must contain only positive values.
      scale: Floating point tensor, equivalent to `mode`. `scale` also
        restricts the domain of this distribution to be in `[scale, inf)`.
        Must contain only positive values. Default value: `1`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs. Default value: `False` (i.e. do not validate args).
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
        Default value: `True`.
      name: Python `str` name prefixed to Ops created by this class.
        Default value: 'Pareto'.
    """
    parameters = dict(locals())
    with tf.name_scope(name, values=[concentration, scale]):
      self._concentration = tf.convert_to_tensor(
          concentration, name="concentration")
      self._scale = tf.convert_to_tensor(scale, name="scale")
      # Positivity assertions are only wired into the graph when
      # `validate_args` is set; otherwise the dependency list is empty.
      with tf.control_dependencies([
          tf.assert_positive(self._concentration),
          tf.assert_positive(self._scale)] if validate_args else []):
        # Re-bind through tf.identity so the assertions above become
        # control dependencies of every downstream use of the parameters.
        self._concentration = tf.identity(
            self._concentration, name="concentration")
        self._scale = tf.identity(self._scale, name="scale")
    super(Pareto, self).__init__(
        dtype=self._concentration.dtype,
        reparameterization_type=tf.distributions.FULLY_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._concentration, self._scale],
        name=name)

  @property
  def scale(self):
    """Scale parameter and also the lower bound of the support."""
    return self._scale

  @property
  def concentration(self):
    """Concentration parameter for this distribution."""
    return self._concentration

  def _batch_shape_tensor(self):
    # Batch shape is the broadcast of the two parameter shapes.
    return tf.broadcast_dynamic_shape(
        tf.shape(self.concentration), tf.shape(self.scale))

  def _batch_shape(self):
    return tf.broadcast_static_shape(self.concentration.shape, self.scale.shape)

  def _event_shape(self):
    # Univariate distribution: scalar event shape.
    return tensor_shape.scalar()

  def _sample_n(self, n, seed=None):
    # Inverse-transform sampling: with u ~ Uniform[0, 1), the inverse of
    # the CDF 1 - (scale / x)**concentration is
    #   x = scale / (1 - u)**(1 / concentration),
    # computed in log space; log1p(-u) keeps precision for small u.
    shape = tf.concat([[n], self.batch_shape_tensor()], 0)
    sampled = tf.random_uniform(
        shape, maxval=1., seed=seed, dtype=self.dtype)
    log_sample = tf.log(self.scale) - tf.log1p(-sampled) / self.concentration
    return tf.exp(log_sample)

  def _log_prob(self, x):
    # Optional runtime check that x lies in the support [scale, inf).
    with tf.control_dependencies([
        tf.assert_greater_equal(
            x, self.scale,
            message="x is not in the support of the distribution."
        )] if self.validate_args else []):

      def log_prob_on_support(z):
        # log pdf = log(alpha) + alpha*log(scale) - (alpha + 1)*log(z).
        return (tf.log(self.concentration) +
                self.concentration * tf.log(self.scale) -
                (self.concentration + 1.) * tf.log(z))

      # Below the support, log prob is -inf.
      return self._extend_support(x, log_prob_on_support, alt=-np.inf)

  def _prob(self, x):
    with tf.control_dependencies([
        tf.assert_greater_equal(
            x, self.scale,
            message="x is not in the support of the distribution."
        )] if self.validate_args else []):

      def prob_on_support(z):
        return (self.concentration * (self.scale ** self.concentration) /
                (z ** (self.concentration + 1)))

      # Below the support, prob is 0.
      return self._extend_support(x, prob_on_support, alt=0.)

  def _log_cdf(self, x):
    return self._extend_support(
        x, lambda x: tf.log1p(-(self.scale / x) ** self.concentration),
        alt=-np.inf)

  def _cdf(self, x):
    # CDF = 1 - (scale / x)**concentration, written via expm1 for accuracy.
    return self._extend_support(
        x, lambda x: -tf.expm1(self.concentration * tf.log(self.scale / x)),
        alt=0.)

  def _log_survival_function(self, x):
    return self._extend_support(
        x, lambda x: self.concentration * tf.log(self.scale / x), alt=np.inf)

  @distribution_util.AppendDocstring(
      """The mean of Pareto is defined` if `concentration > 1.`, otherwise it
      is `Inf`.""")
  def _mean(self):
    # Broadcast concentration against scale so the inf-fill below has the
    # full batch shape.
    broadcasted_concentration = self.concentration + tf.zeros_like(
        self.scale)
    infs = tf.fill(
        dims=tf.shape(broadcasted_concentration),
        value=np.array(np.inf, dtype=self.dtype.as_numpy_dtype))
    return tf.where(
        broadcasted_concentration > 1.,
        self.concentration * self.scale / (self.concentration - 1),
        infs)

  @distribution_util.AppendDocstring(
      """The variance of Pareto is defined` if `concentration > 2.`, otherwise
      it is `Inf`.""")
  def _variance(self):
    broadcasted_concentration = self.concentration + tf.zeros_like(self.scale)
    infs = tf.fill(
        dims=tf.shape(broadcasted_concentration),
        value=np.array(np.inf, dtype=self.dtype.as_numpy_dtype))
    return tf.where(
        broadcasted_concentration > 2.,
        self.scale ** 2 * self.concentration / (
            (self.concentration - 1.) ** 2 * (self.concentration - 2.)),
        infs)

  def _mode(self):
    # The density is maximized at the lower support bound.
    return self.scale + tf.zeros_like(self.concentration)

  def _extend_support(self, x, f, alt):
    """Returns `f(x)` if x is in the support, and `alt` otherwise.

    Given `f` which is defined on the support of this distribution
    (e.g. x > scale), extend the function definition to the real line
    by defining `f(x) = alt` for `x < scale`.

    Args:
      x: Floating-point Tensor to evaluate `f` at.
      f: Lambda that takes in a tensor and returns a tensor. This represents
        the function who we want to extend the domain of definition.
      alt: Python or numpy literal representing the value to use for extending
        the domain.
    Returns:
      Tensor representing an extension of `f(x)`.
    """
    # We need to do a series of broadcasts for the tf.where.
    scale = self.scale + tf.zeros_like(self.concentration)
    is_invalid = x < scale
    scale = scale + tf.zeros_like(x)
    x = x + tf.zeros_like(scale)
    # We need to do this to ensure gradients are sound.
    # (f is evaluated at `scale` instead of the out-of-support x, so no
    # NaN from f can leak into the gradient through the discarded branch.)
    y = f(tf.where(is_invalid, scale, x))
    if alt == 0.:
      alt = tf.zeros_like(y)
    elif alt == 1.:
      alt = tf.ones_like(y)
    else:
      alt = tf.fill(
          dims=tf.shape(y),
          value=np.array(alt, dtype=self.dtype.as_numpy_dtype))
    return tf.where(is_invalid, alt, y)
| 37.114537
| 80
| 0.661128
|
acfb8181297aa4a75b6c61074726d3801d55c976
| 47,196
|
py
|
Python
|
neutron/tests/functional/agent/linux/test_ip_lib.py
|
guillermomolina/neutron
|
bd2933a2588d1e0b18790dd719ca1d89aa4a0c8d
|
[
"Apache-2.0"
] | 3
|
2021-02-17T09:49:14.000Z
|
2022-01-19T08:40:34.000Z
|
neutron/tests/functional/agent/linux/test_ip_lib.py
|
guillermomolina/neutron
|
bd2933a2588d1e0b18790dd719ca1d89aa4a0c8d
|
[
"Apache-2.0"
] | 1
|
2021-11-30T01:34:39.000Z
|
2021-11-30T01:34:39.000Z
|
neutron/tests/functional/agent/linux/test_ip_lib.py
|
guillermomolina/neutron
|
bd2933a2588d1e0b18790dd719ca1d89aa4a0c8d
|
[
"Apache-2.0"
] | 1
|
2022-02-16T22:23:07.000Z
|
2022-02-16T22:23:07.000Z
|
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import itertools
import signal
import netaddr
from neutron_lib import constants
from neutron_lib.utils import net
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import importutils
from oslo_utils import uuidutils
from pyroute2.iproute import linux as iproute_linux
import testscenarios
import testtools
from neutron.agent.common import async_process
from neutron.agent.linux import ip_lib
from neutron.common import utils
from neutron.conf.agent import common as config
from neutron.privileged.agent.linux import ip_lib as priv_ip_lib
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent.linux.bin import ip_monitor
from neutron.tests.functional import base as functional_base
LOG = logging.getLogger(__name__)

# Attributes describing a test interface: its name, the CIDRs to configure
# on it, its MAC address and the namespace it lives in.
Device = collections.namedtuple('Device',
                                'name ip_cidrs mac_address namespace')

# IPv4 addresses are taken from the reserved 240.0.0.0/4 range, IPv6 ones
# from the fd00::/8 ULA prefix.
WRONG_IP = '0.0.0.0'
TEST_IP = '240.0.0.1'
TEST_IP_NEIGH = '240.0.0.2'
TEST_IP_SECONDARY = '240.0.0.3'
TEST_IP6_NEIGH = 'fd00::2'
TEST_IP6_SECONDARY = 'fd00::3'
# (address, NUD state) pairs used to seed neighbour tables in the
# neigh-flush tests below.
TEST_IP_NUD_STATES = ((TEST_IP_NEIGH, 'permanent'),
                      (TEST_IP_SECONDARY, 'reachable'),
                      (TEST_IP6_NEIGH, 'permanent'),
                      (TEST_IP6_SECONDARY, 'reachable'))
class IpLibTestFramework(functional_base.BaseSudoTestCase):
    """Shared fixture helpers for ip_lib functional tests.

    Provides random device-attribute generation and creation/cleanup of
    tuntap devices inside (optional) network namespaces.
    """

    def setUp(self):
        super(IpLibTestFramework, self).setUp()
        self._configure()

    def _configure(self):
        # Force the OVS interface driver, which is used to plug and
        # configure L3 addressing on the test devices.
        config.register_interface_driver_opts_helper(cfg.CONF)
        cfg.CONF.set_override(
            'interface_driver',
            'neutron.agent.linux.interface.OVSInterfaceDriver')
        config.register_interface_opts()
        self.driver = importutils.import_object(cfg.CONF.interface_driver,
                                                cfg.CONF)

    def generate_device_details(self, name=None, ip_cidrs=None,
                                mac_address=None, namespace=None):
        # Any attribute the caller does not pin down gets a random default.
        if ip_cidrs is None:
            ip_cidrs = ["%s/24" % TEST_IP]
        return Device(name or utils.get_rand_name(),
                      ip_cidrs,
                      mac_address or
                      net.get_random_mac('fa:16:3e:00:00:00'.split(':')),
                      namespace or utils.get_rand_name())

    def _safe_delete_device(self, device):
        # Cleanup helper: a device may legitimately be gone already.
        try:
            device.link.delete()
        except RuntimeError:
            LOG.debug('Could not delete %s, was it already deleted?', device)

    def manage_device(self, attr):
        """Create a tuntap with the specified attributes.

        The device is cleaned up at the end of the test.

        :param attr: A Device namedtuple
        :return: A tuntap ip_lib.IPDevice
        """
        ip = ip_lib.IPWrapper(namespace=attr.namespace)
        if attr.namespace:
            ip.netns.add(attr.namespace)
            self.addCleanup(ip.netns.delete, attr.namespace)
        tap_device = ip.add_tuntap(attr.name)
        self.addCleanup(self._safe_delete_device, tap_device)
        tap_device.link.set_address(attr.mac_address)
        self.driver.init_l3(attr.name, attr.ip_cidrs,
                            namespace=attr.namespace)
        tap_device.link.set_up()
        return tap_device
class IpLibTestCase(IpLibTestFramework):
    """Functional tests exercising ip_lib against real devices/namespaces."""

    def _check_routes(self, expected_routes, actual_routes):
        # Compare only the keys present in the expectation; extra fields
        # returned by ip_lib are ignored.
        actual_routes = [{key: route[key] for key in expected_routes[0].keys()}
                         for route in actual_routes]
        self.assertEqual(expected_routes, actual_routes)

    def test_rules_lifecycle(self):
        PRIORITY = 32768
        TABLE = 16
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        # Rules to add, per IP version: one keyed on source IP only, one
        # additionally keyed on the input interface.
        test_cases = {
            constants.IP_VERSION_4: [
                {
                    'ip': '1.1.1.1',
                    'to': '8.8.8.0/24'
                },
                {
                    'ip': '1.1.1.1',
                    'iif': device.name,
                    'to': '7.7.7.0/24'
                }
            ],
            constants.IP_VERSION_6: [
                {
                    'ip': 'abcd::1',
                    'to': '1234::/64'
                },
                {
                    'ip': 'abcd::1',
                    'iif': device.name,
                    'to': '4567::/64'
                }
            ]
        }
        # NOTE: when 'iif' is given the kernel records the source as the
        # any-address ('0.0.0.0/0' / '::/0') rather than the supplied IP.
        expected_rules = {
            constants.IP_VERSION_4: [
                {
                    'from': '1.1.1.1',
                    'to': '8.8.8.0/24',
                    'priority': str(PRIORITY),
                    'table': str(TABLE),
                    'type': 'unicast'
                }, {
                    'from': '0.0.0.0/0',
                    'to': '7.7.7.0/24',
                    'iif': device.name,
                    'priority': str(PRIORITY),
                    'table': str(TABLE),
                    'type': 'unicast'
                }
            ],
            constants.IP_VERSION_6: [
                {
                    'from': 'abcd::1',
                    'to': '1234::/64',
                    'priority': str(PRIORITY),
                    'table': str(TABLE),
                    'type': 'unicast'
                },
                {
                    'from': '::/0',
                    'to': '4567::/64',
                    'iif': device.name,
                    'priority': str(PRIORITY),
                    'table': str(TABLE),
                    'type': 'unicast',
                }
            ]
        }
        for ip_version, test_case in test_cases.items():
            for rule in test_case:
                ip_lib.add_ip_rule(namespace=device.namespace, table=TABLE,
                                   priority=PRIORITY, **rule)
            rules = ip_lib.list_ip_rules(device.namespace, ip_version)
            for expected_rule in expected_rules[ip_version]:
                self.assertIn(expected_rule, rules)
            for rule in test_case:
                ip_lib.delete_ip_rule(device.namespace, table=TABLE,
                                      priority=PRIORITY, **rule)
            # Deletion is verified through the privileged listing API.
            rules = priv_ip_lib.list_ip_rules(device.namespace, ip_version)
            for expected_rule in expected_rules[ip_version]:
                self.assertNotIn(expected_rule, rules)

    def test_device_exists(self):
        attr = self.generate_device_details()
        self.assertFalse(
            ip_lib.device_exists(attr.name, namespace=attr.namespace))
        device = self.manage_device(attr)
        self.assertTrue(
            ip_lib.device_exists(device.name, namespace=attr.namespace))
        self.assertFalse(
            ip_lib.device_exists(attr.name, namespace='wrong_namespace'))
        device.link.delete()
        self.assertFalse(
            ip_lib.device_exists(attr.name, namespace=attr.namespace))

    def test_ipdevice_exists(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        self.assertTrue(device.exists())
        device.link.delete()
        self.assertFalse(device.exists())

    def test_vlan_exists(self):
        attr = self.generate_device_details()
        ip = ip_lib.IPWrapper(namespace=attr.namespace)
        ip.netns.add(attr.namespace)
        self.addCleanup(ip.netns.delete, attr.namespace)
        # A dummy device serves as the parent link for the VLAN.
        priv_ip_lib.create_interface(attr.name, attr.namespace, 'dummy')
        self.assertFalse(ip_lib.vlan_in_use(1999, namespace=attr.namespace))
        device = ip.add_vlan('vlan1999', attr.name, 1999)
        self.assertTrue(ip_lib.vlan_in_use(1999, namespace=attr.namespace))
        device.link.delete()
        self.assertFalse(ip_lib.vlan_in_use(1999, namespace=attr.namespace))

    def test_vxlan_exists(self):
        attr = self.generate_device_details()
        ip = ip_lib.IPWrapper(namespace=attr.namespace)
        ip.netns.add(attr.namespace)
        self.addCleanup(ip.netns.delete, attr.namespace)
        self.assertFalse(ip_lib.vxlan_in_use(9999, namespace=attr.namespace))
        device = ip.add_vxlan(attr.name, 9999)
        self.addCleanup(self._safe_delete_device, device)
        self.assertTrue(ip_lib.vxlan_in_use(9999, namespace=attr.namespace))
        device.link.delete()
        self.assertFalse(ip_lib.vxlan_in_use(9999, namespace=attr.namespace))

    def test_ipwrapper_get_device_by_ip_None(self):
        ip_wrapper = ip_lib.IPWrapper(namespace=None)
        self.assertIsNone(ip_wrapper.get_device_by_ip(ip=None))

    def test_ipwrapper_get_device_by_ip(self):
        # We need to pass both IP and cidr values to get_device_by_ip()
        # to make sure it filters correctly.
        test_ip = "%s/24" % TEST_IP
        test_ip_secondary = "%s/24" % TEST_IP_SECONDARY
        attr = self.generate_device_details(
            ip_cidrs=[test_ip, test_ip_secondary]
        )
        self.manage_device(attr)
        ip_wrapper = ip_lib.IPWrapper(namespace=attr.namespace)
        self.assertEqual(attr.name, ip_wrapper.get_device_by_ip(TEST_IP).name)
        self.assertEqual(attr.name,
                         ip_wrapper.get_device_by_ip(TEST_IP_SECONDARY).name)
        self.assertIsNone(ip_wrapper.get_device_by_ip(TEST_IP_NEIGH))
        # this is in the same subnet, so will match if we pass as cidr
        test_ip_neigh = "%s/24" % TEST_IP_NEIGH
        self.assertEqual(attr.name,
                         ip_wrapper.get_device_by_ip(test_ip_neigh).name)
        self.assertIsNone(ip_wrapper.get_device_by_ip(WRONG_IP))

    def test_device_exists_with_ips_and_mac(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        self.assertTrue(
            ip_lib.device_exists_with_ips_and_mac(*attr))
        # Changing any single attribute must make the match fail.
        wrong_ip_cidr = '10.0.0.1/8'
        wrong_mac_address = 'aa:aa:aa:aa:aa:aa'
        attr = self.generate_device_details(name='wrong_name')
        self.assertFalse(
            ip_lib.device_exists_with_ips_and_mac(*attr))
        attr = self.generate_device_details(ip_cidrs=[wrong_ip_cidr])
        self.assertFalse(ip_lib.device_exists_with_ips_and_mac(*attr))
        attr = self.generate_device_details(mac_address=wrong_mac_address)
        self.assertFalse(ip_lib.device_exists_with_ips_and_mac(*attr))
        attr = self.generate_device_details(namespace='wrong_namespace')
        self.assertFalse(ip_lib.device_exists_with_ips_and_mac(*attr))
        device.link.delete()

    def test_get_device_mac(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        mac_address = ip_lib.get_device_mac(attr.name,
                                            namespace=attr.namespace)
        self.assertEqual(attr.mac_address, mac_address)
        device.link.delete()

    def test_get_device_mac_too_long_name(self):
        # Name longer than the kernel IFNAMSIZ-derived maximum.
        name = utils.get_rand_name(
            max_length=constants.DEVICE_NAME_MAX_LEN + 5)
        attr = self.generate_device_details(name=name)
        device = self.manage_device(attr)
        mac_address = ip_lib.get_device_mac(attr.name,
                                            namespace=attr.namespace)
        self.assertEqual(attr.mac_address, mac_address)
        device.link.delete()

    def test_gateway_lifecycle(self):
        attr = self.generate_device_details(
            ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
        )
        metric = 1000
        device = self.manage_device(attr)
        gateways = {
            constants.IP_VERSION_4: attr.ip_cidrs[0].split('/')[0],
            constants.IP_VERSION_6: "fd00::ff"
        }
        expected_gateways = {
            constants.IP_VERSION_4: {
                'metric': metric,
                'via': gateways[constants.IP_VERSION_4]},
            constants.IP_VERSION_6: {
                'metric': metric,
                'via': gateways[constants.IP_VERSION_6]}}
        for ip_version, gateway_ip in gateways.items():
            device.route.add_gateway(gateway_ip, metric)
            self._check_routes(
                [expected_gateways[ip_version]],
                [device.route.get_gateway(ip_version=ip_version)])
            device.route.delete_gateway(gateway_ip)
            self.assertIsNone(
                device.route.get_gateway(ip_version=ip_version))

    def test_gateway_flush(self):
        attr = self.generate_device_details(
            ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
        )
        device = self.manage_device(attr)
        gateways = {
            constants.IP_VERSION_4: attr.ip_cidrs[0].split('/')[0],
            constants.IP_VERSION_6: "fd00::ff"
        }
        for ip_version, gateway_ip in gateways.items():
            # Ensure that there is no gateway configured
            self.assertIsNone(
                device.route.get_gateway(ip_version=ip_version))
            # Now lets add gateway
            device.route.add_gateway(gateway_ip, table="main")
            self.assertIsNotNone(
                device.route.get_gateway(ip_version=ip_version))
            # Flush gateway and check that there is no any gateway configured
            device.route.flush(ip_version, table="main")
            self.assertIsNone(
                device.route.get_gateway(ip_version=ip_version))

    def test_get_neigh_entries(self):
        attr = self.generate_device_details(
            ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
        )
        mac_address = net.get_random_mac('fa:16:3e:00:00:00'.split(':'))
        device = self.manage_device(attr)
        device.neigh.add(TEST_IP_NEIGH, mac_address)
        expected_neighs = [{'dst': TEST_IP_NEIGH,
                            'lladdr': mac_address,
                            'device': attr.name,
                            'state': 'permanent'}]
        neighs = device.neigh.dump(4)
        self.assertCountEqual(expected_neighs, neighs)
        self.assertIsInstance(neighs, list)
        device.neigh.delete(TEST_IP_NEIGH, mac_address)
        neighs = device.neigh.dump(4, dst=TEST_IP_NEIGH, lladdr=mac_address)
        self.assertEqual([], neighs)

    def test_get_neigh_entries_no_namespace(self):
        with testtools.ExpectedException(ip_lib.NetworkNamespaceNotFound):
            ip_lib.dump_neigh_entries(4, namespace="nonexistent-netns")

    def test_get_neigh_entries_no_interface(self):
        attr = self.generate_device_details(
            ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
        )
        self.manage_device(attr)
        with testtools.ExpectedException(ip_lib.NetworkInterfaceNotFound):
            ip_lib.dump_neigh_entries(4, device="nosuchdevice",
                                      namespace=attr.namespace)

    def test_delete_neigh_entries(self):
        attr = self.generate_device_details(
            ip_cidrs=["%s/24" % TEST_IP, "fd00::1/64"]
        )
        mac_address = net.get_random_mac('fa:16:3e:00:00:00'.split(':'))
        device = self.manage_device(attr)
        # trying to delete a non-existent entry shouldn't raise an error
        device.neigh.delete(TEST_IP_NEIGH, mac_address)

    def test_flush_neigh_ipv4(self):
        # Entry with state "reachable" deleted.
        self._flush_neigh(constants.IP_VERSION_4, TEST_IP_SECONDARY,
                          {TEST_IP_NEIGH})
        # Entries belong to "ip_to_flush" passed CIDR, but "permanent" entry
        # is not deleted.
        self._flush_neigh(constants.IP_VERSION_4, '240.0.0.0/28',
                          {TEST_IP_NEIGH})
        # "all" passed, but "permanent" entry is not deleted.
        self._flush_neigh(constants.IP_VERSION_4, 'all', {TEST_IP_NEIGH})

    def test_flush_neigh_ipv6(self):
        # Entry with state "reachable" deleted.
        self._flush_neigh(constants.IP_VERSION_6, TEST_IP6_SECONDARY,
                          {TEST_IP6_NEIGH})
        # Entries belong to "ip_to_flush" passed CIDR, but "permanent" entry
        # is not deleted.
        self._flush_neigh(constants.IP_VERSION_6, 'fd00::0/64',
                          {TEST_IP6_NEIGH})
        # "all" passed, but "permanent" entry is not deleted.
        self._flush_neigh(constants.IP_VERSION_6, 'all', {TEST_IP6_NEIGH})

    def _flush_neigh(self, version, ip_to_flush, ips_expected):
        # Seed the neighbour table from TEST_IP_NUD_STATES, flush, then
        # assert exactly `ips_expected` survive for the given IP version.
        attr = self.generate_device_details(
            ip_cidrs=['%s/24' % TEST_IP, 'fd00::1/64'],
            namespace=utils.get_rand_name(20, 'ns-'))
        device = self.manage_device(attr)
        for test_ip, nud_state in TEST_IP_NUD_STATES:
            mac_address = net.get_random_mac('fa:16:3e:00:00:00'.split(':'))
            device.neigh.add(test_ip, mac_address, nud_state)
        device.neigh.flush(version, ip_to_flush)
        ips = {e['dst'] for e in device.neigh.dump(version)}
        self.assertEqual(ips_expected, ips)

    def _check_for_device_name(self, ip, name, should_exist):
        exist = any(d for d in ip.get_devices() if d.name == name)
        self.assertEqual(should_exist, exist)

    def test_veth_exists(self):
        namespace1 = self.useFixture(net_helpers.NamespaceFixture())
        namespace2 = self.useFixture(net_helpers.NamespaceFixture())
        dev_name1 = utils.get_rand_name()
        dev_name2 = utils.get_rand_name()
        device1, device2 = namespace1.ip_wrapper.add_veth(
            dev_name1, dev_name2, namespace2.name)
        self.addCleanup(self._safe_delete_device, device1)
        self.addCleanup(self._safe_delete_device, device2)
        self._check_for_device_name(namespace1.ip_wrapper, dev_name1, True)
        self._check_for_device_name(namespace2.ip_wrapper, dev_name2, True)
        self._check_for_device_name(namespace1.ip_wrapper, dev_name2, False)
        self._check_for_device_name(namespace2.ip_wrapper, dev_name1, False)

        # As it is veth pair, remove of device1 should be enough to remove
        # both devices
        device1.link.delete()
        self._check_for_device_name(namespace1.ip_wrapper, dev_name1, False)
        self._check_for_device_name(namespace2.ip_wrapper, dev_name2, False)

    def test_macvtap_exists(self):
        namespace = self.useFixture(net_helpers.NamespaceFixture())
        src_dev_name = utils.get_rand_name()
        src_dev = namespace.ip_wrapper.add_dummy(src_dev_name)
        self.addCleanup(self._safe_delete_device, src_dev)
        dev_name = utils.get_rand_name()
        device = namespace.ip_wrapper.add_macvtap(dev_name, src_dev_name)
        self.addCleanup(self._safe_delete_device, device)
        self._check_for_device_name(namespace.ip_wrapper, dev_name, True)
        device.link.delete()
        self._check_for_device_name(namespace.ip_wrapper, dev_name, False)

    def test_dummy_exists(self):
        namespace = self.useFixture(net_helpers.NamespaceFixture())
        dev_name = utils.get_rand_name()
        device = namespace.ip_wrapper.add_dummy(dev_name)
        self.addCleanup(self._safe_delete_device, device)
        self._check_for_device_name(namespace.ip_wrapper, dev_name, True)
        device.link.delete()
        self._check_for_device_name(namespace.ip_wrapper, dev_name, False)

    def test_set_link_mtu(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        device.link.set_mtu(1450)
        self.assertEqual(1450, device.link.mtu)

        # Check if proper exception will be raised when wrong MTU value is
        # provided
        self.assertRaises(ip_lib.InvalidArgument, device.link.set_mtu, 1)

    def test_set_link_allmulticast_on(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        self.assertFalse(device.link.allmulticast)
        device.link.set_allmulticast_on()
        self.assertTrue(device.link.allmulticast)

    def test_set_link_netns(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        original_namespace = device.namespace
        original_ip_wrapper = ip_lib.IPWrapper(namespace=original_namespace)
        new_namespace = self.useFixture(net_helpers.NamespaceFixture())
        device.link.set_netns(new_namespace.name)
        self.assertEqual(new_namespace.name, device.namespace)
        self._check_for_device_name(
            new_namespace.ip_wrapper, device.name, True)
        self._check_for_device_name(
            original_ip_wrapper, device.name, False)

    def test_set_link_name(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        ip_wrapper = ip_lib.IPWrapper(namespace=device.namespace)
        original_name = device.name
        new_name = utils.get_rand_name()

        # device has to be DOWN to rename it
        device.link.set_down()
        device.link.set_name(new_name)
        self.assertEqual(new_name, device.name)
        self._check_for_device_name(ip_wrapper, new_name, True)
        self._check_for_device_name(ip_wrapper, original_name, False)

    def test_set_link_alias(self):
        attr = self.generate_device_details()
        device = self.manage_device(attr)
        alias = utils.get_rand_name()
        device.link.set_alias(alias)
        self.assertEqual(alias, device.link.alias)

    def _add_and_check_ips(self, device, ip_addresses):
        for cidr, scope, expected_broadcast in ip_addresses:
            # For IPv4 address add_broadcast flag will be set to True only
            # if expected_broadcast is given.
            # For IPv6 add_broadcast flag can be set to True always but
            # broadcast address will not be set, so expected_broadcast for
            # IPv6 should be always given as None.
            add_broadcast = True
            if cidr.version == constants.IP_VERSION_4:
                add_broadcast = bool(expected_broadcast)
            device.addr.add(str(cidr), scope, add_broadcast)

        device_ips_info = [
            (netaddr.IPNetwork(ip_info['cidr']),
             ip_info['scope'],
             ip_info['broadcast']) for
            ip_info in device.addr.list()]
        self.assertCountEqual(ip_addresses, device_ips_info)

    def _flush_ips(self, device, ip_version):
        device.addr.flush(ip_version)
        # After a flush no address of that IP version may remain.
        for ip_address in device.addr.list():
            cidr = netaddr.IPNetwork(ip_address['cidr'])
            self.assertNotEqual(ip_version, cidr.version)

    def test_add_ip_address(self):
        ip_addresses = [
            (netaddr.IPNetwork("10.10.10.10/30"), "global", '10.10.10.11'),
            (netaddr.IPNetwork("11.11.11.11/28"), "link", None),
            (netaddr.IPNetwork("2801::1/120"), "global", None),
            (netaddr.IPNetwork("fe80::/64"), "link", None)]
        attr = self.generate_device_details(ip_cidrs=[])
        device = self.manage_device(attr)
        self._add_and_check_ips(device, ip_addresses)

        # Now let's check if adding already existing IP address will raise
        # RuntimeError
        ip_address = ip_addresses[0]
        self.assertRaises(RuntimeError,
                          device.addr.add, str(ip_address[0]), ip_address[1])

    def test_delete_ip_address(self):
        attr = self.generate_device_details()
        cidr = attr.ip_cidrs[0]
        device = self.manage_device(attr)

        device_cidrs = [ip_info['cidr'] for ip_info in device.addr.list()]
        self.assertIn(cidr, device_cidrs)

        device.addr.delete(cidr)
        device_cidrs = [ip_info['cidr'] for ip_info in device.addr.list()]
        self.assertNotIn(cidr, device_cidrs)

        # Try to delete not existing IP address, it should be just fine and
        # finish without any error raised
        device.addr.delete(cidr)

    def test_flush_ip_addresses(self):
        ip_addresses = [
            (netaddr.IPNetwork("10.10.10.10/30"), "global", '10.10.10.11'),
            (netaddr.IPNetwork("11.11.11.11/28"), "link", None),
            (netaddr.IPNetwork("2801::1/120"), "global", None),
            (netaddr.IPNetwork("fe80::/64"), "link", None)]
        attr = self.generate_device_details(ip_cidrs=[])
        device = self.manage_device(attr)

        self._add_and_check_ips(device, ip_addresses)
        self._flush_ips(device, constants.IP_VERSION_4)
        self._flush_ips(device, constants.IP_VERSION_6)
class TestSetIpNonlocalBind(functional_base.BaseSudoTestCase):
    """Tests for reading/writing the ip_nonlocal_bind sysctl in a netns."""

    def test_assigned_value(self):
        namespace = self.useFixture(net_helpers.NamespaceFixture())
        for expected in (0, 1):
            failed = ip_lib.set_ip_nonlocal_bind(expected, namespace.name)
            try:
                observed = ip_lib.get_ip_nonlocal_bind(namespace.name)
            except RuntimeError as rte:
                # Older kernels do not expose this knob per-namespace;
                # skip instead of failing in that environment.
                stat_message = (
                    'cannot stat /proc/sys/net/ipv4/ip_nonlocal_bind')
                if stat_message in str(rte):
                    raise self.skipException(
                        "This kernel doesn't support %s in network "
                        "namespaces." % ip_lib.IP_NONLOCAL_BIND)
                raise
            self.assertFalse(failed)
            self.assertEqual(expected, observed)
class NamespaceTestCase(functional_base.BaseSudoTestCase):
    """Tests for network_namespace_exists against a real namespace."""

    def setUp(self):
        super(NamespaceTestCase, self).setUp()
        self.namespace = 'test_ns_' + uuidutils.generate_uuid()
        ip_lib.create_network_namespace(self.namespace)
        self.addCleanup(self._delete_namespace)

    def _delete_namespace(self):
        ip_lib.delete_network_namespace(self.namespace)

    def test_network_namespace_exists_ns_exists(self):
        self.assertTrue(ip_lib.network_namespace_exists(self.namespace))

    def test_network_namespace_exists_ns_doesnt_exists(self):
        self.assertFalse(ip_lib.network_namespace_exists('another_ns'))

    def test_network_namespace_exists_ns_exists_try_is_ready(self):
        # try_is_ready additionally probes that the namespace is usable.
        self.assertTrue(ip_lib.network_namespace_exists(self.namespace,
                                                        try_is_ready=True))

    def test_network_namespace_exists_ns_doesnt_exists_try_is_ready(self):
        self.assertFalse(ip_lib.network_namespace_exists('another_ns',
                                                         try_is_ready=True))
class IpMonitorTestCase(testscenarios.WithScenarios,
                        functional_base.BaseLoggingTestCase):
    """Functional tests for the ``ip_monitor`` address-event watcher.

    Each test runs twice via testscenarios: once inside a private network
    namespace and once in the root namespace (``namespace`` is None).
    The monitored events are written by the ip_monitor subprocess as one
    JSON object per line into ``self.temp_file``.
    """
    scenarios = [
        ('namespace', {'namespace': 'ns_' + uuidutils.generate_uuid()}),
        ('no_namespace', {'namespace': None})
    ]
    def setUp(self):
        # Create the optional namespace, five dummy device names (truncated
        # to the kernel interface-name limit) and launch the monitor process
        # that records address events into the temp file.
        super(IpMonitorTestCase, self).setUp()
        self.addCleanup(self._cleanup)
        if self.namespace:
            priv_ip_lib.create_netns(self.namespace)
        self.devices = [('int_' + uuidutils.generate_uuid())[
            :constants.DEVICE_NAME_MAX_LEN] for _ in range(5)]
        self.ip_wrapper = ip_lib.IPWrapper(self.namespace)
        self.temp_file = self.get_temp_file_path('out_' + self.devices[0] +
                                                 '.tmp')
        self.proc = self._run_ip_monitor(ip_monitor)
    def _cleanup(self):
        # Stop the monitor first, then tear down either the whole namespace
        # or, in the root-namespace scenario, each dummy interface
        # individually (tolerating ones a test already deleted).
        self.proc.stop(kill_timeout=10, kill_signal=signal.SIGTERM)
        if self.namespace:
            priv_ip_lib.remove_netns(self.namespace)
        else:
            for device in self.devices:
                try:
                    priv_ip_lib.delete_interface(device, self.namespace)
                except priv_ip_lib.NetworkInterfaceNotFound:
                    pass
    @staticmethod
    def _normalize_module_name(name):
        # Map a compiled module path (.pyc/.pyo) back to its .py source so
        # the monitor script can be executed directly.
        for suf in ['.pyc', '.pyo']:
            if name.endswith(suf):
                return name[:-len(suf)] + '.py'
        return name
    def _run_ip_monitor(self, module):
        # Run the given module as a root subprocess:
        #   <module.py> <output file> <namespace-or-"None">
        executable = self._normalize_module_name(module.__file__)
        proc = async_process.AsyncProcess(
            [executable, self.temp_file, str(self.namespace)],
            run_as_root=True)
        proc.start(block=True)
        return proc
    def _read_file(self, ip_addresses):
        # Return True only when every expected {name, cidr, event} record in
        # ip_addresses appears in the monitor output file; any read/parse
        # error counts as "not yet" so callers can poll.
        try:
            registers = []
            with open(self.temp_file, 'r') as f:
                data = f.read()
            for line in data.splitlines():
                register = jsonutils.loads(line)
                registers.append({'name': register['name'],
                                  'cidr': register['cidr'],
                                  'event': register['event']})
            for ip_address in ip_addresses:
                if ip_address not in registers:
                    return False
            return True
        except (OSError, IOError, ValueError):
            return False
    def _check_read_file(self, ip_addresses):
        # Poll until all expected events show up; on timeout fail with both
        # the expectation and the raw monitor output for debugging.
        try:
            utils.wait_until_true(lambda: self._read_file(ip_addresses),
                                  timeout=30)
        except utils.WaitTimeout:
            with open(self.temp_file, 'r') as f:
                registers = f.read()
            self.fail('Defined IP addresses: %s, IP addresses registered: %s' %
                      (ip_addresses, registers))
    def _handle_ip_addresses(self, event, ip_addresses):
        # Apply only the entries matching ``event``: add or delete each CIDR
        # on its device so the monitor has something to report.
        for ip_address in (_ip for _ip in ip_addresses
                           if _ip['event'] == event):
            ip_device = ip_lib.IPDevice(ip_address['name'], self.namespace)
            if event == 'removed':
                ip_device.addr.delete(ip_address['cidr'])
            if event == 'added':
                ip_device.addr.add(ip_address['cidr'])
    def test_add_remove_ip_address_and_interface(self):
        for device in self.devices:
            self.ip_wrapper.add_dummy(device)
        # Empty expectation: just wait until the output file is readable.
        utils.wait_until_true(lambda: self._read_file({}), timeout=30)
        ip_addresses = [
            {'cidr': '192.168.250.1/24', 'event': 'added',
             'name': self.devices[0]},
            {'cidr': '192.168.250.2/24', 'event': 'added',
             'name': self.devices[1]},
            {'cidr': '192.168.250.3/24', 'event': 'added',
             'name': self.devices[2]},
            {'cidr': '192.168.250.10/24', 'event': 'added',
             'name': self.devices[3]},
            {'cidr': '192.168.250.10/24', 'event': 'removed',
             'name': self.devices[3]},
            {'cidr': '2001:db8::1/64', 'event': 'added',
             'name': self.devices[4]},
            {'cidr': '2001:db8::2/64', 'event': 'added',
             'name': self.devices[4]}]
        self._handle_ip_addresses('added', ip_addresses)
        self._handle_ip_addresses('removed', ip_addresses)
        self._check_read_file(ip_addresses)
        # Deleting the interface itself must generate "removed" events for
        # every address still configured on it.
        ip_device = ip_lib.IPDevice(self.devices[4], self.namespace)
        ip_device.link.delete()
        ip_addresses = [
            {'cidr': '2001:db8::1/64', 'event': 'removed',
             'name': self.devices[4]},
            {'cidr': '2001:db8::2/64', 'event': 'removed',
             'name': self.devices[4]}]
        self._check_read_file(ip_addresses)
    def test_interface_added_after_initilization(self):
        # Start monitoring with all but the last device, then add the last
        # one afterwards: its address events must still be captured.
        for device in self.devices[:len(self.devices) - 1]:
            self.ip_wrapper.add_dummy(device)
        utils.wait_until_true(lambda: self._read_file({}), timeout=30)
        ip_addresses = [
            {'cidr': '192.168.251.21/24', 'event': 'added',
             'name': self.devices[0]},
            {'cidr': '192.168.251.22/24', 'event': 'added',
             'name': self.devices[1]}]
        self._handle_ip_addresses('added', ip_addresses)
        self._check_read_file(ip_addresses)
        self.ip_wrapper.add_dummy(self.devices[-1])
        ip_addresses.append({'cidr': '192.168.251.23/24', 'event': 'added',
                             'name': self.devices[-1]})
        self._handle_ip_addresses('added', [ip_addresses[-1]])
        self._check_read_file(ip_addresses)
    def test_add_and_remove_multiple_ips(self):
        # NOTE(ralonsoh): testing [1], adding multiple IPs.
        # [1] https://bugs.launchpad.net/neutron/+bug/1832307
        utils.wait_until_true(lambda: self._read_file({}), timeout=30)
        self.ip_wrapper.add_dummy(self.devices[0])
        ip_addresses = []
        for i in range(100):
            _cidr = str(netaddr.IPNetwork('192.168.252.1/32').ip + i) + '/32'
            ip_addresses.append({'cidr': _cidr, 'event': 'added',
                                 'name': self.devices[0]})
        self._handle_ip_addresses('added', ip_addresses)
        self._check_read_file(ip_addresses)
        for i in range(100):
            _cidr = str(netaddr.IPNetwork('192.168.252.1/32').ip + i) + '/32'
            ip_addresses.append({'cidr': _cidr, 'event': 'removed',
                                 'name': self.devices[0]})
        self._handle_ip_addresses('removed', ip_addresses)
        self._check_read_file(ip_addresses)
class IpRouteCommandTestCase(functional_base.BaseSudoTestCase):
    """Functional tests for IPDevice.route (add/list/delete/flush)."""
    def setUp(self):
        # One dummy device in a throwaway namespace with one IPv4 and one
        # IPv6 address; self.cidrs mixes both families for route tests.
        super(IpRouteCommandTestCase, self).setUp()
        self.namespace = self.useFixture(net_helpers.NamespaceFixture()).name
        ip_lib.IPWrapper(self.namespace).add_dummy('test_device')
        self.device = ip_lib.IPDevice('test_device', namespace=self.namespace)
        self.device.link.set_up()
        self.device_cidr_ipv4 = '192.168.100.1/24'
        self.device_cidr_ipv6 = '2020::1/64'
        self.device.addr.add(self.device_cidr_ipv4)
        self.device.addr.add(self.device_cidr_ipv6)
        self.cidrs = ['192.168.0.0/24', '10.0.0.0/8', '2001::/64', 'faaa::/96']
    def _assert_route(self, ip_version, table=None, source_prefix=None,
                      cidr=None, scope=None, via=None, metric=None,
                      not_in=False):
        # Poll until the expected route record (not) appears. NOTE(review):
        # the lambdas below reference ``cmp`` and ``ip_version`` by late
        # binding -- both are (re)assigned further down before the lambdas
        # are first invoked, so the ``ip_version`` *parameter* is
        # effectively recomputed from cidr/via.
        if not_in:
            fn = lambda: cmp not in self.device.route.list_routes(ip_version,
                                                                  table=table)
            msg = 'Route found: %s'
        else:
            fn = lambda: cmp in self.device.route.list_routes(ip_version,
                                                              table=table)
            msg = 'Route not found: %s'
        if cidr:
            ip_version = utils.get_ip_version(cidr)
        else:
            ip_version = utils.get_ip_version(via)
            cidr = constants.IP_ANY[ip_version]
        # Kernel defaults: IPv6 routes always report the first scope name;
        # otherwise gateway routes are 'global' and direct routes 'link'.
        if constants.IP_VERSION_6 == ip_version:
            scope = ip_lib.IP_ADDRESS_SCOPE[0]
        elif not scope:
            scope = 'global' if via else 'link'
        if not metric:
            metric = ip_lib.IP_ROUTE_METRIC_DEFAULT[ip_version]
        table = table or iproute_linux.DEFAULT_TABLE
        table = ip_lib.IP_RULE_TABLES_NAMES.get(table, table)
        cmp = {'table': table,
               'cidr': cidr,
               'source_prefix': source_prefix,
               'scope': scope,
               'device': 'test_device',
               'via': via,
               'metric': metric,
               'proto': 'static'}
        try:
            utils.wait_until_true(fn, timeout=5)
        except utils.WaitTimeout:
            raise self.fail(msg % cmp)
    def test_add_route_table(self):
        # Cover the default table plus the well-known tables 1/253/254/255.
        tables = (None, 1, 253, 254, 255)
        for cidr in self.cidrs:
            for table in tables:
                self.device.route.add_route(cidr, table=table)
                ip_version = utils.get_ip_version(cidr)
                self._assert_route(ip_version, cidr=cidr, table=table)
    def test_add_route_via(self):
        # Gateways taken from the device's own subnets so they are on-link.
        gateway_ipv4 = str(netaddr.IPNetwork(self.device_cidr_ipv4).ip)
        gateway_ipv6 = str(netaddr.IPNetwork(self.device_cidr_ipv6).ip + 1)
        for cidr in self.cidrs:
            ip_version = utils.get_ip_version(cidr)
            gateway = (gateway_ipv4 if ip_version == constants.IP_VERSION_4
                       else gateway_ipv6)
            self.device.route.add_route(cidr, via=gateway)
            self._assert_route(ip_version, cidr=cidr, via=gateway)
    def test_add_route_metric(self):
        metrics = (None, 1, 10, 255)
        for cidr in self.cidrs:
            for metric in metrics:
                self.device.route.add_route(cidr, metric=metric)
                ip_version = utils.get_ip_version(cidr)
                self._assert_route(ip_version, cidr=cidr, metric=metric)
    def test_add_route_scope(self):
        for cidr in self.cidrs:
            for scope in ip_lib.IP_ADDRESS_SCOPE_NAME:
                self.device.route.add_route(cidr, scope=scope)
                ip_version = utils.get_ip_version(cidr)
                self._assert_route(ip_version, cidr=cidr, scope=scope)
    def test_add_route_gateway(self):
        gateways = (str(netaddr.IPNetwork(self.device_cidr_ipv4).ip),
                    str(netaddr.IPNetwork(self.device_cidr_ipv6).ip + 1))
        for gateway in gateways:
            ip_version = utils.get_ip_version(gateway)
            self.device.route.add_gateway(gateway)
            # cidr=None means the default route ("any" CIDR).
            self._assert_route(ip_version, cidr=None, via=gateway,
                               scope='global')
    def test_list_onlink_routes_ipv4(self):
        # Only IPv4 CIDRs are added on-link; the listing must match exactly.
        cidr_ipv4 = []
        for cidr in self.cidrs:
            if utils.get_ip_version(cidr) == constants.IP_VERSION_4:
                cidr_ipv4.append(cidr)
                self.device.route.add_onlink_route(cidr)
        for cidr in cidr_ipv4:
            self._assert_route(constants.IP_VERSION_4, cidr=cidr)
        routes = self.device.route.list_onlink_routes(constants.IP_VERSION_4)
        self.assertEqual(len(cidr_ipv4), len(routes))
    def test_get_and_delete_gateway(self):
        # Exercise the full scope/metric/table product for both families;
        # after deletion get_gateway() must return None.
        gateways = (str(netaddr.IPNetwork(self.device_cidr_ipv4).ip),
                    str(netaddr.IPNetwork(self.device_cidr_ipv6).ip + 1))
        scopes = ('global', 'site', 'link')
        metrics = (None, 1, 255)
        tables = (None, 1, 254, 255)
        for gateway, scope, metric, table in itertools.product(
                gateways, scopes, metrics, tables):
            ip_version = utils.get_ip_version(gateway)
            self.device.route.add_gateway(gateway, scope=scope, metric=metric,
                                          table=table)
            self._assert_route(ip_version, cidr=None, via=gateway, scope=scope,
                               metric=metric, table=table)
            self.assertEqual(gateway, self.device.route.get_gateway(
                ip_version=ip_version, table=table)['via'])
            self.device.route.delete_gateway(gateway, table=table, scope=scope)
            self.assertIsNone(self.device.route.get_gateway(
                ip_version=ip_version, table=table))
    def test_delete_route(self):
        scopes = ('global', 'site', 'link')
        tables = (None, 1, 254, 255)
        for cidr, scope, table in itertools.product(
                self.cidrs, scopes, tables):
            ip_version = utils.get_ip_version(cidr)
            self.device.route.add_route(cidr, table=table, scope=scope)
            self._assert_route(ip_version, cidr=cidr, scope=scope, table=table)
            self.device.route.delete_route(cidr, table=table, scope=scope)
            self._assert_route(ip_version, cidr=cidr, scope=scope, table=table,
                               not_in=True)
    def test_flush(self):
        # flush() must empty each (ip_version, table) combination that was
        # previously populated.
        tables = (None, 1, 200)
        ip_versions = (constants.IP_VERSION_4, constants.IP_VERSION_6)
        for cidr, table in itertools.product(self.cidrs, tables):
            self.device.route.add_route(cidr, table=table)
        for ip_version, table in itertools.product(ip_versions, tables):
            routes = self.device.route.list_routes(ip_version, table=table)
            self.assertGreater(len(routes), 0)
            self.device.route.flush(ip_version, table=table)
            routes = self.device.route.list_routes(ip_version, table=table)
            self.assertEqual([], routes)
class IpAddrCommandTestCase(functional_base.BaseSudoTestCase):
    """Functional tests for IPDevice.addr listing with scope filters."""
    def setUp(self):
        super(IpAddrCommandTestCase, self).setUp()
        self.namespace = self.useFixture(net_helpers.NamespaceFixture()).name
        ip_lib.IPWrapper(self.namespace).add_dummy('test_device')
        self.device = ip_lib.IPDevice('test_device', namespace=self.namespace)
        self.device.link.set_up()
    def test_list_with_scope(self):
        # One address per relevant scope (global/link/site/host), in both
        # IP families where it matters.
        scope_ip = [
            ('global', '192.168.100.1/24'),
            ('global', '2001:db8::1/64'),
            ('link', '192.168.101.1/24'),
            ('link', 'fe80::1:1/64'),
            ('site', 'fec0:0:0:f101::1/64'),
            ('host', '192.168.102.1/24')]
        for scope, _ip in scope_ip:
            self.device.addr.add(_ip, scope=scope)
        # Unfiltered listing must contain every configured CIDR...
        devices = self.device.addr.list()
        devices_cidr = {device['cidr'] for device in devices}
        for scope in scope_ip:
            self.assertIn(scope[1], devices_cidr)
        # ...and each scope-filtered listing must contain its own CIDR.
        for scope, _ip in scope_ip:
            devices_filtered = self.device.addr.list(scope=scope)
            devices_cidr = {device['cidr'] for device in devices_filtered}
            self.assertIn(_ip, devices_cidr)
class GetDevicesWithIpTestCase(functional_base.BaseSudoTestCase):
    """Functional tests for ip_lib.get_devices_with_ip().

    Creates 5 dummy devices and assigns 4 CIDRs (2xIPv4, 2xIPv6) to the
    first 3 of them; the helper must report exactly those addresses once
    loopback and kernel-assigned IPv6 link-local entries are filtered out.
    """
    def setUp(self):
        super().setUp()
        self.namespace = self.useFixture(net_helpers.NamespaceFixture()).name
        self.devices = []
        self.num_devices = 5
        self.num_devices_with_ip = 3
        for idx in range(self.num_devices):
            dev_name = 'test_device_%s' % idx
            ip_lib.IPWrapper(self.namespace).add_dummy(dev_name)
            device = ip_lib.IPDevice(dev_name, namespace=self.namespace)
            device.link.set_up()
            self.devices.append(device)
        self.cidrs = [netaddr.IPNetwork('10.10.0.0/24'),
                      netaddr.IPNetwork('10.20.0.0/24'),
                      netaddr.IPNetwork('2001:db8:1234:1111::/64'),
                      netaddr.IPNetwork('2001:db8:1234:2222::/64')]
        # Device idx gets each network's base address + idx.
        for idx in range(self.num_devices_with_ip):
            for cidr in self.cidrs:
                self.devices[idx].addr.add(str(cidr.ip + idx) + '/' +
                                           str(cidr.netmask.netmask_bits()))
    @staticmethod
    def _remove_loopback_interface(ip_addresses):
        # Drop the namespace's "lo" entries, which we did not configure.
        return [ipa for ipa in ip_addresses if
                ipa['name'] != ip_lib.LOOPBACK_DEVNAME]
    @staticmethod
    def _remove_ipv6_scope_link(ip_addresses):
        # Remove all IPv6 addresses with scope link (fe80::...), which the
        # kernel auto-assigns when the interface comes up.
        # BUG FIX: the previous condition used the bare truthiness of
        # get_ip_version() (always 4 or 6, both truthy), so it also dropped
        # IPv4 link-scope addresses; compare to IP_VERSION_6 explicitly.
        return [ipa for ipa in ip_addresses if not (
            ipa['scope'] == 'link' and
            utils.get_ip_version(ipa['cidr']) == constants.IP_VERSION_6)]
    @staticmethod
    def _pop_ip_address(ip_addresses, cidr):
        # Remove (at most) the first entry matching ``cidr`` in place.
        for idx, ip_address in enumerate(copy.deepcopy(ip_addresses)):
            if cidr == ip_address['cidr']:
                ip_addresses.pop(idx)
                return
    def test_get_devices_with_ip(self):
        ip_addresses = ip_lib.get_devices_with_ip(self.namespace)
        ip_addresses = self._remove_loopback_interface(ip_addresses)
        ip_addresses = self._remove_ipv6_scope_link(ip_addresses)
        self.assertEqual(self.num_devices_with_ip * len(self.cidrs),
                         len(ip_addresses))
        # Every configured address must be present exactly once.
        for idx in range(self.num_devices_with_ip):
            for cidr in self.cidrs:
                cidr = (str(cidr.ip + idx) + '/' +
                        str(cidr.netmask.netmask_bits()))
                self._pop_ip_address(ip_addresses, cidr)
        self.assertEqual(0, len(ip_addresses))
    def test_get_devices_with_ip_name(self):
        # Filtering by name: configured devices report only their own
        # addresses; unconfigured devices report none.
        for idx in range(self.num_devices_with_ip):
            dev_name = 'test_device_%s' % idx
            ip_addresses = ip_lib.get_devices_with_ip(self.namespace,
                                                      name=dev_name)
            ip_addresses = self._remove_loopback_interface(ip_addresses)
            ip_addresses = self._remove_ipv6_scope_link(ip_addresses)
            for cidr in self.cidrs:
                cidr = (str(cidr.ip + idx) + '/' +
                        str(cidr.netmask.netmask_bits()))
                self._pop_ip_address(ip_addresses, cidr)
            self.assertEqual(0, len(ip_addresses))
        for idx in range(self.num_devices_with_ip, self.num_devices):
            dev_name = 'test_device_%s' % idx
            ip_addresses = ip_lib.get_devices_with_ip(self.namespace,
                                                      name=dev_name)
            ip_addresses = self._remove_loopback_interface(ip_addresses)
            ip_addresses = self._remove_ipv6_scope_link(ip_addresses)
            self.assertEqual(0, len(ip_addresses))
class ListIpRoutesTestCase(functional_base.BaseSudoTestCase):
    """Functional test for listing multipath (ECMP) routes."""
    def setUp(self):
        # Two dummy devices, each with its own /24.
        super().setUp()
        self.namespace = self.useFixture(net_helpers.NamespaceFixture()).name
        self.device_names = ['test_device1', 'test_device2']
        self.device_ips = ['10.0.0.1/24', '10.0.1.1/24']
        self.device_cidrs = [netaddr.IPNetwork(ip_address).cidr for ip_address
                             in self.device_ips]
        for idx, dev in enumerate(self.device_names):
            ip_lib.IPWrapper(self.namespace).add_dummy(dev)
            device = ip_lib.IPDevice(dev, namespace=self.namespace)
            device.link.set_up()
            device.addr.add(self.device_ips[idx])
    def test_list_ip_routes_multipath(self):
        # Nexthops with and without explicit device/weight; the listing
        # must fill in the resolved device and the default weight (1).
        multipath = [
            {'device': self.device_names[0],
             'via': str(self.device_cidrs[0].ip + 100), 'weight': 10},
            {'device': self.device_names[1],
             'via': str(self.device_cidrs[1].ip + 100), 'weight': 20},
            {'via': str(self.device_cidrs[1].ip + 101), 'weight': 30},
            {'via': str(self.device_cidrs[1].ip + 102)}]
        ip_lib.add_ip_route(self.namespace, '1.2.3.0/24',
                            constants.IP_VERSION_4, via=multipath)
        routes = ip_lib.list_ip_routes(self.namespace, constants.IP_VERSION_4)
        # Expected values after kernel normalization.
        multipath[2]['device'] = self.device_names[1]
        multipath[3]['device'] = self.device_names[1]
        multipath[3]['weight'] = 1
        for route in (route for route in routes if
                      route['cidr'] == '1.2.3.0/24'):
            if not isinstance(route['via'], list):
                continue
            self.assertEqual(len(multipath), len(route['via']))
            # Every expected nexthop must appear among the listed ones;
            # the inner for/else fails fast on the first missing nexthop.
            for nexthop in multipath:
                for mp in route['via']:
                    if nexthop != mp:
                        continue
                    break
                else:
                    self.fail('Not matching route, routes: %s' % routes)
            return
        self.fail('Not matching route, routes: %s' % routes)
| 41.183246
| 79
| 0.606026
|
acfb83a2a93257a99eef6342ea387f9e05d052e7
| 827
|
py
|
Python
|
Second-handtradingplatform/TradingPlatform/showgoods/models.py
|
shangyajun6943/Second-hand
|
b8c423eeb559aed67856fb8a9f7130793d028118
|
[
"Apache-2.0"
] | null | null | null |
Second-handtradingplatform/TradingPlatform/showgoods/models.py
|
shangyajun6943/Second-hand
|
b8c423eeb559aed67856fb8a9f7130793d028118
|
[
"Apache-2.0"
] | null | null | null |
Second-handtradingplatform/TradingPlatform/showgoods/models.py
|
shangyajun6943/Second-hand
|
b8c423eeb559aed67856fb8a9f7130793d028118
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
# Create your models here.
class ProductManager(models.Manager):
    """Default manager that hides soft-deleted rows (isdelete != 0)."""
    def get_queryset(self):
        # Only expose products that have not been soft-deleted.
        return super().get_queryset().filter(isdelete=0)
class Product(models.Model):
    """A second-hand product listing.

    NOTE(review): assigning ``products`` as the only manager replaces the
    default ``objects`` manager, so all ORM access goes through
    ProductManager and soft-deleted rows are never returned -- confirm
    that is intended for admin/maintenance code paths.
    """
    pro_name=models.CharField(max_length=40,unique=True)
    # Price with 3 decimal places, up to 9999.999.
    price=models.DecimalField(max_digits=7,decimal_places=3)
    # 1 appears to be the "on sale" default -- TODO confirm status codes.
    pro_status=models.IntegerField(default=1)
    pro_type=models.IntegerField()
    pro_num=models.IntegerField()
    # Sales volume counter.
    pro_volume=models.IntegerField()
    pro_introduction=models.CharField(max_length=255)
    # Plain integer FK to the shop -- not a ForeignKey relation.
    shop_id=models.IntegerField()
    img_1=models.URLField()
    img_2=models.URLField()
    img_3=models.URLField()
    # Soft-delete flag: 0 = live, non-zero = deleted (see ProductManager).
    isdelete=models.IntegerField(default=0)
    products=ProductManager()
    def __str__(self):
        return self.pro_name
    class Meta:
        db_table='product'
| 30.62963
| 60
| 0.729141
|
acfb84159b3e0509bee7bd191e5a787f18b252f7
| 1,465
|
py
|
Python
|
anuga/visualiser_new/feature.py
|
samcom12/anuga_core
|
f4378114dbf02d666fe6423de45798add5c42806
|
[
"Python-2.0",
"OLDAP-2.7"
] | 136
|
2015-05-07T05:47:43.000Z
|
2022-02-16T03:07:40.000Z
|
anuga/visualiser_new/feature.py
|
samcom12/anuga_core
|
f4378114dbf02d666fe6423de45798add5c42806
|
[
"Python-2.0",
"OLDAP-2.7"
] | 184
|
2015-05-03T09:27:54.000Z
|
2021-12-20T04:22:48.000Z
|
anuga/visualiser_new/feature.py
|
samcom12/anuga_core
|
f4378114dbf02d666fe6423de45798add5c42806
|
[
"Python-2.0",
"OLDAP-2.7"
] | 70
|
2015-03-18T07:35:22.000Z
|
2021-11-01T07:07:29.000Z
|
from builtins import object
from types import FloatType
from vtk import vtkActor
class Feature(object):
    """Base class for a drawable visualiser feature backed by a vtkActor."""
    def __init__(self, colour=(0.5, 0.5, 0.5), opacity=1.0, dynamic=False):
        '''
        Parameters:
        colour: (float, float, float) - apply a single colour to the feature.
        opacity: float - 1.0 is opaque, 0.0 is invisible
        dynamic: boolean - this quantity changes with time
        '''
        self.actor = vtkActor()
        self.colour = colour
        self.drawn = False
        self.dynamic = dynamic
        self.opacity = opacity
        self.inRenderer = False
        self.visualiser = None

    def button(self, tk_component):
        '''
        Construct and return a Tkinter button that allows editing of
        the feature's parameters.
        '''
        raise NotImplementedError('Subclasses must override Feature::button!')

    def draw(self, renderer):
        '''
        Draw this object into the renderer, updating it if necessary.
        Adds the actor to the renderer only once; opacity is re-applied
        on every call.
        '''
        self.drawn = True
        if not self.inRenderer:
            self.inRenderer = True
            # BUG FIX: the original checked `type(...) is FloatType`, but
            # types.FloatType no longer exists on Python 3 (and the old
            # check missed float subclasses); use isinstance instead.
            if isinstance(self.colour[0], float):
                self.actor.GetProperty().SetColor(self.colour)
            renderer.AddActor(self.actor)
        self.actor.GetProperty().SetOpacity(self.opacity)

    def redraw(self, renderer):
        '''
        Force a redraw of this feature.
        '''
        self.drawn = False
        self.draw(renderer)
| 32.555556
| 78
| 0.59727
|
acfb85a3d921ec0a6fe3e655f24a2fc89ab5c8e9
| 1,620
|
py
|
Python
|
python/phonenumbers/shortdata/region_KE.py
|
vishnuku/python-phonenumbers
|
6ac2cdd06b7ccf709a8efb21629cf2c5f030e627
|
[
"Apache-2.0"
] | 3
|
2018-12-02T23:09:00.000Z
|
2018-12-02T23:16:59.000Z
|
python/phonenumbers/shortdata/region_KE.py
|
vishnuku/python-phonenumbers
|
6ac2cdd06b7ccf709a8efb21629cf2c5f030e627
|
[
"Apache-2.0"
] | null | null | null |
python/phonenumbers/shortdata/region_KE.py
|
vishnuku/python-phonenumbers
|
6ac2cdd06b7ccf709a8efb21629cf2c5f030e627
|
[
"Apache-2.0"
] | null | null | null |
"""Auto-generated file, do not edit by hand. KE metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata

# NOTE: generated from libphonenumber's short-number metadata for Kenya
# (region "KE"); regenerate from upstream rather than editing the patterns.
PHONE_METADATA_KE = PhoneMetadata(id='KE', country_code=None, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='[1-9]\\d{2,4}', possible_number_pattern='\\d{3,5}', possible_length=(3, 4, 5)),
    toll_free=PhoneNumberDesc(national_number_pattern='1(?:1(?:6|9\\d)|5(?:01|2[127]|6(?:29|6[67])))', possible_number_pattern='\\d{4,5}', example_number='1501', possible_length=(4, 5)),
    premium_rate=PhoneNumberDesc(national_number_pattern='909\\d{2}', possible_number_pattern='\\d{5}', example_number='90912', possible_length=(5,)),
    emergency=PhoneNumberDesc(national_number_pattern='112|114|999', possible_number_pattern='\\d{3}', example_number='999', possible_length=(3,)),
    short_code=PhoneNumberDesc(national_number_pattern='1(?:0(?:[07-9]|1[12]|400)|1(?:[02456]|9[0-579])|2[123]|3[01]|5(?:01|1[01]|2[0-2457]|33|55|6(?:29|6[67]))|65\\d{2}|[78]\\d|9(?:[02-9]\\d{2}|19))|(?:2[0-79]|3[0-29]|4[0-4])\\d{3}|5(?:[0-2]\\d|99)\\d{2}|(?:6[2357]|7[0-29])\\d{3}|8(?:[0-9]\\d{3}|988)|9(?:09\\d{2}|99)', possible_number_pattern='\\d{3,5}', example_number='116', possible_length=(3, 4, 5)),
    standard_rate=PhoneNumberDesc(),
    carrier_specific=PhoneNumberDesc(national_number_pattern='1(?:0400|3[01]|5(?:1[01]|2[25])|65\\d{2})|(?:2[0-79]|3[0-29]|4[0-4])\\d{3}|5(?:[0-2]\\d|99)\\d{2}|(?:6[2357]|7[0-29])\\d{3}|8(?:988|[0-9]\\d{3})|909\\d{2}', possible_number_pattern='\\d{3,5}', example_number='90912', possible_length=(3, 4, 5)),
    short_data=True)
| 124.615385
| 407
| 0.665432
|
acfb85bc2a411357700ad96ed8a3ca7a4fc8bc3f
| 1,733
|
py
|
Python
|
chapter_13/mnist_large/mnist_cnn_aug_fcn.py
|
haloway13/PracticalDeepLearningPython
|
c3760b17945c9389421c2970a3d16c6528fb7af6
|
[
"MIT"
] | 44
|
2021-02-25T00:52:04.000Z
|
2022-03-16T02:04:50.000Z
|
chapter_13/mnist_large/mnist_cnn_aug_fcn.py
|
rkneusel9/PracticalDeepLearningWithPython
|
561004e76b3e0828a59952874443384c31b6d84e
|
[
"MIT"
] | null | null | null |
chapter_13/mnist_large/mnist_cnn_aug_fcn.py
|
rkneusel9/PracticalDeepLearningWithPython
|
561004e76b3e0828a59952874443384c31b6d84e
|
[
"MIT"
] | 18
|
2021-03-18T11:22:18.000Z
|
2022-03-08T21:10:42.000Z
|
#
# file: mnist_cnn_fcn.py
#
# Create fully convolutional version of the MNIST CNN
# and populate with the weights from the version trained
# on the MNIST digits.
#
# RTK, 20-Oct-2019
# Last update: 20-Oct-2019
#
################################################################
import keras
from keras.utils import plot_model
from keras.datasets import mnist
from keras.models import Sequential, load_model
from keras.layers import Dense, Dropout, Flatten, Activation
from keras.layers import Conv1D, Conv2D, MaxPooling2D
from keras import backend as K
import numpy as np
# Load the weights from the base model
weights = load_model('mnist_cnn_base_aug_model.h5').get_weights()
# Build the same architecture replacing Dense layers
# with equivalent fully convolutional layers
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3), # input shape arbitrary
                 activation='relu', # but grayscale
                 input_shape=(None,None,1)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
# Dense layer becomes Conv2D w/12x12 kernel, 128 filters
# (a 28x28 input yields 12x12x64 feature maps at this point, so the
# 12x12 kernel consumes the whole map exactly like the Dense layer did)
model.add(Conv2D(128, (12,12), activation='relu'))
model.add(Dropout(0.5))
# Output layer also Conv2D but 1x1 w/10 "filters"
model.add(Conv2D(10, (1,1), activation='softmax'))
# Copy the trained weights remapping as necessary
# (layers 2, 3 and 5 are pooling/dropout and carry no weights)
model.layers[0].set_weights([weights[0], weights[1]])
model.layers[1].set_weights([weights[2], weights[3]])
# Dense kernel (9216, 128) is reshaped into a (12, 12, 64, 128) conv kernel
model.layers[4].set_weights([weights[4].reshape([12,12,64,128]), weights[5]])
model.layers[6].set_weights([weights[6].reshape([1,1,128,10]), weights[7]])
# Output the fully convolutional model
model.save('mnist_cnn_aug_fcn_model.h5')
| 77
| 0.703405
|
acfb85d1cf24a5a0ca79a95d49c0cb5293d5272a
| 275
|
py
|
Python
|
runtime/hetdesrun/backend/models/adapter.py
|
JulianGrote1904/hetida-designer
|
05350810eb3e0548c9d8a2a5a6afbf455635b5fd
|
[
"MIT"
] | null | null | null |
runtime/hetdesrun/backend/models/adapter.py
|
JulianGrote1904/hetida-designer
|
05350810eb3e0548c9d8a2a5a6afbf455635b5fd
|
[
"MIT"
] | null | null | null |
runtime/hetdesrun/backend/models/adapter.py
|
JulianGrote1904/hetida-designer
|
05350810eb3e0548c9d8a2a5a6afbf455635b5fd
|
[
"MIT"
] | null | null | null |
# pylint: disable=no-name-in-module
from pydantic import BaseModel
from hetdesrun.backend.service.utils import to_camel
class AdapterFrontendDto(BaseModel):
    """Adapter description as serialized for the frontend (camelCase keys)."""
    id: str
    name: str
    url: str
    internal_url: str
    class Config:
        # Accept/emit camelCase field aliases (e.g. "internalUrl").
        alias_generator = to_camel
| 18.333333
| 52
| 0.723636
|
acfb867d01bad5a19609aecf491caf98dc05366d
| 1,108
|
py
|
Python
|
xv_leak_tools/test_components/dns_tool/dns_tool_builder.py
|
UAEKondaya1/expressvpn_leak_testing
|
9e4cee899ac04f7820ac351fa55efdc0c01370ba
|
[
"MIT"
] | 219
|
2017-12-12T09:42:46.000Z
|
2022-03-13T08:25:13.000Z
|
xv_leak_tools/test_components/dns_tool/dns_tool_builder.py
|
UAEKondaya1/expressvpn_leak_testing
|
9e4cee899ac04f7820ac351fa55efdc0c01370ba
|
[
"MIT"
] | 11
|
2017-12-14T08:14:51.000Z
|
2021-08-09T18:37:45.000Z
|
xv_leak_tools/test_components/dns_tool/dns_tool_builder.py
|
UAEKondaya1/expressvpn_leak_testing
|
9e4cee899ac04f7820ac351fa55efdc0c01370ba
|
[
"MIT"
] | 45
|
2017-12-14T07:26:36.000Z
|
2022-03-11T09:36:56.000Z
|
from xv_leak_tools.exception import XVEx
from xv_leak_tools.factory import Builder
from xv_leak_tools.test_components.dns_tool.macos.macos_dns_tool import MacOSDNSTool
from xv_leak_tools.test_components.dns_tool.windows.windows_dns_tool import WindowsDNSTool
from xv_leak_tools.test_components.dns_tool.linux.linux_dns_tool import LinuxDNSTool
from xv_leak_tools.test_components.dns_tool.android.android_dns_tool import AndroidDNSTool
class DNSToolBuilder(Builder):
    """Factory for the platform-specific ``dns_tool`` component."""

    @staticmethod
    def name():
        # Component name used by the factory registry.
        return 'dns_tool'

    def build(self, device, config):
        """Build the DNS tool matching the device's OS.

        Raises XVEx for any unsupported platform. BUG FIX: previously a
        Windows device without cygwin fell through every branch and
        silently returned None; it now raises like other unsupported cases.
        """
        os_name = device.os_name()
        if os_name == 'macos':
            return MacOSDNSTool(device, config)
        elif os_name == 'windows':
            if device.is_cygwin():
                return WindowsDNSTool(device, config)
            raise XVEx(
                "Can't build 'dns_tool' component for Windows without cygwin")
        elif os_name == 'linux':
            return LinuxDNSTool(device, config)
        elif os_name == 'android':
            return AndroidDNSTool(device, config)
        else:
            raise XVEx("Don't know how to build 'dns_tool' component for OS {}".format(
                os_name))
| 41.037037
| 90
| 0.703069
|
acfb86a419f5295feeb505e82a533b7ddc040bea
| 6,538
|
py
|
Python
|
uob/tossdr/ucla/gnuradio-802.15.4-demodulation/src/examples/cc1k_cc2420_dualrx.py
|
tinyos-io/tinyos-3.x-contrib
|
3aaf036722a2afc0c0aad588459a5c3e00bd3c01
|
[
"BSD-3-Clause",
"MIT"
] | 4
|
2016-03-27T04:53:16.000Z
|
2020-04-28T13:53:19.000Z
|
uob/tossdr/ucla/gnuradio-802.15.4-demodulation/src/examples/cc1k_cc2420_dualrx.py
|
tinyos-io/tinyos-3.x-contrib
|
3aaf036722a2afc0c0aad588459a5c3e00bd3c01
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
uob/tossdr/ucla/gnuradio-802.15.4-demodulation/src/examples/cc1k_cc2420_dualrx.py
|
tinyos-io/tinyos-3.x-contrib
|
3aaf036722a2afc0c0aad588459a5c3e00bd3c01
|
[
"BSD-3-Clause",
"MIT"
] | 2
|
2015-01-04T15:07:38.000Z
|
2019-10-24T07:11:38.000Z
|
#!/usr/bin/env python
#
# Decodes CC1k and CC2420 packets simultaniously. We use
# 8-bit data streams, thus, the CC1K needs to be near the
# antenna of the USRP or we don't receive it (signal too
# weak).
#
# We assume that the FLEX2400 is on side A and FL400 on
# side B!
#
# Modified by: Thomas Schmid
#
from gnuradio import gr, eng_notation
from gnuradio import usrp
from gnuradio import ucla
from gnuradio.ucla_blks import ieee802_15_4_pkt
from gnuradio.ucla_blks import cc1k_sos_pkt
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import math, struct, time, sys
class stats(object):
    """Mutable packet-reception counters.

    npkts counts every packet seen; nright counts those that decoded OK.
    """
    def __init__(self):
        # Both counters start at zero and are bumped by the rx callbacks.
        self.npkts, self.nright = 0, 0
class rx_graph (gr.flow_graph):
    """GNU Radio flow graph receiving CC2420 (2.4GHz, side A) and CC1K
    (434MHz, side B) packets simultaneously from a dual-channel USRP.
    """
    def __init__(self, rx_callback_cc2420, rx_callback_cc1k):
        gr.flow_graph.__init__(self)
        cc2420_cordic_freq = 2475000000
        cc2420_data_rate = 2000000
        cc1k_cordic_freq = 434845200
        cc1k_data_rate = 38400
        cc1k_sps = 8
        payload_size = 128
        print "cc2420_cordic_freq = %s" % (eng_notation.num_to_str (cc2420_cordic_freq))
        print "cc1k_cordic_freq = %s" % (eng_notation.num_to_str (cc1k_cordic_freq))
        # ----------------------------------------------------------------
        # USRP decimation is derived from the CC2420 rate at 2 samples
        # per symbol.
        self.data_rate = cc2420_data_rate
        self.samples_per_symbol = 2
        self.usrp_decim = int (64e6 / self.samples_per_symbol / self.data_rate)
        self.fs = self.data_rate * self.samples_per_symbol
        payload_size = 128 # bytes
        print "usrp_decim = ", self.usrp_decim
        print "fs = ", eng_notation.num_to_str(self.fs)
        # Two-channel source: one daughterboard per radio.
        u = usrp.source_c (0, nchan=2)
        u.set_decim_rate(self.usrp_decim)
        self.subdev = (u.db[0][0], u.db[1][0])
        print "Using RX d'board %s" % (self.subdev[0].side_and_name(),)
        print "Using RX d'board %s" % (self.subdev[1].side_and_name(),)
        u.set_mux(0x2301)
        # 8-bit I/Q samples shifted by 8 bits (see file header note).
        width = 8
        shift = 8
        format = u.make_format(width, shift)
        r = u.set_format(format)
        #this is the cc2420 code
        u.tune(self.subdev[0]._which, self.subdev[0], cc2420_cordic_freq)
        u.tune(self.subdev[1]._which, self.subdev[1], cc1k_cordic_freq)
        u.set_pga(0, 0)
        u.set_pga(1, 0)
        self.u = u
        # deinterleave two channels from FPGA
        di = gr.deinterleave(gr.sizeof_gr_complex)
        # wire up the head of the chain
        self.connect(self.u, di)
        #self.u = gr.file_source(gr.sizeof_gr_complex, 'rx_test.dat')
        # CC2420 receiver (channel 0) behind a power squelch.
        self.packet_receiver = ieee802_15_4_pkt.ieee802_15_4_demod_pkts(self,
                                                callback=rx_callback_cc2420,
                                                sps=self.samples_per_symbol,
                                                symbol_rate=self.data_rate,
                                                threshold=-1)
        self.squelch = gr.pwr_squelch_cc(50, 1, 0, True)
        self.connect((di,0), self.squelch, self.packet_receiver)
        # CC1K receiver (channel 1); decimate further with keep_one_in_n to
        # match the much lower CC1K symbol rate.
        gain_mu = 0.002*self.samples_per_symbol
        self.packet_receiver_cc1k = cc1k_sos_pkt.cc1k_demod_pkts(self,
                                                callback=rx_callback_cc1k,
                                                sps=cc1k_sps,
                                                symbol_rate=cc1k_data_rate,
                                                p_size=payload_size,
                                                threshold=-1)
        #self.squelch2 = gr.pwr_squelch_cc(50, 1, 0, True)
        keep = gr.keep_one_in_n(gr.sizeof_gr_complex, 13)
        #self.connect((di, 1), keep, self.squelch2, self.packet_receiver_cc1k)
        self.connect((di, 1), keep, self.packet_receiver_cc1k)
def main ():
    # Build the dual receiver flow graph, wire in the two print-only packet
    # callbacks and block until the graph stops.
    def rx_callback_cc2420(ok, payload):
        # Called per received CC2420 frame; unpack the SOS header fields
        # from the raw payload and dump them.
        st_cc2420.npkts += 1
        print " ------------------------"
        if ok:
            st_cc2420.nright += 1
            (pktno,) = struct.unpack('!B', payload[2:3])
            print "ok = %5r pktno = %4d len(payload) = %4d cc2420 pkts: %d/%d" % (ok, pktno, len(payload),
                                                                                  st_cc2420.nright, st_cc2420.npkts)
            # for the sos head
            #(am_group, addr_mode, dst_addr, src_addr, module_dst, module_src, msg_type) = struct.unpack("HHHHBBB", payload[0:11])
            (am_group, module_dst, module_src, dst_addr, src_addr, msg_type) = struct.unpack("<BBBHHB", payload[11:19])
            msg_payload = payload[19:-2]
            (crc, ) = struct.unpack("!H", payload[-2:])
            print " am group: " + str(am_group)
            print " src_addr: "+str(src_addr)+" dst_addr: "+str(dst_addr)
            print " src_module: " + str(module_src) + " dst_module: " + str(module_dst)
            print " msg type: " + str(msg_type)
            print " msg: " + str(map(hex, map(ord, payload[20:-2])))
            print " crc: " + str(hex(crc))
        else:
            print "ok = %5r pkts: %d/%d" % (ok, st_cc2420.nright, st_cc2420.npkts)
        print " ------------------------"
        sys.stdout.flush()
    def rx_callback_cc1k(ok, am_group, src_addr, dst_addr, module_src, module_dst, msg_type, msg_payload, crc):
        # Called per received CC1K frame; fields arrive pre-parsed.
        # NOTE(review): the failure branch prints the cc2420 counters, not
        # the cc1k ones -- looks like a copy/paste slip, confirm intent.
        st_cc1k.npkts += 1
        if ok:
            st_cc1k.nright += 1
            print "ok = %5r cc1k pkts: %d/%d" % (ok, st_cc1k.nright, st_cc1k.npkts)
            print " am group: " + str(am_group)
            print " src_addr: "+str(src_addr)+" dst_addr: "+str(dst_addr)
            print " src_module: " + str(module_src) + " dst_module: " + str(module_dst)
            print " msg type: " + str(msg_type)
            print " msg: " + str(map(hex, map(ord, msg_payload)))
            print " crc: " + str(crc)
        else:
            print "ok = %5r pkts: %d/%d" % (ok, st_cc2420.nright, st_cc2420.npkts)
        print " ++++++++++++++++++++++++"
    # Shared counters captured by the callbacks above.
    st_cc1k = stats()
    st_cc2420 = stats()
    fg = rx_graph(rx_callback_cc2420, rx_callback_cc1k)
    fg.start()
    fg.wait()
# Script entry point.
if __name__ == '__main__':
    # insert this in your test code...
    #import os
    #print 'Blocked waiting for GDB attach (pid = %d)' % (os.getpid(),)
    #raw_input ('Press Enter to continue: ')
    main ()
| 39.149701
| 130
| 0.53732
|
acfb87b0f2d77596e988c013010a13ce2d233345
| 4,086
|
py
|
Python
|
main.py
|
Bilbobx182/PopPunkBot
|
3b100c731d436222bf0d7a404fa548a80448eb54
|
[
"MIT"
] | 1
|
2016-04-17T22:53:32.000Z
|
2016-04-17T22:53:32.000Z
|
main.py
|
Bilbobx182/PopPunkBot
|
3b100c731d436222bf0d7a404fa548a80448eb54
|
[
"MIT"
] | null | null | null |
main.py
|
Bilbobx182/PopPunkBot
|
3b100c731d436222bf0d7a404fa548a80448eb54
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import praw
import os
import spotipy
import spotipy.util as util
from bs4 import BeautifulSoup
import requests
import sys
# "Declaring" variables for the first time.
# Praw Variables (credentials are placeholders; fill in before running).
username = "eternal_atom"
tid = ""
prawClientID = "reddit client ID here"
# Spotify Variables
pid = "playlist here"
scope = 'playlist-modify-private'
Cid = "clientID here"
Cs = "client Secret here"
SRI = "http://localhost:8888/callback"
# onetrack gates "first search result only" logic in titleparse;
# track_ids accumulates Spotify track IDs pending playlist upload.
onetrack = 0
track_ids = []
sub = "metal"
r = praw.Reddit(client_id="", client_secret='', password='',
                user_agent='Reddit to Spotify converter', username="")
token = util.prompt_for_user_token(username, scope, Cid, Cs, SRI)
# Checking to see if a text file for song list(They're the spotify URI codes) is there
if not os.path.isfile("songlist.txt"):
    songlist = []
else:
    with open("songlist.txt", "r") as u:
        songlist = u.read()
    songlist = songlist.split("\n")
    # BUG FIX: under Python 3 ``filter`` returns a one-shot iterator, so
    # the repeated ``in`` membership tests performed later would consume
    # it and silently stop detecting duplicates; materialize it to a list.
    songlist = list(filter(None, songlist))
def titleparse(songdata, num):
    """Extract an "Artist - Track" string from a Reddit title or YouTube URL,
    strip bracketed annotations, then search Spotify and queue the first
    non-duplicate hit into the global track_ids batch.

    Parameters:
        songdata: the submission title (num == 0) or a YouTube URL (num == 1).
        num: 1 when songdata is a URL whose page <title> must be scraped.

    Side effects: appends to the global track_ids list, increments the global
    onetrack counter, and records newly added ids in songlist.txt.
    """
    info = songdata
    global onetrack
    global track_ids
    if num == 1:
        # Scrape the YouTube page title and drop the trailing "- YouTube" part
        # (everything after the last '-').
        response = requests.get(songdata)
        soup = BeautifulSoup(response.content, "html.parser")
        title = soup.title.string.split('-')
        info = "".join(title[:-1])
    if " - " in info:
        # BUG FIX: str.replace returns a new string; the original discarded
        # the result, so the separator was never actually removed.
        info = info.replace("-", "")
    # Strip a parenthesised annotation such as "(Official Video)".
    # BUG FIX: the original `any("(" or ")" in lis for lis in info)` evaluated
    # `"(" or ")"` to "(" and therefore never tested for ")".
    if "(" in info or ")" in info:
        start = info.find('(')
        end = info.find(')')
        if start != -1 and end != -1:
            info = info.replace(info[start:end + 1], "")
    # however some users submit it in a wrong format with [] instead so we remove those too
    if "[" in info or "]" in info:
        start = info.find('[')
        end = info.find(']')
        if start != -1 and end != -1:
            info = info.replace(info[start:end + 1], "")
    sp = spotipy.Spotify()
    # Creating the spotify query to see if a song is in it's database.
    with open("songlist.txt", "a") as f:
        if token:
            sp = spotipy.Spotify(auth=token)
            result = sp.search(info)
            # BUG FIX: the original gated on the global `onetrack <= 0`, which
            # is only reset after a batch flush, so at most one track was ever
            # queued per batch cycle. Per the stated intent ("only the first
            # hit, so it won't repeat through album or singles"), take exactly
            # the first search result per call instead.
            for t in result['tracks']['items']:
                temp = t['id']
                if temp in songlist:
                    # For Duplicates
                    print("ITS HERE ALREADY")
                else:
                    track_ids.append(temp)
                    onetrack = onetrack + 1
                    f.write(temp)
                    f.write("\n")
                    print("DONE: " + info)
                break  # only the first search hit is considered
# tells it to go to /r/poppunkers
subreddit = r.subreddit(sub)
counter = 0  # NOTE(review): never used below — confirm it can be removed
# the main loop of the program, it tells it as long as there's a submission in the top X amount loop around.
for submission in subreddit.top('week'):
    num = submission.title.find(' - ')
    if num != -1:
        # Title already looks like "Artist - Track": parse it directly.
        titleparse(submission.title, 0)
    elif ('youtu' in submission.url):
        # Otherwise fall back to scraping the linked YouTube page title.
        titleparse(submission.url, 1)
    if (len(track_ids) > 80):
        # Flush a full batch to the playlist, then reset the batch state.
        if token:
            sp = spotipy.Spotify(auth=token)
            sp.trace = False
            results = sp.user_playlist_add_tracks(username, pid, track_ids)
            print(len(track_ids))
            track_ids.clear()
            onetrack = 0
# dealing with tracks that wouldn't be submitted as the code above only deals with 80 per time but this is for smaller requests
if (len(track_ids) <= 80 and len(track_ids) > 0):
    token = util.prompt_for_user_token(username, scope, Cid, Cs, SRI)
    if token:
        sp = spotipy.Spotify(auth=token)
        sp.trace = False
        results = sp.user_playlist_add_tracks(username, pid, track_ids)
        track_ids.clear()
        # NOTE(review): this prints after clear(), so it always shows 0; the
        # in-loop flush above prints before clearing. Confirm intended order.
        print(len(track_ids))
| 30.492537
| 131
| 0.5744
|
acfb885d252ae82452ea56970a6a03a1783071e5
| 1,325
|
py
|
Python
|
webmap/models.py
|
ignacioHermosilla/geographynow
|
4eaea97afd27a4c096e64caa1e6a9d4417a3c4c7
|
[
"Unlicense",
"MIT"
] | null | null | null |
webmap/models.py
|
ignacioHermosilla/geographynow
|
4eaea97afd27a4c096e64caa1e6a9d4417a3c4c7
|
[
"Unlicense",
"MIT"
] | null | null | null |
webmap/models.py
|
ignacioHermosilla/geographynow
|
4eaea97afd27a4c096e64caa1e6a9d4417a3c4c7
|
[
"Unlicense",
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.db import models
import arrow
import urllib
class BaseModel(models.Model):
    """Abstract base model adding created/updated timestamps to subclasses."""
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save()
    created_at = models.DateTimeField(auto_now_add=True)  # set once at insert

    class Meta:
        abstract = True  # no table of its own; fields are inherited only
class Country(BaseModel):
    """A country with its geography video and optional "Flag Friday" video."""
    code = models.CharField(max_length=3, primary_key=True)  # country code, stored lowercase (presumably ISO — TODO confirm)
    name = models.CharField(max_length=100)
    geo_video_url = models.URLField()
    flag_friday_video_url = models.URLField(null=True)  # absent until a Flag Friday video exists

    def __str__(self):
        # Display as the upper-cased code, e.g. "USA".
        return self.code.upper()

    def get_absolute_url(self):
        """Return the URL-quoted detail page path, e.g. "/New%20Zealand"."""
        url = "/" + self.name
        # Python 2 API: urllib.quote (urllib.parse.quote on Python 3).
        return urllib.quote(url)

    @property
    def flag_icon(self):
        """Static path of the 32px flag icon matching this country's code."""
        return '/static/img/flags/flags_iso/32/{}.png'.format(self.code.lower())

    @classmethod
    def get_latest_video_info(cls):
        """Describe the newest video of the most recently created country.

        Returns a dict with the country name, the kind of video added last,
        and a humanized age string (via arrow).
        """
        latest_country = Country.objects.all().latest('created_at')
        video_type = 'country video'
        video_added_at = latest_country.created_at
        if latest_country.flag_friday_video_url:
            # A Flag Friday upload shows up as an update to the existing row.
            video_type = 'flag friday video'
            video_added_at = latest_country.updated_at
        return {
            'country': latest_country.name,
            'video_type': video_type,
            'video_added_at_humanized': arrow.get(video_added_at).humanize()
        }
| 28.804348
| 80
| 0.676981
|
acfb89c4783e7840b6bae03f6af65609d91bd0be
| 1,958
|
py
|
Python
|
py/projects/qaviton_tests/tests/execute_tests/unit_tests/test_crosstest.py
|
qaviton/test_repository
|
e9bf1bb12a138c6d92329ca4784f40767cb2ace9
|
[
"Apache-2.0"
] | 7
|
2018-11-20T15:44:27.000Z
|
2021-01-01T11:08:49.000Z
|
py/projects/qaviton_tests/tests/execute_tests/unit_tests/test_crosstest.py
|
Yativg/test_repository
|
7e5c018034d7bdf6a657325ef4fc34c13fdec2a7
|
[
"Apache-2.0"
] | 114
|
2018-11-17T20:55:24.000Z
|
2022-03-11T23:34:09.000Z
|
py/projects/qaviton_tests/tests/execute_tests/unit_tests/test_crosstest.py
|
Yativg/test_repository
|
7e5c018034d7bdf6a657325ef4fc34c13fdec2a7
|
[
"Apache-2.0"
] | 4
|
2018-11-20T15:56:11.000Z
|
2019-03-05T19:18:33.000Z
|
# test runner
import pytest
# things for the tests
from tests.data.platforms.supported_platforms import platforms
from tests.data.count_functions import from_zero_to_hero
# things to test
from qaviton.crosstest import WebDriver
from qaviton.crosstest import MobileDriver
from qaviton import crosstest
from qaviton import settings
from qaviton.utils import unique_id
@pytest.mark.parametrize('platform', platforms.get(), ids=unique_id.id)  # test per platform
def test_platforms(platform):
    """Every declared platform must target a supported browser or mobile OS."""
    if platform.platform["api"] == crosstest.API.WEB:
        assert platform.platform["desired_capabilities"]["browserName"] in ("firefox", "chrome", "internet explorer")
    elif platform.platform["api"] == crosstest.API.MOBILE:
        assert platform.platform["desired_capabilities"]['platformName'] in ("Android",)
    else:
        # Any other api value means the platform list itself is misconfigured.
        raise Exception("bad testing type value: {}".format(platform.platform["api"]))
@pytest.mark.parametrize('platform', platforms.get(), ids=unique_id.id)  # get test platform layer x4
@pytest.mark.parametrize('data', [from_zero_to_hero, from_zero_to_hero], ids=unique_id.id)  # get test data layer x2
def test_platforms_and_data(platform: crosstest.Platform, data, request):
    """Cross of platforms x data: platform.setup() must yield a consistent test case."""
    test = platform.setup(request)
    if test.platform["api"] == WebDriver:
        # Web platforms must point at the configured web command executor.
        assert test.platform["command_executor"] == platforms.web.command_executor
        assert test.platform["desired_capabilities"]["browserName"] in ("firefox", "chrome", "internet explorer")
    elif test.platform["api"] == MobileDriver:
        # Mobile platforms must point at the Appium/mobile driver URL.
        assert test.platform["command_executor"] == settings.mobiledriver_url
        assert test.platform["desired_capabilities"]['platformName'] in ("Android",)
    else:
        raise Exception("test case object not as expected: {}".format(vars(test)))
    # The data parameter must pass through parametrization unchanged.
    assert data == from_zero_to_hero
# health check
@pytest.mark.critical
def test_all_threads_are_done():
    """Sanity check: only the main thread should remain alive at this point."""
    import threading
    live_threads = threading.active_count()
    assert live_threads == 1
| 38.392157
| 117
| 0.740552
|
acfb89de4df1f174cb2e032ceee20bce55f05f42
| 1,812
|
py
|
Python
|
learner.py
|
cgomezsu/IntelligentAQM
|
c94b1ee339cfad46d5dc46e985b71ef73a85b6dd
|
[
"MIT"
] | 2
|
2020-08-08T02:02:50.000Z
|
2020-08-10T10:29:35.000Z
|
learner.py
|
cgomezsu/IntelligentAQM
|
c94b1ee339cfad46d5dc46e985b71ef73a85b6dd
|
[
"MIT"
] | 1
|
2020-06-18T05:10:22.000Z
|
2020-09-30T19:29:25.000Z
|
learner.py
|
cgomezsu/IntelligentAQM
|
c94b1ee339cfad46d5dc46e985b71ef73a85b6dd
|
[
"MIT"
] | 3
|
2019-09-14T09:51:49.000Z
|
2021-01-01T18:32:59.000Z
|
"""
v.e.s.
Tuner based on the Q-Learning algorithm.
MIT License
Copyright (c) 2019 Cesar A. Gomez
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import random
import numpy as np

random.seed(7) # For reproducibility

## Initializing learning parameters:
S = 100 # States
A = len(np.arange(50, 5050, 50)) # Actions (100 discrete values: 50, 100, ..., 5000)
Q = np.zeros(shape=[S,A], dtype=np.float32) # Q-Table, initialized to all zeros
gamma = 0.8  # discount factor for future rewards
alpha = 0.5  # learning rate
def update(state, ind_action, reward, nxt_state):
    """Apply one Q-learning backup for (state, action) and return the Q-table."""
    best_future = max(Q[nxt_state, :])
    old_value = Q[state, ind_action]
    # Standard Q-learning rule: blend old estimate with the bootstrapped target.
    Q[state, ind_action] = (1 - alpha) * old_value + alpha * (reward + gamma * best_future)
    return Q
def action(state, epsilon=0.1):
    """Epsilon-greedy action selection over the Q-table row for `state`."""
    # With probability epsilon, explore a uniformly random action index.
    if random.random() < epsilon:
        return random.randint(0, A - 1)
    # Otherwise exploit: pick the action with the highest Q-value.
    return np.argmax(Q[state, :])
| 30.711864
| 89
| 0.724062
|
acfb8b3f84a70c9a5b08a4baafb596e7ebc00d77
| 1,233
|
py
|
Python
|
alien.py
|
thepasterover/alien_invasion
|
75fff5f7981f31ddcda146039eef9d809eb081a2
|
[
"MIT"
] | 2
|
2019-05-13T16:16:20.000Z
|
2019-06-03T00:58:59.000Z
|
alien.py
|
thepasterover/alien_invasion
|
75fff5f7981f31ddcda146039eef9d809eb081a2
|
[
"MIT"
] | 1
|
2019-05-12T05:54:34.000Z
|
2020-04-20T22:16:31.000Z
|
alien.py
|
thepasterover/alien_invasion
|
75fff5f7981f31ddcda146039eef9d809eb081a2
|
[
"MIT"
] | null | null | null |
import pygame
from pygame.sprite import Sprite
class Alien(Sprite):
    """A single alien in the invading fleet."""

    def __init__(self, ai_settings, screen):
        """Load the alien sprite and place it near the top-left corner."""
        super(Alien, self).__init__()
        self.screen = screen
        self.ai_settings = ai_settings

        # Load the alien image and grab its bounding rect.
        self.image = pygame.image.load('images/alien.bmp')
        self.rect = self.image.get_rect()

        # Offset the starting position by one alien width/height.
        self.rect.x = self.rect.width
        self.rect.y = self.rect.height

        # Track the horizontal position as a float for sub-pixel movement.
        self.x = float(self.rect.x)

    def check_edges(self):
        """Return True if alien is at the edge of screen."""
        screen_rect = self.screen.get_rect()
        hit_right = self.rect.right >= screen_rect.right
        hit_left = self.rect.right <= 0
        if hit_right or hit_left:
            return True

    def update(self):
        """Advance the alien horizontally in the fleet's current direction."""
        step = self.ai_settings.alien_speed_factor * self.ai_settings.fleet_direction
        self.x += step
        self.rect.x = self.x

    def blitme(self):
        """Draw the alien at its current location."""
        self.screen.blit(self.image, self.rect)
| 30.825
| 90
| 0.621249
|
acfb8b899808d32f63fbad46ed80f34b23b947c5
| 32
|
py
|
Python
|
.scripts/helpers/__init__.py
|
tothradoslav/klaro
|
832d7b4d210d8473458ecc8a33ec0c5848200c05
|
[
"BSD-3-Clause"
] | 541
|
2020-07-17T08:03:27.000Z
|
2022-03-31T09:40:59.000Z
|
.scripts/helpers/__init__.py
|
tothradoslav/klaro
|
832d7b4d210d8473458ecc8a33ec0c5848200c05
|
[
"BSD-3-Clause"
] | 236
|
2020-07-17T12:43:52.000Z
|
2022-03-14T16:47:06.000Z
|
.scripts/helpers/__init__.py
|
tothradoslav/klaro
|
832d7b4d210d8473458ecc8a33ec0c5848200c05
|
[
"BSD-3-Clause"
] | 125
|
2018-05-28T20:09:48.000Z
|
2020-07-14T11:58:53.000Z
|
from .translate import * # noqa
| 16
| 31
| 0.71875
|
acfb8bfeafcf91376047d0cb5c1efc18de2b1a92
| 3,971
|
py
|
Python
|
reid/models/resnet.py
|
khko1022/bottom_up_reid
|
e60e86f5504f72b3ff8258702cd30c08f5a745f7
|
[
"MIT"
] | null | null | null |
reid/models/resnet.py
|
khko1022/bottom_up_reid
|
e60e86f5504f72b3ff8258702cd30c08f5a745f7
|
[
"MIT"
] | null | null | null |
reid/models/resnet.py
|
khko1022/bottom_up_reid
|
e60e86f5504f72b3ff8258702cd30c08f5a745f7
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from torch import nn
from torch.nn import functional as F
from torch.nn import init
import torchvision
# Public API of this module: the ResNet wrapper and its depth-specific factories.
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
           'resnet152']
class ResNet(nn.Module):
    """Torchvision ResNet backbone wrapper for feature extraction.

    Optionally freezes the early layers (conv1..layer2), cuts the network at
    the global-pooling stage, and/or appends an embedding head + classifier.
    """

    # Maps supported depths to their torchvision constructors.
    __factory = {
        18: torchvision.models.resnet18,
        34: torchvision.models.resnet34,
        50: torchvision.models.resnet50,
        101: torchvision.models.resnet101,
        152: torchvision.models.resnet152,
    }

    def __init__(self, depth, pretrained=True, cut_at_pooling=False,
                 num_features=0, norm=False, dropout=0, num_classes=0, fixed_layer=True):
        """Build the backbone.

        depth: one of 18/34/50/101/152 (keys of __factory).
        pretrained: load ImageNet weights; if False, reset_params() is called.
        cut_at_pooling: if True, forward() returns the raw conv feature map.
        num_features: >0 adds a Linear embedding head of that width.
        norm, dropout, num_classes: optional head configuration.
        fixed_layer: freeze conv1..layer2 (no gradients).
        """
        super(ResNet, self).__init__()
        self.depth = depth
        self.pretrained = pretrained
        self.cut_at_pooling = cut_at_pooling
        # Construct base (pretrained) resnet
        if depth not in ResNet.__factory:
            raise KeyError("Unsupported depth:", depth)
        self.base = ResNet.__factory[depth](pretrained=pretrained)
        if fixed_layer:
            # fix layers [conv1 ~ layer2]: stop at layer3 and disable grads
            # for everything encountered before it.
            fixed_names = []
            for name, module in self.base._modules.items():
                if name == "layer3":
                    assert fixed_names == ["conv1", "bn1", "relu", "maxpool", "layer1", "layer2"]
                    break
                fixed_names.append(name)
                for param in module.parameters():
                    param.requires_grad = False
        if not self.cut_at_pooling:
            self.num_features = num_features
            self.norm = norm
            self.dropout = dropout
            self.has_embedding = num_features > 0
            self.num_classes = num_classes
            out_planes = self.base.fc.in_features
            # Append new layers
            if self.has_embedding:
                # Linear + BatchNorm embedding head projecting to num_features.
                # NOTE: init.kaiming_normal/constant are the pre-0.4 PyTorch
                # APIs (now *_-suffixed); kept for compatibility with the rest
                # of this file.
                self.feat = nn.Linear(out_planes, self.num_features)
                self.feat_bn = nn.BatchNorm1d(self.num_features)
                init.kaiming_normal(self.feat.weight, mode='fan_out')
                init.constant(self.feat.bias, 0)
                init.constant(self.feat_bn.weight, 1)
                init.constant(self.feat_bn.bias, 0)
            else:
                # Change the num_features to CNN output channels
                self.num_features = out_planes
            if self.dropout > 0:
                self.drop = nn.Dropout(self.dropout)
            if self.num_classes > 0:
                self.classifier = nn.Linear(self.num_features, self.num_classes)
                init.normal(self.classifier.weight, std=0.001)
                init.constant(self.classifier.bias, 0)
        if not self.pretrained:
            self.reset_params()

    def forward(self, x):
        """Run the backbone up to (but excluding) avgpool/fc.

        Returns the conv feature map when cut_at_pooling, otherwise the
        globally average-pooled, flattened feature vector.
        """
        for name, module in self.base._modules.items():
            if name == 'avgpool':
                break
            x = module(x)
            # if name == 'layer2': print('module.parameters(): ', list(module.parameters())[0])
        if self.cut_at_pooling:
            return x
        # Global average pooling over the remaining spatial dims, then flatten.
        x = F.avg_pool2d(x, x.size()[2:])
        x = x.view(x.size(0), -1)
        return x

    def reset_params(self):
        """Re-initialize all conv/BN/linear weights (used when not pretrained)."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant(m.weight, 1)
                init.constant(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant(m.bias, 0)
def resnet18(**kwargs):
    """Factory: 18-layer ResNet; keyword options are forwarded to ResNet."""
    model = ResNet(18, **kwargs)
    return model


def resnet34(**kwargs):
    """Factory: 34-layer ResNet; keyword options are forwarded to ResNet."""
    model = ResNet(34, **kwargs)
    return model


def resnet50(**kwargs):
    """Factory: 50-layer ResNet; keyword options are forwarded to ResNet."""
    model = ResNet(50, **kwargs)
    return model


def resnet101(**kwargs):
    """Factory: 101-layer ResNet; keyword options are forwarded to ResNet."""
    model = ResNet(101, **kwargs)
    return model


def resnet152(**kwargs):
    """Factory: 152-layer ResNet; keyword options are forwarded to ResNet."""
    model = ResNet(152, **kwargs)
    return model
| 31.768
| 97
| 0.567615
|
acfb8cef12825631fb60202ab51bec21e7c89d90
| 3,481
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/parapedobacterluteus.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/parapedobacterluteus.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/parapedobacterluteus.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Parapedobacter luteus.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def ParapedobacterLuteus(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Return a new instance of the Parapedobacter luteus graph.

    The graph is automatically retrieved from the STRING repository.

    Parameters
    -------------------
    directed: bool = False
        Whether to load the graph as directed or undirected (default false).
    preprocess: bool = True
        Whether to preprocess the graph for optimal loading time and memory.
    load_nodes: bool = True
        Whether to load the nodes vocabulary or treat the nodes simply as a
        numeric range.
    verbose: int = 2
        Whether to show loading bars during retrieval and building.
    cache: bool = True
        Whether to download and preprocess the files only once.
    cache_path: str = "graphs"
        Where to store the downloaded graphs.
    version: str = "links.v11.5"
        The graph version to retrieve; one of "homology.v11.5",
        "physical.links.v11.5", "links.v11.5".
    additional_graph_kwargs: Dict
        Additional graph kwargs.

    Returns
    -----------------------
    Instance of the Parapedobacter luteus graph.

    References
    ---------------------
    Please cite the following if you use the data:

    ```bib
    @article{szklarczyk2019string,
        title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
        author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
        journal={Nucleic acids research},
        volume={47},
        number={D1},
        pages={D607--D613},
        year={2019},
        publisher={Oxford University Press}
    }
    ```
    """
    # Collect every retrieval option first, then build and invoke the loader.
    retrieval_options = dict(
        graph_name="ParapedobacterLuteus",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return AutomaticallyRetrievedGraph(**retrieval_options)()
| 33.152381
| 223
| 0.678541
|
acfb8d3f9a6e7329dcbdb752e498dfc4ad3019b2
| 855
|
py
|
Python
|
example/my13_boto3_requests_http.py
|
crazyfan5566/aws_lambda_python_example
|
d251ff340da1bd321d2134b599fd915366f5bace
|
[
"MIT"
] | 6
|
2017-02-13T05:28:26.000Z
|
2021-02-05T08:55:43.000Z
|
example/my13_boto3_requests_http.py
|
crazyfan5566/aws_lambda_python_example
|
d251ff340da1bd321d2134b599fd915366f5bace
|
[
"MIT"
] | null | null | null |
example/my13_boto3_requests_http.py
|
crazyfan5566/aws_lambda_python_example
|
d251ff340da1bd321d2134b599fd915366f5bace
|
[
"MIT"
] | 4
|
2017-05-11T06:58:46.000Z
|
2020-11-25T16:32:26.000Z
|
# encoding: utf-8
'''
使用 boto3 裡面的 requests lib 抓網頁
http://docs.python-requests.org/
'''
from __future__ import print_function
from botocore.vendored import requests
def lambda_handler(event, context):
    """Lambda entry point: fetch google.com with the requests lib vendored
    inside botocore and print basic details of the response."""
    target_url = 'https://www.google.com'
    # disable the security certificate check
    requests.packages.urllib3.disable_warnings()
    # Make a Request (certificate verification intentionally disabled above)
    resp = requests.get(target_url, verify=False)
    # dump response details
    print('status_code : %s ' % resp.status_code)
    print('content-type: %s ' % resp.headers['content-type'])
    print('encoding : %s ' % resp.encoding)
    print('text length : %s ' % len(resp.text))
    print('text : %s ' % resp.text[0:2000])
    return None
# Local execution helper: run the handler directly with no event/context.
if __name__ == '__main__':
    '''
    entry point
    '''
    lambda_handler(None, None)
| 22.5
| 61
| 0.604678
|
acfb8d58f11ab22b0a66eeed6e8931544b22b9be
| 5,548
|
py
|
Python
|
src/python/pants/backend/codegen/wire/java/wire_gen.py
|
billybecker/pants
|
ee101f3e360b712aceb9dacf7723aaf9b5567f04
|
[
"Apache-2.0"
] | 94
|
2015-01-15T21:24:20.000Z
|
2022-02-16T16:55:43.000Z
|
src/python/pants/backend/codegen/wire/java/wire_gen.py
|
billybecker/pants
|
ee101f3e360b712aceb9dacf7723aaf9b5567f04
|
[
"Apache-2.0"
] | 5
|
2020-07-18T01:04:43.000Z
|
2021-05-10T08:40:56.000Z
|
src/python/pants/backend/codegen/wire/java/wire_gen.py
|
billybecker/pants
|
ee101f3e360b712aceb9dacf7723aaf9b5567f04
|
[
"Apache-2.0"
] | 47
|
2015-02-25T02:20:07.000Z
|
2022-03-21T00:59:16.000Z
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
from twitter.common.collections import OrderedSet
from pants.backend.codegen.wire.java.java_wire_library import JavaWireLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.tasks.nailgun_task import NailgunTaskBase
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TargetDefinitionException, TaskError
from pants.base.workunit import WorkUnitLabel
from pants.java.jar.jar_dependency import JarDependency
from pants.source.filespec import globs_matches
from pants.task.simple_codegen_task import SimpleCodegenTask
from pants.util.dirutil import fast_relpath
logger = logging.getLogger(__name__)
class WireGen(NailgunTaskBase, SimpleCodegenTask):
    """Pants codegen task: generates Java sources from .proto files with
    Square's Wire compiler (run on the JVM via nailgun)."""

    sources_globs = ('**/*',)

    @classmethod
    def register_options(cls, register):
        """Register the wire runtime/compiler JVM tools for this task."""
        super(WireGen, cls).register_options(register)

        def wire_jar(name):
            # All wire artifacts share the same org and pinned revision.
            return JarDependency(org='com.squareup.wire', name=name, rev='1.8.0')

        cls.register_jvm_tool(register,
                              'javadeps',
                              classpath=[
                                  wire_jar(name='wire-runtime')
                              ],
                              classpath_spec='//:wire-runtime',
                              help='Runtime dependencies for wire-using Java code.')
        cls.register_jvm_tool(register, 'wire-compiler', classpath=[wire_jar(name='wire-compiler')])

    @classmethod
    def is_wire_compiler_jar(cls, jar):
        # True when `jar` is the wire-compiler artifact itself.
        return 'com.squareup.wire' == jar.org and 'wire-compiler' == jar.name

    def __init__(self, *args, **kwargs):
        """Generates Java files from .proto files using the Wire protobuf compiler."""
        super(WireGen, self).__init__(*args, **kwargs)

    def synthetic_target_type(self, target):
        # Generated code is exposed to the build graph as a plain java_library.
        return JavaLibrary

    def is_gentarget(self, target):
        # Only java_wire_library targets are handled by this task.
        return isinstance(target, JavaWireLibrary)

    def synthetic_target_extra_dependencies(self, target, target_workdir):
        # Generated code needs the wire runtime jar on its classpath.
        wire_runtime_deps_spec = self.get_options().javadeps
        return self.resolve_deps([wire_runtime_deps_spec])

    def _compute_sources(self, target):
        """Return (relative_sources, source_roots) for the target's protos.

        Sources are relativized to their source root; when the target sets
        `ordered_sources`, they are emitted in the literal order of the
        target's file globs (each literal must match exactly one file).
        """
        relative_sources = OrderedSet()
        source_roots = OrderedSet()

        def capture_and_relativize_to_source_root(source):
            # Record the source root (falling back to the target's root) and
            # return the path relative to it.
            source_root = self.context.source_roots.find_by_path(source)
            if not source_root:
                source_root = self.context.source_roots.find(target)
            source_roots.add(source_root.path)
            return fast_relpath(source, source_root.path)

        if target.payload.get_field_value('ordered_sources'):
            # Re-match the filespecs against the sources in order to apply them in the literal order
            # they were specified in.
            filespec = target.globs_relative_to_buildroot()
            excludes = filespec.get('excludes', [])
            for filespec in filespec.get('globs', []):
                sources = [s for s in target.sources_relative_to_buildroot()
                           if globs_matches([s], [filespec], excludes)]
                if len(sources) != 1:
                    raise TargetDefinitionException(
                        target,
                        'With `ordered_sources=True`, expected one match for each file literal, '
                        'but got: {} for literal `{}`.'.format(sources, filespec)
                    )
                relative_sources.add(capture_and_relativize_to_source_root(sources[0]))
        else:
            # Otherwise, use the default (unspecified) snapshot ordering.
            for source in target.sources_relative_to_buildroot():
                relative_sources.add(capture_and_relativize_to_source_root(source))
        return relative_sources, source_roots

    def format_args_for_target(self, target, target_workdir):
        """Calculate the arguments to pass to the command line for a single target."""
        args = ['--java_out={0}'.format(target_workdir)]

        # Add all params in payload to args
        relative_sources, source_roots = self._compute_sources(target)

        if target.payload.get_field_value('no_options'):
            args.append('--no_options')

        if target.payload.service_writer:
            args.append('--service_writer={}'.format(target.payload.service_writer))
            if target.payload.service_writer_options:
                for opt in target.payload.service_writer_options:
                    args.append('--service_writer_opt')
                    args.append(opt)

        registry_class = target.payload.registry_class
        if registry_class:
            args.append('--registry_class={0}'.format(registry_class))

        if target.payload.roots:
            args.append('--roots={0}'.format(','.join(target.payload.roots)))
        if target.payload.enum_options:
            args.append('--enum_options={0}'.format(','.join(target.payload.enum_options)))

        # One --proto_path per captured source root, then the proto files.
        for source_root in source_roots:
            args.append('--proto_path={0}'.format(os.path.join(get_buildroot(), source_root)))

        args.extend(relative_sources)
        return args

    def execute_codegen(self, target, target_workdir):
        """Invoke the Wire compiler for `target`; raise TaskError on failure."""
        args = self.format_args_for_target(target, target_workdir)
        if args:
            result = self.runjava(classpath=self.tool_classpath('wire-compiler'),
                                  main='com.squareup.wire.WireCompiler',
                                  args=args,
                                  workunit_name='compile',
                                  workunit_labels=[WorkUnitLabel.TOOL])
            if result != 0:
                raise TaskError('Wire compiler exited non-zero ({0})'.format(result))
| 38.797203
| 96
| 0.697729
|
acfb8dfb868ed8be1484f6d3b9384d11b2d98c1b
| 34
|
py
|
Python
|
4/.#main.py
|
diblaze/TDP002
|
41c9c2155e2ad8cc4047ea912edd463042d95362
|
[
"MIT"
] | null | null | null |
4/.#main.py
|
diblaze/TDP002
|
41c9c2155e2ad8cc4047ea912edd463042d95362
|
[
"MIT"
] | null | null | null |
4/.#main.py
|
diblaze/TDP002
|
41c9c2155e2ad8cc4047ea912edd463042d95362
|
[
"MIT"
] | null | null | null |
dylma900@localhost.6200:1473937161
| 34
| 34
| 0.911765
|
acfb8e1935bf4d3e67e90ec5d4de6c21ef4c604e
| 7,009
|
py
|
Python
|
btclib/tests/test_sighash.py
|
giubby84/btclib
|
0dd7e4e8ca43451a03b577fd7ec95715a1a21711
|
[
"MIT"
] | null | null | null |
btclib/tests/test_sighash.py
|
giubby84/btclib
|
0dd7e4e8ca43451a03b577fd7ec95715a1a21711
|
[
"MIT"
] | null | null | null |
btclib/tests/test_sighash.py
|
giubby84/btclib
|
0dd7e4e8ca43451a03b577fd7ec95715a1a21711
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (C) 2020 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"Tests for `btclib.sighash` module."
# test vector at https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki
from btclib import script, tx, tx_out
from btclib.sighash import (
_get_witness_v0_scriptCodes,
get_sighash,
segwit_v0_sighash,
)
def test_native_p2wpkh():
    """BIP-143 native P2WPKH vector: sighash of input 1 with SIGHASH_ALL (0x01)."""
    transaction = tx.Tx.deserialize(
        "0100000002fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f0000000000eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac11000000"
    )
    # The spent output: 6 BTC locked by a v0 witness-pubkey-hash scriptPubKey.
    previous_txout = tx_out.TxOut(
        nValue=600000000,
        scriptPubKey=script.deserialize("00141d0f172a0ecb48aee1be1f2687d2963ae33f71a1"),
    )
    sighash = get_sighash(transaction, previous_txout, 1, 0x01)
    # Expected digest taken from the BIP-143 reference vector.
    assert (
        sighash.hex()
        == "c37af31116d1b27caf68aae9e3ac82f1477929014d5b917657d0eb49478cb670"
    )
def test_wrapped_p2wpkh():
    """BIP-143 P2SH-wrapped P2WPKH vector: sighash of input 0 with SIGHASH_ALL."""
    transaction = tx.Tx.deserialize(
        "0100000001db6b1b20aa0fd7b23880be2ecbd4a98130974cf4748fb66092ac4d3ceb1a54770100000000feffffff02b8b4eb0b000000001976a914a457b684d7f0d539a46a45bbc043f35b59d0d96388ac0008af2f000000001976a914fd270b1ee6abcaea97fea7ad0402e8bd8ad6d77c88ac92040000"
    )
    # The P2SH redeem script (the v0 P2WPKH program) goes in the scriptSig.
    transaction.vin[0].scriptSig = script.deserialize(
        "001479091972186c449eb1ded22b78e40d009bdf0089"
    )
    # Spent output: 10 BTC locked by the wrapping P2SH scriptPubKey.
    previous_txout = tx_out.TxOut(
        nValue=1000000000,
        scriptPubKey=script.deserialize(
            "a9144733f37cf4db86fbc2efed2500b4f4e49f31202387"
        ),
    )
    sighash = get_sighash(transaction, previous_txout, 0, 0x01)
    # Expected digest taken from the BIP-143 reference vector.
    assert (
        sighash.hex()
        == "64f3b0f4dd2bb3aa1ce8566d220cc74dda9df97d8490cc81d89d735c92e59fb6"
    )
def test_native_p2wsh():
    """BIP-143 native P2WSH vector: SIGHASH_SINGLE (0x03) over both scriptCodes."""
    transaction = tx.Tx.deserialize(
        "0100000002fe3dc9208094f3ffd12645477b3dc56f60ec4fa8e6f5d67c565d1c6b9216b36e0000000000ffffffff0815cf020f013ed6cf91d29f4202e8a58726b1ac6c79da47c23d1bee0a6925f80000000000ffffffff0100f2052a010000001976a914a30741f8145e5acadf23f751864167f32e0963f788ac00000000"
    )
    # Witness of input 1: its last element is the witness script.
    transaction.vin[1].txinwitness = [
        "21026dccc749adc2a9d0d89497ac511f760f45c47dc5ed9cf352a58ac706453880aeadab210255a9626aebf5e29c0e6538428ba0d1dcf6ca98ffdf086aa8ced5e0d0215ea465ac"
    ]
    # NOTE(review): this uses tx.TxOut while the sibling tests use
    # tx_out.TxOut — presumably re-exported aliases; confirm.
    previous_txout = tx.TxOut(
        nValue=4900000000,
        scriptPubKey=script.deserialize(
            "00205d1b56b63d714eebe542309525f484b7e9d6f686b3781b6f61ef925d66d6f6a0"
        ),
    )
    # Digest for the first scriptCode (whole witness script).
    sighash = get_sighash(transaction, previous_txout, 1, 0x03)
    assert (
        sighash.hex()
        == "82dde6e4f1e94d02c2b7ad03d2115d691f48d064e9d52f58194a6637e4194391"
    )
    # Digest for the second scriptCode (split at CODESEPARATOR per BIP-143).
    script_code = _get_witness_v0_scriptCodes(
        script.deserialize(transaction.vin[1].txinwitness[-1])
    )[1]
    sighash = segwit_v0_sighash(
        script_code, transaction, 1, 0x03, previous_txout.nValue
    )
    assert (
        sighash.hex()
        == "fef7bd749cce710c5c052bd796df1af0d935e59cea63736268bcbe2d2134fc47"
    )
def test_native_p2wsh_2():
    """BIP-143 native P2WSH vector: SIGHASH_SINGLE|ANYONECANPAY (0x83), two inputs."""
    transaction = tx.Tx.deserialize(
        "0100000002e9b542c5176808107ff1df906f46bb1f2583b16112b95ee5380665ba7fcfc0010000000000ffffffff80e68831516392fcd100d186b3c2c7b95c80b53c77e77c35ba03a66b429a2a1b0000000000ffffffff0280969800000000001976a914de4b231626ef508c9a74a8517e6783c0546d6b2888ac80969800000000001976a9146648a8cd4531e1ec47f35916de8e259237294d1e88ac00000000"
    )
    # Each input's witness holds its witness script as the last element.
    transaction.vin[0].txinwitness = [
        "0063ab68210392972e2eb617b2388771abe27235fd5ac44af8e61693261550447a4c3e39da98ac"
    ]
    transaction.vin[1].txinwitness = [
        "5163ab68210392972e2eb617b2388771abe27235fd5ac44af8e61693261550447a4c3e39da98ac"
    ]
    previous_txout_1 = tx_out.TxOut(
        nValue=16777215,
        scriptPubKey=script.deserialize(
            "0020ba468eea561b26301e4cf69fa34bde4ad60c81e70f059f045ca9a79931004a4d"
        ),
    )
    # Input 0: digest computed over the full witness script.
    sighash = get_sighash(transaction, previous_txout_1, 0, 0x83)
    assert (
        sighash.hex()
        == "e9071e75e25b8a1e298a72f0d2e9f4f95a0f5cdf86a533cda597eb402ed13b3a"
    )
    # NOTE(review): tx.TxOut vs tx_out.TxOut — presumably aliases; confirm.
    previous_txout_2 = tx.TxOut(
        nValue=16777215,
        scriptPubKey=script.deserialize(
            "0020d9bbfbe56af7c4b7f960a70d7ea107156913d9e5a26b0a71429df5e097ca6537"
        ),
    )
    # Input 1: digest over the second scriptCode (split at CODESEPARATOR).
    script_code = _get_witness_v0_scriptCodes(
        script.deserialize(transaction.vin[1].txinwitness[-1])
    )[1]
    sighash = segwit_v0_sighash(
        script_code, transaction, 1, 0x83, previous_txout_2.nValue
    )
    assert (
        sighash.hex()
        == "cd72f1f1a433ee9df816857fad88d8ebd97e09a75cd481583eb841c330275e54"
    )
def test_wrapped_p2wsh():
    """Check sighash digests for one P2WSH input under every standard hash
    type: ALL/NONE/SINGLE (0x01/0x02/0x03) and their ANYONECANPAY variants
    (0x81/0x82/0x83).  Expected digests are fixed test vectors.
    """
    transaction = tx.Tx.deserialize(
        "010000000136641869ca081e70f394c6948e8af409e18b619df2ed74aa106c1ca29787b96e0100000000ffffffff0200e9a435000000001976a914389ffce9cd9ae88dcc0631e88a821ffdbe9bfe2688acc0832f05000000001976a9147480a33f950689af511e6e84c138dbbd3c3ee41588ac00000000"
    )
    # Witness script: a 6-of-6 multisig (last witness item).
    transaction.vin[0].txinwitness = [
        "56210307b8ae49ac90a048e9b53357a2354b3334e9c8bee813ecb98e99a7e07e8c3ba32103b28f0c28bfab54554ae8c658ac5c3e0ce6e79ad336331f78c428dd43eea8449b21034b8113d703413d57761b8b9781957b8c0ac1dfe69f492580ca4195f50376ba4a21033400f6afecb833092a9a21cfdf1ed1376e58c5d1f47de74683123987e967a8f42103a6d48b1131e94ba04d9737d61acdaa1322008af9602b3b14862c07a1789aac162102d8b661b0b3302ee2f162b09e07a55ad5dfbe673a9f01d9f0c19617681024306b56ae"
    ]
    previous_txout = tx_out.TxOut(
        nValue=987654321,
        scriptPubKey=script.deserialize(
            "0020a16b5755f7f6f96dbd65f5f0d6ab9418b89af4b1f14a1bb8a09062c35f0dcb54"
        ),
    )
    assert (
        get_sighash(transaction, previous_txout, 0, 0x01).hex()
        == "185c0be5263dce5b4bb50a047973c1b6272bfbd0103a89444597dc40b248ee7c"
    )
    assert (
        get_sighash(transaction, previous_txout, 0, 0x02).hex()
        == "e9733bc60ea13c95c6527066bb975a2ff29a925e80aa14c213f686cbae5d2f36"
    )
    assert (
        get_sighash(transaction, previous_txout, 0, 0x03).hex()
        == "1e1f1c303dc025bd664acb72e583e933fae4cff9148bf78c157d1e8f78530aea"
    )
    assert (
        get_sighash(transaction, previous_txout, 0, 0x81).hex()
        == "2a67f03e63a6a422125878b40b82da593be8d4efaafe88ee528af6e5a9955c6e"
    )
    assert (
        get_sighash(transaction, previous_txout, 0, 0x82).hex()
        == "781ba15f3779d5542ce8ecb5c18716733a5ee42a6f51488ec96154934e2c890a"
    )
    assert (
        get_sighash(transaction, previous_txout, 0, 0x83).hex()
        == "511e8e52ed574121fc1b654970395502128263f62662e076dc6baf05c2e6a99b"
    )
| 38.092391
| 424
| 0.774148
|
acfb8e988b7609486f1c3c5377e973f6cb60b8f8
| 1,102
|
py
|
Python
|
src/genbuildparams.py
|
kahunamoore/duktape
|
7fcb85345c180546a245e0fca927081ff7d24e9c
|
[
"MIT"
] | null | null | null |
src/genbuildparams.py
|
kahunamoore/duktape
|
7fcb85345c180546a245e0fca927081ff7d24e9c
|
[
"MIT"
] | null | null | null |
src/genbuildparams.py
|
kahunamoore/duktape
|
7fcb85345c180546a245e0fca927081ff7d24e9c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# Generate build parameter files based on build information.
# A C header is generated for C code, and a JSON file for
# build scripts etc which need to know the build config.
#
import os
import sys
import json
import optparse
import dukutil
if __name__ == '__main__':
    # Emit build parameters twice: a JSON file for build scripts and a C
    # header for compiled code.
    parser = optparse.OptionParser()
    parser.add_option('--version', dest='version')
    parser.add_option('--git-describe', dest='git_describe')
    parser.add_option('--out-json', dest='out_json')
    parser.add_option('--out-header', dest='out_header')
    (opts, args) = parser.parse_args()

    t = {
        'version': opts.version,
        'git_describe': opts.git_describe
    }

    # Fix: use context managers so the output files are closed (and flushed)
    # even if a write fails partway through.
    with open(opts.out_json, 'wb') as f:
        f.write(dukutil.json_encode(t).encode('ascii'))

    with open(opts.out_header, 'wb') as f:
        f.write('#ifndef DUK_BUILDPARAMS_H_INCLUDED\n')
        f.write('#define DUK_BUILDPARAMS_H_INCLUDED\n')
        f.write('/* automatically generated by genbuildparams.py, do not edit */\n')
        f.write('\n')
        f.write('/* DUK_VERSION is defined in duktape.h */')
        f.write('\n')
        f.write('#endif /* DUK_BUILDPARAMS_H_INCLUDED */\n')
| 26.238095
| 77
| 0.705989
|
acfb90f98f360435b5cbc30d709babdb00199757
| 294
|
py
|
Python
|
python/3bot-auth/config.py
|
crystaluniverse/crystal_filebrowser
|
714fedb53a0bf92c166f0e7ceb61f86077fcb456
|
[
"Apache-2.0"
] | null | null | null |
python/3bot-auth/config.py
|
crystaluniverse/crystal_filebrowser
|
714fedb53a0bf92c166f0e7ceb61f86077fcb456
|
[
"Apache-2.0"
] | null | null | null |
python/3bot-auth/config.py
|
crystaluniverse/crystal_filebrowser
|
714fedb53a0bf92c166f0e7ceb61f86077fcb456
|
[
"Apache-2.0"
] | null | null | null |
# Static configuration for the 3bot-auth service.
# NOTE(review): a private key is committed to source control here; it should
# be rotated and loaded from an environment variable / secret store instead.
config = {
    'threebot-privatekey': 'KBEs2Z7O7o/MzGdYTNR2FT0jjrRLAtFuWtZcnU8inWo=',
    'threebot-appid': 'filebrowser.jimber.org',
    'filebrowserUrl': 'https://filebrowser.jimber.org',
    'loginUrl': 'https://login.threefold.me',
    'kycUrl': 'https://openkyc.live',
    'port': '5001'  # kept as a string — consumers appear to expect str, confirm
}
| 36.75
| 74
| 0.670068
|
acfb9134c10e6e2ce77986ab9c21b20267dd9d33
| 107
|
py
|
Python
|
src/modeval/__init__.py
|
rubycookinson/seval
|
ed5ecbc0936191aa427ca3206def0d9ee9e4c04b
|
[
"MIT"
] | 1
|
2021-12-28T20:40:32.000Z
|
2021-12-28T20:40:32.000Z
|
src/modeval/__init__.py
|
rubycookinson/seval
|
ed5ecbc0936191aa427ca3206def0d9ee9e4c04b
|
[
"MIT"
] | 2
|
2021-12-24T18:27:43.000Z
|
2022-01-01T14:14:31.000Z
|
src/modeval/__init__.py
|
rubycookinson/seval
|
ed5ecbc0936191aa427ca3206def0d9ee9e4c04b
|
[
"MIT"
] | 1
|
2022-01-01T16:13:38.000Z
|
2022-01-01T16:13:38.000Z
|
from modeval.modeval import Parser, Ruleset, default_ruleset, scientific_ruleset, meval, parse_parentheses
| 53.5
| 106
| 0.859813
|
acfb923cc95c716d170adf353dc74bc40ac97046
| 1,464
|
py
|
Python
|
interpreter.py
|
kimi641/pyJVM
|
9e2b2392044a8ddd41ff8dda18a26e307776ae34
|
[
"MIT"
] | null | null | null |
interpreter.py
|
kimi641/pyJVM
|
9e2b2392044a8ddd41ff8dda18a26e307776ae34
|
[
"MIT"
] | 1
|
2021-01-21T09:38:24.000Z
|
2021-01-21T09:38:24.000Z
|
interpreter.py
|
kimi641/pyJVM
|
9e2b2392044a8ddd41ff8dda18a26e307776ae34
|
[
"MIT"
] | null | null | null |
import classfile
import instructions
import instructions.base
import rtda
import rtda.heap
def logInstruction(frame: "rtda.Frame", inst: "instructions.base.Instruction"):
    """Print a one-line trace of the instruction about to execute.

    Format: ``<class>.<method> #<pc> <instruction>``.

    Fix: the original f-string interpolated ``inst`` twice, duplicating the
    instruction in every trace line.  Annotations are stringified so the
    function object can be created without ``rtda`` already imported.
    """
    method = frame.Method()
    className = method.Class().Name()
    methodName = method.Name()
    pc = frame.Thread().PC()
    print(f"{className}.{methodName} #{pc} {inst}")
def loop(thread:rtda.Thread, logInst:bool):
    """Fetch/decode/execute loop: run bytecode on *thread* until its frame
    stack is empty.

    The current frame is re-read every iteration because Execute() may push
    or pop frames (method invocation / return).
    """
    reader = instructions.base.BytecodeReader()
    while True:
        frame = thread.CurrentFrame()
        pc = frame.NextPC
        # Record the pc of the instruction being executed on the thread.
        thread.SetPC(pc)
        #decode
        reader.Reset(frame.Method().Code(), pc)
        opcode = reader.ReadUint8()
        inst = instructions.NewInstruction(opcode)
        inst.FetchOperands(reader)
        # The reader has now advanced past the operands: that is the next pc.
        frame.SetNextPC(reader.PC)
        if logInst:
            logInstruction(frame, inst)
        #execute
        #print(f"pc:{pc} inst:{inst}")
        inst.Execute(frame)
        if thread.IsStackEmpty():
            break
def logFrames(thread:rtda.Thread):
    """Dump the remaining call stack: pop each frame and print its position."""
    while not thread.IsStackEmpty():
        top = thread.PopFrame()
        meth = top.Method()
        owner = meth.Class().Name()
        print(f">> pc:{top.NextPC} {owner}.{meth.Name()}{meth.Descriptor()}")
def interpret(method: rtda.heap.Method, logInst:bool):
    """Create a fresh thread, push an entry frame for *method*, and run it
    to completion with the interpreter loop."""
    jvm_thread = rtda.NewThread()
    entry_frame = jvm_thread.NewFrame(method)
    jvm_thread.PushFrame(entry_frame)
    loop(jvm_thread, logInst)
| 28.153846
| 87
| 0.635246
|
acfb942fc56e46824f67f03be2980fb187fe86c5
| 3,114
|
py
|
Python
|
python3-virtualenv/Lib/python3.6/site-packages/greenlet/tests/test_leaks.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | null | null | null |
python3-virtualenv/Lib/python3.6/site-packages/greenlet/tests/test_leaks.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | null | null | null |
python3-virtualenv/Lib/python3.6/site-packages/greenlet/tests/test_leaks.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | 1
|
2021-06-20T19:28:37.000Z
|
2021-06-20T19:28:37.000Z
|
import unittest
import sys
import gc
import time
import weakref
import greenlet
import threading
class ArgRefcountTests(unittest.TestCase):
    """Leak regression tests: greenlet switches must not retain references
    to their arguments, and greenlets owned by finished threads must become
    collectable.  The gc/thread tests depend on precise GIL/thread timing —
    see recycle_threads."""

    def test_arg_refs(self):
        # Switching the same tuple through a greenlet 100 times must leave
        # its refcount unchanged.
        args = ('a', 'b', 'c')
        refcount_before = sys.getrefcount(args)
        g = greenlet.greenlet(
            lambda *args: greenlet.getcurrent().parent.switch(*args))
        for i in range(100):
            g.switch(*args)
        self.assertEqual(sys.getrefcount(args), refcount_before)

    def test_kwarg_refs(self):
        # Expected refcount 2: the local name plus getrefcount's own argument.
        kwargs = {}
        g = greenlet.greenlet(
            lambda **kwargs: greenlet.getcurrent().parent.switch(**kwargs))
        for i in range(100):
            g.switch(**kwargs)
        self.assertEqual(sys.getrefcount(kwargs), 2)

    if greenlet.GREENLET_USE_GC:
        # These only work with greenlet gc support
        def recycle_threads(self):
            # By introducing a thread that does sleep we allow other threads,
            # that have triggered their __block condition, but did not have a
            # chance to deallocate their thread state yet, to finally do so.
            # The way it works is by requiring a GIL switch (different thread),
            # which does a GIL release (sleep), which might do a GIL switch
            # to finished threads and allow them to clean up.
            def worker():
                time.sleep(0.001)
            t = threading.Thread(target=worker)
            t.start()
            time.sleep(0.001)
            t.join()

        def test_threaded_leak(self):
            # Weak refs to each worker thread's main greenlet must die once
            # the threads finish, dead thread state is recycled, and gc runs.
            gg = []
            def worker():
                # only main greenlet present
                gg.append(weakref.ref(greenlet.getcurrent()))
            for i in range(2):
                t = threading.Thread(target=worker)
                t.start()
                t.join()
                del t
            greenlet.getcurrent() # update ts_current
            self.recycle_threads()
            greenlet.getcurrent() # update ts_current
            gc.collect()
            greenlet.getcurrent() # update ts_current
            for g in gg:
                self.assertTrue(g() is None)

        def test_threaded_adv_leak(self):
            # Same as test_threaded_leak, but each worker also runs (and
            # finishes) child greenlets that reference the worker's main
            # greenlet via the shared ll list.
            gg = []
            def worker():
                # main and additional *finished* greenlets
                ll = greenlet.getcurrent().ll = []
                def additional():
                    ll.append(greenlet.getcurrent())
                for i in range(2):
                    greenlet.greenlet(additional).switch()
                gg.append(weakref.ref(greenlet.getcurrent()))
            for i in range(2):
                t = threading.Thread(target=worker)
                t.start()
                t.join()
                del t
            greenlet.getcurrent() # update ts_current
            self.recycle_threads()
            greenlet.getcurrent() # update ts_current
            gc.collect()
            greenlet.getcurrent() # update ts_current
            for g in gg:
                self.assertTrue(g() is None)
| 36.209302
| 80
| 0.530829
|
acfb948764d84e5eca0a1d02a10f4d9bfdf9ded3
| 47,457
|
py
|
Python
|
v2.5.7/toontown/quest/QuestParser.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-01T15:46:43.000Z
|
2021-07-23T16:26:48.000Z
|
v2.5.7/toontown/quest/QuestParser.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 1
|
2019-06-29T03:40:05.000Z
|
2021-06-13T01:15:16.000Z
|
v2.5.7/toontown/quest/QuestParser.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-28T21:18:46.000Z
|
2021-02-25T06:37:25.000Z
|
import sys, os, tokenize, copy
from direct.interval.IntervalGlobal import *
from direct.directnotify import DirectNotifyGlobal
from direct.showbase import AppRunnerGlobal
from panda3d.core import *
from direct.showbase import DirectObject
import BlinkingArrows
from toontown.toon import ToonHeadFrame
from toontown.char import CharDNA
from toontown.suit import SuitDNA
from toontown.char import Char
from toontown.suit import Suit
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownBattleGlobals
from otp.speedchat import SpeedChatGlobals
from otp.nametag.NametagConstants import *
from toontown.ai import DistributedBlackCatMgr
from otp.otpbase import PythonUtil
from direct.interval.IntervalGlobal import *
from otp.nametag.NametagConstants import *
notify = DirectNotifyGlobal.directNotify.newCategory('QuestParser')
# scriptId -> list of tokenized script lines (populated by readFile/parseId).
lineDict = {}
# Names resolvable from any script; filled by init(), emptied by clear().
globalVarDict = {}
# scriptId currently being appended to by readFile (module-level parser state).
curId = None
def init():
    """Register the engine/avatar objects that quest scripts may reference
    by name (populates globalVarDict; undone by clear())."""
    globalVarDict.update({'render': render, 'camera': camera,
       'hidden': hidden,
       'aspect2d': aspect2d,
       'bottomLeft': base.a2dBottomLeft,
       'topLeft': base.a2dTopLeft,
       'topRight': base.a2dTopRight,
       'bottomRight': base.a2dBottomRight,
       'localToon': base.localAvatar,
       'laffMeter': base.localAvatar.laffMeter,
       'inventory': base.localAvatar.inventory,
       'bFriendsList': base.localAvatar.bFriendsList,
       'book': base.localAvatar.book,
       'bookPrevArrow': base.localAvatar.book.prevArrow,
       'bookNextArrow': base.localAvatar.book.nextArrow,
       'bookOpenButton': base.localAvatar.book.bookOpenButton,
       'bookCloseButton': base.localAvatar.book.bookCloseButton,
       'chatNormalButton': base.localAvatar.chatMgr.normalButton,
       'chatScButton': base.localAvatar.chatMgr.scButton,
       'arrows': BlinkingArrows.BlinkingArrows()})
def clear():
    """Forget every name registered by init()."""
    globalVarDict.clear()
def readFile(filename):
    """Tokenize a quest-script file and populate lineDict, one entry per ID.

    Every script must open with an 'ID' line; subsequent token lines are
    appended under the current script id (module-global ``curId``).
    """
    global curId
    scriptFile = StreamReader(vfs.openReadFile(filename, 1), 1)
    gen = tokenize.generate_tokens(scriptFile.readline)
    line = getLineOfTokens(gen)
    while line is not None:
        if line == []:
            # Blank/comment-only line: nothing to record.
            line = getLineOfTokens(gen)
            continue
        if line[0] == 'ID':
            parseId(line)
        else:
            if curId is None:
                notify.error('Every script must begin with an ID')
            else:
                lineDict[curId].append(line)
        line = getLineOfTokens(gen)
    return
def getLineOfTokens(gen):
    """Pull tokens from *gen* up to the next (logical or physical) newline.

    Returns a list of floats/strings, or None at end of input.  A '-' OP
    token negates the NUMBER token that follows it.
    """
    tokens = []
    nextNeg = 0
    token = gen.next()
    if token[0] == tokenize.ENDMARKER:
        return None
    while token[0] != tokenize.NEWLINE and token[0] != tokenize.NL:
        kind, text = token[0], token[1]
        if kind == tokenize.COMMENT:
            pass
        elif kind == tokenize.OP and text == '-':
            nextNeg = 1
        elif kind == tokenize.NUMBER:
            value = float(text)
            if nextNeg:
                tokens.append(-value)
                nextNeg = 0
            else:
                tokens.append(value)
        elif kind == tokenize.STRING:
            tokens.append(text)
        elif kind == tokenize.NAME:
            tokens.append(text)
        else:
            notify.warning('Ignored token type: %s on line: %s' % (tokenize.tok_name[kind], token[2][0]))
        token = gen.next()
    return tokens
def parseId(line):
    """Begin a new script block: remember its id and allocate its line list."""
    global curId
    curId = line[1]
    notify.debug('Setting current scriptId to: %s' % curId)
    if questDefined(curId):
        notify.error('Already defined scriptId: %s' % curId)
        return
    lineDict[curId] = []
def questDefined(scriptId):
    """Return True if a script with *scriptId* has already been registered.

    Uses the ``in`` operator instead of ``dict.has_key`` (deprecated in
    Python 2, removed in Python 3); semantics are identical.
    """
    return scriptId in lineDict
class NPCMoviePlayer(DirectObject.DirectObject):
    def __init__(self, scriptId, toon, npc):
        """Prepare a scripted NPC movie for *toon* talking to *npc*.

        scriptId selects the token list previously parsed into lineDict.
        """
        DirectObject.DirectObject.__init__(self)
        self.scriptId = scriptId
        self.toon = toon
        self.isLocalToon = self.toon == base.localAvatar
        self.npc = npc
        self.privateVarDict = {}  # movie-private script variables (see setVar)
        self.toonHeads = {}  # head-frame widgets, destroyed in cleanup()
        self.chars = []  # Char actors tracked for unload (parseLoadClassicChar)
        self.uniqueId = 'scriptMovie_' + str(self.scriptId) + '_' + str(toon.getDoId()) + '_' + str(npc.getDoId())
        self.setVar('toon', self.toon)
        self.setVar('npc', self.npc)
        self.chapterDict = {}  # event name -> list of pending interval tracks
        self.timeoutTrack = None
        self.currentTrack = None
        return
def getVar(self, varName):
if varName[0] == '"':
varName = varName[1:-1]
return globals()[varName]
if self.privateVarDict.has_key(varName):
return self.privateVarDict[varName]
if globalVarDict.has_key(varName):
return globalVarDict[varName]
if varName.find('tomDialogue') > -1 or varName.find('harryDialogue') > -1:
notify.warning('%s getting referenced. Tutorial Ack: %d Place: %s' % (varName, base.localAvatar.tutorialAck, base.cr.playGame.hood))
return
notify.error('Variable not defined: %s' % varName)
return
def delVar(self, varName):
if self.privateVarDict.has_key(varName):
del self.privateVarDict[varName]
else:
if globalVarDict.has_key(varName):
del globalVarDict[varName]
else:
notify.warning('Variable not defined: %s' % varName)
    def setVar(self, varName, var):
        """Bind *varName* to *var* in this movie's private variable scope."""
        self.privateVarDict[varName] = var
    def cleanup(self):
        """Tear the movie down: stop the running track, drop event hooks and
        tasks, free toon-head UI and loaded Chars, and break reference
        cycles by deleting the bookkeeping attributes."""
        if self.currentTrack:
            self.currentTrack.pause()
            self.currentTrack = None
        self.ignoreAll()
        taskMgr.remove(self.uniqueId)
        for toonHeadFrame in self.toonHeads.values():
            toonHeadFrame.destroy()
        # __unloadChar removes from self.chars, so drain from the front.
        while self.chars:
            self.__unloadChar(self.chars[0])
        del self.toonHeads
        del self.privateVarDict
        del self.chapterDict
        del self.toon
        del self.npc
        del self.timeoutTrack
        return
def __unloadChar(self, char):
char.removeActive()
if char.style.name == 'mk' or char.style.name == 'mn':
char.stopEarTask()
char.delete()
self.chars.remove(char)
def timeout(self, fFinish=0):
if self.timeoutTrack:
if fFinish:
self.timeoutTrack.finish()
else:
self.timeoutTrack.start()
    def finishMovie(self):
        """Tell the NPC the quest movie is over (FINISH_QUEST_MOVIE token)."""
        self.npc.finishMovie(self.toon, self.isLocalToon, 0.0)
def playNextChapter(self, eventName, timeStamp=0.0):
trackList = self.chapterDict[eventName]
if trackList:
self.currentTrack = trackList.pop(0)
self.currentTrack.start()
else:
notify.debug('Movie ended waiting for an event (%s)' % eventName)
    def play(self):
        """Compile this movie's script lines into interval tracks and start
        playback.

        Lines are grouped into "chapters": each *_CONFIRM / WAIT_EVENT /
        SHOW_THROW_SQUIRT_PREVIEW command seals the current chapter under
        the current event name, and the next chapter starts when that event
        fires (playNextChapter).  Lines prefixed with UPON_TIMEOUT go into a
        separate timeout track instead (see timeout()).
        """
        lineNum = 0
        self.currentEvent = 'start'
        lines = lineDict.get(self.scriptId)
        if lines is None:
            notify.error('No movie defined for scriptId: %s' % self.scriptId)
        chapterList = []
        timeoutList = []
        for line in lines:
            lineNum += 1
            command = line[0]
            # UPON_TIMEOUT is a prefix: strip it and redirect this line's
            # interval into the timeout track.
            if command == 'UPON_TIMEOUT':
                uponTimeout = 1
                iList = timeoutList
                line = line[1:]
                command = line[0]
            else:
                uponTimeout = 0
                iList = chapterList
            if command == 'CALL':
                if uponTimeout:
                    self.notify.error('CALL not allowed in an UPON_TIMEOUT')
                iList.append(self.parseCall(line))
                continue
            else:
                if command == 'DEBUG':
                    iList.append(self.parseDebug(line))
                    continue
                else:
                    if command == 'WAIT':
                        if uponTimeout:
                            self.notify.error('WAIT not allowed in an UPON_TIMEOUT')
                        iList.append(self.parseWait(line))
                        continue
                    else:
                        if command == 'CHAT':
                            iList.append(self.parseChat(line))
                            continue
                        else:
                            if command == 'CLEAR_CHAT':
                                iList.append(self.parseClearChat(line))
                                continue
                            else:
                                if command == 'FINISH_QUEST_MOVIE':
                                    chapterList.append(Func(self.finishMovie))
                                    continue
                                else:
                                    if command == 'CHAT_CONFIRM':
                                        if uponTimeout:
                                            self.notify.error('CHAT_CONFIRM not allowed in an UPON_TIMEOUT')
                                        avatarName = line[1]
                                        avatar = self.getVar(avatarName)
                                        nextEvent = avatar.uniqueName('doneChatPage')
                                        iList.append(Func(self.acceptOnce, nextEvent, self.playNextChapter, [nextEvent]))
                                        iList.append(self.parseChatConfirm(line))
                                        self.closePreviousChapter(iList)
                                        chapterList = []
                                        self.currentEvent = nextEvent
                                        continue
                                    else:
                                        if command == 'LOCAL_CHAT_CONFIRM':
                                            if uponTimeout:
                                                self.notify.error('LOCAL_CHAT_CONFIRM not allowed in an UPON_TIMEOUT')
                                            avatarName = line[1]
                                            avatar = self.getVar(avatarName)
                                            nextEvent = avatar.uniqueName('doneChatPage')
                                            iList.append(Func(self.acceptOnce, nextEvent, self.playNextChapter, [nextEvent]))
                                            iList.append(self.parseLocalChatConfirm(line))
                                            self.closePreviousChapter(iList)
                                            chapterList = []
                                            self.currentEvent = nextEvent
                                            continue
                                        else:
                                            if command == 'LOCAL_CHAT_PERSIST':
                                                iList.append(self.parseLocalChatPersist(line))
                                                continue
                                            else:
                                                if command == 'LOCAL_CHAT_TO_CONFIRM':
                                                    if uponTimeout:
                                                        self.notify.error('LOCAL_CHAT_TO_CONFIRM not allowed in an UPON_TIMEOUT')
                                                    avatarName = line[1]
                                                    avatar = self.getVar(avatarName)
                                                    nextEvent = avatar.uniqueName('doneChatPage')
                                                    iList.append(Func(self.acceptOnce, nextEvent, self.playNextChapter, [nextEvent]))
                                                    iList.append(self.parseLocalChatToConfirm(line))
                                                    self.closePreviousChapter(iList)
                                                    chapterList = []
                                                    self.currentEvent = nextEvent
                                                    continue
                                                else:
                                                    if command == 'CC_CHAT_CONFIRM':
                                                        if uponTimeout:
                                                            self.notify.error('CC_CHAT_CONFIRM not allowed in an UPON_TIMEOUT')
                                                        avatarName = line[1]
                                                        avatar = self.getVar(avatarName)
                                                        nextEvent = avatar.uniqueName('doneChatPage')
                                                        iList.append(Func(self.acceptOnce, nextEvent, self.playNextChapter, [nextEvent]))
                                                        iList.append(self.parseCCChatConfirm(line))
                                                        self.closePreviousChapter(iList)
                                                        chapterList = []
                                                        self.currentEvent = nextEvent
                                                        continue
                                                    else:
                                                        if command == 'CC_CHAT_TO_CONFIRM':
                                                            if uponTimeout:
                                                                self.notify.error('CC_CHAT_TO_CONFIRM not allowed in an UPON_TIMEOUT')
                                                            avatarName = line[1]
                                                            avatar = self.getVar(avatarName)
                                                            nextEvent = avatar.uniqueName('doneChatPage')
                                                            iList.append(Func(self.acceptOnce, nextEvent, self.playNextChapter, [nextEvent]))
                                                            iList.append(self.parseCCChatToConfirm(line))
                                                            self.closePreviousChapter(iList)
                                                            chapterList = []
                                                            self.currentEvent = nextEvent
                                                            continue
            # The remaining commands only build UI/scene intervals for the
            # local client.
            if self.isLocalToon:
                if command == 'LOAD':
                    self.parseLoad(line)
                elif command == 'LOAD_SFX':
                    self.parseLoadSfx(line)
                elif command == 'LOAD_DIALOGUE':
                    self.parseLoadDialogue(line)
                elif command == 'LOAD_CC_DIALOGUE':
                    self.parseLoadCCDialogue(line)
                elif command == 'LOAD_CHAR':
                    self.parseLoadChar(line)
                elif command == 'LOAD_CLASSIC_CHAR':
                    self.parseLoadClassicChar(line)
                elif command == 'UNLOAD_CHAR':
                    iList.append(self.parseUnloadChar(line))
                elif command == 'LOAD_SUIT':
                    self.parseLoadSuit(line)
                elif command == 'SET':
                    self.parseSet(line)
                elif command == 'LOCK_LOCALTOON':
                    iList.append(self.parseLockLocalToon(line))
                elif command == 'FREE_LOCALTOON':
                    iList.append(self.parseFreeLocalToon(line))
                elif command == 'REPARENTTO':
                    iList.append(self.parseReparent(line))
                elif command == 'WRTREPARENTTO':
                    iList.append(self.parseWrtReparent(line))
                elif command == 'SHOW':
                    iList.append(self.parseShow(line))
                elif command == 'HIDE':
                    iList.append(self.parseHide(line))
                elif command == 'POS':
                    iList.append(self.parsePos(line))
                elif command == 'HPR':
                    iList.append(self.parseHpr(line))
                elif command == 'SCALE':
                    iList.append(self.parseScale(line))
                elif command == 'POSHPRSCALE':
                    iList.append(self.parsePosHprScale(line))
                elif command == 'COLOR':
                    iList.append(self.parseColor(line))
                elif command == 'COLOR_SCALE':
                    iList.append(self.parseColorScale(line))
                elif command == 'ADD_LAFFMETER':
                    iList.append(self.parseAddLaffMeter(line))
                elif command == 'LAFFMETER':
                    iList.append(self.parseLaffMeter(line))
                elif command == 'OBSCURE_LAFFMETER':
                    iList.append(self.parseObscureLaffMeter(line))
                elif command == 'ARROWS_ON':
                    iList.append(self.parseArrowsOn(line))
                elif command == 'ARROWS_OFF':
                    iList.append(self.parseArrowsOff(line))
                elif command == 'START_THROB':
                    iList.append(self.parseStartThrob(line))
                elif command == 'STOP_THROB':
                    iList.append(self.parseStopThrob(line))
                elif command == 'SHOW_FRIENDS_LIST':
                    iList.append(self.parseShowFriendsList(line))
                elif command == 'HIDE_FRIENDS_LIST':
                    iList.append(self.parseHideFriendsList(line))
                elif command == 'SHOW_BOOK':
                    iList.append(self.parseShowBook(line))
                elif command == 'HIDE_BOOK':
                    iList.append(self.parseHideBook(line))
                elif command == 'ENABLE_CLOSE_BOOK':
                    iList.append(self.parseEnableCloseBook(line))
                elif command == 'OBSCURE_BOOK':
                    iList.append(self.parseObscureBook(line))
                elif command == 'OBSCURE_CHAT':
                    iList.append(self.parseObscureChat(line))
                elif command == 'ADD_INVENTORY':
                    iList.append(self.parseAddInventory(line))
                elif command == 'SET_INVENTORY':
                    iList.append(self.parseSetInventory(line))
                elif command == 'SET_INVENTORY_YPOS':
                    iList.append(self.parseSetInventoryYPos(line))
                elif command == 'SET_INVENTORY_DETAIL':
                    iList.append(self.parseSetInventoryDetail(line))
                elif command == 'PLAY_SFX':
                    iList.append(self.parsePlaySfx(line))
                elif command == 'STOP_SFX':
                    iList.append(self.parseStopSfx(line))
                elif command == 'PLAY_ANIM':
                    iList.append(self.parsePlayAnim(line))
                elif command == 'LOOP_ANIM':
                    iList.append(self.parseLoopAnim(line))
                elif command == 'LERP_POS':
                    iList.append(self.parseLerpPos(line))
                elif command == 'LERP_HPR':
                    iList.append(self.parseLerpHpr(line))
                elif command == 'LERP_SCALE':
                    iList.append(self.parseLerpScale(line))
                elif command == 'LERP_POSHPRSCALE':
                    iList.append(self.parseLerpPosHprScale(line))
                elif command == 'LERP_COLOR':
                    iList.append(self.parseLerpColor(line))
                elif command == 'LERP_COLOR_SCALE':
                    iList.append(self.parseLerpColorScale(line))
                elif command == 'DEPTH_WRITE_ON':
                    iList.append(self.parseDepthWriteOn(line))
                elif command == 'DEPTH_WRITE_OFF':
                    iList.append(self.parseDepthWriteOff(line))
                elif command == 'DEPTH_TEST_ON':
                    iList.append(self.parseDepthTestOn(line))
                elif command == 'DEPTH_TEST_OFF':
                    iList.append(self.parseDepthTestOff(line))
                elif command == 'SET_BIN':
                    iList.append(self.parseSetBin(line))
                elif command == 'CLEAR_BIN':
                    iList.append(self.parseClearBin(line))
                elif command == 'TOON_HEAD':
                    iList.append(self.parseToonHead(line))
                elif command == 'SEND_EVENT':
                    iList.append(self.parseSendEvent(line))
                elif command == 'FUNCTION':
                    iList.append(self.parseFunction(line))
                elif command == 'BLACK_CAT_LISTEN':
                    iList.append(self.parseBlackCatListen(line))
                elif command == 'SHOW_THROW_SQUIRT_PREVIEW':
                    if uponTimeout:
                        self.notify.error('SHOW_THROW_SQUIRT_PREVIEW not allowed in an UPON_TIMEOUT')
                    nextEvent = 'doneThrowSquirtPreview'
                    iList.append(Func(self.acceptOnce, nextEvent, self.playNextChapter, [nextEvent]))
                    iList.append(self.parseThrowSquirtPreview(line))
                    self.closePreviousChapter(iList)
                    chapterList = []
                    self.currentEvent = nextEvent
                elif command == 'WAIT_EVENT':
                    if uponTimeout:
                        self.notify.error('WAIT_EVENT not allowed in an UPON_TIMEOUT')
                    nextEvent = self.parseWaitEvent(line)
                    def proceed(self=self, nextEvent=nextEvent):
                        self.playNextChapter(nextEvent)
                    def handleEvent(*args):
                        proceed = args[0]
                        proceed()
                    iList.append(Func(self.acceptOnce, nextEvent, handleEvent, [proceed]))
                    self.closePreviousChapter(iList)
                    chapterList = []
                    self.currentEvent = nextEvent
                elif command == 'SET_MUSIC_VOLUME':
                    iList.append(self.parseSetMusicVolume(line))
                else:
                    notify.warning('Unknown command token: %s for scriptId: %s on line: %s' % (command, self.scriptId, lineNum))
        # Seal whatever remains as the final chapter, then kick playback off.
        self.closePreviousChapter(chapterList)
        if timeoutList:
            self.timeoutTrack = Sequence(*timeoutList)
        self.playNextChapter('start')
        return
    def closePreviousChapter(self, iList):
        """Seal *iList* into a Sequence queued under the current event name."""
        trackList = self.chapterDict.setdefault(self.currentEvent, [])
        trackList.append(Sequence(*iList))
    def parseLoad(self, line):
        """LOAD <var> <modelPath> [<subNode>]: load a model into a variable.

        The extension is forced to .bam; with a subNode argument only that
        child of the loaded model is bound.
        """
        if len(line) == 3:
            token, varName, modelPath = line
            modelPath = modelPath[1:-1]
            fn = Filename(modelPath)
            fn.setExtension('bam')
            node = loader.loadModel(fn)
        else:
            if len(line) == 4:
                token, varName, modelPath, subNodeName = line
                modelPath = modelPath[1:-1]
                fn = Filename(modelPath)
                fn.setExtension('bam')
                subNodeName = subNodeName[1:-1]
                node = loader.loadModel(fn).find('**/' + subNodeName)
            else:
                notify.error('invalid parseLoad command')
        self.setVar(varName, node)
def parseLoadSfx(self, line):
token, varName, fileName = line
sfx = base.loader.loadSfx(fileName.replace('"', ''))
self.setVar(varName, sfx)
    def parseLoadDialogue(self, line):
        """LOAD_DIALOGUE <var> <file>: load spoken dialogue audio.

        Audio only loads on Japanese-language builds; otherwise the
        variable is set to None.
        """
        token, varName, fileName = line
        if varName == 'tomDialogue_01':
            notify.debug('VarName tomDialogue getting added. Tutorial Ack: %d' % base.localAvatar.tutorialAck)
        if config.GetString('language', 'english') == 'japanese':
            dialogue = base.loader.loadSfx(fileName)
        else:
            dialogue = None
        self.setVar(varName, dialogue)
        return
    def parseLoadCCDialogue(self, line):
        """LOAD_CC_DIALOGUE <var> <template>: classic-char dialogue audio.

        The filename template is filled with mickey/minnie depending on the
        toon's gender; audio only loads on Japanese-language builds.
        """
        token, varName, filenameTemplate = line
        if self.toon.getStyle().gender == 'm':
            classicChar = 'mickey'
        else:
            classicChar = 'minnie'
        filename = filenameTemplate % classicChar
        if config.GetString('language', 'english') == 'japanese':
            dialogue = base.loader.loadSfx(filename)
        else:
            dialogue = None
        self.setVar(varName, dialogue)
        return
    def parseLoadChar(self, line):
        """LOAD_CHAR <var> <dnaType>: create a Char actor of the given type.

        NOTE(review): unlike parseLoadClassicChar, the char is not appended
        to self.chars, so cleanup() will not unload it — confirm intended.
        """
        token, name, charType = line
        char = Char.Char()
        dna = CharDNA.CharDNA()
        dna.newChar(charType)
        char.setDNA(dna)
        # Mickey/Minnie need their ear task running.
        if charType == 'mk' or charType == 'mn':
            char.startEarTask()
        char.nametag.manage(base.marginManager)
        char.addActive()
        char.hideName()
        self.setVar(name, char)
    def parseLoadClassicChar(self, line):
        """LOAD_CLASSIC_CHAR <var>: create Mickey or Minnie based on the
        toon's gender and track it in self.chars for cleanup."""
        token, name = line
        char = Char.Char()
        dna = CharDNA.CharDNA()
        if self.toon.getStyle().gender == 'm':
            charType = 'mk'
        else:
            charType = 'mn'
        dna.newChar(charType)
        char.setDNA(dna)
        char.startEarTask()
        char.nametag.manage(base.marginManager)
        char.addActive()
        char.hideName()
        self.setVar(name, char)
        self.chars.append(char)
def parseUnloadChar(self, line):
token, name = line
char = self.getVar(name)
track = Sequence()
track.append(Func(self.__unloadChar, char))
track.append(Func(self.delVar, name))
return track
def parseLoadSuit(self, line):
token, name, suitType = line
suit = Suit.Suit()
dna = SuitDNA.SuitDNA()
dna.newSuit(suitType)
suit.setDNA(dna)
self.setVar(name, suit)
def parseSet(self, line):
token, varName, value = line
self.setVar(varName, value)
    def parseCall(self, line):
        """CALL <scriptId>: play another script inline via a nested player."""
        token, scriptId = line
        nmp = NPCMoviePlayer(scriptId, self.toon, self.npc)
        return Func(nmp.play)
    def parseLockLocalToon(self, line):
        """LOCK_LOCALTOON: freeze avatar camera/controls for the movie."""
        return Sequence(Func(self.toon.detachCamera), Func(self.toon.collisionsOff), Func(self.toon.disableAvatarControls), Func(self.toon.stopTrackAnimToSpeed), Func(self.toon.stopUpdateSmartCamera))
    def parseFreeLocalToon(self, line):
        """FREE_LOCALTOON: restore avatar camera/controls after the movie."""
        return Sequence(Func(self.toon.attachCamera), Func(self.toon.startTrackAnimToSpeed), Func(self.toon.collisionsOn), Func(self.toon.enableAvatarControls), Func(self.toon.startUpdateSmartCamera))
    def parseDebug(self, line):
        """DEBUG <text>: emit *text* to the notify log when the track runs."""
        # NOTE(review): the local unpack name shadows the builtin ``str``.
        token, str = line
        return Func(notify.debug, str)
    def parseReparent(self, line):
        """REPARENTTO <child> <parent> [<subNode>]: build a ParentInterval."""
        if len(line) == 3:
            token, childNodeName, parentNodeName = line
            subNodeName = None
        else:
            if len(line) == 4:
                token, childNodeName, parentNodeName, subNodeName = line
                subNodeName = subNodeName[1:-1]
        # NOTE(review): a line length other than 3 or 4 would leave
        # subNodeName unbound here (compare parseLoad, which errors out).
        childNode = self.getVar(childNodeName)
        if subNodeName:
            parentNode = self.getVar(parentNodeName).find(subNodeName)
        else:
            parentNode = self.getVar(parentNodeName)
        return ParentInterval(childNode, parentNode)
def parseWrtReparent(self, line):
if len(line) == 3:
token, childNodeName, parentNodeName = line
subNodeName = None
else:
if len(line) == 4:
token, childNodeName, parentNodeName, subNodeName = line
childNode = self.getVar(childNodeName)
if subNodeName:
parentNode = self.getVar(parentNodeName).find(subNodeName)
else:
parentNode = self.getVar(parentNodeName)
return WrtParentInterval(childNode, parentNode)
def parseShow(self, line):
token, nodeName = line
node = self.getVar(nodeName)
return Func(node.show)
def parseHide(self, line):
token, nodeName = line
node = self.getVar(nodeName)
return Func(node.hide)
def parsePos(self, line):
token, nodeName, x, y, z = line
node = self.getVar(nodeName)
return Func(node.setPos, x, y, z)
def parseHpr(self, line):
token, nodeName, h, p, r = line
node = self.getVar(nodeName)
return Func(node.setHpr, h, p, r)
def parseScale(self, line):
token, nodeName, x, y, z = line
node = self.getVar(nodeName)
return Func(node.setScale, x, y, z)
def parsePosHprScale(self, line):
token, nodeName, x, y, z, h, p, r, sx, sy, sz = line
node = self.getVar(nodeName)
return Func(node.setPosHprScale, x, y, z, h, p, r, sx, sy, sz)
def parseColor(self, line):
token, nodeName, r, g, b, a = line
node = self.getVar(nodeName)
return Func(node.setColor, r, g, b, a)
def parseColorScale(self, line):
token, nodeName, r, g, b, a = line
node = self.getVar(nodeName)
return Func(node.setColorScale, r, g, b, a)
def parseWait(self, line):
token, waitTime = line
return Wait(waitTime)
def parseChat(self, line):
toonId = self.toon.getDoId()
avatarName = line[1]
avatar = self.getVar(avatarName)
chatString = getattr(TTLocalizer, line[2])
chatFlags = CFSpeech | CFTimeout
quitButton, extraChatFlags, dialogueList = self.parseExtraChatArgs(line[3:])
if extraChatFlags:
chatFlags |= extraChatFlags
if len(dialogueList) > 0:
dialogue = dialogueList[0]
else:
dialogue = None
return Func(avatar.setChatAbsolute, chatString, chatFlags, dialogue)
def parseClearChat(self, line):
toonId = self.toon.getDoId()
avatarName = line[1]
avatar = self.getVar(avatarName)
chatFlags = CFSpeech | CFTimeout
return Func(avatar.setChatAbsolute, '', chatFlags)
    def parseExtraChatArgs(self, args):
        """Split trailing chat tokens into (quitButton, chatFlags, dialogueList).

        Numeric tokens set the quit-button value; quoted "CF..." names are
        looked up as chat-flag constants in this module's globals; other
        quoted names are resolved as dialogue variables via getVar.
        Unquoted, non-numeric tokens are silently ignored.
        """
        quitButton = 0
        extraChatFlags = None
        dialogueList = []
        for arg in args:
            if type(arg) in (int, float) or arg.isdigit():
                quitButton = int(arg)
            elif arg[0] == '"' and arg[(-1)] == '"':
                if len(arg) > 4 and arg[1:3] == 'CF':
                    extraChatFlags = globals()[arg[1:-1]]
                else:
                    dialogueList.append(self.getVar(arg))
        return (quitButton, extraChatFlags, dialogueList)
def parseChatConfirm(self, line):
lineLength = len(line)
toonId = self.toon.getDoId()
avatarName = line[1]
avatar = self.getVar(avatarName)
chatString = getattr(TTLocalizer, line[2])
quitButton, extraChatFlags, dialogueList = self.parseExtraChatArgs(line[3:])
return Func(avatar.setPageChat, toonId, 0, chatString, quitButton, extraChatFlags, dialogueList)
def parseLocalChatConfirm(self, line):
lineLength = len(line)
avatarName = line[1]
avatar = self.getVar(avatarName)
chatString = getattr(TTLocalizer, line[2])
quitButton, extraChatFlags, dialogueList = self.parseExtraChatArgs(line[3:])
return Func(avatar.setLocalPageChat, chatString, quitButton, extraChatFlags, dialogueList)
def parseLocalChatPersist(self, line):
lineLength = len(line)
avatarName = line[1]
avatar = self.getVar(avatarName)
chatString = getattr(TTLocalizer, line[2])
quitButton, extraChatFlags, dialogueList = self.parseExtraChatArgs(line[3:])
if len(dialogueList) > 0:
dialogue = dialogueList[0]
else:
dialogue = None
if extraChatFlags:
flags = CFSpeech | extraChatFlags
else:
flags = CFSpeech
return Func(avatar.setChatAbsolute, chatString, flags, dialogue)
def parseLocalChatToConfirm(self, line):
lineLength = len(line)
avatarKey = line[1]
avatar = self.getVar(avatarKey)
toAvatarKey = line[2]
toAvatar = self.getVar(toAvatarKey)
localizerAvatarName = toAvatar.getName().capitalize()
toAvatarName = getattr(TTLocalizer, localizerAvatarName)
chatString = getattr(TTLocalizer, line[3])
chatString = chatString.replace('%s', toAvatarName)
quitButton, extraChatFlags, dialogueList = self.parseExtraChatArgs(line[4:])
return Func(avatar.setLocalPageChat, chatString, quitButton, extraChatFlags, dialogueList)
def parseCCChatConfirm(self, line):
lineLength = len(line)
avatarName = line[1]
avatar = self.getVar(avatarName)
if self.toon.getStyle().gender == 'm':
chatString = getattr(TTLocalizer, line[2][1:-1] % 'Mickey')
else:
chatString = getattr(TTLocalizer, line[2][1:-1] % 'Minnie')
quitButton, extraChatFlags, dialogueList = self.parseExtraChatArgs(line[3:])
return Func(avatar.setLocalPageChat, chatString, quitButton, extraChatFlags, dialogueList)
def parseCCChatToConfirm(self, line):
lineLength = len(line)
avatarKey = line[1]
avatar = self.getVar(avatarKey)
toAvatarKey = line[2]
toAvatar = self.getVar(toAvatarKey)
localizerAvatarName = toAvatar.getName().capitalize()
toAvatarName = getattr(TTLocalizer, localizerAvatarName)
if self.toon.getStyle().gender == 'm':
chatString = getattr(TTLocalizer, line[3][1:-1] % 'Mickey')
else:
chatString = getattr(TTLocalizer, line[3][1:-1] % 'Minnie')
chatString = chatString.replace('%s', toAvatarName)
quitButton, extraChatFlags, dialogueList = self.parseExtraChatArgs(line[4:])
return Func(avatar.setLocalPageChat, chatString, quitButton, extraChatFlags, dialogueList)
def parsePlaySfx(self, line):
if len(line) == 2:
token, sfxName = line
looping = 0
else:
if len(line) == 3:
token, sfxName, looping = line
else:
notify.error('invalid number of arguments')
sfx = self.getVar(sfxName)
return Func(base.playSfx, sfx, looping)
def parseStopSfx(self, line):
token, sfxName = line
sfx = self.getVar(sfxName)
return Func(sfx.stop)
def parsePlayAnim(self, line):
if len(line) == 3:
token, actorName, animName = line
playRate = 1.0
else:
if len(line) == 4:
token, actorName, animName, playRate = line
else:
notify.error('invalid number of arguments')
actor = self.getVar(actorName)
animName = animName[1:-1]
return Sequence(Func(actor.setPlayRate, playRate, animName), Func(actor.play, animName))
def parseLoopAnim(self, line):
if len(line) == 3:
token, actorName, animName = line
playRate = 1.0
else:
if len(line) == 4:
token, actorName, animName, playRate = line
else:
notify.error('invalid number of arguments')
actor = self.getVar(actorName)
animName = animName[1:-1]
return Sequence(Func(actor.setPlayRate, playRate, animName), Func(actor.loop, animName))
def parseLerpPos(self, line):
token, nodeName, x, y, z, t = line
node = self.getVar(nodeName)
return Sequence(LerpPosInterval(node, t, Point3(x, y, z), blendType='easeInOut'), duration=0.0)
def parseLerpHpr(self, line):
token, nodeName, h, p, r, t = line
node = self.getVar(nodeName)
return Sequence(LerpHprInterval(node, t, VBase3(h, p, r), blendType='easeInOut'), duration=0.0)
def parseLerpScale(self, line):
token, nodeName, x, y, z, t = line
node = self.getVar(nodeName)
return Sequence(LerpScaleInterval(node, t, VBase3(x, y, z), blendType='easeInOut'), duration=0.0)
def parseLerpPosHprScale(self, line):
token, nodeName, x, y, z, h, p, r, sx, sy, sz, t = line
node = self.getVar(nodeName)
return Sequence(LerpPosHprScaleInterval(node, t, VBase3(x, y, z), VBase3(h, p, r), VBase3(sx, sy, sz), blendType='easeInOut'), duration=0.0)
def parseLerpColor(self, line):
token, nodeName, sr, sg, sb, sa, er, eg, eb, ea, t = line
node = self.getVar(nodeName)
return Sequence(LerpColorInterval(node, t, VBase4(er, eg, eb, ea), startColorScale=VBase4(sr, sg, sb, sa), blendType='easeInOut'), duration=0.0)
def parseLerpColorScale(self, line):
token, nodeName, sr, sg, sb, sa, er, eg, eb, ea, t = line
node = self.getVar(nodeName)
return Sequence(LerpColorScaleInterval(node, t, VBase4(er, eg, eb, ea), startColorScale=VBase4(sr, sg, sb, sa), blendType='easeInOut'), duration=0.0)
def parseDepthWriteOn(self, line):
token, nodeName, depthWrite = line
node = self.getVar(nodeName)
return Sequence(Func(node.setDepthWrite, depthWrite))
def parseDepthWriteOff(self, line):
token, nodeName = line
node = self.getVar(nodeName)
return Sequence(Func(node.clearDepthWrite))
def parseDepthTestOn(self, line):
token, nodeName, depthTest = line
node = self.getVar(nodeName)
return Sequence(Func(node.setDepthTest, depthTest))
def parseDepthTestOff(self, line):
token, nodeName = line
node = self.getVar(nodeName)
return Sequence(Func(node.clearDepthTest))
def parseSetBin(self, line):
if len(line) == 3:
token, nodeName, binName = line
sortOrder = 0
else:
token, nodeName, binName, sortOrder = line
node = self.getVar(nodeName)
return Sequence(Func(node.setBin, binName[1:-1], sortOrder))
def parseClearBin(self, line):
token, nodeName = line
node = self.getVar(nodeName)
return Sequence(Func(node.clearBin))
def parseWaitEvent(self, line):
token, eventName = line
return eventName[1:-1]
def parseSendEvent(self, line):
token, eventName = line
return Func(messenger.send, eventName[1:-1])
def parseFunction(self, line):
token, objectName, functionName = line
object = self.getVar(objectName)
functionName = functionName[1:-1]
func = object
for fn in functionName.split('.'):
func = getattr(func, fn)
return Func(func)
def parseAddLaffMeter(self, line):
token, maxHpDelta = line
newMaxHp = maxHpDelta + self.toon.getMaxHp()
newHp = newMaxHp
laffMeter = self.getVar('laffMeter')
return Func(laffMeter.adjustFace, int(newHp), int(newMaxHp))
def parseLaffMeter(self, line):
token, newHp, newMaxHp = line
laffMeter = self.getVar('laffMeter')
return Func(laffMeter.adjustFace, int(newHp), int(newMaxHp))
def parseObscureLaffMeter(self, line):
token, val = line
return Func(self.toon.laffMeter.obscure, val)
    def parseAddInventory(self, line):
        """Animate adding gags to the tutorial inventory with a tick sound."""
        token, track, level, number = line
        inventory = self.getVar('inventory')
        countSound = base.loader.loadSfx('phase_3.5/audio/sfx/tick_counter.ogg')
        # Play the counter sound, bounce the button, add the items, then refresh.
        return Sequence(Func(base.playSfx, countSound), Func(inventory.buttonBoing, int(track), int(level)), Func(inventory.addItems, int(track), int(level), int(number)), Func(inventory.updateGUI, int(track), int(level)))
def parseSetInventory(self, line):
token, track, level, number = line
inventory = self.getVar('inventory')
return Sequence(Func(inventory.setItem, int(track), int(level), int(number)), Func(inventory.updateGUI, int(track), int(level)))
    def parseSetInventoryYPos(self, line):
        """Adjust the vertical offset of a gag button's count label."""
        token, track, level, yPos = line
        inventory = self.getVar('inventory')
        # Dig into the button's state node to find its TextNode label.
        button = inventory.buttons[int(track)][int(level)].stateNodePath[0]
        text = button.find('**/+TextNode')
        return Sequence(Func(text.setY, yPos))
    def parseSetInventoryDetail(self, line):
        """Control the inventory detail display.

        val == -1: no detail; val == 0: hide detail; val == 1: show detail
        for the given track/level (only the 4-argument form supplies them).
        """
        if len(line) == 2:
            token, val = line
        else:
            if len(line) == 4:
                token, val, track, level = line
            else:
                notify.error('invalid line for parseSetInventoryDetail: %s' % line)
        inventory = self.getVar('inventory')
        if val == -1:
            return Func(inventory.noDetail)
        if val == 0:
            return Func(inventory.hideDetail)
        if val == 1:
            return Func(inventory.showDetail, int(track), int(level))
        notify.error('invalid inventory detail level: %s' % val)
def parseShowFriendsList(self, line):
from toontown.friends import FriendsListPanel
return Func(FriendsListPanel.showFriendsListTutorial)
def parseHideFriendsList(self, line):
from toontown.friends import FriendsListPanel
return Func(FriendsListPanel.hideFriendsListTutorial)
    def parseShowBook(self, line):
        """Open the shticker book on the map page with the close button disabled."""
        return Sequence(Func(self.toon.book.setPage, self.toon.mapPage), Func(self.toon.book.enter), Func(self.toon.book.disableBookCloseButton))
    def parseEnableCloseBook(self, line):
        """Re-enable the shticker book's close button."""
        return Sequence(Func(self.toon.book.enableBookCloseButton))
    def parseHideBook(self, line):
        """Close the shticker book."""
        return Func(self.toon.book.exit)
def parseObscureBook(self, line):
token, val = line
return Func(self.toon.book.obscureButton, val)
def parseObscureChat(self, line):
token, val0, val1 = line
return Func(self.toon.chatMgr.obscure, val0, val1)
def parseArrowsOn(self, line):
arrows = self.getVar('arrows')
token, x1, y1, h1, x2, y2, h2 = line
return Func(arrows.arrowsOn, x1, y1, h1, x2, y2, h2)
    def parseArrowsOff(self, line):
        """Hide the tutorial guide arrows."""
        arrows = self.getVar('arrows')
        return Func(arrows.arrowsOff)
    def parseStartThrob(self, line):
        """Loop a color-scale throb on a node between two RGBA values over t seconds."""
        token, nodeName, r, g, b, a, r2, g2, b2, a2, t = line
        node = self.getVar(nodeName)
        startCScale = Point4(r, g, b, a)
        destCScale = Point4(r2, g2, b2, a2)
        # Stored on self so parseStopThrob can finish it later.
        self.throbIval = Sequence(LerpColorScaleInterval(node, t / 2.0, destCScale, startColorScale=startCScale, blendType='easeInOut'), LerpColorScaleInterval(node, t / 2.0, startCScale, startColorScale=destCScale, blendType='easeInOut'))
        return Func(self.throbIval.loop)
    def parseStopThrob(self, line):
        """Finish the throb interval started by parseStartThrob."""
        return Func(self.throbIval.finish)
    def parseToonHead(self, line):
        """Show or hide a 2-D head frame for a toon, caching one frame per doId."""
        if len(line) == 5:
            token, toonName, x, z, toggle = line
            scale = 1.0
        else:
            token, toonName, x, z, toggle, scale = line
        toon = self.getVar(toonName)
        toonId = toon.getDoId()
        toonHeadFrame = self.toonHeads.get(toonId)
        if not toonHeadFrame:
            # First request for this toon: build the frame hidden and cache it.
            toonHeadFrame = ToonHeadFrame.ToonHeadFrame(toon)
            toonHeadFrame.hide()
            self.toonHeads[toonId] = toonHeadFrame
            self.setVar('%sToonHead' % toonName, toonHeadFrame)
        if toggle:
            return Sequence(Func(toonHeadFrame.setPos, x, 0, z), Func(toonHeadFrame.setScale, scale), Func(toonHeadFrame.show))
        return Func(toonHeadFrame.hide)
def parseToonHeadScale(self, line):
token, toonName, scale = line
toon = self.getVar(toonName)
toonId = toon.getDoId()
toonHeadFrame = self.toonHeads.get(toonId)
return Func(toonHeadFrame.setScale, scale)
    def parseBlackCatListen(self, line):
        """Enable or disable listening for the black-cat SpeedChat phrase."""
        token, enable = line
        if enable:
            def phraseSaid(phraseId):
                # 315: SpeedChat static-text id that triggers the black cat
                # activation -- NOTE(review): id meaning inferred; confirm.
                toontastic = 315
                if phraseId == toontastic:
                    messenger.send(DistributedBlackCatMgr.DistributedBlackCatMgr.ActivateEvent)
            def enableBlackCatListen():
                self.acceptOnce(SpeedChatGlobals.SCStaticTextMsgEvent, phraseSaid)
            return Func(enableBlackCatListen)
        def disableBlackCatListen():
            self.ignore(SpeedChatGlobals.SCStaticTextMsgEvent)
        return Func(disableBlackCatListen)
    def parseThrowSquirtPreview(self, line):
        """Briefly grant full throw/squirt track access, then restore it.

        Used by the tutorial to preview those gag tracks; sends
        'doneThrowSquirtPreview' when finished.
        """
        # One-element list so the closures can write through to it.
        oldTrackAccess = [
            None]
        def grabCurTrackAccess(oldTrackAccess=oldTrackAccess):
            oldTrackAccess[0] = copy.deepcopy(base.localAvatar.getTrackAccess())
        def restoreTrackAccess(oldTrackAccess=oldTrackAccess):
            base.localAvatar.setTrackAccess(oldTrackAccess[0])
        minGagLevel = ToontownBattleGlobals.MIN_LEVEL_INDEX + 1
        maxGagLevel = ToontownBattleGlobals.MAX_LEVEL_INDEX + 1
        curGagLevel = minGagLevel
        def updateGagLevel(t, curGagLevel=curGagLevel):
            newGagLevel = int(round(t))
            # NOTE(review): the default arg freezes curGagLevel at its
            # def-time value; the rebinding below is local to one call, so the
            # early-return only skips level == minGagLevel -- confirm intended.
            if newGagLevel == curGagLevel:
                return
            curGagLevel = newGagLevel
            base.localAvatar.setTrackAccess([0,
             0,
             0,
             0,
             curGagLevel,
             curGagLevel,
             0])
        return Sequence(Func(grabCurTrackAccess), LerpFunctionInterval(updateGagLevel, fromData=1, toData=7, duration=0.3), WaitInterval(3.5), LerpFunctionInterval(updateGagLevel, fromData=7, toData=1, duration=0.3), Func(restoreTrackAccess), Func(messenger.send, 'doneThrowSquirtPreview'))
    def parseSetMusicVolume(self, line):
        """Set or fade the current place's music volume (Japanese builds only).

        Forms: (token, level [, type [, duration [, fromLevel]]]); type selects
        loader.music / battleMusic / activityMusic.  Non-Japanese builds get a
        zero-length Wait.  NOTE(review): on AttributeError (no place loader)
        this falls through and returns None -- confirm callers tolerate that.
        """
        if config.GetString('language', 'english') == 'japanese':
            try:
                loader = base.cr.playGame.place.loader
                type = 'music'
                duration = 0
                fromLevel = 1.0
                if len(line) == 2:
                    token, level = line
                else:
                    if len(line) == 3:
                        token, level, type = line
                    else:
                        if len(line) == 4:
                            token, level, type, duration = line
                        else:
                            if len(line) == 5:
                                token, level, type, duration, fromLevel = line
                if type == 'battleMusic':
                    music = loader.battleMusic
                else:
                    if type == 'activityMusic':
                        music = loader.activityMusic
                    else:
                        music = loader.music
                if duration == 0:
                    # Immediate volume change, no interval needed.
                    return Func(music.setVolume, level)
                def setVolume(level):
                    music.setVolume(level)
                return LerpFunctionInterval(setVolume, fromData=fromLevel, toData=level, duration=duration)
            except AttributeError:
                pass
        else:
            return Wait(0.0)
def cleanString(self, string, charactors):
cleanedString = ''
for char in charactors:
if char in string:
cleanedString = string.replace(char, '')
return cleanedString
# Locate and parse the quest-script file at import time.
searchPath = DSearchPath()
searchPath.appendDirectory(Filename('/phase_3/etc'))
# Use the script variant without Classic Chars dialogue when both retro mode
# and classic chars are disabled.
if not config.GetBool('want-retro-mode', False) and not config.GetBool('want-classic-chars', True):
    scriptFile = Filename('QuestScriptsNoChar.txt')
else:
    scriptFile = Filename('QuestScripts.txt')
found = vfs.resolveFilename(scriptFile, searchPath)
if not found:
    notify.error('Could not find QuestScripts.txt file')
readFile(scriptFile)
| 41.997345
| 290
| 0.548981
|
acfb952c110a7d1f9339c7b12e59767402ea7987
| 6,479
|
py
|
Python
|
scripts/2.TCGA-MLexample.py
|
KT12/Machine-Learning
|
63cd3f9edc951fd1ca721f8a8336c8c625ea5b53
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/2.TCGA-MLexample.py
|
KT12/Machine-Learning
|
63cd3f9edc951fd1ca721f8a8336c8c625ea5b53
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/2.TCGA-MLexample.py
|
KT12/Machine-Learning
|
63cd3f9edc951fd1ca721f8a8336c8c625ea5b53
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# # Create a logistic regression model to predict TP53 mutation from gene expression data in TCGA
# In[1]:
import os
import urllib
import random
import warnings
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import preprocessing
from sklearn.linear_model import SGDClassifier
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.metrics import roc_auc_score, roc_curve
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import SelectKBest
from statsmodels.robust.scale import mad
# In[2]:
get_ipython().magic('matplotlib inline')
plt.style.use('seaborn-notebook')
# ## Specify model configuration
# In[3]:
# We're going to be building a 'TP53' classifier
GENE = '7157' # TP53
# *Here is some [documentation](http://scikit-learn.org/stable/modules/generated/sklearn.linear_model.SGDClassifier.html) regarding the classifier and hyperparameters*
#
# *Here is some [information](https://ghr.nlm.nih.gov/gene/TP53) about TP53*
# ## Load Data
# In[4]:
get_ipython().run_cell_magic('time', '', "path = os.path.join('download', 'expression-matrix.tsv.bz2')\nX = pd.read_table(path, index_col=0)")
# In[5]:
get_ipython().run_cell_magic('time', '', "path = os.path.join('download', 'mutation-matrix.tsv.bz2')\nY = pd.read_table(path, index_col=0)")
# In[6]:
y = Y[GENE]
# In[7]:
# The Series now holds TP53 Mutation Status for each Sample
y.head(6)
# In[8]:
# Fraction of tumors with a TP53 mutation (the old comment said NF1 in error)
y.value_counts(True)
# ## Set aside 10% of the data for testing
# In[9]:
# Typically, this can only be done where the number of mutations is large enough
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=0)
'Size: {:,} features, {:,} training samples, {:,} testing samples'.format(len(X.columns), len(X_train), len(X_test))
# ## Median absolute deviation feature selection
# In[10]:
def fs_mad(x, y):
    """
    Get the median absolute deviation (MAD) for each column of x.
    y is ignored; it is present only for the SelectKBest scorer interface.
    """
    scores = mad(x)
    placeholder_pvalues = np.array([np.NaN] * len(scores))
    return scores, placeholder_pvalues
# ## Define pipeline and Cross validation model fitting
# In[11]:
# Parameter Sweep for Hyperparameters
param_grid = {
'select__k': [2000],
'classify__loss': ['log'],
'classify__penalty': ['elasticnet'],
'classify__alpha': [10 ** x for x in range(-3, 1)],
'classify__l1_ratio': [0, 0.2, 0.8, 1],
}
pipeline = Pipeline(steps=[
('select', SelectKBest(fs_mad)),
('standardize', StandardScaler()),
('classify', SGDClassifier(random_state=0, class_weight='balanced'))
])
cv_pipeline = GridSearchCV(estimator=pipeline, param_grid=param_grid, n_jobs=-1, scoring='roc_auc')
# In[12]:
get_ipython().run_cell_magic('time', '', 'cv_pipeline.fit(X=X_train, y=y_train)')
# In[13]:
# Best Params
print('{:.3%}'.format(cv_pipeline.best_score_))
# Best Params
cv_pipeline.best_params_
# ## Visualize hyperparameters performance
# In[14]:
cv_result_df = pd.concat([
pd.DataFrame(cv_pipeline.cv_results_),
pd.DataFrame.from_records(cv_pipeline.cv_results_['params']),
], axis='columns')
cv_result_df.head(2)
# In[15]:
# Cross-validated performance heatmap
cv_score_mat = pd.pivot_table(cv_result_df, values='mean_test_score', index='classify__l1_ratio', columns='classify__alpha')
ax = sns.heatmap(cv_score_mat, annot=True, fmt='.1%')
ax.set_xlabel('Regularization strength multiplier (alpha)')
ax.set_ylabel('Elastic net mixing parameter (l1_ratio)');
# ## Use Optimal Hyperparameters to Output ROC Curve
# In[16]:
y_pred_train = cv_pipeline.decision_function(X_train)
y_pred_test = cv_pipeline.decision_function(X_test)
def get_threshold_metrics(y_true, y_pred):
    """Return {'auroc': float, 'roc_df': DataFrame} for the given scores.

    y_true: binary ground-truth labels; y_pred: continuous decision scores.
    """
    roc_columns = ['fpr', 'tpr', 'threshold']
    # DataFrame.from_items was deprecated in pandas 0.23 and removed in 1.0;
    # build from a mapping with an explicit column order instead.
    roc_df = pd.DataFrame(dict(zip(roc_columns, roc_curve(y_true, y_pred))),
                          columns=roc_columns)
    auroc = roc_auc_score(y_true, y_pred)
    return {'auroc': auroc, 'roc_df': roc_df}
metrics_train = get_threshold_metrics(y_train, y_pred_train)
metrics_test = get_threshold_metrics(y_test, y_pred_test)
# In[17]:
# Plot ROC
plt.figure()
for label, metrics in ('Training', metrics_train), ('Testing', metrics_test):
    roc_df = metrics['roc_df']
    plt.plot(roc_df.fpr, roc_df.tpr,
        label='{} (AUROC = {:.1%})'.format(label, metrics['auroc']))
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Predicting TP53 mutation from gene expression (ROC curves)')
plt.legend(loc='lower right');
# ## What are the classifier coefficients?
# In[18]:
final_pipeline = cv_pipeline.best_estimator_
final_classifier = final_pipeline.named_steps['classify']
# In[19]:
select_indices = final_pipeline.named_steps['select'].transform(
    np.arange(len(X.columns)).reshape(1, -1)
).tolist()
# DataFrame.from_items was deprecated in pandas 0.23 and removed in 1.0;
# use the constructor with an explicit column order instead.
coef_df = pd.DataFrame({
    'feature': X.columns[select_indices],
    'weight': final_classifier.coef_[0],
}, columns=['feature', 'weight'])
coef_df['abs'] = coef_df['weight'].abs()
coef_df = coef_df.sort_values('abs', ascending=False)
# In[20]:
'{:.1%} zero coefficients; {:,} negative and {:,} positive coefficients'.format(
    (coef_df.weight == 0).mean(),
    (coef_df.weight < 0).sum(),
    (coef_df.weight > 0).sum()
)
# In[21]:
coef_df.head(10)
# ## Investigate the predictions
# In[22]:
predict_df = pd.DataFrame({
    'sample_id': X.index,
    'testing': X.index.isin(X_test.index).astype(int),
    'status': y,
    'decision_function': cv_pipeline.decision_function(X),
    'probability': cv_pipeline.predict_proba(X)[:, 1],
}, columns=['sample_id', 'testing', 'status', 'decision_function', 'probability'])
predict_df['probability_str'] = predict_df['probability'].apply('{:.1%}'.format)
# In[23]:
# Top predictions amongst negatives (potential hidden responders)
predict_df.sort_values('decision_function', ascending=False).query("status == 0").head(10)
# In[24]:
# Ignore numpy warning caused by seaborn
warnings.filterwarnings('ignore', 'using a non-integer number instead of an integer')
ax = sns.distplot(predict_df.query("status == 0").decision_function, hist=False, label='Negatives')
ax = sns.distplot(predict_df.query("status == 1").decision_function, hist=False, label='Positives')
# In[25]:
ax = sns.distplot(predict_df.query("status == 0").probability, hist=False, label='Negatives')
ax = sns.distplot(predict_df.query("status == 1").probability, hist=False, label='Positives')
| 25.308594
| 167
| 0.713536
|
acfb96783c63e8e128f16b6e25ba1099299bbfa7
| 2,905
|
py
|
Python
|
setup.py
|
andreierdoss/news-please
|
054d30a154f05303a9992057e085c47859f80e4b
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
andreierdoss/news-please
|
054d30a154f05303a9992057e085c47859f80e4b
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
andreierdoss/news-please
|
054d30a154f05303a9992057e085c47859f80e4b
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup, find_packages
# Package metadata and dependency declaration for news-please.
# Fix: 'readability-lxml>=0.6.2' was listed twice in install_requires.
setup(name='news-please',
      version='1.4.26',
      description="news-please is an open source easy-to-use news extractor that just works.",
      long_description="""\
news-please is an open source, easy-to-use news crawler that extracts structured information from almost any news website. It can follow recursively internal hyperlinks and read RSS feeds to fetch both most recent and also old, archived articles. You only need to provide the root URL of the news website. Furthermore, its API allows developers to access the exctraction functionality within their software. news-please also implements a workflow optimized for the news archive provided by commoncrawl.org, allowing users to efficiently crawl and extract news articles including various filter options.""",
      classifiers=[
          'Development Status :: 4 - Beta',
          'Environment :: Console',
          'Intended Audience :: Developers',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: Apache Software License',
          'Operating System :: MacOS',
          'Operating System :: Microsoft',
          'Operating System :: POSIX :: Linux',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
          'Programming Language :: Python :: 3.6',
          'Topic :: Internet',
          'Topic :: Scientific/Engineering :: Information Analysis',
      ],
      keywords='news crawler news scraper news extractor crawler extractor scraper information retrieval',
      author='Felix Hamborg',
      author_email='felix.hamborg@uni-konstanz.de',
      url='https://github.com/fhamborg/news-please',
      download_url='https://github.com/fhamborg/news-please',
      license='Apache License 2.0',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=[
          'Scrapy>=1.1.0',
          'PyMySQL>=0.7.9',
          'psycopg2>=2.8.4',
          'hjson>=1.5.8',
          'elasticsearch>=2.4',
          'beautifulsoup4>=4.3.2',
          'readability-lxml>=0.6.2',
          'newspaper3k>=0.2.8',
          'langdetect>=1.0.7',
          'python-dateutil>=2.4.0',
          'plac>=0.9.6',
          'dotmap>=1.2.17',
          'PyDispatcher>=2.0.5',
          'warcio>=1.3.3',
          'ago>=0.0.9',
          'six>=1.10.0',
          'lxml>=3.3.5',
          'awscli>=1.11.117',
          'hurry.filesize>=0.9',
          'bs4'
      ],
      extras_require={
          ':sys_platform == "win32"': [
              'pywin32>=220'
          ]
      },
      entry_points={
          'console_scripts': ['news-please = newsplease.__main__:main',
                              'news-please-cc = newsplease.examples.commoncrawl:main']
      },
      )
| 44.692308
| 606
| 0.598967
|
acfb96a959613ca6341b302b8f660a8444c397f3
| 5,530
|
py
|
Python
|
h/admin/views/users.py
|
ssin122/test-h
|
c10062ae23b690afaac0ab4af7b9a5a5e4b686a9
|
[
"MIT"
] | null | null | null |
h/admin/views/users.py
|
ssin122/test-h
|
c10062ae23b690afaac0ab4af7b9a5a5e4b686a9
|
[
"MIT"
] | null | null | null |
h/admin/views/users.py
|
ssin122/test-h
|
c10062ae23b690afaac0ab4af7b9a5a5e4b686a9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from elasticsearch import helpers as es_helpers
import jinja2
from pyramid import httpexceptions
from pyramid.view import view_config
from h import models
from h.accounts.events import ActivationEvent
from h.services.rename_user import UserRenameError
from h.tasks.admin import rename_user
from memex import storage
from h.i18n import TranslationString as _
class UserDeletionError(Exception):
    """Raised when a user account cannot be deleted (e.g. a group creator)."""
    pass
class UserNotFoundError(Exception):
    """Raised when no user matches the userid given in an admin form."""
    pass
@view_config(route_name='admin_users',
             request_method='GET',
             renderer='h:templates/admin/users.html.jinja2',
             permission='admin_users')
def users_index(request):
    """Render the user admin search page, optionally with a looked-up user.

    Reads the ``username`` and ``authority`` query params; the username is
    matched first as a username, then as an email address.
    """
    user = None
    user_meta = {}
    username = request.params.get('username')
    authority = request.params.get('authority')
    if username:
        username = username.strip()
        # The authority param may be absent; previously .strip() on None
        # raised AttributeError.
        authority = authority.strip() if authority else None
        user = models.User.get_by_username(request.db, username, authority)
        if user is None:
            user = models.User.get_by_email(request.db, username, authority)
        if user is not None:
            svc = request.find_service(name='annotation_stats')
            counts = svc.user_annotation_counts(user.userid)
            user_meta['annotations_count'] = counts['total']
    return {
        'default_authority': request.auth_domain,
        'username': username,
        'authority': authority,
        'user': user,
        'user_meta': user_meta
    }
@view_config(route_name='admin_users_activate',
             request_method='POST',
             request_param='userid',
             permission='admin_users')
def users_activate(request):
    """Manually activate a user account, flash a notice, and redirect back."""
    user = _form_request_user(request)
    user.activate()
    request.session.flash(jinja2.Markup(_(
        'User {name} has been activated!'.format(name=user.username))),
        'success')
    # Let subscribers react to the activation (same event as self-activation).
    request.registry.notify(ActivationEvent(request, user))
    return httpexceptions.HTTPFound(
        location=request.route_path('admin_users',
                                    _query=(('username', user.username),
                                            ('authority', user.authority))))
@view_config(route_name='admin_users_rename',
             request_method='POST',
             permission='admin_users')
def users_rename(request):
    """Validate a new username and schedule the rename as a background task."""
    user = _form_request_user(request)
    old_username = user.username
    # The param may be missing; avoid AttributeError on None.strip().
    new_username = (request.params.get('new_username') or '').strip()
    try:
        svc = request.find_service(name='rename_user')
        svc.check(user, new_username)
        rename_user.delay(user.id, new_username)
        # Typo fix: message previously read "backgroud".
        request.session.flash(
            'The user "%s" will be renamed to "%s" in the background. Refresh this page to see if it\'s already done' %
            (old_username, new_username), 'success')
        return httpexceptions.HTTPFound(
            location=request.route_path('admin_users',
                                        _query=(('username', new_username),
                                                ('authority', user.authority))))
    except (UserRenameError, ValueError) as e:
        request.session.flash(str(e), 'error')
        return httpexceptions.HTTPFound(
            location=request.route_path('admin_users',
                                        _query=(('username', old_username),
                                                ('authority', user.authority))))
@view_config(route_name='admin_users_delete',
             request_method='POST',
             permission='admin_users')
def users_delete(request):
    """Delete a user (and their annotations); flash the outcome and redirect."""
    user = _form_request_user(request)
    try:
        delete_user(request, user)
        request.session.flash(
            'Successfully deleted user %s with authority %s' % (user.username, user.authority), 'success')
    except UserDeletionError as e:
        request.session.flash(str(e), 'error')
    return httpexceptions.HTTPFound(
        location=request.route_path('admin_users'))
@view_config(context=UserNotFoundError)
def user_not_found(exc, request):
    """Flash a UserNotFoundError message and bounce back to the user index."""
    # BaseException has no ``message`` attribute on Python 3; str(exc)
    # yields the same text on both Python 2 and 3.
    request.session.flash(jinja2.Markup(_(str(exc))), 'error')
    return httpexceptions.HTTPFound(location=request.route_path('admin_users'))
def delete_user(request, user):
    """
    Deletes a user with all their group memberships and annotations.

    Raises UserDeletionError when deletion fails with the appropriate error
    message.
    """
    if models.Group.created_by(request.db, user).count() > 0:
        raise UserDeletionError('Cannot delete user who is a group creator.')
    user.groups = []
    # Delete every annotation (shared and private) the user owns, using a
    # scrolling Elasticsearch scan so large accounts are covered too.
    query = _all_user_annotations_query(request, user)
    annotations = es_helpers.scan(client=request.es.conn, query={'query': query})
    for annotation in annotations:
        storage.delete_annotation(request.db, annotation['_id'])
    request.db.delete(user)
def _all_user_annotations_query(request, user):
"""Query matching all annotations (shared and private) owned by user."""
return {
'filtered': {
'filter': {'term': {'user': user.userid.lower()}},
'query': {'match_all': {}}
}
}
def _form_request_user(request):
    """Return the User which a user admin form action relates to.

    Raises UserNotFoundError if no user matches the ``userid`` form param.
    """
    userid = request.params['userid'].strip()
    user_service = request.find_service(name='user')
    user = user_service.fetch(userid)
    if user is None:
        raise UserNotFoundError("Could not find user with userid %s" % userid)
    return user
def includeme(config):
    """Register this module's views with the Pyramid configurator."""
    config.scan(__name__)
| 31.067416
| 118
| 0.648101
|
acfb96bab8978c36905120e70b4dba6f87278557
| 194
|
py
|
Python
|
fun1.py
|
hallel21-meet/meet2019y1lab6
|
6171904b2cebdc736c2c02b0801e3ee06dcd6506
|
[
"MIT"
] | null | null | null |
fun1.py
|
hallel21-meet/meet2019y1lab6
|
6171904b2cebdc736c2c02b0801e3ee06dcd6506
|
[
"MIT"
] | null | null | null |
fun1.py
|
hallel21-meet/meet2019y1lab6
|
6171904b2cebdc736c2c02b0801e3ee06dcd6506
|
[
"MIT"
] | null | null | null |
Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux
Type "help", "copyright", "credits" or "license()" for more information.
>>>
| 38.8
| 72
| 0.690722
|
acfb96ea0d3d5b43b0f6a42db5a0caa9a9a88bc7
| 11,136
|
py
|
Python
|
src/pretix/plugins/pretixdroid/views.py
|
td00/pretix
|
e31bd7600c85598de135f2eb5012e2f33fdb1d11
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/plugins/pretixdroid/views.py
|
td00/pretix
|
e31bd7600c85598de135f2eb5012e2f33fdb1d11
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/plugins/pretixdroid/views.py
|
td00/pretix
|
e31bd7600c85598de135f2eb5012e2f33fdb1d11
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2017-08-09T17:11:28.000Z
|
2017-08-09T17:11:28.000Z
|
import json
import logging
import string
import dateutil.parser
from django.db import transaction
from django.db.models import Count, Q
from django.http import (
HttpResponseForbidden, HttpResponseNotFound, JsonResponse,
)
from django.shortcuts import get_object_or_404
from django.utils.crypto import get_random_string
from django.utils.decorators import method_decorator
from django.utils.timezone import now
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import TemplateView, View
from pretix.base.models import Checkin, Event, Order, OrderPosition
from pretix.base.models.event import SubEvent
from pretix.control.permissions import EventPermissionRequiredMixin
from pretix.helpers.urls import build_absolute_uri
from pretix.multidomain.urlreverse import (
build_absolute_uri as event_absolute_uri,
)
logger = logging.getLogger('pretix.plugins.pretixdroid')
API_VERSION = 3
class ConfigView(EventPermissionRequiredMixin, TemplateView):
    """Admin page showing the QR configuration data for the pretixdroid app."""
    template_name = 'pretixplugins/pretixdroid/configuration.html'
    permission = 'can_change_orders'

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data()
        key = self.request.event.settings.get('pretixdroid_key')
        # Generate a fresh API key on first use or when explicitly flushed.
        if not key or 'flush_key' in self.request.GET:
            key = get_random_string(length=32,
                                    allowed_chars=string.ascii_uppercase + string.ascii_lowercase + string.digits)
            self.request.event.settings.set('pretixdroid_key', key)

        subevent = None
        url = build_absolute_uri('plugins:pretixdroid:api.redeem', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        })
        if self.request.event.has_subevents:
            # For event series, the QR code must target a specific subevent.
            if self.request.GET.get('subevent'):
                subevent = get_object_or_404(SubEvent, event=self.request.event, pk=self.request.GET['subevent'])
                url = build_absolute_uri('plugins:pretixdroid:api.redeem', kwargs={
                    'organizer': self.request.event.organizer.slug,
                    'event': self.request.event.slug,
                    'subevent': subevent.pk
                })
        ctx['subevent'] = subevent

        ctx['qrdata'] = json.dumps({
            'version': API_VERSION,
            'url': url[:-7],  # the slice removes the redeem/ part at the end
            'key': key,
        })
        return ctx
class ApiView(View):
    """Base view for the pretixdroid API: resolves the event, checks the key,
    and enforces subevent selection for event series."""

    @method_decorator(csrf_exempt)
    def dispatch(self, request, **kwargs):
        try:
            self.event = Event.objects.get(
                slug=self.kwargs['event'],
                organizer__slug=self.kwargs['organizer']
            )
        except Event.DoesNotExist:
            return HttpResponseNotFound('Unknown event')

        # Constant key comparison against the per-event configured API key;
        # '-unset-' default ensures a missing param never matches.
        if (not self.event.settings.get('pretixdroid_key')
                or self.event.settings.get('pretixdroid_key') != request.GET.get('key', '-unset-')):
            return HttpResponseForbidden('Invalid key')

        self.subevent = None
        if self.event.has_subevents:
            if 'subevent' in kwargs:
                self.subevent = get_object_or_404(SubEvent, event=self.event, pk=kwargs['subevent'])
            else:
                return HttpResponseForbidden('No subevent selected.')
        else:
            if 'subevent' in kwargs:
                return HttpResponseForbidden('Subevents not enabled.')

        return super().dispatch(request, **kwargs)
class ApiRedeemView(ApiView):
    """POST endpoint redeeming (checking in) a ticket identified by its secret."""

    def post(self, request, **kwargs):
        secret = request.POST.get('secret', '!INVALID!')
        force = request.POST.get('force', 'false') in ('true', 'True')
        nonce = request.POST.get('nonce')
        response = {
            'version': API_VERSION
        }

        # Offline scans may report the original scan time; default to now.
        if 'datetime' in request.POST:
            dt = dateutil.parser.parse(request.POST.get('datetime'))
        else:
            dt = now()

        try:
            with transaction.atomic():
                created = False
                op = OrderPosition.objects.select_related('item', 'variation', 'order', 'addon_to').get(
                    order__event=self.event, secret=secret, subevent=self.subevent
                )
                if op.order.status == Order.STATUS_PAID or force:
                    ci, created = Checkin.objects.get_or_create(position=op, defaults={
                        'datetime': dt,
                        'nonce': nonce,
                    })
                else:
                    response['status'] = 'error'
                    response['reason'] = 'unpaid'

                if 'status' not in response:
                    # A matching nonce means the same scan was re-sent
                    # (e.g. a retry); treat it like the first redemption.
                    if created or (nonce and nonce == ci.nonce):
                        response['status'] = 'ok'
                        if created:
                            op.order.log_action('pretix.plugins.pretixdroid.scan', data={
                                'position': op.id,
                                'positionid': op.positionid,
                                'first': True,
                                'forced': op.order.status != Order.STATUS_PAID,
                                'datetime': dt,
                            })
                    else:
                        if force:
                            response['status'] = 'ok'
                        else:
                            response['status'] = 'error'
                            response['reason'] = 'already_redeemed'
                        op.order.log_action('pretix.plugins.pretixdroid.scan', data={
                            'position': op.id,
                            'positionid': op.positionid,
                            'first': False,
                            'forced': force,
                            'datetime': dt,
                        })

                response['data'] = {
                    'secret': op.secret,
                    'order': op.order.code,
                    'item': str(op.item),
                    'variation': str(op.variation) if op.variation else None,
                    'attendee_name': op.attendee_name or (op.addon_to.attendee_name if op.addon_to else ''),
                }

        except OrderPosition.DoesNotExist:
            response['status'] = 'error'
            response['reason'] = 'unknown_ticket'

        return JsonResponse(response)
def serialize_op(op):
    """Build the JSON-serializable dict describing one order position."""
    variation = str(op.variation) if op.variation else None
    # Fall back to the base position's attendee name for add-on products.
    if op.attendee_name:
        attendee = op.attendee_name
    elif op.addon_to:
        attendee = op.addon_to.attendee_name
    else:
        attendee = ''
    return {
        'secret': op.secret,
        'order': op.order.code,
        'item': str(op.item),
        'variation': variation,
        'attendee_name': attendee,
        'redeemed': bool(op.checkin_cnt),
        'paid': op.order.status == Order.STATUS_PAID,
    }
class ApiSearchView(ApiView):
    """GET endpoint returning up to 25 positions matching a search query."""

    def get(self, request, **kwargs):
        query = request.GET.get('query', '!INVALID!')
        response = {
            'version': API_VERSION
        }

        # Require at least four characters so very short queries cannot be
        # used to cheaply enumerate tickets.
        if len(query) >= 4:
            # Matches on ticket secret prefix, attendee name substring, or
            # order code prefix, restricted to this (sub)event.
            ops = OrderPosition.objects.select_related('item', 'variation', 'order', 'addon_to').filter(
                Q(order__event=self.event)
                & Q(
                    Q(secret__istartswith=query) | Q(attendee_name__icontains=query) | Q(order__code__istartswith=query)
                )
                & Q(subevent=self.subevent)
            ).annotate(checkin_cnt=Count('checkins'))[:25]

            response['results'] = [serialize_op(op) for op in ops]
        else:
            response['results'] = []

        return JsonResponse(response)
class ApiDownloadView(ApiView):
    """GET endpoint returning every position of the (sub)event, e.g. for offline scanning."""

    def get(self, request, **kwargs):
        response = {
            'version': API_VERSION
        }
        # checkin_cnt annotation feeds serialize_op's 'redeemed' flag.
        ops = OrderPosition.objects.select_related('item', 'variation', 'order', 'addon_to').filter(
            Q(order__event=self.event) & Q(subevent=self.subevent)
        ).annotate(checkin_cnt=Count('checkins'))

        response['results'] = [serialize_op(op) for op in ops]
        return JsonResponse(response)
class ApiStatusView(ApiView):
    """GET endpoint returning aggregate check-in statistics for the (sub)event."""

    def get(self, request, **kwargs):
        # Name and dates come from the subevent when one is selected;
        # slug/organizer/settings always come from the parent event.
        ev = self.subevent or self.event
        response = {
            'version': API_VERSION,
            'event': {
                'name': str(ev.name),
                'slug': self.event.slug,
                'organizer': {
                    'name': str(self.event.organizer),
                    'slug': self.event.organizer.slug
                },
                'subevent': self.subevent.pk if self.subevent else str(self.event),
                'date_from': ev.date_from,
                'date_to': ev.date_to,
                'timezone': self.event.settings.timezone,
                'url': event_absolute_uri(self.event, 'presale:event.index')
            },
            'checkins': Checkin.objects.filter(
                position__order__event=self.event, position__subevent=self.subevent
            ).count(),
            'total': OrderPosition.objects.filter(
                order__event=self.event, order__status=Order.STATUS_PAID, subevent=self.subevent
            ).count()
        }

        # Totals of PAID positions, grouped by item and by variation.
        # order_by() clears the default ordering so GROUP BY stays clean.
        op_by_item = {
            p['item']: p['cnt']
            for p in OrderPosition.objects.filter(
                order__event=self.event,
                order__status=Order.STATUS_PAID,
                subevent=self.subevent
            ).order_by().values('item').annotate(cnt=Count('id'))
        }
        op_by_variation = {
            p['variation']: p['cnt']
            for p in OrderPosition.objects.filter(
                order__event=self.event,
                order__status=Order.STATUS_PAID,
                subevent=self.subevent
            ).order_by().values('variation').annotate(cnt=Count('id'))
        }
        # Check-in counts for paid positions, grouped the same way.
        c_by_item = {
            p['position__item']: p['cnt']
            for p in Checkin.objects.filter(
                position__order__event=self.event,
                position__order__status=Order.STATUS_PAID,
                position__subevent=self.subevent
            ).order_by().values('position__item').annotate(cnt=Count('id'))
        }
        c_by_variation = {
            p['position__variation']: p['cnt']
            for p in Checkin.objects.filter(
                position__order__event=self.event,
                position__order__status=Order.STATUS_PAID,
                position__subevent=self.subevent
            ).order_by().values('position__variation').annotate(cnt=Count('id'))
        }

        # Merge the aggregates into one entry per item, nesting variations.
        response['items'] = []
        for item in self.event.items.order_by('pk').prefetch_related('variations'):
            i = {
                'id': item.pk,
                'name': str(item),
                'admission': item.admission,
                'checkins': c_by_item.get(item.pk, 0),
                'total': op_by_item.get(item.pk, 0),
                'variations': []
            }
            for var in item.variations.all():
                i['variations'].append({
                    'id': var.pk,
                    'name': str(var),
                    'checkins': c_by_variation.get(var.pk, 0),
                    'total': op_by_variation.get(var.pk, 0),
                })
            response['items'].append(i)

        return JsonResponse(response)
| 38.136986
| 120
| 0.552712
|
acfb96f21513c24d399ad4f1ee700707fcada541
| 2,125
|
py
|
Python
|
Client/StickClient/ipExchange.py
|
nikolajlauridsen/LANStick
|
11cf2def5bde8c14c792f9a13c376226c0c8bb09
|
[
"MIT"
] | null | null | null |
Client/StickClient/ipExchange.py
|
nikolajlauridsen/LANStick
|
11cf2def5bde8c14c792f9a13c376226c0c8bb09
|
[
"MIT"
] | null | null | null |
Client/StickClient/ipExchange.py
|
nikolajlauridsen/LANStick
|
11cf2def5bde8c14c792f9a13c376226c0c8bb09
|
[
"MIT"
] | null | null | null |
from random_words import RandomWords, RandomNicknames
import requests
import socket
import hashlib
import json
import os
from .config import server_ip, server_port, listening_port
class IpExchange:
    """Client for the LANStick rendezvous server.

    Registers pending transfers (ip/port/filename keyed by a hashed pass
    phrase) with the central server and looks them up again on the
    receiving side.
    """

    def __init__(self):
        self.server_url = f'http://{server_ip}:{server_port}'
        # Word generators used to build human-readable pass phrases.
        self.rw = RandomWords()
        self.rn = RandomNicknames()

    def send_info(self, filename, _zip='no'):
        """Register an outgoing transfer with the server.

        :param filename: path of the file to be sent
        :param _zip: 'yes'/'no' flag telling the receiver whether the
                     payload is a zip archive
        :return: (passphrase, payload) -- the phrase to share with the
                 receiver and the metadata dict that was registered
        :raises requests.HTTPError: if the server rejects the registration
        """
        # Bug fix: measure the file *before* stripping the directory part.
        # The original called os.path.getsize() on the bare basename, which
        # crashed (or measured the wrong file) whenever a path with a
        # directory component was passed in.
        size = os.path.getsize(filename)
        filename = os.path.split(filename)[-1]
        # Get connection info
        ip = self.get_local_ip()
        # Generate a human-friendly two-word pass phrase, e.g. "AliceSunshine"
        passphrase = f"{self.rn.random_nick(gender='u').capitalize()}{self.rw.random_word().capitalize()}"
        # Only the MD5 digest of the phrase is ever sent to the server
        pass_hash = hashlib.md5(passphrase.encode()).hexdigest()
        # Send it off to the server
        payload = {"id": pass_hash,
                   "filename": filename,
                   "zip": _zip,
                   "size": size,
                   "ip": ip,
                   "port": listening_port}
        res = requests.post(f'{self.server_url}/transfer', data=payload)
        res.raise_for_status()
        return passphrase, payload

    def get_info(self, passphrase):
        """Look up the transfer metadata registered under *passphrase*.

        :return: the metadata dict as stored by send_info()
        :raises requests.HTTPError: if the server has no matching entry
        """
        # Hash the pass phrase the same way send_info() did
        pass_hash = hashlib.md5(passphrase.encode()).hexdigest()
        payload = {"id": pass_hash}
        res = requests.get(f'{self.server_url}/transfer', data=payload)
        res.raise_for_status()
        return json.loads(res.content.decode())

    def teardown(self, pass_hash):
        """Remove a registered transfer (identified by its hash) from the server."""
        payload = {'id': pass_hash}
        res = requests.post(f'{self.server_url}/teardown', data=payload)
        res.raise_for_status()

    @staticmethod
    def get_local_ip():
        """
        Get the local IP

        Thanks stackoverflow
        https://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib/25850698#25850698
        :return: IP address of the interface that routes toward the internet
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect(('8.8.8.8', 1))  # connect() for UDP doesn't send packets
            return s.getsockname()[0]
        finally:
            # Bug fix: the original never closed the socket, leaking an fd
            # on every call.
            s.close()
| 31.25
| 116
| 0.613176
|
acfb9776441228fae512cf7acc789f2200427a1b
| 113
|
py
|
Python
|
envplus/__init__.py
|
jsvine/envplus
|
1ad7e1fabd4f57b2af003c81bda8595609912af8
|
[
"MIT"
] | 54
|
2015-01-25T06:26:53.000Z
|
2022-02-28T08:50:53.000Z
|
envplus/__init__.py
|
jsvine/envplus
|
1ad7e1fabd4f57b2af003c81bda8595609912af8
|
[
"MIT"
] | 1
|
2017-11-02T14:14:34.000Z
|
2017-11-02T14:22:32.000Z
|
envplus/__init__.py
|
jsvine/envplus
|
1ad7e1fabd4f57b2af003c81bda8595609912af8
|
[
"MIT"
] | 8
|
2015-11-07T14:02:55.000Z
|
2021-07-06T22:44:59.000Z
|
import env
import pathfile
import helpers
# Package version, kept as a tuple for programmatic comparison and joined
# into the usual dotted string form for display/packaging.
VERSION_TUPLE = (0, 0, 0)
VERSION = ".".join(map(str, VERSION_TUPLE))
| 16.142857
| 43
| 0.725664
|
acfb979d076da8f693ce9546c5e2b18271a1dd1d
| 1,015
|
py
|
Python
|
merge_sort.py
|
Nefari0uss/grokking-algorithms
|
ec93a22e6c02dd1ce833c498b4abfc142fb10b5a
|
[
"MIT"
] | 1
|
2019-12-03T20:08:46.000Z
|
2019-12-03T20:08:46.000Z
|
merge_sort.py
|
Nefari0uss/grokking-algorithms
|
ec93a22e6c02dd1ce833c498b4abfc142fb10b5a
|
[
"MIT"
] | null | null | null |
merge_sort.py
|
Nefari0uss/grokking-algorithms
|
ec93a22e6c02dd1ce833c498b4abfc142fb10b5a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
def merge_sort(arr):
    """Sort *arr* in place with recursive merge sort and return it.

    Ties are resolved in favor of the right half, matching the original
    implementation's strict `<` comparison.
    """
    if len(arr) <= 1:
        return arr

    # Split, sort each half recursively (halves are independent copies).
    mid = len(arr) // 2
    left = merge_sort(arr[:mid])
    right = merge_sort(arr[mid:])

    # Merge the two sorted halves back into the original list.
    i = j = 0
    for k in range(len(arr)):
        take_left = j >= len(right) or (i < len(left) and left[i] < right[j])
        if take_left:
            arr[k] = left[i]
            i += 1
        else:
            arr[k] = right[j]
            j += 1
    return arr
# Demo: guard the script body so importing this module (e.g. to reuse
# merge_sort) no longer runs the example and prints as a side effect.
if __name__ == "__main__":
    data = [54, 26, 93, 17, 77, 31, 44, 55, 20]
    print("Original array: ", data)
    print("Sorted Array: ", merge_sort(data))
| 22.555556
| 75
| 0.44532
|
acfb995b08f80a3e0f3a6a243a2f9c41d0773e8c
| 218
|
py
|
Python
|
nlpaug/augmenter/char/__init__.py
|
techthiyanes/nlpaug
|
bb2fc63349bf949f6f6047ff447a0efb16983c0a
|
[
"MIT"
] | 3,121
|
2019-04-21T07:02:47.000Z
|
2022-03-31T22:17:36.000Z
|
nlpaug/augmenter/char/__init__.py
|
moh2236945/nlpaug
|
9d4fb11dcff9980ebaec9d8e6cc7a0381f7db67b
|
[
"MIT"
] | 186
|
2019-05-31T18:18:13.000Z
|
2022-03-28T10:11:05.000Z
|
nlpaug/augmenter/char/__init__.py
|
moh2236945/nlpaug
|
9d4fb11dcff9980ebaec9d8e6cc7a0381f7db67b
|
[
"MIT"
] | 371
|
2019-03-17T17:59:56.000Z
|
2022-03-31T01:45:15.000Z
|
from __future__ import absolute_import
from nlpaug.augmenter.char.char_augmenter import *
from nlpaug.augmenter.char.ocr import *
from nlpaug.augmenter.char.random import *
from nlpaug.augmenter.char.keyboard import *
| 36.333333
| 50
| 0.834862
|
acfb9a0e6bf6904024f672fbc7d5ac9e8d7bf878
| 351
|
py
|
Python
|
euler1.py
|
transacplus/euler_project
|
645e27064b005ad9a3c4a17e220e65b0cbabfa86
|
[
"MIT"
] | null | null | null |
euler1.py
|
transacplus/euler_project
|
645e27064b005ad9a3c4a17e220e65b0cbabfa86
|
[
"MIT"
] | null | null | null |
euler1.py
|
transacplus/euler_project
|
645e27064b005ad9a3c4a17e220e65b0cbabfa86
|
[
"MIT"
] | null | null | null |
"""
If we list all the natural numbers below 10 that are multiples of 3 or 5,
we get 3, 5, 6 and 9. The sum of these multiples is 23.

Find the sum of all the multiples of 3 or 5 below 1000.
"""


def sum_of_multiples(limit=1000):
    """Return the sum of natural numbers below *limit* divisible by 3 or 5."""
    return sum(i for i in range(limit) if i % 3 == 0 or i % 5 == 0)


if __name__ == "__main__":
    # 'multiples' replaces the original variable named 'list', which
    # shadowed the builtin of the same name.
    multiples = [i for i in range(1000) if i % 3 == 0 or i % 5 == 0]
    print(multiples)
    print(" Somme des éléments %s " % sum(multiples))
| 27
| 75
| 0.612536
|
acfb9a787bb7d69b24daec713ab2eb16c5cdd3dc
| 1,086
|
py
|
Python
|
flask_rest_experiments/ext/restapi/resources.py
|
knopperman/flask-rest-experiments
|
f57fba15fc8e4b3b968c1ce0402bef216a829bc5
|
[
"Unlicense"
] | null | null | null |
flask_rest_experiments/ext/restapi/resources.py
|
knopperman/flask-rest-experiments
|
f57fba15fc8e4b3b968c1ce0402bef216a829bc5
|
[
"Unlicense"
] | null | null | null |
flask_rest_experiments/ext/restapi/resources.py
|
knopperman/flask-rest-experiments
|
f57fba15fc8e4b3b968c1ce0402bef216a829bc5
|
[
"Unlicense"
] | null | null | null |
from flask import abort, jsonify
from flask_restful import Resource
from flask_simplelogin import login_required
from flask_rest_experiments.models import Product
class ProductResource(Resource):
    """REST resource for the product collection."""

    def get(self):
        """Return all products as JSON, or abort with 204 when there are none."""
        products = Product.query.all() or abort(204)
        return jsonify(
            {"products": [product.to_dict() for product in products]}
        )

    @login_required(basic=True, username="admin")
    def post(self):
        """
        Creates a new product.

        Only admin user authenticated using basic auth can post
        Basic takes base64 encrypted username:password.

        # curl -XPOST localhost:5000/api/v1/product/ \
        # -H "Authorization: Basic Y2h1Y2s6bm9ycmlz" \
        # -H "Content-Type: application/json"
        """
        # Bug fix: the original *returned* the NotImplementedError instance,
        # which Flask would then try to serialize as a response body.
        # Raising it correctly signals the unimplemented endpoint.
        raise NotImplementedError(
            "Someone please complete this example and send a PR :)"
        )
class ProductItemResource(Resource):
    """REST resource for a single product, addressed by its id."""

    def get(self, product_id):
        """Return one product as JSON, or abort with 404 if the id is unknown."""
        product = Product.query.filter_by(id=product_id).first() or abort(404)
        return jsonify(product.to_dict())
| 30.166667
| 78
| 0.662983
|
acfb9af0040d71cff90a76aa3f067ecd97e623e6
| 1,546
|
py
|
Python
|
setup.py
|
jenniferip/openexrpython
|
ea8b5080db1e63b1f150c835d750126056a4cee5
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
jenniferip/openexrpython
|
ea8b5080db1e63b1f150c835d750126056a4cee5
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
jenniferip/openexrpython
|
ea8b5080db1e63b1f150c835d750126056a4cee5
|
[
"BSD-3-Clause"
] | null | null | null |
from distutils.core import setup
from distutils.extension import Extension
from distutils.command.build_py import build_py as _build_py
from distutils.sysconfig import get_config_var
from distutils.version import LooseVersion
import sys
import os
import platform
from distutils.core import setup, Extension
version = "1.3.2"

# -g keeps debug symbols; VERSION is baked into the extension at compile time.
compiler_args = ['-g', '-DVERSION="%s"' % version]

if sys.platform == 'darwin':
    compiler_args.append('-std=c++14')
    # Raise the deployment target to at least 10.9 when the interpreter was
    # built against an older one -- presumably so the C++14 build links
    # against a modern SDK/runtime; TODO confirm the exact motivation.
    if 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
        current_system = LooseVersion(platform.mac_ver()[0])
        python_target = LooseVersion(
            get_config_var('MACOSX_DEPLOYMENT_TARGET'))
        if python_target < '10.9' and current_system >= '10.9':
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9'

setup(name='OpenEXR',
  author = 'James Bowman',
  author_email = 'jamesb@excamera.com',
  url = 'http://www.excamera.com/sphinx/articles-openexr.html',
  description = "Python bindings for ILM's OpenEXR image file format",
  long_description = "Python bindings for ILM's OpenEXR image file format",
  version=version,
  ext_modules=[
    # Single C++ extension module; header/library paths cover the common
    # system, Homebrew/local, and MacPorts install locations.
    Extension('OpenEXR',
              ['OpenEXR.cpp'],
              include_dirs=['/usr/include/OpenEXR', '/usr/local/include/OpenEXR', '/opt/local/include/OpenEXR', '/usr/local/include/Imath'],
              library_dirs=['/usr/local/lib', '/opt/local/lib'],
              libraries=['Iex', 'Half', 'Imath', 'IlmImf', 'OpenEXR', 'z'],
              extra_compile_args=compiler_args)
  ],
  py_modules=['Imath'],
)
| 36.809524
| 140
| 0.678525
|
acfb9b30f902a8997e2756431e3237c691ec2d0c
| 2,446
|
py
|
Python
|
paradrop/daemon/paradrop/core/config/uciutils.py
|
VegetableChook/Paradrop
|
a38e1773877d5b136c3b626edd8c033a12b43e56
|
[
"Apache-2.0"
] | 1
|
2018-03-22T13:04:19.000Z
|
2018-03-22T13:04:19.000Z
|
paradrop/daemon/paradrop/core/config/uciutils.py
|
VegetableChook/Paradrop
|
a38e1773877d5b136c3b626edd8c033a12b43e56
|
[
"Apache-2.0"
] | null | null | null |
paradrop/daemon/paradrop/core/config/uciutils.py
|
VegetableChook/Paradrop
|
a38e1773877d5b136c3b626edd8c033a12b43e56
|
[
"Apache-2.0"
] | null | null | null |
import traceback
from paradrop.lib.utils import uci
from paradrop.base.output import out
def setConfig(chute, old, cacheKeys, filepath):
    """
    Helper function used to modify config file of each various setting in /etc/config/

    :param chute: the chute whose cached config sections should be applied
    :param old: previous chute version (currently unused by this function)
    :param cacheKeys: cache keys on *chute* that hold (config, options) pairs
    :param filepath: path of the UCI config file to rewrite

    Returns:
        True: if it modified a file
        False: if it did NOT cause any modifications
    Raises exception if an error occurs.
    """
    # First pull out all the cache keys from the @new chute
    newconfigs = []
    for c in cacheKeys:
        t = chute.getCache(c)
        if(t):
            newconfigs += t

    if(len(newconfigs) == 0):
        out.info('no settings to add %r\n' % (chute))
        # We are no longer returning because we need to remove the old configs if necessary
        # return False

    # add comment to each config so we can differentiate between different chute specific configs
    for e in newconfigs:
        c, o = e
        c['comment'] = chute.name

    # Get the old configs from the file for this chuteid
    # Find the config file
    cfgFile = uci.UCIConfig(filepath)

    # Get all the configs that existed in the old version
    # Note we are getting the old configs from the etc/config/ file instead of the chute object
    # This is to improve reliability - sometimes the file isn't changed it should be
    # because we have reset the board, messed with chutes, etc. and the new/old chuteobjects are identical
    oldconfigs = cfgFile.getChuteConfigs(chute.name)

    if (uci.chuteConfigsMatch(oldconfigs, newconfigs)):
        # configs match, skipping reloading
        # Save a backup in case we need to restore.
        cfgFile.backup(backupToken="paradrop")
        return False
    else:
        # We need to make changes so delete old configs, load new configs
        # configs don't match, changing chutes and reloading
        cfgFile.delConfigs(oldconfigs)
        cfgFile.addConfigs(newconfigs)
        cfgFile.save(backupToken="paradrop", internalid=chute.name)
        return True
def restoreConfigFile(chute, configname):
    """
    Roll back a system configuration file to its "paradrop" backup.

    This can only be used during a chute update operation to revert changes
    that were made during that update operation.

    chute: the chute whose update is being reverted (not read here)
    configname: name of configuration file ("network", "wireless", etc.)
    """
    config = uci.UCIConfig(uci.getSystemPath(configname))
    config.restore(backupToken="paradrop", saveBackup=False)
| 35.449275
| 106
| 0.683156
|
acfb9bce60df8bcfc0d3e3b1bc4098b367dae573
| 888
|
py
|
Python
|
examples/use_instance.py
|
gitter-badger/Mi.py
|
ef6611c93c8a5237ec9d51ff89e845b85771e070
|
[
"MIT"
] | 13
|
2021-09-14T02:47:23.000Z
|
2022-02-27T16:48:09.000Z
|
examples/use_instance.py
|
gitter-badger/Mi.py
|
ef6611c93c8a5237ec9d51ff89e845b85771e070
|
[
"MIT"
] | 62
|
2021-08-28T10:56:55.000Z
|
2022-03-30T06:47:28.000Z
|
examples/use_instance.py
|
gitter-badger/Mi.py
|
ef6611c93c8a5237ec9d51ff89e845b85771e070
|
[
"MIT"
] | 3
|
2021-12-23T20:10:57.000Z
|
2022-03-30T13:19:49.000Z
|
import asyncio

from mi import Client, Drive, Note, Router
from mi.ext import tasks

# Streaming endpoint and API token of the target Misskey instance.
uri = "wss://example.com/streaming"
token = "This is your token"

bot = Client()


@tasks.loop(60)
async def task():
    """Periodic task that fires every 60 seconds while started."""
    # Runtime output intentionally kept in Japanese ("looping!").
    print("ループしてますよ~")


@bot.listen()
async def on_message(note: Note):
    """Print every incoming note with its author's instance and username."""
    print(
        f"{note.author.instance.name} | {note.author.username}さんがノートしました: {note.content}"
    )


@bot.listen()
async def on_reaction(ctx: Note):
    """Print reactions as they are added to notes."""
    print(
        f"{ctx.author.instance.name} | {ctx.author.name}さんがリアクションを付けました: {ctx.reactions}"
    )


@bot.event()
async def on_ready(ws):
    """Run once the websocket connection to the instance is established."""
    print("work on my machine")
    await Router(ws).connect_channel(["global", "main"])  # connect to the "global" and "main" channels
    task.start()  # start the periodic task
    res = await bot.post_note("Hello World")  # post a note
    print(res.content)
    task.stop()  # stop the periodic task


if __name__ == "__main__":
    asyncio.run(bot.start(uri, token))
| 21.658537
| 89
| 0.662162
|
acfb9c709f0d66b446caa642c29d71114f9fd4a1
| 3,042
|
py
|
Python
|
huaweicloud-sdk-vpc/huaweicloudsdkvpc/v2/model/list_ports_response.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-vpc/huaweicloudsdkvpc/v2/model/list_ports_response.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-vpc/huaweicloudsdkvpc/v2/model/list_ports_response.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListPortsResponse(SdkResponse):
    """Response model for the VPC ListPorts API call (generated SDK code).

    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    # Attribute names whose values are masked as "****" by to_dict().
    sensitive_list = []

    openapi_types = {
        'ports': 'list[Port]'
    }

    attribute_map = {
        'ports': 'ports'
    }

    def __init__(self, ports=None):
        """ListPortsResponse - a model defined in huaweicloud sdk"""
        super(ListPortsResponse, self).__init__()

        self._ports = None
        self.discriminator = None

        if ports is not None:
            self.ports = ports

    @property
    def ports(self):
        """Gets the ports of this ListPortsResponse.

        List of port objects.

        :return: The ports of this ListPortsResponse.
        :rtype: list[Port]
        """
        return self._ports

    @ports.setter
    def ports(self, ports):
        """Sets the ports of this ListPortsResponse.

        List of port objects.

        :param ports: The ports of this ListPortsResponse.
        :type: list[Port]
        """
        self._ports = ports

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models; sensitive attributes are masked.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        # Legacy Python 2 path: force UTF-8 default encoding before dumping.
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")

        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ListPortsResponse):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 26.452174
| 79
| 0.547009
|
acfb9eeeeb0e17e017e5a7954628057844b45f1c
| 1,436
|
py
|
Python
|
minion/server/app.py
|
timofurrer/minion-ci
|
411d0ea6638fb37d7e170cc8c8c5815304cc9f5c
|
[
"MIT"
] | 49
|
2016-03-07T06:42:40.000Z
|
2021-03-06T02:43:02.000Z
|
minion/server/app.py
|
timofurrer/minion-ci
|
411d0ea6638fb37d7e170cc8c8c5815304cc9f5c
|
[
"MIT"
] | 16
|
2016-03-08T07:20:52.000Z
|
2017-04-21T18:15:12.000Z
|
minion/server/app.py
|
timofurrer/minion-ci
|
411d0ea6638fb37d7e170cc8c8c5815304cc9f5c
|
[
"MIT"
] | 9
|
2016-03-29T22:08:52.000Z
|
2021-06-16T16:29:30.000Z
|
"""
`minion-ci` is a minimalist, decentralized, flexible Continuous Integration Server for hackers.
This module contains the server code for `minion-ci`.
:copyright: (c) by Timo Furrer
:license: MIT, see LICENSE for details
"""
import re
import click
import yaml
from flask import Flask
from jinja2 import evalcontextfilter, Markup, escape
from .config import DefaultConfig
from .routes import api
from .models import db
from .core import workers
def parse_config(path):
    """Parse the minion-ci server configuration file.

    :param path: path to a YAML configuration file
    :returns: the parsed configuration (typically a dict)
    :raises FileNotFoundError: if *path* does not exist
    """
    with open(path) as config_file:
        # Security fix: yaml.load() without an explicit Loader can execute
        # arbitrary Python objects embedded in the file; safe_load()
        # restricts input to plain YAML data and is sufficient for config.
        return yaml.safe_load(config_file)
@evalcontextfilter
def nl2br(eval_ctx, value):
    """Jinja2 filter turning plain text into paragraph/line-break HTML.

    The input is escaped first, so injected markup in *value* is neutralized
    before the <p>/<br> tags are added.
    """
    # Paragraphs are separated by two or more consecutive newlines.
    _paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
    result = u'\n\n'.join(u'<p>%s</p>' % p.replace('\n', '<br>\n') \
        for p in _paragraph_re.split(escape(value)))
    if eval_ctx.autoescape:
        # Mark as safe so autoescaping doesn't re-escape the generated tags.
        result = Markup(result)
    return result
def create_app(name):
    """Create and configure the minion-ci Flask application.

    :param name: currently unused; the app is always created as Flask(__name__)
    :returns: the configured Flask app
    """
    app = Flask(__name__)
    app.register_blueprint(api)
    app.config.from_object(DefaultConfig)
    try:
        app.config.update(parse_config(app.config["DEFAULT_CONFIGURATION_FILE"]))
    except FileNotFoundError:
        # Running without a user config file is fine; defaults apply.
        pass

    # add required filters
    app.jinja_env.filters["nl2br"] = nl2br

    # initialize mongodb engine for use in flask
    db.init_app(app)
    # initialize worker pool and job queue
    workers.init_app(app)
    return app
| 24.338983
| 99
| 0.688022
|
acfba0717929d57dce3244329753b74d69ff3d75
| 1,446
|
py
|
Python
|
get_snapshot.py
|
JavierOramas/scholar_standing_bot
|
9afde1fc0d56a3c57cf281092ff5c3d123ddac2f
|
[
"MIT"
] | null | null | null |
get_snapshot.py
|
JavierOramas/scholar_standing_bot
|
9afde1fc0d56a3c57cf281092ff5c3d123ddac2f
|
[
"MIT"
] | null | null | null |
get_snapshot.py
|
JavierOramas/scholar_standing_bot
|
9afde1fc0d56a3c57cf281092ff5c3d123ddac2f
|
[
"MIT"
] | 2
|
2021-09-19T21:08:55.000Z
|
2021-09-19T21:09:39.000Z
|
import os
import requests
import json
from bs4 import BeautifulSoup
def read_data(id):
    """Load the scholar database stored at ./db/<id>.json.

    :param id: chat/user identifier; converted to str for the filename
    :returns: the parsed JSON document, or {} when the file is missing
              or unreadable/corrupt
    """
    path = './db/' + str(id) + '.json'
    try:
        with open(path, 'r') as f:
            # json.load parses the whole document; the original readline()
            # silently dropped everything after the first line.
            return json.load(f)
    except (OSError, ValueError):
        # Missing file or malformed JSON -> start with an empty database.
        # Narrowed from the original bare `except`, which also hid
        # unrelated programming errors.
        return {}
def write_data(id, db):
    """Serialize *db* as JSON into ./db/<id>.json, overwriting any old file."""
    target = './db/' + str(id) + '.json'
    with open(target, 'w') as f:
        f.write(json.dumps(db))
import requests
def get_snapshot():
    """Take an SLP snapshot for every scholar database file in ./db/.

    For each tracked scholar, fetch the current in-game SLP total from the
    Sky Mavis API, append the delta since the last snapshot to a rolling
    14-entry history, and rewrite the database file.
    """
    for cp, dir, files in os.walk('./db/'):
        for file in files:
            if file.endswith('.json'):
                # One JSON database file per chat/user id.
                user = file[:-len('.json')]
                db = read_data(user)
                list = []  # NOTE(review): shadows the builtin 'list'; kept as-is.
                print(db)
                if len(db.keys()) > 0:
                    for i in db.keys():
                        wallet = db[i]['wallet']
                        yesterday = db[i]['yesterday']
                        # Network call per scholar; no timeout/error handling here.
                        slp = requests.get(f'https://game-api.skymavis.com/game-api/clients/{wallet}/items/1').json()['total']
                        # If the total dropped (presumably SLP was claimed),
                        # count the whole current total as the new earnings.
                        if slp >= yesterday:
                            slp_new = slp - yesterday
                        else:
                            slp_new = slp
                        db[i]['slp'].append(slp_new)
                        db[i]['yesterday'] = slp
                        # Keep only the most recent 14 entries of history.
                        if len(db[i]['slp']) > 14:
                            db[i]['slp'] = db[i]['slp'][-14:]
                        list.append((i, slp_new))
                    write_data(user, db)
                pass


# Runs immediately at import/execution time (no __main__ guard).
get_snapshot()
| 28.352941
| 126
| 0.425311
|
acfba0dedb6038e87af8a0469c64ee28ec9418a6
| 22,436
|
py
|
Python
|
sdk/python/pulumi_gcp/serviceaccount/iam_policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/serviceaccount/iam_policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/serviceaccount/iam_policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['IAMPolicyArgs', 'IAMPolicy']
@pulumi.input_type
class IAMPolicyArgs:
    # NOTE: this file is generated by the Pulumi tfgen tool; only the
    # comments/docstrings are adjusted here.
    def __init__(__self__, *,
                 policy_data: pulumi.Input[str],
                 service_account_id: pulumi.Input[str]):
        """
        The set of arguments for constructing a IAMPolicy resource.
        :param pulumi.Input[str] policy_data: The policy data generated by
               a `organizations.get_iam_policy` data source.
        :param pulumi.Input[str] service_account_id: The fully-qualified name of the service account to apply policy to.
        """
        pulumi.set(__self__, "policy_data", policy_data)
        pulumi.set(__self__, "service_account_id", service_account_id)

    @property
    @pulumi.getter(name="policyData")
    def policy_data(self) -> pulumi.Input[str]:
        """
        The policy data generated by
        a `organizations.get_iam_policy` data source.
        """
        return pulumi.get(self, "policy_data")

    @policy_data.setter
    def policy_data(self, value: pulumi.Input[str]):
        pulumi.set(self, "policy_data", value)

    @property
    @pulumi.getter(name="serviceAccountId")
    def service_account_id(self) -> pulumi.Input[str]:
        """
        The fully-qualified name of the service account to apply policy to.
        """
        return pulumi.get(self, "service_account_id")

    @service_account_id.setter
    def service_account_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_account_id", value)
@pulumi.input_type
class _IAMPolicyState:
    # NOTE: generated by the Pulumi tfgen tool; only comments/docs touched.
    # All fields are optional because this state type is used for lookup
    # and filtering, not construction.
    def __init__(__self__, *,
                 etag: Optional[pulumi.Input[str]] = None,
                 policy_data: Optional[pulumi.Input[str]] = None,
                 service_account_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering IAMPolicy resources.
        :param pulumi.Input[str] etag: (Computed) The etag of the service account IAM policy.
        :param pulumi.Input[str] policy_data: The policy data generated by
               a `organizations.get_iam_policy` data source.
        :param pulumi.Input[str] service_account_id: The fully-qualified name of the service account to apply policy to.
        """
        if etag is not None:
            pulumi.set(__self__, "etag", etag)
        if policy_data is not None:
            pulumi.set(__self__, "policy_data", policy_data)
        if service_account_id is not None:
            pulumi.set(__self__, "service_account_id", service_account_id)

    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) The etag of the service account IAM policy.
        """
        return pulumi.get(self, "etag")

    @etag.setter
    def etag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", value)

    @property
    @pulumi.getter(name="policyData")
    def policy_data(self) -> Optional[pulumi.Input[str]]:
        """
        The policy data generated by
        a `organizations.get_iam_policy` data source.
        """
        return pulumi.get(self, "policy_data")

    @policy_data.setter
    def policy_data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_data", value)

    @property
    @pulumi.getter(name="serviceAccountId")
    def service_account_id(self) -> Optional[pulumi.Input[str]]:
        """
        The fully-qualified name of the service account to apply policy to.
        """
        return pulumi.get(self, "service_account_id")

    @service_account_id.setter
    def service_account_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_account_id", value)
class IAMPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
policy_data: Optional[pulumi.Input[str]] = None,
service_account_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
When managing IAM roles, you can treat a service account either as a resource or as an identity. This resource is to add iam policy bindings to a service account resource, such as allowing the members to run operations as or modify the service account. To configure permissions for a service account on other GCP resources, use the google_project_iam set of resources.
Three different resources help you manage your IAM policy for a service account. Each of these resources serves a different use case:
* `serviceAccount.IAMPolicy`: Authoritative. Sets the IAM policy for the service account and replaces any existing policy already attached.
* `serviceAccount.IAMBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the service account are preserved.
* `serviceAccount.IAMMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the service account are preserved.
> **Note:** `serviceAccount.IAMPolicy` **cannot** be used in conjunction with `serviceAccount.IAMBinding` and `serviceAccount.IAMMember` or they will fight over what your policy should be.
> **Note:** `serviceAccount.IAMBinding` resources **can be** used in conjunction with `serviceAccount.IAMMember` resources **only if** they do not grant privilege to the same role.
## google\_service\_account\_iam\_policy
```python
import pulumi
import pulumi_gcp as gcp
admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
role="roles/iam.serviceAccountUser",
members=["user:jane@example.com"],
)])
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that only Jane can interact with")
admin_account_iam = gcp.service_account.IAMPolicy("admin-account-iam",
service_account_id=sa.name,
policy_data=admin.policy_data)
```
## google\_service\_account\_iam\_binding
```python
import pulumi
import pulumi_gcp as gcp
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that only Jane can use")
admin_account_iam = gcp.service_account.IAMBinding("admin-account-iam",
service_account_id=sa.name,
role="roles/iam.serviceAccountUser",
members=["user:jane@example.com"])
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that only Jane can use")
admin_account_iam = gcp.service_account.IAMBinding("admin-account-iam",
condition=gcp.service.account.IAMBindingConditionArgs(
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
title="expires_after_2019_12_31",
),
members=["user:jane@example.com"],
role="roles/iam.serviceAccountUser",
service_account_id=sa.name)
```
## google\_service\_account\_iam\_member
```python
import pulumi
import pulumi_gcp as gcp
default = gcp.compute.get_default_service_account()
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that Jane can use")
admin_account_iam = gcp.service_account.IAMMember("admin-account-iam",
service_account_id=sa.name,
role="roles/iam.serviceAccountUser",
member="user:jane@example.com")
# Allow SA service account use the default GCE account
gce_default_account_iam = gcp.service_account.IAMMember("gce-default-account-iam",
service_account_id=default.name,
role="roles/iam.serviceAccountUser",
member=sa.email.apply(lambda email: f"serviceAccount:{email}"))
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that Jane can use")
admin_account_iam = gcp.service_account.IAMMember("admin-account-iam",
condition=gcp.service.account.IAMMemberConditionArgs(
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
title="expires_after_2019_12_31",
),
member="user:jane@example.com",
role="roles/iam.serviceAccountUser",
service_account_id=sa.name)
```
## Import
Service account IAM resources can be imported using the project, service account email, role, member identity, and condition (beta).
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam projects/{your-project-id}/serviceAccounts/{your-service-account-email}
```
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/iam.serviceAccountUser"
```
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/editor user:foo@example.com"
```
-> **Custom Roles**If you're importing a IAM resource with a custom role, make sure to use the full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`. With conditions
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/iam.serviceAccountUser expires_after_2019_12_31"
```
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/iam.serviceAccountUser user:foo@example.com expires_after_2019_12_31"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] policy_data: The policy data generated by
a `organizations.get_iam_policy` data source.
:param pulumi.Input[str] service_account_id: The fully-qualified name of the service account to apply policy to.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: IAMPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
When managing IAM roles, you can treat a service account either as a resource or as an identity. This resource is to add iam policy bindings to a service account resource, such as allowing the members to run operations as or modify the service account. To configure permissions for a service account on other GCP resources, use the google_project_iam set of resources.
Three different resources help you manage your IAM policy for a service account. Each of these resources serves a different use case:
* `serviceAccount.IAMPolicy`: Authoritative. Sets the IAM policy for the service account and replaces any existing policy already attached.
* `serviceAccount.IAMBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the service account are preserved.
* `serviceAccount.IAMMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the service account are preserved.
> **Note:** `serviceAccount.IAMPolicy` **cannot** be used in conjunction with `serviceAccount.IAMBinding` and `serviceAccount.IAMMember` or they will fight over what your policy should be.
> **Note:** `serviceAccount.IAMBinding` resources **can be** used in conjunction with `serviceAccount.IAMMember` resources **only if** they do not grant privilege to the same role.
## google\_service\_account\_iam\_policy
```python
import pulumi
import pulumi_gcp as gcp
admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
role="roles/iam.serviceAccountUser",
members=["user:jane@example.com"],
)])
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that only Jane can interact with")
admin_account_iam = gcp.service_account.IAMPolicy("admin-account-iam",
service_account_id=sa.name,
policy_data=admin.policy_data)
```
## google\_service\_account\_iam\_binding
```python
import pulumi
import pulumi_gcp as gcp
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that only Jane can use")
admin_account_iam = gcp.service_account.IAMBinding("admin-account-iam",
service_account_id=sa.name,
role="roles/iam.serviceAccountUser",
members=["user:jane@example.com"])
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that only Jane can use")
admin_account_iam = gcp.service_account.IAMBinding("admin-account-iam",
condition=gcp.service.account.IAMBindingConditionArgs(
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
title="expires_after_2019_12_31",
),
members=["user:jane@example.com"],
role="roles/iam.serviceAccountUser",
service_account_id=sa.name)
```
## google\_service\_account\_iam\_member
```python
import pulumi
import pulumi_gcp as gcp
default = gcp.compute.get_default_service_account()
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that Jane can use")
admin_account_iam = gcp.service_account.IAMMember("admin-account-iam",
service_account_id=sa.name,
role="roles/iam.serviceAccountUser",
member="user:jane@example.com")
# Allow SA service account use the default GCE account
gce_default_account_iam = gcp.service_account.IAMMember("gce-default-account-iam",
service_account_id=default.name,
role="roles/iam.serviceAccountUser",
member=sa.email.apply(lambda email: f"serviceAccount:{email}"))
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
sa = gcp.service_account.Account("sa",
account_id="my-service-account",
display_name="A service account that Jane can use")
admin_account_iam = gcp.service_account.IAMMember("admin-account-iam",
condition=gcp.service.account.IAMMemberConditionArgs(
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
title="expires_after_2019_12_31",
),
member="user:jane@example.com",
role="roles/iam.serviceAccountUser",
service_account_id=sa.name)
```
## Import
Service account IAM resources can be imported using the project, service account email, role, member identity, and condition (beta).
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam projects/{your-project-id}/serviceAccounts/{your-service-account-email}
```
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/iam.serviceAccountUser"
```
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/editor user:foo@example.com"
```
-> **Custom Roles**If you're importing a IAM resource with a custom role, make sure to use the full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`. With conditions
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/iam.serviceAccountUser expires_after_2019_12_31"
```
```sh
$ pulumi import gcp:serviceAccount/iAMPolicy:IAMPolicy admin-account-iam "projects/{your-project-id}/serviceAccounts/{your-service-account-email} roles/iam.serviceAccountUser user:foo@example.com expires_after_2019_12_31"
```
:param str resource_name: The name of the resource.
:param IAMPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(IAMPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 policy_data: Optional[pulumi.Input[str]] = None,
                 service_account_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """Shared constructor body: validate options and register the resource.

        When ``opts.id`` is set, the call looks up an existing resource and
        ``__props__`` carries its state; otherwise a new resource is created
        and both ``policy_data`` and ``service_account_id`` are required
        (unless the resource is identified by URN).
        """
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            # Pin the provider plugin version shipped with this SDK build.
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from scratch.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = IAMPolicyArgs.__new__(IAMPolicyArgs)

            if policy_data is None and not opts.urn:
                raise TypeError("Missing required property 'policy_data'")
            __props__.__dict__["policy_data"] = policy_data
            if service_account_id is None and not opts.urn:
                raise TypeError("Missing required property 'service_account_id'")
            __props__.__dict__["service_account_id"] = service_account_id
            # etag is provider-computed output; it starts unset.
            __props__.__dict__["etag"] = None
        super(IAMPolicy, __self__).__init__(
            'gcp:serviceAccount/iAMPolicy:IAMPolicy',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            etag: Optional[pulumi.Input[str]] = None,
            policy_data: Optional[pulumi.Input[str]] = None,
            service_account_id: Optional[pulumi.Input[str]] = None) -> 'IAMPolicy':
        """
        Get an existing IAMPolicy resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] etag: (Computed) The etag of the service account IAM policy.
        :param pulumi.Input[str] policy_data: The policy data generated by
               a `organizations.get_iam_policy` data source.
        :param pulumi.Input[str] service_account_id: The fully-qualified name of the service account to apply policy to.
        """
        # Merging the id into opts makes the constructor perform a lookup
        # (the opts.id branch of _internal_init) instead of creating anew.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Bypass __init__ so no field is treated as required for a lookup.
        __props__ = _IAMPolicyState.__new__(_IAMPolicyState)

        __props__.__dict__["etag"] = etag
        __props__.__dict__["policy_data"] = policy_data
        __props__.__dict__["service_account_id"] = service_account_id
        return IAMPolicy(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        (Computed) The etag of the service account IAM policy.
        """
        # Read-only output populated by the provider, never set by the user.
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter(name="policyData")
    def policy_data(self) -> pulumi.Output[str]:
        """
        The policy data generated by
        a `organizations.get_iam_policy` data source.
        """
        # Exposed on the wire as "policyData" (see the getter name above).
        return pulumi.get(self, "policy_data")
    @property
    @pulumi.getter(name="serviceAccountId")
    def service_account_id(self) -> pulumi.Output[str]:
        """
        The fully-qualified name of the service account to apply policy to.
        """
        # Exposed on the wire as "serviceAccountId" (see the getter name above).
        return pulumi.get(self, "service_account_id")
| 45.601626
| 376
| 0.658406
|
acfba1c579321fcff81d1dace9a9d067362d39fb
| 5,985
|
py
|
Python
|
scripts/watchdogs-server.py
|
NanderSantos/My-WatchDog
|
4f32fa206787a490a606b35ee90a8e8687338593
|
[
"MIT"
] | null | null | null |
scripts/watchdogs-server.py
|
NanderSantos/My-WatchDog
|
4f32fa206787a490a606b35ee90a8e8687338593
|
[
"MIT"
] | null | null | null |
scripts/watchdogs-server.py
|
NanderSantos/My-WatchDog
|
4f32fa206787a490a606b35ee90a8e8687338593
|
[
"MIT"
] | null | null | null |
#python3.6
# native python libraries imports
import random
import time
import json
# extern libraries imports
from paho.mqtt import client as mqtt_client
# scripts imports
import firebasehandler
import emailhandler
# Functions
def pubAnimalName(message):
    """Handle a pubAnimalName report: pretty-print it and store the animal's name."""
    payload = json.loads(message)
    print("pubAnimalName:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Persist the name record through the Firebase handler.
    firebasehandler.nameAnimal(payload)
def pubAnimalPhoto(message):
    """Handle a pubAnimalPhoto report: pretty-print it and store the photo."""
    payload = json.loads(message)
    print("pubAnimalPhoto:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Upload the image payload to Firebase storage.
    firebasehandler.imageStore(payload)
def pubFoodReleaseDone(message):
    """Handle a pubFoodReleaseDone report: pretty-print it and record the release."""
    payload = json.loads(message)
    print("pubFoodReleaseDone:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Record the completed food release in Firebase.
    firebasehandler.foodReleaseRelat(payload)
def pubLowLevelFood(message):
    """Handle a low-food alert: log it and e-mail a refill warning for the spot."""
    payload = json.loads(message)
    print("pubLowLevelFood:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    spotID = payload["spotID"]
    # User-facing notification text is in Portuguese, like the rest of the app.
    emailhandler.sendEmail(
        "Olá!",
        f"O Posto de Alimentação de ID <{spotID}> está com um nível baixo de ração! É importante reabastecê-lo o mais rápido o possível!",
        "Alerta de Nível Baixo de Ração"
    )
def pubLogMessage(message):
    """Handle a device log report: print it, persist it, and e-mail the keeper."""
    payload = json.loads(message)
    print("pubLogMessage:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Persist the log entry first, then notify by e-mail.
    firebasehandler.pubLog(payload)
    spotID = payload["spotID"]
    errorMessage = payload["message"]
    emailhandler.sendEmail(
        "Olá!",
        f"O Posto de Alimentação de ID <{spotID}> registrou o seguinte log: \"{errorMessage}\"!",
        "Log registrado em um Posto de Alimentação"
    )
def pubSpotActivate(message):
    """Persist the spot's activation flag and forward the command to the ESP."""
    payload = json.loads(message)
    print("pubSpotActivate:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Mirror the new state into the realtime database.
    spot = firebasehandler.database.child("spots").child(payload["spotID"])
    spot.update({ "isSpotActivate": payload["status"] })
    # Relay the raw message to the device over the matching "sub" topic.
    client.publish("watchdog/reports/subSpotActivate", message)
def pubFoodReleaseActivate(message):
    """Persist the feeder's activation flag and forward the command to the ESP."""
    payload = json.loads(message)
    print("pubFoodReleaseActivate:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Mirror the new state into the realtime database.
    spot = firebasehandler.database.child("spots").child(payload["spotID"])
    spot.update({ "isFoodReleaseActivate": payload["status"] })
    # Relay the raw message to the device over the matching "sub" topic.
    client.publish("watchdog/reports/subFoodReleaseActivate", message)
def pubCamActivate(message):
    """Persist the camera's activation flag and forward the command to the ESP."""
    payload = json.loads(message)
    print("pubCamActivate:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Mirror the new state into the realtime database.
    spot = firebasehandler.database.child("spots").child(payload["spotID"])
    spot.update({ "isCamActivate": payload["status"] })
    # Relay the raw message to the device over the matching "sub" topic.
    client.publish("watchdog/reports/subCamActivate", message)
def pubPresenceSensorActivate(message):
    """Persist the presence sensor's activation flag and forward the command to the ESP."""
    payload = json.loads(message)
    print("pubPresenceSensorActivate:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Mirror the new state into the realtime database.
    spot = firebasehandler.database.child("spots").child(payload["spotID"])
    spot.update({ "isPresenceSensorActivate": payload["status"] })
    # Relay the raw message to the device over the matching "sub" topic.
    client.publish("watchdog/reports/subPresenceSensorActivate", message)
def pubRfidSensorActivate(message):
    """Persist the RFID sensor's activation flag and forward the command to the ESP."""
    payload = json.loads(message)
    print("pubRfidSensorActivate:")
    print(json.dumps(payload, sort_keys=True, indent=4))
    # Mirror the new state into the realtime database.
    spot = firebasehandler.database.child("spots").child(payload["spotID"])
    spot.update({ "isRfidSensorActivate": payload["status"] })
    # Relay the raw message to the device over the matching "sub" topic.
    client.publish("watchdog/reports/subRfidSensorActivate", message)
# Callbacks
# Routing table used both by on_message (topic -> handler dispatch) and by the
# startup subscribe call: each entry is (topic string, QoS level, handler).
MQTT_TOPICS = [("watchdog/reports/pubAnimalName", 0, pubAnimalName),
               ("watchdog/reports/pubAnimalPhoto", 0, pubAnimalPhoto),
               ("watchdog/reports/pubFoodReleaseDone", 0, pubFoodReleaseDone),
               ("watchdog/reports/pubLowLevelFood", 0, pubLowLevelFood),
               ("watchdog/reports/pubLogMessage", 0, pubLogMessage),
               ("watchdog/reports/pubSpotActivate", 0, pubSpotActivate),
               ("watchdog/reports/pubFoodReleaseActivate", 0, pubFoodReleaseActivate),
               ("watchdog/reports/pubCamActivate", 0, pubCamActivate),
               ("watchdog/reports/pubPresenceSensorActivate", 0, pubPresenceSensorActivate),
               ("watchdog/reports/pubRfidSensorActivate", 0, pubRfidSensorActivate)]

# MQTT configuration (public HiveMQ broker, plain TCP)
MQTT_BROKER = "broker.hivemq.com"
MQTT_PORT = 1883
# Random suffix lowers the chance of client-id collisions on the shared broker.
CLIENT_ID = f'python-mqtt-{random.randint(0, 1000)}'
def on_connect(client, userdata, flags, rc):
    """paho CONNACK callback; rc == 0 signals a successful connection."""
    global Connected  # polled by the startup code before subscribing
    if rc != 0:
        print("Connection failed")
        return
    print("Connected to broker")
    Connected = True
def on_message(client, userdata, message):
    """Dispatch an incoming MQTT message to the handler registered for its topic."""
    text = message.payload.decode("utf-8")
    for name, _qos, handler in MQTT_TOPICS:
        if name == message.topic:
            handler(text)
Connected = False  # flipped to True by on_connect once the broker acknowledges us

client = mqtt_client.Client(CLIENT_ID)   # create new client instance
client.on_connect = on_connect           # attach connection callback
client.on_message = on_message           # attach message callback
client.connect(MQTT_BROKER, MQTT_PORT)   # connect to broker
client.loop_start()                      # network loop runs in a background thread

# Wait for the CONNACK before subscribing; subscriptions sent earlier could be
# issued on a half-open session.
while not Connected:
    time.sleep(0.1)

# Subscribe to every report topic at its configured QoS in a single call.
client.subscribe([(topic, qos) for topic, qos, _handler in MQTT_TOPICS])

try:
    # Keep the main thread alive while the paho loop thread does the work.
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    print("exiting")
| 34.796512
| 134
| 0.707268
|
acfba1d3302216995a2a17ad3e4776c2e7cf9e11
| 23,864
|
py
|
Python
|
rubin_sim/maf/metrics/calibrationMetrics.py
|
RileyWClarke/flarubin
|
eb7b1ee21c828523f8a5374fe4510fe6e5ec2a2a
|
[
"MIT"
] | null | null | null |
rubin_sim/maf/metrics/calibrationMetrics.py
|
RileyWClarke/flarubin
|
eb7b1ee21c828523f8a5374fe4510fe6e5ec2a2a
|
[
"MIT"
] | null | null | null |
rubin_sim/maf/metrics/calibrationMetrics.py
|
RileyWClarke/flarubin
|
eb7b1ee21c828523f8a5374fe4510fe6e5ec2a2a
|
[
"MIT"
] | null | null | null |
import numpy as np
from .baseMetric import BaseMetric
import rubin_sim.maf.utils as mafUtils
import rubin_sim.utils as utils
from scipy.optimize import curve_fit
from builtins import str
__all__ = ['ParallaxMetric', 'ProperMotionMetric', 'RadiusObsMetric',
'ParallaxCoverageMetric', 'ParallaxDcrDegenMetric']
class ParallaxMetric(BaseMetric):
    """Calculate the uncertainty in a parallax measurement given a series of observations.

    Uses columns ra_pi_amp and dec_pi_amp, calculated by the ParallaxFactorStacker.

    Parameters
    ----------
    metricName : str, optional
        Default 'parallax'.
    m5Col : str, optional
        The default column name for m5 information in the input data. Default fiveSigmaDepth.
    filterCol : str, optional
        The column name for the filter information. Default filter.
    seeingCol : str, optional
        The column name for the seeing information. Since the astrometry errors are based on the physical
        size of the PSF, this should be the FWHM of the physical psf. Default seeingFwhmGeom.
    rmag : float, optional
        The r magnitude of the fiducial star in r band. Other filters are sclaed using sedTemplate keyword.
        Default 20.0
    SedTemplate : str, optional
        The template to use. This can be 'flat' or 'O','B','A','F','G','K','M'. Default flat.
    atm_err : float, optional
        The expected centroiding error due to the atmosphere, in arcseconds. Default 0.01.
    normalize : `bool`, optional
        Compare the astrometric uncertainty to the uncertainty that would result if half the observations
        were taken at the start and half at the end. A perfect survey will have a value close to 1, while
        a poorly scheduled survey will be close to 0. Default False.
    badval : float, optional
        The value to return when the metric value cannot be calculated. Default -666.
    """
    def __init__(self, metricName='parallax', m5Col='fiveSigmaDepth',
                 filterCol='filter', seeingCol='seeingFwhmGeom', rmag=20.,
                 SedTemplate='flat', badval=-666,
                 atm_err=0.01, normalize=False, **kwargs):
        Cols = [m5Col, filterCol, seeingCol, 'ra_pi_amp', 'dec_pi_amp']
        # The normalized variant is a dimensionless ratio; otherwise milliarcsec.
        if normalize:
            units = 'ratio'
        else:
            units = 'mas'
        super(ParallaxMetric, self).__init__(Cols, metricName=metricName, units=units,
                                             badval=badval, **kwargs)
        self.m5Col = m5Col
        self.seeingCol = seeingCol
        self.filterCol = filterCol
        filters = ['u', 'g', 'r', 'i', 'z', 'y']
        self.mags = {}
        if SedTemplate == 'flat':
            # Flat SED: the fiducial star has magnitude rmag in every band.
            for f in filters:
                self.mags[f] = rmag
        else:
            self.mags = utils.stellarMags(SedTemplate, rmag=rmag)
        self.atm_err = atm_err
        self.normalize = normalize
        self.comment = 'Estimated uncertainty in parallax measurement ' \
                       '(assuming no proper motion or that proper motion '
        self.comment += 'is well fit). Uses measurements in all bandpasses, ' \
                        'and estimates astrometric error based on SNR '
        self.comment += 'in each visit. '
        if SedTemplate == 'flat':
            self.comment += 'Assumes a flat SED. '
        if self.normalize:
            self.comment += 'This normalized version of the metric displays the ' \
                            'estimated uncertainty in the parallax measurement, '
            self.comment += 'divided by the minimum parallax uncertainty possible ' \
                            '(if all visits were six '
            self.comment += 'months apart). Values closer to 1 indicate more optimal ' \
                            'scheduling for parallax measurement.'

    def _final_sigma(self, position_errors, ra_pi_amp, dec_pi_amp):
        """Assume parallax in RA and DEC are fit independently, then combined.
        All inputs assumed to be arcsec. Returns the combined uncertainty in mas."""
        with np.errstate(divide='ignore', invalid='ignore'):
            sigma_A = position_errors/ra_pi_amp
            sigma_B = position_errors/dec_pi_amp
            sigma_ra = np.sqrt(1./np.sum(1./sigma_A**2))
            sigma_dec = np.sqrt(1./np.sum(1./sigma_B**2))
            # Combine RA and Dec uncertainties, convert to mas
            sigma = np.sqrt(1./(1./sigma_ra**2+1./sigma_dec**2))*1e3
        return sigma

    def run(self, dataslice, slicePoint=None):
        # Robustness: an empty data slice has no filters and would raise on
        # filters[0] below; report badval instead.
        if len(dataslice) == 0:
            return self.badval
        filters = np.unique(dataslice[self.filterCol])
        if hasattr(filters[0], 'decode'):
            # Byte-string filter names (e.g. from numpy/FITS) -> plain str.
            filters = [str(f.decode('utf-8')) for f in filters]
        snr = np.zeros(len(dataslice), dtype='float')
        # compute SNR for all observations
        for filt in filters:
            good = np.where(dataslice[self.filterCol] == filt)
            snr[good] = mafUtils.m52snr(self.mags[str(filt)], dataslice[self.m5Col][good])
        # Per-visit centroid error: astrometric precision floor plus atmosphere.
        position_errors = np.sqrt(mafUtils.astrom_precision(dataslice[self.seeingCol],
                                                            snr)**2+self.atm_err**2)
        sigma = self._final_sigma(position_errors, dataslice['ra_pi_amp'], dataslice['dec_pi_amp'])
        if self.normalize:
            # Leave the dec parallax as zero since one can't have ra and dec maximized at the same time.
            sigma = self._final_sigma(position_errors,
                                      dataslice['ra_pi_amp']*0+1., dataslice['dec_pi_amp']*0)/sigma
        return sigma
class ProperMotionMetric(BaseMetric):
    """Calculate the uncertainty in the returned proper motion.

    This metric assumes gaussian errors in the astrometry measurements.

    Parameters
    ----------
    metricName : str, optional
        Default 'properMotion'.
    m5Col : str, optional
        The default column name for m5 information in the input data. Default fiveSigmaDepth.
    mjdCol : str, optional
        The column name for the exposure time. Default observationStartMJD.
    filterCol : str, optional
        The column name for the filter information. Default filter.
    seeingCol : str, optional
        The column name for the seeing information. Since the astrometry errors are based on the physical
        size of the PSF, this should be the FWHM of the physical psf. Default seeingFwhmGeom.
    rmag : float, optional
        The r magnitude of the fiducial star in r band. Other filters are sclaed using sedTemplate keyword.
        Default 20.0
    SedTemplate : str, optional
        The template to use. This can be 'flat' or 'O','B','A','F','G','K','M'. Default flat.
    atm_err : float, optional
        The expected centroiding error due to the atmosphere, in arcseconds. Default 0.01.
    normalize : `bool`, optional
        Compare the astrometric uncertainty to the uncertainty that would result if half the observations
        were taken at the start and half at the end. A perfect survey will have a value close to 1, while
        a poorly scheduled survey will be close to 0. Default False.
    baseline : float, optional
        The length of the survey used for the normalization, in years. Default 10.
    badval : float, optional
        The value to return when the metric value cannot be calculated. Default -666.
    """
    def __init__(self, metricName='properMotion',
                 m5Col='fiveSigmaDepth', mjdCol='observationStartMJD',
                 filterCol='filter', seeingCol='seeingFwhmGeom', rmag=20.,
                 SedTemplate='flat', badval=-666,
                 atm_err=0.01, normalize=False,
                 baseline=10., **kwargs):
        cols = [m5Col, mjdCol, filterCol, seeingCol]
        # The normalized variant is a dimensionless ratio; otherwise mas/yr.
        if normalize:
            units = 'ratio'
        else:
            units = 'mas/yr'
        super(ProperMotionMetric, self).__init__(col=cols, metricName=metricName, units=units,
                                                 badval=badval, **kwargs)
        self.mjdCol = mjdCol
        self.seeingCol = seeingCol
        self.m5Col = m5Col
        filters = ['u', 'g', 'r', 'i', 'z', 'y']
        self.mags = {}
        if SedTemplate == 'flat':
            # Flat SED: the fiducial star has magnitude rmag in every band.
            for f in filters:
                self.mags[f] = rmag
        else:
            self.mags = utils.stellarMags(SedTemplate, rmag=rmag)
        self.atm_err = atm_err
        self.normalize = normalize
        self.baseline = baseline
        self.comment = 'Estimated uncertainty of the proper motion fit ' \
                       '(assuming no parallax or that parallax is well fit). '
        self.comment += 'Uses visits in all bands, and generates approximate ' \
                        'astrometric errors using the SNR in each visit. '
        if SedTemplate == 'flat':
            self.comment += 'Assumes a flat SED. '
        if self.normalize:
            self.comment += 'This normalized version of the metric represents ' \
                            'the estimated uncertainty in the proper '
            self.comment += 'motion divided by the minimum uncertainty possible ' \
                            '(if all visits were '
            self.comment += 'obtained on the first and last days of the survey). '
            self.comment += 'Values closer to 1 indicate more optimal scheduling.'

    def run(self, dataslice, slicePoint=None):
        filters = np.unique(dataslice['filter'])
        filters = [str(f) for f in filters]
        precis = np.zeros(dataslice.size, dtype='float')
        for f in filters:
            observations = np.where(dataslice['filter'] == f)
            if np.size(observations[0]) < 2:
                # A slope cannot be fit to fewer than 2 visits in a filter.
                precis[observations] = self.badval
            else:
                snr = mafUtils.m52snr(self.mags[f],
                                      dataslice[self.m5Col][observations])
                precis[observations] = mafUtils.astrom_precision(
                    dataslice[self.seeingCol][observations], snr)
                precis[observations] = np.sqrt(precis[observations]**2 + self.atm_err**2)
        good = np.where(precis != self.badval)
        # Robustness: with no usable visits at all, sigma_slope would be fed
        # empty arrays; report badval directly instead.
        if good[0].size == 0:
            return self.badval
        result = mafUtils.sigma_slope(dataslice[self.mjdCol][good], precis[good])
        result = result*365.25*1e3  # Convert to mas/yr
        # (was a bitwise `&` on booleans; logical `and` is the intended test)
        if self.normalize and good[0].size > 0:
            # Best possible case: half the visits at the survey start, half at the end.
            new_dates = dataslice[self.mjdCol][good]*0
            nDates = new_dates.size
            new_dates[nDates//2:] = self.baseline*365.25
            result = (mafUtils.sigma_slope(new_dates, precis[good])*365.25*1e3)/result
        # Observations that are very close together can still fail
        if np.isnan(result):
            result = self.badval
        return result
class ParallaxCoverageMetric(BaseMetric):
    """
    Check how well the parallax factor is distributed. Subtracts the weighted mean position of the
    parallax offsets, then computes the weighted mean radius of the points.
    If points are well distributed, the mean radius will be near 1. If phase coverage is bad,
    radius will be close to zero.

    For points on the Ecliptic, uniform sampling should result in a metric value of ~0.5.
    At the poles, uniform sampling would result in a metric value of ~1.
    Conceptually, it is helpful to remember that the parallax motion of a star at the pole is
    a (nearly circular) ellipse while the motion of a star on the ecliptic is a straight line. Thus, any
    pair of observations separated by 6 months will give the full parallax range for a star on the pole
    but only observations on very specific dates will give the full range for a star on the ecliptic.

    Optionally also demand that there are observations above the snrLimit kwarg spanning thetaRange radians.

    Parameters
    ----------
    m5Col: str, optional
        Column name for individual visit m5. Default fiveSigmaDepth.
    mjdCol: str, optional
        Column name for exposure time dates. Default observationStartMJD.
    filterCol: str, optional
        Column name for filter. Default filter.
    seeingCol: str, optional
        Column name for seeing (assumed FWHM). Default seeingFwhmGeom.
    rmag: float, optional
        Magnitude of fiducial star in r filter. Other filters are scaled using sedTemplate keyword.
        Default 20.0
    sedTemplate: str, optional
        Template to use (can be 'flat' or 'O','B','A','F','G','K','M'). Default 'flat'.
    atm_err: float, optional
        Centroiding error due to atmosphere in arcsec. Default 0.01 (arcseconds).
    thetaRange: float, optional
        Range of parallax offset angles to demand (in radians). Default=0 (means no range requirement).
    snrLimit: float, optional
        Only include points above the snrLimit when computing thetaRange. Default 5.

    Returns
    --------
    metricValue: float
        Returns a weighted mean of the length of the parallax factor vectors.
        Values near 1 imply that the points are well distributed.
        Values near 0 imply that the parallax phase coverage is bad.
        Near the ecliptic, uniform sampling results in metric values of about 0.5.

    Notes
    -----
    Uses the ParallaxFactor stacker to calculate ra_pi_amp and dec_pi_amp.
    """
    def __init__(self, metricName='ParallaxCoverageMetric', m5Col='fiveSigmaDepth',
                 mjdCol='observationStartMJD', filterCol='filter', seeingCol='seeingFwhmGeom',
                 rmag=20., SedTemplate='flat',
                 atm_err=0.01, thetaRange=0., snrLimit=5, **kwargs):
        cols = ['ra_pi_amp', 'dec_pi_amp', m5Col, mjdCol, filterCol, seeingCol]
        units = 'ratio'
        super(ParallaxCoverageMetric, self).__init__(cols,
                                                     metricName=metricName, units=units,
                                                     **kwargs)
        self.m5Col = m5Col
        self.seeingCol = seeingCol
        self.filterCol = filterCol
        self.mjdCol = mjdCol

        # Demand the range of theta values
        self.thetaRange = thetaRange
        self.snrLimit = snrLimit

        filters = ['u', 'g', 'r', 'i', 'z', 'y']
        self.mags = {}
        if SedTemplate == 'flat':
            # Flat SED: the fiducial star has magnitude rmag in every band.
            for f in filters:
                self.mags[f] = rmag
        else:
            self.mags = utils.stellarMags(SedTemplate, rmag=rmag)
        self.atm_err = atm_err
        caption = "Parallax factor coverage for an r=%.2f star (0 is bad, 0.5-1 is good). " % (rmag)
        caption += "One expects the parallax factor coverage to vary because stars on the ecliptic "
        caption += "can be observed when they have no parallax offset while stars at the pole are always "
        caption += "offset by the full parallax offset."
        self.comment = caption

    def _thetaCheck(self, ra_pi_amp, dec_pi_amp, snr):
        """Return 1 if the high-SNR parallax offset angles span at least
        self.thetaRange radians (checked in two rotations), else 0."""
        good = np.where(snr >= self.snrLimit)
        theta = np.arctan2(dec_pi_amp[good], ra_pi_amp[good])
        # Make values between 0 and 2pi
        theta = theta-np.min(theta)
        result = 0.
        if np.max(theta) >= self.thetaRange:
            # Check that things are in differnet quadrants
            # Bugfix: the modulus must be (2*pi).  The previous expression
            # `(theta+np.pi) % 2.*np.pi` parsed as `((theta+np.pi) % 2.) * np.pi`
            # because `%` and `*` share precedence (left-to-right).
            theta = (theta+np.pi) % (2.*np.pi)
            theta = theta-np.min(theta)
            if np.max(theta) >= self.thetaRange:
                result = 1
        return result

    def _computeWeights(self, dataSlice, snr):
        # Compute centroid uncertainty in each visit
        position_errors = np.sqrt(mafUtils.astrom_precision(dataSlice[self.seeingCol],
                                                            snr)**2+self.atm_err**2)
        weights = 1./position_errors**2
        return weights

    def _weightedR(self, ra_pi_amp, dec_pi_amp, weights):
        """Weighted mean radius of the parallax-offset points about their
        weighted centroid.  Symmetric in the two coordinates, so argument
        order does not change the result (parameter names renamed to match
        the call in run())."""
        xcoord = ra_pi_amp-np.average(ra_pi_amp, weights=weights)
        ycoord = dec_pi_amp-np.average(dec_pi_amp, weights=weights)
        radius = np.sqrt(xcoord**2+ycoord**2)
        aveRad = np.average(radius, weights=weights)
        return aveRad

    def run(self, dataSlice, slicePoint=None):
        if np.size(dataSlice) < 2:
            return self.badval

        filters = np.unique(dataSlice[self.filterCol])
        filters = [str(f) for f in filters]
        snr = np.zeros(len(dataSlice), dtype='float')
        # compute SNR for all observations
        for filt in filters:
            inFilt = np.where(dataSlice[self.filterCol] == filt)
            snr[inFilt] = mafUtils.m52snr(self.mags[str(filt)], dataSlice[self.m5Col][inFilt])

        weights = self._computeWeights(dataSlice, snr)
        aveR = self._weightedR(dataSlice['ra_pi_amp'], dataSlice['dec_pi_amp'], weights)
        if self.thetaRange > 0:
            thetaCheck = self._thetaCheck(dataSlice['ra_pi_amp'], dataSlice['dec_pi_amp'], snr)
        else:
            thetaCheck = 1.
        result = aveR*thetaCheck
        return result
class ParallaxDcrDegenMetric(BaseMetric):
"""Use the full parallax and DCR displacement vectors to find if they are degenerate.
Parameters
----------
metricName : str, optional
Default 'ParallaxDcrDegenMetric'.
seeingCol : str, optional
Default 'FWHMgeom'
m5Col : str, optional
Default 'fiveSigmaDepth'
filterCol : str
Default 'filter'
atm_err : float
Minimum error in photometry centroids introduced by the atmosphere (arcseconds). Default 0.01.
rmag : float
r-band magnitude of the fiducual star that is being used (mag).
SedTemplate : str
The SED template to use for fiducia star colors, passed to rubin_sim.utils.stellarMags.
Default 'flat'
tol : float
Tolerance for how well curve_fit needs to work before believing the covariance result.
Default 0.05.
Returns
-------
metricValue : float
Returns the correlation coefficient between the best-fit parallax amplitude and DCR amplitude.
The RA and Dec offsets are fit simultaneously. Values close to zero are good, values close to +/- 1
are bad. Experience with fitting Monte Carlo simulations suggests the astrometric fits start
becoming poor around a correlation of 0.7.
"""
    def __init__(self, metricName='ParallaxDcrDegenMetric', seeingCol='seeingFwhmGeom',
                 m5Col='fiveSigmaDepth', atm_err=0.01, rmag=20., SedTemplate='flat',
                 filterCol='filter', tol=0.05, **kwargs):
        # Column names stored for the per-visit SNR / seeing lookups in run().
        self.m5Col = m5Col
        self.seeingCol = seeingCol
        self.filterCol = filterCol
        # Fit tolerance: how close the curve_fit result must be to [1, 1]
        # before the covariance is trusted.
        self.tol = tol
        units = 'Correlation'
        # just put all the columns that all the stackers will need here?
        cols = ['ra_pi_amp', 'dec_pi_amp', 'ra_dcr_amp', 'dec_dcr_amp',
                seeingCol, m5Col]
        super(ParallaxDcrDegenMetric, self).__init__(cols, metricName=metricName, units=units,
                                                     **kwargs)
        self.filters = ['u', 'g', 'r', 'i', 'z', 'y']
        self.mags = {}
        if SedTemplate == 'flat':
            # Flat SED: the fiducial star has magnitude rmag in every band.
            for f in self.filters:
                self.mags[f] = rmag
        else:
            self.mags = utils.stellarMags(SedTemplate, rmag=rmag)
        self.atm_err = atm_err
def _positions(self, x, a, b):
"""
Function to find parallax and dcr amplitudes
x should be a vector with [[parallax_x1, parallax_x2..., parallax_y1, parallax_y2...],
[dcr_x1, dcr_x2..., dcr_y1, dcr_y2...]]
"""
result = a*x[0, :] + b*x[1, :]
return result
    def run(self, dataSlice, slicePoint=None):
        """Fit the summed parallax+DCR astrometric signal and return the
        normalized correlation between the two best-fit amplitudes.

        Values near zero mean the parallax and DCR signals are separable;
        values approaching +/-1 mean they are degenerate.  Returns
        ``self.badval`` when the fit diverges from [1, 1] by more than
        ``self.tol`` or the correlation comes out infinite.
        """
        # The idea here is that we calculate position errors (in RA and Dec) for all observations.
        # Then we generate arrays of the parallax offsets (delta RA parallax = ra_pi_amp, etc)
        # and the DCR offsets (delta RA DCR = ra_dcr_amp, etc), and just add them together into one
        # RA (and Dec) offset. Then, we try to fit for how we combined these offsets, but while
        # considering the astrometric noise. If we can figure out that we just added them together
        # (i.e. the curve_fit result is [a=1, b=1] for the function _positions above)
        # then we should be able to disentangle the parallax and DCR offsets when fitting 'for real'.
        # compute SNR for all observations, per filter (the fiducial star has
        # a different magnitude in each band).
        snr = np.zeros(len(dataSlice), dtype='float')
        for filt in self.filters:
            inFilt = np.where(dataSlice[self.filterCol] == filt)
            snr[inFilt] = mafUtils.m52snr(self.mags[filt], dataSlice[self.m5Col][inFilt])
        # Compute the centroiding uncertainties
        # Note that these centroiding uncertainties depend on the physical size of the PSF, thus
        # we are using seeingFwhmGeom for these metrics, not seeingFwhmEff.
        # The atmospheric floor is added in quadrature.
        position_errors = np.sqrt(mafUtils.astrom_precision(dataSlice[self.seeingCol], snr)**2 +
                                  self.atm_err**2)
        # Construct the vectors of RA/Dec offsets. xdata is the "input data". ydata is the "output".
        xdata = np.empty((2, dataSlice.size * 2), dtype=float)
        xdata[0, :] = np.concatenate((dataSlice['ra_pi_amp'], dataSlice['dec_pi_amp']))
        xdata[1, :] = np.concatenate((dataSlice['ra_dcr_amp'], dataSlice['dec_dcr_amp']))
        ydata = np.sum(xdata, axis=0)
        # Use curve_fit to compute covariance between parallax and dcr amplitudes
        # Set the initial guess slightly off from the correct [1,1] to make sure it iterates.
        popt, pcov = curve_fit(self._positions, xdata, ydata, p0=[1.1, 0.9],
                               sigma=np.concatenate((position_errors, position_errors)),
                               absolute_sigma=True)
        # Catch if the fit failed to converge on the correct solution.
        if np.max(np.abs(popt - np.array([1., 1.]))) > self.tol:
            return self.badval
        # Covariance between best fit parallax amplitude and DCR amplitude.
        cov = pcov[1, 0]
        # Convert covarience between parallax and DCR amplitudes to normalized correlation
        perr = np.sqrt(np.diag(pcov))
        correlation = cov/(perr[0]*perr[1])
        result = correlation
        # This can throw infs.
        if np.isinf(result):
            result = self.badval
        return result
def calcDist_cosines(RA1, Dec1, RA2, Dec2):
    # Taken from simSelfCalib.py
    """Angular separation on the sphere via the spherical law of cosines.

    All inputs are RA/Dec in radians (scalars or numpy arrays, thanks to
    numpy broadcasting); the returned separation(s) are in radians.

    Note: this formulation suffers rounding error for very small
    separations (see http://en.wikipedia.org/wiki/Great-circle_distance),
    but it is accurate enough for these calculations.
    """
    cos_sep = (np.sin(Dec1) * np.sin(Dec2) +
               np.cos(Dec1) * np.cos(Dec2) * np.cos(RA2 - RA1))
    return np.arccos(cos_sep)
class RadiusObsMetric(BaseMetric):
    """Angular distance of each observation from the slicepoint.

    ``run`` returns the per-visit offsets in degrees; the reduce methods
    summarize that distribution.
    """

    def __init__(self, metricName='radiusObs', raCol='fieldRA', decCol='fieldDec',
                 units='radians', **kwargs):
        self.raCol = raCol
        self.decCol = decCol
        super(RadiusObsMetric, self).__init__(col=[self.raCol, self.decCol],
                                              metricName=metricName, units=units, **kwargs)

    def run(self, dataSlice, slicePoint):
        """Return the angular offsets (degrees) of every visit from the slicepoint."""
        separations = calcDist_cosines(slicePoint['ra'], slicePoint['dec'],
                                       np.radians(dataSlice[self.raCol]),
                                       np.radians(dataSlice[self.decCol]))
        return np.degrees(separations)

    def reduceMean(self, distances):
        """Mean offset."""
        return np.mean(distances)

    def reduceRMS(self, distances):
        """Standard deviation of the offsets."""
        return np.std(distances)

    def reduceFullRange(self, distances):
        """Spread between the largest and smallest offset."""
        return np.max(distances) - np.min(distances)
| 48.307692
| 108
| 0.626006
|
acfba22177c0ee26356972d60527460bef1340f5
| 5,893
|
py
|
Python
|
dewar/dewar.py
|
tfpk/dewar
|
ccac973a4115d1d264f6b74e61beca05deeb000c
|
[
"Apache-2.0"
] | 1
|
2022-02-23T08:09:47.000Z
|
2022-02-23T08:09:47.000Z
|
dewar/dewar.py
|
tfpk/dewar
|
ccac973a4115d1d264f6b74e61beca05deeb000c
|
[
"Apache-2.0"
] | 4
|
2020-03-24T16:45:25.000Z
|
2021-06-01T23:21:32.000Z
|
dewar/dewar.py
|
tfpk/dewar
|
ccac973a4115d1d264f6b74e61beca05deeb000c
|
[
"Apache-2.0"
] | 1
|
2020-06-22T19:07:15.000Z
|
2020-06-22T19:07:15.000Z
|
from collections import namedtuple
from pathlib import Path
import functools
import shutil
import time
from proxy_tools import module_property
from dewar.jinja import JINJA_FUNCTIONS
from dewar.parser import fill_path
from dewar.validator import validate_page
from dewar._internal import get_caller_location, get_closest_site
from jinja2 import Environment, FileSystemLoader, select_autoescape
@module_property
def site():
    """Return the "current" :class:`Site` instance.

    Resolution order:

    1. Walk the call stack for a function registered to a site and return
       that site (``get_closest_site``).
    2. Otherwise fall back to the most recently constructed ``Site``.
    3. If neither exists, raise ``RuntimeError``.
    """
    try:
        return get_closest_site()
    except RuntimeError:
        # No registered page function on the stack; fall through to the
        # most recently created instance, if any.
        pass
    instances = globals().get('_site_instances')
    if instances is not None:
        return instances[-1]
    raise RuntimeError("Site could not be found.")
class Site:
    """This is the root class of any dewar project, that encapsulates
    all the pages in a project.
    :param path: the path where the directories with site files can
                 be found, such as the `templates/` and `static/`
                 directories.  Defaults to the caller's location.
    :param create_backups: whether to create backups of old sites.
    :param static_render_path: directory name, relative to the render
                               target, that static files are copied into.
    """
    def __init__(self, path=None, static_render_path='static', create_backups=True):
        self.registered_functions = set()
        self.create_backups = create_backups
        self.static_render_path = static_render_path
        # Default the project root to wherever the constructor was called from.
        if path:
            self.path = Path(path)
        else:
            self.path = get_caller_location()
        self.template_path = self.path / 'templates'
        self.static_path = self.path / 'static'
        # Jinja environment rooted at templates/, autoescaping HTML/XML.
        self._jinja_env = Environment(
            loader=FileSystemLoader(str(self.template_path), followlinks=True),
            autoescape=select_autoescape(['html', 'xml'])
        )
        # Expose dewar's template helpers and the site itself to templates.
        for func in JINJA_FUNCTIONS:
            self._jinja_env.globals[func.__name__] = func
        self._jinja_env.globals['site'] = self
        # Track every instance globally so the module-level `site` property
        # can fall back to the most recently created one.
        if '_site_instances' not in globals():
            global _site_instances
            _site_instances = []
        _site_instances.append(self)
    def register(self, path, validate=True):
        """A decorator that registers a page function with a site.
        :param path: The path to that page in the rendered site.
        :param validate: If True, when the page function returns,
                         it will raise an error if it doesn't return
                         a value that can create a page/pages.
        """
        if path.startswith('/'):
            raise ValueError("Path argument can't begin with a '/''")
        def decorator(f):
            def wrapper():
                # Memoise the page function: evaluate once, reuse the result.
                if hasattr(wrapper, '_returned'):
                    return wrapper._returned
                elif hasattr(wrapper, '_called'):
                    # _called set but _returned not yet: we are still inside
                    # f(), i.e. the page function recursed into itself.
                    raise RuntimeError("Calling functions within themselves not allowed!")
                else:
                    wrapper._called = True
                content = f()
                wrapper._returned = content
                if validate:
                    validate_page(wrapper)
                return content
            wrapper.name = f.__name__
            wrapper.__name__ = wrapper.name
            wrapper.path = path
            wrapper._registered_to = self
            self.registered_functions.add(wrapper)
            return wrapper
        return decorator
    def close(self):
        """Remove a site from the global list of sites.
        This functionality is rarely needed, as most applications will
        only involve one site. If you need it though, this will prevent
        this module's site property from returning this site.
        Note: The site property will still return this site if it is
              used in a page function registered to this site, or by a
              function that is called by a page function registered to
              this site.
        """
        try:
            _site_instances.remove(self)
        except ValueError:
            raise RuntimeError("Site Instance was already closed.")
    def _render_file(self, path, content):
        """Renders a given file to a path. Used by the render function.
        :param path: a path to write to.
        :param content: content to be written to that path.
        """
        # Create any missing parent directories before writing.
        render_path_folder = path.parent
        render_path_folder.mkdir(parents=True, exist_ok=True)
        with open(path, 'w') as render_file:
            render_file.write(content)
    def _render_static(self, path):
        """Renders all the static content to the given path
        :param path: The path to write to.
        """
        static_render_path = path / self.static_render_path
        # A project without a static/ directory is fine; just skip.
        if not self.static_path.exists():
            return
        shutil.copytree(self.static_path, static_render_path)
    def render(self, path='./dist/'):
        """Write the site to a path.
        :param path: The path to write to.
        """
        path = Path(path)
        render_functions = self.registered_functions
        if self.create_backups:
            # Archive the previous output, then clear it for a clean render.
            # NOTE(review): when create_backups is False the old output is
            # never removed, so a second render can fail in _render_static
            # (shutil.copytree refuses an existing target) -- confirm intended.
            shutil.make_archive(path / '..' / 'old' / f'site_{time.time()}', 'zip', path)
            shutil.rmtree(path)
        self._render_static(path)
        for func in render_functions:
            content = func()
            if isinstance(content, str):
                # Simple page: the function returned the page body directly.
                render_path = path / func.path
                self._render_file(render_path, content)
            else:
                # Parameterised page: a mapping of path-params -> page body.
                for params in content:
                    filled_path = fill_path(func.path, params)
                    render_path = path / filled_path
                    self._render_file(render_path, content[params])
| 33.293785
| 90
| 0.61904
|
acfba24c0884550d6a761c166c3a0052578a3cf5
| 813
|
py
|
Python
|
test/yml/python/steps/pycodestyle_test.py
|
swellaby/azure-pipeline-templates
|
adc83c8b0a4a8347f4c453073d131de897357090
|
[
"MIT"
] | 1
|
2019-10-11T14:31:33.000Z
|
2019-10-11T14:31:33.000Z
|
test/yml/python/steps/pycodestyle_test.py
|
swellaby/azure-pipeline-templates
|
adc83c8b0a4a8347f4c453073d131de897357090
|
[
"MIT"
] | 64
|
2019-03-09T19:02:05.000Z
|
2021-11-09T14:02:52.000Z
|
test/yml/python/steps/pycodestyle_test.py
|
swellaby/azure-pipeline-templates
|
adc83c8b0a4a8347f4c453073d131de897357090
|
[
"MIT"
] | null | null | null |
from test.test_utilities import parse_python_step_template_yaml_file
# Parse the template once at import time; all tests below share these views.
contents = parse_python_step_template_yaml_file("pycodestyle.yml")
steps = contents["steps"]
parameters = contents["parameters"]
# The template is expected to define a single step (see test_num_steps).
first = steps[0]
def test_target_parameter_default():
    """The `target` parameter should default to the current directory."""
    expected_default = "."
    assert parameters["target"] == expected_default
def test_additional_args_parameter_default():
    """`additionalArgs` should default to an empty string."""
    expected_default = ""
    assert parameters["additionalArgs"] == expected_default
def test_task_display_name_parameter_default():
    """`taskDisplayName` should default to "Lint"."""
    expected_default = "Lint"
    assert parameters["taskDisplayName"] == expected_default
def test_step_display_name():
    """The step display name should come from the taskDisplayName parameter."""
    expected_expression = "${{ parameters.taskDisplayName }}"
    assert first["displayName"] == expected_expression
def test_num_steps():
    """The template should contain exactly one step."""
    step_count = len(steps)
    assert step_count == 1
def test_script_contents():
    """The step should invoke pycodestyle with the target and extra args."""
    expected_script = (
        "pycodestyle "
        "${{ parameters.target }} "
        "${{ parameters.additionalArgs }}"
    )
    assert first["script"] == expected_script
| 23.228571
| 70
| 0.709717
|
acfba29c34db006fdda75030d8a06cc8bcf2d68c
| 585
|
py
|
Python
|
docs/tickets/15.py
|
lino-framework/lino_book
|
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
|
[
"BSD-2-Clause"
] | 3
|
2016-08-25T05:58:09.000Z
|
2019-12-05T11:13:45.000Z
|
docs/tickets/15.py
|
lino-framework/lino_book
|
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
|
[
"BSD-2-Clause"
] | 18
|
2016-11-12T21:38:58.000Z
|
2019-12-03T17:54:38.000Z
|
docs/tickets/15.py
|
lino-framework/lino_book
|
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
|
[
"BSD-2-Clause"
] | 9
|
2016-10-15T11:12:33.000Z
|
2021-09-22T04:37:37.000Z
|
import uno  # everything works well if you uncomment this line
import logging
# dictConfig moved into the stdlib in Python 2.7/3.2; fall back to the
# copy Django bundles for older interpreters (Django's trick).
try:
    from logging.config import dictConfig
except ImportError:
    from django.utils.dictconfig import dictConfig

LOGGING_CONFIG = {
    'version': 1,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'DEBUG',
        },
    },
    'loggers': {
        'my': {
            'handlers': ['console'],
            'level': 'INFO',
        }
    }
}
dictConfig(LOGGING_CONFIG)

logger = logging.getLogger('my')
logger.info("Hello world")
| 20.172414
| 62
| 0.574359
|
acfba45e5b808f981ee485ed2445b7fd3902b6c4
| 457
|
py
|
Python
|
scripts/uttplay.py
|
jkleczar/ttslabdev
|
52a7515734fd59e1a16dece8e2d567a33c435a27
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/uttplay.py
|
jkleczar/ttslabdev
|
52a7515734fd59e1a16dece8e2d567a33c435a27
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/uttplay.py
|
jkleczar/ttslabdev
|
52a7515734fd59e1a16dece8e2d567a33c435a27
|
[
"BSD-3-Clause"
] | 1
|
2019-02-25T10:27:34.000Z
|
2019-02-25T10:27:34.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Play waveform embedded in utt...
"""
from __future__ import unicode_literals, division, print_function #Py2

__author__ = "Daniel van Niekerk"
__email__ = "dvn.demitasse@gmail.com"

import sys

import ttslab

if __name__ == '__main__':
    # Require exactly one argument: the utterance file to play.
    if len(sys.argv) < 2:
        print("USAGE: uttplay.py UTTFNAME")
        sys.exit(1)
    uttfn = sys.argv[1]
    ttslab.fromfile(uttfn)["waveform"].play()
| 20.772727
| 70
| 0.66302
|
acfba590ec0b12a159b62c31ef2cfa5bcc44acd2
| 13,660
|
py
|
Python
|
CHAP06/wrapper/terraformexec.py
|
dotcs/Terraform-Cookbook
|
16938bf044353b1552f3ffb676153f922e147700
|
[
"MIT"
] | 86
|
2020-02-05T15:00:16.000Z
|
2022-03-28T12:06:14.000Z
|
CHAP06/wrapper/terraformexec.py
|
dotcs/Terraform-Cookbook
|
16938bf044353b1552f3ffb676153f922e147700
|
[
"MIT"
] | 1
|
2021-01-14T16:49:50.000Z
|
2021-01-14T16:49:50.000Z
|
CHAP06/wrapper/terraformexec.py
|
dotcs/Terraform-Cookbook
|
16938bf044353b1552f3ffb676153f922e147700
|
[
"MIT"
] | 113
|
2020-02-09T12:34:19.000Z
|
2022-03-22T18:42:59.000Z
|
import os
import argparse
import json
import yaml # require pip install pyyaml
import sys
import coloredlogs,logging
from coloredlogs import ColoredFormatter
import subprocess
import shutil
import subprocess
import shlex
#import azdo
class Terraform(object):
    """Thin wrapper around the Terraform CLI, authenticated with an Azure
    service principal.

    Credentials are exported as ARM_* environment variables so every
    terraform subprocess inherits them; optionally an `az login` is run
    first when ``useazcli`` is set.
    """
    def __init__(self, az_subscription_id, az_client_id, az_client_secret, az_tenant_id, az_access_key, terraform_path,
                 backendFile, varFiles, logger, use_apply=True, run_output=True, applyAutoApprove=True, variables=None,
                 planout="out.tfplan", outputazdo=None, terraformversion="0.12.8", verbose=False, useazcli=False):
        self.backendFile = backendFile
        self.varFiles = varFiles
        self.variables = dict() if variables is None else variables
        self.use_apply = use_apply
        self.applyAutoApprove = applyAutoApprove
        self.planout = planout
        self.outputazdo = outputazdo
        self.run_output = run_output
        self.terraform_version = terraformversion
        self.verbose = verbose
        self.logger = logger
        self.useazcli = useazcli
        self.terraform_path = terraform_path
        if self.useazcli == True:
            # NOTE(review): the client secret is interpolated into a shell
            # string here; prefer subprocess.run([...], shell=False) so it
            # is not exposed via shell parsing / process listings.
            os.system("az login --service-principal -u "+az_client_id +
                      " -p " + az_client_secret+" --tenant "+az_tenant_id+"")
            os.system("az account set --subscription "+az_subscription_id+"")
        # Terraform's azurerm provider/backend read these ARM_* variables.
        os.environ["ARM_SUBSCRIPTION_ID"] = az_subscription_id
        os.environ["ARM_CLIENT_ID"] = az_client_id
        os.environ["ARM_CLIENT_SECRET"] = az_client_secret
        os.environ["ARM_TENANT_ID"] = az_tenant_id
        os.environ["ARM_ACCESS_KEY"] = az_access_key
    def Init(self):
        """Run `terraform init` against the configured backend file."""
        self.logger.info("\n=> Run Terrform init")
        #self.logger.info ("{0:{1}^30}".format("Run Terrform init","="))
        terraformcmd = "terraform init -no-color -backend-config={} -reconfigure".format(self.backendFile)
        if self.verbose:
            self.logger.info("[{}]".format(terraformcmd))
        self.RunCommand(terraformcmd)
    def Format(self):
        """Run `terraform fmt` to normalize the configuration files."""
        self.logger.info("\n=> Run Terraform fmt")
        terraformcmd ="terraform fmt -no-color"
        if self.verbose:
            self.logger.info("[{}]".format(terraformcmd))
        self.RunCommand(terraformcmd)
    def Validate(self):
        """Run `terraform validate`; returns the process exit code."""
        self.logger.info("\n=> Run Terraform validate")
        terraformcmd ="terraform validate -no-color"
        if self.verbose:
            self.logger.info("[{}]".format(terraformcmd))
        return self.RunCommand(terraformcmd)
    def Plan(self):
        """Run `terraform plan -detailed-exitcode` writing to self.planout.

        Returns the exit code (with -detailed-exitcode: 0 = no changes,
        1 = error, 2 = changes pending).
        """
        self.logger.info("\n=> Run Terrform plan")
        cmd = ""
        for file in self.varFiles:
            cmd += " -var-file="+file
        for var in self.variables:
            cmd += """ -var "{}={}" """.format(var["name"], var["value"])
        cmd += " -out "+self.planout
        terraformcmd = "terraform plan -detailed-exitcode -no-color {}".format(cmd)
        if self.verbose:
            self.logger.info("[{}]".format(terraformcmd))
        return self.RunCommand(terraformcmd)
    def Apply(self):
        """Run `terraform apply`; returns the process exit code.

        With applyAutoApprove the saved plan file is applied; otherwise the
        var-files/variables are passed again for an interactive apply.
        """
        self.logger.info("\n=> Run Terraform Apply")
        cmd = ""
        if self.applyAutoApprove:
            cmd += "-auto-approve"
            cmd += " "+self.planout
        else:
            for file in self.varFiles:
                cmd += " -var-file="+file
            for var in self.variables:
                cmd += """ -var "{}={}" """.format(var["name"], var["value"])
        terraformcmd = "terraform apply -no-color {}".format(cmd)
        if self.verbose:
            self.logger.info("[{}]".format(terraformcmd))
        return self.RunCommand(terraformcmd)
    def Destroy(self):
        """Run `terraform destroy -auto-approve` with the configured vars."""
        self.logger.info("\n=> Run Terrform destroy")
        cmd = ""
        for file in self.varFiles:
            cmd += " -var-file="+file
        for var in self.variables:
            cmd += """ -var "{}={}" """.format(var["name"], var["value"])
        cmd += " -auto-approve"
        terraformcmd = "terraform destroy -no-color {}".format(cmd)
        self.logger.info("[{}]".format(terraformcmd))
        self.RunCommand(terraformcmd)
    def Output(self):
        """Run `terraform output -json`, dump it to outputtf.json and return
        the parsed object."""
        self.logger.info("\n=> Run terraform output in "+os.getcwd())
        terraformcmd = "terraform output -json"
        if self.verbose:
            self.logger.info("[{}]".format(terraformcmd))
        outputjson = os.popen(terraformcmd).read()
        if self.verbose:
            self.logger.info(outputjson)
        # change the JSON string into a JSON object
        jsonObject = json.loads(outputjson)
        with open('outputtf.json', 'w') as outfile:
            self.logger.info("[INFO : Write outputtf.json]")
            json.dump(jsonObject, outfile, indent=4)
        return jsonObject
    def CheckIfDestroy(self):
        """Return True when the saved plan contains a "delete" action.

        NOTE(review): `find` returns the match index, so a "delete" match at
        index 0 would be treated as no-destroy by `> 0`; in practice the jq
        output starts with a quote so the index is >= 1 -- confirm.
        """
        self.logger.info("\n=> Check in the terraform plan out if Terraform will destroy resources")
        plan = os.popen("terraform show -json "+self.planout +" | jq .resource_changes[].change.actions[]").read()
        finddelete = plan.find("delete")
        if finddelete > 0:
            self.logger.info("DESTROY in the plan : Terraform can't be done, use --acceptDestroy option for force the destroy")
            return True
        else:
            self.logger.info("Great there is no destroy in the plan !!")
            return False
    def Clean(self,deleteTheOutPlan=True):
        """Remove the saved plan file (optional) and the .terraform folder."""
        if deleteTheOutPlan == True :
            if os.path.exists(self.planout):
                self.logger.info("Delete the "+self.planout+" file")
                os.remove(self.planout)
        if os.path.exists(".terraform"):
            self.logger.info("Delete the .terraform folder")
            shutil.rmtree(".terraform")
    def RunCommand(self, command):
        """Run a shell command, streaming its output to the logger.

        Returns the process exit code.
        """
        p = subprocess.Popen(command, shell=True,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        for line in iter(p.stdout.readline, b''):
            self.logger.info(line.decode("utf-8").replace("\n",""))
        p.stdout.close()
        p.wait()
        return p.returncode
def RunApply(t):
    """Run `terraform apply` through the wrapper and abort on failure.

    :param t: a configured Terraform wrapper instance.
    """
    ret_code = t.Apply()
    logger.info("Apply return code: {}".format(ret_code))
    if ret_code == 1:
        sys.exit("Error in Terraform apply")
if __name__ == "__main__":
    # Configure a coloured root logger for the whole run.
    logger = logging.getLogger()
    handler = logging.StreamHandler()
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    coloredlogs.install(fmt='%(message)s', level='DEBUG', logger=logger)
    logger.info ("{0:{1}^40}".format("Start Terraform execution","="))
    parser = argparse.ArgumentParser()
    # Azure Service Principal credentials for Terraform; these override the
    # values read from the json/yaml configuration file.
    parser.add_argument("--subscriptionId", required=False,
                        help="Azure SubscriptionId Id")
    parser.add_argument("--clientId", required=False, help="Azure Client Id")
    parser.add_argument("--clientSecret", required=True,
                        help="Azure Client Secret")
    parser.add_argument("--tenantId", required=False, help="Azure Tenant Id")
    parser.add_argument("--accessKey", required=False,
                        help="Azure Access Key for storage backend")
    # json/yaml configuration file
    parser.add_argument("--configfile", required=True,
                        help="Configuration file json")
    # optional override for the Terraform files path
    parser.add_argument("--terraformpath", required=False,
                        help="Terraform files path")
    # allow Terraform to apply the changes
    parser.add_argument("--apply", help="Run Terraform apply",
                        action="store_true")
    # allow Terraform to destroy resources
    parser.add_argument("--acceptdestroy", help="Accept Terraform Destroy operation",
                        action="store_true")
    # run terraform destroy instead of apply
    parser.add_argument("--destroy", help="Execute Terraform Destroy",
                        action="store_true")
    # verbose mode
    parser.add_argument("-v", "--verbose", help="increase output verbosity",
                        action="store_true")
    args = parser.parse_args()
    # Loading of the configuration file Json / Yaml
    with open(args.configfile) as config:
        name, ext = os.path.splitext(args.configfile)
        #print(name)
        #print(ext)
        if(ext == ".json"):
            data = json.load(config)
        else:
            data = yaml.load(config, Loader=yaml.Loader)
    useazcli = data["use_azcli"]
    backendfile = data["backendfile"]
    autoapprove = data["auto-approve"]
    varfiles = data["varfiles"]  # array of files
    if "vars" in data:
        variables = data["vars"]  # dict of variable name : value
    else:
        variables = None
    outplan = data["planout"]
    outputAzdo = data["outputToAzDo"]
    terraformoutput = data["run_output"]
    # Terraform path: config value, then CLI flag, else current directory.
    terraformpath = os.getcwd()
    if("terraform_path" in data):
        terraformpath = data["terraform_path"]
    if(args.terraformpath != None):
        terraformpath = args.terraformpath
    applyterraform = data["run_apply"]
    if(args.apply == False):
        applyterraform = "false"
    # Credentials: config values overridden by the CLI flags when given.
    if "subscriptionId" in data["azure_credentials"]:
        azSubscriptionId = data["azure_credentials"]["subscriptionId"]
    if(args.subscriptionId != None):
        azSubscriptionId = args.subscriptionId
    if "clientId" in data["azure_credentials"]:
        azClientId = data["azure_credentials"]["clientId"]
    if(args.clientId != None):
        azClientId = args.clientId
    if "tenantId" in data["azure_credentials"]:
        azTenantId = data["azure_credentials"]["tenantId"]
    if(args.tenantId != None):
        azTenantId = args.tenantId
    if "accessKey" in data["azure_credentials"]:
        azAccessKey = data["azure_credentials"]["accessKey"]
    if(args.accessKey != None):
        azAccessKey = args.accessKey
    userAcceptDestroy = args.acceptdestroy
    # Echo the arguments and configuration when -v is given
    if args.verbose:
        logger.info("========== DEBUG MODE =========================")
        logger.info("useazcli: "+str(useazcli))
        logger.info("backendfile: "+str(backendfile))
        logger.info("autoapprove: "+str(autoapprove))
        logger.info("varfiles: "+str(varfiles))
        logger.info("variables: "+str(variables))
        logger.info("outplan: "+str(outplan))
        logger.info("outputAzdo: "+str(outputAzdo))
        logger.info("terraformpath: "+str(terraformpath))
        logger.info("terraformoutput: "+str(terraformoutput))
        logger.info("applyterraform: "+str(applyterraform))
        logger.info("acceptDestroy: "+str(userAcceptDestroy))
        logger.info("verbose: "+str(args.verbose))
        logger.info("azSubscriptionId: "+str(azSubscriptionId))
        logger.info("azClientId: "+str(azClientId))
        logger.info("azTenantId: "+str(azTenantId))
        logger.info("================================================")
    # Build the Terraform wrapper
    t = Terraform(azSubscriptionId, azClientId, args.clientSecret, azTenantId, azAccessKey, terraformpath,
                  backendfile, varfiles, logger, applyAutoApprove=autoapprove, variables=variables, planout=outplan,
                  outputazdo=outputAzdo, use_apply=applyterraform, run_output=terraformoutput, verbose=args.verbose, useazcli=useazcli)
    currentfolder = os.getcwd()
    os.chdir(terraformpath)
    # Terraform Format
    t.Format()
    # Terraform Init
    t.Init()
    # Terraform Validate
    is_valide_code = t.Validate()
    logger.info("Validate return code: {}".format(is_valide_code))
    if(is_valide_code in [1]):
        sys.exit("Error in Terraform validate")
    else:
        if(args.destroy == True):
            # Terraform Destroy
            t.Destroy()
        else:
            # Terraform Plan
            plan_ret_code = t.Plan()
            logger.info("Plan return code: {}".format(plan_ret_code))
            # Abort when the Terraform plan itself failed
            if(plan_ret_code in [1]):
                sys.exit("Error in Terraform plan")
            else:
                if(plan_ret_code in [2]):  # plan need changes
                    if (t.use_apply == True):
                        terraformdestroy = False  # Does changes need delete resources
                        if(userAcceptDestroy == True):
                            # Terraform Apply with acceptDestroy
                            RunApply(t)
                        if(userAcceptDestroy == False):
                            #terraformdestroy = t.CheckIfDestroy() # check in the terraform plan
                            if(t.CheckIfDestroy() == False):
                                # Terraform Apply
                                RunApply(t)
                            else:
                                sys.exit("Error Terraform will be destroy resources")
                    else:
                        logger.info("=> Terraform apply is skipped")
                if(plan_ret_code in [0, 2]):  # no changes or changes
                    # Terraform Output tf => Azure DevOps variables
                    if(t.run_output == True):
                        jsonObject = t.Output()
                        # azdo.tfoutputtoAzdo(outputAzdo, jsonObject)
                    else:
                        logger.info("==> Terraform output is skipped")
    # clean folder
    t.Clean()
    logger.info ("{0:{1}^40}".format("End Terraform execution","="))
| 36.524064
| 135
| 0.595388
|
acfba6cf1bd8355d2eee034aef8a1de1ef0d9e41
| 953
|
py
|
Python
|
13_no_sublists.py
|
roy2020china/BingDemo
|
3427dcc7943d20cc8dccad39c6ada1f403d53d09
|
[
"MIT"
] | 1
|
2017-06-13T09:54:44.000Z
|
2017-06-13T09:54:44.000Z
|
13_no_sublists.py
|
roy2020china/BingDemo
|
3427dcc7943d20cc8dccad39c6ada1f403d53d09
|
[
"MIT"
] | null | null | null |
13_no_sublists.py
|
roy2020china/BingDemo
|
3427dcc7943d20cc8dccad39c6ada1f403d53d09
|
[
"MIT"
] | null | null | null |
# Write a procedure, input a list with sublist elements, and output a list with no sublists.
# 写一个函数,输入一个含有列表的列表,输出一个不含有列表的列表。
# input /输入:[1, [2, 0], [3, 0, [4, 7, 5]]]
# output /输出: x = [1, 2, 0, 3, 0, 4, 7, 5]
def get_final_list(a_list):
    """Return a flat list of every non-container element of ``a_list``.

    Nested lists and tuples are flattened to any depth, preserving
    depth-first order, e.g. [1, [2, 0], [3, 0, [4, 7, 5]]] ->
    [1, 2, 0, 3, 0, 4, 7, 5].

    Fixes over the previous implementation: the input list is no longer
    mutated (the old version consumed it -- and its sublists' positions --
    with repeated ``del to_check[0]`` calls, destroying the caller's data
    and making the walk quadratic).
    """
    final_list = []
    # Explicit stack of iterators gives an iterative depth-first walk
    # without copying or mutating any of the input containers.
    stack = [iter(a_list)]
    while stack:
        try:
            item = next(stack[-1])
        except StopIteration:
            # Current container exhausted; resume its parent.
            stack.pop()
            continue
        if isinstance(item, (list, tuple)):
            stack.append(iter(item))
        else:
            final_list.append(item)
    return final_list
def is_sublist(i):
    """Return True when *i* is a flattenable container (list or tuple)."""
    return isinstance(i, (list, tuple))
# x = [1, [2, 0], [3, 0, [4, 7, 5]]]
# print get_final_list(x)
# >>>[1, 2, 0, 3, 0, 4, 7, 5]
| 27.228571
| 92
| 0.563484
|
acfba91a933aeb1f696d9d464c2b2862d9e659f5
| 62,030
|
py
|
Python
|
homeassistant/components/alexa/capabilities.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 1
|
2017-05-30T22:21:05.000Z
|
2017-05-30T22:21:05.000Z
|
homeassistant/components/alexa/capabilities.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 58
|
2020-08-03T07:33:02.000Z
|
2022-03-31T06:02:05.000Z
|
homeassistant/components/alexa/capabilities.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 2
|
2021-03-22T21:42:48.000Z
|
2021-04-12T12:26:39.000Z
|
"""Alexa capabilities."""
import logging
from typing import List, Optional
from homeassistant.components import (
cover,
fan,
image_processing,
input_number,
light,
timer,
vacuum,
)
from homeassistant.components.alarm_control_panel import ATTR_CODE_FORMAT, FORMAT_NUMBER
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
import homeassistant.components.climate.const as climate
import homeassistant.components.media_player.const as media_player
from homeassistant.const import (
ATTR_SUPPORTED_FEATURES,
ATTR_TEMPERATURE,
ATTR_UNIT_OF_MEASUREMENT,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_IDLE,
STATE_LOCKED,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from homeassistant.core import State
import homeassistant.util.color as color_util
import homeassistant.util.dt as dt_util
from .const import (
API_TEMP_UNITS,
API_THERMOSTAT_MODES,
API_THERMOSTAT_PRESETS,
DATE_FORMAT,
Inputs,
)
from .errors import UnsupportedProperty
from .resources import (
AlexaCapabilityResource,
AlexaGlobalCatalog,
AlexaModeResource,
AlexaPresetResource,
AlexaSemantics,
)
_LOGGER = logging.getLogger(__name__)
class AlexaCapability:
"""Base class for Alexa capability interfaces.
The Smart Home Skills API defines a number of "capability interfaces",
roughly analogous to domains in Home Assistant. The supported interfaces
describe what actions can be performed on a particular device.
https://developer.amazon.com/docs/device-apis/message-guide.html
"""
supported_locales = {"en-US"}
def __init__(self, entity: State, instance: Optional[str] = None):
"""Initialize an Alexa capability."""
self.entity = entity
self.instance = instance
def name(self) -> str:
"""Return the Alexa API name of this interface."""
raise NotImplementedError
@staticmethod
def properties_supported() -> List[dict]:
"""Return what properties this entity supports."""
return []
@staticmethod
def properties_proactively_reported() -> bool:
"""Return True if properties asynchronously reported."""
return False
@staticmethod
def properties_retrievable() -> bool:
"""Return True if properties can be retrieved."""
return False
@staticmethod
def properties_non_controllable() -> bool:
"""Return True if non controllable."""
return None
@staticmethod
def get_property(name):
"""Read and return a property.
Return value should be a dict, or raise UnsupportedProperty.
Properties can also have a timeOfSample and uncertaintyInMilliseconds,
but returning those metadata is not yet implemented.
"""
raise UnsupportedProperty(name)
@staticmethod
def supports_deactivation():
"""Applicable only to scenes."""
return None
@staticmethod
def capability_proactively_reported():
"""Return True if the capability is proactively reported.
Set properties_proactively_reported() for proactively reported properties.
Applicable to DoorbellEventSource.
"""
return None
@staticmethod
def capability_resources():
"""Return the capability object.
Applicable to ToggleController, RangeController, and ModeController interfaces.
"""
return []
@staticmethod
def configuration():
"""Return the configuration object.
Applicable to the ThermostatController, SecurityControlPanel, ModeController, RangeController,
and EventDetectionSensor.
"""
return []
@staticmethod
def configurations():
"""Return the configurations object.
The plural configurations object is different that the singular configuration object.
Applicable to EqualizerController interface.
"""
return []
@staticmethod
def inputs():
"""Applicable only to media players."""
return []
@staticmethod
def semantics():
"""Return the semantics object.
Applicable to ToggleController, RangeController, and ModeController interfaces.
"""
return []
@staticmethod
def supported_operations():
"""Return the supportedOperations object."""
return []
@staticmethod
def camera_stream_configurations():
"""Applicable only to CameraStreamController."""
return None
def serialize_discovery(self):
"""Serialize according to the Discovery API."""
result = {"type": "AlexaInterface", "interface": self.name(), "version": "3"}
instance = self.instance
if instance is not None:
result["instance"] = instance
properties_supported = self.properties_supported()
if properties_supported:
result["properties"] = {
"supported": self.properties_supported(),
"proactivelyReported": self.properties_proactively_reported(),
"retrievable": self.properties_retrievable(),
}
proactively_reported = self.capability_proactively_reported()
if proactively_reported is not None:
result["proactivelyReported"] = proactively_reported
non_controllable = self.properties_non_controllable()
if non_controllable is not None:
result["properties"]["nonControllable"] = non_controllable
supports_deactivation = self.supports_deactivation()
if supports_deactivation is not None:
result["supportsDeactivation"] = supports_deactivation
capability_resources = self.capability_resources()
if capability_resources:
result["capabilityResources"] = capability_resources
configuration = self.configuration()
if configuration:
result["configuration"] = configuration
# The plural configurations object is different than the singular configuration object above.
configurations = self.configurations()
if configurations:
result["configurations"] = configurations
semantics = self.semantics()
if semantics:
result["semantics"] = semantics
supported_operations = self.supported_operations()
if supported_operations:
result["supportedOperations"] = supported_operations
inputs = self.inputs()
if inputs:
result["inputs"] = inputs
camera_stream_configurations = self.camera_stream_configurations()
if camera_stream_configurations:
result["cameraStreamConfigurations"] = camera_stream_configurations
return result
def serialize_properties(self):
    """Yield this capability's properties serialized for an API response.

    Properties whose current value is None are skipped; unexpected errors
    from ``get_property`` are logged and treated as "no value".
    """
    for prop in self.properties_supported():
        prop_name = prop["name"]
        try:
            prop_value = self.get_property(prop_name)
        except UnsupportedProperty:
            # Propagate unchanged: the caller handles unsupported properties.
            raise
        except Exception:  # pylint: disable=broad-except
            # Best-effort: a misbehaving entity must not break the whole report.
            _LOGGER.exception(
                "Unexpected error getting %s.%s property from %s",
                self.name(),
                prop_name,
                self.entity,
            )
            prop_value = None
        if prop_value is None:
            continue
        result = {
            "name": prop_name,
            "namespace": self.name(),
            "value": prop_value,
            "timeOfSample": dt_util.utcnow().strftime(DATE_FORMAT),
            "uncertaintyInMilliseconds": 0,
        }
        instance = self.instance
        if instance is not None:
            result["instance"] = instance
        yield result
class Alexa(AlexaCapability):
    """Implements the base Alexa interface.

    Endpoints implement this interface implicitly, but the API suggests
    it should be listed explicitly during discovery.

    https://developer.amazon.com/docs/device-apis/alexa-interface.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "es-MX", "fr-CA", "fr-FR", "it-IT", "ja-JP",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa"
class AlexaEndpointHealth(AlexaCapability):
    """Implements Alexa.EndpointHealth.

    https://developer.amazon.com/docs/smarthome/state-reporting-for-a-smart-home-skill.html#report-state-when-alexa-requests-it
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "it-IT", "ja-JP",
    }

    def __init__(self, hass, entity):
        """Initialize the entity."""
        super().__init__(entity)
        self.hass = hass

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.EndpointHealth"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "connectivity"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "connectivity":
            raise UnsupportedProperty(name)
        unreachable = self.entity.state == STATE_UNAVAILABLE
        return {"value": "UNREACHABLE" if unreachable else "OK"}
class AlexaPowerController(AlexaCapability):
    """Implements Alexa.PowerController.

    https://developer.amazon.com/docs/device-apis/alexa-powercontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "it-IT", "ja-JP",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.PowerController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "powerState"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "powerState":
            raise UnsupportedProperty(name)

        domain = self.entity.domain
        state = self.entity.state
        # Each domain reports "on" differently; default is any non-off state.
        if domain == climate.DOMAIN:
            is_on = state != climate.HVAC_MODE_OFF
        elif domain == vacuum.DOMAIN:
            is_on = state == vacuum.STATE_CLEANING
        elif domain == timer.DOMAIN:
            is_on = state != STATE_IDLE
        else:
            is_on = state != STATE_OFF
        return "ON" if is_on else "OFF"
class AlexaLockController(AlexaCapability):
    """Implements Alexa.LockController.

    https://developer.amazon.com/docs/device-apis/alexa-lockcontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "es-MX", "es-US", "fr-CA", "fr-FR", "hi-IN",
        "it-IT", "ja-JP", "pt-BR",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.LockController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "lockState"}]

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "lockState":
            raise UnsupportedProperty(name)

        state = self.entity.state
        if state == STATE_LOCKED:
            return "LOCKED"
        if state == STATE_UNLOCKED:
            return "UNLOCKED"
        # Any other lock state is reported as JAMMED.
        return "JAMMED"
class AlexaSceneController(AlexaCapability):
    """Implements Alexa.SceneController.

    https://developer.amazon.com/docs/device-apis/alexa-scenecontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "it-IT", "ja-JP",
    }

    def __init__(self, entity, supports_deactivation):
        """Initialize the entity, recording whether it can be deactivated."""
        super().__init__(entity)
        # Shadow the base method with a constant-returning callable.
        self.supports_deactivation = lambda: supports_deactivation

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.SceneController"
class AlexaBrightnessController(AlexaCapability):
    """Implements Alexa.BrightnessController.

    https://developer.amazon.com/docs/device-apis/alexa-brightnesscontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "hi-IN", "it-IT", "ja-JP", "pt-BR",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.BrightnessController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "brightness"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "brightness":
            raise UnsupportedProperty(name)
        attributes = self.entity.attributes
        if "brightness" not in attributes:
            return 0
        # Convert the 0-255 brightness attribute to a 0-100 percentage.
        return round(attributes["brightness"] / 255.0 * 100)
class AlexaColorController(AlexaCapability):
    """Implements Alexa.ColorController.

    https://developer.amazon.com/docs/device-apis/alexa-colorcontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "hi-IN", "it-IT", "ja-JP", "pt-BR",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.ColorController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "color"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "color":
            raise UnsupportedProperty(name)
        attributes = self.entity.attributes
        hue, saturation = attributes.get(light.ATTR_HS_COLOR, (0, 0))
        brightness = attributes.get(light.ATTR_BRIGHTNESS, 0)
        # Alexa expects saturation and brightness scaled to 0..1.
        return {
            "hue": hue,
            "saturation": saturation / 100.0,
            "brightness": brightness / 255.0,
        }
class AlexaColorTemperatureController(AlexaCapability):
    """Implements Alexa.ColorTemperatureController.

    https://developer.amazon.com/docs/device-apis/alexa-colortemperaturecontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "hi-IN", "it-IT", "ja-JP", "pt-BR",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.ColorTemperatureController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "colorTemperatureInKelvin"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "colorTemperatureInKelvin":
            raise UnsupportedProperty(name)
        attributes = self.entity.attributes
        if "color_temp" not in attributes:
            return None
        # Entity reports mireds; Alexa wants kelvin.
        return color_util.color_temperature_mired_to_kelvin(attributes["color_temp"])
class AlexaPercentageController(AlexaCapability):
    """Implements Alexa.PercentageController.

    https://developer.amazon.com/docs/device-apis/alexa-percentagecontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "it-IT", "ja-JP",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.PercentageController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "percentage"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "percentage":
            raise UnsupportedProperty(name)

        domain = self.entity.domain
        if domain == fan.DOMAIN:
            return self.entity.attributes.get(fan.ATTR_PERCENTAGE) or 0
        if domain == cover.DOMAIN:
            return self.entity.attributes.get(cover.ATTR_CURRENT_POSITION, 0)
        return 0
class AlexaSpeaker(AlexaCapability):
    """Implements Alexa.Speaker.

    https://developer.amazon.com/docs/device-apis/alexa-speaker.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "es-MX", "it-IT", "ja-JP",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.Speaker"

    def properties_supported(self):
        """Return what properties this entity supports."""
        properties = [{"name": "volume"}]
        supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        # "muted" is only meaningful when the player supports mute.
        if supported & media_player.SUPPORT_VOLUME_MUTE:
            properties.append({"name": "muted"})
        return properties

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name == "volume":
            level = self.entity.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL)
            # Convert the 0.0-1.0 volume level to a 0-100 integer.
            return None if level is None else round(float(level) * 100)
        if name == "muted":
            return bool(
                self.entity.attributes.get(media_player.ATTR_MEDIA_VOLUME_MUTED)
            )
        return None
class AlexaStepSpeaker(AlexaCapability):
    """Implements Alexa.StepSpeaker.

    https://developer.amazon.com/docs/device-apis/alexa-stepspeaker.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "it-IT",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.StepSpeaker"
class AlexaPlaybackController(AlexaCapability):
    """Implements Alexa.PlaybackController.

    https://developer.amazon.com/docs/device-apis/alexa-playbackcontroller.html
    """

    supported_locales = {"de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US", "fr-FR"}

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.PlaybackController"

    def supported_operations(self):
        """Return the supportedOperations object.

        Supported Operations: FastForward, Next, Pause, Play, Previous, Rewind, StartOver, Stop
        """
        supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        # Map each media_player feature bit to its Alexa operation name.
        feature_to_operation = (
            (media_player.SUPPORT_NEXT_TRACK, "Next"),
            (media_player.SUPPORT_PAUSE, "Pause"),
            (media_player.SUPPORT_PLAY, "Play"),
            (media_player.SUPPORT_PREVIOUS_TRACK, "Previous"),
            (media_player.SUPPORT_STOP, "Stop"),
        )
        return [
            operation
            for feature, operation in feature_to_operation
            if feature & supported_features
        ]
class AlexaInputController(AlexaCapability):
    """Implements Alexa.InputController.

    https://developer.amazon.com/docs/device-apis/alexa-inputcontroller.html
    """

    supported_locales = {"de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US"}

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.InputController"

    def inputs(self):
        """Return the list of valid supported inputs."""
        source_list = self.entity.attributes.get(
            media_player.ATTR_INPUT_SOURCE_LIST, []
        )
        return AlexaInputController.get_valid_inputs(source_list)

    @staticmethod
    def get_valid_inputs(source_list):
        """Return the subset of source_list that Alexa recognizes as inputs."""
        valid = []
        for source in source_list:
            # Normalize: lowercase with "-", "_" and spaces stripped.
            key = source.lower().replace("-", "").replace("_", "").replace(" ", "")
            alexa_name = Inputs.VALID_SOURCE_NAME_MAP.get(key)
            if alexa_name is not None:
                valid.append({"name": alexa_name})
        return valid
class AlexaTemperatureSensor(AlexaCapability):
    """Implements Alexa.TemperatureSensor.

    https://developer.amazon.com/docs/device-apis/alexa-temperaturesensor.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "it-IT", "ja-JP",
    }

    def __init__(self, hass, entity):
        """Initialize the entity."""
        super().__init__(entity)
        self.hass = hass

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.TemperatureSensor"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "temperature"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "temperature":
            raise UnsupportedProperty(name)

        if self.entity.domain == climate.DOMAIN:
            # Climate entities report temperature via an attribute, in the
            # Home Assistant configured unit.
            unit = self.hass.config.units.temperature_unit
            temp = self.entity.attributes.get(climate.ATTR_CURRENT_TEMPERATURE)
        else:
            unit = self.entity.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
            temp = self.entity.state

        if temp in (STATE_UNAVAILABLE, STATE_UNKNOWN, None):
            return None
        try:
            temp = float(temp)
        except ValueError:
            _LOGGER.warning("Invalid temp value %s for %s", temp, self.entity.entity_id)
            return None
        return {"value": temp, "scale": API_TEMP_UNITS[unit]}
class AlexaContactSensor(AlexaCapability):
    """Implements Alexa.ContactSensor.

    The Alexa.ContactSensor interface describes the properties and events used
    to report the state of an endpoint that detects contact between two
    surfaces. For example, a contact sensor can report whether a door or window
    is open.

    https://developer.amazon.com/docs/device-apis/alexa-contactsensor.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-IN", "en-US",
        "es-ES", "it-IT", "ja-JP",
    }

    def __init__(self, hass, entity):
        """Initialize the entity."""
        super().__init__(entity)
        self.hass = hass

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.ContactSensor"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "detectionState"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "detectionState":
            raise UnsupportedProperty(name)
        detected = self.entity.state == STATE_ON
        return "DETECTED" if detected else "NOT_DETECTED"
class AlexaMotionSensor(AlexaCapability):
    """Implements Alexa.MotionSensor.

    https://developer.amazon.com/docs/device-apis/alexa-motionsensor.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-IN", "en-US",
        "es-ES", "it-IT", "ja-JP", "pt-BR",
    }

    def __init__(self, hass, entity):
        """Initialize the entity."""
        super().__init__(entity)
        self.hass = hass

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.MotionSensor"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "detectionState"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "detectionState":
            raise UnsupportedProperty(name)
        detected = self.entity.state == STATE_ON
        return "DETECTED" if detected else "NOT_DETECTED"
class AlexaThermostatController(AlexaCapability):
    """Implements Alexa.ThermostatController.

    https://developer.amazon.com/docs/device-apis/alexa-thermostatcontroller.html
    """

    supported_locales = {
        "de-DE",
        "en-AU",
        "en-CA",
        "en-GB",
        "en-IN",
        "en-US",
        "es-ES",
        "fr-FR",
        "it-IT",
        "ja-JP",
        "pt-BR",
    }

    def __init__(self, hass, entity):
        """Initialize the entity."""
        super().__init__(entity)
        self.hass = hass

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.ThermostatController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        properties = [{"name": "thermostatMode"}]
        supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        if supported & climate.SUPPORT_TARGET_TEMPERATURE:
            properties.append({"name": "targetSetpoint"})
        if supported & climate.SUPPORT_TARGET_TEMPERATURE_RANGE:
            # Dual-setpoint thermostats expose a lower and an upper bound.
            properties.append({"name": "lowerSetpoint"})
            properties.append({"name": "upperSetpoint"})
        return properties

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property.

        Raises UnsupportedProperty for unknown names or unmappable HVAC states.
        """
        if self.entity.state == STATE_UNAVAILABLE:
            return None

        if name == "thermostatMode":
            # An active preset (e.g. eco) takes precedence over the HVAC state.
            preset = self.entity.attributes.get(climate.ATTR_PRESET_MODE)

            if preset in API_THERMOSTAT_PRESETS:
                mode = API_THERMOSTAT_PRESETS[preset]
            else:
                mode = API_THERMOSTAT_MODES.get(self.entity.state)
                if mode is None:
                    _LOGGER.error(
                        "%s (%s) has unsupported state value '%s'",
                        self.entity.entity_id,
                        type(self.entity),
                        self.entity.state,
                    )
                    raise UnsupportedProperty(name)
            return mode

        unit = self.hass.config.units.temperature_unit
        if name == "targetSetpoint":
            temp = self.entity.attributes.get(ATTR_TEMPERATURE)
        elif name == "lowerSetpoint":
            temp = self.entity.attributes.get(climate.ATTR_TARGET_TEMP_LOW)
        elif name == "upperSetpoint":
            temp = self.entity.attributes.get(climate.ATTR_TARGET_TEMP_HIGH)
        else:
            raise UnsupportedProperty(name)

        if temp is None:
            return None

        try:
            temp = float(temp)
        except ValueError:
            _LOGGER.warning(
                "Invalid temp value %s for %s in %s", temp, name, self.entity.entity_id
            )
            return None

        return {"value": temp, "scale": API_TEMP_UNITS[unit]}

    def configuration(self):
        """Return configuration object.

        Translates climate HVAC_MODES and PRESETS to supported Alexa ThermostatMode Values.
        ThermostatMode Value must be AUTO, COOL, HEAT, ECO, OFF, or CUSTOM.
        """
        supported_modes = []
        hvac_modes = self.entity.attributes.get(climate.ATTR_HVAC_MODES)
        for mode in hvac_modes:
            thermostat_mode = API_THERMOSTAT_MODES.get(mode)
            if thermostat_mode:
                supported_modes.append(thermostat_mode)

        preset_modes = self.entity.attributes.get(climate.ATTR_PRESET_MODES)
        if preset_modes:
            for mode in preset_modes:
                thermostat_mode = API_THERMOSTAT_PRESETS.get(mode)
                if thermostat_mode:
                    supported_modes.append(thermostat_mode)

        # Return False for supportsScheduling until supported with event listener in handler.
        configuration = {"supportsScheduling": False}

        if supported_modes:
            configuration["supportedModes"] = supported_modes

        return configuration
class AlexaPowerLevelController(AlexaCapability):
    """Implements Alexa.PowerLevelController.

    https://developer.amazon.com/docs/device-apis/alexa-powerlevelcontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "it-IT", "ja-JP",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.PowerLevelController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "powerLevel"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "powerLevel":
            raise UnsupportedProperty(name)
        if self.entity.domain != fan.DOMAIN:
            return None
        # A fan's percentage maps directly onto the Alexa power level.
        return self.entity.attributes.get(fan.ATTR_PERCENTAGE) or 0
class AlexaSecurityPanelController(AlexaCapability):
    """Implements Alexa.SecurityPanelController.

    https://developer.amazon.com/docs/device-apis/alexa-securitypanelcontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "es-MX", "es-US", "fr-CA", "fr-FR", "it-IT",
        "ja-JP", "pt-BR",
    }

    def __init__(self, hass, entity):
        """Initialize the entity."""
        super().__init__(entity)
        self.hass = hass

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.SecurityPanelController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "armState"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "armState":
            raise UnsupportedProperty(name)
        arm_state = self.entity.state
        if arm_state == STATE_ALARM_ARMED_HOME:
            return "ARMED_STAY"
        if arm_state == STATE_ALARM_ARMED_AWAY:
            return "ARMED_AWAY"
        if arm_state == STATE_ALARM_ARMED_NIGHT:
            return "ARMED_NIGHT"
        if arm_state == STATE_ALARM_ARMED_CUSTOM_BYPASS:
            # Alexa has no custom-bypass concept; report it as ARMED_STAY.
            return "ARMED_STAY"
        return "DISARMED"

    def configuration(self):
        """Return configuration object with supported arm states and authorization types."""
        code_format = self.entity.attributes.get(ATTR_CODE_FORMAT)
        # Use .get() with a default, consistent with the other capabilities in
        # this module, so a panel missing the attribute cannot raise KeyError.
        supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        configuration = {}

        supported_arm_states = [{"value": "DISARMED"}]
        if supported & SUPPORT_ALARM_ARM_AWAY:
            supported_arm_states.append({"value": "ARMED_AWAY"})
        if supported & SUPPORT_ALARM_ARM_HOME:
            supported_arm_states.append({"value": "ARMED_STAY"})
        if supported & SUPPORT_ALARM_ARM_NIGHT:
            supported_arm_states.append({"value": "ARMED_NIGHT"})
        configuration["supportedArmStates"] = supported_arm_states

        # Only numeric codes can be spoken to Alexa as a 4-digit PIN.
        if code_format == FORMAT_NUMBER:
            configuration["supportedAuthorizationTypes"] = [{"type": "FOUR_DIGIT_PIN"}]
        return configuration
class AlexaModeController(AlexaCapability):
    """Implements Alexa.ModeController.

    The instance property must be unique across ModeController, RangeController, ToggleController within the same device.
    The instance property should be a concatenated string of device domain period and single word.
    e.g. fan.speed & fan.direction.

    The instance property must not contain words from other instance property strings within the same device.
    e.g. Instance property cover.position & cover.tilt_position will cause the Alexa.Discovery directive to fail.

    An instance property string value may be reused for different devices.

    https://developer.amazon.com/docs/device-apis/alexa-modecontroller.html
    """

    supported_locales = {
        "de-DE",
        "en-AU",
        "en-CA",
        "en-GB",
        "en-IN",
        "en-US",
        "es-ES",
        "es-MX",
        "fr-CA",
        "fr-FR",
        "it-IT",
        "ja-JP",
    }

    def __init__(self, entity, instance, non_controllable=False):
        """Initialize the entity."""
        super().__init__(entity, instance)
        # Resource/semantics objects are built by capability_resources() and
        # semantics(); configuration() reuses the cached resource.
        self._resource = None
        self._semantics = None
        self.properties_non_controllable = lambda: non_controllable

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.ModeController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "mode"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property.

        Mode values are namespaced strings, e.g. "direction.forward".
        """
        if name != "mode":
            raise UnsupportedProperty(name)

        # Fan Direction
        if self.instance == f"{fan.DOMAIN}.{fan.ATTR_DIRECTION}":
            mode = self.entity.attributes.get(fan.ATTR_DIRECTION, None)
            if mode in (fan.DIRECTION_FORWARD, fan.DIRECTION_REVERSE, STATE_UNKNOWN):
                return f"{fan.ATTR_DIRECTION}.{mode}"

        # Cover Position
        if self.instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
            # Return state instead of position when using ModeController.
            mode = self.entity.state
            if mode in (
                cover.STATE_OPEN,
                cover.STATE_OPENING,
                cover.STATE_CLOSED,
                cover.STATE_CLOSING,
                STATE_UNKNOWN,
            ):
                return f"{cover.ATTR_POSITION}.{mode}"

        return None

    def configuration(self):
        """Return configuration with modeResources."""
        # Requires capability_resources() to have been called first.
        if isinstance(self._resource, AlexaCapabilityResource):
            return self._resource.serialize_configuration()
        return None

    def capability_resources(self):
        """Return capabilityResources object."""
        # Fan Direction Resource
        if self.instance == f"{fan.DOMAIN}.{fan.ATTR_DIRECTION}":
            self._resource = AlexaModeResource(
                [AlexaGlobalCatalog.SETTING_DIRECTION], False
            )
            self._resource.add_mode(
                f"{fan.ATTR_DIRECTION}.{fan.DIRECTION_FORWARD}", [fan.DIRECTION_FORWARD]
            )
            self._resource.add_mode(
                f"{fan.ATTR_DIRECTION}.{fan.DIRECTION_REVERSE}", [fan.DIRECTION_REVERSE]
            )
            return self._resource.serialize_capability_resources()

        # Cover Position Resources
        if self.instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
            self._resource = AlexaModeResource(
                ["Position", AlexaGlobalCatalog.SETTING_OPENING], False
            )
            self._resource.add_mode(
                f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
                [AlexaGlobalCatalog.VALUE_OPEN],
            )
            self._resource.add_mode(
                f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
                [AlexaGlobalCatalog.VALUE_CLOSE],
            )
            self._resource.add_mode(
                f"{cover.ATTR_POSITION}.custom",
                ["Custom", AlexaGlobalCatalog.SETTING_PRESET],
            )
            return self._resource.serialize_capability_resources()

        return None

    def semantics(self):
        """Build and return semantics object."""
        supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)

        # Cover Position
        if self.instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
            lower_labels = [AlexaSemantics.ACTION_LOWER]
            raise_labels = [AlexaSemantics.ACTION_RAISE]
            self._semantics = AlexaSemantics()

            # Add open/close semantics if tilt is not supported.
            if not supported & cover.SUPPORT_SET_TILT_POSITION:
                lower_labels.append(AlexaSemantics.ACTION_CLOSE)
                raise_labels.append(AlexaSemantics.ACTION_OPEN)
                self._semantics.add_states_to_value(
                    [AlexaSemantics.STATES_CLOSED],
                    f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
                )
                self._semantics.add_states_to_value(
                    [AlexaSemantics.STATES_OPEN],
                    f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
                )

            self._semantics.add_action_to_directive(
                lower_labels,
                "SetMode",
                {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}"},
            )
            self._semantics.add_action_to_directive(
                raise_labels,
                "SetMode",
                {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}"},
            )
            return self._semantics.serialize_semantics()

        return None
class AlexaRangeController(AlexaCapability):
    """Implements Alexa.RangeController.

    The instance property must be unique across ModeController, RangeController, ToggleController within the same device.
    The instance property should be a concatenated string of device domain period and single word.
    e.g. fan.speed & fan.direction.

    The instance property must not contain words from other instance property strings within the same device.
    e.g. Instance property cover.position & cover.tilt_position will cause the Alexa.Discovery directive to fail.

    An instance property string value may be reused for different devices.

    https://developer.amazon.com/docs/device-apis/alexa-rangecontroller.html
    """

    supported_locales = {
        "de-DE",
        "en-AU",
        "en-CA",
        "en-GB",
        "en-IN",
        "en-US",
        "es-ES",
        "es-MX",
        "fr-CA",
        "fr-FR",
        "it-IT",
        "ja-JP",
    }

    def __init__(self, entity, instance, non_controllable=False):
        """Initialize the entity."""
        super().__init__(entity, instance)
        # Resource/semantics objects are built by capability_resources() and
        # semantics(); configuration() reuses the cached resource.
        self._resource = None
        self._semantics = None
        self.properties_non_controllable = lambda: non_controllable

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.RangeController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "rangeValue"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property, dispatching on self.instance."""
        if name != "rangeValue":
            raise UnsupportedProperty(name)

        # Return None for unavailable and unknown states.
        # Allows the Alexa.EndpointHealth Interface to handle the unavailable state in a stateReport.
        if self.entity.state in (STATE_UNAVAILABLE, STATE_UNKNOWN, None):
            return None

        # Fan Speed: report the index of the current speed in the speed list.
        if self.instance == f"{fan.DOMAIN}.{fan.ATTR_SPEED}":
            speed_list = self.entity.attributes.get(fan.ATTR_SPEED_LIST)
            speed = self.entity.attributes.get(fan.ATTR_SPEED)
            if speed_list is not None and speed is not None:
                speed_index = next(
                    (i for i, v in enumerate(speed_list) if v == speed), None
                )
                return speed_index

        # Cover Position
        if self.instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
            return self.entity.attributes.get(cover.ATTR_CURRENT_POSITION)

        # Cover Tilt
        if self.instance == f"{cover.DOMAIN}.tilt":
            return self.entity.attributes.get(cover.ATTR_CURRENT_TILT_POSITION)

        # Input Number Value
        if self.instance == f"{input_number.DOMAIN}.{input_number.ATTR_VALUE}":
            return float(self.entity.state)

        # Vacuum Fan Speed: report the index of the current speed.
        if self.instance == f"{vacuum.DOMAIN}.{vacuum.ATTR_FAN_SPEED}":
            speed_list = self.entity.attributes.get(vacuum.ATTR_FAN_SPEED_LIST)
            speed = self.entity.attributes.get(vacuum.ATTR_FAN_SPEED)
            if speed_list is not None and speed is not None:
                speed_index = next(
                    (i for i, v in enumerate(speed_list) if v == speed), None
                )
                return speed_index

        return None

    def configuration(self):
        """Return configuration with presetResources."""
        # Requires capability_resources() to have been called first.
        if isinstance(self._resource, AlexaCapabilityResource):
            return self._resource.serialize_configuration()
        return None

    def capability_resources(self):
        """Return capabilityResources object."""
        # Fan Speed Resources
        if self.instance == f"{fan.DOMAIN}.{fan.ATTR_SPEED}":
            speed_list = self.entity.attributes[fan.ATTR_SPEED_LIST]
            max_value = len(speed_list) - 1
            self._resource = AlexaPresetResource(
                labels=[AlexaGlobalCatalog.SETTING_FAN_SPEED],
                min_value=0,
                max_value=max_value,
                precision=1,
            )
            for index, speed in enumerate(speed_list):
                labels = []
                if isinstance(speed, str):
                    labels.append(speed.replace("_", " "))
                # Index 1 (not 0, which is typically "off") is the minimum speed.
                if index == 1:
                    labels.append(AlexaGlobalCatalog.VALUE_MINIMUM)
                if index == max_value:
                    labels.append(AlexaGlobalCatalog.VALUE_MAXIMUM)
                if len(labels) > 0:
                    self._resource.add_preset(value=index, labels=labels)
            return self._resource.serialize_capability_resources()

        # Cover Position Resources
        if self.instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
            self._resource = AlexaPresetResource(
                ["Position", AlexaGlobalCatalog.SETTING_OPENING],
                min_value=0,
                max_value=100,
                precision=1,
                unit=AlexaGlobalCatalog.UNIT_PERCENT,
            )
            return self._resource.serialize_capability_resources()

        # Cover Tilt Resources
        if self.instance == f"{cover.DOMAIN}.tilt":
            self._resource = AlexaPresetResource(
                ["Tilt", "Angle", AlexaGlobalCatalog.SETTING_DIRECTION],
                min_value=0,
                max_value=100,
                precision=1,
                unit=AlexaGlobalCatalog.UNIT_PERCENT,
            )
            return self._resource.serialize_capability_resources()

        # Input Number Value
        if self.instance == f"{input_number.DOMAIN}.{input_number.ATTR_VALUE}":
            min_value = float(self.entity.attributes[input_number.ATTR_MIN])
            max_value = float(self.entity.attributes[input_number.ATTR_MAX])
            precision = float(self.entity.attributes.get(input_number.ATTR_STEP, 1))
            unit = self.entity.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
            self._resource = AlexaPresetResource(
                ["Value", AlexaGlobalCatalog.SETTING_PRESET],
                min_value=min_value,
                max_value=max_value,
                precision=precision,
                unit=unit,
            )
            self._resource.add_preset(
                value=min_value, labels=[AlexaGlobalCatalog.VALUE_MINIMUM]
            )
            self._resource.add_preset(
                value=max_value, labels=[AlexaGlobalCatalog.VALUE_MAXIMUM]
            )
            return self._resource.serialize_capability_resources()

        # Vacuum Fan Speed Resources
        if self.instance == f"{vacuum.DOMAIN}.{vacuum.ATTR_FAN_SPEED}":
            speed_list = self.entity.attributes[vacuum.ATTR_FAN_SPEED_LIST]
            max_value = len(speed_list) - 1
            self._resource = AlexaPresetResource(
                labels=[AlexaGlobalCatalog.SETTING_FAN_SPEED],
                min_value=0,
                max_value=max_value,
                precision=1,
            )
            for index, speed in enumerate(speed_list):
                labels = [speed.replace("_", " ")]
                if index == 1:
                    labels.append(AlexaGlobalCatalog.VALUE_MINIMUM)
                if index == max_value:
                    labels.append(AlexaGlobalCatalog.VALUE_MAXIMUM)
                self._resource.add_preset(value=index, labels=labels)
            return self._resource.serialize_capability_resources()

        return None

    def semantics(self):
        """Build and return semantics object."""
        supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)

        # Cover Position
        if self.instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
            lower_labels = [AlexaSemantics.ACTION_LOWER]
            raise_labels = [AlexaSemantics.ACTION_RAISE]
            self._semantics = AlexaSemantics()

            # Add open/close semantics if tilt is not supported.
            if not supported & cover.SUPPORT_SET_TILT_POSITION:
                lower_labels.append(AlexaSemantics.ACTION_CLOSE)
                raise_labels.append(AlexaSemantics.ACTION_OPEN)
                self._semantics.add_states_to_value(
                    [AlexaSemantics.STATES_CLOSED], value=0
                )
                self._semantics.add_states_to_range(
                    [AlexaSemantics.STATES_OPEN], min_value=1, max_value=100
                )

            self._semantics.add_action_to_directive(
                lower_labels, "SetRangeValue", {"rangeValue": 0}
            )
            self._semantics.add_action_to_directive(
                raise_labels, "SetRangeValue", {"rangeValue": 100}
            )
            return self._semantics.serialize_semantics()

        # Cover Tilt
        if self.instance == f"{cover.DOMAIN}.tilt":
            self._semantics = AlexaSemantics()
            self._semantics.add_action_to_directive(
                [AlexaSemantics.ACTION_CLOSE], "SetRangeValue", {"rangeValue": 0}
            )
            self._semantics.add_action_to_directive(
                [AlexaSemantics.ACTION_OPEN], "SetRangeValue", {"rangeValue": 100}
            )
            self._semantics.add_states_to_value([AlexaSemantics.STATES_CLOSED], value=0)
            self._semantics.add_states_to_range(
                [AlexaSemantics.STATES_OPEN], min_value=1, max_value=100
            )
            return self._semantics.serialize_semantics()

        return None
class AlexaToggleController(AlexaCapability):
    """Implements Alexa.ToggleController.

    The instance property must be unique across ModeController, RangeController, ToggleController within the same device.
    The instance property should be a concatenated string of device domain period and single word.
    e.g. fan.speed & fan.direction.
    The instance property must not contain words from other instance property strings within the same device.
    e.g. Instance property cover.position & cover.tilt_position will cause the Alexa.Discovery directive to fail.
    An instance property string value may be reused for different devices.

    https://developer.amazon.com/docs/device-apis/alexa-togglecontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "es-MX", "fr-CA", "fr-FR", "it-IT", "ja-JP", "pt-BR",
    }

    def __init__(self, entity, instance, non_controllable=False):
        """Initialize the toggle capability for entity/instance."""
        super().__init__(entity, instance)
        self._resource = None
        self._semantics = None
        self.properties_non_controllable = lambda: non_controllable

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.ToggleController"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "toggleState"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def properties_retrievable(self):
        """Return True if properties can be retrieved."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "toggleState":
            raise UnsupportedProperty(name)

        # Fan Oscillating
        if self.instance == f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}":
            return (
                "ON"
                if self.entity.attributes.get(fan.ATTR_OSCILLATING)
                else "OFF"
            )

        return None

    def capability_resources(self):
        """Return capabilityResources object."""
        # Only the fan oscillating instance carries a resource.
        if self.instance != f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}":
            return None
        self._resource = AlexaCapabilityResource(
            [AlexaGlobalCatalog.SETTING_OSCILLATE, "Rotate", "Rotation"]
        )
        return self._resource.serialize_capability_resources()
class AlexaChannelController(AlexaCapability):
    """Expose channel changing through the Alexa.ChannelController interface.

    https://developer.amazon.com/docs/device-apis/alexa-channelcontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "es-MX", "fr-FR", "hi-IN", "it-IT", "ja-JP", "pt-BR",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.ChannelController"
class AlexaDoorbellEventSource(AlexaCapability):
    """Expose doorbell press events through Alexa.DoorbellEventSource.

    https://developer.amazon.com/docs/device-apis/alexa-doorbelleventsource.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "es-MX", "es-US", "fr-CA", "fr-FR", "hi-IN",
        "it-IT", "ja-JP",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.DoorbellEventSource"

    def capability_proactively_reported(self):
        """Doorbell events are always proactively reported."""
        return True
class AlexaPlaybackStateReporter(AlexaCapability):
    """Report media playback state through Alexa.PlaybackStateReporter.

    https://developer.amazon.com/docs/device-apis/alexa-playbackstatereporter.html
    """

    supported_locales = {"de-DE", "en-GB", "en-US", "es-MX", "fr-FR"}

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.PlaybackStateReporter"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "playbackState"}]

    def properties_proactively_reported(self):
        """Properties are reported asynchronously."""
        return True

    def properties_retrievable(self):
        """Properties can be retrieved on demand."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "playbackState":
            raise UnsupportedProperty(name)
        # Any state other than playing/paused is reported as STOPPED.
        state_map = {STATE_PLAYING: "PLAYING", STATE_PAUSED: "PAUSED"}
        return {"state": state_map.get(self.entity.state, "STOPPED")}
class AlexaSeekController(AlexaCapability):
    """Expose media seeking through the Alexa.SeekController interface.

    https://developer.amazon.com/docs/device-apis/alexa-seekcontroller.html
    """

    supported_locales = {"de-DE", "en-GB", "en-US", "es-MX"}

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.SeekController"
class AlexaEventDetectionSensor(AlexaCapability):
    """Implements Alexa.EventDetectionSensor.

    https://developer.amazon.com/docs/device-apis/alexa-eventdetectionsensor.html
    """

    supported_locales = {"en-US"}

    def __init__(self, hass, entity):
        """Initialize the entity."""
        super().__init__(entity)
        self.hass = hass

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.EventDetectionSensor"

    def properties_supported(self):
        """Return what properties this entity supports."""
        return [{"name": "humanPresenceDetectionState"}]

    def properties_proactively_reported(self):
        """Return True if properties asynchronously reported."""
        return True

    def get_property(self, name):
        """Read and return a property."""
        if name != "humanPresenceDetectionState":
            raise UnsupportedProperty(name)

        human_presence = "NOT_DETECTED"
        state = self.entity.state

        # Return None for unavailable and unknown states.
        # Allows the Alexa.EndpointHealth Interface to handle the
        # unavailable state in a stateReport.
        if state in (STATE_UNAVAILABLE, STATE_UNKNOWN, None):
            return None

        if self.entity.domain == image_processing.DOMAIN:
            # State is assumed to be numeric here; guard int() so a
            # non-numeric state reports NOT_DETECTED instead of raising
            # ValueError (the original crashed the state report).
            try:
                if int(state):
                    human_presence = "DETECTED"
            except ValueError:
                pass
        elif state == STATE_ON:
            human_presence = "DETECTED"

        return {"value": human_presence}

    def configuration(self):
        """Return supported detection types."""
        return {
            "detectionMethods": ["AUDIO", "VIDEO"],
            "detectionModes": {
                "humanPresence": {
                    "featureAvailability": "ENABLED",
                    "supportsNotDetected": True,
                }
            },
        }
class AlexaEqualizerController(AlexaCapability):
    """Implements Alexa.EqualizerController.

    https://developer.amazon.com/en-US/docs/alexa/device-apis/alexa-equalizercontroller.html
    """

    supported_locales = {"de-DE", "en-IN", "en-US", "es-ES", "it-IT", "ja-JP", "pt-BR"}

    # Sound modes the Alexa EqualizerController accepts.
    VALID_SOUND_MODES = {
        "MOVIE",
        "MUSIC",
        "NIGHT",
        "SPORT",
        "TV",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.EqualizerController"

    def properties_supported(self):
        """Return what properties this entity supports.

        Either bands, mode or both can be specified. Only mode is supported at this time.
        """
        return [{"name": "mode"}]

    def get_property(self, name):
        """Read and return a property.

        Returns the entity's current sound mode (upper-cased) when it is
        one of VALID_SOUND_MODES, otherwise None.
        """
        if name != "mode":
            raise UnsupportedProperty(name)

        sound_mode = self.entity.attributes.get(media_player.ATTR_SOUND_MODE)
        if sound_mode and sound_mode.upper() in self.VALID_SOUND_MODES:
            return sound_mode.upper()

        return None

    def configurations(self):
        """Return the sound modes supported in the configurations object."""
        configurations = None
        supported_sound_modes = self.get_valid_inputs(
            self.entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST, [])
        )
        if supported_sound_modes:
            configurations = {"modes": {"supported": supported_sound_modes}}

        return configurations

    @classmethod
    def get_valid_inputs(cls, sound_mode_list):
        """Return the Alexa-supported subset of *sound_mode_list*.

        Matching is case-insensitive; results are upper-cased and wrapped
        as ``{"name": mode}``. Rewritten as a comprehension instead of a
        manual append loop.
        """
        return [
            {"name": mode}
            for mode in (sm.upper() for sm in sound_mode_list)
            if mode in cls.VALID_SOUND_MODES
        ]
class AlexaTimeHoldController(AlexaCapability):
    """Implement the Alexa.TimeHoldController interface.

    https://developer.amazon.com/docs/device-apis/alexa-timeholdcontroller.html
    """

    supported_locales = {"en-US"}

    def __init__(self, entity, allow_remote_resume=False):
        """Initialize, recording whether Alexa may resume the device remotely."""
        super().__init__(entity)
        self._allow_remote_resume = allow_remote_resume

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.TimeHoldController"

    def configuration(self):
        """Return the configuration object.

        ``allowRemoteResume`` is True when Alexa can restart the operation
        on the device; when False, Alexa does not send the Resume directive.
        """
        return {"allowRemoteResume": self._allow_remote_resume}
class AlexaCameraStreamController(AlexaCapability):
    """Expose camera streams through Alexa.CameraStreamController.

    https://developer.amazon.com/docs/device-apis/alexa-camerastreamcontroller.html
    """

    supported_locales = {
        "de-DE", "en-AU", "en-CA", "en-GB", "en-IN", "en-US",
        "es-ES", "fr-FR", "hi-IN", "it-IT", "ja-JP", "pt-BR",
    }

    def name(self):
        """Return the Alexa API name of this interface."""
        return "Alexa.CameraStreamController"

    def camera_stream_configurations(self):
        """Return cameraStreamConfigurations: one 720p H264/AAC HLS stream."""
        stream_config = {
            "protocols": ["HLS"],
            "resolutions": [{"width": 1280, "height": 720}],
            "authorizationTypes": ["NONE"],
            "videoCodecs": ["H264"],
            "audioCodecs": ["AAC"],
        }
        return [stream_config]
| 30.72313
| 127
| 0.604304
|
acfbab6bf6f507e82056f053694fe19f6eea3083
| 1,290
|
py
|
Python
|
setup.py
|
Karnav-Thakur/discord-ext-menus
|
0d5ae36276b4bcdc56e441037fa9fcbe28bb5d7b
|
[
"MIT"
] | null | null | null |
setup.py
|
Karnav-Thakur/discord-ext-menus
|
0d5ae36276b4bcdc56e441037fa9fcbe28bb5d7b
|
[
"MIT"
] | null | null | null |
setup.py
|
Karnav-Thakur/discord-ext-menus
|
0d5ae36276b4bcdc56e441037fa9fcbe28bb5d7b
|
[
"MIT"
] | null | null | null |
from setuptools import setup
import re

# Read the version string out of the package source so setup.py works
# before the package is importable/installed.
with open('discord/ext/menus/__init__.py') as f:
    match = re.search(
        r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE
    )

# Bug fix: the original chained ``.group(1)`` directly onto re.search(),
# so a missing __version__ raised AttributeError instead of the intended
# RuntimeError below. Check the match object first.
if match is None or not match.group(1):
    raise RuntimeError('version is not set')
version = match.group(1)

if version.endswith(('a', 'b', 'rc')):
    # append version identifier based on commit count
    try:
        import subprocess

        out = subprocess.run(
            ['git', 'rev-list', '--count', 'HEAD'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        ).stdout
        if out:
            version += out.decode('utf-8').strip()
        out = subprocess.run(
            ['git', 'rev-parse', '--short', 'HEAD'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        ).stdout
        if out:
            version += '+g' + out.decode('utf-8').strip()
    except Exception:
        # Best effort: fall back to the plain pre-release version when git
        # metadata is unavailable.
        pass

setup(
    name='discord-ext-menus',
    author='Rapptz',
    url='https://github.com/Rapptz/discord-ext-menus',
    version=version,
    packages=['discord.ext.menus'],
    license='MIT',
    description='An extension module to make reaction based menus with discord.py',
    install_requires=['py-cord>=1.2.5'],
    python_requires='>=3.5.3',
)
| 33.947368
| 99
| 0.579845
|
acfbac5503c2b6e158df43b9f8ec6cd6f0b8c0af
| 17,316
|
py
|
Python
|
moto/sns/models.py
|
jzucker2/moto
|
ba3c9db8a76f1428892e867c68c1e2f4c04c1fa1
|
[
"Apache-2.0"
] | null | null | null |
moto/sns/models.py
|
jzucker2/moto
|
ba3c9db8a76f1428892e867c68c1e2f4c04c1fa1
|
[
"Apache-2.0"
] | null | null | null |
moto/sns/models.py
|
jzucker2/moto
|
ba3c9db8a76f1428892e867c68c1e2f4c04c1fa1
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
import datetime
import uuid
import json
import boto.sns
import requests
import six
import re
from moto.compat import OrderedDict
from moto.core import BaseBackend, BaseModel
from moto.core.utils import iso_8601_datetime_with_milliseconds
from moto.sqs import sqs_backends
from moto.awslambda import lambda_backends
from .exceptions import (
SNSNotFoundError, DuplicateSnsEndpointError, SnsEndpointDisabled, SNSInvalidParameter,
InvalidParameterValue
)
from .utils import make_arn_for_topic, make_arn_for_subscription
DEFAULT_ACCOUNT_ID = 123456789012
DEFAULT_PAGE_SIZE = 100
class Topic(BaseModel):
    """An in-memory SNS topic owned by one regional backend."""

    def __init__(self, name, sns_backend):
        self.name = name
        self.sns_backend = sns_backend
        self.account_id = DEFAULT_ACCOUNT_ID
        self.display_name = ""
        self.policy = json.dumps(DEFAULT_TOPIC_POLICY)
        self.delivery_policy = ""
        self.effective_delivery_policy = json.dumps(DEFAULT_EFFECTIVE_DELIVERY_POLICY)
        self.arn = make_arn_for_topic(
            self.account_id, name, sns_backend.region_name)
        # NOTE: "confimed" (sic) is kept as-is — other code may read it.
        self.subscriptions_pending = 0
        self.subscriptions_confimed = 0
        self.subscriptions_deleted = 0

    def publish(self, message, subject=None, message_attributes=None):
        """Fan *message* out to every subscription; return the message id."""
        message_id = six.text_type(uuid.uuid4())
        subscriptions, _next_token = self.sns_backend.list_subscriptions(self.arn)
        for subscription in subscriptions:
            subscription.publish(
                message,
                message_id,
                subject=subject,
                message_attributes=message_attributes,
            )
        return message_id

    def get_cfn_attribute(self, attribute_name):
        """Resolve a CloudFormation Fn::GetAtt on this topic."""
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
        if attribute_name != 'TopicName':
            raise UnformattedGetAttTemplateException()
        return self.name

    @property
    def physical_resource_id(self):
        """The topic ARN doubles as its physical resource id."""
        return self.arn

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Build a topic (and its subscriptions) from a CFN resource blob."""
        backend = sns_backends[region_name]
        properties = cloudformation_json['Properties']
        topic = backend.create_topic(properties.get("TopicName"))
        for sub_props in properties.get("Subscription", []):
            backend.subscribe(
                topic.arn, sub_props['Endpoint'], sub_props['Protocol'])
        return topic
class Subscription(BaseModel):
    """One SNS subscription: a topic delivering to an endpoint over a protocol."""
    def __init__(self, topic, endpoint, protocol):
        self.topic = topic
        self.endpoint = endpoint
        self.protocol = protocol
        self.arn = make_arn_for_subscription(self.topic.arn)
        self.attributes = {}
        self._filter_policy = None  # filter policy as a dict, not json.
        self.confirmed = False
    def publish(self, message, message_id, subject=None,
                message_attributes=None):
        """Deliver *message* to this subscription's endpoint.

        Messages not matching the filter policy are silently dropped.
        Delivery dispatches on ``self.protocol``: sqs (enveloped JSON to the
        queue backend), http/https (POST of the envelope), or lambda
        (invoke via the lambda backend). Other protocols are ignored.
        """
        if not self._matches_filter_policy(message_attributes):
            return
        if self.protocol == 'sqs':
            queue_name = self.endpoint.split(":")[-1]
            region = self.endpoint.split(":")[3]
            # SQS gets the full SNS notification envelope, pretty-printed
            # with sorted keys.
            enveloped_message = json.dumps(self.get_post_data(message, message_id, subject), sort_keys=True, indent=2, separators=(',', ': '))
            sqs_backends[region].send_message(queue_name, enveloped_message)
        elif self.protocol in ['http', 'https']:
            post_data = self.get_post_data(message, message_id, subject)
            requests.post(self.endpoint, json=post_data)
        elif self.protocol == 'lambda':
            # TODO: support bad function name
            # http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
            # A lambda ARN has 7 colon-separated parts without a qualifier
            # and 8 with one (...:function:name[:qualifier]).
            arr = self.endpoint.split(":")
            region = arr[3]
            qualifier = None
            if len(arr) == 7:
                assert arr[5] == 'function'
                function_name = arr[-1]
            elif len(arr) == 8:
                assert arr[5] == 'function'
                qualifier = arr[-1]
                function_name = arr[-2]
            else:
                assert False
            lambda_backends[region].send_message(function_name, message, subject=subject, qualifier=qualifier)
    def _matches_filter_policy(self, message_attributes):
        """Return True when *message_attributes* satisfy the filter policy.

        No policy means everything matches; otherwise every policy field
        must be present in the attributes and equal one of its string rules.
        """
        # TODO: support Anything-but matching, prefix matching and
        # numeric value matching.
        if not self._filter_policy:
            return True
        if message_attributes is None:
            message_attributes = {}
        def _field_match(field, rules, message_attributes):
            # A field matches when it exists and equals any of its rules.
            if field not in message_attributes:
                return False
            for rule in rules:
                if isinstance(rule, six.string_types):
                    # only string value matching is supported
                    if message_attributes[field] == rule:
                        return True
            return False
        return all(_field_match(field, rules, message_attributes)
                   for field, rules in six.iteritems(self._filter_policy))
    def get_post_data(self, message, message_id, subject):
        """Build the SNS notification envelope delivered to endpoints."""
        return {
            "Type": "Notification",
            "MessageId": message_id,
            "TopicArn": self.topic.arn,
            "Subject": subject or "my subject",
            "Message": message,
            "Timestamp": iso_8601_datetime_with_milliseconds(datetime.datetime.utcnow()),
            "SignatureVersion": "1",
            "Signature": "EXAMPLElDMXvB8r9R83tGoNn0ecwd5UjllzsvSvbItzfaMpN2nk5HVSw7XnOn/49IkxDKz8YrlH2qJXj2iZB0Zo2O71c4qQk1fMUDi3LGpij7RCW7AW9vYYsSqIKRnFS94ilu7NFhUzLiieYr4BKHpdTmdD6c0esKEYBpabxDSc=",
            "SigningCertURL": "https://sns.us-east-1.amazonaws.com/SimpleNotificationService-f3ecfb7224c7233fe7bb5f59f96de52f.pem",
            "UnsubscribeURL": "https://sns.us-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-east-1:123456789012:some-topic:2bcfbf39-05c3-41de-beaa-fcfcc21c8f55"
        }
class PlatformApplication(BaseModel):
    """A registered SNS platform application (e.g. a push-notification app)."""

    def __init__(self, region, name, platform, attributes):
        self.region = region
        self.name = name
        self.platform = platform
        self.attributes = attributes

    @property
    def arn(self):
        """ARN built from the fixed test account, platform and name."""
        return "arn:aws:sns:{0}:123456789012:app/{1}/{2}".format(
            self.region, self.platform, self.name
        )
class PlatformEndpoint(BaseModel):
    """A device endpoint registered under a platform application."""

    def __init__(self, region, application, custom_user_data, token, attributes):
        self.region = region
        self.application = application
        self.custom_user_data = custom_user_data
        self.token = token
        self.attributes = attributes
        self.id = uuid.uuid4()
        self.messages = OrderedDict()
        self.__fixup_attributes()

    def __fixup_attributes(self):
        # When AWS returns the attributes dict, it always contains these two
        # elements, so we need to automatically ensure they exist as well.
        self.attributes.setdefault('Token', self.token)
        self.attributes.setdefault('Enabled', 'True')

    @property
    def enabled(self):
        """Whether publishing is allowed; 'Enabled' is stored as a string."""
        return json.loads(self.attributes.get('Enabled', 'true').lower())

    @property
    def arn(self):
        return "arn:aws:sns:{0}:123456789012:endpoint/{1}/{2}/{3}".format(
            self.region,
            self.application.platform,
            self.application.name,
            self.id,
        )

    def publish(self, message):
        """Record *message* and return its generated id; raise when disabled."""
        if not self.enabled:
            raise SnsEndpointDisabled("Endpoint %s disabled" % self.id)
        # This is where we would actually send a message
        message_id = six.text_type(uuid.uuid4())
        self.messages[message_id] = message
        return message_id
class SNSBackend(BaseBackend):
    """In-memory SNS implementation for a single region.

    Holds topics, subscriptions (keyed by subscription ARN — see
    :meth:`subscribe`), platform applications, platform endpoints and
    account-level SMS state.
    """
    def __init__(self, region_name):
        super(SNSBackend, self).__init__()
        self.topics = OrderedDict()
        # Keyed by subscription ARN; get/set_subscription_attributes rely
        # on this for O(1) lookup.
        self.subscriptions = OrderedDict()
        self.applications = {}
        self.platform_endpoints = {}
        self.region_name = region_name
        self.sms_attributes = {}
        # Pre-seeded opted-out phone numbers.
        self.opt_out_numbers = ['+447420500600', '+447420505401', '+447632960543', '+447632960028', '+447700900149', '+447700900550', '+447700900545', '+447700900907']
        self.permissions = {}
    def reset(self):
        """Drop all state and re-initialise for the same region."""
        region_name = self.region_name
        self.__dict__ = {}
        self.__init__(region_name)
    def update_sms_attributes(self, attrs):
        """Merge *attrs* into the account-level SMS attributes."""
        self.sms_attributes.update(attrs)
    def create_topic(self, name):
        """Create the topic *name*, or return the existing one (idempotent)."""
        fails_constraints = not re.match(r'^[a-zA-Z0-9](?:[A-Za-z0-9_-]{0,253}[a-zA-Z0-9])?$', name)
        if fails_constraints:
            raise InvalidParameterValue("Topic names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 256 characters long.")
        candidate_topic = Topic(name, self)
        if candidate_topic.arn in self.topics:
            return self.topics[candidate_topic.arn]
        self.topics[candidate_topic.arn] = candidate_topic
        return candidate_topic
    def _get_values_nexttoken(self, values_map, next_token=None):
        """Return one page of *values_map* values plus the next page token.

        The token is the integer offset into the ordered dict; ``None``
        means there are no further pages.
        """
        if next_token is None:
            next_token = 0
        next_token = int(next_token)
        values = list(values_map.values())[
            next_token: next_token + DEFAULT_PAGE_SIZE]
        if len(values) == DEFAULT_PAGE_SIZE:
            next_token = next_token + DEFAULT_PAGE_SIZE
        else:
            next_token = None
        return values, next_token
    def _get_topic_subscriptions(self, topic):
        """List all subscriptions attached to *topic*."""
        return [sub for sub in self.subscriptions.values() if sub.topic == topic]
    def list_topics(self, next_token=None):
        """Return one page of topics."""
        return self._get_values_nexttoken(self.topics, next_token)
    def delete_topic(self, arn):
        """Delete a topic, unsubscribing everything attached to it first."""
        topic = self.get_topic(arn)
        subscriptions = self._get_topic_subscriptions(topic)
        for sub in subscriptions:
            self.unsubscribe(sub.arn)
        self.topics.pop(arn)
    def get_topic(self, arn):
        """Return the topic with *arn* or raise SNSNotFoundError."""
        try:
            return self.topics[arn]
        except KeyError:
            raise SNSNotFoundError("Topic with arn {0} not found".format(arn))
    def get_topic_from_phone_number(self, number):
        """Return the topic ARN of the sms subscription for *number*."""
        for subscription in self.subscriptions.values():
            if subscription.protocol == 'sms' and subscription.endpoint == number:
                return subscription.topic.arn
        raise SNSNotFoundError('Could not find valid subscription')
    def set_topic_attribute(self, topic_arn, attribute_name, attribute_value):
        """Set an arbitrary attribute on a topic."""
        topic = self.get_topic(topic_arn)
        setattr(topic, attribute_name, attribute_value)
    def subscribe(self, topic_arn, endpoint, protocol):
        """Subscribe *endpoint* to a topic, re-using any existing match."""
        # AWS doesn't create duplicates
        old_subscription = self._find_subscription(topic_arn, endpoint, protocol)
        if old_subscription:
            return old_subscription
        topic = self.get_topic(topic_arn)
        subscription = Subscription(topic, endpoint, protocol)
        self.subscriptions[subscription.arn] = subscription
        return subscription
    def _find_subscription(self, topic_arn, endpoint, protocol):
        """Return an existing matching subscription, or None."""
        for subscription in self.subscriptions.values():
            if subscription.topic.arn == topic_arn and subscription.endpoint == endpoint and subscription.protocol == protocol:
                return subscription
        return None
    def unsubscribe(self, subscription_arn):
        """Remove a subscription by ARN."""
        self.subscriptions.pop(subscription_arn)
    def list_subscriptions(self, topic_arn=None, next_token=None):
        """Return one page of subscriptions, optionally scoped to one topic."""
        if topic_arn:
            topic = self.get_topic(topic_arn)
            filtered = OrderedDict(
                [(sub.arn, sub) for sub in self._get_topic_subscriptions(topic)])
            return self._get_values_nexttoken(filtered, next_token)
        return self._get_values_nexttoken(self.subscriptions, next_token)
    def publish(self, arn, message, subject=None, message_attributes=None):
        """Publish to a topic ARN, falling back to a platform endpoint ARN."""
        if subject is not None and len(subject) > 100:
            # Note that the AWS docs around length are wrong: https://github.com/spulec/moto/issues/1503
            raise ValueError('Subject must be less than 100 characters')
        try:
            topic = self.get_topic(arn)
            message_id = topic.publish(message, subject=subject,
                                       message_attributes=message_attributes)
        except SNSNotFoundError:
            endpoint = self.get_endpoint(arn)
            message_id = endpoint.publish(message)
        return message_id
    def create_platform_application(self, region, name, platform, attributes):
        """Register a new platform application."""
        application = PlatformApplication(region, name, platform, attributes)
        self.applications[application.arn] = application
        return application
    def get_application(self, arn):
        """Return the platform application with *arn* or raise."""
        try:
            return self.applications[arn]
        except KeyError:
            raise SNSNotFoundError(
                "Application with arn {0} not found".format(arn))
    def set_application_attributes(self, arn, attributes):
        """Merge *attributes* into an application's attributes."""
        application = self.get_application(arn)
        application.attributes.update(attributes)
        return application
    def list_platform_applications(self):
        """Return all registered platform applications."""
        return self.applications.values()
    def delete_platform_application(self, platform_arn):
        """Remove a platform application by ARN."""
        self.applications.pop(platform_arn)
    def create_platform_endpoint(self, region, application, custom_user_data, token, attributes):
        """Create a device endpoint; duplicate tokens are rejected."""
        if any(token == endpoint.token for endpoint in self.platform_endpoints.values()):
            raise DuplicateSnsEndpointError("Duplicate endpoint token: %s" % token)
        platform_endpoint = PlatformEndpoint(
            region, application, custom_user_data, token, attributes)
        self.platform_endpoints[platform_endpoint.arn] = platform_endpoint
        return platform_endpoint
    def list_endpoints_by_platform_application(self, application_arn):
        """Return every endpoint belonging to one application."""
        return [
            endpoint for endpoint
            in self.platform_endpoints.values()
            if endpoint.application.arn == application_arn
        ]
    def get_endpoint(self, arn):
        """Return the platform endpoint with *arn* or raise."""
        try:
            return self.platform_endpoints[arn]
        except KeyError:
            raise SNSNotFoundError(
                "Endpoint with arn {0} not found".format(arn))
    def set_endpoint_attributes(self, arn, attributes):
        """Merge *attributes* into an endpoint's attributes."""
        endpoint = self.get_endpoint(arn)
        endpoint.attributes.update(attributes)
        return endpoint
    def delete_endpoint(self, arn):
        """Delete a platform endpoint by ARN."""
        try:
            del self.platform_endpoints[arn]
        except KeyError:
            raise SNSNotFoundError(
                "Endpoint with arn {0} not found".format(arn))
    def get_subscription_attributes(self, arn):
        """Return the attribute dict of the subscription with *arn*.

        Subscriptions are keyed by ARN, so this is a direct dict lookup
        (the previous implementation linearly scanned all subscriptions).
        """
        subscription = self.subscriptions.get(arn)
        if subscription is None:
            raise SNSNotFoundError("Subscription with arn {0} not found".format(arn))
        return subscription.attributes
    def set_subscription_attributes(self, arn, name, value):
        """Set a whitelisted attribute on the subscription with *arn*."""
        if name not in ['RawMessageDelivery', 'DeliveryPolicy', 'FilterPolicy']:
            raise SNSInvalidParameter('AttributeName')
        # TODO: should do validation
        subscription = self.subscriptions.get(arn)  # keyed by ARN; O(1)
        if subscription is None:
            raise SNSNotFoundError("Subscription with arn {0} not found".format(arn))
        subscription.attributes[name] = value
        if name == 'FilterPolicy':
            # Keep the parsed dict form for publish-time matching.
            subscription._filter_policy = json.loads(value)
# One SNSBackend instance is created eagerly for every region boto knows
# about, looked up by region name.
sns_backends = {}
for region in boto.sns.regions():
    sns_backends[region.name] = SNSBackend(region.name)
# Default access policy serialized into Topic.policy in Topic.__init__.
DEFAULT_TOPIC_POLICY = {
    "Version": "2008-10-17",
    "Id": "us-east-1/698519295917/test__default_policy_ID",
    "Statement": [{
        "Effect": "Allow",
        "Sid": "us-east-1/698519295917/test__default_statement_ID",
        "Principal": {
            "AWS": "*"
        },
        "Action": [
            "SNS:GetTopicAttributes",
            "SNS:SetTopicAttributes",
            "SNS:AddPermission",
            "SNS:RemovePermission",
            "SNS:DeleteTopic",
            "SNS:Subscribe",
            "SNS:ListSubscriptionsByTopic",
            "SNS:Publish",
            "SNS:Receive",
        ],
        "Resource": "arn:aws:sns:us-east-1:698519295917:test",
        "Condition": {
            "StringLike": {
                "AWS:SourceArn": "arn:aws:*:*:698519295917:*"
            }
        }
    }]
}
# Default retry/delivery policy serialized into
# Topic.effective_delivery_policy in Topic.__init__.
DEFAULT_EFFECTIVE_DELIVERY_POLICY = {
    'http': {
        'disableSubscriptionOverrides': False,
        'defaultHealthyRetryPolicy': {
            'numNoDelayRetries': 0,
            'numMinDelayRetries': 0,
            'minDelayTarget': 20,
            'maxDelayTarget': 20,
            'numMaxDelayRetries': 0,
            'numRetries': 3,
            'backoffFunction': 'linear'
        }
    }
}
| 37.643478
| 200
| 0.644086
|
acfbadfee6c9b441a06a6918e497d2ed8ea2eb3f
| 2,702
|
py
|
Python
|
specs/client/ml_pipelines/cl_bases.py
|
bitgn/ml-pipelines
|
904b6fa200aac1638491658c2d51ea40c33cbffa
|
[
"BSD-2-Clause"
] | 7
|
2019-07-25T05:36:21.000Z
|
2020-08-23T18:04:53.000Z
|
specs/client/ml_pipelines/cl_bases.py
|
bitgn/ml-pipelines
|
904b6fa200aac1638491658c2d51ea40c33cbffa
|
[
"BSD-2-Clause"
] | 1
|
2019-08-18T14:05:49.000Z
|
2019-08-18T17:51:39.000Z
|
specs/client/ml_pipelines/cl_bases.py
|
bitgn/ml-pipelines
|
904b6fa200aac1638491658c2d51ea40c33cbffa
|
[
"BSD-2-Clause"
] | 4
|
2019-08-18T13:42:45.000Z
|
2021-04-04T11:15:21.000Z
|
from dataclasses import dataclass
from typing import Callable, Optional
from . import mlp_api_pb2 as api
from . import mlp_api_pb2_grpc as rpc
import grpc
from . import errors
import os
import google.protobuf.message as pb
class EntityId:
    """Opaque identifier wrapping a raw byte uid."""

    def __init__(self, uid: bytes):
        self.uid = uid


class JobRunId(EntityId):
    """Identifier of a job run."""


class DatasetVersionId(EntityId):
    """Identifier of a dataset version."""


class SystemId(EntityId):
    """Identifier of a system."""
class Context:
    """Convenience wrapper over the Catalog gRPC stub.

    Every public method delegates to :meth:`_rpc`, which converts
    transport failures and error payloads into exceptions from the
    ``errors`` module.
    """
    def __init__(self, catalog: rpc.CatalogStub):
        self.catalog = catalog
    def create_project(self, req: api.CreateProjectRequest) -> api.ProjectInfoResponse:
        """Create a project and return its info."""
        resp: api.ProjectInfoResponse = self._rpc(lambda: self.catalog.CreateProject(req))
        return resp
    def get_project(self, req: api.GetProjectRequest) -> api.ProjectInfoResponse:
        """Fetch a single project's info."""
        resp: api.ProjectInfoResponse = self._rpc(lambda: self.catalog.GetProject(req))
        return resp
    def list_projects(self, req: api.ListProjectsRequest) -> api.ListProjectsResponse:
        """List projects."""
        resp: api.ListProjectsResponse = self._rpc(lambda: self.catalog.ListProjects(req))
        return resp
    def create_dataset(self, req: api.CreateDatasetRequest) -> api.DatasetInfoResponse:
        """Create a dataset and return its info."""
        s: api.DatasetInfoResponse = self._rpc(lambda: self.catalog.CreateDataset(req))
        return s
    def get_dataset(self, req: api.GetDatasetRequest) -> api.DatasetInfoResponse:
        """Fetch a single dataset's info."""
        s: api.DatasetInfoResponse = self._rpc(lambda: self.catalog.GetDataset(req))
        return s
    def list_datasets(self, req: api.ListDatasetsRequest) -> api.ListDatasetsResponse:
        """List datasets."""
        s: api.ListDatasetsResponse = self._rpc(lambda: self.catalog.ListDatasets(req))
        return s
    def update_dataset(self, req: api.UpdateDatasetRequest) -> api.UpdateDatasetRequest:
        """Update a dataset."""
        # NOTE(review): the return annotation mirrors the original and looks
        # like it should be an UpdateDataset *response* type — confirm
        # against mlp_api_pb2 before changing it.
        s: api.UpdateDatasetRequest = self._rpc(lambda: self.catalog.UpdateDataset(req))
        return s
    def reset(self):
        """Send a Reset request to the catalog service."""
        self._rpc(lambda: self.catalog.Reset(api.ResetRequest()))
    def stat(self):
        """Return catalog statistics."""
        s: api.StatResponse = self._rpc(lambda: self.catalog.Stat(api.StatRequest()))
        return s
    def add_dataset_activity(self, req: api.AddDatasetActivityRequest):
        """Record activity against a dataset."""
        s: api.AddDatasetActivityResponse = self._rpc(lambda: self.catalog.AddDatasetActivity(req))
        return s
    def _rpc(self, call: Callable[[], pb.Message]):
        """Invoke *call* and normalise failures.

        Raises ValueError when the response type has no ``error`` field,
        an ``errors`` exception when the payload carries a non-zero error
        code, and translates grpc.RpcError via errors.from_exception.
        (Parameter renamed from ``callable``, which shadowed the builtin.)
        """
        try:
            resp = call()
            if not hasattr(resp, "error"):
                # Bug fix: this was ``raise ValueError(print(f'...'))``,
                # which printed the message and raised ValueError(None).
                raise ValueError(f'{resp.__class__} has no error attribute')
            if resp.error.code != 0:
                raise errors.from_error(resp.error)
            return resp
        except grpc.RpcError as e:
            raise errors.from_exception(e)
| 30.359551
| 99
| 0.689119
|
acfbaea934b9d5ace24d4971a23c1122a8c75220
| 706
|
py
|
Python
|
py_elasticinfra/metrics/memory.py
|
NullConvergence/py_metrics
|
fa58959591a1a4ee90cb4145acd4ed5f9f6c3b8a
|
[
"MIT"
] | null | null | null |
py_elasticinfra/metrics/memory.py
|
NullConvergence/py_metrics
|
fa58959591a1a4ee90cb4145acd4ed5f9f6c3b8a
|
[
"MIT"
] | null | null | null |
py_elasticinfra/metrics/memory.py
|
NullConvergence/py_metrics
|
fa58959591a1a4ee90cb4145acd4ed5f9f6c3b8a
|
[
"MIT"
] | null | null | null |
import psutil
from .base import BaseMetric
class Memory(BaseMetric):
    """Metric reporting virtual- and swap-memory usage via psutil."""

    def __init__(self):
        pass

    def measure(self):
        """Snapshot current memory statistics as a nested dict."""
        virt = psutil.virtual_memory()
        swp = psutil.swap_memory()
        virtual_stats = {
            "total": virt.total,
            "available": virt.available,
            "percent": virt.percent,
            "used": virt.used,
            "free": virt.free,
        }
        swap_stats = {
            "total": swp.total,
            "used": swp.used,
            "free": swp.free,
            "percent": swp.percent,
        }
        return {
            "virtual_memory": virtual_stats,
            "swap_memory": swap_stats,
        }

    def get_type(self):
        """Identify this metric type."""
        return 'memory'
| 23.533333
| 43
| 0.454674
|
acfbaedd92eded8e72508180832b0bb135147a27
| 309
|
py
|
Python
|
application/views/errors.py
|
AstroChem/MAPServer
|
3ea8209d5d106c8799216ae2405ba1c396477eec
|
[
"MIT"
] | null | null | null |
application/views/errors.py
|
AstroChem/MAPServer
|
3ea8209d5d106c8799216ae2405ba1c396477eec
|
[
"MIT"
] | 1
|
2020-04-29T16:16:37.000Z
|
2020-04-29T16:16:37.000Z
|
application/views/errors.py
|
AstroChem/MAPServer
|
3ea8209d5d106c8799216ae2405ba1c396477eec
|
[
"MIT"
] | 1
|
2020-04-29T16:03:51.000Z
|
2020-04-29T16:03:51.000Z
|
from flask import render_template
from flask import Blueprint
bp = Blueprint("errors", __name__)
@bp.app_errorhandler(404)
def page_not_found(e):
    """Render the custom 404 error page."""
    body = render_template("errors/404.html")
    return body, 404
@bp.app_errorhandler(500)
def internal_server_error(e):
    """Render the custom 500 error page."""
    body = render_template("errors/500.html")
    return body, 500
| 20.6
| 50
| 0.770227
|
acfbb01dbe08116ae039cce0e9a6de7466af795e
| 8,836
|
py
|
Python
|
changegen/__main__.py
|
trailbehind/changegen
|
ba1ed0f4165b5c64a146e2a3bc1cd2030d692f4d
|
[
"MIT"
] | 3
|
2021-01-30T13:00:33.000Z
|
2021-02-03T21:32:13.000Z
|
changegen/__main__.py
|
trailbehind/changegen
|
ba1ed0f4165b5c64a146e2a3bc1cd2030d692f4d
|
[
"MIT"
] | 9
|
2021-04-26T16:42:34.000Z
|
2021-11-01T20:55:02.000Z
|
changegen/__main__.py
|
trailbehind/changegen
|
ba1ed0f4165b5c64a146e2a3bc1cd2030d692f4d
|
[
"MIT"
] | null | null | null |
import logging
import math
import os
import subprocess
import sys
import click
import psycopg2 as psy
from . import PACKAGE_NAME
from .generator import generate_changes
from .generator import generate_deletions
from .util import setup_logging
"""
cli.py
Tony Cannistra <tony@gaiagps.com>
Provides main changegen CLI-based entrypoint.
"""
def _get_max_ids(source_extract):
# get the max ID from source extract using osmium
## first ensure that osmium exists
try:
proc = subprocess.check_call(
"osmium --help",
shell=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
except subprocess.CalledProcessError as e:
logging.warning(
"osmium not found; unable to determine max OSM id in source extract"
)
raise e
ids = {}
for idtype in ["data.maxid.ways", "data.maxid.nodes", "data.maxid.relations"]:
proc = subprocess.Popen(
f"osmium fileinfo -e -g {idtype} --no-progress {source_extract}",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
if proc.stderr.read():
raise subprocess.CalledProcessError(-1, "osmium", "Error in osmium.")
ids[idtype.split(".")[-1]] = int(proc.stdout.read().strip())
return ids
def _get_db_tables(suffix, dbname, dbport, dbuser, dbpass, dbhost):
c = psy.connect(
dbname=dbname, host=dbhost, user=dbuser, password=dbpass, port=dbport
)
cur = c.cursor()
_q = (
"SELECT table_name from information_schema.tables "
f"where table_name LIKE '%{suffix}'"
)
cur.execute(_q)
ans = cur.fetchall()
c.close()
return [a[0] for a in ans]
@click.command()
@click.option("-d", "--debug", help="Enable verbose logging.", is_flag=True)
@click.option(
"-s",
"--suffix",
help=(
"Suffix for DB tables containing newly-added features."
" Can be passed multiple times for multiple suffixes."
),
default=["_new"],
show_default=True,
multiple=True,
)
@click.option(
"--deletions",
help=(
"Name of table containing OSM IDs for which <delete> tags "
" should be created in the resulting changefile. Table must "
" contain <osm_id> column. Can be passed multiple times."
),
multiple=True,
default=[],
)
@click.option(
"-e",
"--existing",
help=(
"Table of geometries to use when determining whether existing"
" features must be altered to include linestring intersections."
),
multiple=True,
default=[],
)
@click.option(
"-m",
"--modify_meta",
help=(
"Create <modify> tags in changefile, instead of create nodes "
"for all tables specified by --suffix. Only applies to "
"Ways with with modified metadata, not geometries (see full help)."
),
is_flag=True,
)
@click.option("-o", "-outdir", help="Directory to output change files to.", default=".")
@click.option("--compress", help="gzip-compress xml output", is_flag=True)
@click.option("--neg_id", help="use negative ids for new OSM elements", is_flag=True)
@click.option(
"--id_offset",
help="Integer value to start generating IDs from.",
type=int,
default=0,
show_default=True,
)
@click.option(
"--no_collisions",
help="Stop execution if the chosen ID offset "
"will cause collisions with existing OSM ids."
" (requires osmium).",
is_flag=True,
)
@click.option(
"--self",
"-si",
help=(
"Check for and add intersections among newly-added features. "
"It is strongly adviseable to create a geometry index on "
"new geometry tables' geometry column before using this option."
),
is_flag=True,
)
@click.option(
"--max_nodes_per_way",
help=(
"Number of nodes allowed per way. Default 2000."
" If a way exceeds this value "
" it will be subdivided into smaller ways. Pass `none` for no limit."
),
default="2000",
)
@click.option(
"--hstore_tags",
help=(
"Specify Postgres hstore column to obtain tags from, "
"in addition to table columns. "
"This column should contain a hstore, and the keys will be compared "
"to existing columns. Column key values will take "
"precedence over values in the hstore if duplicates are found. "
"Note that this column name will apply to both source tables "
"and intersection tables. "
),
default=None,
show_default=True,
)
@click.option("--osmsrc", help="Source OSM PBF File path", required=True)
@click.argument("dbname", default=os.environ.get("PGDATABASE", "conflate"))
@click.argument("dbport", default=os.environ.get("PGPORT", "15432"))
@click.argument("dbuser", default=os.environ.get("PGUSER", "postgres"))
@click.argument("dbhost", default=os.environ.get("PGHOST", "localhost"))
@click.argument("dbpass", default=os.environ.get("PGPASSWORD", ""))
def main(*args: tuple, **kwargs: dict):
"""
Create osmchange file describing changes to an imposm-based PostGIS
database after a spatial conflation workflow.
This module relies on a PostGIS database generated by imposm3
from an OSM Planet file. Connection parameters can be provided via
standard Postgres environment variables, as positional arguments,
or a combination of both. Defaults are from the environment variables.
If they don't exist, suitable defaults are provided.
This module produces a change file that includes any newly-added
features as well as any features that must be modified to
properly represent linestring intersections. The resulting file
can be applied to a Planet file to alter the file with the
conflated changes.
If the tables selected by --suffix do not represent new features
but actually represent features with modified metadata, use --modify_meta.
NOTE that --modify_meta does not support modified geometries (use default
behavior with a --delete table for that).
--modify_meta is not compatible with intersection detection. Creation of
modify nodes is only compatible with linestring features.
"""
setup_logging(debug=kwargs["debug"])
logging.debug(f"Args: {kwargs}")
# Check for ID collisions and warn
try:
ids = _get_max_ids(kwargs["osmsrc"])
if any([kwargs["id_offset"] < id for id in ids.values()]):
_log_text = f"Chosen ID offset {kwargs['id_offset']} may cause collisions with existing OSM IDs (max IDs: {ids})."
if kwargs["no_collisions"]:
logging.fatal(_log_text)
sys.exit(-1)
else:
logging.warning(_log_text)
except subprocess.CalledProcessError:
logging.error("Error checking existing OSM max ids.")
new_tables = []
for suffix in kwargs["suffix"]:
new_tables.extend(
_get_db_tables(
suffix,
kwargs["dbname"],
kwargs["dbport"],
kwargs["dbuser"],
kwargs["dbpass"],
kwargs["dbhost"],
)
)
logging.info(f"Found tables in db: {new_tables}")
max_nodes_per_way = kwargs["max_nodes_per_way"]
if str(max_nodes_per_way).lower() == "none":
max_nodes_per_way = math.inf
elif max_nodes_per_way == None:
max_nodes_per_way = 2000
if kwargs["modify_meta"] and kwargs["existing"]:
raise RuntimeError("--modify_meta cannot be used with --existing.")
for table in new_tables:
generate_changes(
table,
kwargs["existing"],
kwargs["deletions"],
kwargs["dbname"],
kwargs["dbport"],
kwargs["dbuser"],
kwargs["dbpass"] if kwargs["dbpass"] != "" else None,
kwargs["dbhost"],
kwargs["osmsrc"],
os.path.join(str(kwargs["o"]), f"{table}.osc"),
compress=kwargs["compress"],
neg_id=kwargs["neg_id"],
id_offset=kwargs["id_offset"],
self_intersections=kwargs["self"],
max_nodes_per_way=int(max_nodes_per_way),
modify_only=kwargs["modify_meta"],
hstore_column=kwargs["hstore_tags"],
)
for table in kwargs["deletions"]:
generate_deletions(
table,
"osm_id",
kwargs["dbname"],
kwargs["dbport"],
kwargs["dbuser"],
kwargs["dbpass"] if kwargs["dbpass"] != "" else None,
kwargs["dbhost"],
kwargs["osmsrc"],
os.path.join(str(kwargs["o"]), f"{table}.osc"),
compress=kwargs["compress"],
)
if __name__ == "__main__":
main(prog_name=PACKAGE_NAME)
| 32.725926
| 126
| 0.625622
|
acfbb01efe67c3610ba29b7e3e25dfaa51917c8e
| 3,459
|
py
|
Python
|
aloe/timelord/timelord_launcher.py
|
Aloe-Network/aloe-blockchain
|
72b1f64f177e144a81b9d38f194427ea39e16edb
|
[
"Apache-2.0"
] | 3
|
2021-06-03T09:09:28.000Z
|
2021-07-24T16:22:40.000Z
|
aloe/timelord/timelord_launcher.py
|
Aloe-Network/aloe-blockchain
|
72b1f64f177e144a81b9d38f194427ea39e16edb
|
[
"Apache-2.0"
] | null | null | null |
aloe/timelord/timelord_launcher.py
|
Aloe-Network/aloe-blockchain
|
72b1f64f177e144a81b9d38f194427ea39e16edb
|
[
"Apache-2.0"
] | 1
|
2021-07-14T04:15:26.000Z
|
2021-07-14T04:15:26.000Z
|
import asyncio
import logging
import pathlib
import signal
import socket
import time
from typing import Dict, List
import pkg_resources
from aloe.util.aloe_logging import initialize_logging
from aloe.util.config import load_config
from aloe.util.default_root import DEFAULT_ROOT_PATH
from aloe.util.setproctitle import setproctitle
active_processes: List = []
stopped = False
lock = asyncio.Lock()
log = logging.getLogger(__name__)
async def kill_processes():
global stopped
global active_processes
async with lock:
stopped = True
for process in active_processes:
try:
process.kill()
except ProcessLookupError:
pass
def find_vdf_client() -> pathlib.Path:
p = pathlib.Path(pkg_resources.get_distribution("aloevdf").location) / "vdf_client"
if p.is_file():
return p
raise FileNotFoundError("can't find vdf_client binary")
async def spawn_process(host: str, port: int, counter: int):
global stopped
global active_processes
path_to_vdf_client = find_vdf_client()
first_10_seconds = True
start_time = time.time()
while not stopped:
try:
dirname = path_to_vdf_client.parent
basename = path_to_vdf_client.name
resolved = socket.gethostbyname(host)
proc = await asyncio.create_subprocess_shell(
f"{basename} {resolved} {port} {counter}",
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
env={"PATH": dirname},
)
except Exception as e:
log.warning(f"Exception while spawning process {counter}: {(e)}")
continue
async with lock:
active_processes.append(proc)
stdout, stderr = await proc.communicate()
if stdout:
log.info(f"VDF client {counter}: {stdout.decode().rstrip()}")
if stderr:
if first_10_seconds:
if time.time() - start_time > 10:
first_10_seconds = False
else:
log.error(f"VDF client {counter}: {stderr.decode().rstrip()}")
log.info(f"Process number {counter} ended.")
async with lock:
if proc in active_processes:
active_processes.remove(proc)
await asyncio.sleep(0.1)
async def spawn_all_processes(config: Dict, net_config: Dict):
await asyncio.sleep(5)
port = config["port"]
process_count = config["process_count"]
awaitables = [spawn_process(net_config["self_hostname"], port, i) for i in range(process_count)]
await asyncio.gather(*awaitables)
def main():
root_path = DEFAULT_ROOT_PATH
setproctitle("aloe_timelord_launcher")
net_config = load_config(root_path, "config.yaml")
config = net_config["timelord_launcher"]
initialize_logging("TLauncher", config["logging"], root_path)
def signal_received():
asyncio.create_task(kill_processes())
loop = asyncio.get_event_loop()
try:
loop.add_signal_handler(signal.SIGINT, signal_received)
loop.add_signal_handler(signal.SIGTERM, signal_received)
except NotImplementedError:
log.info("signal handlers unsupported")
try:
loop.run_until_complete(spawn_all_processes(config, net_config))
finally:
log.info("Launcher fully closed.")
loop.close()
if __name__ == "__main__":
main()
| 30.078261
| 100
| 0.65279
|
acfbb032c23d16f7238ff8aacbe4b112eb6b3e0f
| 26,307
|
py
|
Python
|
xarray/core/missing.py
|
readthedocs-assistant/xarray
|
84961e6a2b30f495ddc55c4024f105a3f89e6243
|
[
"Apache-2.0"
] | null | null | null |
xarray/core/missing.py
|
readthedocs-assistant/xarray
|
84961e6a2b30f495ddc55c4024f105a3f89e6243
|
[
"Apache-2.0"
] | null | null | null |
xarray/core/missing.py
|
readthedocs-assistant/xarray
|
84961e6a2b30f495ddc55c4024f105a3f89e6243
|
[
"Apache-2.0"
] | null | null | null |
import datetime as dt
import warnings
from functools import partial
from numbers import Number
from typing import Any, Callable, Dict, Hashable, Sequence, Union
import numpy as np
import pandas as pd
from packaging.version import Version
from . import utils
from .common import _contains_datetime_like_objects, ones_like
from .computation import apply_ufunc
from .duck_array_ops import datetime_to_numeric, push, timedelta_to_numeric
from .options import OPTIONS, _get_keep_attrs
from .pycompat import dask_version, is_duck_dask_array
from .utils import OrderedSet, is_scalar
from .variable import Variable, broadcast_variables
def _get_nan_block_lengths(obj, dim: Hashable, index: Variable):
"""
Return an object where each NaN element in 'obj' is replaced by the
length of the gap the element is in.
"""
# make variable so that we get broadcasting for free
index = Variable([dim], index)
# algorithm from https://github.com/pydata/xarray/pull/3302#discussion_r324707072
arange = ones_like(obj) * index
valid = obj.notnull()
valid_arange = arange.where(valid)
cumulative_nans = valid_arange.ffill(dim=dim).fillna(index[0])
nan_block_lengths = (
cumulative_nans.diff(dim=dim, label="upper")
.reindex({dim: obj[dim]})
.where(valid)
.bfill(dim=dim)
.where(~valid, 0)
.fillna(index[-1] - valid_arange.max())
)
return nan_block_lengths
class BaseInterpolator:
"""Generic interpolator class for normalizing interpolation methods"""
cons_kwargs: Dict[str, Any]
call_kwargs: Dict[str, Any]
f: Callable
method: str
def __call__(self, x):
return self.f(x, **self.call_kwargs)
def __repr__(self):
return "{type}: method={method}".format(
type=self.__class__.__name__, method=self.method
)
class NumpyInterpolator(BaseInterpolator):
"""One-dimensional linear interpolation.
See Also
--------
numpy.interp
"""
def __init__(self, xi, yi, method="linear", fill_value=None, period=None):
if method != "linear":
raise ValueError("only method `linear` is valid for the NumpyInterpolator")
self.method = method
self.f = np.interp
self.cons_kwargs = {}
self.call_kwargs = {"period": period}
self._xi = xi
self._yi = yi
nan = np.nan if yi.dtype.kind != "c" else np.nan + np.nan * 1j
if fill_value is None:
self._left = nan
self._right = nan
elif isinstance(fill_value, Sequence) and len(fill_value) == 2:
self._left = fill_value[0]
self._right = fill_value[1]
elif is_scalar(fill_value):
self._left = fill_value
self._right = fill_value
else:
raise ValueError(f"{fill_value} is not a valid fill_value")
def __call__(self, x):
return self.f(
x,
self._xi,
self._yi,
left=self._left,
right=self._right,
**self.call_kwargs,
)
class ScipyInterpolator(BaseInterpolator):
"""Interpolate a 1-D function using Scipy interp1d
See Also
--------
scipy.interpolate.interp1d
"""
def __init__(
self,
xi,
yi,
method=None,
fill_value=None,
assume_sorted=True,
copy=False,
bounds_error=False,
order=None,
**kwargs,
):
from scipy.interpolate import interp1d
if method is None:
raise ValueError(
"method is a required argument, please supply a "
"valid scipy.inter1d method (kind)"
)
if method == "polynomial":
if order is None:
raise ValueError("order is required when method=polynomial")
method = order
self.method = method
self.cons_kwargs = kwargs
self.call_kwargs = {}
nan = np.nan if yi.dtype.kind != "c" else np.nan + np.nan * 1j
if fill_value is None and method == "linear":
fill_value = nan, nan
elif fill_value is None:
fill_value = nan
self.f = interp1d(
xi,
yi,
kind=self.method,
fill_value=fill_value,
bounds_error=bounds_error,
assume_sorted=assume_sorted,
copy=copy,
**self.cons_kwargs,
)
class SplineInterpolator(BaseInterpolator):
"""One-dimensional smoothing spline fit to a given set of data points.
See Also
--------
scipy.interpolate.UnivariateSpline
"""
def __init__(
self,
xi,
yi,
method="spline",
fill_value=None,
order=3,
nu=0,
ext=None,
**kwargs,
):
from scipy.interpolate import UnivariateSpline
if method != "spline":
raise ValueError("only method `spline` is valid for the SplineInterpolator")
self.method = method
self.cons_kwargs = kwargs
self.call_kwargs = {"nu": nu, "ext": ext}
if fill_value is not None:
raise ValueError("SplineInterpolator does not support fill_value")
self.f = UnivariateSpline(xi, yi, k=order, **self.cons_kwargs)
def _apply_over_vars_with_dim(func, self, dim=None, **kwargs):
"""Wrapper for datasets"""
ds = type(self)(coords=self.coords, attrs=self.attrs)
for name, var in self.data_vars.items():
if dim in var.dims:
ds[name] = func(var, dim=dim, **kwargs)
else:
ds[name] = var
return ds
def get_clean_interp_index(
arr, dim: Hashable, use_coordinate: Union[str, bool] = True, strict: bool = True
):
"""Return index to use for x values in interpolation or curve fitting.
Parameters
----------
arr : DataArray
Array to interpolate or fit to a curve.
dim : str
Name of dimension along which to fit.
use_coordinate : str or bool
If use_coordinate is True, the coordinate that shares the name of the
dimension along which interpolation is being performed will be used as the
x values. If False, the x values are set as an equally spaced sequence.
strict : bool
Whether to raise errors if the index is either non-unique or non-monotonic (default).
Returns
-------
Variable
Numerical values for the x-coordinates.
Notes
-----
If indexing is along the time dimension, datetime coordinates are converted
to time deltas with respect to 1970-01-01.
"""
# Question: If use_coordinate is a string, what role does `dim` play?
from xarray.coding.cftimeindex import CFTimeIndex
if use_coordinate is False:
axis = arr.get_axis_num(dim)
return np.arange(arr.shape[axis], dtype=np.float64)
if use_coordinate is True:
index = arr.get_index(dim)
else: # string
index = arr.coords[use_coordinate]
if index.ndim != 1:
raise ValueError(
f"Coordinates used for interpolation must be 1D, "
f"{use_coordinate} is {index.ndim}D."
)
index = index.to_index()
# TODO: index.name is None for multiindexes
# set name for nice error messages below
if isinstance(index, pd.MultiIndex):
index.name = dim
if strict:
if not index.is_monotonic:
raise ValueError(f"Index {index.name!r} must be monotonically increasing")
if not index.is_unique:
raise ValueError(f"Index {index.name!r} has duplicate values")
# Special case for non-standard calendar indexes
# Numerical datetime values are defined with respect to 1970-01-01T00:00:00 in units of nanoseconds
if isinstance(index, (CFTimeIndex, pd.DatetimeIndex)):
offset = type(index[0])(1970, 1, 1)
if isinstance(index, CFTimeIndex):
index = index.values
index = Variable(
data=datetime_to_numeric(index, offset=offset, datetime_unit="ns"),
dims=(dim,),
)
# raise if index cannot be cast to a float (e.g. MultiIndex)
try:
index = index.values.astype(np.float64)
except (TypeError, ValueError):
# pandas raises a TypeError
# xarray/numpy raise a ValueError
raise TypeError(
f"Index {index.name!r} must be castable to float64 to support "
f"interpolation or curve fitting, got {type(index).__name__}."
)
return index
def interp_na(
self,
dim: Hashable = None,
use_coordinate: Union[bool, str] = True,
method: str = "linear",
limit: int = None,
max_gap: Union[int, float, str, pd.Timedelta, np.timedelta64, dt.timedelta] = None,
keep_attrs: bool = None,
**kwargs,
):
"""Interpolate values according to different methods."""
from xarray.coding.cftimeindex import CFTimeIndex
if dim is None:
raise NotImplementedError("dim is a required argument")
if limit is not None:
valids = _get_valid_fill_mask(self, dim, limit)
if max_gap is not None:
max_type = type(max_gap).__name__
if not is_scalar(max_gap):
raise ValueError("max_gap must be a scalar.")
# TODO: benbovy - flexible indexes: update when CFTimeIndex (and DatetimeIndex?)
# has its own class inheriting from xarray.Index
if (
dim in self.xindexes
and isinstance(
self.xindexes[dim].to_pandas_index(), (pd.DatetimeIndex, CFTimeIndex)
)
and use_coordinate
):
# Convert to float
max_gap = timedelta_to_numeric(max_gap)
if not use_coordinate:
if not isinstance(max_gap, (Number, np.number)):
raise TypeError(
f"Expected integer or floating point max_gap since use_coordinate=False. Received {max_type}."
)
# method
index = get_clean_interp_index(self, dim, use_coordinate=use_coordinate)
interp_class, kwargs = _get_interpolator(method, **kwargs)
interpolator = partial(func_interpolate_na, interp_class, **kwargs)
if keep_attrs is None:
keep_attrs = _get_keep_attrs(default=True)
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "overflow", RuntimeWarning)
warnings.filterwarnings("ignore", "invalid value", RuntimeWarning)
arr = apply_ufunc(
interpolator,
self,
index,
input_core_dims=[[dim], [dim]],
output_core_dims=[[dim]],
output_dtypes=[self.dtype],
dask="parallelized",
vectorize=True,
keep_attrs=keep_attrs,
).transpose(*self.dims)
if limit is not None:
arr = arr.where(valids)
if max_gap is not None:
if dim not in self.coords:
raise NotImplementedError(
"max_gap not implemented for unlabeled coordinates yet."
)
nan_block_lengths = _get_nan_block_lengths(self, dim, index)
arr = arr.where(nan_block_lengths <= max_gap)
return arr
def func_interpolate_na(interpolator, y, x, **kwargs):
"""helper function to apply interpolation along 1 dimension"""
# reversed arguments are so that attrs are preserved from da, not index
# it would be nice if this wasn't necessary, works around:
# "ValueError: assignment destination is read-only" in assignment below
out = y.copy()
nans = pd.isnull(y)
nonans = ~nans
# fast track for no-nans, all nan but one, and all-nans cases
n_nans = nans.sum()
if n_nans == 0 or n_nans >= len(y) - 1:
return y
f = interpolator(x[nonans], y[nonans], **kwargs)
out[nans] = f(x[nans])
return out
def _bfill(arr, n=None, axis=-1):
"""inverse of ffill"""
arr = np.flip(arr, axis=axis)
# fill
arr = push(arr, axis=axis, n=n)
# reverse back to original
return np.flip(arr, axis=axis)
def ffill(arr, dim=None, limit=None):
"""forward fill missing values"""
if not OPTIONS["use_bottleneck"]:
raise RuntimeError(
"ffill requires bottleneck to be enabled."
" Call `xr.set_options(use_bottleneck=True)` to enable it."
)
axis = arr.get_axis_num(dim)
# work around for bottleneck 178
_limit = limit if limit is not None else arr.shape[axis]
return apply_ufunc(
push,
arr,
dask="allowed",
keep_attrs=True,
output_dtypes=[arr.dtype],
kwargs=dict(n=_limit, axis=axis),
).transpose(*arr.dims)
def bfill(arr, dim=None, limit=None):
"""backfill missing values"""
if not OPTIONS["use_bottleneck"]:
raise RuntimeError(
"bfill requires bottleneck to be enabled."
" Call `xr.set_options(use_bottleneck=True)` to enable it."
)
axis = arr.get_axis_num(dim)
# work around for bottleneck 178
_limit = limit if limit is not None else arr.shape[axis]
return apply_ufunc(
_bfill,
arr,
dask="allowed",
keep_attrs=True,
output_dtypes=[arr.dtype],
kwargs=dict(n=_limit, axis=axis),
).transpose(*arr.dims)
def _import_interpolant(interpolant, method):
"""Import interpolant from scipy.interpolate."""
try:
from scipy import interpolate
return getattr(interpolate, interpolant)
except ImportError as e:
raise ImportError(f"Interpolation with method {method} requires scipy.") from e
def _get_interpolator(method, vectorizeable_only=False, **kwargs):
"""helper function to select the appropriate interpolator class
returns interpolator class and keyword arguments for the class
"""
interp1d_methods = [
"linear",
"nearest",
"zero",
"slinear",
"quadratic",
"cubic",
"polynomial",
]
valid_methods = interp1d_methods + [
"barycentric",
"krog",
"pchip",
"spline",
"akima",
]
# prioritize scipy.interpolate
if (
method == "linear"
and not kwargs.get("fill_value", None) == "extrapolate"
and not vectorizeable_only
):
kwargs.update(method=method)
interp_class = NumpyInterpolator
elif method in valid_methods:
if method in interp1d_methods:
kwargs.update(method=method)
interp_class = ScipyInterpolator
elif vectorizeable_only:
raise ValueError(
f"{method} is not a vectorizeable interpolator. "
f"Available methods are {interp1d_methods}"
)
elif method == "barycentric":
interp_class = _import_interpolant("BarycentricInterpolator", method)
elif method == "krog":
interp_class = _import_interpolant("KroghInterpolator", method)
elif method == "pchip":
interp_class = _import_interpolant("PchipInterpolator", method)
elif method == "spline":
kwargs.update(method=method)
interp_class = SplineInterpolator
elif method == "akima":
interp_class = _import_interpolant("Akima1DInterpolator", method)
else:
raise ValueError(f"{method} is not a valid scipy interpolator")
else:
raise ValueError(f"{method} is not a valid interpolator")
return interp_class, kwargs
def _get_interpolator_nd(method, **kwargs):
"""helper function to select the appropriate interpolator class
returns interpolator class and keyword arguments for the class
"""
valid_methods = ["linear", "nearest"]
if method in valid_methods:
kwargs.update(method=method)
interp_class = _import_interpolant("interpn", method)
else:
raise ValueError(
f"{method} is not a valid interpolator for interpolating "
"over multiple dimensions."
)
return interp_class, kwargs
def _get_valid_fill_mask(arr, dim, limit):
"""helper function to determine values that can be filled when limit is not
None"""
kw = {dim: limit + 1}
# we explicitly use construct method to avoid copy.
new_dim = utils.get_temp_dimname(arr.dims, "_window")
return (
arr.isnull()
.rolling(min_periods=1, **kw)
.construct(new_dim, fill_value=False)
.sum(new_dim, skipna=False)
) <= limit
def _localize(var, indexes_coords):
"""Speed up for linear and nearest neighbor method.
Only consider a subspace that is needed for the interpolation
"""
indexes = {}
for dim, [x, new_x] in indexes_coords.items():
minval = np.nanmin(new_x.values)
maxval = np.nanmax(new_x.values)
index = x.to_index()
imin = index.get_loc(minval, method="nearest")
imax = index.get_loc(maxval, method="nearest")
indexes[dim] = slice(max(imin - 2, 0), imax + 2)
indexes_coords[dim] = (x[indexes[dim]], new_x)
return var.isel(**indexes), indexes_coords
def _floatize_x(x, new_x):
"""Make x and new_x float.
This is particulary useful for datetime dtype.
x, new_x: tuple of np.ndarray
"""
x = list(x)
new_x = list(new_x)
for i in range(len(x)):
if _contains_datetime_like_objects(x[i]):
# Scipy casts coordinates to np.float64, which is not accurate
# enough for datetime64 (uses 64bit integer).
# We assume that the most of the bits are used to represent the
# offset (min(x)) and the variation (x - min(x)) can be
# represented by float.
xmin = x[i].values.min()
x[i] = x[i]._to_numeric(offset=xmin, dtype=np.float64)
new_x[i] = new_x[i]._to_numeric(offset=xmin, dtype=np.float64)
return x, new_x
def interp(var, indexes_coords, method, **kwargs):
"""Make an interpolation of Variable
Parameters
----------
var : Variable
indexes_coords
Mapping from dimension name to a pair of original and new coordinates.
Original coordinates should be sorted in strictly ascending order.
Note that all the coordinates should be Variable objects.
method : string
One of {'linear', 'nearest', 'zero', 'slinear', 'quadratic',
'cubic'}. For multidimensional interpolation, only
{'linear', 'nearest'} can be used.
**kwargs
keyword arguments to be passed to scipy.interpolate
Returns
-------
Interpolated Variable
See Also
--------
DataArray.interp
Dataset.interp
"""
if not indexes_coords:
return var.copy()
# default behavior
kwargs["bounds_error"] = kwargs.get("bounds_error", False)
result = var
# decompose the interpolation into a succession of independant interpolation
for indexes_coords in decompose_interp(indexes_coords):
var = result
# target dimensions
dims = list(indexes_coords)
x, new_x = zip(*[indexes_coords[d] for d in dims])
destination = broadcast_variables(*new_x)
# transpose to make the interpolated axis to the last position
broadcast_dims = [d for d in var.dims if d not in dims]
original_dims = broadcast_dims + dims
new_dims = broadcast_dims + list(destination[0].dims)
interped = interp_func(
var.transpose(*original_dims).data, x, destination, method, kwargs
)
result = Variable(new_dims, interped, attrs=var.attrs)
# dimension of the output array
out_dims = OrderedSet()
for d in var.dims:
if d in dims:
out_dims.update(indexes_coords[d][1].dims)
else:
out_dims.add(d)
result = result.transpose(*out_dims)
return result
def interp_func(var, x, new_x, method, kwargs):
"""
multi-dimensional interpolation for array-like. Interpolated axes should be
located in the last position.
Parameters
----------
var : np.ndarray or dask.array.Array
Array to be interpolated. The final dimension is interpolated.
x : a list of 1d array.
Original coordinates. Should not contain NaN.
new_x : a list of 1d array
New coordinates. Should not contain NaN.
method : string
{'linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic'} for
1-dimensional interpolation.
{'linear', 'nearest'} for multidimensional interpolation
**kwargs
Optional keyword arguments to be passed to scipy.interpolator
Returns
-------
interpolated: array
Interpolated array
Notes
-----
This requiers scipy installed.
See Also
--------
scipy.interpolate.interp1d
"""
if not x:
return var.copy()
if len(x) == 1:
func, kwargs = _get_interpolator(method, vectorizeable_only=True, **kwargs)
else:
func, kwargs = _get_interpolator_nd(method, **kwargs)
if is_duck_dask_array(var):
import dask.array as da
ndim = var.ndim
nconst = ndim - len(x)
out_ind = list(range(nconst)) + list(range(ndim, ndim + new_x[0].ndim))
# blockwise args format
x_arginds = [[_x, (nconst + index,)] for index, _x in enumerate(x)]
x_arginds = [item for pair in x_arginds for item in pair]
new_x_arginds = [
[_x, [ndim + index for index in range(_x.ndim)]] for _x in new_x
]
new_x_arginds = [item for pair in new_x_arginds for item in pair]
args = (
var,
range(ndim),
*x_arginds,
*new_x_arginds,
)
_, rechunked = da.unify_chunks(*args)
args = tuple([elem for pair in zip(rechunked, args[1::2]) for elem in pair])
new_x = rechunked[1 + (len(rechunked) - 1) // 2 :]
new_axes = {
ndim + i: new_x[0].chunks[i]
if new_x[0].chunks is not None
else new_x[0].shape[i]
for i in range(new_x[0].ndim)
}
# if usefull, re-use localize for each chunk of new_x
localize = (method in ["linear", "nearest"]) and (new_x[0].chunks is not None)
# scipy.interpolate.interp1d always forces to float.
# Use the same check for blockwise as well:
if not issubclass(var.dtype.type, np.inexact):
dtype = np.float_
else:
dtype = var.dtype
if dask_version < Version("2020.12"):
# Using meta and dtype at the same time doesn't work.
# Remove this whenever the minimum requirement for dask is 2020.12:
meta = None
else:
meta = var._meta
return da.blockwise(
_dask_aware_interpnd,
out_ind,
*args,
interp_func=func,
interp_kwargs=kwargs,
localize=localize,
concatenate=True,
dtype=dtype,
new_axes=new_axes,
meta=meta,
align_arrays=False,
)
return _interpnd(var, x, new_x, func, kwargs)
def _interp1d(var, x, new_x, func, kwargs):
# x, new_x are tuples of size 1.
x, new_x = x[0], new_x[0]
rslt = func(x, var, assume_sorted=True, **kwargs)(np.ravel(new_x))
if new_x.ndim > 1:
return rslt.reshape(var.shape[:-1] + new_x.shape)
if new_x.ndim == 0:
return rslt[..., -1]
return rslt
def _interpnd(var, x, new_x, func, kwargs):
x, new_x = _floatize_x(x, new_x)
if len(x) == 1:
return _interp1d(var, x, new_x, func, kwargs)
# move the interpolation axes to the start position
var = var.transpose(range(-len(x), var.ndim - len(x)))
# stack new_x to 1 vector, with reshape
xi = np.stack([x1.values.ravel() for x1 in new_x], axis=-1)
rslt = func(x, var, xi, **kwargs)
# move back the interpolation axes to the last position
rslt = rslt.transpose(range(-rslt.ndim + 1, 1))
return rslt.reshape(rslt.shape[:-1] + new_x[0].shape)
def _dask_aware_interpnd(var, *coords, interp_func, interp_kwargs, localize=True):
"""Wrapper for `_interpnd` through `blockwise`
The first half arrays in `coords` are original coordinates,
the other half are destination coordinates
"""
n_x = len(coords) // 2
nconst = len(var.shape) - n_x
# _interpnd expect coords to be Variables
x = [Variable([f"dim_{nconst + dim}"], _x) for dim, _x in enumerate(coords[:n_x])]
new_x = [
Variable([f"dim_{len(var.shape) + dim}" for dim in range(len(_x.shape))], _x)
for _x in coords[n_x:]
]
if localize:
# _localize expect var to be a Variable
var = Variable([f"dim_{dim}" for dim in range(len(var.shape))], var)
indexes_coords = {_x.dims[0]: (_x, _new_x) for _x, _new_x in zip(x, new_x)}
# simple speed up for the local interpolation
var, indexes_coords = _localize(var, indexes_coords)
x, new_x = zip(*[indexes_coords[d] for d in indexes_coords])
# put var back as a ndarray
var = var.data
return _interpnd(var, x, new_x, interp_func, interp_kwargs)
def decompose_interp(indexes_coords):
    """Split the interpolation into a sequence of independent steps, keeping order.

    Yields sub-mappings of ``indexes_coords``; the destination dimensions of
    each yielded group are disjoint from those of every later group, so each
    group can be interpolated on its own.
    """
    entries = list(indexes_coords.items())
    # Destination dimensions contributed by each entry.  A 0-d destination
    # coordinate keeps the source dimension name.
    dest_dims = [
        list(new_x.dims) if new_x.ndim > 0 else [dim]
        for dim, (_, new_x) in entries
    ]
    group = {}
    group_dims = set()
    for pos, (dim, pair) in enumerate(entries):
        group[dim] = pair
        if pos == len(entries) - 1:
            break
        group_dims.update(dest_dims[pos])
        remaining = {d for dims in dest_dims[pos + 1:] for d in dims}
        if group_dims.isdisjoint(remaining):
            # No later entry touches this group's dims: it is independent.
            yield group
            group = {}
            group_dims = set()
    yield group
| 30.660839
| 114
| 0.61592
|
acfbb05b78af07ad6ab9d7c93add4af5267db75c
| 5,495
|
py
|
Python
|
model_zoo/official/gnn/gat/train.py
|
i4oolish/mindspore
|
dac3be31d0f2c0a3516200f47af30980e566601b
|
[
"Apache-2.0"
] | 2
|
2020-08-12T16:14:40.000Z
|
2020-12-04T03:05:57.000Z
|
model_zoo/official/gnn/gat/train.py
|
dilingsong/mindspore
|
4276050f2494cfbf8682560a1647576f859991e8
|
[
"Apache-2.0"
] | null | null | null |
model_zoo/official/gnn/gat/train.py
|
dilingsong/mindspore
|
4276050f2494cfbf8682560a1647576f859991e8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Test train gat"""
import argparse
import os
import numpy as np
import mindspore.context as context
from mindspore.train.serialization import _exec_save_checkpoint, load_checkpoint
from src.config import GatConfig
from src.dataset import load_and_process
from src.gat import GAT
from src.utils import LossAccuracyWrapper, TrainGAT
def train():
    """Train a GAT model on a citation dataset, with early stopping.

    Parses CLI flags (--data_dir, --train_nodes_num, --eval_nodes_num,
    --test_nodes_num), trains in MindSpore graph mode on Ascend,
    checkpoints the best model to ckpts/gat.ckpt, then reloads that
    checkpoint and prints test loss/accuracy.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', type=str, default='./data/cora/cora_mr', help='Data dir')
    parser.add_argument('--train_nodes_num', type=int, default=140, help='Nodes numbers for training')
    parser.add_argument('--eval_nodes_num', type=int, default=500, help='Nodes numbers for evaluation')
    parser.add_argument('--test_nodes_num', type=int, default=1000, help='Nodes numbers for test')
    args = parser.parse_args()
    if not os.path.exists("ckpts"):
        os.mkdir("ckpts")
    context.set_context(mode=context.GRAPH_MODE,
                        device_target="Ascend",
                        save_graphs=False)
    # train parameters
    hid_units = GatConfig.hid_units
    n_heads = GatConfig.n_heads
    early_stopping = GatConfig.early_stopping
    lr = GatConfig.lr
    l2_coeff = GatConfig.l2_coeff
    num_epochs = GatConfig.num_epochs
    feature, biases, y_train, train_mask, y_val, eval_mask, y_test, test_mask = load_and_process(args.data_dir,
                                                                                                 args.train_nodes_num,
                                                                                                 args.eval_nodes_num,
                                                                                                 args.test_nodes_num)
    # Shapes assumed: feature is (1, nodes, feats), labels are (1, nodes, classes).
    feature_size = feature.shape[2]
    num_nodes = feature.shape[1]
    num_class = y_train.shape[2]
    gat_net = GAT(feature,
                  biases,
                  feature_size,
                  num_class,
                  num_nodes,
                  hid_units,
                  n_heads,
                  attn_drop=GatConfig.attn_dropout,
                  ftr_drop=GatConfig.feature_dropout)
    gat_net.add_flags_recursive(fp16=True)
    eval_net = LossAccuracyWrapper(gat_net,
                                   num_class,
                                   y_val,
                                   eval_mask,
                                   l2_coeff)
    train_net = TrainGAT(gat_net,
                         num_class,
                         y_train,
                         train_mask,
                         lr,
                         l2_coeff)
    train_net.set_train(True)
    val_acc_max = 0.0
    val_loss_min = np.inf
    for _epoch in range(num_epochs):
        train_result = train_net()
        train_loss = train_result[0].asnumpy()
        train_acc = train_result[1].asnumpy()
        eval_result = eval_net()
        eval_loss = eval_result[0].asnumpy()
        eval_acc = eval_result[1].asnumpy()
        print("Epoch:{}, train loss={:.5f}, train acc={:.5f} | val loss={:.5f}, val acc={:.5f}".format(
            _epoch, train_loss, train_acc, eval_loss, eval_acc))
        if eval_acc >= val_acc_max or eval_loss < val_loss_min:
            # Checkpoint only when BOTH metrics improve.
            if eval_acc >= val_acc_max and eval_loss < val_loss_min:
                val_acc_model = eval_acc
                val_loss_model = eval_loss
                if os.path.exists("ckpts/gat.ckpt"):
                    os.remove("ckpts/gat.ckpt")
                # NOTE(review): _exec_save_checkpoint is a private MindSpore
                # API — confirm it still exists in the target version.
                _exec_save_checkpoint(train_net.network, "ckpts/gat.ckpt")
            val_acc_max = np.max((val_acc_max, eval_acc))
            val_loss_min = np.min((val_loss_min, eval_loss))
            # curr_step first assigned here; safe because epoch 0 always
            # satisfies eval_loss < val_loss_min (== inf).
            curr_step = 0
        else:
            curr_step += 1
            if curr_step == early_stopping:
                print("Early Stop Triggered!, Min loss: {}, Max accuracy: {}".format(val_loss_min, val_acc_max))
                print("Early stop model validation loss: {}, accuracy{}".format(val_loss_model, val_acc_model))
                break
    # Rebuild the network without dropout and restore the best checkpoint.
    gat_net_test = GAT(feature,
                       biases,
                       feature_size,
                       num_class,
                       num_nodes,
                       hid_units,
                       n_heads,
                       attn_drop=0.0,
                       ftr_drop=0.0)
    load_checkpoint("ckpts/gat.ckpt", net=gat_net_test)
    gat_net_test.add_flags_recursive(fp16=True)
    test_net = LossAccuracyWrapper(gat_net_test,
                                   num_class,
                                   y_test,
                                   test_mask,
                                   l2_coeff)
    test_result = test_net()
    print("Test loss={}, test acc={}".format(test_result[0], test_result[1]))
# Entry point: run training when executed as a script.
if __name__ == "__main__":
    train()
| 41.007463
| 118
| 0.55323
|
acfbb215f1c2ca6a2f1b841478dd6f51918e0b90
| 4,143
|
py
|
Python
|
lib/music21/demos/bhadley/mrjobaws/awsutility.py
|
lasconic/randomsheetmusic
|
862e2991b38531d6c2122c809cf6a7745653df57
|
[
"MIT"
] | 2
|
2016-12-29T04:34:53.000Z
|
2017-08-01T20:36:18.000Z
|
lib/music21/demos/bhadley/mrjobaws/awsutility.py
|
lasconic/randomsheetmusic
|
862e2991b38531d6c2122c809cf6a7745653df57
|
[
"MIT"
] | null | null | null |
lib/music21/demos/bhadley/mrjobaws/awsutility.py
|
lasconic/randomsheetmusic
|
862e2991b38531d6c2122c809cf6a7745653df57
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: awsutility.py
# Purpose: methods for use by mrjob to deploy on amazon web services
#
# Authors: Beth Hadley
#
# Copyright: (c) 2011 The music21 Project
# License: LGPL
#-------------------------------------------------------------------------------
import unittest, doctest
import music21
import os
from music21 import *
from music21 import features
from music21.features import jSymbolic
from music21.features import native
from music21 import corpus
from music21 import common
#def generateCompleteCorpusFileList():
# '''
# utility for generating a text file containing all corpus file names
# '''
# def skip(path):
# for skipString in ['.svn','.py','theoryExercises','demos','license.txt']:
# if skipString in path:
# return True
# return False
#
# pathList = []
# i=0
# for path in corpus.getCorePaths():
# if not skip(path):
# pathList.append( path.replace('/home/bhadley/music21Workspace/music21baseubuntu/trunk/music21/corpus/',''))
# i+=1
# print 'Total number of files: ' + str(i)
# #Total number of files: 2203
# outFile = open('corpusPaths.txt','w')
#
# for x in pathList:
# outFile.write("%s\n" % x)
def md5OfCorpusFile(fileDir, scoreNumber=None):
    '''
    returns the md5 hash of the text file contents. the file must be the full
    name of the corpus file

    >>> from music21.demos.bhadley.mrjobaws.awsutility import md5OfCorpusFile
    >>> a = md5OfCorpusFile('bach/bwv431.mxl')
    >>> a
    '3b8c4b8db4288c43efde44ddcdb4d8d2'
    >>> s = corpus.parse('bwv431')
    >>> s.corpusFilepath
    u'bach/bwv431.mxl'
    >>> b = md5OfCorpusFile(s.corpusFilepath)
    >>> b
    '3b8c4b8db4288c43efde44ddcdb4d8d2'
    >>> a == b
    True
    >>> md5OfCorpusFile('airdsAirs/book3.abc','413')
    'c1666c19d63fc0940f111008e2269f75.413'
    '''
    corpusFP = common.getCorpusFilePath()
    # Use a context manager so the handle is always closed — the original
    # left the file open after reading.
    with open(corpusFP + os.sep + fileDir, 'rb') as fileIn:
        md5 = common.getMd5(fileIn.read())
    if scoreNumber:
        # Tag the hash with the score number within an opus file.
        return md5 + '.' + scoreNumber
    return md5
def unbundleOpus(opusStream):
    '''
    Unbundle the opusStream into separate scores.

    Returns a list of (score, tag) tuples where tag is the md5 hash of the
    whole opus file, a '.', and the score's number within the opus, e.g.
    (<music21.stream.Score ...>, '1ae57f04a11981d502dc93e230f3466b.16').
    '''
    corpusFilepath = opusStream.corpusFilepath
    md5hash = md5OfCorpusFile(corpusFilepath)
    scores = []
    for num in opusStream.getNumbers():
        score = opusStream.getScoreByNumber(num)
        # Tag each extracted score with the opus's corpus path.
        corpus.base._addCorpusFilepath(score, corpusFilepath)
        scores.append((score, md5hash + '.' + num))
    return scores
def getStreamAndmd5(corpusFilepath):
    '''
    Return a list of (stream, md5hash) pairs for the given corpus file.

    Usually the list holds a single tuple, but an opus file is split into
    one tuple per contained score via unbundleOpus, with the score number
    appended to the hash.
    '''
    parsed = corpus.parse(corpusFilepath)
    if parsed.isClassOrSubclass(['Opus']):
        return unbundleOpus(parsed)
    return [(parsed, md5OfCorpusFile(corpusFilepath))]
class Test(unittest.TestCase):
    """Placeholder test case so music21.mainTest can collect this module."""

    def runTest(self):
        pass
# Run this module's tests/doctests when executed directly.
if __name__ == "__main__":
    music21.mainTest(Test)
| 32.367188
| 144
| 0.629254
|
acfbb2adccdae90d07544352bc888092d34483f5
| 14,129
|
py
|
Python
|
bin/ta_quolab/api.py
|
quolab/splunk-add-on
|
dc1120053180f1ac16152563a9bcb0c3daa8f808
|
[
"Apache-2.0"
] | 1
|
2021-03-09T20:24:06.000Z
|
2021-03-09T20:24:06.000Z
|
bin/ta_quolab/api.py
|
quolab/splunk-add-on
|
dc1120053180f1ac16152563a9bcb0c3daa8f808
|
[
"Apache-2.0"
] | 8
|
2021-06-11T19:45:02.000Z
|
2021-08-03T22:36:24.000Z
|
bin/ta_quolab/api.py
|
quolab/splunk-add-on
|
dc1120053180f1ac16152563a9bcb0c3daa8f808
|
[
"Apache-2.0"
] | null | null | null |
""" QuoLab Add on for Splunk share code for QuoLab API access
"""
import json
import re
import ssl
import time
from logging import getLogger
from threading import Event, Thread
import requests
from cypresspoint.spath import splunk_dot_notation
from requests.auth import AuthBase, HTTPBasicAuth
from requests.utils import default_user_agent
try:
from time import monotonic
except ImportError:
# Good-enough fallback for PY2 users
from time import time as monotonic
from . import __version__
logger = getLogger("quolab.common")
""" http debug logging
import logging
from http.client import HTTPConnection # py3
log = logging.getLogger('urllib3')
log.setLevel(logging.DEBUG)
"""
class QuolabAuth(AuthBase):
    """requests auth hook that attaches a QuoLab "Quoken" token header."""

    def __init__(self, token):
        self._token = token

    def __call__(self, request):
        # Mutate the outgoing request in place and hand it back, per the
        # requests AuthBase contract.
        header_value = "Quoken {}".format(self._token)
        request.headers['Authorization'] = header_value
        return request
class QuoLabAPI(object):
    """Thin client for the QuoLab REST and websocket APIs.

    Credentials are set with :meth:`login` (HTTP basic) or
    :meth:`login_token` ("Quoken" token); each request builds its auth
    handler via :meth:`get_auth`.
    """

    base_headers = {
        "Accept": "application/json",
    }

    def __init__(self, url, verify=None):
        """
        :param url: base URL of the QuoLab server (no trailing slash).
        :param verify: passed to requests; ``False`` disables TLS validation.
        """
        self.session = requests.Session()
        self.token = None
        self.username = None
        self.password = None
        self.url = url
        self.verify = verify
        if verify is False:
            import urllib3
            urllib3.disable_warnings()
            logger.info("SSL Certificate validation has been disabled.")

    def login(self, username, password):
        """Use HTTP basic authentication for subsequent requests."""
        self.username = username
        self.password = password
        self.token = None

    def login_token(self, token):
        """Use token ("Quoken") authentication for subsequent requests."""
        self.token = token
        self.username = None
        self.password = None

    def get_auth(self):
        """Return the requests auth handler matching the configured credentials."""
        if self.token:
            auth = QuolabAuth(self.token)
        else:
            auth = HTTPBasicAuth(self.username, self.password)
        return auth

    # Facets requested from timeline endpoints unless the caller overrides.
    timeline_default_facets = ("display",)

    def get_timeline_events(self, timeline_id, facets=None):
        """ Call /v1/timeline/<timeline_id>/event to return events within the timeline's buffer. """
        # https://node77.cloud.quolab.com/v1/timeline/51942b79b8b34827bf721077fa22a590/event?facets.display=1
        url = "{}/v1/timeline/{}/event".format(self.url, timeline_id)
        if facets is None:
            facets = self.timeline_default_facets
        headers = {
            'content-type': "application/json",
            'user-agent': "ta-quolab/{} {}".format(__version__, default_user_agent())
        }
        data = {}
        for facet in facets:
            data["facets.{}".format(facet)] = 1
        auth = self.get_auth()
        try:
            response = self.session.request(
                "GET", url,
                data=data,
                headers=headers,
                auth=auth,
                verify=self.verify)
        except requests.ConnectionError as e:
            logger.error("QuoLab API failed due to %s", e)
            # Bug fix: 'response' is unbound when the request raised; the
            # original fell through to raise_for_status() and died with a
            # NameError.  Re-raise so callers see the real failure.
            raise
        response.raise_for_status()
        try:
            data = response.json()
        except ValueError as e:
            raw = response.content
            content_length = len(raw)
            logger.error("QuoLab API response could not be parsed. url=%s content-length=%d %s",
                         url, content_length, e)
            logger.error("QuoLab API response could not be parsed. url=%s Sample:\n%s\n...\n%s",
                         url, raw[:200], raw[-200:])
            logger.debug("QuoLab API raw response: url=%s \n%s", url, raw)
            raise
        # debugging
        logger.info("QuoLab API response was parsed as json successfully!")
        # NOTE(review): assert is stripped under -O; callers currently rely on
        # AssertionError so the exception type is kept unchanged here.
        assert data["status"] == "OK"
        for record in data.get("records", []):
            yield record

    def subscribe_timeline(self, recv_message_callback, oob_callback, timeline_id, facets=None):
        """Open a websocket bound to ``timeline_id`` and return the wrapper.

        ``recv_message_callback`` receives event payloads;
        ``oob_callback`` receives out-of-band notices ("bound", "error",
        "close").  Hard-exits the process if setup takes too long.
        """
        if facets is None:
            facets = self.timeline_default_facets
        qws = QuoLabWebSocket(self.url, self.get_auth(), timeline_id,
                              recv_message_callback, oob_callback, self.verify, facets=facets)
        # Run server_forever() in it's own thread, so we can return to the caller
        # thread.start_new_thread(qws.connect, ())
        # Python 3 use: Thread(target=qws.connect, daemon=True).start()
        t = Thread(target=qws.connect)
        t.daemon = True
        t.start()
        if not qws.is_setup.wait(15):
            # Bug fix: the original used a "{}" placeholder with logging's
            # lazy %-style arguments, so the URL was never interpolated.
            logger.error("Took too long to setup websocket to %s. Triggering a shutdown", self.url)
            qws.is_done.set()
            time.sleep(120)
            # Forceable shutdown that will kill all threads
            logger.error("Forcing a hard shutdown!")
            # Give log time to write
            time.sleep(3)
            import os
            os._exit(3)
            # XXX: Trigger a clean shutdown
            raise SystemExit(3)
        return qws

    def query_catalog(self, query, query_limit, timeout=30, fetch_count=1000, write_error=None):
        """ Handle the query to QuoLab API that drives this SPL command

        Yields result dicts (dot-notation flattened, with _raw/_time added),
        following "ellipsis" continuation tokens until query_limit results
        have been produced or the time budget expires.
        """
        if write_error is None:
            # Default no-op error sink keeps the call sites unconditional.
            def write_error(s, *args, **kwargs): pass

        # XXX: COMPLETE MIGRATION OF THIS METHOD!!
        # XXX: REPLACE THIS CODE IN quolab_query.py

        # CATALOG QUERY
        session = self.session
        start = monotonic()
        # Allow total run time to be 10x the individual query limit
        expire = start + (timeout * 10)
        url = "{}/v1/catalog/query".format(self.url)
        headers = {
            'content-type': "application/json",
            'user-agent': "ta-quolab/{} {}".format(__version__, default_user_agent())
        }
        # XXX: Per query limit optimization?  Maybe based on type, or number of facets enabled?
        query["limit"] = min(query_limit, fetch_count)
        # Q: What do query results look like when time has been exceeded?  Any special handling required?
        query.setdefault("hints", {})["timeout"] = timeout
        i = http_calls = 0
        auth = self.get_auth()
        while True:
            data = json.dumps(query)
            logger.debug("Sending query to API:  %r  headers=%r auth=%s",
                         data, headers, auth.__class__.__name__)
            try:
                response = session.request(
                    "POST", url,
                    data=data,
                    headers=headers,
                    auth=auth,
                    verify=self.verify)
                http_calls += 1
            except requests.ConnectionError as e:
                logger.error("QuoLab API failed due to %s", e)
                write_error("QuoLab server connection failed to {}", url)
                return
            if response.status_code >= 400 and response.status_code < 500:
                body = response.json()
                if "status" in body or "message" in body:
                    status = body.get("status", response.status_code)
                    message = body.get("message", "")
                    logger.error("QuoLab API returned unexpected status response from query.  "
                                 "status=%r message=%r query=%r", status, message, query)
                    write_error("QuoLab query failed:  {} ({})", message, status)
                    return
            # When non-success status code without a message/status, then just raise an exception.
            try:
                response.raise_for_status()
                body = response.json()
            except Exception as e:
                logger.debug("Body response for %s:   %s", e, response.text)
                raise
            logger.debug("Response body:   %s", body)
            records = body["records"]
            for record in records:
                result = (splunk_dot_notation(record))
                result["_raw"] = json.dumps(record)
                # Q: Are there ever fields that should be returned as _time instead of system clock time?
                result["_time"] = time.time()
                yield result
                i += 1
                if i >= query_limit:
                    break
            if monotonic() > expire:
                logger.warning("Aborting query due to time expiration")
                break
            if i >= query_limit:
                break
            # "ellipsis" is the server's continuation token for the next page.
            ellipsis = body.get("ellipsis", None)
            if ellipsis:
                logger.debug("Query next batch.  i=%d, query_limit=%d, limit=%d, ellipsis=%s",
                             i, query_limit, query["limit"], ellipsis)
                query["resume"] = ellipsis
            else:
                break
        logger.info("Query/return efficiency: http_calls=%d, query_limit=%d, per_post_limit=%d duration=%0.3f",
                    http_calls, query_limit, query["limit"], monotonic()-start)
class QuoLabWebSocket(object):
    """Websocket wrapper that binds to a QuoLab timeline's activity stream.

    ``message_callback`` receives event payloads; ``oob_callback`` receives
    out-of-band notices ("bound", "error", "close").  ``is_setup`` is set
    once the bind request has been sent; ``is_done`` once the socket errors
    or closes.
    """

    def __init__(self, url, auth, timeline, message_callback, oob_callback, verify, facets=()):
        self.url = url
        self.auth = auth
        self.timeline = timeline
        self.message_callback = message_callback
        self.oob_callback = oob_callback
        self.verify = verify
        self.facets = facets
        self.is_done = Event()
        self.is_setup = Event()

    @staticmethod
    def _convert_request_auth_headers(auth):
        """Run a requests auth handler against a dummy object to capture the
        headers it would add, so they can be passed to the websocket."""
        class C(object):
            pass
        o = C()
        o.headers = {}
        auth(o)
        return o.headers

    def connect(self):
        """Open the websocket and block in its event loop (run in a thread)."""
        import websocket
        auth_header = self._convert_request_auth_headers(self.auth)
        # http(s):// -> ws(s)://
        ws_url = "{}/v1/socket".format(re.sub('^http', 'ws', self.url))
        logger.info("WEB socket URL = %s", ws_url)
        ws = websocket.WebSocketApp(ws_url,
                                    header=auth_header,
                                    on_message=self.on_message,
                                    on_error=self.on_error,
                                    on_open=self.on_open,
                                    on_close=self.on_close)
        kw = {}
        if self.verify is False:
            kw["sslopt"] = {"cert_reqs": ssl.CERT_NONE}
        # Set ping_interval to cause enable_multithread=True in WebSocket() constructor
        ws.run_forever(ping_interval=30, ping_timeout=10, **kw)

    def on_message(self, ws, msg):
        """Dispatch an incoming frame by its "name" field."""
        j = json.loads(msg)
        msg_formatted = json.dumps(j, indent=4)
        # XXX: Remove the following debug message after initial development
        logger.debug('[Websocket Message]\n%s', msg_formatted)
        event_name = j.get('name')
        if event_name == "event":
            self.message_callback(j)
            return
        if event_name == "bound":
            logger.info("Websocket bound to %s", j["cid"])
            try:
                self.oob_callback("bound", j)
            except Exception:
                logger.exception("Failure during callback!  callback=%r", self.oob_callback)
        else:
            logger.info("Unknown '%s', message not ingested:  %s", event_name, msg_formatted)

    def on_error(self, ws, err):
        logger.error("[Websocket Error] %s", err)
        # Q:  Does this always/automatically trigger a shutdown?  Should it?
        try:
            self.oob_callback("error", err)
        # Bug fix: bare 'except:' also swallowed SystemExit/KeyboardInterrupt.
        except Exception:
            logger.exception("Callback error during on_error handling")
        finally:
            self.is_done.set()

    def _build_bind_request(self):
        """Build the "bind" frame that attaches this socket to the timeline."""
        doc = {
            "attach": {
                "ns": "activity-stream",
                "name": "event",
                "cid": self.timeline,
            },
            "body": {
                "composition": {
                    "catalog": {
                        "facets": {facet: True for facet in self.facets},
                        "object": "object"
                    }
                }
            },
            "cid": "activity-stream-event-{}".format(self.timeline),
            "name": "bind",
            "ns": "link/binding"
        }
        return doc

    def on_open(self, ws):
        logger.debug("[Websocket Open].  Request timeline=%s", self.timeline)
        def run(*args):
            req = self._build_bind_request()   # XXX: Support facets here!
            logger.debug("[Websocket Open:run()] Request payload:  %s", req)
            ws.send(json.dumps(req))
            self.is_setup.set()
        import _thread as thread
        thread.start_new_thread(run, ())
        # Thread(target=run).start()

    def on_close(self, ws):
        ws.close()
        logger.info('[Websocket Closed]')
        try:
            self.oob_callback("close")
        # Bug fix: bare 'except:' also swallowed SystemExit/KeyboardInterrupt.
        except Exception:
            logger.exception("Callback error during on_error handling")
        finally:
            self.is_done.set()
| 35.589421
| 111
| 0.552481
|
acfbb312e4eae2794f2e2f3d8f930acfa3fe1cd1
| 824
|
py
|
Python
|
fieldWidgetClassworkProject/fieldWidgetClassworkApp/migrations/0001_initial.py
|
cs-fullstack-2019-spring/django-fields-widgets-cw-bettyjware11
|
26ff2e3da84d618f30d27e4cf145d287f59ad48f
|
[
"Apache-2.0"
] | null | null | null |
fieldWidgetClassworkProject/fieldWidgetClassworkApp/migrations/0001_initial.py
|
cs-fullstack-2019-spring/django-fields-widgets-cw-bettyjware11
|
26ff2e3da84d618f30d27e4cf145d287f59ad48f
|
[
"Apache-2.0"
] | null | null | null |
fieldWidgetClassworkProject/fieldWidgetClassworkApp/migrations/0001_initial.py
|
cs-fullstack-2019-spring/django-fields-widgets-cw-bettyjware11
|
26ff2e3da84d618f30d27e4cf145d287f59ad48f
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.0.6 on 2019-03-07 17:53
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='SuperHero',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default=0, max_length=200)),
('cityOfOrigin', models.CharField(default=0, max_length=200)),
('booleanItem', models.BooleanField()),
('radio', models.CharField(max_length=200)),
('dropDown', models.CharField(max_length=200)),
('checkbox', models.CharField(max_length=200)),
],
),
]
| 30.518519
| 114
| 0.572816
|
acfbb5070881cf0e75800731518910b1185a5410
| 2,116
|
py
|
Python
|
catalyst/callbacks/tests/test_optimizer_callback.py
|
elephantmipt/catalyst
|
6c706e4859ed7c58e5e6a5b7634176bffd0e2465
|
[
"Apache-2.0"
] | 2
|
2019-04-19T21:34:31.000Z
|
2019-05-02T22:50:25.000Z
|
catalyst/callbacks/tests/test_optimizer_callback.py
|
elephantmipt/catalyst
|
6c706e4859ed7c58e5e6a5b7634176bffd0e2465
|
[
"Apache-2.0"
] | null | null | null |
catalyst/callbacks/tests/test_optimizer_callback.py
|
elephantmipt/catalyst
|
6c706e4859ed7c58e5e6a5b7634176bffd0e2465
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
import random
import torch
import torch.nn as nn
from catalyst.callbacks import OptimizerCallback
class DummyRunner:
    """Minimal stand-in for a catalyst runner.

    Exposes only what OptimizerCallback reads: batch metrics, the
    train-loader flag, the optimizer, and a device.
    """

    def __init__(
        self, loss_value: torch.tensor, optimizer: torch.optim.Optimizer
    ):
        self.optimizer = optimizer
        self.device = torch.device("cpu")
        self.is_train_loader = True
        self.batch_metrics = {"loss": loss_value}

    def get_attr(self, key, *args, **kwargs):
        # Callback API compatibility: extra args are accepted but ignored.
        return getattr(self, key)
def _check_optimizer_callback(use_fast_zero_grad):
    """Run two optimization steps through OptimizerCallback and assert the
    loss decreases.  Shared body for the fast/standard zero_grad variants
    (the two original tests were identical except for this flag)."""
    model = nn.Linear(10, 2)
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    criterion = nn.BCEWithLogitsLoss()

    batch_size = 3
    inp = torch.randn(batch_size, 10)
    target = torch.FloatTensor(batch_size, 2).uniform_()

    callback = OptimizerCallback(
        metric_key="loss", use_fast_zero_grad=use_fast_zero_grad
    )

    # First step.
    loss1 = criterion(model(inp), target)
    loss1_value = loss1.detach().item()
    runner = DummyRunner(loss1, optimizer)
    callback.on_stage_start(runner)
    callback.on_epoch_start(runner)
    callback.on_batch_end(runner)

    # Second step on the same batch must yield a lower loss.
    loss2 = criterion(model(inp), target)
    loss2_value = loss2.detach().item()
    runner.batch_metrics = {"loss": loss2}
    callback.on_epoch_start(runner)
    callback.on_batch_end(runner)
    assert loss1_value > loss2_value


def test_zero_grad():
    """OptimizerCallback with the standard zero_grad implementation."""
    _check_optimizer_callback(use_fast_zero_grad=False)


def test_fast_zero_grad():
    """OptimizerCallback with the fast (manual) zero_grad implementation."""
    _check_optimizer_callback(use_fast_zero_grad=True)
| 26.123457
| 77
| 0.696597
|
acfbb60487d6a411f2440a95c93647bb028c3b53
| 6,346
|
py
|
Python
|
v1.0.0.test/toontown/safezone/DistributedTreasure.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-01T15:46:43.000Z
|
2021-07-23T16:26:48.000Z
|
v1.0.0.test/toontown/safezone/DistributedTreasure.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 1
|
2019-06-29T03:40:05.000Z
|
2021-06-13T01:15:16.000Z
|
v1.0.0.test/toontown/safezone/DistributedTreasure.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-28T21:18:46.000Z
|
2021-02-25T06:37:25.000Z
|
from panda3d.core import *
from panda3d.direct import *
from direct.interval.IntervalGlobal import *
from toontown.toonbase.ToontownGlobals import *
from direct.distributed import DistributedObject
from direct.directnotify import DirectNotifyGlobal
class DistributedTreasure(DistributedObject.DistributedObject):
    """Client-side treasure pickup: shows a model, detects the local toon
    touching its collision sphere, and plays a grab/reject animation when
    the server confirms or denies the pickup."""
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedTreasure')
    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, cr)
        # Avatar currently grabbing the treasure (set in handleGrab).
        self.av = None
        # Interval for the fly-to-avatar / reject-flash animation.
        self.treasureFlyTrack = None
        # Subclasses/server configure these before generate.
        self.modelPath = None
        self.nodePath = None
        self.dropShadow = None
        self.modelFindString = None
        self.grabSoundPath = None
        self.rejectSoundPath = 'phase_4/audio/sfx/ring_miss.ogg'
        self.playSoundForRemoteToons = 1
        self.scale = 1.0
        self.shadow = 1
        self.fly = 1
        self.zOffset = 0.0
        self.billboard = 0
        return
    def disable(self):
        # Stop listening and pull the node out of the scene graph.
        self.ignoreAll()
        self.nodePath.detachNode()
        DistributedObject.DistributedObject.disable(self)
    def delete(self):
        # Finish any running animation before the node is destroyed.
        if self.treasureFlyTrack:
            self.treasureFlyTrack.finish()
            self.treasureFlyTrack = None
        DistributedObject.DistributedObject.delete(self)
        self.nodePath.removeNode()
        return
    def announceGenerate(self):
        """Build visuals and start listening for collisions once generated."""
        DistributedObject.DistributedObject.announceGenerate(self)
        self.loadModel(self.modelPath, self.modelFindString)
        self.startAnimation()
        self.nodePath.wrtReparentTo(render)
        self.accept(self.uniqueName('entertreasureSphere'), self.handleEnterSphere)
    def handleEnterSphere(self, collEntry=None):
        # Local toon touched the sphere; non-flying treasures grab
        # immediately, otherwise wait for the server's setGrab.
        localAvId = base.localAvatar.getDoId()
        if not self.fly:
            self.handleGrab(localAvId)
        self.d_requestGrab()
    def d_requestGrab(self):
        # Ask the AI server for permission to take this treasure.
        self.sendUpdate('requestGrab', [])
    def getSphereRadius(self):
        # Pickup trigger radius; subclasses may override.
        return 2.0
    def loadModel(self, modelPath, modelFindString=None):
        """Load sounds and instance the treasure model under self.treasure."""
        self.grabSound = base.loader.loadSfx(self.grabSoundPath)
        self.rejectSound = base.loader.loadSfx(self.rejectSoundPath)
        if self.nodePath == None:
            self.makeNodePath()
        else:
            # Re-loading: drop the previous model instance first.
            self.treasure.getChildren().detach()
        model = loader.loadModel(modelPath)
        if modelFindString != None:
            model = model.find('**/' + modelFindString)
        model.instanceTo(self.treasure)
        return
    def makeNodePath(self):
        """Create the root node, optional drop shadow, and collision sphere."""
        self.nodePath = NodePath(self.uniqueName('treasure'))
        if self.billboard:
            self.nodePath.setBillboardPointEye()
        self.nodePath.setScale(0.9 * self.scale)
        self.treasure = self.nodePath.attachNewNode('treasure')
        if self.shadow:
            if not self.dropShadow:
                self.dropShadow = loader.loadModel('phase_3/models/props/drop_shadow')
                self.dropShadow.setColor(0, 0, 0, 0.5)
                self.dropShadow.setPos(0, 0, 0.025)
                self.dropShadow.setScale(0.4 * self.scale)
                self.dropShadow.flattenLight()
            self.dropShadow.reparentTo(self.nodePath)
        # Intangible sphere that only reports enter events.
        collSphere = CollisionSphere(0, 0, 0, self.getSphereRadius())
        collSphere.setTangible(0)
        collNode = CollisionNode(self.uniqueName('treasureSphere'))
        collNode.setIntoCollideMask(WallBitmask)
        collNode.addSolid(collSphere)
        self.collNodePath = self.nodePath.attachNewNode(collNode)
        # Stashed until setPosition places the treasure in the world.
        self.collNodePath.stash()
    def getParentNodePath(self):
        return render
    def setPosition(self, x, y, z):
        # Distributed field: place the treasure and arm its collision sphere.
        if not self.nodePath:
            self.makeNodePath()
        self.nodePath.reparentTo(self.getParentNodePath())
        self.nodePath.setPos(x, y, z + self.zOffset)
        self.collNodePath.unstash()
    def setGrab(self, avId):
        # Distributed field: server confirmed avId grabbed the treasure.
        if avId == 0:
            return
        # Non-flying treasures already handled the local grab in
        # handleEnterSphere, so skip the duplicate for the local toon.
        if self.fly or avId != base.localAvatar.getDoId():
            self.handleGrab(avId)
    def setReject(self):
        # Distributed field: server denied the grab; flash the treasure.
        if self.treasureFlyTrack:
            self.treasureFlyTrack.finish()
            self.treasureFlyTrack = None
        base.playSfx(self.rejectSound, node=self.nodePath)
        self.treasureFlyTrack = Sequence(LerpColorScaleInterval(self.nodePath, 0.8, colorScale=VBase4(0, 0, 0, 0), startColorScale=VBase4(1, 1, 1, 1), blendType='easeIn'), LerpColorScaleInterval(self.nodePath, 0.2, colorScale=VBase4(1, 1, 1, 1), startColorScale=VBase4(0, 0, 0, 0), blendType='easeOut'), name=self.uniqueName('treasureFlyTrack'))
        self.treasureFlyTrack.start()
        return
    def handleGrab(self, avId):
        """Play the pickup: disable collisions, then fly up over the avatar."""
        self.collNodePath.stash()
        self.avId = avId
        if avId in self.cr.doId2do:
            av = self.cr.doId2do[avId]
            self.av = av
        else:
            # Grabbing avatar is unknown locally; just hide the treasure.
            self.nodePath.detachNode()
            return
        if self.playSoundForRemoteToons or self.avId == base.localAvatar.getDoId():
            base.playSfx(self.grabSound, node=self.nodePath)
        if not self.fly:
            self.nodePath.detachNode()
            return
        self.nodePath.wrtReparentTo(av)
        if self.treasureFlyTrack:
            self.treasureFlyTrack.finish()
            self.treasureFlyTrack = None
        # Clean up mid-animation if the avatar leaves the shard.
        avatarGoneName = self.av.uniqueName('disable')
        self.accept(avatarGoneName, self.handleUnexpectedExit)
        flytime = 1.0
        track = Sequence(LerpPosInterval(self.nodePath, flytime, pos=Point3(0, 0, 3), startPos=self.nodePath.getPos(), blendType='easeInOut'), Func(self.nodePath.detachNode), Func(self.ignore, avatarGoneName))
        if self.shadow:
            self.treasureFlyTrack = Sequence(HideInterval(self.dropShadow), track, ShowInterval(self.dropShadow), name=self.uniqueName('treasureFlyTrack'))
        else:
            self.treasureFlyTrack = Sequence(track, name=self.uniqueName('treasureFlyTrack'))
        self.treasureFlyTrack.start()
        return
    def handleUnexpectedExit(self):
        self.notify.warning('While getting treasure, ' + str(self.avId) + ' disconnected.')
        if self.treasureFlyTrack:
            self.treasureFlyTrack.finish()
            self.treasureFlyTrack = None
        return
    def getStareAtNodeAndOffset(self):
        # Node (and offset) a toon's head should track when eyeing this treasure.
        return (
            self.nodePath, Point3())
    def startAnimation(self):
        # Hook for subclasses (e.g. spinning treasures); base does nothing.
        pass
| 39.17284
| 345
| 0.651434
|
acfbb825f955930ce8414845ea5eb67fe9af6282
| 9,723
|
py
|
Python
|
venv/lib/python3.5/site-packages/yapf/yapflib/yapf_api.py
|
prashant0598/CoffeeApp
|
4fa006aebf06e12ed34766450ddcfa548ee63307
|
[
"MIT"
] | null | null | null |
venv/lib/python3.5/site-packages/yapf/yapflib/yapf_api.py
|
prashant0598/CoffeeApp
|
4fa006aebf06e12ed34766450ddcfa548ee63307
|
[
"MIT"
] | null | null | null |
venv/lib/python3.5/site-packages/yapf/yapflib/yapf_api.py
|
prashant0598/CoffeeApp
|
4fa006aebf06e12ed34766450ddcfa548ee63307
|
[
"MIT"
] | null | null | null |
# Copyright 2015-2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Entry points for YAPF.
The main APIs that YAPF exposes to drive the reformatting.
FormatFile(): reformat a file.
FormatCode(): reformat a string of code.
These APIs have some common arguments:
style_config: (string) Either a style name or a path to a file that contains
formatting style settings. If None is specified, use the default style
as set in style.DEFAULT_STYLE_FACTORY
lines: (list of tuples of integers) A list of tuples of lines, [start, end],
that we want to format. The lines are 1-based indexed. It can be used by
third-party code (e.g., IDEs) when reformatting a snippet of code rather
than a whole file.
print_diff: (bool) Instead of returning the reformatted source, return a
diff that turns the formatted source into reformatter source.
verify: (bool) True if reformatted code should be verified for syntax.
"""
import difflib
import re
import sys
from lib2to3.pgen2 import tokenize
from yapf.yapflib import blank_line_calculator
from yapf.yapflib import comment_splicer
from yapf.yapflib import continuation_splicer
from yapf.yapflib import file_resources
from yapf.yapflib import py3compat
from yapf.yapflib import pytree_unwrapper
from yapf.yapflib import pytree_utils
from yapf.yapflib import reformatter
from yapf.yapflib import split_penalty
from yapf.yapflib import style
from yapf.yapflib import subtype_assigner
def FormatFile(filename,
               style_config=None,
               lines=None,
               print_diff=False,
               verify=False,
               in_place=False,
               logger=None):
  """Format a single Python file and return the formatted code.

  Arguments:
    filename: (unicode) The file to reformat.
    in_place: (bool) If True, write the reformatted code back to the file.
    logger: (io streamer) A stream to output logging.
    remaining arguments: see comment at the top of this module.

  Returns:
    Tuple of (reformatted_code, encoding, changed). reformatted_code is None if
    the file is successfully written to (having used in_place). reformatted_code
    is a diff if print_diff is True.

  Raises:
    IOError: raised if there was an error reading the file.
    ValueError: raised if in_place and print_diff are both specified.
  """
  _CheckPythonVersion()
  if in_place and print_diff:
    raise ValueError('Cannot pass both in_place and print_diff.')
  original_source, newline, encoding = ReadFile(filename, logger)
  reformatted_source, changed = FormatCode(
      original_source,
      style_config=style_config,
      filename=filename,
      lines=lines,
      print_diff=print_diff,
      verify=verify)
  # Re-apply the file's original line ending (ReadFile normalized it to '\n').
  if reformatted_source.rstrip('\n'):
    lines = reformatted_source.rstrip('\n').split('\n')
    reformatted_source = newline.join(line for line in lines) + newline
  if in_place:
    # Only touch the file when the contents actually changed.
    if original_source and original_source != reformatted_source:
      file_resources.WriteReformattedCode(filename, reformatted_source,
                                          in_place, encoding)
    return None, encoding, changed
  return reformatted_source, encoding, changed
def FormatCode(unformatted_source,
               filename='<unknown>',
               style_config=None,
               lines=None,
               print_diff=False,
               verify=False):
  """Format a string of Python code.

  This provides an alternative entry point to YAPF.

  Arguments:
    unformatted_source: (unicode) The code to format.
    filename: (unicode) The name of the file being reformatted.
    remaining arguments: see comment at the top of this module.

  Returns:
    Tuple of (reformatted_source, changed). reformatted_source conforms to the
    desired formatting style. changed is True if the source changed.
  """
  _CheckPythonVersion()
  style.SetGlobalStyle(style.CreateStyleFromConfig(style_config))
  # The parser requires a trailing newline.
  if not unformatted_source.endswith('\n'):
    unformatted_source += '\n'
  tree = pytree_utils.ParseCodeToTree(unformatted_source)
  # Run passes on the tree, modifying it in place. NOTE: the order of these
  # passes matters; each one annotates the tree for the next.
  comment_splicer.SpliceComments(tree)
  continuation_splicer.SpliceContinuations(tree)
  subtype_assigner.AssignSubtypes(tree)
  split_penalty.ComputeSplitPenalties(tree)
  blank_line_calculator.CalculateBlankLines(tree)
  uwlines = pytree_unwrapper.UnwrapPyTree(tree)
  for uwl in uwlines:
    uwl.CalculateFormattingInformation()
  # Honor --lines ranges and "# yapf: disable" directives.
  _MarkLinesToFormat(uwlines, lines)
  reformatted_source = reformatter.Reformat(uwlines, verify)
  if unformatted_source == reformatted_source:
    return '' if print_diff else reformatted_source, False
  code_diff = _GetUnifiedDiff(
      unformatted_source, reformatted_source, filename=filename)
  if print_diff:
    return code_diff, code_diff != ''
  return reformatted_source, True
def _CheckPythonVersion(): # pragma: no cover
errmsg = 'yapf is only supported for Python 2.7 or 3.4+'
if sys.version_info[0] == 2:
if sys.version_info[1] < 7:
raise RuntimeError(errmsg)
elif sys.version_info[0] == 3:
if sys.version_info[1] < 4:
raise RuntimeError(errmsg)
def ReadFile(filename, logger=None):
  """Read the contents of the file.

  An optional logger can be specified to emit messages to your favorite logging
  stream. If specified, then no exception is raised. This is external so that it
  can be used by third-party applications.

  Arguments:
    filename: (unicode) The name of the file.
    logger: (function) A function or lambda that takes a string and emits it.

  Returns:
    Tuple of (source, line_ending, encoding): the file contents with line
    endings normalized to '\n', the detected original line ending, and the
    detected source encoding.

  Raises:
    IOError: raised if there was an error reading the file.
  """
  try:
    # First pass: sniff the PEP 263 source encoding from the raw bytes.
    with open(filename, 'rb') as fd:
      encoding = tokenize.detect_encoding(fd.readline)[0]
  except IOError as err:
    if logger:
      logger(err)
    raise
  try:
    # Preserves line endings.
    with py3compat.open_with_encoding(
        filename, mode='r', encoding=encoding, newline='') as fd:
      lines = fd.readlines()
    line_ending = file_resources.LineEnding(lines)
    # Normalize to '\n'; the caller re-applies line_ending when writing.
    source = '\n'.join(line.rstrip('\r\n') for line in lines) + '\n'
    return source, line_ending, encoding
  except IOError as err:  # pragma: no cover
    if logger:
      logger(err)
    raise
# Whole-line directive comments that switch formatting off/on. Both are
# matched case-insensitively against a stripped comment line.
DISABLE_PATTERN = r'^#+ +yapf: *disable$'
ENABLE_PATTERN = r'^#+ +yapf: *enable$'
def _MarkLinesToFormat(uwlines, lines):
  """Skip sections of code that we shouldn't reformat.

  Sets each unwrapped line's `disable` flag: when `lines` (a list of 1-based
  (start, end) tuples) is given, everything outside those ranges is disabled;
  afterwards, regions bracketed by "# yapf: disable" / "# yapf: enable"
  comments are disabled as well.
  """
  if lines:
    for uwline in uwlines:
      uwline.disable = True
    # Sort and combine overlapping ranges.
    lines = sorted(lines)
    # NOTE(review): if the first tuple is empty, line_ranges starts empty and
    # line_ranges[-1] below would raise IndexError — confirm callers never
    # pass empty tuples.
    line_ranges = [lines[0]] if len(lines[0]) else []
    index = 1
    while index < len(lines):
      current = line_ranges[-1]
      if lines[index][0] <= current[1]:
        # The ranges overlap, so combine them.
        line_ranges[-1] = (current[0], max(lines[index][1], current[1]))
      else:
        line_ranges.append(lines[index])
      index += 1
    # Mark lines to format as not being disabled.
    index = 0
    for start, end in sorted(line_ranges):
      # Advance past unwrapped lines that end before this range starts.
      while index < len(uwlines) and uwlines[index].last.lineno < start:
        index += 1
      if index >= len(uwlines):
        break
      while index < len(uwlines):
        if uwlines[index].lineno > end:
          break
        if (uwlines[index].lineno >= start or
            uwlines[index].last.lineno >= start):
          uwlines[index].disable = False
        index += 1
  # Now go through the lines and disable any lines explicitly marked as
  # disabled.
  index = 0
  while index < len(uwlines):
    uwline = uwlines[index]
    if uwline.is_comment:
      if _DisableYAPF(uwline.first.value.strip()):
        # Disable everything until a matching "# yapf: enable" comment.
        index += 1
        while index < len(uwlines):
          uwline = uwlines[index]
          if uwline.is_comment and _EnableYAPF(uwline.first.value.strip()):
            break
          uwline.disable = True
          index += 1
    elif re.search(DISABLE_PATTERN, uwline.last.value.strip(), re.IGNORECASE):
      # Trailing "# yapf: disable" on a code line disables just that line.
      uwline.disable = True
    index += 1
def _DisableYAPF(line):
  """Return truthy if the first or last line of `line` is a disable comment."""
  first_line = line.split('\n')[0].strip()
  last_line = line.split('\n')[-1].strip()
  return (re.search(DISABLE_PATTERN, first_line, re.IGNORECASE) or
          re.search(DISABLE_PATTERN, last_line, re.IGNORECASE))
def _EnableYAPF(line):
  """Return truthy if the first or last line of `line` is an enable comment."""
  first_line = line.split('\n')[0].strip()
  last_line = line.split('\n')[-1].strip()
  return (re.search(ENABLE_PATTERN, first_line, re.IGNORECASE) or
          re.search(ENABLE_PATTERN, last_line, re.IGNORECASE))
def _GetUnifiedDiff(before, after, filename='code'):
"""Get a unified diff of the changes.
Arguments:
before: (unicode) The original source code.
after: (unicode) The reformatted source code.
filename: (unicode) The code's filename.
Returns:
The unified diff text.
"""
before = before.splitlines()
after = after.splitlines()
return '\n'.join(
difflib.unified_diff(
before,
after,
filename,
filename,
'(original)',
'(reformatted)',
lineterm='')) + '\n'
| 32.627517
| 80
| 0.676334
|
acfbb8a4e84a66ac40c279b77966313c4b1b5c8e
| 726
|
py
|
Python
|
tests/python/pants_test/task_test_base.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/task_test_base.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/task_test_base.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.base.deprecated import deprecated_module
from pants.testutil.task_test_base import ConsoleTaskTestBase as ConsoleTaskTestBase # noqa
from pants.testutil.task_test_base import DeclarativeTaskTestMixin as DeclarativeTaskTestMixin # noqa
from pants.testutil.task_test_base import TaskTestBase as TaskTestBase # noqa
from pants.testutil.task_test_base import ensure_cached as ensure_cached # noqa
from pants.testutil.task_test_base import is_exe as is_exe # noqa
# Emit a deprecation warning when this shim module is imported; it only
# re-exports names that now live in pants.testutil.task_test_base.
deprecated_module(
    removal_version="1.25.0.dev0",
    hint_message="Import pants.testutil.task_test_base instead."
)
| 45.375
| 102
| 0.830579
|
acfbb90503ddf022142c416a0cf054ca7698873b
| 6,691
|
py
|
Python
|
cmdb_sdk/model/easy_command/target_log_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | 5
|
2019-07-31T04:11:05.000Z
|
2021-01-07T03:23:20.000Z
|
cmdb_sdk/model/easy_command/target_log_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
cmdb_sdk/model/easy_command/target_log_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: target_log.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from cmdb_sdk.model.easy_command import action_log_pb2 as cmdb__sdk_dot_model_dot_easy__command_dot_action__log__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='target_log.proto',
package='easy_command',
syntax='proto3',
serialized_options=_b('ZFgo.easyops.local/contracts/protorepo-models/easyops/model/easy_command'),
serialized_pb=_b('\n\x10target_log.proto\x12\x0c\x65\x61sy_command\x1a,cmdb_sdk/model/easy_command/action_log.proto\"\xe6\x01\n\tTargetLog\x12\x10\n\x08targetId\x18\x01 \x01(\t\x12\x12\n\ntargetName\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\x11\n\tsysStatus\x18\x04 \x01(\t\x12\x0c\n\x04\x63ode\x18\x05 \x01(\x05\x12\x0b\n\x03msg\x18\x06 \x01(\t\x12+\n\nactionsLog\x18\x07 \x03(\x0b\x32\x17.easy_command.ActionLog\x12\x10\n\x08usedTime\x18\x08 \x01(\x05\x12\x11\n\tstartTime\x18\t \x01(\t\x12\x12\n\nupdateTime\x18\n \x01(\t\x12\x0f\n\x07\x65ndTime\x18\x0b \x01(\tBHZFgo.easyops.local/contracts/protorepo-models/easyops/model/easy_commandb\x06proto3')
,
dependencies=[cmdb__sdk_dot_model_dot_easy__command_dot_action__log__pb2.DESCRIPTOR,])
_TARGETLOG = _descriptor.Descriptor(
name='TargetLog',
full_name='easy_command.TargetLog',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='targetId', full_name='easy_command.TargetLog.targetId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='targetName', full_name='easy_command.TargetLog.targetName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='easy_command.TargetLog.status', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sysStatus', full_name='easy_command.TargetLog.sysStatus', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='easy_command.TargetLog.code', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='msg', full_name='easy_command.TargetLog.msg', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actionsLog', full_name='easy_command.TargetLog.actionsLog', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='usedTime', full_name='easy_command.TargetLog.usedTime', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='startTime', full_name='easy_command.TargetLog.startTime', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updateTime', full_name='easy_command.TargetLog.updateTime', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='endTime', full_name='easy_command.TargetLog.endTime', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=81,
serialized_end=311,
)
# NOTE(review): protoc-generated wiring ("DO NOT EDIT" file) — link the
# repeated actionsLog field to the imported ActionLog message descriptor and
# register everything with the default symbol database.
_TARGETLOG.fields_by_name['actionsLog'].message_type = cmdb__sdk_dot_model_dot_easy__command_dot_action__log__pb2._ACTIONLOG
DESCRIPTOR.message_types_by_name['TargetLog'] = _TARGETLOG
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete TargetLog message class, built at runtime from its descriptor.
TargetLog = _reflection.GeneratedProtocolMessageType('TargetLog', (_message.Message,), {
  'DESCRIPTOR' : _TARGETLOG,
  '__module__' : 'target_log_pb2'
  # @@protoc_insertion_point(class_scope:easy_command.TargetLog)
  })
_sym_db.RegisterMessage(TargetLog)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 46.144828
| 667
| 0.74533
|
acfbb942e11fdafd4f8852c83785cbc4a785c6d9
| 62,764
|
py
|
Python
|
custom/ckan/ckan/controllers/package.py
|
lynxlab/ckanext-lait
|
baee73a89ca587c391befaf9a95f070ff77f49ec
|
[
"Apache-2.0"
] | null | null | null |
custom/ckan/ckan/controllers/package.py
|
lynxlab/ckanext-lait
|
baee73a89ca587c391befaf9a95f070ff77f49ec
|
[
"Apache-2.0"
] | null | null | null |
custom/ckan/ckan/controllers/package.py
|
lynxlab/ckanext-lait
|
baee73a89ca587c391befaf9a95f070ff77f49ec
|
[
"Apache-2.0"
] | null | null | null |
import logging
from urllib import urlencode
import datetime
import os
import mimetypes
import cgi
from pylons import config
from genshi.template import MarkupTemplate
from genshi.template.text import NewTextTemplate
from paste.deploy.converters import asbool
import paste.fileapp
import ckan.logic as logic
import ckan.lib.base as base
import ckan.lib.maintain as maintain
import ckan.lib.package_saver as package_saver
import ckan.lib.i18n as i18n
import ckan.lib.navl.dictization_functions as dict_fns
import ckan.lib.accept as accept
import ckan.lib.helpers as h
import ckan.model as model
import ckan.lib.datapreview as datapreview
import ckan.lib.plugins
import ckan.lib.uploader as uploader
import ckan.plugins as p
import ckan.lib.render
from ckan.common import OrderedDict, _, json, request, c, g, response
from home import CACHE_PARAMETERS
log = logging.getLogger(__name__)
# Short local aliases for framework helpers used throughout this controller,
# so the code below can drop the `base.` / `logic.` prefixes.
render = base.render
abort = base.abort
redirect = base.redirect
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
check_access = logic.check_access
get_action = logic.get_action
tuplize_dict = logic.tuplize_dict
clean_dict = logic.clean_dict
parse_params = logic.parse_params
flatten_to_string_key = logic.flatten_to_string_key
lookup_package_plugin = ckan.lib.plugins.lookup_package_plugin
def _encode_params(params):
    """Return `params` as (key, byte-string value) pairs.

    Text values are UTF-8 encoded; all other values are stringified.
    """
    encoded = []
    for key, value in params:
        if isinstance(value, basestring):
            encoded.append((key, value.encode('utf-8')))
        else:
            encoded.append((key, str(value)))
    return encoded
def url_with_params(url, params):
    """Return `url` with `params` appended as an encoded query string."""
    query_string = urlencode(_encode_params(params))
    return url + u'?' + query_string
def search_url(params, package_type=None):
    """Build the search-page URL for `package_type` with `params` appended."""
    if package_type and package_type != 'dataset':
        # Custom dataset types have their own named search route.
        url = h.url_for('{0}_search'.format(package_type))
    else:
        url = h.url_for(controller='package', action='search')
    return url_with_params(url, params)
class PackageController(base.BaseController):
def _package_form(self, package_type=None):
return lookup_package_plugin(package_type).package_form()
def _setup_template_variables(self, context, data_dict, package_type=None):
return lookup_package_plugin(package_type).\
setup_template_variables(context, data_dict)
def _new_template(self, package_type):
return lookup_package_plugin(package_type).new_template()
def _edit_template(self, package_type):
return lookup_package_plugin(package_type).edit_template()
def _search_template(self, package_type):
return lookup_package_plugin(package_type).search_template()
def _read_template(self, package_type):
return lookup_package_plugin(package_type).read_template()
def _history_template(self, package_type):
return lookup_package_plugin(package_type).history_template()
def _guess_package_type(self, expecting_name=False):
"""
Guess the type of package from the URL handling the case
where there is a prefix on the URL (such as /data/package)
"""
# Special case: if the rot URL '/' has been redirected to the package
# controller (e.g. by an IRoutes extension) then there's nothing to do
# here.
if request.path == '/':
return 'dataset'
parts = [x for x in request.path.split('/') if x]
idx = -1
if expecting_name:
idx = -2
pt = parts[idx]
if pt == 'package':
pt = 'dataset'
return pt
    def search(self):
        """Render the dataset search page, with faceting, paging and sorting.

        Query parameters read: q, page, sort, arbitrary facet filters, and
        `ext_`-prefixed extras passed through to the search backend.
        """
        from ckan.lib.search import SearchError
        package_type = self._guess_package_type()
        try:
            context = {'model': model, 'user': c.user or c.author,
                       'auth_user_obj': c.userobj}
            check_access('site_read', context)
        except NotAuthorized:
            abort(401, _('Not authorized to see this page'))
        # unicode format (decoded from utf8)
        q = c.q = request.params.get('q', u'')
        c.query_error = False
        try:
            page = int(request.params.get('page', 1))
        except ValueError, e:
            abort(400, ('"page" parameter must be an integer'))
        limit = g.datasets_per_page
        # most search operations should reset the page counter:
        params_nopage = [(k, v) for k, v in request.params.items()
                         if k != 'page']
        def drill_down_url(alternative_url=None, **by):
            # URL that adds a facet filter to the current search.
            return h.add_url_param(alternative_url=alternative_url,
                                   controller='package', action='search',
                                   new_params=by)
        c.drill_down_url = drill_down_url
        def remove_field(key, value=None, replace=None):
            # URL that removes (or replaces) a facet filter.
            return h.remove_url_param(key, value=value, replace=replace,
                                      controller='package', action='search')
        c.remove_field = remove_field
        sort_by = request.params.get('sort', None)
        params_nosort = [(k, v) for k, v in params_nopage if k != 'sort']
        def _sort_by(fields):
            """
            Sort by the given list of fields.

            Each entry in the list is a 2-tuple: (fieldname, sort_order)
            eg - [('metadata_modified', 'desc'), ('name', 'asc')]
            If fields is empty, then the default ordering is used.
            """
            params = params_nosort[:]
            if fields:
                sort_string = ', '.join('%s %s' % f for f in fields)
                params.append(('sort', sort_string))
            return search_url(params, package_type)
        c.sort_by = _sort_by
        if sort_by is None:
            c.sort_by_fields = []
        else:
            c.sort_by_fields = [field.split()[0]
                                for field in sort_by.split(',')]
        def pager_url(q=None, page=None):
            # URL for a given results page, preserving the other params.
            params = list(params_nopage)
            params.append(('page', page))
            return search_url(params, package_type)
        c.search_url_params = urlencode(_encode_params(params_nopage))
        try:
            c.fields = []
            # c.fields_grouped will contain a dict of params containing
            # a list of values eg {'tags':['tag1', 'tag2']}
            c.fields_grouped = {}
            search_extras = {}
            fq = ''
            for (param, value) in request.params.items():
                if param not in ['q', 'page', 'sort'] \
                        and len(value) and not param.startswith('_'):
                    if not param.startswith('ext_'):
                        c.fields.append((param, value))
                        fq += ' %s:"%s"' % (param, value)
                        if param not in c.fields_grouped:
                            c.fields_grouped[param] = [value]
                        else:
                            c.fields_grouped[param].append(value)
                    else:
                        search_extras[param] = value
            context = {'model': model, 'session': model.Session,
                       'user': c.user or c.author, 'for_view': True,
                       'auth_user_obj': c.userobj}
            if package_type and package_type != 'dataset':
                # Only show datasets of this particular type
                fq += ' +dataset_type:{type}'.format(type=package_type)
            else:
                # Unless changed via config options, don't show non standard
                # dataset types on the default search page
                if not asbool(config.get('ckan.search.show_all_types', 'False')):
                    fq += ' +dataset_type:dataset'
            facets = OrderedDict()
            # LAit customization: fixed facet set and display order.
            default_facet_titles = {
                'organization': _('Organizations'),
                'category': _('Categories'),
                'rating_average_int': _('Community Rating'),
                'tags': _('Tags'),
                'res_format': _('Formats'),
                'license_id': _('Licenses'),
            }
            lait_custom_facets = ['organization','category','rating_average_int','tags','res_format','license_id']
            for facet in lait_custom_facets:
                if facet in default_facet_titles:
                    facets[facet] = default_facet_titles[facet]
                else:
                    facets[facet] = facet
            # Facet titles
            for plugin in p.PluginImplementations(p.IFacets):
                facets = plugin.dataset_facets(facets, package_type)
            c.facet_titles = facets
            data_dict = {
                'q': q,
                'fq': fq.strip(),
                'facet.field': facets.keys(),
                'rows': limit,
                'start': (page - 1) * limit,
                'sort': sort_by,
                'extras': search_extras
            }
            query = get_action('package_search')(context, data_dict)
            c.sort_by_selected = query['sort']
            c.page = h.Page(
                collection=query['results'],
                page=page,
                url=pager_url,
                item_count=query['count'],
                items_per_page=limit
            )
            c.facets = query['facets']
            c.search_facets = query['search_facets']
            c.page.items = query['results']
        except SearchError, se:
            # Backend query failed; render the page with an error flag and
            # empty results rather than crashing.
            log.error('Dataset search error: %r', se.args)
            c.query_error = True
            c.facets = {}
            c.search_facets = {}
            c.page = h.Page(collection=[])
        c.search_facets_limits = {}
        for facet in c.search_facets.keys():
            try:
                limit = int(request.params.get('_%s_limit' % facet,
                                               g.facets_default_number))
            except ValueError:
                abort(400, _('Parameter "{parameter_name}" is not '
                             'an integer').format(
                                 parameter_name='_%s_limit' % facet
                             ))
            c.search_facets_limits[facet] = limit
        maintain.deprecate_context_item(
            'facets',
            'Use `c.search_facets` instead.')
        self._setup_template_variables(context, {},
                                       package_type=package_type)
        return render(self._search_template(package_type))
def _content_type_from_extension(self, ext):
ct, mu, ext = accept.parse_extension(ext)
if not ct:
return None, None, None,
return ct, ext, (NewTextTemplate, MarkupTemplate)[mu]
def _content_type_from_accept(self):
"""
Given a requested format this method determines the content-type
to set and the genshi template loader to use in order to render
it accurately. TextTemplate must be used for non-xml templates
whilst all that are some sort of XML should use MarkupTemplate.
"""
ct, mu, ext = accept.parse_header(request.headers.get('Accept', ''))
return ct, ext, (NewTextTemplate, MarkupTemplate)[mu]
    def resources(self, id):
        """Render the resources page for dataset `id` (editors only)."""
        package_type = self._get_package_type(id.split('@')[0])
        context = {'model': model, 'session': model.Session,
                   'user': c.user or c.author, 'for_view': True,
                   'auth_user_obj': c.userobj}
        data_dict = {'id': id}
        # Page is only available to users who may edit the dataset.
        try:
            check_access('package_update', context, data_dict)
        except NotAuthorized, e:
            abort(401, _('User %r not authorized to edit %s') % (c.user, id))
        # check if package exists
        try:
            c.pkg_dict = get_action('package_show')(context, data_dict)
            c.pkg = context['package']
        except NotFound:
            abort(404, _('Dataset not found'))
        except NotAuthorized:
            abort(401, _('Unauthorized to read package %s') % id)
        self._setup_template_variables(context, {'id': id},
                                       package_type=package_type)
        return render('package/resources.html')
    def read(self, id, format='html'):
        """Render dataset `id` in the requested `format`.

        `id` may carry an '@<revision_id>' or '@<date>' suffix to view a
        historical revision.
        """
        if not format == 'html':
            ctype, extension, loader = \
                self._content_type_from_extension(format)
            if not ctype:
                # An unknown format, we'll carry on in case it is a
                # revision specifier and re-constitute the original id
                id = "%s.%s" % (id, format)
                ctype, format, loader = "text/html; charset=utf-8", "html", \
                    MarkupTemplate
        else:
            # No explicit extension: negotiate via the Accept header.
            ctype, format, loader = self._content_type_from_accept()
        response.headers['Content-Type'] = ctype
        package_type = self._get_package_type(id.split('@')[0])
        context = {'model': model, 'session': model.Session,
                   'user': c.user or c.author, 'for_view': True,
                   'auth_user_obj': c.userobj}
        data_dict = {'id': id}
        # interpret @<revision_id> or @<date> suffix
        split = id.split('@')
        if len(split) == 2:
            data_dict['id'], revision_ref = split
            if model.is_id(revision_ref):
                context['revision_id'] = revision_ref
            else:
                try:
                    date = h.date_str_to_datetime(revision_ref)
                    context['revision_date'] = date
                except TypeError, e:
                    abort(400, _('Invalid revision format: %r') % e.args)
                except ValueError, e:
                    abort(400, _('Invalid revision format: %r') % e.args)
        elif len(split) > 2:
            abort(400, _('Invalid revision format: %r') %
                  'Too many "@" symbols')
        # check if package exists
        try:
            c.pkg_dict = get_action('package_show')(context, data_dict)
            c.pkg = context['package']
        except NotFound:
            abort(404, _('Dataset not found'))
        except NotAuthorized:
            abort(401, _('Unauthorized to read package %s') % id)
        # used by disqus plugin
        c.current_package_id = c.pkg.id
        c.related_count = c.pkg.related_count
        # can the resources be previewed?
        for resource in c.pkg_dict['resources']:
            resource['can_be_previewed'] = self._resource_preview(
                {'resource': resource, 'package': c.pkg_dict})
        self._setup_template_variables(context, {'id': id},
                                       package_type=package_type)
        package_saver.PackageSaver().render_package(c.pkg_dict, context)
        # Swap the template's extension for the negotiated format.
        template = self._read_template(package_type)
        template = template[:template.index('.') + 1] + format
        try:
            return render(template, loader_class=loader)
        except ckan.lib.render.TemplateNotFound:
            msg = _("Viewing {package_type} datasets in {format} format is "
                    "not supported (template file {file} not found).".format(
                        package_type=package_type, format=format, file=template))
            abort(404, msg)
        assert False, "We should never get here"
    def history(self, id):
        """Show the revision history of dataset `id`.

        Supports revision-diff redirects (selected1/selected2 params) and an
        Atom 1.0 feed when format=atom is requested.
        """
        package_type = self._get_package_type(id.split('@')[0])
        if 'diff' in request.params or 'selected1' in request.params:
            try:
                params = {'id': request.params.getone('pkg_name'),
                          'diff': request.params.getone('selected1'),
                          'oldid': request.params.getone('selected2'),
                          }
            except KeyError, e:
                # Missing selection: fall through and re-render the history
                # page with an error message instead of redirecting.
                if 'pkg_name' in dict(request.params):
                    id = request.params.getone('pkg_name')
                    c.error = \
                        _('Select two revisions before doing the comparison.')
            else:
                params['diff_entity'] = 'package'
                h.redirect_to(controller='revision', action='diff', **params)
        context = {'model': model, 'session': model.Session,
                   'user': c.user or c.author, 'auth_user_obj': c.userobj}
        data_dict = {'id': id}
        try:
            c.pkg_dict = get_action('package_show')(context, data_dict)
            c.pkg_revisions = get_action('package_revision_list')(context,
                                                                  data_dict)
            # TODO: remove
            # Still necessary for the authz check in group/layout.html
            c.pkg = context['package']
        except NotAuthorized:
            abort(401, _('Unauthorized to read package %s') % '')
        except NotFound:
            abort(404, _('Dataset not found'))
        format = request.params.get('format', '')
        if format == 'atom':
            # Generate and return Atom 1.0 document.
            from webhelpers.feedgenerator import Atom1Feed
            feed = Atom1Feed(
                title=_(u'CKAN Dataset Revision History'),
                link=h.url_for(controller='revision', action='read',
                               id=c.pkg_dict['name']),
                description=_(u'Recent changes to CKAN Dataset: ') +
                (c.pkg_dict['title'] or ''),
                language=unicode(i18n.get_lang()),
            )
            for revision_dict in c.pkg_revisions:
                revision_date = h.date_str_to_datetime(
                    revision_dict['timestamp'])
                # Only include revisions newer than the 'days' horizon
                # (default 30); revisions are assumed newest-first.
                try:
                    dayHorizon = int(request.params.get('days'))
                except:
                    dayHorizon = 30
                dayAge = (datetime.datetime.now() - revision_date).days
                if dayAge >= dayHorizon:
                    break
                if revision_dict['message']:
                    item_title = u'%s' % revision_dict['message'].\
                        split('\n')[0]
                else:
                    item_title = u'%s' % revision_dict['id']
                item_link = h.url_for(controller='revision', action='read',
                                      id=revision_dict['id'])
                item_description = _('Log message: ')
                item_description += '%s' % (revision_dict['message'] or '')
                item_author_name = revision_dict['author']
                item_pubdate = revision_date
                feed.add_item(
                    title=item_title,
                    link=item_link,
                    description=item_description,
                    author_name=item_author_name,
                    pubdate=item_pubdate,
                )
            feed.content_type = 'application/atom+xml'
            return feed.writeString('utf-8')
        c.related_count = c.pkg.related_count
        return render(self._history_template(c.pkg_dict.get('type',
                                                            package_type)))
    def new(self, data=None, errors=None, error_summary=None):
        """Render the 'create dataset' form, or save it on POST.

        `data`/`errors`/`error_summary` are supplied when re-rendering the
        form after a failed save.
        """
        package_type = self._guess_package_type(True)
        context = {'model': model, 'session': model.Session,
                   'user': c.user or c.author, 'auth_user_obj': c.userobj,
                   'save': 'save' in request.params}
        # Package needs to have a organization group in the call to
        # check_access and also to save it
        try:
            check_access('package_create', context)
        except NotAuthorized:
            abort(401, _('Unauthorized to create a package'))
        if context['save'] and not data:
            return self._save_new(context, package_type=package_type)
        data = data or clean_dict(dict_fns.unflatten(tuplize_dict(parse_params(
            request.params, ignore_keys=CACHE_PARAMETERS))))
        c.resources_json = h.json.dumps(data.get('resources', []))
        # convert tags if not supplied in data
        if data and not data.get('tag_string'):
            data['tag_string'] = ', '.join(
                h.dict_list_reduce(data.get('tags', {}), 'name'))
        errors = errors or {}
        error_summary = error_summary or {}
        # in the phased add dataset we need to know that
        # we have already completed stage 1
        stage = ['active']
        if data.get('state') == 'draft':
            stage = ['active', 'complete']
        elif data.get('state') == 'draft-complete':
            stage = ['active', 'complete', 'complete']
        # if we are creating from a group then this allows the group to be
        # set automatically
        data['group_id'] = request.params.get('group') or \
            request.params.get('groups__0__id')
        vars = {'data': data, 'errors': errors,
                'error_summary': error_summary,
                'action': 'new', 'stage': stage}
        c.errors_json = h.json.dumps(errors)
        self._setup_template_variables(context, {},
                                       package_type=package_type)
        # TODO: This check is to maintain backwards compatibility with the
        # old way of creating custom forms. This behaviour is now deprecated.
        if hasattr(self, 'package_form'):
            c.form = render(self.package_form, extra_vars=vars)
        else:
            c.form = render(self._package_form(package_type=package_type),
                            extra_vars=vars)
        return render(self._new_template(package_type),
                      extra_vars={'stage': stage})
def resource_edit(self, id, resource_id, data=None, errors=None,
                  error_summary=None):
    """Edit an existing resource of dataset ``id``.

    POST saves (create if ``resource_id`` is falsy, otherwise update)
    and redirects to the resource read page; GET renders the edit form.
    Draft datasets are routed back through ``new_resource``.
    """
    if request.method == 'POST' and not data:
        data = data or clean_dict(dict_fns.unflatten(tuplize_dict(parse_params(
            request.POST))))
        # we don't want to include save as it is part of the form
        del data['save']
        context = {'model': model, 'session': model.Session,
                   'api_version': 3, 'for_edit': True,
                   'user': c.user or c.author, 'auth_user_obj': c.userobj}
        data['package_id'] = id
        try:
            if resource_id:
                data['id'] = resource_id
                get_action('resource_update')(context, data)
            else:
                get_action('resource_create')(context, data)
        except ValidationError, e:
            # re-render the form with the validation errors
            errors = e.error_dict
            error_summary = e.error_summary
            return self.resource_edit(id, resource_id, data,
                                      errors, error_summary)
        except NotAuthorized:
            abort(401, _('Unauthorized to edit this resource'))
        redirect(h.url_for(controller='package', action='resource_read',
                           id=id, resource_id=resource_id))
    context = {'model': model, 'session': model.Session,
               'api_version': 3, 'for_edit': True,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    pkg_dict = get_action('package_show')(context, {'id': id})
    if pkg_dict['state'].startswith('draft'):
        # dataset has not yet been fully created
        resource_dict = get_action('resource_show')(context, {'id': resource_id})
        fields = ['url', 'resource_type', 'format', 'name', 'description', 'id']
        data = {}
        for field in fields:
            data[field] = resource_dict[field]
        return self.new_resource(id, data=data)
    # resource is fully created
    try:
        resource_dict = get_action('resource_show')(context, {'id': resource_id})
    except NotFound:
        abort(404, _('Resource not found'))
    c.pkg_dict = pkg_dict
    c.resource = resource_dict
    # set the form action
    c.form_action = h.url_for(controller='package',
                              action='resource_edit',
                              resource_id=resource_id,
                              id=id)
    if not data:
        data = resource_dict
    errors = errors or {}
    error_summary = error_summary or {}
    vars = {'data': data, 'errors': errors,
            'error_summary': error_summary, 'action': 'new'}
    return render('package/resource_edit.html', extra_vars=vars)
def new_resource(self, id, data=None, errors=None, error_summary=None):
    ''' FIXME: This is a temporary action to allow styling of the
    forms.

    Stage 2 of the phased dataset-creation flow: add resources to the
    draft dataset ``id``.  The ``save`` request param ('go-metadata',
    'go-dataset', 'go-dataset-complete', or anything else for
    "add another") decides where to redirect after saving.
    '''
    if request.method == 'POST' and not data:
        save_action = request.params.get('save')
        data = data or clean_dict(dict_fns.unflatten(tuplize_dict(parse_params(
            request.POST))))
        # we don't want to include save as it is part of the form
        del data['save']
        resource_id = data['id']
        del data['id']
        context = {'model': model, 'session': model.Session,
                   'user': c.user or c.author, 'auth_user_obj': c.userobj}
        # see if we have any data that we are trying to save
        data_provided = False
        for key, value in data.iteritems():
            # uploaded files arrive as cgi.FieldStorage, which is falsy
            # even when a file is present, hence the isinstance check
            if ((value or isinstance(value, cgi.FieldStorage))
                    and key != 'resource_type'):
                data_provided = True
                break
        if not data_provided and save_action != "go-dataset-complete":
            if save_action == 'go-dataset':
                # go to final stage of adddataset
                redirect(h.url_for(controller='package',
                                   action='edit', id=id))
            # see if we have added any resources
            try:
                data_dict = get_action('package_show')(context, {'id': id})
            except NotAuthorized:
                abort(401, _('Unauthorized to update dataset'))
            except NotFound:
                abort(404,
                      _('The dataset {id} could not be found.').format(id=id))
            if not len(data_dict['resources']):
                # no data so keep on page
                msg = _('You must add at least one data resource')
                # On new templates do not use flash message
                if g.legacy_templates:
                    h.flash_error(msg)
                    redirect(h.url_for(controller='package',
                                       action='new_resource', id=id))
                else:
                    errors = {}
                    error_summary = {_('Error'): msg}
                    return self.new_resource(id, data, errors, error_summary)
            # we have a resource so let them add metadata
            redirect(h.url_for(controller='package',
                               action='new_metadata', id=id))
        data['package_id'] = id
        try:
            if resource_id:
                data['id'] = resource_id
                get_action('resource_update')(context, data)
            else:
                get_action('resource_create')(context, data)
        except ValidationError, e:
            errors = e.error_dict
            error_summary = e.error_summary
            return self.new_resource(id, data, errors, error_summary)
        except NotAuthorized:
            abort(401, _('Unauthorized to create a resource'))
        except NotFound:
            abort(404,
                  _('The dataset {id} could not be found.').format(id=id))
        if save_action == 'go-metadata':
            # go to final stage of add dataset
            redirect(h.url_for(controller='package',
                               action='new_metadata', id=id))
        elif save_action == 'go-dataset':
            # go to first stage of add dataset
            redirect(h.url_for(controller='package',
                               action='edit', id=id))
        elif save_action == 'go-dataset-complete':
            # go to first stage of add dataset
            redirect(h.url_for(controller='package',
                               action='read', id=id))
        else:
            # add more resources
            redirect(h.url_for(controller='package',
                               action='new_resource', id=id))
    # GET (or POST re-render after recursion): show the form
    errors = errors or {}
    error_summary = error_summary or {}
    vars = {'data': data, 'errors': errors,
            'error_summary': error_summary, 'action': 'new'}
    vars['pkg_name'] = id
    # get resources for sidebar
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    try:
        pkg_dict = get_action('package_show')(context, {'id': id})
    except NotFound:
        abort(404, _('The dataset {id} could not be found.').format(id=id))
    # required for nav menu
    vars['pkg_dict'] = pkg_dict
    template = 'package/new_resource_not_draft.html'
    if pkg_dict['state'] == 'draft':
        vars['stage'] = ['complete', 'active']
        template = 'package/new_resource.html'
    elif pkg_dict['state'] == 'draft-complete':
        vars['stage'] = ['complete', 'active', 'complete']
        template = 'package/new_resource.html'
    return render(template, extra_vars=vars)
def new_metadata(self, id, data=None, errors=None, error_summary=None):
    ''' FIXME: This is a temporary action to allow styling of the
    forms.

    Final stage of the phased dataset-creation flow: edit metadata of
    draft dataset ``id``.  'finish' publishes the dataset (state
    'active'); 'go-resources'/'go-dataset' keep it as 'draft-complete'
    and step back in the flow.
    '''
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    if request.method == 'POST' and not data:
        save_action = request.params.get('save')
        data = data or clean_dict(dict_fns.unflatten(tuplize_dict(parse_params(
            request.POST))))
        # we don't want to include save as it is part of the form
        del data['save']
        data_dict = get_action('package_show')(context, {'id': id})
        data_dict['id'] = id
        # update the state
        if save_action == 'finish':
            # we want this to go live when saved
            data_dict['state'] = 'active'
        elif save_action in ['go-resources', 'go-dataset']:
            data_dict['state'] = 'draft-complete'
        # allow the state to be changed
        context['allow_state_change'] = True
        data_dict.update(data)
        try:
            get_action('package_update')(context, data_dict)
        except ValidationError, e:
            errors = e.error_dict
            error_summary = e.error_summary
            return self.new_metadata(id, data, errors, error_summary)
        except NotAuthorized:
            abort(401, _('Unauthorized to update dataset'))
        if save_action == 'go-resources':
            # we want to go back to the add resources form stage
            redirect(h.url_for(controller='package',
                               action='new_resource', id=id))
        elif save_action == 'go-dataset':
            # we want to go back to the add dataset stage
            redirect(h.url_for(controller='package',
                               action='edit', id=id))
        redirect(h.url_for(controller='package', action='read', id=id))
    if not data:
        data = get_action('package_show')(context, {'id': id})
    errors = errors or {}
    error_summary = error_summary or {}
    vars = {'data': data, 'errors': errors, 'error_summary': error_summary}
    vars['pkg_name'] = id
    package_type = self._get_package_type(id)
    self._setup_template_variables(context, {},
                                   package_type=package_type)
    return render('package/new_package_metadata.html', extra_vars=vars)
def edit(self, id, data=None, errors=None, error_summary=None):
    """Render the dataset edit form, or save a submitted edit.

    'save' in the request params (with no ``data``) delegates to
    ``_save_edit``; datasets still in a draft state are routed back
    through the phased ``new`` flow instead.
    """
    package_type = self._get_package_type(id)
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj,
               'save': 'save' in request.params,
               'moderated': config.get('moderated'),
               'pending': True}
    if context['save'] and not data:
        return self._save_edit(id, context, package_type=package_type)
    try:
        c.pkg_dict = get_action('package_show')(context, {'id': id})
        context['for_edit'] = True
        old_data = get_action('package_show')(context, {'id': id})
        # old data is from the database and data is passed from the
        # user if there is a validation error. Use users data if there.
        if data:
            old_data.update(data)
        data = old_data
    except NotAuthorized:
        abort(401, _('Unauthorized to read package %s') % '')
    except NotFound:
        abort(404, _('Dataset not found'))
    # are we doing a multiphase add?
    if data.get('state', '').startswith('draft'):
        c.form_action = h.url_for(controller='package', action='new')
        c.form_style = 'new'
        return self.new(data=data, errors=errors,
                        error_summary=error_summary)
    c.pkg = context.get("package")
    c.resources_json = h.json.dumps(data.get('resources', []))
    try:
        check_access('package_update', context)
    except NotAuthorized, e:
        abort(401, _('User %r not authorized to edit %s') % (c.user, id))
    # convert tags if not supplied in data
    if data and not data.get('tag_string'):
        data['tag_string'] = ', '.join(h.dict_list_reduce(
            c.pkg_dict.get('tags', {}), 'name'))
    errors = errors or {}
    vars = {'data': data, 'errors': errors,
            'error_summary': error_summary, 'action': 'edit'}
    c.errors_json = h.json.dumps(errors)
    self._setup_template_variables(context, {'id': id},
                                   package_type=package_type)
    c.related_count = c.pkg.related_count
    # we have already completed stage 1
    vars['stage'] = ['active']
    if data.get('state') == 'draft':
        vars['stage'] = ['active', 'complete']
    elif data.get('state') == 'draft-complete':
        vars['stage'] = ['active', 'complete', 'complete']
    # TODO: This check is to maintain backwards compatibility with the
    # old way of creating custom forms. This behaviour is now deprecated.
    if hasattr(self, 'package_form'):
        c.form = render(self.package_form, extra_vars=vars)
    else:
        c.form = render(self._package_form(package_type=package_type),
                        extra_vars=vars)
    return render(self._edit_template(package_type),
                  extra_vars={'stage': vars['stage']})
def read_ajax(self, id, revision=None):
    """Return the dataset (optionally at ``revision``) as a JSON string
    with flattened string keys and tags removed."""
    # NOTE(review): package_type is computed but never used here —
    # looks like dead code inherited from a sibling action; confirm
    # before removing (the lookup has no other visible effect).
    package_type = self._get_package_type(id)
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj,
               'revision_id': revision}
    try:
        data = get_action('package_show')(context, {'id': id})
    except NotAuthorized:
        abort(401, _('Unauthorized to read package %s') % '')
    except NotFound:
        abort(404, _('Dataset not found'))
    data.pop('tags')
    data = flatten_to_string_key(data)
    response.headers['Content-Type'] = 'application/json;charset=utf-8'
    return h.json.dumps(data)
def history_ajax(self, id):
    """Return the dataset's revision history as a JSON list.

    Each entry carries id/message/timestamp/author plus approval flags;
    only the newest approved revision gets ``current_approved``.
    """
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    data_dict = {'id': id}
    try:
        pkg_revisions = get_action('package_revision_list')(
            context, data_dict)
    except NotAuthorized:
        abort(401, _('Unauthorized to read package %s') % '')
    except NotFound:
        abort(404, _('Dataset not found'))
    data = []
    approved = False
    # revisions come newest-first, so the first approved one we meet is
    # the currently-approved revision
    for num, revision in enumerate(pkg_revisions):
        if not approved and revision['approved_timestamp']:
            current_approved, approved = True, True
        else:
            current_approved = False
        data.append({'revision_id': revision['id'],
                     'message': revision['message'],
                     'timestamp': revision['timestamp'],
                     'author': revision['author'],
                     'approved': bool(revision['approved_timestamp']),
                     'current_approved': current_approved})
    response.headers['Content-Type'] = 'application/json;charset=utf-8'
    return h.json.dumps(data)
def _get_package_type(self, id):
    """Return the type name of the package identified by ``id``.

    Falls back to ``'dataset'`` when the package has no explicit type,
    and returns ``None`` when no such package exists.
    """
    package = model.Package.get(id)
    if package is None:
        return None
    return package.type or 'dataset'
def _tag_string_to_list(self, tag_string):
''' This is used to change tags from a sting to a list of dicts '''
out = []
for tag in tag_string.split(','):
tag = tag.strip()
if tag:
out.append({'name': tag,
'state': 'active'})
return out
def _save_new(self, context, package_type=None):
    """Create a dataset from the POSTed form data.

    In the phased ('_ckan_phase') flow a POST carrying ``pkg_name`` is
    really an update of an already-created draft, so this may call
    package_update and redirect instead of creating anything new.
    """
    # The staged add dataset used the new functionality when the dataset is
    # partially created so we need to know if we actually are updating or
    # this is a real new.
    is_an_update = False
    ckan_phase = request.params.get('_ckan_phase')
    from ckan.lib.search import SearchIndexError
    try:
        data_dict = clean_dict(dict_fns.unflatten(
            tuplize_dict(parse_params(request.POST))))
        if ckan_phase:
            # prevent clearing of groups etc
            context['allow_partial_update'] = True
            # sort the tags
            data_dict['tags'] = self._tag_string_to_list(
                data_dict['tag_string'])
            if data_dict.get('pkg_name'):
                is_an_update = True
                # This is actually an update not a save
                data_dict['id'] = data_dict['pkg_name']
                del data_dict['pkg_name']
                # this is actually an edit not a save
                pkg_dict = get_action('package_update')(context, data_dict)
                if request.params['save'] == 'go-metadata':
                    # redirect to add metadata
                    url = h.url_for(controller='package',
                                    action='new_metadata',
                                    id=pkg_dict['name'])
                else:
                    # redirect to add dataset resources
                    url = h.url_for(controller='package',
                                    action='new_resource',
                                    id=pkg_dict['name'])
                # redirect() raises, so the create path below is skipped
                redirect(url)
            # Make sure we don't index this dataset
            if request.params['save'] not in ['go-resource', 'go-metadata']:
                data_dict['state'] = 'draft'
            # allow the state to be changed
            context['allow_state_change'] = True
        data_dict['type'] = package_type
        context['message'] = data_dict.get('log_message', '')
        pkg_dict = get_action('package_create')(context, data_dict)
        if ckan_phase:
            # redirect to add dataset resources
            url = h.url_for(controller='package',
                            action='new_resource',
                            id=pkg_dict['name'])
            redirect(url)
        self._form_save_redirect(pkg_dict['name'], 'new', package_type=package_type)
    except NotAuthorized:
        abort(401, _('Unauthorized to read package %s') % '')
    except NotFound, e:
        abort(404, _('Dataset not found'))
    except dict_fns.DataError:
        abort(400, _(u'Integrity Error'))
    except SearchIndexError, e:
        try:
            exc_str = unicode(repr(e.args))
        except Exception:  # We don't like bare excepts
            exc_str = unicode(str(e))
        abort(500, _(u'Unable to add package to search index.') + exc_str)
    except ValidationError, e:
        errors = e.error_dict
        error_summary = e.error_summary
        if is_an_update:
            # we need to get the state of the dataset to show the stage we
            # are on.
            pkg_dict = get_action('package_show')(context, data_dict)
            data_dict['state'] = pkg_dict['state']
            return self.edit(data_dict['id'], data_dict,
                             errors, error_summary)
        data_dict['state'] = 'none'
        return self.new(data_dict, errors, error_summary)
def _save_edit(self, name_or_id, context, package_type=None):
from ckan.lib.search import SearchIndexError
log.debug('Package save request name: %s POST: %r',
name_or_id, request.POST)
try:
data_dict = clean_dict(dict_fns.unflatten(
tuplize_dict(parse_params(request.POST))))
if '_ckan_phase' in data_dict:
# we allow partial updates to not destroy existing resources
context['allow_partial_update'] = True
data_dict['tags'] = self._tag_string_to_list(
data_dict['tag_string'])
del data_dict['_ckan_phase']
del data_dict['save']
context['message'] = data_dict.get('log_message', '')
if not context['moderated']:
context['pending'] = False
data_dict['id'] = name_or_id
pkg = get_action('package_update')(context, data_dict)
if request.params.get('save', '') == 'Approve':
get_action('make_latest_pending_package_active')(
context, data_dict)
c.pkg = context['package']
c.pkg_dict = pkg
self._form_save_redirect(pkg['name'], 'edit', package_type=package_type)
except NotAuthorized:
abort(401, _('Unauthorized to read package %s') % id)
except NotFound, e:
abort(404, _('Dataset not found'))
except dict_fns.DataError:
abort(400, _(u'Integrity Error'))
except SearchIndexError, e:
try:
exc_str = unicode(repr(e.args))
except Exception: # We don't like bare excepts
exc_str = unicode(str(e))
abort(500, _(u'Unable to update search index.') + exc_str)
except ValidationError, e:
errors = e.error_dict
error_summary = e.error_summary
return self.edit(name_or_id, data_dict, errors, error_summary)
def _form_save_redirect(self, pkgname, action, package_type=None):
    '''This redirects the user to the CKAN package/read page,
    unless there is request parameter giving an alternate location,
    perhaps an external website.
    @param pkgname - Name of the package just edited
    @param action - What the action of the edit was ('new' or 'edit')
    @param package_type - custom dataset type, routed via
        '<type>_read' when not the default 'dataset'
    '''
    assert action in ('new', 'edit')
    # a 'return_to' request param or a configured per-action return URL
    # overrides the default read page; '<NAME>' is substituted
    url = request.params.get('return_to') or \
        config.get('package_%s_return_url' % action)
    if url:
        url = url.replace('<NAME>', pkgname)
    else:
        if package_type is None or package_type == 'dataset':
            url = h.url_for(controller='package', action='read', id=pkgname)
        else:
            url = h.url_for('{0}_read'.format(package_type), id=pkgname)
    redirect(url)
def _adjust_license_id_options(self, pkg, fs):
options = fs.license_id.render_opts['options']
is_included = False
for option in options:
license_id = option[1]
if license_id == pkg.license_id:
is_included = True
if not is_included:
options.insert(1, (pkg.license_id, pkg.license_id))
def delete(self, id):
    """Confirm (GET) or perform (POST) deletion of dataset ``id``.

    'cancel' in the request params returns to the edit page instead.
    """
    if 'cancel' in request.params:
        h.redirect_to(controller='package', action='edit', id=id)
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    try:
        check_access('package_delete', context, {'id': id})
    except NotAuthorized:
        # Bug fix: the message previously formatted an empty string
        # ("... package " with no identifier); include the dataset id.
        abort(401, _('Unauthorized to delete package %s') % id)
    try:
        if request.method == 'POST':
            get_action('package_delete')(context, {'id': id})
            h.flash_notice(_('Dataset has been deleted.'))
            h.redirect_to(controller='package', action='search')
        c.pkg_dict = get_action('package_show')(context, {'id': id})
    except NotAuthorized:
        abort(401, _('Unauthorized to delete package %s') % id)
    except NotFound:
        abort(404, _('Dataset not found'))
    return render('package/confirm_delete.html')
def resource_delete(self, id, resource_id):
    """Confirm (GET) or perform (POST) deletion of a resource.

    'cancel' in the request params returns to the resource edit page.
    """
    if 'cancel' in request.params:
        h.redirect_to(controller='package', action='resource_edit',
                      resource_id=resource_id, id=id)
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    try:
        check_access('package_delete', context, {'id': id})
    except NotAuthorized:
        # Bug fix: the messages previously formatted an empty string
        # (e.g. "... package " with no identifier); include the ids.
        abort(401, _('Unauthorized to delete package %s') % id)
    try:
        if request.method == 'POST':
            get_action('resource_delete')(context, {'id': resource_id})
            h.flash_notice(_('Resource has been deleted.'))
            h.redirect_to(controller='package', action='read', id=id)
        c.resource_dict = get_action('resource_show')(context, {'id': resource_id})
        c.pkg_id = id
    except NotAuthorized:
        abort(401, _('Unauthorized to delete resource %s') % resource_id)
    except NotFound:
        abort(404, _('Resource not found'))
    return render('package/confirm_delete_resource.html')
def autocomplete(self):
    """Return newline-separated 'display|name' pairs matching the 'q'
    request param, for the legacy autocomplete widget."""
    # DEPRECATED in favour of /api/2/util/dataset/autocomplete
    q = unicode(request.params.get('q', ''))
    if not len(q):
        return ''
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    data_dict = {'q': q}
    packages = get_action('package_autocomplete')(context, data_dict)
    pkg_list = []
    for pkg in packages:
        # '|' is the field separator, so strip it from the display text
        pkg_list.append('%s|%s' % (pkg['match_displayed'].
                                   replace('|', ' '), pkg['name']))
    return '\n'.join(pkg_list)
def _render_edit_form(self, fs, params={}, clear_session=False):
    """Render the legacy FormAlchemy edit form for fieldset ``fs``.

    NOTE(review): ``params={}`` is a mutable default; it is only ever
    read here (never mutated), so it is kept for interface
    compatibility.
    """
    # errors arrive in c.error and fs.errors
    c.log_message = params.get('log_message', '')
    # rgrp: expunge everything from session before dealing with
    # validation errors) so we don't have any problematic saves
    # when the fs.render causes a flush.
    # seb: If the session is *expunged*, then the form can't be
    # rendered; I've settled with a rollback for now, which isn't
    # necessarily what's wanted here.
    # dread: I think this only happened with tags because until
    # this changeset, Tag objects were created in the Renderer
    # every time you hit preview. So I don't believe we need to
    # clear the session any more. Just in case I'm leaving it in
    # with the log comments to find out.
    if clear_session:
        # log to see if clearing the session is ever required
        if model.Session.new or model.Session.dirty or \
                model.Session.deleted:
            # Bug fix: the three values were previously wrapped in a
            # single tuple, which breaks the '%r %r %r' lazy formatting
            # ("not enough arguments") whenever this warning fires.
            log.warn('Expunging session changes which were not expected: '
                     '%r %r %r', model.Session.new, model.Session.dirty,
                     model.Session.deleted)
        try:
            model.Session.rollback()
        except AttributeError:
            # older SQLAlchemy versions
            model.Session.clear()
    edit_form_html = fs.render()
    c.form = h.literal(edit_form_html)
    return h.literal(render('package/edit_form.html'))
def _update_authz(self, fs):
    """Validate and sync the legacy FormAlchemy fieldset ``fs``.

    Raises ValidationException (after rendering the form into c.form)
    on validation failure; rolls back and re-raises on sync failure,
    committing only on success.
    """
    validation = fs.validate()
    if not validation:
        c.form = self._render_edit_form(fs, request.params)
        raise package_saver.ValidationException(fs)
    try:
        fs.sync()
    except Exception, inst:
        model.Session.rollback()
        raise
    else:
        model.Session.commit()
def resource_read(self, id, resource_id):
    """Render the read page for a resource of dataset ``id``."""
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj, "for_view": True}
    try:
        c.resource = get_action('resource_show')(context,
                                                 {'id': resource_id})
        c.package = get_action('package_show')(context, {'id': id})
        # required for nav menu
        c.pkg = context['package']
        c.pkg_dict = c.package
    except NotFound:
        abort(404, _('Resource not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read resource %s') % id)
    # get package license info
    license_id = c.package.get('license_id')
    try:
        c.package['isopen'] = model.Package.\
            get_license_register()[license_id].isopen()
    except KeyError:
        # unknown license id: treat as not open
        c.package['isopen'] = False
    # TODO: find a nicer way of doing this
    c.datastore_api = '%s/api/action' % config.get('ckan.site_url', '').rstrip('/')
    c.related_count = c.pkg.related_count
    c.resource['can_be_previewed'] = self._resource_preview(
        {'resource': c.resource, 'package': c.package})
    return render('package/resource_read.html')
def _resource_preview(self, data_dict):
    """Return True when the resource in ``data_dict`` can be previewed:
    its format is directly renderable/loadable, or some preview plugin
    claims it."""
    fmt = datapreview.res_format(data_dict['resource'])
    if fmt in datapreview.direct() + datapreview.loadable():
        return True
    return bool(datapreview.get_preview_plugin(data_dict,
                                               return_first=True))
def resource_download(self, id, resource_id, filename=None):
    """
    Provides a direct download by either redirecting the user to the url
    stored or downloading an uploaded file directly.
    """
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    try:
        rsc = get_action('resource_show')(context, {'id': resource_id})
        pkg = get_action('package_show')(context, {'id': id})
    except NotFound:
        abort(404, _('Resource not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read resource %s') % id)
    if rsc.get('url_type') == 'upload':
        # serve the uploaded file from local storage
        upload = uploader.ResourceUpload(rsc)
        filepath = upload.get_path(rsc['id'])
        fileapp = paste.fileapp.FileApp(filepath)
        try:
            status, headers, app_iter = request.call_application(fileapp)
        except OSError:
            abort(404, _('Resource data not found'))
        response.headers.update(dict(headers))
        content_type, content_enc = mimetypes.guess_type(rsc.get('url', ''))
        # Bug fix: guess_type() returns None for unknown types; don't
        # overwrite the Content-Type header with None (keep FileApp's).
        if content_type:
            response.headers['Content-Type'] = content_type
        response.status = status
        return app_iter
    elif 'url' not in rsc:
        abort(404, _('No download is available'))
    # link resource: send the client to the stored URL
    redirect(rsc['url'])
def follow(self, id):
    '''Start following this dataset.

    Flashes success or the validation/authorization error, then always
    redirects back to the dataset read page.
    '''
    context = {'model': model,
               'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    data_dict = {'id': id}
    try:
        get_action('follow_dataset')(context, data_dict)
        package_dict = get_action('package_show')(context, data_dict)
        h.flash_success(_("You are now following {0}").format(
            package_dict['title']))
    except ValidationError as e:
        # use the most specific error text available
        error_message = (e.extra_msg or e.message or e.error_summary
                         or e.error_dict)
        h.flash_error(error_message)
    except NotAuthorized as e:
        h.flash_error(e.extra_msg)
    h.redirect_to(controller='package', action='read', id=id)
def unfollow(self, id):
    '''Stop following this dataset.

    Flashes success or the validation/lookup/authorization error, then
    always redirects back to the dataset read page.
    '''
    context = {'model': model,
               'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    data_dict = {'id': id}
    try:
        get_action('unfollow_dataset')(context, data_dict)
        package_dict = get_action('package_show')(context, data_dict)
        h.flash_success(_("You are no longer following {0}").format(
            package_dict['title']))
    except ValidationError as e:
        # use the most specific error text available
        error_message = (e.extra_msg or e.message or e.error_summary
                         or e.error_dict)
        h.flash_error(error_message)
    except (NotFound, NotAuthorized) as e:
        error_message = e.extra_msg or e.message
        h.flash_error(error_message)
    h.redirect_to(controller='package', action='read', id=id)
def followers(self, id=None):
    """Render the list of users following dataset ``id``."""
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'for_view': True,
               'auth_user_obj': c.userobj}
    data_dict = {'id': id}
    try:
        c.pkg_dict = get_action('package_show')(context, data_dict)
        c.pkg = context['package']
        c.followers = get_action('dataset_follower_list')(context,
                                                          {'id': c.pkg_dict['id']})
        c.related_count = c.pkg.related_count
    except NotFound:
        abort(404, _('Dataset not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read package %s') % id)
    return render('package/followers.html')
def groups(self, id):
    """Show and manage the groups dataset ``id`` belongs to.

    POST handles a 'group_added' param (member_create) and any
    'group_remove.<id>' param (member_delete), then redirects back;
    GET builds the dropdown of groups the user may add the dataset to.
    """
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'for_view': True,
               'auth_user_obj': c.userobj, 'use_cache': False}
    data_dict = {'id': id}
    try:
        c.pkg_dict = get_action('package_show')(context, data_dict)
    except NotFound:
        abort(404, _('Dataset not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read dataset %s') % id)
    if request.method == 'POST':
        new_group = request.POST.get('group_added')
        if new_group:
            data_dict = {"id": new_group,
                         "object": id,
                         "object_type": 'package',
                         "capacity": 'public'}
            try:
                get_action('member_create')(context, data_dict)
            except NotFound:
                abort(404, _('Group not found'))
        # removal requests arrive as a param named 'group_remove.<id>'
        removed_group = None
        for param in request.POST:
            if param.startswith('group_remove'):
                removed_group = param.split('.')[-1]
                break
        if removed_group:
            data_dict = {"id": removed_group,
                         "object": id,
                         "object_type": 'package'}
            try:
                get_action('member_delete')(context, data_dict)
            except NotFound:
                abort(404, _('Group not found'))
        redirect(h.url_for(controller='package',
                           action='groups', id=id))
    context['is_member'] = True
    users_groups = get_action('group_list_authz')(context, data_dict)
    pkg_group_ids = set(group['id'] for group
                        in c.pkg_dict.get('groups', []))
    user_group_ids = set(group['id'] for group
                         in users_groups)
    # only offer groups the dataset is not already in
    c.group_dropdown = [[group['id'], group['display_name']]
                        for group in users_groups if
                        group['id'] not in pkg_group_ids]
    for group in c.pkg_dict.get('groups', []):
        group['user_member'] = (group['id'] in user_group_ids)
    return render('package/group_list.html')
def activity(self, id):
    '''Render this package's public activity stream page.'''
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'for_view': True,
               'auth_user_obj': c.userobj}
    data_dict = {'id': id}
    try:
        c.pkg_dict = get_action('package_show')(context, data_dict)
        c.pkg = context['package']
        # pre-rendered HTML snippet of the activity stream
        c.package_activity_stream = get_action(
            'package_activity_list_html')(context,
                                          {'id': c.pkg_dict['id']})
        c.related_count = c.pkg.related_count
    except NotFound:
        abort(404, _('Dataset not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read dataset %s') % id)
    return render('package/activity.html')
def resource_embedded_dataviewer(self, id, resource_id,
                                 width=500, height=500):
    """
    Embedded page for a read-only resource dataview. Allows
    for width and height to be specified as part of the
    querystring (as well as accepting them via routes).
    """
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    try:
        c.resource = get_action('resource_show')(context,
                                                 {'id': resource_id})
        c.package = get_action('package_show')(context, {'id': id})
        c.resource_json = h.json.dumps(c.resource)
        # double check that the resource belongs to the specified package
        if not c.resource['id'] in [r['id']
                                    for r in c.package['resources']]:
            raise NotFound
    except NotFound:
        abort(404, _('Resource not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read resource %s') % id)
    # Construct the recline state
    state_version = int(request.params.get('state_version', '1'))
    recline_state = self._parse_recline_state(request.params)
    if recline_state is None:
        abort(400, ('"state" parameter must be a valid recline '
                    'state (version %d)' % state_version))
    c.recline_state = h.json.dumps(recline_state)
    # clamp embed dimensions to a sane minimum
    c.width = max(int(request.params.get('width', width)), 100)
    c.height = max(int(request.params.get('height', height)), 100)
    c.embedded = True
    return render('package/resource_embedded_dataviewer.html')
def _parse_recline_state(self, params):
    """Build a read-only recline state dict from the request params.

    Returns None for unsupported state versions.
    NOTE(review): the ``params`` argument is ignored; the method reads
    ``request.params`` directly — presumably equivalent at every call
    site, but confirm before relying on it.
    """
    state_version = int(request.params.get('state_version', '1'))
    if state_version != 1:
        return None
    recline_state = {}
    for k, v in request.params.items():
        try:
            # params may carry JSON-encoded values; keep raw string if not
            v = h.json.loads(v)
        except ValueError:
            pass
        recline_state[k] = v
    recline_state.pop('width', None)
    recline_state.pop('height', None)
    recline_state['readOnly'] = True
    # previous versions of recline setup used elasticsearch_url attribute
    # for data api url - see http://trac.ckan.org/ticket/2639
    # fix by relocating this to url attribute which is the default location
    if 'dataset' in recline_state and 'elasticsearch_url' in recline_state['dataset']:
        recline_state['dataset']['url'] = recline_state['dataset']['elasticsearch_url']
    # Ensure only the currentView is available
    # default to grid view if none specified
    if not recline_state.get('currentView', None):
        recline_state['currentView'] = 'grid'
    # Python 2: .keys() is a list snapshot, so popping while iterating
    # is safe here (would need list(...) under Python 3)
    for k in recline_state.keys():
        if k.startswith('view-') and \
                not k.endswith(recline_state['currentView']):
            recline_state.pop(k)
    return recline_state
def resource_datapreview(self, id, resource_id):
    '''
    Embedded page for a resource data-preview.
    Depending on the type, different previews are loaded. This could be an
    img tag where the image is loaded directly or an iframe that embeds a
    webpage, recline or a pdf preview.
    '''
    context = {
        'model': model,
        'session': model.Session,
        'user': c.user or c.author,
        'auth_user_obj': c.userobj
    }
    try:
        c.resource = get_action('resource_show')(context,
                                                 {'id': resource_id})
        c.package = get_action('package_show')(context, {'id': id})
        data_dict = {'resource': c.resource, 'package': c.package}
        preview_plugin = datapreview.get_preview_plugin(data_dict)
        if preview_plugin is None:
            abort(409, _('No preview has been defined.'))
        preview_plugin.setup_template_variables(context, data_dict)
        c.resource_json = json.dumps(c.resource)
    except NotFound:
        abort(404, _('Resource not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read resource %s') % id)
    else:
        # only render when no exception short-circuited the lookup
        return render(preview_plugin.preview_template(context, data_dict))
| 41.898531
| 114
| 0.549646
|
acfbb96ff89199ef9ede1a0a9d10a85a803fcfc4
| 29,799
|
py
|
Python
|
catkin_tools/verbs/catkin_build/build.py
|
timonegk/catkin_tools
|
70ca62f67dc125e5879864a7a80c261b9a9bc914
|
[
"Apache-2.0"
] | null | null | null |
catkin_tools/verbs/catkin_build/build.py
|
timonegk/catkin_tools
|
70ca62f67dc125e5879864a7a80c261b9a9bc914
|
[
"Apache-2.0"
] | null | null | null |
catkin_tools/verbs/catkin_build/build.py
|
timonegk/catkin_tools
|
70ca62f67dc125e5879864a7a80c261b9a9bc914
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This modules implements the engine for building packages in parallel"""
import os
import pkg_resources
from queue import Queue
import sys
import time
import traceback
import yaml
import asyncio
try:
from catkin_pkg.package import parse_package
from catkin_pkg.package import InvalidPackage
from catkin_pkg.packages import find_packages
from catkin_pkg.topological_order import topological_order_packages
except ImportError as e:
sys.exit(
'Importing "catkin_pkg" failed: %s\nMake sure that you have installed '
'"catkin_pkg", and that it is up to date and on the PYTHONPATH.' % e
)
from catkin_tools.common import FakeLock, expand_glob_package
from catkin_tools.common import format_time_delta
from catkin_tools.common import get_cached_recursive_build_depends_in_workspace
from catkin_tools.common import get_recursive_run_depends_in_workspace
from catkin_tools.common import log
from catkin_tools.common import wide_log
from catkin_tools.execution.controllers import ConsoleStatusController
from catkin_tools.execution.executor import execute_jobs
from catkin_tools.execution.executor import run_until_complete
from catkin_tools.jobs.catkin import create_catkin_build_job
from catkin_tools.jobs.catkin import create_catkin_clean_job
from catkin_tools.jobs.catkin import get_prebuild_package
from .color import clr
# Marker file written into the build space recording the build configuration;
# checked on later builds to detect incompatible configuration changes.
BUILDSPACE_MARKER_FILE = '.catkin_tools.yaml'
# NOTE(review): presumably marks directories to be skipped by catkin —
# not referenced in this module; confirm usage elsewhere.
BUILDSPACE_IGNORE_FILE = 'CATKIN_IGNORE'
# Same marker file name, for the devel space.
DEVELSPACE_MARKER_FILE = '.catkin_tools.yaml'
def determine_packages_to_be_built(packages, context, workspace_packages):
    """Returns list of packages which should be built, and those package's deps.
    :param packages: list of packages to be built, if None all packages are built
    :type packages: list
    :param context: Workspace context
    :type context: :py:class:`catkin_tools.verbs.catkin_build.context.Context`
    :param workspace_packages: dict mapping package paths to parsed package objects
    :type workspace_packages: dict
    :returns: tuple of (packages to be built, those packages' deps, all ordered packages)
    :rtype: tuple
    """
    start = time.time()
    # If there are no packages raise
    if not workspace_packages:
        log("[build] No packages were found in the source space '{0}'".format(context.source_space_abs))
    else:
        wide_log("[build] Found '{0}' packages in {1}."
                 .format(len(workspace_packages), format_time_delta(time.time() - start)))
    # Order the packages by topology
    ordered_packages = topological_order_packages(workspace_packages)
    # Set the packages in the workspace for the context
    context.packages = ordered_packages
    # Determine the packages which should be built
    packages_to_be_built = []
    packages_to_be_built_deps = []
    # Check if topological_order_packages determined any circular dependencies, if so print an error and fail.
    # If this is the case, the last entry of ordered packages is a tuple that starts with nil.
    if ordered_packages and ordered_packages[-1][0] is None:
        guilty_packages = ", ".join(ordered_packages[-1][1:])
        sys.exit("[build] Circular dependency detected in the following packages: {}".format(guilty_packages))
    workspace_package_names = dict([(pkg.name, (path, pkg)) for path, pkg in ordered_packages])
    # Determine the packages to be built
    if packages:
        # First assert all of the packages given are in the workspace
        # NOTE: `packages` is deliberately extended while being iterated, so that
        # glob-pattern expansions and metapackage run-dependencies appended below
        # are themselves validated by later iterations of this loop.
        for package in packages:
            if package not in workspace_package_names:
                # Try whether package is a pattern and matches
                glob_packages = expand_glob_package(package, workspace_package_names)
                if len(glob_packages) > 0:
                    packages.extend(glob_packages)
                    continue
                else:
                    sys.exit("[build] Given package '{0}' is not in the workspace "
                             "and pattern does not match any package".format(package))
            # If metapackage, include run depends which are in the workspace
            package_obj = workspace_package_names[package][1]
            if 'metapackage' in [e.tagname for e in package_obj.exports]:
                for rdep in package_obj.run_depends:
                    if rdep.name in workspace_package_names:
                        packages.append(rdep.name)
        # Limit the packages to be built to just the provided packages
        for pkg_path, package in ordered_packages:
            if package.name in packages:
                packages_to_be_built.append((pkg_path, package))
                # Get the recursive dependencies for each of these packages
                pkg_deps = get_cached_recursive_build_depends_in_workspace(package, ordered_packages)
                packages_to_be_built_deps.extend(pkg_deps)
    else:
        # Only use whitelist when no other packages are specified
        if len(context.whitelist) > 0:
            # Expand glob patterns in whitelist
            whitelist = []
            for whitelisted_package in context.whitelist:
                whitelist.extend(expand_glob_package(whitelisted_package, workspace_package_names))
            packages_to_be_built = [p for p in ordered_packages if (p[1].name in whitelist)]
        else:
            packages_to_be_built = ordered_packages
    # Filter packages with blacklist
    if len(context.blacklist) > 0:
        # Expand glob patterns in blacklist
        blacklist = []
        for blacklisted_package in context.blacklist:
            blacklist.extend(expand_glob_package(blacklisted_package, workspace_package_names))
        # Apply blacklist to packages and dependencies
        # (explicitly requested packages are never blacklisted away)
        packages_to_be_built = [
            (path, pkg) for path, pkg in packages_to_be_built
            if (pkg.name not in blacklist or pkg.name in packages)]
        packages_to_be_built_deps = [
            (path, pkg) for path, pkg in packages_to_be_built_deps
            if (pkg.name not in blacklist or pkg.name in packages)]
    return packages_to_be_built, packages_to_be_built_deps, ordered_packages
def verify_start_with_option(start_with, packages, all_packages, packages_to_be_built):
    """Validate the --start-with package name, exiting the process if invalid.

    Exits when *start_with* names a package that is not in the workspace, or
    one that would not be built with the current package selection. A value
    of ``None`` is accepted and ignored.
    """
    if start_with is None:
        return
    workspace_names = {pkg.name for _, pkg in all_packages}
    selected_names = {pkg.name for _, pkg in packages_to_be_built}
    if start_with not in workspace_names:
        sys.exit("Package given for --start-with, '{0}', is not in the workspace.".format(start_with))
    if start_with not in selected_names:
        sys.exit("Package given for --start-with, '{0}', "
                 "is in the workspace but would not be built with given package arguments: '{1}'"
                 .format(start_with, ' '.join(packages)))
def get_built_unbuilt_packages(context, workspace_packages):
    """Return (built, unbuilt) sets of package names for the workspace.

    Built packages are those with metadata recorded under the context's
    package metadata path; any workspace package whose name is not in that
    set is reported as unbuilt.
    """
    metadata_packages = find_packages(context.package_metadata_path(), warnings=[])
    built = {pkg.name for pkg in metadata_packages.values()}
    unbuilt = {pkg.name for pkg in workspace_packages.values() if pkg.name not in built}
    return built, unbuilt
def build_isolated_workspace(
    context,
    packages=None,
    start_with=None,
    no_deps=False,
    unbuilt=False,
    n_jobs=None,
    force_cmake=False,
    pre_clean=False,
    force_color=False,
    quiet=False,
    interleave_output=False,
    no_status=False,
    limit_status_rate=10.0,
    lock_install=False,
    no_notify=False,
    continue_on_failure=False,
    summarize_build=None,
):
    """Builds a catkin workspace in isolation
    This function will find all of the packages in the source space, start some
    executors, feed them packages to build based on dependencies and topological
    ordering, and then monitor the output of the executors, handling loggings of
    the builds, starting builds, failing builds, and finishing builds of
    packages, and handling the shutdown of the executors when appropriate.
    :param context: context in which to build the catkin workspace
    :type context: :py:class:`catkin_tools.verbs.catkin_build.context.Context`
    :param packages: list of packages to build, by default their dependencies will also be built
    :type packages: list
    :param start_with: package to start with, skipping all packages which proceed it in the topological order
    :type start_with: str
    :param no_deps: If True, the dependencies of packages will not be built first
    :type no_deps: bool
    :param unbuilt: if True, extends *packages* with every workspace package not yet built
    :type unbuilt: bool
    :param n_jobs: number of parallel package build n_jobs
    :type n_jobs: int
    :param force_cmake: forces invocation of CMake if True, default is False
    :type force_cmake: bool
    :param pre_clean: passed through to the build jobs (clean before building)
    :type pre_clean: bool
    :param force_color: forces colored output even if terminal does not support it
    :type force_color: bool
    :param quiet: suppresses the output of commands unless there is an error
    :type quiet: bool
    :param interleave_output: prints the output of commands as they are received
    :type interleave_output: bool
    :param no_status: disables status bar
    :type no_status: bool
    :param limit_status_rate: rate to which status updates are limited; the default 0, places no limit.
    :type limit_status_rate: float
    :param lock_install: causes executors to synchronize on access of install commands
    :type lock_install: bool
    :param no_notify: suppresses system notifications
    :type no_notify: bool
    :param continue_on_failure: do not stop building other jobs on error
    :type continue_on_failure: bool
    :param summarize_build: if True summarizes the build at the end, if None and continue_on_failure is True and the
        the build fails, then the build will be summarized, but if False it never will be summarized.
    :type summarize_build: bool
    :returns: 0 on success, 1 if any job failed, 130 on keyboard interrupt;
        None when nothing was attempted (no unbuilt packages / missing arguments)
    :raises: SystemExit if buildspace is a file or no packages were found in the source space
        or if the provided options are invalid
    """
    pre_start_time = time.time()
    # Assert that the limit_status_rate is valid
    if limit_status_rate < 0:
        sys.exit("[build] @!@{rf}Error:@| The value of --status-rate must be greater than or equal to zero.")
    # Declare a buildspace marker describing the build config for error checking
    buildspace_marker_data = {
        'workspace': context.workspace,
        'profile': context.profile,
        'install': context.install,
        'install_space': context.install_space_abs,
        'devel_space': context.devel_space_abs,
        'source_space': context.source_space_abs}
    # Check build config
    if os.path.exists(os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE)):
        with open(os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE)) as buildspace_marker_file:
            existing_buildspace_marker_data = yaml.safe_load(buildspace_marker_file)
            misconfig_lines = ''
            for (k, v) in existing_buildspace_marker_data.items():
                new_v = buildspace_marker_data.get(k, None)
                if new_v != v:
                    misconfig_lines += (
                        '\n - %s: %s (stored) is not %s (commanded)' %
                        (k, v, new_v))
            if len(misconfig_lines) > 0:
                sys.exit(clr(
                    "\n@{rf}Error:@| Attempting to build a catkin workspace using build space: "
                    "\"%s\" but that build space's most recent configuration "
                    "differs from the commanded one in ways which will cause "
                    "problems. Fix the following options or use @{yf}`catkin "
                    "clean -b`@| to remove the build space: %s" %
                    (context.build_space_abs, misconfig_lines)))
    # Summarize the context
    summary_notes = []
    if force_cmake:
        summary_notes += [clr("@!@{cf}NOTE:@| Forcing CMake to run for each package.")]
    log(context.summary(summary_notes))
    # Make sure there is a build folder and it is not a file
    if os.path.exists(context.build_space_abs):
        if os.path.isfile(context.build_space_abs):
            sys.exit(clr(
                "[build] @{rf}Error:@| " +
                "Build space '{0}' exists but is a file and not a folder."
                .format(context.build_space_abs)))
    # If it doesn't exist, create it
    else:
        log("[build] Creating build space: '{0}'".format(context.build_space_abs))
        os.makedirs(context.build_space_abs)
    # Write the current build config for config error checking
    with open(os.path.join(context.build_space_abs, BUILDSPACE_MARKER_FILE), 'w') as buildspace_marker_file:
        buildspace_marker_file.write(yaml.dump(buildspace_marker_data, default_flow_style=False))
    # Get all the packages in the context source space
    # Suppress warnings since this is a utility function
    try:
        workspace_packages = find_packages(context.source_space_abs, exclude_subspaces=True, warnings=[])
    except InvalidPackage as ex:
        sys.exit(clr("@{rf}Error:@| The file %s is an invalid package.xml file."
                     " See below for details:\n\n%s" % (ex.package_path, ex.msg)))
    # Get packages which have not been built yet
    built_packages, unbuilt_pkgs = get_built_unbuilt_packages(context, workspace_packages)
    # Handle unbuilt packages
    if unbuilt:
        # Check if there are any unbuilt
        if len(unbuilt_pkgs) > 0:
            # Add the unbuilt packages
            packages.extend(list(unbuilt_pkgs))
        else:
            log("[build] No unbuilt packages to be built.")
            return
    # If no_deps is given, ensure packages to build are provided
    if no_deps and packages is None:
        log(clr("[build] @!@{rf}Error:@| With no_deps, you must specify packages to build."))
        return
    # Find list of packages in the workspace
    packages_to_be_built, packages_to_be_built_deps, all_packages = determine_packages_to_be_built(
        packages, context, workspace_packages)
    if not no_deps:
        # Extend packages to be built to include their deps
        packages_to_be_built.extend(packages_to_be_built_deps)
        # Also re-sort
        try:
            packages_to_be_built = topological_order_packages(dict(packages_to_be_built))
        except AttributeError:
            log(clr("[build] @!@{rf}Error:@| The workspace packages have a circular "
                    "dependency, and cannot be built. Please run `catkin list "
                    "--deps` to determine the problematic package(s)."))
            return
    # Check the number of packages to be built
    if len(packages_to_be_built) == 0:
        log(clr('[build] No packages to be built.'))
    # Assert start_with package is in the workspace
    verify_start_with_option(
        start_with,
        packages,
        all_packages,
        packages_to_be_built + packages_to_be_built_deps)
    # Populate .catkin file if we're not installing
    # NOTE: This is done to avoid the Catkin CMake code from doing it,
    # which isn't parallel-safe. Catkin CMake only modifies this file if
    # it's package source path isn't found.
    if not context.install:
        dot_catkin_file_path = os.path.join(context.devel_space_abs, '.catkin')
        # If the file exists, get the current paths
        if os.path.exists(dot_catkin_file_path):
            dot_catkin_paths = open(dot_catkin_file_path, 'r').read().split(';')
        else:
            dot_catkin_paths = []
        # Update the list with the new packages (in topological order)
        packages_to_be_built_paths = [
            os.path.join(context.source_space_abs, path)
            for path, pkg in packages_to_be_built
        ]
        new_dot_catkin_paths = [
            os.path.join(context.source_space_abs, path)
            for path in [os.path.join(context.source_space_abs, path) for path, pkg in all_packages]
            if path in dot_catkin_paths or path in packages_to_be_built_paths
        ]
        # Write the new file if it's different, otherwise, leave it alone
        if dot_catkin_paths == new_dot_catkin_paths:
            wide_log("[build] Package table is up to date.")
        else:
            wide_log("[build] Updating package table.")
            open(dot_catkin_file_path, 'w').write(';'.join(new_dot_catkin_paths))
    # Remove packages before start_with
    if start_with is not None:
        for path, pkg in list(packages_to_be_built):
            if pkg.name != start_with:
                wide_log(clr("@!@{pf}Skipping@| @{gf}---@| @{cf}{}@|").format(pkg.name))
                packages_to_be_built.pop(0)
            else:
                break
    # Get the names of all packages to be built
    packages_to_be_built_names = [p.name for _, p in packages_to_be_built]
    packages_to_be_built_deps_names = [p.name for _, p in packages_to_be_built_deps]
    # Generate prebuild and prebuild clean jobs, if necessary
    prebuild_jobs = {}
    setup_util_present = os.path.exists(os.path.join(context.devel_space_abs, '_setup_util.py'))
    if context.install:
        setup_util_present &= os.path.exists(os.path.join(context.install_space_abs, '_setup_util.py'))
    catkin_present = 'catkin' in (packages_to_be_built_names + packages_to_be_built_deps_names)
    catkin_built = 'catkin' in built_packages
    prebuild_built = 'catkin_tools_prebuild' in built_packages
    # Handle the prebuild jobs if the develspace is linked
    prebuild_pkg_deps = []
    if context.link_devel:
        prebuild_pkg = None
        # Construct a dictionary to lookup catkin package by name
        pkg_dict = dict([(pkg.name, (pth, pkg)) for pth, pkg in all_packages])
        if setup_util_present:
            # Setup util is already there, determine if it needs to be
            # regenerated
            if catkin_built:
                if catkin_present:
                    prebuild_pkg_path, prebuild_pkg = pkg_dict['catkin']
            elif prebuild_built:
                if catkin_present:
                    # TODO: Clean prebuild package
                    ct_prebuild_pkg_path = get_prebuild_package(
                        context.build_space_abs, context.devel_space_abs, force_cmake)
                    ct_prebuild_pkg = parse_package(ct_prebuild_pkg_path)
                    # NOTE(review): the key 'caktin_tools_prebuild' looks misspelled
                    # ('caktin' vs 'catkin'); the removal loop below compares dict keys
                    # against package names, so this entry would never match — confirm.
                    prebuild_jobs['caktin_tools_prebuild'] = create_catkin_clean_job(
                        context,
                        ct_prebuild_pkg,
                        ct_prebuild_pkg_path,
                        dependencies=[],
                        dry_run=False,
                        clean_build=True,
                        clean_devel=True,
                        clean_install=True)
                    # TODO: Build catkin package
                    prebuild_pkg_path, prebuild_pkg = pkg_dict['catkin']
                    prebuild_pkg_deps.append('catkin_tools_prebuild')
            else:
                # How did these get here??
                log("Warning: devel space setup files have an unknown origin.")
        else:
            # Setup util needs to be generated
            if catkin_built or prebuild_built:
                log("Warning: generated devel space setup files have been deleted.")
            if catkin_present:
                # Build catkin package
                prebuild_pkg_path, prebuild_pkg = pkg_dict['catkin']
            else:
                # Generate and buildexplicit prebuild package
                prebuild_pkg_path = get_prebuild_package(context.build_space_abs, context.devel_space_abs, force_cmake)
                prebuild_pkg = parse_package(prebuild_pkg_path)
        if prebuild_pkg is not None:
            # Create the prebuild job
            prebuild_job = create_catkin_build_job(
                context,
                prebuild_pkg,
                prebuild_pkg_path,
                dependencies=prebuild_pkg_deps,
                force_cmake=force_cmake,
                pre_clean=pre_clean,
                prebuild=True)
            # Add the prebuld job
            prebuild_jobs[prebuild_job.jid] = prebuild_job
    # Remove prebuild jobs from normal job list
    for prebuild_jid, prebuild_job in prebuild_jobs.items():
        if prebuild_jid in packages_to_be_built_names:
            packages_to_be_built_names.remove(prebuild_jid)
    # Initial jobs list is just the prebuild jobs
    jobs = [] + list(prebuild_jobs.values())
    # Get all build type plugins
    build_job_creators = {
        ep.name: ep.load()['create_build_job']
        for ep in pkg_resources.iter_entry_points(group='catkin_tools.jobs')
    }
    # It's a problem if there aren't any build types available
    if len(build_job_creators) == 0:
        sys.exit('Error: No build types available. Please check your catkin_tools installation.')
    # Construct jobs
    for pkg_path, pkg in all_packages:
        if pkg.name not in packages_to_be_built_names:
            continue
        # Get actual build deps
        deps = [
            p.name for _, p
            in get_cached_recursive_build_depends_in_workspace(pkg, packages_to_be_built)
            if p.name not in prebuild_jobs
        ]
        # All jobs depend on the prebuild jobs if they're defined
        if not no_deps:
            for j in prebuild_jobs.values():
                deps.append(j.jid)
        # Determine the job parameters
        build_job_kwargs = dict(
            context=context,
            package=pkg,
            package_path=pkg_path,
            dependencies=deps,
            force_cmake=force_cmake,
            pre_clean=pre_clean)
        # Create the job based on the build type
        build_type = pkg.get_build_type()
        if build_type in build_job_creators:
            jobs.append(build_job_creators[build_type](**build_job_kwargs))
        else:
            wide_log(clr(
                "[build] @!@{yf}Warning:@| Skipping package `{}` because it "
                "has an unsupported package build type: `{}`"
            ).format(pkg.name, build_type))
            wide_log(clr("[build] Note: Available build types:"))
            for bt_name in build_job_creators.keys():
                wide_log(clr("[build] - `{}`".format(bt_name)))
    # Queue for communicating status
    event_queue = Queue()
    try:
        # Spin up status output thread
        status_thread = ConsoleStatusController(
            'build',
            ['package', 'packages'],
            jobs,
            n_jobs,
            [pkg.name for _, pkg in context.packages],
            [p for p in context.whitelist],
            [p for p in context.blacklist],
            event_queue,
            show_notifications=not no_notify,
            show_active_status=not no_status,
            show_buffered_stdout=not quiet and not interleave_output,
            show_buffered_stderr=not interleave_output,
            show_live_stdout=interleave_output,
            show_live_stderr=interleave_output,
            show_stage_events=not quiet,
            show_full_summary=(summarize_build is True),
            pre_start_time=pre_start_time,
            active_status_rate=limit_status_rate)
        status_thread.start()
        # Initialize locks
        locks = {
            'installspace': asyncio.Lock() if lock_install else FakeLock()
        }
        # Block while running N jobs asynchronously
        try:
            all_succeeded = run_until_complete(execute_jobs(
                'build',
                jobs,
                locks,
                event_queue,
                context.log_space_abs,
                max_toplevel_jobs=n_jobs,
                continue_on_failure=continue_on_failure,
                continue_without_deps=False))
        except Exception:
            status_thread.keep_running = False
            all_succeeded = False
            status_thread.join(1.0)
            wide_log(str(traceback.format_exc()))
        status_thread.join(1.0)
        # Warn user about new packages
        now_built_packages, now_unbuilt_pkgs = get_built_unbuilt_packages(context, workspace_packages)
        new_pkgs = [p for p in unbuilt_pkgs if p not in now_unbuilt_pkgs]
        if len(new_pkgs) > 0:
            log(clr("[build] @/@!Note:@| @/Workspace packages have changed, "
                    "please re-source setup files to use them.@|"))
        if all_succeeded:
            # Create isolated devel setup if necessary
            if context.isolate_devel:
                if not context.install:
                    _create_unmerged_devel_setup(context, now_unbuilt_pkgs)
                else:
                    _create_unmerged_devel_setup_for_install(context)
            return 0
        else:
            return 1
    except KeyboardInterrupt:
        wide_log("[build] Interrupted by user!")
        event_queue.put(None)
        return 130  # EOWNERDEAD return code is not part of the errno module.
def _create_unmerged_devel_setup(context, unbuilt):
    """Write aggregated setup.sh/.bash/.zsh into an unmerged (isolated) devel space.

    The generated setup.sh sources the per-package setup.sh of every "leaf"
    package (a package nothing else in the workspace depends on) and then the
    setup.sh of their recursive run dependencies.
    """
    # Find all of the leaf packages in the workspace
    # where leaf means that nothing in the workspace depends on it
    ordered_packages = context.packages
    workspace_packages = dict([(p.name, p) for pth, p in ordered_packages])
    # Get all packages which are dependencies of packages in the workspace which have been built
    dependencies = set(sum([
        [d.name for d in p.buildtool_depends + p.build_depends + p.run_depends]
        for _, p in workspace_packages.items()
        if p.name not in unbuilt
    ], []))
    # Compute the packages on which no other packages depend
    leaf_packages = [
        pkg.name
        for name, pkg in workspace_packages.items()
        if pkg.name not in dependencies
    ]
    leaf_paths = [
        os.path.join(context.devel_space_abs, p, 'setup.sh')
        for p in leaf_packages
    ]
    leaf_sources = [
        '. {}'.format(source_path)
        for source_path in leaf_paths
        if os.path.isfile(source_path)
    ]
    # In addition to the leaf packages, we need to source the recursive run depends of the leaf packages
    run_depends_packages = get_recursive_run_depends_in_workspace(
        [workspace_packages[p] for p in leaf_packages], ordered_packages)
    run_depends_paths = [
        os.path.join(context.devel_space_abs, pth, 'setup.sh')
        for pth, pkg in run_depends_packages
    ]
    run_depends_sources = [
        '. {}'.format(source_path)
        for source_path in run_depends_paths
        if os.path.isfile(source_path)
    ]
    # Create the setup.sh file
    setup_sh_path = os.path.join(context.devel_space_abs, 'setup.sh')
    # NOTE(review): leaf_sources[0] raises IndexError when no leaf package has a
    # setup.sh on disk — confirm callers guarantee at least one built leaf package.
    env_file = SETUP_SH_TEMPLATE.format(
        first_source=leaf_sources[0],
        leaf_sources='\n'.join(leaf_sources[1:]),
        run_depends_sources='\n'.join(run_depends_sources)
    )
    with open(setup_sh_path, 'w') as f:
        f.write(env_file)
    # Create setup.bash file
    setup_bash_path = os.path.join(context.devel_space_abs, 'setup.bash')
    with open(setup_bash_path, 'w') as f:
        f.write(SETUP_BASH_TEMPLATE)
    # Create setup.zsh file
    setup_zsh_path = os.path.join(context.devel_space_abs, 'setup.zsh')
    with open(setup_zsh_path, 'w') as f:
        f.write(SETUP_ZSH_TEMPLATE)
def _create_unmerged_devel_setup_for_install(context):
    """Write placeholder setup scripts into the devel space.

    The placeholders inform users that this workspace was built with
    --install and that the install-space setup files should be sourced
    instead.
    """
    for script_name in ('setup.sh', 'setup.bash', 'setup.zsh'):
        script_path = os.path.join(context.devel_space_abs, script_name)
        with open(script_path, 'w') as script_file:
            script_file.write(SETUP_PLACEHOLDER_TEMPLATE)
SETUP_SH_TEMPLATE = """\
#!/usr/bin/env sh
# generated from within catkin_tools/verbs/catkin_build/build.py
# This file is aggregates the many setup.sh files in the various
# unmerged devel spaces in this folder.
# This is occomplished by sourcing each leaf package and all the
# recursive run dependencies of those leaf packages
# Source the first package's setup.sh without the --extend option
{first_source}
# remove all passed in args, resetting $@, $*, $#, $n
shift $#
# set the --extend arg for rest of the packages setup.sh's
set -- $@ "--extend"
# source setup.sh for each of the leaf packages in the workspace
{leaf_sources}
# And now the setup.sh for each of their recursive run dependencies
{run_depends_sources}
"""
SETUP_BASH_TEMPLATE = """\
#!/usr/bin/env bash
# generated from within catkin_tools/verbs/catkin_build/build.py
CATKIN_SHELL=bash
# source setup.sh from same directory as this file
_BUILD_SETUP_DIR=$(builtin cd "`dirname "${BASH_SOURCE[0]}"`" && pwd)
. "$_BUILD_SETUP_DIR/setup.sh"
"""
SETUP_ZSH_TEMPLATE = """\
#!/usr/bin/env zsh
# generated from within catkin_tools/verbs/catkin_build/build.py
CATKIN_SHELL=zsh
# source setup.sh from same directory as this file
_BUILD_SETUP_DIR=$(builtin cd -q "`dirname "$0"`" && pwd)
emulate sh # emulate POSIX
. "$_BUILD_SETUP_DIR/setup.sh"
emulate zsh # back to zsh mode
"""
SETUP_PLACEHOLDER_TEMPLATE = """\
#!/usr/bin/env sh
# generated from within catkin_tools/verbs/catkin_build/build.py
echo "Error: This workspace was built with the '--install' option."
echo " You should source the setup files in the install space instead."
echo " Your environment has not been changed."
"""
| 41.215768
| 119
| 0.664318
|
acfbb97b61ec8fef32fd6d08568aa5de942730b6
| 609
|
py
|
Python
|
src/tests/test_crashing.py
|
pajowu/pretix
|
d6985123b4528f134ead71ce0a4613c9a309fd2c
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-04-25T00:11:00.000Z
|
2020-04-25T00:11:00.000Z
|
src/tests/test_crashing.py
|
pajowu/pretix
|
d6985123b4528f134ead71ce0a4613c9a309fd2c
|
[
"ECL-2.0",
"Apache-2.0"
] | 7
|
2019-07-08T10:29:54.000Z
|
2020-01-08T17:32:07.000Z
|
src/tests/test_crashing.py
|
pajowu/pretix
|
d6985123b4528f134ead71ce0a4613c9a309fd2c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import os
import pytest
@pytest.mark.skip
def test_crash():
    """Deliberately SIGKILL the test process on every other run.

    Alternating runs either create a marker file and kill the current
    process, or find the marker, pass trivially, and clean it up. Used to
    debug the pytest-xdist crash-recovery monkeypatch applied in
    conftest.py; normally skipped so real runs are not disturbed.
    """
    marker = 'crashed.tmp'
    if not os.path.exists(marker):
        with open(marker, 'w') as handle:
            handle.write('hi')
        os.kill(os.getpid(), 9)
        return
    assert 1
    os.remove(marker)
| 29
| 90
| 0.633826
|
acfbbab2078dd91065ab5923c58f02aa3c270c27
| 36,933
|
py
|
Python
|
selfdrive/controls/lib/events.py
|
ellian76/openpilot-0.8.12
|
fed8b3cad14f3f6c9296f7ddea5973f4d8d284a0
|
[
"MIT"
] | null | null | null |
selfdrive/controls/lib/events.py
|
ellian76/openpilot-0.8.12
|
fed8b3cad14f3f6c9296f7ddea5973f4d8d284a0
|
[
"MIT"
] | null | null | null |
selfdrive/controls/lib/events.py
|
ellian76/openpilot-0.8.12
|
fed8b3cad14f3f6c9296f7ddea5973f4d8d284a0
|
[
"MIT"
] | 1
|
2022-01-21T17:21:08.000Z
|
2022-01-21T17:21:08.000Z
|
from enum import IntEnum
from typing import Dict, Union, Callable, Any
from cereal import log, car
import cereal.messaging as messaging
from common.realtime import DT_CTRL
from selfdrive.config import Conversions as CV
from selfdrive.locationd.calibrationd import MIN_SPEED_FILTER
AlertSize = log.ControlsState.AlertSize
AlertStatus = log.ControlsState.AlertStatus
VisualAlert = car.CarControl.HUDControl.VisualAlert
AudibleAlert = car.CarControl.HUDControl.AudibleAlert
EventName = car.CarEvent.EventName
# Alert priorities
class Priority(IntEnum):
    # Higher values win when multiple alerts compete (Alert.__gt__ compares
    # priorities), so ordering here is significant.
    LOWEST = 0
    LOWER = 1
    LOW = 2
    MID = 3
    HIGH = 4
    HIGHEST = 5
# Event types
class ET:
    # String tags describing how an event affects the control state machine;
    # used as the keys of each per-event dict in EVENTS.
    ENABLE = 'enable'
    PRE_ENABLE = 'preEnable'
    NO_ENTRY = 'noEntry'
    WARNING = 'warning'
    USER_DISABLE = 'userDisable'
    SOFT_DISABLE = 'softDisable'
    IMMEDIATE_DISABLE = 'immediateDisable'
    PERMANENT = 'permanent'
# Reverse mapping: EventName enum value -> its string name (e.g. for alert_type).
EVENT_NAME = {v: k for k, v in EventName.schema.enumerants.items()}
class Events:
def __init__(self):
self.events = []
self.static_events = []
self.events_prev = dict.fromkeys(EVENTS.keys(), 0)
@property
def names(self):
return self.events
def __len__(self):
return len(self.events)
def add(self, event_name, static=False):
if static:
self.static_events.append(event_name)
self.events.append(event_name)
def clear(self):
self.events_prev = {k: (v + 1 if k in self.events else 0) for k, v in self.events_prev.items()}
self.events = self.static_events.copy()
def any(self, event_type):
for e in self.events:
if event_type in EVENTS.get(e, {}).keys():
return True
return False
def create_alerts(self, event_types, callback_args=None):
if callback_args is None:
callback_args = []
ret = []
for e in self.events:
types = EVENTS[e].keys()
for et in event_types:
if et in types:
alert = EVENTS[e][et]
if not isinstance(alert, Alert):
alert = alert(*callback_args)
if DT_CTRL * (self.events_prev[e] + 1) >= alert.creation_delay:
alert.alert_type = f"{EVENT_NAME[e]}/{et}"
alert.event_type = et
ret.append(alert)
return ret
def add_from_msg(self, events):
for e in events:
self.events.append(e.name.raw)
def to_msg(self):
ret = []
for event_name in self.events:
event = car.CarEvent.new_message()
event.name = event_name
for event_type in EVENTS.get(event_name, {}).keys():
setattr(event, event_type, True)
ret.append(event)
return ret
# Message localization to Korean: by "로웰" ( https://github.com/crwusiz/openpilot )
class Alert:
  """A single UI alert: two lines of text plus display/sound metadata.

  Instances are held in EVENTS (directly or produced by callbacks) and
  scheduled by Events.create_alerts().
  """
  def __init__(self,
               alert_text_1: str,
               alert_text_2: str,
               alert_status: log.ControlsState.AlertStatus,
               alert_size: log.ControlsState.AlertSize,
               priority: Priority,
               visual_alert: car.CarControl.HUDControl.VisualAlert,
               audible_alert: car.CarControl.HUDControl.AudibleAlert,
               duration: float,
               alert_rate: float = 0.,
               creation_delay: float = 0.):
    self.alert_text_1 = alert_text_1
    self.alert_text_2 = alert_text_2
    self.alert_status = alert_status
    self.alert_size = alert_size
    self.priority = priority
    self.visual_alert = visual_alert
    self.audible_alert = audible_alert
    self.duration = duration
    self.alert_rate = alert_rate
    # Seconds the originating event must persist before the alert is shown
    # (compared against DT_CTRL * frame count in Events.create_alerts).
    self.creation_delay = creation_delay
    # Filled in by Events.create_alerts() as "<event name>/<event type>".
    self.alert_type = ""
    self.event_type = None
  def __str__(self) -> str:
    return f"{self.alert_text_1}/{self.alert_text_2} {self.priority} {self.visual_alert} {self.audible_alert}"
  def __gt__(self, alert2) -> bool:
    # Alerts are ordered solely by priority.
    return self.priority > alert2.priority
class NoEntryAlert(Alert):
  # Mid-size alert shown when openpilot refuses to engage; title is Korean
  # for "openpilot unavailable" (original English text kept below).
  def __init__(self, alert_text_2, visual_alert=VisualAlert.none):
    #super().__init__("openpilot Unavailable", alert_text_2, AlertStatus.normal,
    super().__init__("오픈파일럿 사용불가", alert_text_2, AlertStatus.normal,
                     AlertSize.mid, Priority.LOW, visual_alert,
                     AudibleAlert.refuse, 3.)
class SoftDisableAlert(Alert):
  """Full-screen 'take control immediately' warning during a soft disable."""
  def __init__(self, alert_text_2):
    # Title is Korean for "take the wheel immediately"
    # (original English: "TAKE CONTROL IMMEDIATELY").
    # Fix: removed the stray trailing comma after the call, which made the
    # statement a pointless one-element tuple expression.
    super().__init__("핸들을 즉시 잡아주세요", alert_text_2,
                     AlertStatus.userPrompt, AlertSize.full,
                     Priority.MID, VisualAlert.steerRequired,
                     AudibleAlert.warningSoft, 2.)
class ImmediateDisableAlert(Alert):
  """Highest-priority full-screen warning for an immediate disable."""
  def __init__(self, alert_text_2):
    # Title is Korean for "take the wheel immediately"
    # (original English: "TAKE CONTROL IMMEDIATELY").
    # Fix: removed the stray trailing comma after the call, which made the
    # statement a pointless one-element tuple expression.
    super().__init__("핸들을 즉시 잡아주세요", alert_text_2,
                     AlertStatus.critical, AlertSize.full,
                     Priority.HIGHEST, VisualAlert.steerRequired,
                     AudibleAlert.warningImmediate, 4.)
class EngagementAlert(Alert):
  """Short, text-free chime played on engage/disengage."""
  def __init__(self, audible_alert: car.CarControl.HUDControl.AudibleAlert):
    # Fix: removed the stray trailing comma after the call, which made the
    # statement a pointless one-element tuple expression.
    super().__init__("", "",
                     AlertStatus.normal, AlertSize.none,
                     Priority.MID, VisualAlert.none,
                     audible_alert, .2)
class NormalPermanentAlert(Alert):
  """Persistent informational alert; mid-size if a second line is given, else small."""
  def __init__(self, alert_text_1: str, alert_text_2: str = "", duration: float = 0.2, priority: Priority = Priority.LOWER, creation_delay: float = 0.):
    # Fix: removed the stray trailing comma after the call, which made the
    # statement a pointless one-element tuple expression.
    super().__init__(alert_text_1, alert_text_2,
                     AlertStatus.normal, AlertSize.mid if len(alert_text_2) else AlertSize.small,
                     priority, VisualAlert.none, AudibleAlert.none, duration, creation_delay=creation_delay)
class StartupAlert(Alert):
  """Greeting alert shown at startup.

  The default second line is Korean for "always keep your hands on the
  wheel and eyes on the road".
  """
  def __init__(self, alert_text_1: str, alert_text_2: str = "항상 핸들을 잡고 도로를 주시하세요", alert_status=AlertStatus.normal):
    # Fix: removed the stray trailing comma after the call, which made the
    # statement a pointless one-element tuple expression.
    super().__init__(alert_text_1, alert_text_2,
                     alert_status, AlertSize.mid,
                     Priority.LOWER, VisualAlert.none, AudibleAlert.ready, 5.)
# ********** helper functions **********
def get_display_speed(speed_ms: float, metric: bool) -> str:
  """Format a speed given in m/s as a display string in km/h or mph."""
  factor = CV.MS_TO_KPH if metric else CV.MS_TO_MPH
  unit = 'km/h' if metric else 'mph'
  return f"{int(round(speed_ms * factor))} {unit}"
# ********** alert callback functions **********
def below_engage_speed_alert(CP: car.CarParams, sm: messaging.SubMaster, metric: bool) -> Alert:
  # No-entry alert when the car is below its minimum engage speed.
  # (Text left in English, unlike the sibling alerts below.)
  return NoEntryAlert(f"Speed Below {get_display_speed(CP.minEnableSpeed, metric)}")
def below_steer_speed_alert(CP: car.CarParams, sm: messaging.SubMaster, metric: bool) -> Alert:
  """Warn that steering control is unavailable below the car's minimum steer speed.

  Fix: removed the unused locals ``speed`` and ``unit`` — the message is
  built entirely via get_display_speed().
  """
  # Korean: "steering control is available above {speed}"
  # (original English: "Steer Unavailable Below {speed}").
  return Alert(
    f"{get_display_speed(CP.minSteerSpeed, metric)} 이상의 속도에서 조향제어가능합니다",
    "",
    AlertStatus.userPrompt, AlertSize.small,
    Priority.MID, VisualAlert.steerRequired, AudibleAlert.prompt, 0.4)
def calibration_incomplete_alert(CP: car.CarParams, sm: messaging.SubMaster, metric: bool) -> Alert:
  """Show calibration progress and the speed required to keep calibrating.

  Fix: removed the unused locals ``speed`` and ``unit`` — the message is
  built entirely via get_display_speed().
  """
  # Korean: "calibration in progress: N%" / "drive above {speed}".
  return Alert(
    "캘리브레이션 진행중입니다 : %d%%" % sm['liveCalibration'].calPerc,
    f"속도를 {get_display_speed(MIN_SPEED_FILTER, metric)}이상으로 주행하세요",
    AlertStatus.normal, AlertSize.mid,
    Priority.LOWEST, VisualAlert.none, AudibleAlert.none, .2)
def no_gps_alert(CP: car.CarParams, sm: messaging.SubMaster, metric: bool) -> Alert:
  """Permanent alert for poor GPS reception.

  The second line depends on whether the connected panda has an
  integrated GPS receiver (uno / dos).
  """
  integrated_panda_types = [log.PandaState.PandaType.uno, log.PandaState.PandaType.dos]
  has_integrated_gps = sm['peripheralState'].pandaType in integrated_panda_types
  if has_integrated_gps:
    second_line = "GPS 연결상태 및 안테나를 점검하세요"
  else:
    second_line = "GPS 안테나를 점검하세요"
  return Alert(
    "GPS 수신불량",
    second_line,
    AlertStatus.normal, AlertSize.mid,
    Priority.LOWER, VisualAlert.none, AudibleAlert.none, .2, creation_delay=300.)
def wrong_car_mode_alert(CP: car.CarParams, sm: messaging.SubMaster, metric: bool) -> Alert:
  """NO_ENTRY alert when the car is not in the correct cruise/main-switch mode."""
  # Honda surfaces this condition via the main switch rather than cruise state.
  message = "메인 스위치 OFF" if CP.carName == "honda" else "크루즈 비활성상태"
  return NoEntryAlert(message)
def joystick_alert(CP: car.CarParams, sm: messaging.SubMaster, metric: bool) -> Alert:
  """Debug alert showing the current joystick gas/steer commands as percentages."""
  axes = sm['testJoystick'].axes
  # Guard on >= 2 axes: the previous `if len(axes)` check would raise a
  # ValueError when unpacking `list(axes)[:2]` if exactly one axis were
  # ever published.
  gb, steer = list(axes)[:2] if len(axes) >= 2 else (0., 0.)
  vals = f"Gas: {round(gb * 100.)}%, Steer: {round(steer * 100.)}%"
  #return NormalPermanentAlert("Joystick Mode", vals)
  return NormalPermanentAlert("조이스틱 모드", vals)
def auto_lane_change_alert(CP: car.CarParams, sm: messaging.SubMaster, metric: bool) -> Alert:
  """Countdown warning shown before an automatic lane change starts."""
  seconds_left = sm['lateralPlan'].autoLaneChangeTimer
  countdown_text = "자동차선변경이 %d초 뒤에 시작됩니다" % seconds_left
  return Alert(
    countdown_text,
    "차선의 차량을 확인하세요",
    AlertStatus.normal, AlertSize.mid,
    Priority.LOW, VisualAlert.none, AudibleAlert.promptRepeat, .75, alert_rate=0.75)
# Mapping from EventName -> {event type -> Alert or alert-factory callable}.
# Factory callables are invoked as f(CP, sm, metric) each frame so the alert
# text can reflect live state (speed, calibration progress, ...).
# Consistency fix: the noTarget NO_ENTRY text was the only entry left
# untranslated; it now matches its PERMANENT counterpart.
EVENTS: Dict[int, Dict[str, Union[Alert, Callable[[Any, messaging.SubMaster, bool], Alert]]]] = {
  # ********** events with no alerts **********

  EventName.stockFcw: {},

  # ********** events only containing alerts displayed in all states **********

  EventName.joystickDebug: {
    ET.WARNING: joystick_alert,
    #ET.PERMANENT: NormalPermanentAlert("Joystick Mode"),
    ET.PERMANENT: NormalPermanentAlert("조이스틱 모드"),
  },

  EventName.controlsInitializing: {
    #ET.NO_ENTRY: NoEntryAlert("Controls Initializing"),
    ET.NO_ENTRY: NoEntryAlert("프로세스 초기화중입니다"),
  },

  EventName.startup: {
    #ET.PERMANENT: StartupAlert("Be ready to take over at any time")
    ET.PERMANENT: StartupAlert("오픈파일럿 사용준비 완료")
  },

  EventName.startupMaster: {
    #ET.PERMANENT: StartupAlert("WARNING: This branch is not tested")
    ET.PERMANENT: StartupAlert("오픈파일럿 사용준비 완료")
  },

  # Car is recognized, but marked as dashcam only
  EventName.startupNoControl: {
    #ET.PERMANENT: StartupAlert("Dashcam mode"),
    ET.PERMANENT: StartupAlert("대시캠 모드"),
  },

  # Car is not recognized
  EventName.startupNoCar: {
    #ET.PERMANENT: StartupAlert("Dashcam mode for unsupported car"),
    ET.PERMANENT: StartupAlert("대시캠 모드 : 호환되지않는 차량"),
  },

  EventName.startupNoFw: {
    #ET.PERMANENT: StartupAlert("Car Unrecognized",
    #                           "Check comma power connections",
    ET.PERMANENT: StartupAlert("차량이 인식되지않습니다",
                               "배선연결상태를 확인하세요",
                               alert_status=AlertStatus.userPrompt),
  },

  EventName.dashcamMode: {
    #ET.PERMANENT: NormalPermanentAlert("Dashcam Mode",
    #                                   priority=Priority.LOWEST),
    ET.PERMANENT: NormalPermanentAlert("대시캠 모드",
                                       priority=Priority.LOWEST),
  },

  EventName.invalidLkasSetting: {
    #ET.PERMANENT: NormalPermanentAlert("Stock LKAS is turned on",
    #                                   "Turn off stock LKAS to engage"),
    ET.PERMANENT: NormalPermanentAlert("차량 LKAS 버튼 상태확인",
                                       "차량 LKAS 버튼 OFF후 활성화됩니다"),
  },

  EventName.cruiseMismatch: {
    #ET.PERMANENT: ImmediateDisableAlert("openpilot failed to cancel cruise"),
  },

  # Some features or cars are marked as community features. If openpilot
  # detects the use of a community feature it switches to dashcam mode
  # until these features are allowed using a toggle in settings.
  EventName.communityFeatureDisallowed: {
    #ET.PERMANENT: NormalPermanentAlert("openpilot Not Available",
    #                                   "Enable Community Features in Settings to Engage"),
    ET.PERMANENT: NormalPermanentAlert("커뮤니티 기능 감지됨",
                                       "커뮤니티 기능을 활성화해주세요"),
  },

  # openpilot doesn't recognize the car. This switches openpilot into a
  # read-only mode. This can be solved by adding your fingerprint.
  # See https://github.com/commaai/openpilot/wiki/Fingerprinting for more information
  EventName.carUnrecognized: {
    #ET.PERMANENT: NormalPermanentAlert("Dashcam Mode",
    #                                   "Car Unrecognized",
    ET.PERMANENT: NormalPermanentAlert("대시캠 모드",
                                       "배선연결상태를 확인하세요",
                                       priority=Priority.LOWEST),
  },

  EventName.stockAeb: {
    ET.PERMANENT: Alert(
      #"BRAKE!",
      #"Stock AEB: Risk of Collision",
      "브레이크!",
      "추돌 위험",
      AlertStatus.critical, AlertSize.full,
      Priority.HIGHEST, VisualAlert.fcw, AudibleAlert.none, 2.),
    #ET.NO_ENTRY: NoEntryAlert("Stock AEB: Risk of Collision"),
    ET.NO_ENTRY: NoEntryAlert("AEB: 추돌위험"),
  },

  EventName.fcw: {
    ET.PERMANENT: Alert(
      #"BRAKE!",
      #"Risk of Collision",
      "브레이크!",
      "추돌 위험",
      AlertStatus.critical, AlertSize.full,
      Priority.HIGHEST, VisualAlert.fcw, AudibleAlert.warningSoft, 2.),
  },

  EventName.ldw: {
    ET.PERMANENT: Alert(
      #"TAKE CONTROL",
      #"Lane Departure Detected",
      "핸들을 잡아주세요",
      "차선이탈 감지됨",
      AlertStatus.userPrompt, AlertSize.mid,
      Priority.LOW, VisualAlert.ldw, AudibleAlert.prompt, 3.),
  },

  # ********** events only containing alerts that display while engaged **********

  EventName.gasPressed: {
    ET.PRE_ENABLE: Alert(
      #"openpilot will not brake while gas pressed",
      "가속패달감지시 오픈파일럿은 브레이크를 사용하지않습니다",
      "",
      AlertStatus.normal, AlertSize.small,
      Priority.LOWEST, VisualAlert.none, AudibleAlert.none, .1, creation_delay=1.),
  },

  # openpilot tries to learn certain parameters about your car by observing
  # how the car behaves to steering inputs from both human and openpilot driving.
  # This includes:
  # - steer ratio: gear ratio of the steering rack. Steering angle divided by tire angle
  # - tire stiffness: how much grip your tires have
  # - angle offset: most steering angle sensors are offset and measure a non zero angle when driving straight
  # This alert is thrown when any of these values exceed a sanity check. This can be caused by
  # bad alignment or bad sensor data. If this happens consistently consider creating an issue on GitHub
  EventName.vehicleModelInvalid: {
    #ET.NO_ENTRY: NoEntryAlert("Vehicle Parameter Identification Failed"),
    #ET.SOFT_DISABLE: SoftDisableAlert("Vehicle Parameter Identification Failed"),
    ET.NO_ENTRY: NoEntryAlert("차량 매개변수 식별 오류"),
    ET.SOFT_DISABLE: SoftDisableAlert("차량 매개변수 식별 오류"),
  },

  EventName.steerTempUnavailableSilent: {
    ET.WARNING: Alert(
      #"Steering Temporarily Unavailable",
      "조향제어 일시적으로 사용불가",
      "",
      AlertStatus.userPrompt, AlertSize.small,
      Priority.LOW, VisualAlert.steerRequired, AudibleAlert.prompt, 1.),
  },

  EventName.preDriverDistracted: {
    ET.WARNING: Alert(
      #"KEEP EYES ON ROAD: Driver Distracted",
      "도로를 주시하세요 : 운전자 도로주시 불안",
      "",
      AlertStatus.normal, AlertSize.small,
      Priority.LOW, VisualAlert.none, AudibleAlert.prompt, 1.),
  },

  EventName.promptDriverDistracted: {
    ET.WARNING: Alert(
      #"KEEP EYES ON ROAD",
      #"Driver Distracted",
      "도로를 주시하세요",
      "운전자 도로주시 불안",
      AlertStatus.userPrompt, AlertSize.mid,
      Priority.MID, VisualAlert.steerRequired, AudibleAlert.promptDistracted, .1),
  },

  EventName.driverDistracted: {
    ET.WARNING: Alert(
      #"DISENGAGE IMMEDIATELY",
      #"Driver Distracted",
      "조향제어가 해제됩니다",
      "운전자 도로주시 불안",
      AlertStatus.critical, AlertSize.full,
      Priority.HIGH, VisualAlert.steerRequired, AudibleAlert.warningImmediate, .1),
  },

  EventName.preDriverUnresponsive: {
    ET.WARNING: Alert(
      #"TOUCH STEERING WHEEL: No Face Detected",
      "핸들을 잡아주세요 : 운전자 인식 불가",
      "",
      AlertStatus.normal, AlertSize.small,
      Priority.LOW, VisualAlert.steerRequired, AudibleAlert.prompt, .75, alert_rate=0.75),
  },

  EventName.promptDriverUnresponsive: {
    ET.WARNING: Alert(
      #"TOUCH STEERING WHEEL",
      #"Driver Unresponsive",
      "핸들을 잡아주세요",
      "운전자 응답하지않음",
      AlertStatus.userPrompt, AlertSize.mid,
      Priority.MID, VisualAlert.steerRequired, AudibleAlert.promptDistracted, .1),
  },

  EventName.driverUnresponsive: {
    ET.WARNING: Alert(
      #"DISENGAGE IMMEDIATELY",
      #"Driver Unresponsive",
      "조향제어가 해제됩니다",
      "운전자 응답하지않음",
      AlertStatus.critical, AlertSize.full,
      Priority.HIGH, VisualAlert.steerRequired, AudibleAlert.warningImmediate, .1),
  },

  EventName.manualRestart: {
    ET.WARNING: Alert(
      #"TAKE CONTROL",
      #"Resume Driving Manually",
      "핸들을 잡아주세요",
      "수동으로 재활성하세요",
      AlertStatus.userPrompt, AlertSize.mid,
      Priority.LOW, VisualAlert.none, AudibleAlert.none, .2),
  },

  EventName.resumeRequired: {
    ET.WARNING: Alert(
      #"STOPPED",
      #"Press Resume to Move",
      "앞차량 멈춤",
      "이동하려면 RES버튼을 누르세요",
      AlertStatus.userPrompt, AlertSize.mid,
      Priority.LOW, VisualAlert.none, AudibleAlert.none, .2),
  },

  EventName.belowSteerSpeed: {
    ET.WARNING: below_steer_speed_alert,
  },

  EventName.preLaneChangeLeft: {
    ET.WARNING: Alert(
      #"Steer Left to Start Lane Change Once Safe",
      #"",
      "좌측차선으로 차선을 변경합니다",
      "",
      AlertStatus.normal, AlertSize.small,
      Priority.LOW, VisualAlert.none, AudibleAlert.none, .1, alert_rate=0.75),
  },

  EventName.preLaneChangeRight: {
    ET.WARNING: Alert(
      #"Steer Right to Start Lane Change Once Safe",
      #"",
      "우측차선으로 차선을 변경합니다",
      "",
      AlertStatus.normal, AlertSize.small,
      Priority.LOW, VisualAlert.none, AudibleAlert.none, .1, alert_rate=0.75),
  },

  EventName.laneChangeBlocked: {
    ET.WARNING: Alert(
      #"Car Detected in Blindspot",
      #"",
      "차선에 차량이 감지되니 대기하세요",
      "",
      AlertStatus.userPrompt, AlertSize.small,
      Priority.LOW, VisualAlert.none, AudibleAlert.prompt, .1),
  },

  EventName.laneChange: {
    ET.WARNING: Alert(
      #"Changing Lanes",
      #"",
      "차선을 변경합니다",
      "",
      AlertStatus.normal, AlertSize.small,
      Priority.LOW, VisualAlert.none, AudibleAlert.none, .1),
  },

  EventName.steerSaturated: {
    ET.WARNING: Alert(
      #"TAKE CONTROL",
      #"Turn Exceeds Steering Limit",
      "핸들을 잡아주세요",
      "조향제어 제한을 초과함",
      AlertStatus.userPrompt, AlertSize.mid,
      Priority.LOW, VisualAlert.steerRequired, AudibleAlert.promptRepeat, 1.),
  },

  # Thrown when the fan is driven at >50% but is not rotating
  EventName.fanMalfunction: {
    #ET.PERMANENT: NormalPermanentAlert("Fan Malfunction", "Contact Support"),
    ET.PERMANENT: NormalPermanentAlert("FAN 오작동", "장치를 점검하세요"),
  },

  # Camera is not outputting frames at a constant framerate
  EventName.cameraMalfunction: {
    #ET.PERMANENT: NormalPermanentAlert("Camera Malfunction", "Contact Support"),
    ET.PERMANENT: NormalPermanentAlert("카메라 오작동", "장치를 점검하세요"),
  },

  # Unused
  EventName.gpsMalfunction: {
    #ET.PERMANENT: NormalPermanentAlert("GPS Malfunction", "Contact Support"),
    ET.PERMANENT: NormalPermanentAlert("GPS 오작동", "장치를 점검하세요"),
  },

  # When the GPS position and localizer diverge the localizer is reset to the
  # current GPS position. This alert is thrown when the localizer is reset
  # more often than expected.
  EventName.localizerMalfunction: {
    # ET.PERMANENT: NormalPermanentAlert("Sensor Malfunction", "Contact Support"),
    ET.PERMANENT: NormalPermanentAlert("센서 오작동", "장치를 점검하세요"),
  },

  # ********** events that affect controls state transitions **********

  EventName.pcmEnable: {
    ET.ENABLE: EngagementAlert(AudibleAlert.engage),
  },

  EventName.buttonEnable: {
    ET.ENABLE: EngagementAlert(AudibleAlert.engage),
  },

  EventName.pcmDisable: {
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
  },

  EventName.buttonCancel: {
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
  },

  EventName.brakeHold: {
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
    #ET.NO_ENTRY: NoEntryAlert("Brake Hold Active"),
    ET.NO_ENTRY: NoEntryAlert("브레이크 감지됨"),
  },

  EventName.parkBrake: {
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
    #ET.NO_ENTRY: NoEntryAlert("Park Brake Engaged"),
    ET.NO_ENTRY: NoEntryAlert("주차 브레이크를 해제하세요"),
  },

  EventName.pedalPressed: {
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
    #ET.NO_ENTRY: NoEntryAlert("Pedal Pressed During Attempt",
    ET.NO_ENTRY: NoEntryAlert("브레이크 감지됨",
                              visual_alert=VisualAlert.brakePressed),
  },

  EventName.wrongCarMode: {
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
    ET.NO_ENTRY: wrong_car_mode_alert,
  },

  EventName.wrongCruiseMode: {
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
    #ET.NO_ENTRY: NoEntryAlert("Enable Adaptive Cruise"),
    ET.NO_ENTRY: NoEntryAlert("어뎁티브크루즈를 활성화하세요"),
  },

  EventName.steerTempUnavailable: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Steering Temporarily Unavailable"),
    #ET.NO_ENTRY: NoEntryAlert("Steering Temporarily Unavailable"),
    #ET.SOFT_DISABLE: SoftDisableAlert("조향제어 일시적으로 사용불가"),
    ET.WARNING: Alert(
      "핸들을 잡아주세요",
      "조향제어 일시적으로 사용불가",
      AlertStatus.userPrompt, AlertSize.small,
      Priority.LOW, VisualAlert.steerRequired, AudibleAlert.none, 0.),
    ET.NO_ENTRY: NoEntryAlert("조향제어 일시적으로 사용불가"),
  },

  EventName.outOfSpace: {
    #ET.PERMANENT: NormalPermanentAlert("Out of Storage"),
    #ET.NO_ENTRY: NoEntryAlert("Out of Storage"),
    ET.PERMANENT: NormalPermanentAlert("저장공간 부족"),
    ET.NO_ENTRY: NoEntryAlert("저장공간 부족"),
  },

  EventName.belowEngageSpeed: {
    ET.NO_ENTRY: below_engage_speed_alert,
  },

  EventName.sensorDataInvalid: {
    ET.PERMANENT: Alert(
      #"No Data from Device Sensors",
      #"Reboot your Device",
      "장치 센서 오류",
      "장치를 점검하세요",
      AlertStatus.normal, AlertSize.mid,
      Priority.LOWER, VisualAlert.none, AudibleAlert.none, .2, creation_delay=1.),
    #ET.NO_ENTRY: NoEntryAlert("No Data from Device Sensors"),
    ET.NO_ENTRY: NoEntryAlert("장치 센서 오류"),
  },

  EventName.noGps: {
    ET.PERMANENT: no_gps_alert,
  },

  EventName.soundsUnavailable: {
    #ET.PERMANENT: NormalPermanentAlert("Speaker not found", "Reboot your Device"),
    #ET.NO_ENTRY: NoEntryAlert("Speaker not found"),
    ET.PERMANENT: NormalPermanentAlert("스피커가 감지되지않습니다", "장치를 점검하세요"),
    ET.NO_ENTRY: NoEntryAlert("스피커가 감지되지않습니다"),
  },

  EventName.tooDistracted: {
    #ET.NO_ENTRY: NoEntryAlert("Distraction Level Too High"),
    ET.NO_ENTRY: NoEntryAlert("방해 수준이 너무높습니다"),
  },

  EventName.overheat: {
    #ET.PERMANENT: NormalPermanentAlert("System Overheated"),
    #ET.SOFT_DISABLE: SoftDisableAlert("System Overheated"),
    #ET.NO_ENTRY: NoEntryAlert("System Overheated"),
    ET.PERMANENT: NormalPermanentAlert("장치 과열됨"),
    ET.SOFT_DISABLE: SoftDisableAlert("장치 과열됨"),
    ET.NO_ENTRY: NoEntryAlert("장치 과열됨"),
  },

  EventName.wrongGear: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Gear not D"),
    #ET.NO_ENTRY: NoEntryAlert("Gear not D"),
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
    ET.NO_ENTRY: NoEntryAlert("기어를 [D]로 변경하세요"),
  },

  # This alert is thrown when the calibration angles are outside of the acceptable range.
  # For example if the device is pointed too much to the left or the right.
  # Usually this can only be solved by removing the mount from the windshield completely,
  # and attaching while making sure the device is pointed straight forward and is level.
  # See https://comma.ai/setup for more information
  EventName.calibrationInvalid: {
    #ET.PERMANENT: NormalPermanentAlert("Calibration Invalid", "Remount Device and Recalibrate"),
    #ET.SOFT_DISABLE: SoftDisableAlert("Calibration Invalid: Remount Device & Recalibrate"),
    #ET.NO_ENTRY: NoEntryAlert("Calibration Invalid: Remount Device & Recalibrate"),
    ET.PERMANENT: NormalPermanentAlert("캘리브레이션 오류", "장치 위치변경후 캘리브레이션을 다시하세요"),
    ET.SOFT_DISABLE: SoftDisableAlert("캘리브레이션 오류 : 장치 위치변경후 캘리브레이션을 다시하세요"),
    ET.NO_ENTRY: NoEntryAlert("캘리브레이션 오류 : 장치 위치변경후 캘리브레이션을 다시하세요"),
  },

  EventName.calibrationIncomplete: {
    ET.PERMANENT: calibration_incomplete_alert,
    #ET.SOFT_DISABLE: SoftDisableAlert("Calibration in Progress"),
    #ET.NO_ENTRY: NoEntryAlert("Calibration in Progress"),
    ET.SOFT_DISABLE: SoftDisableAlert("캘리브레이션 진행중입니다"),
    ET.NO_ENTRY: NoEntryAlert("캘리브레이션 진행중입니다"),
  },

  EventName.doorOpen: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Door Open"),
    #ET.NO_ENTRY: NoEntryAlert("Door Open"),
    ET.PERMANENT: Alert(
      "도어 열림",
      "",
      AlertStatus.normal, AlertSize.full,
      Priority.LOWEST, VisualAlert.none, AudibleAlert.none, .2, creation_delay=0.5),
    ET.USER_DISABLE: EngagementAlert(AudibleAlert.disengage),
    ET.NO_ENTRY: NoEntryAlert("도어 열림"),
  },

  EventName.seatbeltNotLatched: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Seatbelt Unlatched"),
    #ET.NO_ENTRY: NoEntryAlert("Seatbelt Unlatched"),
    ET.PERMANENT: Alert(
      "안전벨트 미착용",
      "",
      AlertStatus.normal, AlertSize.full,
      Priority.LOWEST, VisualAlert.none, AudibleAlert.none, .2, creation_delay=0.5),
    ET.SOFT_DISABLE: SoftDisableAlert("안전벨트를 착용해주세요"),
    ET.NO_ENTRY: NoEntryAlert("안전벨트를 착용해주세요"),
  },

  EventName.espDisabled: {
    #ET.SOFT_DISABLE: SoftDisableAlert("ESP Off"),
    #ET.NO_ENTRY: NoEntryAlert("ESP Off"),
    ET.SOFT_DISABLE: SoftDisableAlert("ESP 꺼짐"),
    ET.NO_ENTRY: NoEntryAlert("ESP 꺼짐"),
  },

  EventName.lowBattery: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Low Battery"),
    #ET.NO_ENTRY: NoEntryAlert("Low Battery"),
    ET.SOFT_DISABLE: SoftDisableAlert("배터리 부족"),
    ET.NO_ENTRY: NoEntryAlert("배터리 부족"),
  },

  # Different openpilot services communicate between each other at a certain
  # interval. If communication does not follow the regular schedule this alert
  # is thrown. This can mean a service crashed, did not broadcast a message for
  # ten times the regular interval, or the average interval is more than 10% too high.
  EventName.commIssue: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Communication Issue between Processes"),
    #ET.NO_ENTRY: NoEntryAlert("Communication Issue between Processes"),
    ET.SOFT_DISABLE: SoftDisableAlert("장치 프로세스 동작오류"),
    ET.NO_ENTRY: NoEntryAlert("장치 프로세스 동작오류"),
  },

  # Thrown when manager detects a service exited unexpectedly while driving
  EventName.processNotRunning: {
    #ET.NO_ENTRY: NoEntryAlert("System Malfunction: Reboot Your Device"),
    ET.NO_ENTRY: NoEntryAlert("시스템 오작동: 장치를 재부팅 하세요"),
  },

  EventName.radarFault: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Radar Error: Restart the Car"),
    #ET.NO_ENTRY: NoEntryAlert("Radar Error: Restart the Car"),
    ET.SOFT_DISABLE: SoftDisableAlert("레이더 오류 : 차량을 재가동하세요"),
    ET.NO_ENTRY: NoEntryAlert("레이더 오류 : 차량을 재가동하세요"),
  },

  # Every frame from the camera should be processed by the model. If modeld
  # is not processing frames fast enough they have to be dropped. This alert is
  # thrown when over 20% of frames are dropped.
  EventName.modeldLagging: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Driving model lagging"),
    #ET.NO_ENTRY: NoEntryAlert("Driving model lagging"),
    ET.SOFT_DISABLE: SoftDisableAlert("주행모델 지연됨"),
    ET.NO_ENTRY: NoEntryAlert("주행모델 지연됨"),
  },

  # Besides predicting the path, lane lines and lead car data the model also
  # predicts the current velocity and rotation speed of the car. If the model is
  # very uncertain about the current velocity while the car is moving, this
  # usually means the model has trouble understanding the scene. This is used
  # as a heuristic to warn the driver.
  EventName.posenetInvalid: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Model Output Uncertain"),
    #ET.NO_ENTRY: NoEntryAlert("Model Output Uncertain"),
    ET.SOFT_DISABLE: SoftDisableAlert("차선인식상태가 좋지않으니 주의운전하세요"),
    ET.NO_ENTRY: NoEntryAlert("차선인식상태가 좋지않으니 주의운전하세요"),
  },

  # When the localizer detects an acceleration of more than 40 m/s^2 (~4G) we
  # alert the driver the device might have fallen from the windshield.
  EventName.deviceFalling: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Device Fell Off Mount"),
    #ET.NO_ENTRY: NoEntryAlert("Device Fell Off Mount"),
    ET.SOFT_DISABLE: SoftDisableAlert("장치가 마운트에서 떨어짐"),
    ET.NO_ENTRY: NoEntryAlert("장치가 마운트에서 떨어짐"),
  },

  EventName.lowMemory: {
    #ET.SOFT_DISABLE: SoftDisableAlert("Low Memory: Reboot Your Device"),
    #ET.PERMANENT: NormalPermanentAlert("Low Memory", "Reboot your Device"),
    #ET.NO_ENTRY: NoEntryAlert("Low Memory: Reboot Your Device"),
    ET.SOFT_DISABLE: SoftDisableAlert("메모리 부족 : 장치를 재가동하세요"),
    ET.PERMANENT: NormalPermanentAlert("메모리 부족", "장치를 재가동하세요"),
    ET.NO_ENTRY: NoEntryAlert("메모리 부족 : 장치를 재가동하세요"),
  },

  EventName.highCpuUsage: {
    #ET.SOFT_DISABLE: SoftDisableAlert("System Malfunction: Reboot Your Device"),
    #ET.PERMANENT: NormalPermanentAlert("System Malfunction", "Reboot your Device"),
    #ET.NO_ENTRY: NoEntryAlert("System Malfunction: Reboot Your Device"),
    ET.NO_ENTRY: NoEntryAlert("시스템 오작동: 장치를 재부팅 하세요"),
  },

  EventName.accFaulted: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("Cruise Faulted"),
    #ET.PERMANENT: NormalPermanentAlert("Cruise Faulted", ""),
    #ET.NO_ENTRY: NoEntryAlert("Cruise Faulted"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("크루즈 오류"),
    ET.PERMANENT: NormalPermanentAlert("크루즈 오류", ""),
    ET.NO_ENTRY: NoEntryAlert("크루즈 오류"),
  },

  EventName.controlsMismatch: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("Controls Mismatch"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("컨트롤 불일치"),
  },

  EventName.roadCameraError: {
    #ET.PERMANENT: NormalPermanentAlert("Camera Error",
    ET.PERMANENT: NormalPermanentAlert("주행 카메라 오류",
                                       duration=1.,
                                       creation_delay=30.),
  },

  EventName.driverCameraError: {
    #ET.PERMANENT: NormalPermanentAlert("Camera Error",
    ET.PERMANENT: NormalPermanentAlert("운전자 카메라 오류",
                                       duration=1.,
                                       creation_delay=30.),
  },

  EventName.wideRoadCameraError: {
    #ET.PERMANENT: NormalPermanentAlert("Camera Error",
    ET.PERMANENT: NormalPermanentAlert("와이드 주행카메라 오류",
                                       duration=1.,
                                       creation_delay=30.),
  },

  # Sometimes the USB stack on the device can get into a bad state
  # causing the connection to the panda to be lost
  EventName.usbError: {
    #ET.SOFT_DISABLE: SoftDisableAlert("USB Error: Reboot Your Device"),
    #ET.PERMANENT: NormalPermanentAlert("USB Error: Reboot Your Device", ""),
    #ET.NO_ENTRY: NoEntryAlert("USB Error: Reboot Your Device"),
    ET.SOFT_DISABLE: SoftDisableAlert("USB 오류: 장치를 재부팅하세요"),
    ET.PERMANENT: NormalPermanentAlert("USB 오류: 장치를 재부팅하세요", ""),
    ET.NO_ENTRY: NoEntryAlert("USB 오류: 장치를 재부팅하세요"),
  },

  # This alert can be thrown for the following reasons:
  # - No CAN data received at all
  # - CAN data is received, but some message are not received at the right frequency
  # If you're not writing a new car port, this is usually cause by faulty wiring
  EventName.canError: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("CAN Error: Check Connections"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("CAN 오류 : 장치를 점검하세요"),
    ET.PERMANENT: Alert(
      #"CAN Error: Check Connections",
      "CAN 오류 : 장치를 점검하세요",
      "",
      AlertStatus.normal, AlertSize.small,
      Priority.LOW, VisualAlert.none, AudibleAlert.none, 1., creation_delay=1.),
    #ET.NO_ENTRY: NoEntryAlert("CAN Error: Check Connections"),
    ET.NO_ENTRY: NoEntryAlert("CAN 오류 : 장치를 점검하세요"),
  },

  EventName.steerUnavailable: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("LKAS Fault: Restart the Car"),
    #ET.PERMANENT: NormalPermanentAlert("LKAS Fault: Restart the car to engage"),
    #ET.NO_ENTRY: NoEntryAlert("LKAS Fault: Restart the Car"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("LKAS 오류 : 차량을 재가동하세요"),
    ET.PERMANENT: NormalPermanentAlert("LKAS 오류 : 차량을 재가동하세요"),
    ET.NO_ENTRY: NoEntryAlert("LKAS 오류 : 차량을 재가동하세요"),
  },

  EventName.brakeUnavailable: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("Cruise Fault: Restart the Car"),
    #ET.PERMANENT: NormalPermanentAlert("Cruise Fault: Restart the car to engage"),
    #ET.NO_ENTRY: NoEntryAlert("Cruise Fault: Restart the Car"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("크루즈 오류 : 차량을 재가동하세요"),
    ET.PERMANENT: NormalPermanentAlert("크루즈 오류 : 차량을 재가동하세요"),
    ET.NO_ENTRY: NoEntryAlert("크루즈 오류 : 차량을 재가동하세요"),
  },

  EventName.reverseGear: {
    ET.PERMANENT: Alert(
      #"Reverse\nGear",
      "기어 [R] 상태",
      "",
      AlertStatus.normal, AlertSize.full,
      Priority.LOWEST, VisualAlert.none, AudibleAlert.none, .2, creation_delay=0.5),
    #ET.SOFT_DISABLE: SoftDisableAlert("Reverse Gear"),
    #ET.NO_ENTRY: NoEntryAlert("Reverse Gear"),
    ET.USER_DISABLE: SoftDisableAlert("기어 [R] 상태"),
    ET.NO_ENTRY: NoEntryAlert("기어 [R] 상태"),
  },

  # On cars that use stock ACC the car can decide to cancel ACC for various reasons.
  # When this happens we can no long control the car so the user needs to be warned immediately.
  EventName.cruiseDisabled: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("Cruise Is Off"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("크루즈 꺼짐"),
  },

  # For planning the trajectory Model Predictive Control (MPC) is used. This is
  # an optimization algorithm that is not guaranteed to find a feasible solution.
  # If no solution is found or the solution has a very high cost this alert is thrown.
  EventName.plannerError: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("Planner Solution Error"),
    #ET.NO_ENTRY: NoEntryAlert("Planner Solution Error"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("플래너 솔루션 오류"),
    ET.NO_ENTRY: NoEntryAlert("플래너 솔루션 오류"),
  },

  # When the relay in the harness box opens the CAN bus between the LKAS camera
  # and the rest of the car is separated. When messages from the LKAS camera
  # are received on the car side this usually means the relay hasn't opened correctly
  # and this alert is thrown.
  EventName.relayMalfunction: {
    #ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("Harness Malfunction"),
    #ET.PERMANENT: NormalPermanentAlert("Harness Malfunction", "Check Hardware"),
    #ET.NO_ENTRY: NoEntryAlert("Harness Malfunction"),
    ET.IMMEDIATE_DISABLE: ImmediateDisableAlert("하네스 오작동"),
    ET.PERMANENT: NormalPermanentAlert("하네스 오작동", "장치를 점검하세요"),
    ET.NO_ENTRY: NoEntryAlert("하네스 오작동"),
  },

  EventName.noTarget: {
    ET.IMMEDIATE_DISABLE: Alert(
      #"openpilot Canceled",
      #"No close lead car",
      "오픈파일럿 사용불가",
      "근접 앞차량이 없습니다",
      AlertStatus.normal, AlertSize.mid,
      Priority.HIGH, VisualAlert.none, AudibleAlert.disengage, 3.),
    # Translated for consistency with the PERMANENT alert above (was the only
    # untranslated user-facing string left in this table).
    #ET.NO_ENTRY: NoEntryAlert("No Close Lead Car"),
    ET.NO_ENTRY: NoEntryAlert("근접 앞차량이 없습니다"),
  },

  EventName.speedTooLow: {
    ET.IMMEDIATE_DISABLE: Alert(
      #"openpilot Canceled",
      #"Speed too low",
      "오픈파일럿 사용불가",
      "속도를 높이고 재가동하세요",
      AlertStatus.normal, AlertSize.mid,
      Priority.HIGH, VisualAlert.none, AudibleAlert.disengage, 3.),
  },

  # When the car is driving faster than most cars in the training data the model outputs can be unpredictable
  EventName.speedTooHigh: {
    ET.WARNING: Alert(
      #"Speed Too High",
      #"Model uncertain at this speed",
      "속도가 너무 높습니다",
      "속도를 줄여주세요",
      AlertStatus.userPrompt, AlertSize.mid,
      Priority.HIGH, VisualAlert.steerRequired, AudibleAlert.promptRepeat, 4.),
    #ET.NO_ENTRY: NoEntryAlert("Slow down to engage"),
    ET.NO_ENTRY: NoEntryAlert("속도를 줄이고 재가동하세요"),
  },

  EventName.lowSpeedLockout: {
    #ET.PERMANENT: NormalPermanentAlert("Cruise Fault: Restart the car to engage"),
    #ET.NO_ENTRY: NoEntryAlert("Cruise Fault: Restart the Car"),
    ET.PERMANENT: NormalPermanentAlert("크루즈 오류 : 차량을 재가동하세요"),
    ET.NO_ENTRY: NoEntryAlert("크루즈 오류 : 차량을 재가동하세요"),
  },

  EventName.turningIndicatorOn: {
    ET.WARNING: Alert(
      "방향지시등 동작중에는 핸들을 잡아주세요",
      "",
      AlertStatus.userPrompt, AlertSize.small,
      Priority.LOW, VisualAlert.none, AudibleAlert.none, .2),
  },

  EventName.autoLaneChange: {
    ET.WARNING: auto_lane_change_alert,
  },

  EventName.slowingDownSpeed: {
    #ET.PERMANENT: Alert("Slowing down","", AlertStatus.normal, AlertSize.small,
    ET.PERMANENT: Alert("속도를 줄이고 있습니다","", AlertStatus.normal, AlertSize.small,
                        Priority.MID, VisualAlert.none, AudibleAlert.none, .1),
  },

  EventName.slowingDownSpeedSound: {
    #ET.PERMANENT: Alert("Slowing down","", AlertStatus.normal, AlertSize.small,
    ET.PERMANENT: Alert("속도를 줄이고 있습니다", "", AlertStatus.normal, AlertSize.small,
                        Priority.HIGH, VisualAlert.none, AudibleAlert.slowingDownSpeed, 2.),
  },

}
| 36.567327
| 152
| 0.680042
|
acfbbbd1fe0b771ad4a1584e61c03158e4baa65f
| 2,245
|
py
|
Python
|
backend/testappauto83_dev_23498/urls.py
|
crowdbotics-dev/testappauto83-dev-23498
|
9ef4b6ebb0fc2e74b2d571719f6e3c722b67d59f
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/testappauto83_dev_23498/urls.py
|
crowdbotics-dev/testappauto83-dev-23498
|
9ef4b6ebb0fc2e74b2d571719f6e3c722b67d59f
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/testappauto83_dev_23498/urls.py
|
crowdbotics-dev/testappauto83-dev-23498
|
9ef4b6ebb0fc2e74b2d571719f6e3c722b67d59f
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
"""testappauto83_dev_23498 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
# Core URL routes. NOTE: order matters — Django uses the first pattern that
# matches, so the SPA catch-all below must stay last.
urlpatterns = [
    path("", include("home.urls")),
    path("accounts/", include("allauth.urls")),
    path("modules/", include("modules.urls")),
    path("api/v1/", include("home.api.v1.urls")),
    path("admin/", admin.site.urls),
    path("users/", include("users.urls", namespace="users")),
    path("rest-auth/", include("rest_auth.urls")),
    # Override email confirm to use allauth's HTML view instead of rest_auth's API view
    path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
    path("rest-auth/registration/", include("rest_auth.registration.urls")),
]

# Branding for the Django admin site.
admin.site.site_header = "TestAppAuto83"
admin.site.site_title = "TestAppAuto83 Admin Portal"
admin.site.index_title = "TestAppAuto83 Admin"

# swagger
# drf-yasg schema view: `public=True` includes all endpoints in the schema,
# but viewing the docs still requires an authenticated user.
api_info = openapi.Info(
    title="TestAppAuto83 API",
    default_version="v1",
    description="API documentation for TestAppAuto83 App",
)
schema_view = get_schema_view(
    api_info,
    public=True,
    permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
    path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]

# Serve the SPA index at the root and as a catch-all for any unmatched route
# (client-side routing handles the rest); appended last so real routes win.
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
                        TemplateView.as_view(template_name='index.html'))]
| 35.634921
| 87
| 0.716704
|
acfbc04263db827c1d150670b38db041a9c95395
| 6,113
|
py
|
Python
|
tensorflow_datasets/core/community/register_path.py
|
shashwat9kumar/datasets
|
99b055408025f8e934fcbb0fc054488aa087ebfb
|
[
"Apache-2.0"
] | 1
|
2019-07-19T15:01:45.000Z
|
2019-07-19T15:01:45.000Z
|
tensorflow_datasets/core/community/register_path.py
|
shashwat9kumar/datasets
|
99b055408025f8e934fcbb0fc054488aa087ebfb
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_datasets/core/community/register_path.py
|
shashwat9kumar/datasets
|
99b055408025f8e934fcbb0fc054488aa087ebfb
|
[
"Apache-2.0"
] | 1
|
2021-08-02T22:12:40.000Z
|
2021-08-02T22:12:40.000Z
|
# coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Location-based register."""
import concurrent.futures
import difflib
from typing import Any, Dict, FrozenSet, Iterator, List, Type
import tensorflow as tf
from tensorflow_datasets.core import dataset_builder
from tensorflow_datasets.core import naming
from tensorflow_datasets.core import read_only_builder
from tensorflow_datasets.core import registered
from tensorflow_datasets.core import utils
from tensorflow_datasets.core.community import register_base
import toml
class DataDirRegister(register_base.BaseRegister):
  """Dataset register based on generated `data_dir` paths.

  This register map `namespace` strings to `data_dir` paths. Mapping is defined
  in `.toml` format:

  ```toml
  [Namespaces]
  kaggle='/path/to/datasets/'
  tensorflow_graphics='gs://tensorflow-graphics/datasets'
  ```

  Usage:

  ```python
  register = DataDirRegister(path='/path/to/namespaces.toml')

  # List all registered datasets: ['kaggle:ds0', 'kaggle:ds1',...]
  register.list_builders()

  # Load a specific dataset
  builder = register.builder('tensorflow_graphics:shapenet')
  ```
  """

  def __init__(self, path: utils.PathLike):
    """Constructor.

    Args:
      path: Path to the register files containing the mapping namespace ->
        data_dir
    """
    # Only the path is stored here; the .toml file is read lazily on first
    # access of `_ns2data_dir` (see memoized_property below).
    self._path: utils.ReadOnlyPath = utils.as_path(path)

  @utils.memoized_property
  def _ns2data_dir(self) -> Dict[str, utils.ReadWritePath]:
    """Mapping `namespace` -> `data_dir`."""
    # Lazy-load the namespaces the first requested time.
    config = toml.loads(self._path.read_text())
    return {
        namespace: utils.as_path(path)
        for namespace, path in config['Namespaces'].items()
    }

  @utils.memoized_property
  def namespaces(self) -> FrozenSet[str]:
    """Available namespaces."""
    return frozenset(self._ns2data_dir)

  def list_builders(self) -> List[str]:
    """Returns the list of registered builders."""
    return sorted(_iter_builder_names(self._ns2data_dir))

  def builder_cls(
      self,
      name: utils.DatasetName,
  ) -> Type[dataset_builder.DatasetBuilder]:
    """Returns the builder classes."""
    # Unconditionally raises: data_dir-based datasets only exist as generated
    # files, so there is no source builder class to return. An unknown
    # namespace gets the more specific DatasetNotFoundError first.
    if name.namespace not in self.namespaces:  # pylint: disable=unsupported-membership-test
      raise registered.DatasetNotFoundError(
          f'Namespace {name.namespace} not found. Should be one of: '
          f'{sorted(self.namespaces)}')
    raise NotImplementedError(
        'builder_cls does not support data_dir-based community datasets. Got: '
        f'{name}')

  def builder(
      self,
      name: utils.DatasetName,
      **builder_kwargs: Any,
  ) -> dataset_builder.DatasetBuilder:
    """Returns the dataset builder."""
    # `data_dir` is determined by the namespace mapping, so callers may not
    # override it.
    data_dir = builder_kwargs.pop('data_dir', None)
    if data_dir:
      raise ValueError(
          '`data_dir` cannot be set for data_dir-based community datasets. '
          f'Dataset should already be generated. Got: {data_dir}')
    if name.namespace is None:
      raise AssertionError(f'No namespace found: {name}')
    if name.namespace not in self._ns2data_dir:  # pylint: disable=unsupported-membership-test
      # Suggest the closest registered namespace on a typo.
      close_matches = difflib.get_close_matches(
          name.namespace, self._ns2data_dir, n=1)
      hint = f'\nDid you mean: {close_matches[0]}' if close_matches else ''
      raise KeyError(f'Namespace `{name.namespace}` for `{name}` not found. '
                     f'Should be one of {sorted(self._ns2data_dir)}{hint}')
    return read_only_builder.builder_from_files(
        name.name,
        data_dir=self._ns2data_dir[name.namespace],
        **builder_kwargs,
    )

  def get_builder_root_dir(self,
                           name: utils.DatasetName) -> utils.ReadWritePath:
    """Returns root dir of the generated builder (without version/config)."""
    return self._ns2data_dir[name.namespace] / name.name
def _maybe_iterdir(path: utils.ReadOnlyPath) -> Iterator[utils.ReadOnlyPath]:
  """Yields the entries of `path`, or nothing if the path is unreadable.

  Behaves like `path.iterdir()`, except that a missing or inaccessible
  directory simply produces an empty iteration instead of raising.
  """
  # EAFP: attempting the listing directly avoids one extra `.exists()`
  # RPC call per namespace.
  try:
    yield from path.iterdir()
  except (
      FileNotFoundError,
      tf.errors.NotFoundError,
      tf.errors.PermissionDeniedError,
  ):
    pass
def _iter_builder_names(
    ns2data_dir: Dict[str, utils.ReadOnlyPath],) -> Iterator[str]:
  """Yields the `ns:name` dataset names found under each namespace dir."""
  skip_dirnames = frozenset(('downloads',))

  def _is_dataset_dirname(dirname: str) -> bool:
    # `data_dir` may contain helper folders (e.g. `downloads/`) which are
    # not datasets; filter those out by name.
    return (dirname not in skip_dirnames and
            naming.is_valid_dataset_name(dirname))

  def _names_for_namespace(
      namespace: str,
      data_dir: utils.ReadOnlyPath,
  ) -> List[str]:
    # Note: `data_dir` might contain non-dataset folders, but checking each
    # individual dataset would cost a significant performance drop, so any
    # child with a valid dataset name is accepted (acceptable trade-off).
    return [
        str(utils.DatasetName(namespace=namespace, name=child.name))
        for child in _maybe_iterdir(data_dir)
        if _is_dataset_dirname(child.name)
    ]

  # Listing every namespace sequentially would serialize many (possibly
  # remote) filesystem calls, so fan out over a small thread pool.
  with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
    pending = [
        pool.submit(_names_for_namespace, namespace, data_dir)
        for namespace, data_dir in ns2data_dir.items()
    ]
    for done in concurrent.futures.as_completed(pending):
      yield from done.result()
| 34.931429
| 94
| 0.708327
|
acfbc11945bf51f88d0ee0f5dcb6aa7c8728d883
| 1,126
|
py
|
Python
|
diamond-2-brownie/scripts/deploy.py
|
AgentFUD/evm-smart-contract-upgrade-techniques
|
d1d01ac25b9b1e98971dc194532aa028525242db
|
[
"MIT"
] | 1
|
2021-11-25T04:53:13.000Z
|
2021-11-25T04:53:13.000Z
|
diamond-3-brownie/scripts/deploy.py
|
AgentFUD/evm-smart-contract-upgrade-techniques
|
d1d01ac25b9b1e98971dc194532aa028525242db
|
[
"MIT"
] | null | null | null |
diamond-3-brownie/scripts/deploy.py
|
AgentFUD/evm-smart-contract-upgrade-techniques
|
d1d01ac25b9b1e98971dc194532aa028525242db
|
[
"MIT"
] | null | null | null |
from brownie import Contract, accounts
from brownie import (
Diamond,
DiamondCutFacet,
DiamondLoupeFacet,
OwnershipFacet,
DiamondInit,
)
# from brownie import Test1Facet, Test2Facet
from scripts.helpers import facetCutAction, getSelectors
def main():
    """Deploy the diamond with its core facets and run the init diamondCut."""
    deployer = accounts[0]

    # Deploy the individual facet contracts first.
    cut_facet = DiamondCutFacet.deploy({"from": deployer})
    loupe_facet = DiamondLoupeFacet.deploy({"from": deployer})
    ownership_facet = OwnershipFacet.deploy({"from": deployer})
    init_contract = DiamondInit.deploy({"from": deployer})

    # The diamond only knows the cut facet at construction time; the rest
    # is added through a diamondCut call below.
    diamond = Diamond.deploy(deployer, cut_facet.address, {"from": deployer})

    facet_cuts = [
        [
            loupe_facet.address,
            facetCutAction["Add"],
            getSelectors(DiamondLoupeFacet),
        ],
        [ownership_facet.address, facetCutAction["Add"], getSelectors(OwnershipFacet)],
    ]

    # Talk to the DiamondCutFacet through the diamond's own address.
    diamond_cut = Contract.from_abi("DiamondCut", diamond.address, cut_facet.abi)
    init_selectors = getSelectors(DiamondInit)
    diamond_cut.diamondCut(facet_cuts, init_contract.address, init_selectors[0], {"from": deployer})
| 30.432432
| 86
| 0.698046
|
acfbc121c32c9799bb51e5a96451c3d8b2d8745d
| 20,442
|
py
|
Python
|
tensorpack/models/batch_norm.py
|
layolu/tensorpack
|
97360e5b8ca9ce03d8a18b3abef5abfc92cb9907
|
[
"Apache-2.0"
] | 1
|
2019-11-21T07:53:39.000Z
|
2019-11-21T07:53:39.000Z
|
tensorpack/models/batch_norm.py
|
layolu/tensorpack
|
97360e5b8ca9ce03d8a18b3abef5abfc92cb9907
|
[
"Apache-2.0"
] | null | null | null |
tensorpack/models/batch_norm.py
|
layolu/tensorpack
|
97360e5b8ca9ce03d8a18b3abef5abfc92cb9907
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# File: batch_norm.py
import re
from ..compat import tfv1 as tf # this should be avoided first in model code
from tensorflow.python.training import moving_averages
from ..tfutils.collection import backup_collection, restore_collection
from ..tfutils.common import get_tf_version_tuple
from ..tfutils.tower import get_current_tower_context
from ..utils import logger
from ..utils.argtools import get_data_format, log_once
from ..utils.develop import log_deprecated
from .common import VariableHolder, layer_register
from .tflayer import convert_to_tflayer_args, rename_get_variable
from .utils import disable_autograph
__all__ = ['BatchNorm', 'BatchRenorm']
# decay: being too close to 1 leads to slow start-up. torch use 0.9.
# eps: torch: 1e-5. Lasagne: 1e-4
def get_bn_variables(n_out, use_scale, use_bias, beta_init, gamma_init):
    """Create the BatchNorm variables for a layer with `n_out` channels.

    Returns:
        (beta, gamma, moving_mean, moving_var) tensors/variables. When the
        affine transform is disabled, `beta`/`gamma` fall back to constant
        tensors so callers can use them uniformly (x * gamma + beta).
    """
    beta = (tf.get_variable('beta', [n_out], initializer=beta_init)
            if use_bias else tf.zeros([n_out], name='beta'))
    gamma = (tf.get_variable('gamma', [n_out], initializer=gamma_init)
             if use_scale else tf.ones([n_out], name='gamma'))
    # EMA statistics are updated outside the optimizer, hence non-trainable.
    moving_mean = tf.get_variable(
        'mean/EMA', [n_out],
        initializer=tf.constant_initializer(), trainable=False)
    moving_var = tf.get_variable(
        'variance/EMA', [n_out],
        initializer=tf.constant_initializer(1.0), trainable=False)
    # Only the main tower registers the EMAs as model variables, avoiding
    # duplicate collection entries under multi-GPU replication.
    if get_current_tower_context().is_main_training_tower:
        tf.add_to_collection(tf.GraphKeys.MODEL_VARIABLES, moving_mean)
        tf.add_to_collection(tf.GraphKeys.MODEL_VARIABLES, moving_var)
    return beta, gamma, moving_mean, moving_var
def internal_update_bn_ema(xn, batch_mean, batch_var,
                           moving_mean, moving_var, decay):
    """Return `xn` with the EMA updates attached as control dependencies.

    The returned tensor is an identity of `xn` named 'output'; evaluating it
    forces both moving averages to be updated in-graph.
    """
    mean_update = moving_averages.assign_moving_average(
        moving_mean, batch_mean, decay, zero_debias=False,
        name='mean_ema_op')
    var_update = moving_averages.assign_moving_average(
        moving_var, batch_var, decay, zero_debias=False,
        name='var_ema_op')
    # When sync_statistics is True, always enable internal_update.
    # Otherwise the update ops (only executed on main tower)
    # will hang when some BatchNorm layers are unused (https://github.com/tensorpack/tensorpack/issues/1078)
    with tf.control_dependencies([mean_update, var_update]):
        return tf.identity(xn, name='output')
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum',
        'use_local_stat': 'training'
    })
@disable_autograph()
def BatchNorm(inputs, axis=None, training=None, momentum=0.9, epsilon=1e-5,
              center=True, scale=True,
              beta_initializer=tf.zeros_initializer(),
              gamma_initializer=tf.ones_initializer(),
              virtual_batch_size=None,
              data_format='channels_last',
              ema_update='default',
              sync_statistics=None,
              internal_update=None):
    """
    A more powerful version of `tf.layers.batch_normalization`. It differs from
    the offical one in the following aspects:

    1. Accepts an alternative ``data_format`` option when ``axis`` is None. For 2D input, this argument will be ignored.
    2. Default value for ``momentum`` and ``epsilon`` is different.
    3. Default value for ``training`` is automatically obtained from tensorpack's ``TowerContext``.
       User-provided value can overwrite this behavior.
    4. Support the ``ema_update`` option, which covers broader use cases than the standard EMA update.
    5. Support the ``sync_statistics`` option, which implements "SyncBN" and is very useful in small-batch models.

    Args:
        training (bool): if True, use per-batch statistics to normalize. Otherwise, use stored EMA
            to normalize. By default, it is equal to `get_current_tower_context().is_training`.
            This is not a good argument name, but it is what the Tensorflow layer uses.
        ema_update (str): Only effective when ``training=True``. It has the following options:

            * "default": same as "collection". Because this is the default behavior in TensorFlow.
            * "skip": do not update EMA. This can be useful when you reuse a batch norm layer in several places
              but do not want them to all update your EMA.
            * "collection": Add EMA update ops to collection `tf.GraphKeys.UPDATE_OPS`.
              The ops in the collection will be run automatically by the callback :class:`RunUpdateOps`, along with
              your training iterations. This can waste compute if your training iterations do not always depend
              on the BatchNorm layer.
            * "internal": EMA is updated inside this layer itself by control dependencies.
              In standard scenarios, it has similar speed to "collection". But it has some more benefits:

              1. BatchNorm is used inside dynamic control flow.
                 The collection-based update does not support dynamic control flows.
              2. BatchNorm layer is sometimes unused (e.g., in GANs you have two networks to train alternatively).
                 Putting all update ops into a single collection will waste a lot of compute.
              3. Other part of the model relies on the "updated" EMA. The collection-based method does not update
                 EMA immediately.
              4. It has less chance to cause TensorFlow bugs in a graph with complicated control flow.

              Therefore this option is preferred over TensorFlow default.
              Corresponding TF issue: https://github.com/tensorflow/tensorflow/issues/14699
        sync_statistics (str or None): one of None, "nccl", or "horovod". It determines how to compute the
            "per-batch statistics" when ``training==True``.

            * None: it uses statistics of the input tensor to normalize during training.
              This is the standard way BatchNorm was implemented in most frameworks.
            * "nccl": this layer must be used under tensorpack's multi-GPU trainers.
              It uses the aggregated statistics of the whole batch (across all GPUs) to normalize.
            * "horovod": this layer must be used under tensorpack's :class:`HorovodTrainer`.
              It uses the aggregated statistics of the whole batch (across all MPI ranks) to normalize.
              Note that on single machine this is significantly slower than the "nccl" implementation.

            When not None, each GPU computes its own E[x] and E[x^2],
            which are then averaged among all GPUs to compute global mean & variance.
            Therefore each GPU needs to have the same batch size.

            The synchronization is based on the current variable scope + the name of the layer
            (`BatchNorm('name', input)`). Therefore, you need to make sure that:

            1. The BatchNorm layer on different GPUs needs to have the same name, so that
               statistics can be synchronized. If names do not match, this layer will hang.
            2. A BatchNorm layer cannot be reused within one tower.
            3. A BatchNorm layer needs to be executed for the same number of times by all GPUs.
               If different GPUs execute one BatchNorm layer for different number of times
               (e.g., if some GPUs do not execute it), this layer may hang.

            This option is also known as "SyncBN" or "Cross-GPU BatchNorm" as mentioned in:
            `MegDet: A Large Mini-Batch Object Detector <https://arxiv.org/abs/1711.07240>`_.
            Corresponding TF issue: https://github.com/tensorflow/tensorflow/issues/18222.

            When `sync_statistics` is enabled, `ema_update` is set to "internal" automatically.
            This is to avoid running `UPDATE_OPS`, which requires synchronization.
        internal_update: deprecated option. Don't use.

    Variable Names:

    * ``beta``: the bias term. Will be zero-inited by default.
    * ``gamma``: the scale term. Will be one-inited by default.
    * ``mean/EMA``: the moving average of mean.
    * ``variance/EMA``: the moving average of variance.

    Note:
        This layer is more flexible than the standard "BatchNorm" layer and provides more features:

        1. No matter whether you're doing training or not, you can set the ``training`` argument
           to use batch statistics or EMA statistics.
           i.e., you can use batch statistics during inference, or use EMA statistics during training.
           Using EMA statistics in training is useful when you load a pre-trained BN and
           don't want to update it.
        2. As long as `training=True`, `sync_statistics` and `ema_update` option will take effect.
    """
    # parse training/ctx
    ctx = get_current_tower_context()
    if training is None:
        training = ctx.is_training
    training = bool(training)

    # parse shapes
    data_format = get_data_format(data_format, keras_mode=False)
    shape = inputs.get_shape().as_list()
    ndims = len(shape)
    assert ndims in [2, 4], ndims
    if sync_statistics is not None:
        sync_statistics = sync_statistics.lower()
    assert sync_statistics in [None, 'nccl', 'horovod'], sync_statistics

    assert ema_update in ["default", "collection", "internal", "skip"]
    if internal_update is not None:
        log_deprecated("BatchNorm(internal_update=)", "Use ema_update='internal' instead!", "2020-01-01")
        assert ema_update == 'default', \
            "Do not use internal_update and ema_update together! internal_update is deprecated"
        ema_update = "internal" if internal_update else "collection"
    if ema_update == "default":
        ema_update = "collection"
    # Logic:
    # 1. EMA update is possible only when we compute batch statistics (training=True)
    # 2. We know that in training, non-main training tower does not need EMA update
    #    We don't know about what to do in prediction context, so be conservative and do the update.
    # 3. User can explicit disable update by "skip".
    do_ema_update = training and \
        (ctx.is_main_training_tower or not ctx.is_training) \
        and (ema_update != "skip")

    if axis is None:
        # Infer the channel axis from rank + data_format.
        if ndims == 2:
            axis = 1
        else:
            axis = 1 if data_format == 'NCHW' else 3
    assert axis in [1, 3], axis
    num_chan = shape[axis]

    TF_version = get_tf_version_tuple()

    # "Freeze-BN": EMA statistics are used even though the surrounding graph
    # is in training mode (e.g. fine-tuning only the affine part).
    freeze_bn_backward = not training and ctx.is_training
    if freeze_bn_backward:
        assert TF_version >= (1, 4), \
            "Fine tuning a BatchNorm model with fixed statistics needs TF>=1.4!"
        if ctx.is_main_training_tower:  # only warn in first tower
            log_once("Some BatchNorm layer uses moving_mean/moving_variance in training.", func='warn')
        # Using moving_mean/moving_variance in training, which means we
        # loaded a pre-trained BN and only fine-tuning the affine part.

    do_sync_bn = (sync_statistics is not None) and training

    if not do_sync_bn:
        # ---- Path 1: delegate to the builtin tf.layers implementation ----
        # Use the builtin layer for anything except for sync-bn
        coll_bk = backup_collection([tf.GraphKeys.UPDATE_OPS])
        with rename_get_variable(
                {'moving_mean': 'mean/EMA',
                 'moving_variance': 'variance/EMA'}):
            tf_args = dict(
                axis=axis,
                momentum=momentum, epsilon=epsilon,
                center=center, scale=scale,
                beta_initializer=beta_initializer,
                gamma_initializer=gamma_initializer,
                # https://github.com/tensorflow/tensorflow/issues/10857#issuecomment-410185429
                fused=(ndims == 4 and axis in [1, 3] and not freeze_bn_backward),
                _reuse=tf.get_variable_scope().reuse)
            if TF_version >= (1, 5):
                tf_args['virtual_batch_size'] = virtual_batch_size
            else:
                assert virtual_batch_size is None, "Feature not supported in this version of TF!"
            use_fp16 = inputs.dtype == tf.float16
            if use_fp16:
                # non-fused does not support fp16; fused does not support all layouts.
                # we made our best guess here
                tf_args['fused'] = True
            layer = tf.layers.BatchNormalization(**tf_args)
            xn = layer.apply(inputs, training=training, scope=tf.get_variable_scope())

        # Add EMA variables to the correct collection
        if ctx.is_main_training_tower:
            for v in layer.non_trainable_variables:
                if isinstance(v, tf.Variable):
                    tf.add_to_collection(tf.GraphKeys.MODEL_VARIABLES, v)
        if not do_ema_update:
            # Discard the UPDATE_OPS the builtin layer just added.
            restore_collection(coll_bk)
        if do_ema_update and ema_update == "internal":
            # Implement "internal" update: attach the EMA update ops as
            # control dependencies instead of leaving them in UPDATE_OPS.
            restore_collection(coll_bk)
            assert layer.updates
            with tf.control_dependencies(layer.updates):
                ret = tf.identity(xn, name='output')
        else:
            ret = tf.identity(xn, name='output')

        vh = ret.variables = VariableHolder(
            moving_mean=layer.moving_mean,
            mean=layer.moving_mean,  # for backward-compatibility
            moving_variance=layer.moving_variance,
            variance=layer.moving_variance)  # for backward-compatibility
        if scale:
            vh.gamma = layer.gamma
        if center:
            vh.beta = layer.beta
    else:
        # ---- Path 2: hand-rolled SyncBN (cross-GPU / cross-rank stats) ----
        red_axis = [0] if ndims == 2 else ([0, 2, 3] if axis == 1 else [0, 1, 2])

        new_shape = None  # don't need to reshape unless ...
        if ndims == 4 and axis == 1:
            new_shape = [1, num_chan, 1, 1]

        # E[x] and E[x^2] are averaged across devices; variance is then
        # derived as E[x^2] - E[x]^2.
        batch_mean = tf.reduce_mean(inputs, axis=red_axis)
        batch_mean_square = tf.reduce_mean(tf.square(inputs), axis=red_axis)

        if sync_statistics == 'nccl':
            num_dev = ctx.total
            if num_dev == 1:
                logger.warn("BatchNorm(sync_statistics='nccl') is used with only one tower!")
            else:
                assert TF_version >= (1, 10), \
                    "Cross-GPU BatchNorm is only supported in TF>=1.10 ." \
                    "Upgrade TF or apply this patch manually: https://github.com/tensorflow/tensorflow/pull/20360"

                if TF_version <= (1, 12):
                    try:
                        from tensorflow.contrib.nccl.python.ops.nccl_ops import _validate_and_load_nccl_so    # deprecated
                    except Exception:
                        pass
                    else:
                        _validate_and_load_nccl_so()
                    from tensorflow.contrib.nccl.ops import gen_nccl_ops  # deprecated
                else:
                    from tensorflow.python.ops import gen_nccl_ops
                # Strip the tower prefix so all towers agree on the NCCL op name.
                shared_name = re.sub('tower[0-9]+/', '', tf.get_variable_scope().name)
                batch_mean = gen_nccl_ops.nccl_all_reduce(
                    input=batch_mean,
                    reduction='sum',
                    num_devices=num_dev,
                    shared_name=shared_name + '_NCCL_mean') * (1.0 / num_dev)
                batch_mean_square = gen_nccl_ops.nccl_all_reduce(
                    input=batch_mean_square,
                    reduction='sum',
                    num_devices=num_dev,
                    shared_name=shared_name + '_NCCL_mean_square') * (1.0 / num_dev)
        elif sync_statistics == 'horovod':
            # Require https://github.com/uber/horovod/pull/331
            import horovod.tensorflow as hvd
            if hvd.size() == 1:
                logger.warn("BatchNorm(sync_statistics='horovod') is used with only one process!")
            else:
                import horovod
                hvd_version = tuple(map(int, horovod.__version__.split('.')[:3]))
                assert hvd_version >= (0, 13, 6), "sync_statistics=horovod needs horovod>=0.13.6 !"

                batch_mean = hvd.allreduce(batch_mean, average=True)
                batch_mean_square = hvd.allreduce(batch_mean_square, average=True)
        batch_var = batch_mean_square - tf.square(batch_mean)
        # Keep the unreshaped (vector) stats for the EMA update below.
        batch_mean_vec = batch_mean
        batch_var_vec = batch_var

        beta, gamma, moving_mean, moving_var = get_bn_variables(
            num_chan, scale, center, beta_initializer, gamma_initializer)
        if new_shape is not None:
            # NCHW: reshape per-channel stats so they broadcast over H, W.
            batch_mean = tf.reshape(batch_mean, new_shape)
            batch_var = tf.reshape(batch_var, new_shape)
            # Using fused_batch_norm(is_training=False) is actually slightly faster,
            # but hopefully this call will be JITed in the future.
            xn = tf.nn.batch_normalization(
                inputs, batch_mean, batch_var,
                tf.reshape(beta, new_shape),
                tf.reshape(gamma, new_shape), epsilon)
        else:
            xn = tf.nn.batch_normalization(
                inputs, batch_mean, batch_var,
                beta, gamma, epsilon)

        if do_ema_update:
            ret = internal_update_bn_ema(
                xn, batch_mean_vec, batch_var_vec, moving_mean, moving_var, momentum)
        else:
            ret = tf.identity(xn, name='output')

        vh = ret.variables = VariableHolder(
            moving_mean=moving_mean,
            mean=moving_mean,  # for backward-compatibility
            moving_variance=moving_var,
            variance=moving_var)  # for backward-compatibility
        if scale:
            vh.gamma = gamma
        if center:
            vh.beta = beta
    return ret
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum'
    })
def BatchRenorm(x, rmax, dmax, momentum=0.9, epsilon=1e-5,
                center=True, scale=True, gamma_initializer=None,
                data_format='channels_last'):
    """
    Batch Renormalization layer, as described in the paper:
    `Batch Renormalization: Towards Reducing Minibatch Dependence in Batch-Normalized Models
    <https://arxiv.org/abs/1702.03275>`_.
    This implementation is a wrapper around `tf.layers.batch_normalization`.

    Args:
        x (tf.Tensor): a NHWC or NC tensor.
        rmax, dmax (tf.Tensor): a scalar tensor, the maximum allowed corrections.
        decay (float): decay rate of moving average.
        epsilon (float): epsilon to avoid divide-by-zero.
        use_scale, use_bias (bool): whether to use the extra affine transformation or not.

    Returns:
        tf.Tensor: a tensor named ``output`` with the same shape of x.

    Variable Names:

    * ``beta``: the bias term.
    * ``gamma``: the scale term. Input will be transformed by ``x * gamma + beta``.
    * ``moving_mean, renorm_mean, renorm_mean_weight``: See TF documentation.
    * ``moving_variance, renorm_stddev, renorm_stddev_weight``: See TF documentation.
    """
    input_shape = x.get_shape().as_list()
    rank = len(input_shape)
    assert rank in [2, 4]
    if rank == 2:
        # 2D inputs have no spatial axes; treat the feature axis as channels.
        data_format = 'channels_first'
    ctx = get_current_tower_context()
    # Snapshot UPDATE_OPS so non-main towers can drop the ops added below.
    saved_update_ops = backup_collection([tf.GraphKeys.UPDATE_OPS])
    bn_layer = tf.layers.BatchNormalization(
        axis=1 if data_format == 'channels_first' else 3,
        momentum=momentum, epsilon=epsilon,
        center=center, scale=scale,
        renorm=True,
        renorm_clipping={
            'rmin': 1.0 / rmax,
            'rmax': rmax,
            'dmax': dmax},
        renorm_momentum=0.99,
        gamma_initializer=gamma_initializer,
        fused=False,
        _reuse=tf.get_variable_scope().reuse)
    normed = bn_layer.apply(x, training=ctx.is_training, scope=tf.get_variable_scope())

    if ctx.is_main_training_tower:
        for var in bn_layer.non_trainable_variables:
            if isinstance(var, tf.Variable):
                tf.add_to_collection(tf.GraphKeys.MODEL_VARIABLES, var)
    else:
        # only run UPDATE_OPS in the first tower
        restore_collection(saved_update_ops)

    if rank == 2:
        normed = tf.squeeze(normed, [1, 2])
    out = tf.identity(normed, name='output')

    # TODO not sure whether to add moving_mean/moving_var to VH now
    holder = out.variables = VariableHolder()
    if scale:
        holder.gamma = bn_layer.gamma
    if center:
        holder.beta = bn_layer.beta
    return out
| 45.731544
| 120
| 0.639957
|
acfbc2da15ef50261535e85f9befc2bcc716416e
| 27,091
|
py
|
Python
|
sdk/apis/product_service.py
|
Yiconghua/SDK
|
8465df574b083ff7398b5fadd4bc5150152dd0b3
|
[
"MIT"
] | 1
|
2021-04-03T05:11:29.000Z
|
2021-04-03T05:11:29.000Z
|
sdk/apis/product_service.py
|
Yiconghua/SDK
|
8465df574b083ff7398b5fadd4bc5150152dd0b3
|
[
"MIT"
] | null | null | null |
sdk/apis/product_service.py
|
Yiconghua/SDK
|
8465df574b083ff7398b5fadd4bc5150152dd0b3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# 商品服务
class ProductService:
__client = None
    def __init__(self, client):
        """Store the RPC client used to issue all product-service calls."""
        self.__client = client
    def get_menu_with_group(self, mid):
        """Query a chain-headquarters menu together with its group info.
        :param mid: menu id
        """
        return self.__client.call("eleme.product.chain.menu.getMenuWithGroup", {"mid": mid})

    def query_menu_by_page(self, offset, limit):
        """Query the chain-headquarters menu list, paginated.
        :param offset: pagination start offset
        :param limit: number of items per page
        """
        return self.__client.call("eleme.product.chain.menu.queryMenuByPage", {"offset": offset, "limit": limit})

    def create_menu(self, chain_menu_base_d_t_o):
        """Create a chain-headquarters menu.
        :param chainMenuBaseDTO: menu info to create
        """
        return self.__client.call("eleme.product.chain.menu.createMenu", {"chainMenuBaseDTO": chain_menu_base_d_t_o})

    def update_menu(self, mid, chain_menu_base_d_t_o):
        """Update a chain-headquarters menu.
        :param mid: menu id
        :param chainMenuBaseDTO: updated menu info
        """
        return self.__client.call("eleme.product.chain.menu.updateMenu", {"mid": mid, "chainMenuBaseDTO": chain_menu_base_d_t_o})

    def delete_menu(self, mid):
        """Delete a chain-headquarters menu.
        :param mid: menu id
        """
        return self.__client.call("eleme.product.chain.menu.deleteMenu", {"mid": mid})
    def get_group(self, gid):
        """Query a chain-headquarters product group.
        :param gid: chain-headquarters product group id
        """
        return self.__client.call("eleme.product.chain.group.getGroup", {"gid": gid})

    def get_group_with_item(self, gid):
        """Query a chain-headquarters product group together with item details.
        :param gid: chain-headquarters product group id
        """
        return self.__client.call("eleme.product.chain.group.getGroupWithItem", {"gid": gid})

    def create_group(self, mid, chain_group_base_d_t_o):
        """Create a chain-headquarters product group.
        :param mid: menu id
        :param chainGroupBaseDTO: group info to create
        """
        return self.__client.call("eleme.product.chain.group.createGroup", {"mid": mid, "chainGroupBaseDTO": chain_group_base_d_t_o})

    def batch_create_group(self, mid, chain_group_base_d_t_os):
        """Batch-create chain-headquarters product groups.
        :param mid: menu id
        :param chainGroupBaseDTOs: list of group infos to create
        """
        return self.__client.call("eleme.product.chain.group.batchCreateGroup", {"mid": mid, "chainGroupBaseDTOs": chain_group_base_d_t_os})

    def update_group(self, gid, chain_group_base_d_t_o):
        """Update a chain-headquarters product group.
        :param gid: chain-headquarters product group id
        :param chainGroupBaseDTO: updated group info
        """
        return self.__client.call("eleme.product.chain.group.updateGroup", {"gid": gid, "chainGroupBaseDTO": chain_group_base_d_t_o})

    def delete_group(self, gid):
        """Delete a chain-headquarters product group.
        :param gid: chain-headquarters product group id
        """
        return self.__client.call("eleme.product.chain.group.deleteGroup", {"gid": gid})
    def get_relation_by_pid(self, p_id):
        """Query the single-store product specs linked to a chain-headquarters product spec.
        :param pId: chain-headquarters product spec id
        """
        return self.__client.call("eleme.product.chain.pid.getRelationByPid", {"pId": p_id})

    def set_pid(self, p_id, spec_id):
        """Link a chain-headquarters product spec to a single-store product spec.
        :param pId: chain-headquarters product spec id
        :param specId: branch-store product spec id
        """
        return self.__client.call("eleme.product.chain.pid.setPid", {"pId": p_id, "specId": spec_id})

    def batch_set_pid(self, p_id, spec_ids):
        """Batch-link a chain-headquarters product spec to single-store product specs.
        :param pId: chain-headquarters product spec id
        :param specIds: list of branch-store product spec ids
        """
        return self.__client.call("eleme.product.chain.pid.batchSetPid", {"pId": p_id, "specIds": spec_ids})

    def delete_pid_by_spec_id(self, spec_id):
        """Unlink a single-store product spec from its chain-headquarters product spec.
        :param specId: branch-store product spec id
        """
        return self.__client.call("eleme.product.chain.pid.deletePidBySpecId", {"specId": spec_id})

    def batch_delete_pid_by_spec_id(self, spec_ids):
        """Batch-unlink single-store product specs from their chain-headquarters product specs.
        :param specIds: list of branch-store product spec ids
        """
        return self.__client.call("eleme.product.chain.pid.batchDeletePidBySpecId", {"specIds": spec_ids})
    def get_shop_categories(self, shop_id):
        """Query a shop's product categories.
        :param shopId: shop id
        """
        return self.__client.call("eleme.product.category.getShopCategories", {"shopId": shop_id})

    def get_shop_categories_with_children(self, shop_id):
        """Query a shop's product categories, including second-level categories.
        :param shopId: shop id
        """
        return self.__client.call("eleme.product.category.getShopCategoriesWithChildren", {"shopId": shop_id})

    def get_category(self, category_id):
        """Query product category details.
        :param categoryId: product category id
        """
        return self.__client.call("eleme.product.category.getCategory", {"categoryId": category_id})

    def get_category_with_children(self, category_id):
        """Query product category details, including second-level categories.
        :param categoryId: product category id
        """
        return self.__client.call("eleme.product.category.getCategoryWithChildren", {"categoryId": category_id})

    def create_category(self, shop_id, name, description):
        """Create a product category.
        :param shopId: shop id
        :param name: category name, at most 50 characters
        :param description: category description, at most 50 characters
        """
        return self.__client.call("eleme.product.category.createCategory", {"shopId": shop_id, "name": name, "description": description})

    def create_category_with_children(self, shop_id, name, parent_id, description):
        """Create a product category, with second-level category support.
        :param shopId: shop id
        :param name: category name, at most 50 characters
        :param parentId: parent category id; use 0 when there is none
        :param description: category description, at most 50 characters
        """
        return self.__client.call("eleme.product.category.createCategoryWithChildren", {"shopId": shop_id, "name": name, "parentId": parent_id, "description": description})

    def update_category(self, category_id, name, description):
        """Update a product category.
        :param categoryId: product category id
        :param name: category name, at most 50 characters
        :param description: category description, at most 50 characters
        """
        return self.__client.call("eleme.product.category.updateCategory", {"categoryId": category_id, "name": name, "description": description})

    def update_category_with_children(self, category_id, name, parent_id, description):
        """Update a product category, including second-level categories.
        :param categoryId: product category id
        :param name: category name, at most 50 characters
        :param parentId: parent category id; use 0 when there is none
        :param description: category description, at most 50 characters
        """
        return self.__client.call("eleme.product.category.updateCategoryWithChildren", {"categoryId": category_id, "name": name, "parentId": parent_id, "description": description})

    def remove_category(self, category_id):
        """Delete a product category.
        :param categoryId: product category id
        """
        return self.__client.call("eleme.product.category.removeCategory", {"categoryId": category_id})

    def invalid_category(self, category_id):
        """Delete a product category (new version).
        :param categoryId: product category id
        """
        return self.__client.call("eleme.product.category.invalidCategory", {"categoryId": category_id})

    def set_category_positions(self, shop_id, category_ids):
        """Set the category ordering.
        :param shopId: Eleme shop id
        :param categoryIds: category ids to order
        """
        return self.__client.call("eleme.product.category.setCategoryPositions", {"shopId": shop_id, "categoryIds": category_ids})

    def set_category_sequence(self, shop_id, category_ids):
        """Set the category ordering (new version).
        :param shopId: Eleme shop id
        :param categoryIds: all first-level category ids to order
        """
        return self.__client.call("eleme.product.category.setCategorySequence", {"shopId": shop_id, "categoryIds": category_ids})

    def set_category_positions_with_children(self, shop_id, category_with_children_ids):
        """Set the ordering of second-level categories.
        :param shopId: Eleme shop id
        :param categoryWithChildrenIds: parent category ids to order, each with its second-level category ids
        """
        return self.__client.call("eleme.product.category.setCategoryPositionsWithChildren", {"shopId": shop_id, "categoryWithChildrenIds": category_with_children_ids})

    def get_back_category(self, shop_id):
        """Query the product back-end categories.
        :param shopId: shop id
        """
        return self.__client.call("eleme.product.category.getBackCategory", {"shopId": shop_id})

    def set_category_type(self, shop_id, category_id, category_type):
        """Set a category's type.
        :param shopId: shop id
        :param categoryId: product category id
        :param categoryType: category type
        """
        return self.__client.call("eleme.product.category.setCategoryType", {"shopId": shop_id, "categoryId": category_id, "categoryType": category_type})

    def set_day_parting_stick_time(self, shop_id, category_id, day_parting_stick):
        """Pin a category to the top during configured time periods.
        :param shopId: shop id
        :param categoryId: product category id
        :param dayPartingStick: pin-time configuration
        """
        return self.__client.call("eleme.product.category.setDayPartingStickTime", {"shopId": shop_id, "categoryId": category_id, "dayPartingStick": day_parting_stick})

    def remove_day_parting_stick_time(self, shop_id, category_id):
        """Remove a category's time-period pinning.
        :param shopId: shop id
        :param categoryId: product category id
        """
        return self.__client.call("eleme.product.category.removeDayPartingStickTime", {"shopId": shop_id, "categoryId": category_id})
    def create_package(self, category_id, o_package):
        """Create a package (set meal).
        :param categoryId: category id
        :param oPackage: package attributes
        """
        return self.__client.call("eleme.product.package.createPackage", {"categoryId": category_id, "oPackage": o_package})

    def update_package_content(self, item_id, category_id, update):
        """Update a package's basic info.
        :param itemId: new package id, i.e. the item id in OItem
        :param categoryId: category id, i.e. the category id in OCategory
        :param update: package basic info
        """
        return self.__client.call("eleme.product.package.updatePackageContent", {"itemId": item_id, "categoryId": category_id, "update": update})

    def update_package_relation(self, item_id, packages):
        """Update the relation between a package and its main items.
        :param itemId: new package id, i.e. the item id in OItem
        :param packages: package relations
        """
        return self.__client.call("eleme.product.package.updatePackageRelation", {"itemId": item_id, "packages": packages})

    def remove_package(self, item_id):
        """Delete a package.
        :param itemId: package id
        """
        return self.__client.call("eleme.product.package.removePackage", {"itemId": item_id})
    def get_items_by_category_id(self, category_id):
        """Get all items under a category.
        :param categoryId: product category id
        """
        return self.__client.call("eleme.product.item.getItemsByCategoryId", {"categoryId": category_id})

    def get_item(self, item_id):
        """Query item details.
        :param itemId: item id
        """
        return self.__client.call("eleme.product.item.getItem", {"itemId": item_id})

    def batch_get_items(self, item_ids):
        """Batch-query item details.
        :param itemIds: list of item ids
        """
        return self.__client.call("eleme.product.item.batchGetItems", {"itemIds": item_ids})

    def create_item(self, category_id, properties):
        """Create an item.
        :param categoryId: product category id
        :param properties: item attributes
        """
        return self.__client.call("eleme.product.item.createItem", {"categoryId": category_id, "properties": properties})

    def batch_create_items(self, category_id, items):
        """Batch-create items.
        :param categoryId: product category id
        :param items: list of item attributes
        """
        return self.__client.call("eleme.product.item.batchCreateItems", {"categoryId": category_id, "items": items})

    def batch_create_items_ignore_error(self, category_id, items):
        """Batch-create items, ignoring errors (built specifically for Starbucks).
        :param categoryId: product category id
        :param items: list of item attributes
        """
        return self.__client.call("eleme.product.item.batchCreateItemsIgnoreError", {"categoryId": category_id, "items": items})

    def update_item(self, item_id, category_id, properties):
        """Update an item.
        :param itemId: item id
        :param categoryId: product category id
        :param properties: item attributes
        """
        return self.__client.call("eleme.product.item.updateItem", {"itemId": item_id, "categoryId": category_id, "properties": properties})

    def batch_fill_stock(self, spec_ids):
        """Batch-refill stock to full.
        :param specIds: list of items and item specs
        """
        return self.__client.call("eleme.product.item.batchFillStock", {"specIds": spec_ids})

    def batch_clear_stock(self, spec_ids):
        """Batch-clear (sell out) stock.
        :param specIds: list of items and item specs
        """
        return self.__client.call("eleme.product.item.batchClearStock", {"specIds": spec_ids})

    def batch_on_shelf(self, spec_ids):
        """Batch-list items for sale.
        :param specIds: list of items and item specs
        """
        return self.__client.call("eleme.product.item.batchOnShelf", {"specIds": spec_ids})

    def batch_list_items(self, item_ids):
        """Batch-list items for sale (new version).
        :param itemIds: list of item ids
        """
        return self.__client.call("eleme.product.item.batchListItems", {"itemIds": item_ids})

    def batch_off_shelf(self, spec_ids):
        """Batch-delist items from sale.
        :param specIds: list of items and item specs
        """
        return self.__client.call("eleme.product.item.batchOffShelf", {"specIds": spec_ids})

    def batch_delist_items(self, item_ids):
        """Batch-delist items from sale (new version).
        :param itemIds: list of item ids
        """
        return self.__client.call("eleme.product.item.batchDelistItems", {"itemIds": item_ids})

    def remove_item(self, item_id):
        """Delete an item.
        :param itemId: item id
        """
        return self.__client.call("eleme.product.item.removeItem", {"itemId": item_id})

    def invalid_item(self, item_id):
        """Delete an item (new version).
        :param itemId: item id
        """
        return self.__client.call("eleme.product.item.invalidItem", {"itemId": item_id})

    def batch_remove_items(self, item_ids):
        """Batch-delete items.
        :param itemIds: list of item ids
        """
        return self.__client.call("eleme.product.item.batchRemoveItems", {"itemIds": item_ids})
def batch_update_spec_stocks(self, spec_stocks):
"""
批量更新商品库存
:param specStocks:商品以及规格库存列表
"""
return self.__client.call("eleme.product.item.batchUpdateSpecStocks", {"specStocks": spec_stocks})
def batch_update_stock(self, stock_map):
"""
批量更新商品库存(新版)
:param stockMap:商品规格ID和库存设值的映射
"""
return self.__client.call("eleme.product.item.batchUpdateStock", {"stockMap": stock_map})
def set_item_positions(self, category_id, item_ids):
"""
设置商品排序
:param categoryId:商品分类Id
:param itemIds:商品Id列表
"""
return self.__client.call("eleme.product.item.setItemPositions", {"categoryId": category_id, "itemIds": item_ids})
def clear_and_timing_max_stock(self, clear_stocks):
"""
批量沽清库存并在次日2:00开始置满
:param clearStocks:店铺Id及商品Id的列表
"""
return self.__client.call("eleme.product.item.clearAndTimingMaxStock", {"clearStocks": clear_stocks})
def get_item_by_shop_id_and_extend_code(self, shop_id, extend_code):
"""
根据商品扩展码获取商品
:param shopId:店铺Id
:param extendCode:商品扩展码
"""
return self.__client.call("eleme.product.item.getItemByShopIdAndExtendCode", {"shopId": shop_id, "extendCode": extend_code})
def get_items_by_shop_id_and_bar_code(self, shop_id, bar_code):
"""
根据商品条形码获取商品
:param shopId:店铺Id
:param barCode:商品条形码
"""
return self.__client.call("eleme.product.item.getItemsByShopIdAndBarCode", {"shopId": shop_id, "barCode": bar_code})
def batch_update_prices(self, shop_id, spec_prices):
"""
批量修改商品价格
:param shopId:店铺Id
:param specPrices:商品Id及其下SkuId和价格对应Map(限制最多50个)
"""
return self.__client.call("eleme.product.item.batchUpdatePrices", {"shopId": shop_id, "specPrices": spec_prices})
def get_item_ids_has_activity_by_shop_id(self, shop_id):
"""
查询活动商品
:param shopId:店铺Id
"""
return self.__client.call("eleme.product.item.getItemIdsHasActivityByShopId", {"shopId": shop_id})
def get_shop_sales_items(self, shop_id):
"""
查询店铺活动商品(新版)
:param shopId:店铺Id
"""
return self.__client.call("eleme.product.item.getShopSalesItems", {"shopId": shop_id})
def set_order_packing_fee(self, shop_id, status, packing_fee):
"""
设置订单餐盒费
:param shopId: 店铺ID
:param status:是否按照订单设置餐盒费
:param packingFee:订单餐盒费费用
"""
return self.__client.call("eleme.product.item.setOrderPackingFee", {"shopId": shop_id, "status": status, "packingFee": packing_fee})
def query_item_by_page(self, query_page):
"""
分页获取店铺下的商品
:param queryPage:分页查询参数
"""
return self.__client.call("eleme.product.item.queryItemByPage", {"queryPage": query_page})
def get_material_tree(self, shop_id):
"""
获取原材料树(即将下线)
:param shopId:店铺ID
"""
return self.__client.call("eleme.product.item.getMaterialTree", {"shopId": shop_id})
def set_ingredient(self, shop_id, main_item_id, ingredient_group):
"""
主料关联配料(接口已过期,请勿使用)
:param shopId:店铺ID
:param mainItemId:主料ID(商品ID)
:param ingredientGroup: 商品配料分组
"""
return self.__client.call("eleme.product.item.setIngredient", {"shopId": shop_id, "mainItemId": main_item_id, "ingredientGroup": ingredient_group})
def remove_ingredient(self, shop_id, main_item_id):
"""
删除配料(接口已过期,请勿使用)
:param shopId:店铺ID
:param mainItemId:主料ID(商品ID)
"""
return self.__client.call("eleme.product.item.removeIngredient", {"shopId": shop_id, "mainItemId": main_item_id})
def set_related_item_ids(self, shop_id, item_id, related_item_ids):
"""
针对主菜itemId设置菜品推荐
:param shopId:店铺ID
:param itemId:商品ID
:param relatedItemIds:关联的商品ID
"""
return self.__client.call("eleme.product.item.setRelatedItemIds", {"shopId": shop_id, "itemId": item_id, "relatedItemIds": related_item_ids})
def display_related_item_ids(self, shop_id, item_id, display):
"""
对主菜itemId设置是否开启菜品推荐
:param shopId:店铺ID
:param itemId:商品ID
:param display:是否展示
"""
return self.__client.call("eleme.product.item.displayRelatedItemIds", {"shopId": shop_id, "itemId": item_id, "display": display})
def get_related_item_ids(self, shop_id, item_id):
"""
针对主菜itemId查询菜品推荐
:param shopId:店铺ID
:param itemId:商品ID
"""
return self.__client.call("eleme.product.item.getRelatedItemIds", {"shopId": shop_id, "itemId": item_id})
def create_multi_spec_item(self, category_id, properties):
"""
添加多规格商品
:param categoryId:商品分类Id
:param properties:商品属性
"""
return self.__client.call("eleme.product.item.createMultiSpecItem", {"categoryId": category_id, "properties": properties})
def batch_create_multi_spec_item(self, category_id, items):
"""
批量添加多规格商品
:param categoryId:商品分类Id
:param items:商品属性的列表
"""
return self.__client.call("eleme.product.item.batchCreateMultiSpecItem", {"categoryId": category_id, "items": items})
def update_multi_spec_item(self, item_id, category_id, properties):
"""
更新多规格商品
:param itemId:商品Id
:param categoryId:商品分类Id
:param properties:商品属性
"""
return self.__client.call("eleme.product.item.updateMultiSpecItem", {"itemId": item_id, "categoryId": category_id, "properties": properties})
def set_ingredient_group(self, item_id, group_relations):
"""
设置配料组数据
:param itemId:商品Id
:param groupRelations:配料组信息
"""
return self.__client.call("eleme.product.item.setIngredientGroup", {"itemId": item_id, "groupRelations": group_relations})
def remove_ingredient_group(self, item_id):
"""
删除配料组数据
:param itemId:商品Id
"""
return self.__client.call("eleme.product.item.removeIngredientGroup", {"itemId": item_id})
def get_item_material_tree(self, shop_id):
"""
获取商品原材料数据(新版)
:param shopId:店铺ID
"""
return self.__client.call("eleme.product.item.getItemMaterialTree", {"shopId": shop_id})
def create_ingredient_group(self, ingredient_group):
"""
创建配料组
:param ingredientGroup:配料组数据
"""
return self.__client.call("eleme.product.item.createIngredientGroup", {"ingredientGroup": ingredient_group})
def batch_create_ingredient_groups(self, ingredient_groups):
"""
批量创建配料组
:param ingredientGroups:配料组数据
"""
return self.__client.call("eleme.product.item.batchCreateIngredientGroups", {"ingredientGroups": ingredient_groups})
def get_ingredient_group(self, ingredient_group_id):
"""
查询配料组
:param ingredientGroupId:配料组id
"""
return self.__client.call("eleme.product.item.getIngredientGroup", {"ingredientGroupId": ingredient_group_id})
def list_ingredient_groups(self, ingredient_group_ids):
"""
批量查询配料组
:param ingredientGroupIds:配料组id列表
"""
return self.__client.call("eleme.product.item.listIngredientGroups", {"ingredientGroupIds": ingredient_group_ids})
def delete_ingredient_group(self, ingredient_group_id):
"""
删除配料组
:param ingredientGroupId:配料组id
"""
return self.__client.call("eleme.product.item.deleteIngredientGroup", {"ingredientGroupId": ingredient_group_id})
def bind_ingredient_groups(self, item_id, ingredient_group_ids):
"""
给主料商品绑定配料组
:param itemId:主料商品id
:param ingredientGroupIds:配料组id列表
"""
return self.__client.call("eleme.product.item.bindIngredientGroups", {"itemId": item_id, "ingredientGroupIds": ingredient_group_ids})
def unbind_ingredient_groups(self, item_id, ingredient_group_ids):
"""
解绑配料组
:param itemId:主料商品id
:param ingredientGroupIds:配料组id列表
"""
return self.__client.call("eleme.product.item.unbindIngredientGroups", {"itemId": item_id, "ingredientGroupIds": ingredient_group_ids})
def remove_main_item_ingredient_groups(self, item_id):
"""
移除主料商品的全部配料组
:param itemId:主料商品id
"""
return self.__client.call("eleme.product.item.removeMainItemIngredientGroups", {"itemId": item_id})
def update_item_group(self, shop_id, item_id, category_id):
"""
更新单店商品所属分组
:param shopId:店铺id
:param itemId:商品id
:param categoryId:分类id
"""
return self.__client.call("eleme.product.item.updateItemGroup", {"shopId": shop_id, "itemId": item_id, "categoryId": category_id})
    def get_chain_item(self, iid):
        """
        Query a chain headquarters item.
        :param iid: chain headquarters item id
        """
        return self.__client.call("eleme.product.chain.item.getChainItem", {"iid": iid})
    def batch_get_chain_item(self, iids):
        """
        Batch query chain headquarters items.
        :param iids: list of chain headquarters item ids
        """
        return self.__client.call("eleme.product.chain.item.batchGetChainItem", {"iids": iids})
    def create_chain_item(self, gid, chain_item_base_d_t_o):
        """
        Create a chain headquarters item.
        :param gid: chain headquarters item group id
        :param chainItemBaseDTO: item creation payload
        """
        return self.__client.call("eleme.product.chain.item.createChainItem", {"gid": gid, "chainItemBaseDTO": chain_item_base_d_t_o})
    def batch_create_chain_item(self, gid, chain_item_base_d_t_os):
        """
        Batch create chain headquarters items.
        :param gid: chain headquarters item group id
        :param chainItemBaseDTOs: list of item creation payloads
        """
        return self.__client.call("eleme.product.chain.item.batchCreateChainItem", {"gid": gid, "chainItemBaseDTOs": chain_item_base_d_t_os})
    def replace_chain_item(self, gid, chain_item_d_t_o):
        """
        Replace a chain headquarters item.
        :param gid: item group id
        :param chainItemDTO: item replacement payload
        """
        return self.__client.call("eleme.product.chain.item.replaceChainItem", {"gid": gid, "chainItemDTO": chain_item_d_t_o})
    def batch_replace_chain_item(self, gid, chain_item_d_t_os):
        """
        Batch replace chain headquarters items.
        :param gid: item group id
        :param chainItemDTOs: list of item replacement payloads
        """
        return self.__client.call("eleme.product.chain.item.batchReplaceChainItem", {"gid": gid, "chainItemDTOs": chain_item_d_t_os})
    def update_chain_item_without_sku(self, iid, chain_item_base_d_t_o):
        """
        Update a chain headquarters item, excluding its spec (SKU) information.
        :param iid: chain headquarters item id
        :param chainItemBaseDTO: item update payload
        """
        return self.__client.call("eleme.product.chain.item.updateChainItemWithoutSku", {"iid": iid, "chainItemBaseDTO": chain_item_base_d_t_o})
    def delete_chain_item(self, iid):
        """
        Delete a chain headquarters item.
        :param iid: chain headquarters item id
        """
        return self.__client.call("eleme.product.chain.item.deleteChainItem", {"iid": iid})
    def get_sku(self, p_id):
        """
        Query a chain headquarters item spec (SKU).
        :param pId: chain headquarters item spec id
        """
        return self.__client.call("eleme.product.chain.item.getSku", {"pId": p_id})
    def add_sku(self, iid, chain_sku_base_d_t_o):
        """
        Add a spec (SKU) to a chain headquarters item.
        :param iid: chain headquarters item id
        :param chainSkuBaseDTO: spec creation payload
        """
        return self.__client.call("eleme.product.chain.item.addSku", {"iid": iid, "chainSkuBaseDTO": chain_sku_base_d_t_o})
    def update_sku(self, p_id, chain_sku_base_d_t_o):
        """
        Update a chain headquarters item spec (SKU).
        :param pId: chain headquarters item spec id
        :param chainSkuBaseDTO: spec update payload
        """
        return self.__client.call("eleme.product.chain.item.updateSku", {"pId": p_id, "chainSkuBaseDTO": chain_sku_base_d_t_o})
    def delete_sku(self, p_id):
        """
        Delete a chain headquarters item spec (SKU).
        :param pId: chain headquarters item spec id
        """
        return self.__client.call("eleme.product.chain.item.deleteSku", {"pId": p_id})
    def upload_image(self, image):
        """
        Upload an image; returns the image's hash value.
        :param image: file content, base64 encoded
        """
        return self.__client.call("eleme.file.uploadImage", {"image": image})
    def upload_image_with_remote_url(self, url):
        """
        Upload an image from a remote URL; returns the image's hash value.
        :param url: remote URL of the image
        """
        return self.__client.call("eleme.file.uploadImageWithRemoteUrl", {"url": url})
    def get_uploaded_url(self, hash):
        """
        Get the access URL of an uploaded file; returns the file's URL.
        :param hash: image hash value
        """
        return self.__client.call("eleme.file.getUploadedUrl", {"hash": hash})
    def get_image_url(self, hash):
        """
        Get the URL of an uploaded image (new version).
        :param hash: image hash value
        """
        return self.__client.call("eleme.file.getImageUrl", {"hash": hash})
| 34.598978
| 180
| 0.634565
|
acfbc322e44e274ba79db3a8dd2f7f7a89a7daaa
| 816
|
py
|
Python
|
experiments/test_reverts.py
|
celioggr/erc20-pbt
|
64043c4943884a8517fa24f7223740c780eb0ad5
|
[
"Apache-2.0"
] | 5
|
2020-10-12T16:26:30.000Z
|
2021-12-28T09:18:47.000Z
|
experiments/test_reverts.py
|
celioggr/erc20-pbt
|
64043c4943884a8517fa24f7223740c780eb0ad5
|
[
"Apache-2.0"
] | null | null | null |
experiments/test_reverts.py
|
celioggr/erc20-pbt
|
64043c4943884a8517fa24f7223740c780eb0ad5
|
[
"Apache-2.0"
] | 1
|
2021-01-21T11:01:11.000Z
|
2021-01-21T11:01:11.000Z
|
import brownie, pytest, re
""" override foo fixture from conftest
@pytest.fixture(scope="module")
def foo(foo,accounts,FooCoin):
token = FooCoin.deploy(2**64,"FooCoin", "FCN", 18, {'from': accounts[0]})
yield token
"""
def test_uint256_overflow(foo, accounts):
    """A transfer amount of 2**256 must be rejected client-side: it cannot be
    encoded as a uint256, so brownie raises OverflowError before any tx is sent."""
    amount = 2 ** 256
    with pytest.raises(OverflowError) as excinfo:
        foo.transfer(accounts[2], amount, {"from": accounts[0]})
    assert excinfo.type is OverflowError
    assert excinfo.match("is outside allowable range for uint256")
def test_try_underflow(foo, accounts):
    """Transferring more than the sender's balance must revert inside the EVM
    with the standard ERC20 insufficient-balance message."""
    with pytest.raises(brownie.exceptions.VirtualMachineError) as excinfo:
        foo.transfer(accounts[2], 10000, {"from": accounts[1]})
    assert excinfo.type is brownie.exceptions.VirtualMachineError
    assert excinfo.match("ERC20: transfer amount exceeds balance")
| 35.478261
| 77
| 0.724265
|
acfbc333e7947ab266c219a1f46dc8bd652e0675
| 8,141
|
py
|
Python
|
docs/conf.py
|
matthewi/magpie
|
c5217da0c3ae2e92f0ea21e025714022522523bd
|
[
"MIT"
] | null | null | null |
docs/conf.py
|
matthewi/magpie
|
c5217da0c3ae2e92f0ea21e025714022522523bd
|
[
"MIT"
] | null | null | null |
docs/conf.py
|
matthewi/magpie
|
c5217da0c3ae2e92f0ea21e025714022522523bd
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# magpie documentation build configuration file, created by
# sphinx-quickstart on Fri Apr 18 17:35:26 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# No Sphinx extensions are enabled for this project.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'magpie'
copyright = u'2014, Charles Thomas'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# NOTE: placeholder version; keep in sync with the package version when released.
version = '0.0.0'
# The full version, including alpha/beta/rc tags.
release = '0.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'magpiedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'magpie.tex', u'magpie Documentation',
   u'Charles Thomas', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'magpie', u'magpie Documentation',
     [u'Charles Thomas'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'magpie', u'magpie Documentation',
   u'Charles Thomas', 'magpie', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 31.432432
| 79
| 0.717357
|
acfbc57d5b39da26b2028fca1f0cfbed237ab6c0
| 4,821
|
py
|
Python
|
sdk/AsposeEmailCloudSdk/models/ai_bcr_image_storage_file.py
|
aspose-email-cloud/aspose-email-cloud-python
|
c5c13839cbbbfa5b6617bd1aedf3cf30cd664227
|
[
"MIT"
] | 1
|
2020-02-26T13:19:06.000Z
|
2020-02-26T13:19:06.000Z
|
sdk/AsposeEmailCloudSdk/models/ai_bcr_image_storage_file.py
|
aspose-email-cloud/aspose-email-cloud-python
|
c5c13839cbbbfa5b6617bd1aedf3cf30cd664227
|
[
"MIT"
] | null | null | null |
sdk/AsposeEmailCloudSdk/models/ai_bcr_image_storage_file.py
|
aspose-email-cloud/aspose-email-cloud-python
|
c5c13839cbbbfa5b6617bd1aedf3cf30cd664227
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# ----------------------------------------------------------------------------
# <copyright company="Aspose" file="AiBcrImageStorageFile.py">
# Copyright (c) 2018-2020 Aspose Pty Ltd. All rights reserved.
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# </summary>
# ----------------------------------------------------------------------------
import pprint
import re
import six
from typing import List, Set, Dict, Tuple, Optional
from datetime import datetime
from AsposeEmailCloudSdk.models.ai_bcr_image import AiBcrImage
from AsposeEmailCloudSdk.models.storage_file_location import StorageFileLocation
class AiBcrImageStorageFile(AiBcrImage):
    """Image stored in cloud storage, submitted for business-card recognition."""

    # Maps attribute name -> declared swagger type (drives to_dict serialization).
    swagger_types = {
        'is_single': 'bool',
        'file': 'StorageFileLocation'
    }

    # Maps attribute name -> JSON key used in the API definition.
    attribute_map = {
        'is_single': 'isSingle',
        'file': 'file'
    }

    def __init__(self, is_single: bool = None, file: StorageFileLocation = None):
        """
        Image from storage for recognition

        :param is_single: Determines that image contains single VCard or more.
        :type is_single: bool
        :param file: Image location
        :type file: StorageFileLocation
        """
        super(AiBcrImageStorageFile, self).__init__()
        self._file = None
        # Only assign through the setters so validation applies.
        if is_single is not None:
            self.is_single = is_single
        if file is not None:
            self.file = file

    @property
    def file(self) -> StorageFileLocation:
        """
        Image location

        :return: The file of this AiBcrImageStorageFile.
        :rtype: StorageFileLocation
        """
        return self._file

    @file.setter
    def file(self, file: StorageFileLocation):
        """
        Image location

        :param file: The file of this AiBcrImageStorageFile.
        :type: StorageFileLocation
        """
        if file is None:
            raise ValueError("Invalid value for `file`, must not be `None`")
        self._file = file

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _serialize(val):
            # Lists get one level of element serialization; nested models
            # serialize via their own to_dict; dict values likewise.
            if isinstance(val, list):
                return [v.to_dict() if hasattr(v, "to_dict") else v for v in val]
            if hasattr(val, "to_dict"):
                return val.to_dict()
            if isinstance(val, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in val.items()}
            return val

        return {attr: _serialize(getattr(self, attr))
                for attr in self.swagger_types}

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, AiBcrImageStorageFile) and
                self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 34.435714
| 91
| 0.590127
|
acfbc5851f8b85b35eecf0aafcf71e8124689a42
| 643
|
py
|
Python
|
GPErks/gp/mean.py
|
stelong/GPErks
|
7e8e0e4561c10ad21fba2079619418e416a167b6
|
[
"MIT"
] | null | null | null |
GPErks/gp/mean.py
|
stelong/GPErks
|
7e8e0e4561c10ad21fba2079619418e416a167b6
|
[
"MIT"
] | 6
|
2021-12-10T14:16:51.000Z
|
2022-03-25T16:26:50.000Z
|
GPErks/gp/mean.py
|
stelong/GPErks
|
7e8e0e4561c10ad21fba2079619418e416a167b6
|
[
"MIT"
] | 1
|
2022-01-28T11:12:33.000Z
|
2022-01-28T11:12:33.000Z
|
import gpytorch
import torch
class LinearMean(gpytorch.means.Mean):
    """Linear (affine) GP mean: m(x) = bias + x @ weights.

    Weights start at zero and the bias starts at ``data_mean``, so the
    initial mean function is the constant data mean.
    """

    def __init__(self, input_size, data_mean, batch_shape=torch.Size()):
        super().__init__()
        initial_weights = torch.zeros(*batch_shape, input_size, 1)
        initial_bias = data_mean * torch.ones(*batch_shape, 1)
        self.register_parameter(
            name="weights",
            parameter=torch.nn.Parameter(initial_weights),
        )
        self.register_parameter(
            name="bias",
            parameter=torch.nn.Parameter(initial_bias),
        )

    def forward(self, x):
        # Project inputs onto the weight vector and drop the trailing
        # singleton dimension, then shift by the learned bias.
        return self.bias + x.matmul(self.weights).squeeze(-1)
| 26.791667
| 72
| 0.559876
|
acfbc5c92ddaa4d3a04be026d418512efd421dc6
| 18,475
|
py
|
Python
|
mycroft/skills/skill_manager.py
|
zhanghan177/mycroft-core
|
f6f7a5515fb82b672932378a5e30abcf854a18d4
|
[
"Apache-2.0"
] | null | null | null |
mycroft/skills/skill_manager.py
|
zhanghan177/mycroft-core
|
f6f7a5515fb82b672932378a5e30abcf854a18d4
|
[
"Apache-2.0"
] | null | null | null |
mycroft/skills/skill_manager.py
|
zhanghan177/mycroft-core
|
f6f7a5515fb82b672932378a5e30abcf854a18d4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Load, update and manage skills on this device."""
import os
from glob import glob
from threading import Thread, Event, Lock
from time import sleep, time, monotonic
from inspect import signature
from mycroft.api import is_paired
from mycroft.enclosure.api import EnclosureAPI
from mycroft.configuration import Configuration
from mycroft.messagebus.message import Message
from mycroft.util.log import LOG
from .msm_wrapper import create_msm as msm_creator, build_msm_config
from .settings import SkillSettingsDownloader
from .skill_loader import SkillLoader
from .skill_updater import SkillUpdater
SKILL_MAIN_MODULE = '__init__.py'
class UploadQueue:
    """Queue for holding loaders with data that still needs to be uploaded.

    This queue can be used during startup to capture all loaders
    and then processing can be triggered at a later stage when the system is
    connected to the backend.

    After all queued settingsmeta has been processed and the queue is empty
    the queue will set the self.started flag.
    """

    def __init__(self):
        self._queue = []
        self.started = False
        self.lock = Lock()

    def start(self):
        """Start processing of the queue."""
        self.started = True
        self.send()

    def stop(self):
        """Stop the queue, and hinder any further transmissions."""
        self.started = False

    def send(self):
        """Loop through all stored loaders triggering settingsmeta upload."""
        # Swap the queue out under the lock; uploads happen outside it.
        with self.lock:
            pending, self._queue = self._queue, []
        if not pending:
            return
        LOG.info('New Settings meta to upload.')
        for loader in pending:
            if not self.started:
                break
            loader.instance.settings_meta.upload()

    def __len__(self):
        return len(self._queue)

    def put(self, loader):
        """Append a skill loader to the queue.

        If a loader is already present it's removed in favor of the new entry.
        """
        if self.started:
            LOG.info('Updating settings meta during runtime...')
        with self.lock:
            # Drop any stale entry for this loader before re-queueing it.
            self._queue = [entry for entry in self._queue if entry != loader]
            self._queue.append(loader)
def _shutdown_skill(instance):
"""Shutdown a skill.
Call the default_shutdown method of the skill, will produce a warning if
the shutdown process takes longer than 1 second.
Arguments:
instance (MycroftSkill): Skill instance to shutdown
"""
try:
ref_time = monotonic()
# Perform the shutdown
instance.default_shutdown()
shutdown_time = monotonic() - ref_time
if shutdown_time > 1:
LOG.warning('{} shutdown took {} seconds'.format(instance.skill_id,
shutdown_time))
except Exception:
LOG.exception('Failed to shut down skill: '
'{}'.format(instance.skill_id))
class SkillManager(Thread):
_msm = None
    def __init__(self, bus, watchdog=None):
        """Constructor
        Arguments:
            bus (event emitter): Mycroft messagebus connection
            watchdog (callable): optional watchdog function
        """
        super(SkillManager, self).__init__()
        self.bus = bus
        # Set watchdog to argument or function returning None
        self._watchdog = watchdog or (lambda: None)
        self._stop_event = Event()
        self._connected_event = Event()
        self.config = Configuration.get()
        self.upload_queue = UploadQueue()
        self.skill_loaders = {}
        self.enclosure = EnclosureAPI(bus)
        self.initial_load_complete = False
        self.num_install_retries = 0
        self.settings_downloader = SkillSettingsDownloader(self.bus)
        self.empty_skill_dirs = set()  # Save a record of empty skill dirs.
        # Statuses
        self._alive_status = False # True after priority skills has loaded
        self._loaded_status = False # True after all skills has loaded
        self.skill_updater = SkillUpdater()
        self._define_message_bus_events()
        # Daemon thread: don't block process exit on the skill manager.
        self.daemon = True
    def _define_message_bus_events(self):
        """Define message bus events with handlers defined in this class."""
        # Conversation management
        self.bus.on('skill.converse.request', self.handle_converse_request)
        # Update on initial connection
        self.bus.on(
            'mycroft.internet.connected',
            lambda x: self._connected_event.set()
        )
        # Update upon request
        self.bus.on('skillmanager.update', self.schedule_now)
        self.bus.on('skillmanager.list', self.send_skill_list)
        self.bus.on('skillmanager.deactivate', self.deactivate_skill)
        self.bus.on('skillmanager.keep', self.deactivate_except)
        self.bus.on('skillmanager.activate', self.activate_skill)
        self.bus.on('mycroft.paired', self.handle_paired)
        self.bus.on(
            'mycroft.skills.settings.update',
            self.settings_downloader.download
        )
    @property
    def skills_config(self):
        # The 'skills' section of the mycroft configuration.
        return self.config['skills']
    @property
    def msm(self):
        # Lazily created, class-level cached Mycroft Skill Manager instance.
        if self._msm is None:
            msm_config = build_msm_config(self.config)
            self._msm = msm_creator(msm_config)
        return self._msm
    @staticmethod
    def create_msm():
        # Build a fresh msm instance from the current configuration
        # (independent of the cached self._msm).
        LOG.debug('instantiating msm via static method...')
        msm_config = build_msm_config(Configuration.get())
        msm_instance = msm_creator(msm_config)
        return msm_instance
    def schedule_now(self, _):
        # Force the next skill-update download to be due immediately.
        self.skill_updater.next_download = time() - 1
    def _start_settings_update(self):
        LOG.info('Start settings update')
        self.skill_updater.post_manifest(reload_skills_manifest=True)
        self.upload_queue.start()
        LOG.info('All settings meta has been processed or upload has started')
        self.settings_downloader.download()
        LOG.info('Skill settings downloading has started')
    def handle_paired(self, _):
        """Trigger upload of skills manifest after pairing."""
        self._start_settings_update()
def load_priority(self):
skills = {skill.name: skill for skill in self.msm.all_skills}
priority_skills = self.skills_config.get("priority_skills", [])
for skill_name in priority_skills:
skill = skills.get(skill_name)
if skill is not None:
if not skill.is_local:
try:
self.msm.install(skill)
except Exception:
log_msg = 'Downloading priority skill: {} failed'
LOG.exception(log_msg.format(skill_name))
continue
loader = self._load_skill(skill.path)
if loader:
self.upload_queue.put(loader)
else:
LOG.error(
'Priority skill {} can\'t be found'.format(skill_name)
)
self._alive_status = True
def run(self):
"""Load skills and update periodically from disk and internet."""
self._remove_git_locks()
self._connected_event.wait()
if (not self.skill_updater.defaults_installed() and
self.skills_config["auto_update"]):
LOG.info('Not all default skills are installed, '
'performing skill update...')
self.skill_updater.update_skills()
self._load_on_startup()
# Sync backend and skills.
if is_paired() and not self.upload_queue.started:
self._start_settings_update()
# Scan the file folder that contains Skills. If a Skill is updated,
# unload the existing version from memory and reload from the disk.
while not self._stop_event.is_set():
try:
self._unload_removed_skills()
self._reload_modified_skills()
self._load_new_skills()
self._update_skills()
if (is_paired() and self.upload_queue.started and
len(self.upload_queue) > 0):
self.msm.clear_cache()
self.skill_updater.post_manifest()
self.upload_queue.send()
self._watchdog()
sleep(2) # Pause briefly before beginning next scan
except Exception:
LOG.exception('Something really unexpected has occured '
'and the skill manager loop safety harness was '
'hit.')
sleep(30)
def _remove_git_locks(self):
"""If git gets killed from an abrupt shutdown it leaves lock files."""
for i in glob(os.path.join(self.msm.skills_dir, '*/.git/index.lock')):
LOG.warning('Found and removed git lock file: ' + i)
os.remove(i)
def _load_on_startup(self):
"""Handle initial skill load."""
LOG.info('Loading installed skills...')
self._load_new_skills()
LOG.info("Skills all loaded!")
self.bus.emit(Message('mycroft.skills.initialized'))
self._loaded_status = True
def _reload_modified_skills(self):
"""Handle reload of recently changed skill(s)"""
for skill_dir in self._get_skill_directories():
try:
skill_loader = self.skill_loaders.get(skill_dir)
if skill_loader is not None and skill_loader.reload_needed():
# If reload succeed add settingsmeta to upload queue
if skill_loader.reload():
self.upload_queue.put(skill_loader)
except Exception:
LOG.exception('Unhandled exception occured while '
'reloading {}'.format(skill_dir))
def _load_new_skills(self):
"""Handle load of skills installed since startup."""
for skill_dir in self._get_skill_directories():
if skill_dir not in self.skill_loaders:
loader = self._load_skill(skill_dir)
if loader:
self.upload_queue.put(loader)
def _load_skill(self, skill_directory):
skill_loader = SkillLoader(self.bus, skill_directory)
try:
load_status = skill_loader.load()
except Exception:
LOG.exception('Load of skill {} failed!'.format(skill_directory))
load_status = False
finally:
self.skill_loaders[skill_directory] = skill_loader
return skill_loader if load_status else None
def _get_skill_directories(self):
skill_glob = glob(os.path.join(self.msm.skills_dir, '*/'))
skill_directories = []
for skill_dir in skill_glob:
# TODO: all python packages must have __init__.py! Better way?
# check if folder is a skill (must have __init__.py)
if SKILL_MAIN_MODULE in os.listdir(skill_dir):
skill_directories.append(skill_dir.rstrip('/'))
if skill_dir in self.empty_skill_dirs:
self.empty_skill_dirs.discard(skill_dir)
else:
if skill_dir not in self.empty_skill_dirs:
self.empty_skill_dirs.add(skill_dir)
LOG.debug('Found skills directory with no skill: ' +
skill_dir)
return skill_directories
def _unload_removed_skills(self):
"""Shutdown removed skills."""
skill_dirs = self._get_skill_directories()
# Find loaded skills that don't exist on disk
removed_skills = [
s for s in self.skill_loaders.keys() if s not in skill_dirs
]
for skill_dir in removed_skills:
skill = self.skill_loaders[skill_dir]
LOG.info('removing {}'.format(skill.skill_id))
try:
skill.unload()
except Exception:
LOG.exception('Failed to shutdown skill ' + skill.id)
del self.skill_loaders[skill_dir]
# If skills were removed make sure to update the manifest on the
# mycroft backend.
if removed_skills:
self.skill_updater.post_manifest(reload_skills_manifest=True)
def _update_skills(self):
"""Update skills once an hour if update is enabled"""
do_skill_update = (
time() >= self.skill_updater.next_download and
self.skills_config["auto_update"]
)
if do_skill_update:
self.skill_updater.update_skills()
def is_alive(self, message=None):
"""Respond to is_alive status request."""
return self._alive_status
def is_all_loaded(self, message=None):
""" Respond to all_loaded status request."""
return self._loaded_status
def send_skill_list(self, _):
"""Send list of loaded skills."""
try:
message_data = {}
for skill_dir, skill_loader in self.skill_loaders.items():
message_data[skill_loader.skill_id] = dict(
active=skill_loader.active and skill_loader.loaded,
id=skill_loader.skill_id
)
self.bus.emit(Message('mycroft.skills.list', data=message_data))
except Exception:
LOG.exception('Failed to send skill list')
def deactivate_skill(self, message):
"""Deactivate a skill."""
try:
for skill_loader in self.skill_loaders.values():
if message.data['skill'] == skill_loader.skill_id:
skill_loader.deactivate()
except Exception:
LOG.exception('Failed to deactivate ' + message.data['skill'])
def deactivate_except(self, message):
"""Deactivate all skills except the provided."""
try:
skill_to_keep = message.data['skill']
LOG.info('Deactivating all skills except {}'.format(skill_to_keep))
loaded_skill_file_names = [
os.path.basename(skill_dir) for skill_dir in self.skill_loaders
]
if skill_to_keep in loaded_skill_file_names:
for skill in self.skill_loaders.values():
if skill.skill_id != skill_to_keep:
skill.deactivate()
else:
LOG.info('Couldn\'t find skill ' + message.data['skill'])
except Exception:
LOG.exception('An error occurred during skill deactivation!')
def activate_skill(self, message):
"""Activate a deactivated skill."""
try:
for skill_loader in self.skill_loaders.values():
if (message.data['skill'] in ('all', skill_loader.skill_id) and
not skill_loader.active):
skill_loader.activate()
except Exception:
LOG.exception('Couldn\'t activate skill')
def stop(self):
"""Tell the manager to shutdown."""
self._stop_event.set()
self.settings_downloader.stop_downloading()
self.upload_queue.stop()
# Do a clean shutdown of all skills
for skill_loader in self.skill_loaders.values():
if skill_loader.instance is not None:
_shutdown_skill(skill_loader.instance)
def handle_converse_request(self, message):
"""Check if the targeted skill id can handle conversation
If supported, the conversation is invoked.
"""
skill_id = message.data['skill_id']
# loop trough skills list and call converse for skill with skill_id
skill_found = False
for skill_loader in self.skill_loaders.values():
if skill_loader.skill_id == skill_id:
skill_found = True
if not skill_loader.loaded:
error_message = 'converse requested but skill not loaded'
self._emit_converse_error(message, skill_id, error_message)
break
try:
# check the signature of a converse method
# to either pass a message or not
if len(signature(
skill_loader.instance.converse).parameters) == 1:
result = skill_loader.instance.converse(
message=message)
else:
utterances = message.data['utterances']
lang = message.data['lang']
result = skill_loader.instance.converse(
utterances=utterances, lang=lang)
self._emit_converse_response(result, message, skill_loader)
except Exception:
error_message = 'exception in converse method'
LOG.exception(error_message)
self._emit_converse_error(message, skill_id, error_message)
finally:
break
if not skill_found:
error_message = 'skill id does not exist'
self._emit_converse_error(message, skill_id, error_message)
def _emit_converse_error(self, message, skill_id, error_msg):
"""Emit a message reporting the error back to the intent service."""
reply = message.reply('skill.converse.response',
data=dict(skill_id=skill_id, error=error_msg))
self.bus.emit(reply)
def _emit_converse_response(self, result, message, skill_loader):
reply = message.reply(
'skill.converse.response',
data=dict(skill_id=skill_loader.skill_id, result=result)
)
self.bus.emit(reply)
| 38.409563
| 79
| 0.604384
|
acfbc7202f7602ecc671f60378858cf8d7b9a0ef
| 3,017
|
py
|
Python
|
src/gamesbyexample/caesarcipher.py
|
spp2/PythonStdioGames
|
7edc6a07ef816a44579800e773f30217541971fa
|
[
"MIT"
] | null | null | null |
src/gamesbyexample/caesarcipher.py
|
spp2/PythonStdioGames
|
7edc6a07ef816a44579800e773f30217541971fa
|
[
"MIT"
] | null | null | null |
src/gamesbyexample/caesarcipher.py
|
spp2/PythonStdioGames
|
7edc6a07ef816a44579800e773f30217541971fa
|
[
"MIT"
] | null | null | null |
"""Caesar Cipher, by Al Sweigart al@inventwithpython.com
The Caesar cipher is a shift cipher that uses addition and subtraction
to encrypt and decrypt letters.
More info at: https://en.wikipedia.org/wiki/Caesar_cipher
Tags: short, beginner, cryptography, math"""
__version__ = 0
try:
import pyperclip # pyperclip copies text to the clipboard.
except ImportError:
pass # If pyperclip is not installed, do nothing. It's no big deal.
# Every possible symbol that can be encrypted/decrypted:
# (!) You can add numbers and punctuation marks to encrypt those
# symbols as well.
SYMBOLS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
print('Caesar Cipher, by Al Sweigart al@inventwithpython.com')
print('The Caesar cipher encrypts letters by shifting them over by a')
print('key number. For example, a key of 2 means the letter A is')
print('encrypted into C, the letter B encrypted into D, and so on.')
print()
# Let the user enter if they are encrypting or decrypting:
while True: # Keep asking until the user enters e or d.
print('Do you want to (e)ncrypt or (d)ecrypt?')
response = input('> ').lower()
if response.startswith('e'):
mode = 'encrypt'
break
elif response.startswith('d'):
mode = 'decrypt'
break
print('Please enter the letter e or d.')
# Let the user enter the key to use:
while True: # Keep asking until the user enters a valid key.
maxKey = len(SYMBOLS) - 1
print('Please enter the key (0 to {}) to use.'.format(maxKey))
response = input('> ').upper()
if not response.isdecimal():
print('This key is not a number.')
continue
if 0 <= int(response) < len(SYMBOLS):
key = int(response)
break
# Let the user enter the message to encrypt/decrypt:
print('Enter the message to {}.'.format(mode))
message = input('> ')
# Caesar cipher only works on uppercase letters:
message = message.upper()
# Stores the encrypted/decrypted form of the message:
translated = ''
# Encrypt/decrypt each symbol in the message:
for symbol in message:
if symbol in SYMBOLS:
# Get the encrypted (or decrypted) number for this symbol.
num = SYMBOLS.find(symbol) # Get the number of the symbol.
if mode == 'encrypt':
num = num + key
elif mode == 'decrypt':
num = num - key
# Handle the wrap-around if num is larger than the length of
# SYMBOLS or less than 0:
if num >= len(SYMBOLS):
num = num - len(SYMBOLS)
elif num < 0:
num = num + len(SYMBOLS)
# Add encrypted/decrypted number's symbol to translated:
translated = translated + SYMBOLS[num]
else:
# Just add the symbol without encrypting/decrypting:
translated = translated + symbol
# Display the encrypted/decrypted string to the screen:
print(translated)
try:
pyperclip.copy(translated)
print('Full {}ed text copied to clipboard.'.format(mode))
except:
pass # Do nothing if pyperclip wasn't installed.
| 34.284091
| 72
| 0.669208
|
acfbc7e0a1078fe2ccf8707ef71aaab9927162a0
| 22,058
|
py
|
Python
|
clair3/utils.py
|
HKU-BAL/Clair3
|
9bb918bd77bdb22c2e4d2753bcd47066b720a336
|
[
"BSD-3-Clause"
] | 85
|
2021-05-17T08:20:57.000Z
|
2022-03-31T19:44:17.000Z
|
clair3/utils.py
|
HKU-BAL/Clair3
|
9bb918bd77bdb22c2e4d2753bcd47066b720a336
|
[
"BSD-3-Clause"
] | 67
|
2021-05-17T08:15:35.000Z
|
2022-03-25T23:02:44.000Z
|
clair3/utils.py
|
HKU-BAL/Clair3
|
de515cf4a8616349be55aec788d55e613ea876fd
|
[
"BSD-3-Clause"
] | 10
|
2021-06-07T05:48:39.000Z
|
2022-03-01T08:30:57.000Z
|
import sys
import gc
import copy
import shlex
import os
import tables
import numpy as np
from functools import partial
from clair3.task.main import *
from shared.interval_tree import bed_tree_from, is_region_in
from shared.utils import subprocess_popen, IUPAC_base_to_ACGT_base_dict as BASE2BASE, IUPAC_base_to_num_dict as BASE2NUM
# PyTables compression settings for the training bin files (blosc:lz4hc
# trades a little compression ratio for very fast decompression).
FILTERS = tables.Filters(complib='blosc:lz4hc', complevel=5)
# Number of buffered samples per shuffle/yield batch in the bin reader.
shuffle_bin_size = 50000
# Single-character prefixes used to disambiguate duplicate chrom:pos keys
# (e.g. the same position seen at several downsampled depths).
PREFIX_CHAR_STR = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
def setup_environment():
    """Enable automatic garbage collection for the process."""
    gc.enable()
def batches_from(iterable, item_from, batch_size=1):
    """Yield lists of up to ``batch_size`` items, each mapped by ``item_from``.

    The final batch carries the leftover items and may be empty when the
    input length is an exact multiple of ``batch_size``.
    """
    source = iter(iterable)
    while True:
        batch = []
        while len(batch) < batch_size:
            try:
                batch.append(item_from(next(source)))
            except StopIteration:
                # Source exhausted: emit the partial (possibly empty) batch.
                yield batch
                return
        yield batch
def tensor_generator_from(tensor_file_path, batch_size, pileup, platform):
    """Yield (tensor_batch, positions, alt_info_list) batches from a tensor file.

    Reads zstd-compressed tab-separated rows (chrom, coord, seq, tensor,
    alt_info) either from the given path or from stdin when the path is the
    literal "PIPE".

    NOTE(review): rebinds the module-global 'param' to the pileup or
    full-alignment parameter module depending on the mode — confirm no other
    concurrent caller depends on the opposite module.
    """
    global param
    float_type = 'int32'
    if pileup:
        import shared.param_p as param
    else:
        import shared.param_f as param
        float_type = 'int8'

    if tensor_file_path != "PIPE":
        f = subprocess_popen(shlex.split("{} -fdc {}".format(param.zstd, tensor_file_path)))
        fo = f.stdout
    else:
        fo = sys.stdin

    processed_tensors = 0
    # ONT supports a larger maximum depth than PacBio/Illumina.
    tensor_shape = param.ont_input_shape if platform == 'ont' else param.input_shape
    prod_tensor_shape = np.prod(tensor_shape)

    def item_from(row):
        # Parse one tab-separated row into (tensor, "chrom:coord:seq", seq, alt_info).
        chrom, coord, seq, tensor, alt_info = row.split("\t")
        if pileup:
            tensor = np.array(tensor.split(), dtype=np.dtype(float_type))
            depth = int(alt_info.split('-', maxsplit=1)[0])
            max_depth = param.max_depth_dict[platform]
            # for extreme high coverage data, make sure we could have a truncated coverage
            if depth > 0 and depth > max_depth * 1.5:
                scale_factor = depth / max_depth
                tensor = tensor / scale_factor
        else:
            # need add padding if depth is lower than maximum depth.
            tensor = [int(item) for item in tensor.split()]
            tensor_depth = len(tensor) // tensor_shape[1] // tensor_shape[2]
            padding_depth = tensor_shape[0] - tensor_depth
            # Split the zero padding evenly above and below the read rows.
            prefix_padding_depth = int(padding_depth / 2)
            suffix_padding_depth = padding_depth - int(padding_depth / 2)
            prefix_zero_padding = [0] * prefix_padding_depth * tensor_shape[1] * tensor_shape[2]
            suffix_zero_padding = [0] * suffix_padding_depth * tensor_shape[1] * tensor_shape[2]
            tensor = prefix_zero_padding + tensor + suffix_zero_padding
            tensor = np.array(tensor, dtype=np.dtype(float_type))

        pos = chrom + ":" + coord + ":" + seq
        return tensor, pos, seq, alt_info

    for batch in batches_from(fo, item_from=item_from, batch_size=batch_size):
        tensors = np.empty(([batch_size, prod_tensor_shape]), dtype=np.dtype(float_type))
        positions = []
        alt_info_list = []
        for tensor, pos, seq, alt_info in batch:
            # Skip candidates whose center base is not a canonical A/C/G/T.
            if seq[param.flankingBaseNum] not in BASE2NUM:
                continue
            tensors[len(positions)] = tensor
            positions.append(pos)
            alt_info_list.append(alt_info)

        current_batch_size = len(positions)
        X = np.reshape(tensors, ([batch_size] + tensor_shape))

        if processed_tensors > 0 and processed_tensors % 20000 == 0:
            print("Processed %d tensors" % processed_tensors, file=sys.stderr)

        processed_tensors += current_batch_size

        if current_batch_size <= 0:
            continue
        yield X[:current_batch_size], positions[:current_batch_size], alt_info_list[:current_batch_size]

    if tensor_file_path != "PIPE":
        fo.close()
        f.wait()
def remove_common_suffix(ref_base, alt_base):
min_length = min(len(ref_base) - 1, min([len(item) - 1 for item in alt_base])) # keep at least one base
prefix = ref_base[::-1]
for string in alt_base:
string = string[::-1]
while string[:len(prefix)] != prefix and prefix:
prefix = prefix[:len(prefix) - 1]
if not prefix:
break
res_length = len(prefix)
if res_length > min_length:
return ref_base, alt_base
return ref_base[:len(ref_base) - res_length], [item[:len(item) - res_length] for item in alt_base]
return ref_base[-min_length], [item[-min_length] for item in alt_base]
def decode_alt(ref_base, alt_base):
    """Split a (possibly multi-allelic) ALT string into per-allele lists.

    A single-allele ALT is returned as-is; for comma-separated multi-allelic
    records the common suffix between the reference and each allele is
    trimmed via remove_common_suffix.

    Returns:
        (ref_base_list, alt_base_list) with one entry per allele.
    """
    if ',' not in alt_base:
        return [ref_base], [alt_base]
    refs = []
    alts = []
    for allele in alt_base.split(','):
        trimmed_ref, trimmed_alts = remove_common_suffix(ref_base, [allele])
        refs.append(trimmed_ref)
        alts.append(trimmed_alts[0])
    return refs, alts
def variant_map_from(var_fn, tree, is_tree_empty):
    """Build truth-variant lookup tables from a (gzip-compatible) variant file.

    Each non-header row is expected to hold:
    contig, position, ref_base, alt_base, genotype1, genotype2.

    Returns:
        Y: dict "contig:pos" -> label columns (from output_labels_from_vcf_columns).
        miss_variant_set: set of "contig:pos" keys whose genotype is -1
            (treated as missing and excluded from training).
        truth_alt_dict: dict int(pos) -> (ref_base_list, alt_base_list).
            NOTE(review): keyed by position only, without the contig — assumes
            single-contig input per call; confirm against callers.
    """
    Y = {}
    truth_alt_dict = {}
    miss_variant_set = set()
    if var_fn is None:
        # No truth file: everything is treated as reference downstream.
        return Y, miss_variant_set, truth_alt_dict

    f = subprocess_popen(shlex.split("gzip -fdc %s" % (var_fn)))
    for row in f.stdout:
        if row[0] == "#":
            continue
        columns = row.strip().split()
        ctg_name, position_str, ref_base, alt_base, genotype1, genotype2 = columns
        key = ctg_name + ":" + position_str

        # A -1 genotype marks a variant lost (e.g. after downsampling).
        if genotype1 == '-1' or genotype2 == '-1':
            miss_variant_set.add(key)
            continue
        if not (is_tree_empty or is_region_in(tree, ctg_name, int(position_str))):
            continue

        Y[key] = output_labels_from_vcf_columns(columns)
        ref_base_list, alt_base_list = decode_alt(ref_base, alt_base)
        truth_alt_dict[int(position_str)] = (ref_base_list, alt_base_list)
    f.stdout.close()
    f.wait()

    return Y, miss_variant_set, truth_alt_dict
def find_read_support(pos, truth_alt_dict, alt_info):
    """Check whether every truth alternative allele has read support.

    Returns:
        None when the position is absent from the truth set;
        True when all truth alleles appear in the candidate's alternative
        summary; False otherwise (such positions are dropped from training).
    """
    fields = alt_info.rstrip().split('-')
    seqs = fields[1].split(' ') if len(fields) > 1 else ''
    # seqs alternates (alt_type, count); keep counts as ints.
    observed = dict(zip(seqs[::2], [int(c) for c in seqs[1::2]])) if len(seqs) else {}
    pos = int(pos)
    if pos not in truth_alt_dict:
        # candidate position not in the truth vcf or unified truth vcf
        return None

    ref_base_list, alt_base_list = truth_alt_dict[pos]
    found = 0
    for alt_type in observed:
        # Skip deletions-by-star, pads and reference tags.
        if '*' in alt_type or '#' in alt_type or 'R' in alt_type:
            continue
        tag = alt_type[0]
        if tag == 'X':  # SNP: single substituted base follows the tag
            if alt_type[1] in alt_base_list:
                found += 1
        elif tag == 'I':  # insertion: inserted sequence follows the tag
            if alt_type[1:] in alt_base_list:
                found += 1
        elif tag == 'D':  # deletion: deleted sequence follows the tag
            del_cigar = alt_type[1:]
            for rb, ab in zip(ref_base_list, alt_base_list):
                if rb[1:] == del_cigar and len(ab) == 1:
                    found += 1
    # False if any truth allele is missing from the (sub)sampled reads.
    return found >= len(alt_base_list)
def write_table_dict(table_dict, string, label, pos, total, alt_info, tensor_shape, pileup):
    """Buffer one training sample into ``table_dict``.

    In full-alignment mode the tensor is zero-padded (evenly at the front
    and back) up to the maximum depth ``tensor_shape[0]``.

    Returns:
        total + 1, the updated count of buffered samples.
    """
    if len(string) == 1:
        string = string[0]
    matrix = string.split()

    if pileup:
        table_dict['position_matrix'].append(matrix)
    else:
        depth = len(matrix) // tensor_shape[1] // tensor_shape[2]
        padding = tensor_shape[0] - depth
        front = int(padding / 2)
        back = padding - front
        unit = tensor_shape[1] * tensor_shape[2]
        padded = ['0'] * (front * unit) + matrix + ['0'] * (back * unit)
        table_dict['position_matrix'].append(padded)

    table_dict['position'].append(pos)
    table_dict['label'].append(label)
    table_dict['alt_info'].append(alt_info)
    return total + 1
def update_table_dict():
    """Return a fresh, empty buffer for accumulating training samples."""
    return {
        'position_matrix': [],
        'alt_info': [],
        'position': [],
        'label': [],
    }
def write_table_file(table_file, table_dict, tensor_shape, label_size, float_type):
    """Flush the buffered samples in ``table_dict`` to the open pytables file.

    Appends position_matrix / alt_info / position / label arrays to the
    corresponding EArrays of ``table_file`` and returns a fresh, empty
    buffer (the previous docstring described a different function's
    parameters and was replaced).

    Arguments:
        table_file: open pytables file with the four pre-created EArrays.
        table_dict: buffer produced by update_table_dict()/write_table_dict().
        tensor_shape: per-sample tensor shape used to reshape the flat matrix.
        label_size: number of label columns per sample.
        float_type: numpy dtype name for tensors and labels ('int32'/'int8').

    Returns:
        A new empty table_dict buffer.
    """
    position_matrix = np.array(table_dict['position_matrix'], np.dtype(float_type)).reshape([-1] + tensor_shape)
    table_file.root.position_matrix.append(position_matrix)
    table_file.root.alt_info.append(np.array(table_dict['alt_info']).reshape(-1, 1))
    table_file.root.position.append(np.array(table_dict['position']).reshape(-1, 1))
    table_file.root.label.append(np.array(table_dict['label'], np.dtype(float_type)).reshape(-1, label_size))
    table_dict = update_table_dict()
    return table_dict
def print_bin_size(path, prefix=None):
    """Print per-file and total label counts for bin files under ``path``.

    Only file names starting with ``prefix`` are counted when a prefix is
    given.
    """
    import tables
    import os
    total = 0
    for file_name in os.listdir(path):
        if prefix and not file_name.startswith(prefix):
            continue
        # NOTE(review): file handles are never closed here — matches the
        # original behavior of this debugging helper.
        table = tables.open_file(os.path.join(path, file_name), 'r')
        size = len(table.root.label)
        print("[INFO] {} size is: {}".format(file_name, size))
        total += size
    print('[INFO] total: {}'.format(total))
def bin_reader_generator_from(tensor_fn, Y_true_var, Y, is_tree_empty, tree, miss_variant_set, truth_alt_dict, is_allow_duplicate_chr_pos=False, maximum_non_variant_ratio=None):
    """
    Bin reader generator for bin file generation.
    Yields (X, total, is_last): X maps "chrom:pos" (possibly with a
    single-character prefix for duplicates) to (tensor_string, alt_info, seq);
    total is the running count of accepted rows; is_last is True only for the
    final yield.
    tensor_fn: tensor file.
    Y_true_var: dictionary (contig name: label information) containing all true variant information (should not be changed).
    Y: dictionary (contig name: label information) to store all variant and non variant information.
    tree: dictionary(contig name : intervaltree) for quick region querying.
    miss_variant_set: sometimes there will have true variant missing after downsampling reads.
    truth_alt_dict: unified truth reference base and alternative bases to find read support.
    is_allow_duplicate_chr_pos: whether allow duplicate positions when training, if there exists downsampled data, lower depth will add a random prefix character.
    maximum_non_variant_ratio: define a maximum non variant ratio for training, we always expect use more non variant data, while it would greatly increase training
    time, especially in ont data, here we usually use 1:1 or 1:2 for variant candidate: non variant candidate.

    NOTE(review): reads the module-global 'param' (set as a side effect of
    get_training_array's conditional import) — this generator must only be
    driven after get_training_array has selected the parameter module.
    """
    X = {}
    ref_list = []
    total = 0
    variant_set_with_read_support = set()
    variants_without_read_support = 0
    for row_idx, row in enumerate(tensor_fn):
        chrom, coord, seq, string, alt_info = row.split("\t")
        alt_info = alt_info.rstrip()
        if not (is_tree_empty or is_region_in(tree, chrom, int(coord))):
            continue
        seq = seq.upper()
        # Skip candidates whose center base is not a canonical A/C/G/T.
        if seq[param.flankingBaseNum] not in 'ACGT':
            continue
        key = chrom + ":" + coord
        is_reference = key not in Y_true_var
        if key in miss_variant_set:
            continue

        have_read_support = find_read_support(pos=coord, truth_alt_dict=truth_alt_dict, alt_info=alt_info)
        if have_read_support is not None and not have_read_support:
            # Truth alleles missing from (sub)sampled reads: drop permanently.
            miss_variant_set.add(key)
            variants_without_read_support += 1
            continue

        variant_set_with_read_support.add(key)

        if key not in X:
            X[key] = (string, alt_info, seq)
            if is_reference:
                ref_list.append(key)
        elif is_allow_duplicate_chr_pos:
            # Same position seen again (e.g. another downsampled depth):
            # store it under a unique single-character prefix.
            new_key = ""
            for character in PREFIX_CHAR_STR:
                tmp_key = character + key
                if tmp_key not in X:
                    new_key = tmp_key
                    break
            if len(new_key) > 0:
                X[new_key] = (string, alt_info, seq)
                if is_reference:
                    ref_list.append(new_key)
        if is_reference and key not in Y:
            Y[key] = output_labels_from_reference(BASE2BASE[seq[param.flankingBaseNum]])

        if len(X) == shuffle_bin_size:
            if maximum_non_variant_ratio is not None:
                _filter_non_variants(X, ref_list, maximum_non_variant_ratio)
            yield X, total, False
            X = {}
            ref_list = []
        total += 1
        if total % 100000 == 0:
            print("[INFO] Processed %d tensors" % total, file=sys.stderr)
    print("[INFO] Variants with read support/variants without read support: {}/{}".format(len(variant_set_with_read_support), variants_without_read_support))
    if maximum_non_variant_ratio is not None:
        _filter_non_variants(X, ref_list, maximum_non_variant_ratio)
    yield X, total, True
def _filter_non_variants(X, ref_list, maximum_non_variant_ratio):
non_variant_num = len(ref_list)
variant_num = len(X) - non_variant_num
if non_variant_num > variant_num * maximum_non_variant_ratio:
non_variant_keep_fraction = maximum_non_variant_ratio * variant_num / (1. * non_variant_num)
probabilities = np.random.random_sample((non_variant_num,))
for key, p in zip(ref_list, probabilities):
if p > non_variant_keep_fraction:
X.pop(key)
def get_training_array(tensor_fn, var_fn, bed_fn, bin_fn, shuffle=True, is_allow_duplicate_chr_pos=True, chunk_id=None,
                       chunk_num=None, platform='ont', pileup=False, maximum_non_variant_ratio=None, candidate_details_fn_prefix=None):
    """
    Generate training array for training. here pytables with blosc:lz4hc are used for extreme fast compression and decompression,
    which can meet the requirement of gpu utilization. lz4hc decompression allows speed up training array decompression 4~5x compared
    with tensorflow tfrecord file format, current gpu utilization could reach over 85% with only 10G memory.
    tensor_fn: string format tensor acquired from CreateTensorPileup or CreateTensorFullAlign, include contig name position, tensor matrix, alternative information.
    var_fn: simplified variant(vcf) format from GetTruths, which include contig name, position, reference base, alternative base, genotype.
    bin_fn: pytables format output bin file name.
    shuffle: whether apply index shuffling when generating training data, default True, which would promote robustness.
    is_allow_duplicate_chr_pos: whether allow duplicate positions when training, if there exists downsampled data, lower depth will add a random prefix character.
    chunk_id: specific chunk id works with total chunk_num for parallel execution. Here will merge all tensor file with sampe prefix.
    chunk_num: total chunk number for parallel execution. Each chunk refer to a smaller reference regions.
    platform: platform for tensor shape, ont give a larger maximum depth compared with pb and illumina.
    pileup: whether in pileup mode. Define two calling mode, pileup or full alignment.
    maximum_non_variant_ratio: define a maximum non variant ratio for training, we always expect use more non variant data, while it would greatly increase training
    time, especially in ont data, here we usually use 1:1 or 1:2 for variant candidate: non variant candidate.
    candidate_details_fn_prefix: a counter to calculate total variant and non variant from the information in alternative file.
    """
    tree = bed_tree_from(bed_file_path=bed_fn)
    is_tree_empty = len(tree.keys()) == 0
    Y_true_var, miss_variant_set, truth_alt_dict = variant_map_from(var_fn, tree, is_tree_empty)
    # Y is mutated below (reference labels added, consumed entries deleted);
    # keep the truth map itself untouched.
    Y = copy.deepcopy(Y_true_var)

    global param
    float_type = 'int32'
    if pileup:
        import shared.param_p as param
    else:
        import shared.param_f as param
        float_type = 'int8'

    tensor_shape = param.ont_input_shape if platform == 'ont' else param.input_shape

    subprocess_list = []
    if tensor_fn == 'PIPE':
        subprocess_list.append(sys.stdin)
    elif os.path.exists(tensor_fn):
        subprocess_list.append(subprocess_popen(shlex.split("{} -fdc {}".format(param.zstd, tensor_fn))).stdout)
    # select all match prefix if file path not exists
    else:
        tensor_fn = tensor_fn.split('/')
        directry, file_prefix = '/'.join(tensor_fn[:-1]), tensor_fn[-1]
        all_file_name = []
        for file_name in os.listdir(directry):
            if file_name.startswith(file_prefix + '_') or file_name.startswith(
                    file_prefix + '.'):  # add '_.' to avoid add other prefix chr
                all_file_name.append(file_name)
        all_file_name = sorted(all_file_name)
        if chunk_id is not None:
            # Ceiling division so every file lands in exactly one chunk.
            chunk_size = len(all_file_name) // chunk_num if len(all_file_name) % chunk_num == 0 else len(
                all_file_name) // chunk_num + 1
            chunk_start = chunk_size * chunk_id
            chunk_end = chunk_start + chunk_size
            all_file_name = all_file_name[chunk_start:chunk_end]
        if not len(all_file_name):
            print("[INFO] chunk_id exceed total file number, skip chunk", file=sys.stderr)
            return 0
        for file_name in all_file_name:
            subprocess_list.append(
                subprocess_popen(shlex.split("{} -fdc {}".format(param.zstd, os.path.join(directry, file_name)))).stdout)

    tables.set_blosc_max_threads(64)
    int_atom = tables.Atom.from_dtype(np.dtype(float_type))
    string_atom = tables.StringAtom(itemsize=param.no_of_positions + 50)
    long_string_atom = tables.StringAtom(itemsize=5000)  # max alt_info length
    table_file = tables.open_file(bin_fn, mode='w', filters=FILTERS)
    table_file.create_earray(where='/', name='position_matrix', atom=int_atom, shape=[0] + tensor_shape,
                             filters=FILTERS)
    table_file.create_earray(where='/', name='position', atom=string_atom, shape=(0, 1), filters=FILTERS)
    table_file.create_earray(where='/', name='label', atom=int_atom, shape=(0, param.label_size), filters=FILTERS)
    table_file.create_earray(where='/', name='alt_info', atom=long_string_atom, shape=(0, 1), filters=FILTERS)

    table_dict = update_table_dict()

    # generator to avoid high memory occupy
    bin_reader_generator = partial(bin_reader_generator_from,
                                   Y_true_var=Y_true_var,
                                   Y=Y,
                                   is_tree_empty=is_tree_empty,
                                   tree=tree,
                                   miss_variant_set=miss_variant_set,
                                   truth_alt_dict=truth_alt_dict,
                                   is_allow_duplicate_chr_pos=is_allow_duplicate_chr_pos,
                                   maximum_non_variant_ratio=maximum_non_variant_ratio)

    total_compressed = 0
    for fin in subprocess_list:
        bin_g = bin_reader_generator(tensor_fn=fin)

        completed = False
        while not completed:
            try:
                X, total, completed = next(bin_g)
            except StopIteration:
                # NOTE(review): if the generator raises StopIteration before
                # its first yield, X is unbound here — TODO confirm upstream.
                completed = True
            if X is None or not len(X):
                break
            all_chr_pos = sorted(X.keys())
            if shuffle == True:
                np.random.shuffle(all_chr_pos)
            for key in all_chr_pos:
                string, alt_info, seq = X[key]
                del X[key]
                label = None
                if key in Y:
                    label = Y[key]
                    pos = key + ':' + seq
                    if not is_allow_duplicate_chr_pos:
                        del Y[key]
                elif is_allow_duplicate_chr_pos:
                    # Duplicate entries carry a one-character prefix; strip it
                    # to find the shared label.
                    tmp_key = key[1:]
                    label = Y[tmp_key]
                    pos = tmp_key + ':' + seq
                if label is None:
                    print(key)
                    continue
                total_compressed = write_table_dict(table_dict, string, label, pos, total_compressed, alt_info,
                                                    tensor_shape, pileup)

                if total_compressed % 500 == 0 and total_compressed > 0:
                    table_dict = write_table_file(table_file, table_dict, tensor_shape, param.label_size, float_type)

                if total_compressed % 50000 == 0:
                    print("[INFO] Compressed %d tensor" % (total_compressed), file=sys.stderr)
        fin.close()

    if total_compressed % 500 != 0 and total_compressed > 0:
        table_dict = write_table_file(table_file, table_dict, tensor_shape, param.label_size, float_type)

    table_file.close()
    print("[INFO] Compressed %d/%d tensor" % (total_compressed, total), file=sys.stderr)
| 44.651822
| 177
| 0.657086
|
acfbc7f98816736279dcf48432055f9dcb8a800b
| 14,520
|
py
|
Python
|
mistral/api/controllers/v2/execution.py
|
liuzheng/mistral
|
3fd66f3b0575d909158595b19e687e7f1a6126fe
|
[
"Apache-2.0"
] | 3
|
2015-08-28T04:57:56.000Z
|
2017-03-27T10:59:56.000Z
|
mistral/api/controllers/v2/execution.py
|
liuzheng/mistral
|
3fd66f3b0575d909158595b19e687e7f1a6126fe
|
[
"Apache-2.0"
] | 21
|
2015-04-14T22:41:53.000Z
|
2019-02-20T09:30:10.000Z
|
mistral/api/controllers/v2/execution.py
|
liuzheng/mistral
|
3fd66f3b0575d909158595b19e687e7f1a6126fe
|
[
"Apache-2.0"
] | 12
|
2015-08-14T02:27:37.000Z
|
2020-12-31T10:09:21.000Z
|
# Copyright 2013 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
# Copyright 2015 Huawei Technologies Co., Ltd.
# Copyright 2016 - Brocade Communications Systems, Inc.
# Copyright 2018 - Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from pecan import rest
import sqlalchemy as sa
import tenacity
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from mistral.api import access_control as acl
from mistral.api.controllers.v2 import resources
from mistral.api.controllers.v2 import task
from mistral.api.controllers.v2 import types
from mistral import context
from mistral.db.v2 import api as db_api
from mistral import exceptions as exc
from mistral.rpc import clients as rpc
from mistral.services import workflows as wf_service
from mistral.utils import filter_utils
from mistral.utils import merge_dicts
from mistral.utils import rest_utils
from mistral.workflow import states
# Module-level logger for this REST controller module.
LOG = logging.getLogger(__name__)

# Closed set of workflow-execution states that the REST layer accepts as a
# value for the ``state`` parameter (used by wsme for input validation).
STATE_TYPES = wtypes.Enum(
    str,
    states.IDLE,
    states.RUNNING,
    states.SUCCESS,
    states.ERROR,
    states.PAUSED,
    states.CANCELLED
)
def _load_deferred_output_field(ex):
if ex:
# We need to refer to this lazy-load field explicitly in
# order to make sure that it is correctly loaded.
hasattr(ex, 'output')
return ex
def _get_workflow_execution_resource(wf_ex):
    """Convert a workflow execution DB model into its REST resource.

    The deferred 'output' column is loaded first so that the resulting
    resource carries the execution output as well.
    """
    loaded_ex = _load_deferred_output_field(wf_ex)

    return resources.Execution.from_db_model(loaded_ex)
# Retry transient DB failures so that short-lived connectivity issues do
# not immediately surface as API errors.
@tenacity.retry(
    retry=tenacity.retry_if_exception_type(sa.exc.OperationalError),
    stop=tenacity.stop_after_attempt(10),
    wait=tenacity.wait_incrementing(increment=100)  # 0.1 seconds
)
def _get_workflow_execution(id, must_exist=True):
    """Fetch a workflow execution with its deferred output field loaded.

    :param id: Workflow execution UUID.
    :param must_exist: If True the lookup raises for a missing execution;
        otherwise a missing execution yields None.
    """
    fetch = (
        db_api.get_workflow_execution
        if must_exist
        else db_api.load_workflow_execution
    )

    with db_api.transaction():
        return _load_deferred_output_field(fetch(id))
# TODO(rakhmerov): Make sure to make all needed renaming on public API.
class ExecutionsController(rest.RestController):
    """REST controller for workflow executions (/v2/executions)."""

    tasks = task.ExecutionTasksController()

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(resources.Execution, wtypes.text)
    def get(self, id):
        """Return the specified Execution.

        :param id: UUID of execution to retrieve.
        """
        acl.enforce("executions:get", context.ctx())

        LOG.debug("Fetch execution [id=%s]", id)

        wf_ex = _get_workflow_execution(id)

        return resources.Execution.from_db_model(wf_ex)

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(
        resources.Execution,
        wtypes.text,
        body=resources.Execution
    )
    def put(self, id, wf_ex):
        """Update the specified workflow execution.

        Only the state, the description and the 'env' parameter may be
        changed. Description may not be combined with a state change, and
        'env' may only accompany a transition to RUNNING.

        :param id: UUID of execution to update.
        :param wf_ex: Execution object.
        """
        acl.enforce('executions:update', context.ctx())

        LOG.debug('Update execution [id=%s, execution=%s]', id, wf_ex)

        with db_api.transaction():
            # Ensure that the workflow execution exists (raises otherwise).
            db_api.get_workflow_execution(id)

            # Collect only the properties the caller actually supplied.
            delta = {}

            if wf_ex.state:
                delta['state'] = wf_ex.state

            if wf_ex.description:
                delta['description'] = wf_ex.description

            if wf_ex.params and wf_ex.params.get('env'):
                delta['env'] = wf_ex.params.get('env')

            # Currently we can change only state, description, or env.
            if not delta:
                raise exc.InputException(
                    'The property state, description, or env '
                    'is not provided for update.'
                )

            # Description cannot be updated together with state.
            if delta.get('description') and delta.get('state'):
                raise exc.InputException(
                    'The property description must be updated '
                    'separately from state.'
                )

            # If state changes, environment can only be updated on a
            # transition to RUNNING (i.e. resume from pause).
            if (delta.get('env') and
                    delta.get('state') and delta['state'] != states.RUNNING):
                raise exc.InputException(
                    'The property env can only be updated when workflow '
                    'execution is not running or on resume from pause.'
                )

            if delta.get('description'):
                wf_ex = db_api.update_workflow_execution(
                    id,
                    {'description': delta['description']}
                )

            # Env-only update (no state change) is applied directly to the
            # stored execution.
            if not delta.get('state') and delta.get('env'):
                wf_ex = db_api.get_workflow_execution(id)
                wf_ex = wf_service.update_workflow_execution_env(
                    wf_ex,
                    delta.get('env')
                )

        # State transitions are delegated to the engine over RPC, outside
        # of the DB transaction above.
        if delta.get('state'):
            if states.is_paused(delta.get('state')):
                wf_ex = rpc.get_engine_client().pause_workflow(id)
            elif delta.get('state') == states.RUNNING:
                wf_ex = rpc.get_engine_client().resume_workflow(
                    id,
                    env=delta.get('env')
                )
            elif states.is_completed(delta.get('state')):
                msg = wf_ex.state_info if wf_ex.state_info else None
                wf_ex = rpc.get_engine_client().stop_workflow(
                    id,
                    delta.get('state'),
                    msg
                )
            else:
                # To prevent changing state in other cases throw a message.
                # NOTE: the format string previously had an unbalanced quote
                # ("...are: '%s") which produced a malformed message.
                raise exc.InputException(
                    "Cannot change state to %s. Allowed states are: '%s'" % (
                        wf_ex.state,
                        ', '.join([
                            states.RUNNING,
                            states.PAUSED,
                            states.SUCCESS,
                            states.ERROR,
                            states.CANCELLED
                        ])
                    )
                )

        return resources.Execution.from_dict(
            wf_ex if isinstance(wf_ex, dict) else wf_ex.to_dict()
        )

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(
        resources.Execution,
        body=resources.Execution,
        status_code=201
    )
    def post(self, wf_ex):
        """Create a new Execution.

        :param wf_ex: Execution object with input content.
        """
        acl.enforce('executions:create', context.ctx())

        LOG.debug("Create execution [execution=%s]", wf_ex)

        exec_dict = wf_ex.to_dict()

        exec_id = exec_dict.get('id')
        source_execution_id = exec_dict.get('source_execution_id')

        source_exec_dict = None

        if exec_id:
            # If ID is present we need to check if such execution exists.
            # If yes, the method just returns the object. If not, the ID
            # will be used to create a new execution.
            wf_ex = _get_workflow_execution(exec_id, must_exist=False)

            if wf_ex:
                return resources.Execution.from_db_model(wf_ex)

        if source_execution_id:
            # If source execution is present we will perform a lookup for
            # previous workflow execution model and the information to start
            # a new workflow based on that information.
            source_exec_dict = db_api.get_workflow_execution(
                source_execution_id).to_dict()

        # Explicit request parameters override those copied from the
        # source execution.
        result_exec_dict = merge_dicts(source_exec_dict, exec_dict)

        if not (result_exec_dict.get('workflow_id') or
                result_exec_dict.get('workflow_name')):
            raise exc.WorkflowException(
                "Workflow ID or workflow name must be provided. Workflow ID is"
                " recommended."
            )

        engine = rpc.get_engine_client()

        result = engine.start_workflow(
            result_exec_dict.get('workflow_id',
                                 result_exec_dict.get('workflow_name')),
            result_exec_dict.get('workflow_namespace', ''),
            exec_id,
            result_exec_dict.get('input'),
            description=result_exec_dict.get('description', ''),
            **result_exec_dict.get('params', {})
        )

        return resources.Execution.from_dict(result)

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
    def delete(self, id):
        """Delete the specified Execution.

        :param id: UUID of execution to delete.
        """
        acl.enforce('executions:delete', context.ctx())

        LOG.debug("Delete execution [id=%s]", id)

        return db_api.delete_workflow_execution(id)

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(resources.Executions, types.uuid, int,
                         types.uniquelist, types.list, types.uniquelist,
                         wtypes.text, types.uuid, wtypes.text, types.jsontype,
                         types.uuid, types.uuid, STATE_TYPES, wtypes.text,
                         types.jsontype, types.jsontype, wtypes.text,
                         wtypes.text, bool, types.uuid, bool)
    def get_all(self, marker=None, limit=None, sort_keys='created_at',
                sort_dirs='asc', fields='', workflow_name=None,
                workflow_id=None, description=None, params=None,
                task_execution_id=None, root_execution_id=None, state=None,
                state_info=None, input=None, output=None, created_at=None,
                updated_at=None, include_output=None, project_id=None,
                all_projects=False):
        """Return all Executions.

        :param marker: Optional. Pagination marker for large data sets.
        :param limit: Optional. Maximum number of resources to return in a
                      single result. Default value is None for backward
                      compatibility.
        :param sort_keys: Optional. Columns to sort results by.
                          Default: created_at, which is backward compatible.
        :param sort_dirs: Optional. Directions to sort corresponding to
                          sort_keys, "asc" or "desc" can be chosen.
                          Default: desc. The length of sort_dirs can be equal
                          or less than that of sort_keys.
        :param fields: Optional. A specified list of fields of the resource to
                       be returned. 'id' will be included automatically in
                       fields if it's provided, since it will be used when
                       constructing 'next' link.
        :param workflow_name: Optional. Keep only resources with a specific
                              workflow name.
        :param workflow_id: Optional. Keep only resources with a specific
                            workflow ID.
        :param description: Optional. Keep only resources with a specific
                            description.
        :param params: Optional. Keep only resources with specific parameters.
        :param task_execution_id: Optional. Keep only resources with a
                                  specific task execution ID.
        :param root_execution_id: Optional. Keep only resources with a
                                  specific root execution ID.
        :param state: Optional. Keep only resources with a specific state.
        :param state_info: Optional. Keep only resources with specific
                           state information.
        :param input: Optional. Keep only resources with a specific input.
        :param output: Optional. Keep only resources with a specific output.
        :param created_at: Optional. Keep only resources created at a specific
                           time and date.
        :param updated_at: Optional. Keep only resources with specific latest
                           update time and date.
        :param include_output: Optional. Include the output for all executions
                               in the list.
        :param project_id: Optional. Only get exectuions belong to the project.
                           Admin required.
        :param all_projects: Optional. Get resources of all projects. Admin
                             required.
        """
        acl.enforce('executions:list', context.ctx())

        # Cross-project listing requires an extra admin-level permission.
        if all_projects or project_id:
            acl.enforce('executions:list:all_projects', context.ctx())

        filters = filter_utils.create_filters_from_request_params(
            created_at=created_at,
            workflow_name=workflow_name,
            workflow_id=workflow_id,
            params=params,
            task_execution_id=task_execution_id,
            state=state,
            state_info=state_info,
            input=input,
            output=output,
            updated_at=updated_at,
            description=description,
            project_id=project_id,
            root_execution_id=root_execution_id,
        )

        LOG.debug(
            "Fetch executions. marker=%s, limit=%s, sort_keys=%s, "
            "sort_dirs=%s, filters=%s, all_projects=%s", marker, limit,
            sort_keys, sort_dirs, filters, all_projects
        )

        # Loading the deferred output column is comparatively expensive, so
        # only do it when the caller explicitly asked for it.
        if include_output:
            resource_function = _get_workflow_execution_resource
        else:
            resource_function = None

        return rest_utils.get_all(
            resources.Executions,
            resources.Execution,
            db_api.get_workflow_executions,
            db_api.get_workflow_execution,
            resource_function=resource_function,
            marker=marker,
            limit=limit,
            sort_keys=sort_keys,
            sort_dirs=sort_dirs,
            fields=fields,
            all_projects=all_projects,
            **filters
        )
| 37.8125
| 79
| 0.601653
|
acfbc8323aa1f8361a5341648dc7fae710d6f49f
| 686
|
py
|
Python
|
pingerservice/management/commands/initadmin.py
|
Dobli/pinger
|
7d9ccf7179a29128f8d9f6c64525604b84c55944
|
[
"Apache-2.0"
] | null | null | null |
pingerservice/management/commands/initadmin.py
|
Dobli/pinger
|
7d9ccf7179a29128f8d9f6c64525604b84c55944
|
[
"Apache-2.0"
] | null | null | null |
pingerservice/management/commands/initadmin.py
|
Dobli/pinger
|
7d9ccf7179a29128f8d9f6c64525604b84c55944
|
[
"Apache-2.0"
] | null | null | null |
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
class Command(BaseCommand):
    """Create superuser accounts for every entry in settings.ADMINS.

    Each ADMINS entry is expected to be a (name, email, password) triple
    (note: this extends Django's conventional 2-tuple format). The
    username is derived from the name with spaces removed; entries whose
    username already exists are skipped with a notice.
    """

    def handle(self, *args, **options):
        for name, email, password in settings.ADMINS:
            # Usernames must not contain spaces, so strip them out.
            username = name.replace(' ', '')

            if not User.objects.filter(username=username).exists():
                print('Creating account for %s (%s)' % (username, email))
                User.objects.create_superuser(username, email, password)
            else:
                # Bug fix: the two adjacent string literals previously
                # joined without a space ("...can only beinitialized...").
                print('Admin accounts can only be '
                      'initialized if no Accounts exist')
| 36.105263
| 73
| 0.600583
|
acfbc84a68cac0782cb566c563966c367065dcf8
| 3,888
|
py
|
Python
|
sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2021_03_08/aio/_application_insights_management_client.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-09T08:59:13.000Z
|
2022-03-09T08:59:13.000Z
|
sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2021_03_08/aio/_application_insights_management_client.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2021_03_08/aio/_application_insights_management_client.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-04T06:21:56.000Z
|
2022-03-04T06:21:56.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, Optional, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
from .. import models
from ._configuration import ApplicationInsightsManagementClientConfiguration
from .operations import MyWorkbooksOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ApplicationInsightsManagementClient:
    """Composite Swagger for Application Insights Management Client.

    :ivar my_workbooks: MyWorkbooksOperations operations
    :vartype my_workbooks:
        azure.mgmt.applicationinsights.v2021_03_08.aio.operations.MyWorkbooksOperations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: The ID of the target subscription.
    :type subscription_id: str
    :param base_url: Service URL. Default value is 'https://management.azure.com'.
    :type base_url: str
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        **kwargs: Any
    ) -> None:
        # Pipeline configuration and the ARM pipeline client itself.
        self._config = ApplicationInsightsManagementClientConfiguration(
            credential=credential,
            subscription_id=subscription_id,
            **kwargs
        )
        self._client = AsyncARMPipelineClient(
            base_url=base_url,
            config=self._config,
            **kwargs
        )

        # Build (de)serializers from every model class the package exposes.
        model_classes = {
            name: cls
            for name, cls in models.__dict__.items()
            if isinstance(cls, type)
        }
        self._serialize = Serializer(model_classes)
        self._deserialize = Deserializer(model_classes)
        self._serialize.client_side_validation = False

        # Operation groups.
        self.my_workbooks = MyWorkbooksOperations(
            self._client, self._config, self._serialize, self._deserialize
        )

    def _send_request(
        self,
        request: HttpRequest,
        **kwargs: Any
    ) -> Awaitable[AsyncHttpResponse]:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = await client._send_request(request)
        <AsyncHttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.AsyncHttpResponse
        """
        # Copy the request so the caller's object is never mutated, then
        # expand the relative URL against the client's base URL.
        prepared = deepcopy(request)
        prepared.url = self._client.format_url(prepared.url)
        return self._client.send_request(prepared, **kwargs)

    async def close(self) -> None:
        """Close the underlying pipeline client."""
        await self._client.close()

    async def __aenter__(self) -> "ApplicationInsightsManagementClient":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details) -> None:
        await self._client.__aexit__(*exc_details)
| 43.2
| 137
| 0.698045
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.