# ============================================================================
# Sample 1: ResearchHub discussion models (Django)
# ============================================================================
from django.db.models import (
Count,
Q,
F
)
from django.contrib.contenttypes.fields import (
GenericForeignKey,
GenericRelation
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.postgres.fields import JSONField
from django.core.cache import cache
from django.db import models
from paper.utils import get_cache_key
from purchase.models import Purchase
from researchhub.lib import CREATED_LOCATIONS
from .reaction_models import Flag, Vote, Endorsement
HELP_TEXT_WAS_EDITED = (
'True if the comment text was edited after first being created.'
)
HELP_TEXT_IS_PUBLIC = (
    'True if the comment is visible to the public.'
)
HELP_TEXT_IS_REMOVED = (
'Hides the comment because it is not allowed.'
)
class BaseComment(models.Model):
CREATED_LOCATION_PROGRESS = CREATED_LOCATIONS['PROGRESS']
CREATED_LOCATION_CHOICES = [
(CREATED_LOCATION_PROGRESS, 'Progress')
]
created_by = models.ForeignKey(
'user.User',
on_delete=models.SET_NULL,
blank=True,
null=True,
)
created_date = models.DateTimeField(auto_now_add=True, db_index=True)
updated_date = models.DateTimeField(auto_now=True)
created_location = models.CharField(
choices=CREATED_LOCATION_CHOICES,
max_length=255,
default=None,
null=True,
blank=True
)
was_edited = models.BooleanField(
default=False,
help_text=HELP_TEXT_WAS_EDITED
)
is_public = models.BooleanField(
default=True,
help_text=HELP_TEXT_IS_PUBLIC
)
is_removed = models.BooleanField(
default=False,
help_text=HELP_TEXT_IS_REMOVED
)
ip_address = models.GenericIPAddressField(
unpack_ipv4=True,
blank=True,
null=True
)
text = JSONField(blank=True, null=True)
external_metadata = JSONField(null=True)
votes = GenericRelation(
Vote,
object_id_field='object_id',
content_type_field='content_type',
related_query_name='discussion'
)
flags = GenericRelation(Flag)
endorsement = GenericRelation(Endorsement)
plain_text = models.TextField(default='', blank=True)
source = models.CharField(default='researchhub', max_length=32, null=True)
purchases = GenericRelation(
Purchase,
object_id_field='object_id',
content_type_field='content_type',
related_query_name='discussion'
)
contributions = GenericRelation(
'reputation.Contribution',
object_id_field='object_id',
content_type_field='content_type',
related_query_name='discussion'
)
class Meta:
abstract = True
# TODO make this a mixin Actionable or Notifiable
@property
def owners(self):
if self.created_by:
return [self.created_by]
else:
return []
# TODO make this a mixin Actionable or Notifiable
@property
def users_to_notify(self):
parent_owners = self.parent.owners
return parent_owners
@property
def created_by_author_profile_indexing(self):
if self.created_by:
author = self.created_by.author_profile
if author:
return author
return None
@property
def children(self):
return BaseComment.objects.none()
@property
def score_indexing(self):
return self.calculate_score()
def calculate_score(self, ignore_self_vote=False):
if hasattr(self, 'score'):
return self.score
else:
qs = self.votes.filter(
created_by__is_suspended=False,
created_by__probable_spammer=False
)
if ignore_self_vote:
qs = qs.exclude(created_by=F('discussion__created_by'))
score = qs.aggregate(
score=Count(
'id', filter=Q(vote_type=Vote.UPVOTE)
) - Count(
'id', filter=Q(vote_type=Vote.DOWNVOTE)
)
).get('score', 0)
return score
def update_discussion_count(self):
paper = self.paper
if paper:
new_dis_count = paper.get_discussion_count()
paper.calculate_hot_score()
paper.discussion_count = new_dis_count
paper.save(update_fields=['discussion_count'])
cache_key = get_cache_key('paper', paper.id)
cache.delete(cache_key)
for h in paper.hubs.all():
h.discussion_count = h.get_discussion_count()
h.save(update_fields=['discussion_count'])
return new_dis_count
post = self.post
hypothesis = self.hypothesis
instance = post or hypothesis
if instance:
new_dis_count = instance.get_discussion_count()
instance.discussion_count = new_dis_count
instance.save()
return new_dis_count
return 0
    def remove_nested(self):
        if self.is_removed is False:
            self.is_removed = True
            self.save(update_fields=['is_removed'])
        for child in self.children:
            child.remove_nested()
def get_promoted_score(self):
purchases = self.purchases.filter(
paid_status=Purchase.PAID,
)
if purchases.exists():
boost_score = sum(
map(int, purchases.values_list('amount', flat=True))
)
return boost_score
return False
class Thread(BaseComment):
CITATION_COMMENT = 'citation_comment'
INLINE_ABSTRACT = 'inline_abstract'
INLINE_PAPER_BODY = 'inline_paper_body'
RESEARCHHUB = 'researchhub'
THREAD_SOURCE_CHOICES = [
(CITATION_COMMENT, 'Citation Comment'),
(INLINE_ABSTRACT, 'Inline Abstract'),
(INLINE_PAPER_BODY, 'Inline Paper Body'),
(RESEARCHHUB, 'researchhub'),
]
source = models.CharField(
default=RESEARCHHUB,
choices=THREAD_SOURCE_CHOICES,
max_length=32
)
block_key = models.CharField(max_length=255, null=True, blank=True)
context_title = models.TextField(
blank=True,
null=True,
help_text="For inline-comments, indicates what's highlighted"
)
entity_key = models.CharField(max_length=255, null=True, blank=True)
title = models.CharField(
max_length=255,
null=True,
blank=True
)
paper = models.ForeignKey(
'paper.Paper',
on_delete=models.SET_NULL,
related_name='threads',
blank=True,
null=True
)
post = models.ForeignKey(
'researchhub_document.ResearchhubPost',
on_delete=models.SET_NULL,
related_name='threads',
blank=True,
null=True
)
hypothesis = models.ForeignKey(
'hypothesis.Hypothesis',
on_delete=models.SET_NULL,
related_name='threads',
null=True,
blank=True,
)
citation = models.ForeignKey(
'hypothesis.Citation',
on_delete=models.SET_NULL,
related_name='threads',
null=True,
blank=True,
)
actions = GenericRelation(
'user.Action',
object_id_field='object_id',
content_type_field='content_type',
related_query_name='threads'
)
def __str__(self):
return '%s: %s' % (self.created_by, self.title)
@property
def parent(self):
return self.paper
@property
def unified_document(self):
paper = self.paper
if paper:
return paper.unified_document
post = self.post
if post:
return post.unified_document
hypothesis = self.hypothesis
if hypothesis:
return hypothesis.unified_document
citation = self.citation
if citation:
return citation.source
return None
@property
def children(self):
return self.comments.filter(is_removed=False)
@property
    def comment_count_indexing(self):
        return self.comments.filter(is_removed=False).count()
@property
def paper_indexing(self):
if self.paper is not None:
return self.paper.id
@property
def paper_title_indexing(self):
if self.paper is not None:
return self.paper.title
@property
def owners(self):
if (
self.created_by
and self.created_by.emailrecipient.thread_subscription
and not self.created_by.emailrecipient.thread_subscription.none
):
return [self.created_by]
else:
return []
@property
def users_to_notify(self):
# TODO: Add notifications to posts and hypotheses
if self.post or self.hypothesis or self.citation:
return []
users = list(self.parent.moderators.all())
paper_authors = self.parent.authors.all()
for author in paper_authors:
if (
author.user
and author.user.emailrecipient.paper_subscription.threads
and not author.user.emailrecipient.paper_subscription.none
):
users.append(author.user)
return users
class Reply(BaseComment):
content_type = models.ForeignKey(
ContentType,
on_delete=models.SET_NULL,
blank=True,
null=True
)
object_id = models.PositiveIntegerField()
parent = GenericForeignKey('content_type', 'object_id')
replies = GenericRelation('Reply')
actions = GenericRelation(
'user.Action',
object_id_field='object_id',
content_type_field='content_type',
related_query_name='replies'
)
@property
    def paper(self):
        comment = self.get_comment_of_reply()
        if comment:
            return comment.paper
@property
def post(self):
comment = self.get_comment_of_reply()
if comment:
post = comment.post
return post
@property
def hypothesis(self):
comment = self.get_comment_of_reply()
if comment:
hypothesis = comment.hypothesis
return hypothesis
@property
    def thread(self):
        comment = self.get_comment_of_reply()
        if comment:
            return comment.parent
@property
    def unified_document(self):
        thread = self.thread
        if thread is None:
            return None
        paper = thread.paper
        if paper:
            return paper.unified_document
post = thread.post
if post:
return post.unified_document
hypothesis = thread.hypothesis
if hypothesis:
return hypothesis.unified_document
return None
@property
def children(self):
return self.replies.filter(is_removed=False)
def get_comment_of_reply(self):
obj = self
while isinstance(obj, Reply):
obj = obj.parent
if isinstance(obj, Comment):
return obj
return None
@property
def owners(self):
return [self.created_by]
@property
def users_to_notify(self):
# TODO: No siblings for now. Do we need this?
# sibling_comment_users = []
# for c in self.parent.children.prefetch_related(
# 'created_by',
# 'created_by__emailrecipient',
# 'created_by__emailrecipient__thread_subscription',
# 'created_by__emailrecipient__comment_subscription'
# ):
# if (
# c != self
# and c.created_by not in sibling_comment_users
# and c.created_by.emailrecipient.thread_subscription
# and c.created_by.emailrecipient.thread_subscription.replies
# and c.created_by.emailrecipient.comment_subscription
# and c.created_by.emailrecipient.comment_subscription.replies
# ):
# sibling_comment_users.append(c.created_by)
# return parent_owners + sibling_comment_users
        users = []
        p = self.parent
        if p is None:
            return users
        if isinstance(p, Reply):
if (
p.created_by
and p.created_by.emailrecipient.reply_subscription.replies
and not p.created_by.emailrecipient.reply_subscription.none
and not p.created_by == self.created_by
):
users.append(p.created_by)
else:
if (
p.created_by
and p.created_by.emailrecipient.comment_subscription.replies
and not p.created_by.emailrecipient.comment_subscription.none
):
users.append(p.created_by)
return users
class Comment(BaseComment):
parent = models.ForeignKey(
Thread,
on_delete=models.SET_NULL,
related_name='comments',
blank=True,
null=True
)
replies = GenericRelation(Reply)
actions = GenericRelation(
'user.Action',
object_id_field='object_id',
content_type_field='content_type',
related_query_name='comments'
)
def __str__(self):
return '{} - {}'.format(self.created_by, self.plain_text)
@property
def paper(self):
thread = self.parent
if thread:
paper = thread.paper
return paper
@property
def post(self):
thread = self.parent
if thread:
post = thread.post
return post
@property
def hypothesis(self):
thread = self.parent
if thread:
hypothesis = thread.hypothesis
return hypothesis
@property
    def unified_document(self):
        thread = self.thread
        if thread is None:
            return None
        paper = thread.paper
if paper:
return paper.unified_document
post = thread.post
if post:
return post.unified_document
hypothesis = thread.hypothesis
if hypothesis:
return hypothesis.unified_document
return None
@property
def thread(self):
thread = self.parent
return thread
@property
def children(self):
return self.replies.filter(is_removed=False)
@property
def owners(self):
return [self.created_by]
@property
    def users_to_notify(self):
        users = []
        p = self.parent
        if p is None:
            return users
if (
p.created_by
and p.created_by.emailrecipient.thread_subscription.comments
and not p.created_by.emailrecipient.thread_subscription.none
and not p.created_by == self.created_by
):
users.append(p.created_by)
return users
# TODO: No siblings for now. Do we need this?
# sibling_comment_users = []
# for c in self.parent.children.prefetch_related(
# 'created_by',
# 'created_by__emailrecipient',
# 'created_by__emailrecipient__thread_subscription'
# ):
# if (
# c != self
# and c.created_by not in sibling_comment_users
# and c.created_by.emailrecipient.thread_subscription
# and c.created_by.emailrecipient.thread_subscription.comments
# ):
# sibling_comment_users.append(c.created_by)
# return parent_owners + sibling_comment_users
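# --- Hedged illustration (not part of the original module) ---
# BaseComment.calculate_score() computes "upvotes minus downvotes" in a single
# query via Count(..., filter=Q(...)). A minimal pure-Python sketch of the
# same semantics; the vote-type constants here are assumed stand-ins for
# Vote.UPVOTE / Vote.DOWNVOTE:
def net_score(vote_types, upvote=1, downvote=2):
    """Upvote count minus downvote count, as the ORM aggregate computes it."""
    return sum(v == upvote for v in vote_types) - sum(v == downvote for v in vote_types)

assert net_score([1, 1, 2, 1]) == 2  # three upvotes, one downvote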
# ============================================================================
# Sample 2: Discord bot that announces JonTron YouTube uploads
# ============================================================================
from discord.ext import commands
import discord
from datetime import datetime, time, timedelta
import asyncio
import requests
import json
import os
jontron_url = 'https://www.youtube.com/user/JonTronShow/videos'
try:
with open('./auth_tokens.json', 'r') as filein:
token = json.load(filein)['token']
except FileNotFoundError:
token = os.environ.get('token')
bot = commands.Bot(command_prefix="&")
current_title = 'Simplifying Corporate Logos - JonTron'
WHEN = time(16, 0, 0)  # 16:00 UTC (12:00 PM in the author's local timezone)
channel_id = 290915716255711232 # Put your channel id here
date_mapper = {
'Jan': 'January',
'Feb': 'February',
'Mar': 'March',
'Apr': 'April',
'May': 'May',
'Jun': 'June',
'Jul': 'July',
'Aug': 'August',
'Sep': 'September',
'Oct': 'October',
'Nov': 'November',
'Dec': 'December'
}
def get_jontron_video_title(content):
try:
focused_content = content[ content.index('gridVideoRenderer'): ]
focused_content = focused_content[ focused_content.index('"text":') + len('"text":'): ]
left = focused_content.index('"')+1
right = focused_content[ focused_content.index('"')+1: ].index('"')+1
title_name = focused_content[ left:right ]
return title_name
except ValueError:
print('ERROR: could not get jontron video title\nSubstring did not work')
def get_jontron_video_image_url(content):
try:
focused_content = content[ content.index('gridVideoRenderer'): ]
focused_content = focused_content[ focused_content.index('"url":') + len('"url":'): ]
left = focused_content.index('"')+1
right = focused_content[ focused_content.index('"')+1: ].index('"')+1
picture_url = focused_content [ left:right ]
return picture_url
except ValueError:
print('ERROR: could not get jontron video image\nSubstring did not work')
def get_jontron_video_date(content):
try:
focused_content = content[ content.index('gridVideoRenderer'): ]
focused_content = focused_content[ focused_content.index('"text":') + len('"text":'): ]
        focused_content = focused_content[ focused_content.index('"url":') + len('"url":'): ]
        left = focused_content.index('"')+1
        right = focused_content[ focused_content.index('"')+1: ].index('"')+1
        watch_url = f'https://www.youtube.com{focused_content[left:right]}'
results = requests.get(watch_url)
content = results.text
focused_content = content[ content.index('"dateText":{"simpleText":') + len('"dateText":{"simpleText":'): ]
left = focused_content.index('"')+1
right = focused_content[ focused_content.index('"')+1: ].index('"')+1
video_date = focused_content[ left:right ]
month = date_mapper[video_date.split(' ')[0]]
date = video_date.replace(video_date.split(' ')[0], month)
date = datetime.strptime(date,"%B %d, %Y")
return date
except ValueError:
print('ERROR: could not get jontron video date\nSubstring did not work')
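# --- Hedged illustration (not in the original bot) ---
# get_jontron_video_date() widens an abbreviated month ('Mar 5, 2021' ->
# 'March 5, 2021') because strptime's %B only matches full month names.
# The same two steps in isolation (a sketch using the date_mapper above):
def _parse_short_date(text):
    """Widen the month via date_mapper, then parse with %B."""
    month_abbrev = text.split(' ')[0]
    return datetime.strptime(
        text.replace(month_abbrev, date_mapper[month_abbrev]), '%B %d, %Y')

# _parse_short_date('Mar 5, 2021') -> datetime(2021, 3, 5, 0, 0)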
def get_jontron_watch_url(content):
try:
focused_content = content[ content.index('gridVideoRenderer'): ]
focused_content = focused_content[ focused_content.index('"text":') + len('"text":'): ]
        focused_content = focused_content[ focused_content.index('"url":') + len('"url":'): ]
        left = focused_content.index('"')+1
        right = focused_content[ focused_content.index('"')+1: ].index('"')+1
        watch_url = f'https://www.youtube.com{focused_content[left:right]}'
return watch_url
except ValueError:
print('ERROR: could not get jontron video url\nSubstring did not work')
def get_jontron():
content = requests.get(jontron_url).text
return {
"title": get_jontron_video_title(content),
"image": get_jontron_video_image_url(content),
"date": get_jontron_video_date(content),
"url": get_jontron_watch_url(content)
}
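# --- Hedged illustration (not in the original bot) ---
# Every scraper above repeats the same move: find a marker such as '"text":'
# or '"url":', then take the first double-quoted value after it. The pattern
# as one standalone helper (a sketch, assuming an opening quote follows the
# marker before any other quote appears):
def first_quoted_value(content, marker):
    """Return the first "..."-delimited string after `marker`, or None."""
    try:
        rest = content[content.index(marker) + len(marker):]
        start = rest.index('"') + 1
        end = rest.index('"', start)
        return rest[start:end]
    except ValueError:
        return None

# Example: first_quoted_value('{"title":{"text":"Hello"}}', '"text":') == 'Hello'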
async def called_once_a_day(): # Fired every day
await bot.wait_until_ready() # Make sure your guild cache is ready so the channel can be found via get_channel
channel = bot.get_channel(channel_id) # Note: It's more efficient to do bot.get_guild(guild_id).get_channel(channel_id) as there's less looping involved, but just get_channel still works fine
jontron = get_jontron()
video_em = discord.Embed()
video_em.set_image(url=jontron['image'])
await channel.send(f'Good Afternoon!\nIt\'s been {(datetime.now() - jontron["date"]).days} days since JonTron uploaded "{jontron["title"]}"', embed=video_em)
async def upload_check_background_task():
global current_title
while True:
jontron = get_jontron()
if (current_title != jontron['title']):
await bot.wait_until_ready() # Make sure your guild cache is ready so the channel can be found via get_channel
channel = bot.get_channel(channel_id)
video_em = discord.Embed()
video_em.set_image(url=jontron['image'])
await channel.send(f'JONTRON HAS UPLOADED\nTHIS IS NOT A DRILL!!!\n:rotating_light::rotating_light::rotating_light::rotating_light::rotating_light::rotating_light::rotating_light::rotating_light::rotating_light::rotating_light::rotating_light::rotating_light:\n{jontron["url"]}', embed=video_em)
current_title = jontron['title']
else:
print('No JonTron Upload :(')
await asyncio.sleep(3600)
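# --- Hedged illustration (not in the original bot) ---
# upload_check_background_task() is a poll-compare-notify loop. Its skeleton,
# stripped of the Discord specifics (a sketch; `fetch` returns the latest
# title and `on_change` is an async callback):
async def poll_for_change(fetch, on_change, interval=3600):
    last = None
    while True:
        current = fetch()
        if last is not None and current != last:
            await on_change(current)
        last = current
        await asyncio.sleep(interval)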
async def morning_upload_background_task():
now = datetime.utcnow()
if now.time() > WHEN: # Make sure loop doesn't start after {WHEN} as then it will send immediately the first time as negative seconds will make the sleep yield instantly
tomorrow = datetime.combine(now.date() + timedelta(days=1), time(0))
seconds = (tomorrow - now).total_seconds() # Seconds until tomorrow (midnight)
await asyncio.sleep(seconds) # Sleep until tomorrow and then the loop will start
while True:
now = datetime.utcnow() # You can do now() or a specific timezone if that matters, but I'll leave it with utcnow
target_time = datetime.combine(now.date(), WHEN) # 6:00 PM today (In UTC)
seconds_until_target = (target_time - now).total_seconds()
await asyncio.sleep(seconds_until_target) # Sleep until we hit the target time
await called_once_a_day() # Call the helper function that sends the message
        now = datetime.utcnow()  # refresh: `now` is stale after the sleep and send above
        tomorrow = datetime.combine(now.date() + timedelta(days=1), time(0))
        seconds = (tomorrow - now).total_seconds()  # Seconds until tomorrow (midnight)
        await asyncio.sleep(seconds)  # Sleep until tomorrow and then the loop will start a new iteration
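# --- Hedged illustration (not in the original bot) ---
# The loop above interleaves "sleep until WHEN" and "sleep until midnight".
# The same idea as a single reusable computation (a sketch; `when` is a
# datetime.time and the result feeds asyncio.sleep):
def seconds_until_next(when, now=None):
    """Seconds from `now` until the next daily occurrence of `when` (UTC)."""
    now = now or datetime.utcnow()
    target = datetime.combine(now.date(), when)
    if now >= target:  # today's slot already passed, aim for tomorrow's
        target += timedelta(days=1)
    return (target - now).total_seconds()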
@bot.command()
async def JontronPlz(ctx):
jontron = get_jontron()
video_em = discord.Embed()
video_em.set_image(url=jontron['image'])
await ctx.channel.send(f'It\'s been {(datetime.now() - jontron["date"]).days} days since JonTron uploaded "{jontron["title"]}"', embed=video_em)
if __name__ == "__main__":
print(f'Running message at: {WHEN.hour}:{WHEN.minute}:{WHEN.second}')
bot.loop.create_task(morning_upload_background_task())
bot.loop.create_task(upload_check_background_task())
bot.run(token)
# ============================================================================
# Sample 3: Mayavi 3D visualisation of a GM-PHD over CASAS sensor embeddings
# ============================================================================
import sys
import numpy as np
from mayavi import mlab
from mayavi.scripts import mayavi2
from traits.api import HasTraits, Button, Instance
from traitsui.api import View, Item
from ._plot3d import plot3d_embeddings
def plot3d_gmphd(dataset, embeddings, grid, gm_s=None, gm_list=None,
observation=None, title=None, contours=4,
log_plot=True):
"""3D plot of CASAS sensor embedding with GM-PHD sampled by grid.
Multi-target PHD represented either by scalar ``gm_s`` or Gaussian Mixture
``gm_list`` is plotted as 3D contour graph with mayavi.
Current observations and sensor embedding are plotted as spheres as well.
Args:
dataset (:obj:`~pymrt.casas.CASASDataset`): CASAS smart home dataset.
embeddings (:obj:`numpy.ndarray`): 3D sensor vector embedding of shape
(num_sensors, 3) where num_sensors corresponds to the length of
``dataset.sensor_list``.
grid (:obj:`numpy.ndarray`): 3D mesh generated by :func:`numpy.mgrid` or
:func:`numpy.meshgrid`.
gm_s (:obj:`numpy.ndarray`): Multi-target PHD scalar sampled at each
point defined by the 3D mesh grid ``grid``.
gm_list (:obj:`list`): List of
:obj:`~pymrt.tracking.utils.GaussianComponent` representing the
multi-target PHD at the moment. If ``gm_s`` is None, this list is
used to generate the PHD scalar for plotting.
observation (:obj:`list`): List of observations to be plotted. Each
observation is a :obj:`numpy.ndarray` of shape (n, 1). It has to
be the embedding vector of one of the sensor in the dataset.
title (:obj:`string`): Plot title.
contours (:obj:`int`): Number of contour surfaces to draw.
log_plot (:obj:`bool`): Plot ``gm_s`` in log scale.
"""
if gm_s is None:
if gm_list is None:
raise ValueError("Must provide 3D sampled GM scalar gm_s or a "
"Gaussian Mixture list")
else:
print('Sampling PHD in 3D space')
from ...tracking.utils import gm_calculate
gm_s = gm_calculate(gm_list=gm_list, grid=grid)
if title is None:
title = 'PHD'
print('Start Plotting with Mayavi')
figure = mlab.figure(dataset.get_name() + ' ' + title)
if log_plot:
        contour_s = np.log(gm_s + np.finfo(float).tiny)
else:
contour_s = gm_s
# Plot Contour Surf first
contour = mlab.contour3d(
grid[0],
grid[1],
grid[2],
contour_s,
contours=contours,
transparent=True,
opacity=0.5
)
mlab.colorbar(contour, title='PHD', orientation='vertical')
_, points = plot3d_embeddings(dataset, embeddings, figure=figure)
if observation is not None:
obs_array = np.block(observation).T
obs_points = mlab.points3d(
obs_array[:, 0], obs_array[:, 1], obs_array[:, 2],
scale_factor=0.03, color=(0, 0, 1)
)
mlab.show()
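# --- Hedged illustration (not part of the original module) ---
# plot3d_gmphd() expects `grid` from numpy.mgrid and a PHD scalar `gm_s`
# sampled on it. A minimal sketch building both for one isotropic Gaussian
# (no mayavi call, so it runs headless):
def _demo_grid_and_scalar(sigma=0.3, n=20j):
    grid = np.mgrid[-1:1:n, -1:1:n, -1:1:n]        # 3 x N x N x N mesh
    sq_dist = sum(g ** 2 for g in grid)            # squared distance from origin
    gm_s = np.exp(-sq_dist / (2 * sigma ** 2))     # Gaussian "PHD" on the grid
    return grid, gm_s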
def plot3d_gmphd_track(dataset, embeddings, grid, gm_s_list=None,
gm_list_list=None, observation_list=None,
title=None, contours=4, log_plot=True):
""" 3D plot of CASAS sensor embedding with GM-PHD sampled by grid.
Multi-target PHD represented either by scalar ``gm_s`` or Gaussian Mixture
``gm_list`` is plotted as 3D contour graph with mayavi.
Current observations and sensor embedding are plotted as spheres as well.
It wraps the whole sequence in a mayavi application, user can go back and
forth in time and visually see how the PHD changes in time.
Args:
dataset (:obj:`~pymrt.casas.CASASDataset`): CASAS smart home dataset.
embeddings (:obj:`numpy.ndarray`): 3D sensor vector embedding of shape
(num_sensors, 3) where num_sensors corresponds to the length of
``dataset.sensor_list``.
grid (:obj:`numpy.ndarray`): 3D mesh generated by :func:`numpy.mgrid` or
:func:`numpy.meshgrid`.
gm_s_list (:obj:`list`): List of PHD scalars at each time step.
gm_list_list (:obj:`list`): List of Gaussian Mixtures at each time step.
If ``gm_s_list`` is None, it is used along with ``grid`` to generate
the PHD scalar at each time step.
observation_list (:obj:`list`): List of observations at each time step.
title (:obj:`string`): Plot title.
contours (:obj:`int`): Number of contour surfaces to draw.
log_plot (:obj:`bool`): Plot ``gm_s`` in log scale.
"""
if gm_s_list is None:
if gm_list_list is None:
raise ValueError("Must provide 3D sampled GM scalar gm_s or a "
"Gaussian Mixture list")
else:
print('Sampling PHD in 3D space')
from ...tracking.utils import gm_calculate
            gm_s_list = []
            for i, gm_list in enumerate(gm_list_list):
                sys.stdout.write('calculate gm_scalar for step %d\n' % i)
                gm_s_list.append(gm_calculate(
                    gm_list=gm_list, grid=grid
                ))
if title is None:
title = 'PHD'
print('Start Plotting with Mayavi')
class Controller(HasTraits):
next_frame = Button('Next Frame')
previous_frame = Button('Previous Frame')
view = View(
Item(name='next_frame'),
Item(name='previous_frame')
)
current_frame = 0
play_state = False
def _next_frame_changed(self, value):
"""Goto next frame"""
if self.current_frame + 1 < len(gm_s_list):
self.current_frame += 1
self.update_frame()
def _previous_frame_changed(self, value):
"""Goto previous frame"""
if self.current_frame - 1 >= 0:
self.current_frame -= 1
self.update_frame()
def update_frame(self):
print('Frame %d' % self.current_frame)
if log_plot:
contour_s = np.log(
                    gm_s_list[self.current_frame] + np.finfo(float).tiny
)
else:
contour_s = gm_s_list[self.current_frame]
self.phd_contour.mlab_source.set(
scalars=contour_s
)
self.color_vector[:] = 0.
if observation_list is not None:
obs_array = observation_list[self.current_frame]
obs_index = [
np.where(
np.all(embeddings == sensor_vec.flatten(), axis=1)
)[0][0]
for sensor_vec in obs_array
]
self.color_vector[obs_index] = 1.
self.sensor_points.mlab_source.dataset.point_data.scalars = \
self.color_vector
mlab.draw()
@mayavi2.standalone
def main_view():
"""Example showing how to view a 3D numpy array in mayavi2.
"""
figure = mlab.figure(dataset.get_name() + ' ' + title)
if log_plot:
            contour_s = np.log(gm_s_list[0] + np.finfo(float).tiny)
else:
contour_s = gm_s_list[0]
# Plot Contour Surf first
contour = mlab.contour3d(
grid[0],
grid[1],
grid[2],
contour_s,
contours=contours,
transparent=True,
opacity=0.5
)
mlab.colorbar(contour, title='PHD', orientation='vertical')
_, points = plot3d_embeddings(dataset, embeddings, figure=figure)
points.glyph.scale_mode = 'scale_by_vector'
        points.mlab_source.dataset.point_data.vectors = np.tile(
            np.ones(embeddings.shape[0]), (3, 1)).T  # one constant 3-vector per point
color_vector = np.zeros(embeddings.shape[0])
points.mlab_source.dataset.point_data.scalars = color_vector
if observation_list is not None:
obs_array = observation_list[0]
obs_index = [
np.where(
np.all(embeddings == sensor_vec.flatten(), axis=1)
)[0][0]
for sensor_vec in obs_array
]
color_vector[obs_index] = 1.
computation = Controller(
sensor_points=points,
phd_contour=contour,
color_vector=color_vector,
figure=figure
)
computation.edit_traits()
main_view()
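# --- Hedged illustration (not part of the original module) ---
# Controller relies on Traits' static-handler convention: a method named
# _<trait>_changed fires whenever <trait> changes, and clicking a Button
# counts as a change. Minimal standalone sketch:
class _Stepper(HasTraits):
    step = Button('Step')
    count = 0

    def _step_changed(self):
        self.count += 1  # runs once per click of the 'Step' button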
# ============================================================================
# Sample 4: (super-)Gaussian beam profiles and small numpy helpers
# ============================================================================
import numpy as np
def gauss2D(x, y, fwhmx, fwhmy, x0=0, y0=0, offset=0, order=1, int_FWHM=True):
"""
Define a (super-)Gaussian 2D beam. Identical to laser.misc.gauss2D.
Parameters
----------
x: float 2D np.array
Horizontal axis of the Gaussian
y: float 2D np.array
Vertical axis of the Gaussian
fwhmx: float
Horizontal Full Width at Half Maximum
fwhmy: float
Vertical Full Width at Half Maximum
x0: float, optional
Horizontal center position of the Gaussian
y0: float, optional
Vertical center position of the Gaussian
offset: float, optional
Amplitude offset of the Gaussian
order: int, optional
order of the super-Gaussian function.
Defined as: exp( - ( x**2 + y**2 )**order )
int_FWHM: boolean, optional
If True, the FWHM is the FWHM of the square of the Gaussian (intensity).
If False, it is the FWHM of the Gaussian directly (electric field).
"""
coeff = 1.0
if int_FWHM:
coeff = 0.5
return np.exp(-np.log(2) * coeff * ((2 * (x - x0) / fwhmx)**2 + (2 * (y - y0) / fwhmy)**2)**order) + offset
def gauss1D(x, fwhm, x0=0, offset=0, order=1, int_FWHM=True):
"""
    Define a (super-)Gaussian 1D beam; the 1D analogue of gauss2D above.
Parameters
----------
x: float 1D np.array
Axis of the Gaussian
fwhm: float
Full Width at Half Maximum
x0: float, optional
Center position of the Gaussian
offset: float, optional
Amplitude offset of the Gaussian
order: int, optional
order of the super-Gaussian function.
Defined as: exp( - ( x**2 )**order )
int_FWHM: boolean, optional
If True, the FWHM is the FWHM of the square of the Gaussian (intensity).
If False, it is the FWHM of the Gaussian directly (electric field).
"""
coeff = 1.0
if int_FWHM:
coeff = 0.5
return np.exp(-np.log(2) * coeff * ((2 * (x - x0) / fwhm)**2)**order) + offset
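# --- Hedged illustration (not part of the original module) ---
# Quick check of the int_FWHM convention: with int_FWHM=True the *squared*
# profile (the intensity) drops to half its peak at x = fwhm / 2:
def _check_fwhm(fwhm=2.0):
    field = gauss1D(np.array([fwhm / 2]), fwhm)  # field amplitude at the half-width point
    assert np.isclose(field[0] ** 2, 0.5)         # intensity is half the peak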
def cart2pol(x, y):
"""Convert cartesian to polar coordinates"""
return np.abs(x + 1j * y), np.angle(x + 1j * y)
def pol2cart(r, theta):
    """Convert polar to cartesian coordinates"""
    z = r * np.exp(1j * theta)
    return np.real(z), np.imag(z)
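# Hedged round-trip check for the two converters above:
def _check_polar_roundtrip():
    r, theta = cart2pol(3.0, 4.0)  # -> (5.0, atan2(4, 3))
    x, y = pol2cart(r, theta)
    assert np.isclose(x, 3.0) and np.isclose(y, 4.0)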
def array_trim(ar):
"""Trim zeros of 2D map"""
ar_trim = ar.copy()
ar_trim = ar_trim[:, ar_trim.any(axis=0)] # trim columns
ar_trim = ar_trim[ar_trim.any(axis=1), :] # trim rows
return ar_trim
def vect(N):
"""Returns a centered array between -0.5 and 0.5"""
return np.linspace(0, N, num=N) / N - 0.5
def norm(a):
    """Normalise an array by its maximum absolute value"""
    return a / np.max(np.abs(a))
def text_progress_bar(iteration, num_iteration):
"""Displays a progress bar with the print function"""
    print('|' * (iteration + 1) + '.' * (num_iteration - iteration - 1) + ' %.1f %%' % ((iteration + 1) / num_iteration * 100), end='\r')
# ============================================================================
# Sample 5: Webcam gaze-estimation demo (MPIIGaze / MPIIFaceGaze)
# ============================================================================
#!/usr/bin/env python
from typing import Optional
import datetime
import logging
import pathlib
import cv2
import numpy as np
import yacs.config
from gaze_estimation.gaze_estimator.common import (Face, FacePartsName,
Visualizer)
from gaze_estimation.utils import load_config
from gaze_estimation import GazeEstimationMethod, GazeEstimator
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
import pdb
import pickle
import time
import imutils
import sys
import os
import draw_utils
from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup
from screen_conf import *
# FLAGS PARAMETERS
#------------------------------------------------
fpath = 'recs/'
rgb_fp = 'det/'
# AVERAGING OVER GAZE VALUES TOGGLE
#------------------------------------------------
GAZE_AVG_FLAG = 0
num_frames = 3 # num of frames to average over
#------------------------------------------------
# AVERAGING OVER LANDMARKS TOGGLE
AVG_LANDMARKS = 0
num_avg_frames = 3 # num of frames to average over
# GLOBAL VARIABLES
#------------------------------------------------
img = np.zeros((adj_H, W_px,3))
mid_point = (0,0)
rng_pos = (np.random.randint(0, W_px),np.random.randint(0, H_px))
focus = 0
avg_pos = []
#------------------------------------------------
DEBUG = 0  # set to 'EYE' to debug individual eye values
if DEBUG:
    try:
        print('Creating dirs')
        os.makedirs(fpath + rgb_fp)  # creates 'recs/' and 'recs/det/' in one call
    except OSError:
        print('dirs already exist')
#------------------------------------------------
class Demo:
QUIT_KEYS = {27, ord('q')}
def __init__(self, config: yacs.config.CfgNode):
self.config = config
self.gaze_estimator = GazeEstimator(config, AVG_LANDMARKS=AVG_LANDMARKS, num_frames=num_avg_frames)
self.visualizer = Visualizer(self.gaze_estimator.camera)
self.cap = self._create_capture()
self.output_dir = self._create_output_dir()
# Turn writer on and off.
if SAVE_VIDEO:
self.writer = self._create_video_writer()
else:
self.writer = 0
self.stop = False
self.show_bbox = self.config.demo.show_bbox
self.show_head_pose = self.config.demo.show_head_pose
self.show_landmarks = self.config.demo.show_landmarks
self.show_normalized_image = NORM_EYEZ # self.config.demo.show_normalized_image
self.show_template_model = self.config.demo.show_template_model
# FRAME COUNTER
self.i = 0
self.pts = []
self.cur_pos = []
self.true_pos = []
self.dist = []
self.left_eye_cent = []
self.right_eye_cent = []
self.right_eye_gaze = []
self.left_eye_gaze = []
self.face_gaze = []
self.face_cent = []
def run(self) -> None:
while True:
if DEMO:
pts = draw_utils.display_canv(CANV_MODE=CANV_MODE, cur_pos=mid_point) #cur_pos=cur_pos
self.pts.append(pts)
self.true_pos.append(pts[0])
self.cur_pos.append(pts[1])
if self.config.demo.display_on_screen:
self._wait_key()
if self.stop:
break
ok, frame = self.cap.read()
if not ok:
break
if CUST_VIDEO:
frame = imutils.resize(frame, width=self.gaze_estimator.camera.width, height=self.gaze_estimator.camera.height)
calib_time = time.time()
# FIRST WE UNDISTORT THE IMAGE!
undistorted = cv2.undistort(
frame, self.gaze_estimator.camera.camera_matrix,
self.gaze_estimator.camera.dist_coefficients)
if RUNTIME:
print('Image calibration: ', time.time()-calib_time, ' seconds.')
self.visualizer.set_image(frame.copy())
dlib_time = time.time()
faces = self.gaze_estimator.detect_faces(undistorted)
if RUNTIME:
print('DLIB faces: ', time.time() - dlib_time, ' seconds.')
for face in faces:
self.gaze_estimator.estimate_gaze(undistorted, face)
self._draw_face_bbox(face)
self._draw_head_pose(face)
self._draw_landmarks(face)
self._draw_face_template_model(face)
self._draw_gaze_vector(face)
self._display_normalized_image(face)
if self.config.demo.use_camera:
self.visualizer.image = self.visualizer.image[:, ::-1]
if self.writer:
self.writer.write(self.visualizer.image)
#self.write_eyes.write(self.visualizer.image)
if self.config.demo.display_on_screen:
self.visualizer.image = cv2.resize(self.visualizer.image, (0, 0), fy=IMG_SCALE, fx=IMG_SCALE)
cv2.imshow('frame', self.visualizer.image)
# MOVE TO TOP LEFT CORNER
cv2.moveWindow("frame", 0,0)
if DEBUG:
cv2.imwrite(fpath+rgb_fp+'rgb_'+str(self.i).zfill(5)+'.png', self.visualizer.image)
# INCREMENT COUNTER
self.i += 1
self.cap.release()
if self.writer:
self.writer.release()
def _create_capture(self) -> cv2.VideoCapture:
if self.config.demo.use_camera:
# use recording or the custom video
if CUST_VIDEO:
cap = cv2.VideoCapture(vid_file)
else:
cap = cv2.VideoCapture(0)
elif self.config.demo.video_path:
cap = cv2.VideoCapture(self.config.demo.video_path)
else:
raise ValueError
# pdb.set_trace()
cap.set(cv2.CAP_PROP_FRAME_WIDTH, self.gaze_estimator.camera.width)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, self.gaze_estimator.camera.height)
return cap
def _create_output_dir(self) -> Optional[pathlib.Path]:
if not self.config.demo.output_dir:
return
output_dir = pathlib.Path(self.config.demo.output_dir)
output_dir.mkdir(exist_ok=True, parents=True)
return output_dir
@staticmethod
def _create_timestamp() -> str:
dt = datetime.datetime.now()
return dt.strftime('%Y%m%d_%H%M%S')
def _create_video_writer(self) -> Optional[cv2.VideoWriter]:
if not self.output_dir:
return None
ext = self.config.demo.output_file_extension
if ext == 'mp4':
fourcc = cv2.VideoWriter_fourcc(*'H264')
elif ext == 'avi':
fourcc = cv2.VideoWriter_fourcc(*'PIM1')
else:
raise ValueError
output_path = self.output_dir / f'{self._create_timestamp()}.{ext}'
writer = cv2.VideoWriter(output_path.as_posix(), fourcc, FPS,
(VID_W,
VID_H))
        if not writer.isOpened():
            raise RuntimeError('failed to open video writer')
return writer
def _wait_key(self) -> None:
key = cv2.waitKey(self.config.demo.wait_time) & 0xff
if key in self.QUIT_KEYS:
self.stop = True
elif key == ord('b'):
self.show_bbox = not self.show_bbox
elif key == ord('l'):
self.show_landmarks = not self.show_landmarks
elif key == ord('h'):
self.show_head_pose = not self.show_head_pose
elif key == ord('n'):
self.show_normalized_image = not self.show_normalized_image
elif key == ord('t'):
self.show_template_model = not self.show_template_model
def _draw_face_bbox(self, face: Face) -> None:
if not self.show_bbox:
return
self.visualizer.draw_bbox(face.bbox)
def _draw_head_pose(self, face: Face) -> None:
if not self.show_head_pose:
return
# Draw the axes of the model coordinate system
length = self.config.demo.head_pose_axis_length
self.visualizer.draw_model_axes(face, length, lw=2)
euler_angles = face.head_pose_rot.as_euler('XYZ', degrees=True)
pitch, yaw, roll = face.change_coordinate_system(euler_angles)
logger.info(f'[head] pitch: {pitch:.2f}, yaw: {yaw:.2f}, '
f'roll: {roll:.2f}, distance: {face.distance:.2f}')
self.dist.append(face.distance)
def _draw_landmarks(self, face: Face) -> None:
if not self.show_landmarks:
return
self.visualizer.draw_points(face.landmarks,
color=(0, 255, 255),
size=1)
def _draw_face_template_model(self, face: Face) -> None:
if not self.show_template_model:
return
self.visualizer.draw_3d_points(face.model3d,
color=(255, 0, 525),
size=1)
def _display_normalized_image(self, face: Face) -> None:
if not self.config.demo.display_on_screen:
return
if not self.show_normalized_image:
return
if self.config.mode == GazeEstimationMethod.MPIIGaze.name:
reye = face.reye.normalized_image
leye = face.leye.normalized_image
normalized = np.hstack([reye, leye])
elif self.config.mode == GazeEstimationMethod.MPIIFaceGaze.name:
normalized = face.normalized_image
else:
raise ValueError
if self.config.demo.use_camera:
normalized = normalized[:, ::-1]
normalized = cv2.resize(normalized, (0, 0), fy=5, fx=5)
if PRINT_VALS:
H, W = normalized.shape
left_edge = W - 50
left_edge_H = 20
cv2.putText(normalized,
str(self.i), #'cur frame = '
(left_edge, left_edge_H),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, RED, 1)
save_str = 'norm_eyes_fix/img_'+str(self.i).zfill(5)+'.png'
if NORM_EYEZ:
cv2.imwrite(save_str, normalized[:,300:])
cv2.imshow('normalized', normalized)
def avg_frames(self):
if 0:
r_avg_cent = [np.array([x[0] for x in self.right_eye_cent[-num_frames:]]).mean(),
np.array([x[1] for x in self.right_eye_cent[-num_frames:]]).mean(),
np.array([x[2] for x in self.right_eye_cent[-num_frames:]]).mean()]
l_avg_cent = [np.array([x[0] for x in self.left_eye_cent[-num_frames:]]).mean(),
np.array([x[1] for x in self.left_eye_cent[-num_frames:]]).mean(),
np.array([x[2] for x in self.left_eye_cent[-num_frames:]]).mean()]
else:
r_avg_cent = self.right_eye_cent[-1]
l_avg_cent = self.left_eye_cent[-1]
r_avg_gaze = [np.array([x[0] for x in self.right_eye_gaze[-num_frames:]]).mean(),
np.array([x[1] for x in self.right_eye_gaze[-num_frames:]]).mean(),
np.array([x[2] for x in self.right_eye_gaze[-num_frames:]]).mean()]
l_avg_gaze = [np.array([x[0] for x in self.left_eye_gaze[-num_frames:]]).mean(),
np.array([x[1] for x in self.left_eye_gaze[-num_frames:]]).mean(),
np.array([x[2] for x in self.left_eye_gaze[-num_frames:]]).mean()]
right_eye_XY = point_to_screen(r_avg_cent, r_avg_gaze)
left_eye_XY = point_to_screen(l_avg_cent, l_avg_gaze)
mid_x = np.mean([right_eye_XY[0], left_eye_XY[0]])
mid_y = np.mean([right_eye_XY[1], left_eye_XY[1]])
if PRINT_VALS:
self.draw_vals(r_avg_gaze, r_avg_cent, l_avg_gaze,l_avg_cent)
return mid_x, mid_y
def draw_vals(self, r_gaze, r_cent, l_gaze, l_cent):
H, W, _ = self.visualizer.image.shape
left_edge = W - 350
left_edge_H = 40
flip_img = cv2.flip(self.visualizer.image, 1)
r_gaze = round_tup(r_gaze)
r_cent = round_tup(r_cent)
l_gaze = round_tup(l_gaze)
l_cent = round_tup(l_cent)
print('frame no ', self.i)
print('right_gaze, ', r_gaze)
print('left_gaze , ', l_gaze)
print('right_cent, ', r_cent)
print('left_cent , ', l_cent)
cv2.putText(flip_img,
'cur frame = '+ str(self.i),
(left_edge, left_edge_H-20),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, RED, 1)
cv2.putText(flip_img,
'R_Gaze = '+str(r_gaze),
(left_edge, left_edge_H),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, BLACK, 1)
cv2.putText(flip_img,
'R_Cent = '+str(r_cent),
(left_edge, left_edge_H+20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, BLACK, 1)
cv2.putText(flip_img,
'L_Gaze = '+str(l_gaze),
(left_edge, left_edge_H+40),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, BLACK, 1)
cv2.putText(flip_img,
'L_Cent = '+str(l_cent),
(left_edge, left_edge_H+60),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, BLACK, 1)
if GAZE_AVG_FLAG:
avg_str = 'ON' + ' frames = ' + str(num_frames)
else:
avg_str = 'OFF'
cv2.putText(flip_img,
'AVG = ' + str(avg_str),
(left_edge, left_edge_H+85),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, RED, 1)
self.visualizer.image = cv2.flip(flip_img, 1)
def _draw_gaze_vector(self, face: Face) -> None:
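        # Draw the 3D gaze ray(s) and update the global on-screen gaze point:
        # one ray per eye for MPIIGaze, a single face-center ray for MPIIFaceGaze.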
length = self.config.demo.gaze_visualization_length
print('*'*50)
        right_eye_XY = (0,0)
        left_eye_XY = (0,0)
        # initialize the midpoint so early frames (before num_frames are
        # buffered) do not raise UnboundLocalError below
        mid_x, mid_y = 0, 0
r_gaze_ = (0,0,0)
r_cent_ = (0,0,0)
l_gaze_ = (0,0,0)
l_cent_ = (0,0,0)
if self.config.mode == GazeEstimationMethod.MPIIGaze.name:
for key in [FacePartsName.REYE, FacePartsName.LEYE]:
eye = getattr(face, key.name.lower())
self.visualizer.draw_3d_line(
eye.center, eye.center + length * eye.gaze_vector)
if key.name.lower() == 'reye':
self.right_eye_cent.append(eye.center)
self.right_eye_gaze.append(eye.gaze_vector)
r_gaze_ = tuple(eye.gaze_vector)
r_cent_ = tuple(eye.center)
right_eye_XY = point_to_screen(eye.center, eye.gaze_vector)
else:
self.left_eye_cent.append(eye.center)
self.left_eye_gaze.append(eye.gaze_vector)
left_eye_XY = point_to_screen(eye.center, eye.gaze_vector)
l_gaze_ = tuple(eye.gaze_vector)
l_cent_ = tuple(eye.center)
print('{} gaze = '.format(key.name.lower()), eye.gaze_vector)
print('{} center = '.format(key.name.lower()), eye.center)
pitch, yaw = np.rad2deg(eye.vector_to_angle(eye.gaze_vector))
logger.info(
f'[{key.name.lower()}] pitch: {pitch:.2f}, yaw: {yaw:.2f}')
elif self.config.mode == GazeEstimationMethod.MPIIFaceGaze.name:
self.visualizer.draw_3d_line(
face.center, face.center + length * face.gaze_vector)
self.face_cent.append(face.center)
self.face_gaze.append(face.gaze_vector)
pitch, yaw = np.rad2deg(face.vector_to_angle(face.gaze_vector))
logger.info(f'[face] pitch: {pitch:.2f}, yaw: {yaw:.2f}')
else:
raise ValueError
global mid_point
if self.config.mode == GazeEstimationMethod.MPIIGaze.name:
# -----------------------------------------------
if GAZE_AVG_FLAG:
if len(self.right_eye_cent) >= num_frames:
mid_x, mid_y = self.avg_frames()
else:
if PRINT_VALS:
self.draw_vals(r_gaze_, r_cent_, l_gaze_,l_cent_)
else:
mid_x = np.mean([right_eye_XY[0], left_eye_XY[0]])
mid_y = np.mean([right_eye_XY[1], left_eye_XY[1]])
if PRINT_VALS:
self.draw_vals(r_gaze_, r_cent_, l_gaze_,l_cent_)
elif self.config.mode == GazeEstimationMethod.MPIIFaceGaze.name:
XY = point_to_screen(face.center, face.gaze_vector)
mid_x = XY[0]
mid_y = XY[1]
else:
raise ValueError
mid_point = (int(mid_x), int(mid_y))
def main():
'''
# EYE MODEL
python demo.py --config configs/demo_mpiigaze_resnet.yaml
# FACE MODEL
python demo.py --config configs/demo_mpiifacegaze_resnet_simple_14.yaml
'''
global DEMO, CANV_MODE, IMG_SCALE, NORM_EYEZ, SAVE_VIDEO
global RUNTIME, CUST_VIDEO, vid_file, PRINT_VALS
start_time = time.time()
config, custom = load_config()
# pdb.set_trace()
DEMO = custom['demo']
# Save normalized eyes
NORM_EYEZ = custom['eyes']
# FLAG TO SAVE MOVE, DEFAULT = FALSE
SAVE_VIDEO = custom['save_vid']
# PRINT RUNTIME
RUNTIME = custom['runtime'] #0
# PRINTS VALS ON THE WEBCAM IMG
PRINT_VALS = custom['printvals'] #0
# CUSTOM VIDEO:
CUST_VIDEO = custom['cust_vid']
    if CUST_VIDEO is not None:
vid_file = CUST_VIDEO
CANV_MODE = custom['mode']
    if CANV_MODE in ('STABILITY', 'UPDOWN', 'LEFTRIGHT', 'SEQ'):
print('Current mode is {}'.format(CANV_MODE))
else:
print('Breaking since current mode is {}'.format(CANV_MODE))
print('Set correct CANV_MODE --mode: ')
print('*STABILITY* *UPDOWN* *LEFTRIGHT* *SEQ*')
sys.exit(1)
if DEMO:
IMG_SCALE = custom['imgscale']
CANV_MODE = custom['mode'] #'RNG'
demo = Demo(config)
demo.run()
n_frames = len(demo.pts)
tot_time = time.time()-start_time
print('nr of frames: ', n_frames)
print('All finished: ',tot_time , ' seconds.')
print('FPS: ', round(n_frames/tot_time,2))
# This part only gets executed in case there is input to the model
if CUST_VIDEO:
# COMPUTE ACCURACY METRICS HERE
save_path = 'testResults/'
try:
os.mkdir(save_path)
except:
print('folder already existing {}'.format(save_path))
str_name = vid_file.split('/')[1].split('.')[0] + '_LM_' +str(AVG_LANDMARKS) + '_GAZE_' + str(GAZE_AVG_FLAG)
str_name = str(demo.gaze_estimator.camera.width) + 'x' + str(demo.gaze_estimator.camera.height) + '_' + str_name
str_name = config.mode + str_name
        # Collect the indices of frames where no gaze point was produced
        # (cur_pos entries summing to zero) and drop them from both lists.
        indices = [i for i, item in enumerate(demo.cur_pos) if sum(item) == 0]
        for i in reversed(indices):
            demo.true_pos.pop(i)
            demo.cur_pos.pop(i)
# DUMP THE GAZE AND CENTER VALUES
if config.mode == 'MPIIGaze':
dump_dict(str_name,items=[demo.left_eye_cent,demo.left_eye_gaze, demo.right_eye_cent, demo.right_eye_gaze, demo.true_pos, demo.dist],
item_name = ['lcent', 'lgaze', 'rcent', 'rgaze', 'tpos', 'fdist'])
elif config.mode == 'MPIIFaceGaze':
dump_dict(str_name,items=[demo.face_cent,demo.face_gaze, demo.true_pos, demo.dist],
item_name = ['fcent', 'fgaze', 'tpos', 'fdist'])
        print('EXIT BEFORE METRICS & PLOTS')
_, MAE, CEP, CE95 = calc_metrics((demo.true_pos,demo.cur_pos))
print('MAE = ', MAE)
print('CEP = ', CEP)
print('CEP95 = ', CE95)
# draw results
draw_utils.plot_pts((demo.true_pos,demo.cur_pos), str_name, MAE, save_path)
if __name__ == '__main__':
main()
|
[
"os.mkdir",
"helper_fn.calc_metrics",
"cv2.VideoWriter_fourcc",
"draw_utils.display_canv",
"pathlib.Path",
"numpy.random.randint",
"numpy.mean",
"imutils.resize",
"cv2.imshow",
"gaze_estimation.gaze_estimator.common.Visualizer",
"cv2.undistort",
"gaze_estimation.utils.load_config",
"helper_fn.point_to_screen",
"cv2.imwrite",
"os.mkdirs",
"helper_fn.round_tup",
"gaze_estimation.GazeEstimator",
"datetime.datetime.now",
"cv2.resize",
"cv2.waitKey",
"numpy.hstack",
"cv2.flip",
"helper_fn.dump_dict",
"sys.exit",
"logging.basicConfig",
"numpy.zeros",
"time.time",
"cv2.VideoCapture",
"numpy.array",
"draw_utils.plot_pts",
"cv2.moveWindow",
"logging.getLogger"
] |
[((395, 434), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (414, 434), False, 'import logging\n'), ((444, 471), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (461, 471), False, 'import logging\n'), ((1148, 1174), 'numpy.zeros', 'np.zeros', (['(adj_H, W_px, 3)'], {}), '((adj_H, W_px, 3))\n', (1156, 1174), True, 'import numpy as np\n'), ((1203, 1229), 'numpy.random.randint', 'np.random.randint', (['(0)', 'W_px'], {}), '(0, W_px)\n', (1220, 1229), True, 'import numpy as np\n'), ((1230, 1256), 'numpy.random.randint', 'np.random.randint', (['(0)', 'H_px'], {}), '(0, H_px)\n', (1247, 1256), True, 'import numpy as np\n'), ((17064, 17075), 'time.time', 'time.time', ([], {}), '()\n', (17073, 17075), False, 'import time\n'), ((17097, 17110), 'gaze_estimation.utils.load_config', 'load_config', ([], {}), '()\n', (17108, 17110), False, 'from gaze_estimation.utils import load_config\n'), ((1439, 1454), 'os.mkdir', 'os.mkdir', (['fpath'], {}), '(fpath)\n', (1447, 1454), False, 'import os\n'), ((1463, 1488), 'os.mkdirs', 'os.mkdirs', (['(fpath + rgb_fp)'], {}), '(fpath + rgb_fp)\n', (1472, 1488), False, 'import os\n'), ((1742, 1819), 'gaze_estimation.GazeEstimator', 'GazeEstimator', (['config'], {'AVG_LANDMARKS': 'AVG_LANDMARKS', 'num_frames': 'num_avg_frames'}), '(config, AVG_LANDMARKS=AVG_LANDMARKS, num_frames=num_avg_frames)\n', (1755, 1819), False, 'from gaze_estimation import GazeEstimationMethod, GazeEstimator\n'), ((1846, 1884), 'gaze_estimation.gaze_estimator.common.Visualizer', 'Visualizer', (['self.gaze_estimator.camera'], {}), '(self.gaze_estimator.camera)\n', (1856, 1884), False, 'from gaze_estimation.gaze_estimator.common import Face, FacePartsName, Visualizer\n'), ((6110, 6151), 'pathlib.Path', 'pathlib.Path', (['self.config.demo.output_dir'], {}), '(self.config.demo.output_dir)\n', (6122, 6151), False, 'import pathlib\n'), ((6300, 6323), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6321, 6323), False, 'import datetime\n'), ((9627, 9669), 'cv2.resize', 'cv2.resize', (['normalized', '(0, 0)'], {'fy': '(5)', 'fx': '(5)'}), '(normalized, (0, 0), fy=5, fx=5)\n', (9637, 9669), False, 'import cv2\n'), ((10159, 10195), 'cv2.imshow', 'cv2.imshow', (['"""normalized"""', 'normalized'], {}), "('normalized', normalized)\n", (10169, 10195), False, 'import cv2\n'), ((11462, 11501), 'helper_fn.point_to_screen', 'point_to_screen', (['r_avg_cent', 'r_avg_gaze'], {}), '(r_avg_cent, r_avg_gaze)\n', (11477, 11501), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((11524, 11563), 'helper_fn.point_to_screen', 'point_to_screen', (['l_avg_cent', 'l_avg_gaze'], {}), '(l_avg_cent, l_avg_gaze)\n', (11539, 11563), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((11580, 11622), 'numpy.mean', 'np.mean', (['[right_eye_XY[0], left_eye_XY[0]]'], {}), '([right_eye_XY[0], left_eye_XY[0]])\n', (11587, 11622), True, 'import numpy as np\n'), ((11639, 11681), 'numpy.mean', 'np.mean', (['[right_eye_XY[1], left_eye_XY[1]]'], {}), '([right_eye_XY[1], left_eye_XY[1]])\n', (11646, 11681), True, 'import numpy as np\n'), ((11984, 12018), 'cv2.flip', 'cv2.flip', (['self.visualizer.image', '(1)'], {}), '(self.visualizer.image, 1)\n', (11992, 12018), False, 'import cv2\n'), ((12036, 12053), 'helper_fn.round_tup', 'round_tup', (['r_gaze'], {}), '(r_gaze)\n', (12045, 12053), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((12071, 12088), 'helper_fn.round_tup', 'round_tup', (['r_cent'], {}), '(r_cent)\n', (12080, 12088), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((12106, 12123), 'helper_fn.round_tup', 'round_tup', (['l_gaze'], {}), '(l_gaze)\n', (12115, 12123), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((12141, 12158), 'helper_fn.round_tup', 'round_tup', (['l_cent'], {}), '(l_cent)\n', (12150, 12158), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((13633, 13654), 'cv2.flip', 'cv2.flip', (['flip_img', '(1)'], {}), '(flip_img, 1)\n', (13641, 13654), False, 'import cv2\n'), ((18153, 18164), 'time.time', 'time.time', ([], {}), '()\n', (18162, 18164), False, 'import time\n'), ((19659, 19702), 'helper_fn.calc_metrics', 'calc_metrics', (['(demo.true_pos, demo.cur_pos)'], {}), '((demo.true_pos, demo.cur_pos))\n', (19671, 19702), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((19824, 19900), 'draw_utils.plot_pts', 'draw_utils.plot_pts', (['(demo.true_pos, demo.cur_pos)', 'str_name', 'MAE', 'save_path'], {}), '((demo.true_pos, demo.cur_pos), str_name, MAE, save_path)\n', (19843, 19900), False, 'import draw_utils\n'), ((3533, 3544), 'time.time', 'time.time', ([], {}), '()\n', (3542, 3544), False, 'import time\n'), ((3615, 3728), 'cv2.undistort', 'cv2.undistort', (['frame', 'self.gaze_estimator.camera.camera_matrix', 'self.gaze_estimator.camera.dist_coefficients'], {}), '(frame, self.gaze_estimator.camera.camera_matrix, self.\n gaze_estimator.camera.dist_coefficients)\n', (3628, 3728), False, 'import cv2\n'), ((3941, 3952), 'time.time', 'time.time', ([], {}), '()\n', (3950, 3952), False, 'import time\n'), ((6589, 6620), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'H264'"], {}), "(*'H264')\n", (6611, 6620), False, 'import cv2\n'), ((7101, 7140), 'cv2.waitKey', 'cv2.waitKey', (['self.config.demo.wait_time'], {}), '(self.config.demo.wait_time)\n', (7112, 7140), False, 'import cv2\n'), ((9334, 9357), 'numpy.hstack', 'np.hstack', (['[reye, leye]'], {}), '([reye, leye])\n', (9343, 9357), True, 'import numpy as np\n'), ((17964, 17975), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (17972, 17975), False, 'import sys\n'), ((18503, 18522), 'os.mkdir', 'os.mkdir', (['save_path'], {}), '(save_path)\n', (18511, 18522), False, 'import os\n'), ((19160, 19369), 'helper_fn.dump_dict', 'dump_dict', (['str_name'], {'items': '[demo.left_eye_cent, demo.left_eye_gaze, demo.right_eye_cent, demo.\n right_eye_gaze, demo.true_pos, demo.dist]', 'item_name': "['lcent', 'lgaze', 'rcent', 'rgaze', 'tpos', 'fdist']"}), "(str_name, items=[demo.left_eye_cent, demo.left_eye_gaze, demo.\n right_eye_cent, demo.right_eye_gaze, demo.true_pos, demo.dist],\n item_name=['lcent', 'lgaze', 'rcent', 'rgaze', 'tpos', 'fdist'])\n", (19169, 19369), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((2918, 2981), 'draw_utils.display_canv', 'draw_utils.display_canv', ([], {'CANV_MODE': 'CANV_MODE', 'cur_pos': 'mid_point'}), '(CANV_MODE=CANV_MODE, cur_pos=mid_point)\n', (2941, 2981), False, 'import draw_utils\n'), ((3403, 3511), 'imutils.resize', 'imutils.resize', (['frame'], {'width': 'self.gaze_estimator.camera.width', 'height': 'self.gaze_estimator.camera.height'}), '(frame, width=self.gaze_estimator.camera.width, height=self.\n gaze_estimator.camera.height)\n', (3417, 3511), False, 'import imutils\n'), ((4854, 4923), 'cv2.resize', 'cv2.resize', (['self.visualizer.image', '(0, 0)'], {'fy': 'IMG_SCALE', 'fx': 'IMG_SCALE'}), '(self.visualizer.image, (0, 0), fy=IMG_SCALE, fx=IMG_SCALE)\n', (4864, 4923), False, 'import cv2\n'), ((4940, 4982), 'cv2.imshow', 'cv2.imshow', (['"""frame"""', 'self.visualizer.image'], {}), "('frame', self.visualizer.image)\n", (4950, 4982), False, 'import cv2\n'), ((5041, 5070), 'cv2.moveWindow', 'cv2.moveWindow', (['"""frame"""', '(0)', '(0)'], {}), "('frame', 0, 0)\n", (5055, 5070), False, 'import cv2\n'), ((5530, 5556), 'cv2.VideoCapture', 'cv2.VideoCapture', (['vid_file'], {}), '(vid_file)\n', (5546, 5556), False, 'import cv2\n'), ((5597, 5616), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (5613, 5616), False, 'import cv2\n'), ((5677, 5722), 'cv2.VideoCapture', 'cv2.VideoCapture', (['self.config.demo.video_path'], {}), '(self.config.demo.video_path)\n', (5693, 5722), False, 'import cv2\n'), ((6669, 6700), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'PIM1'"], {}), "(*'PIM1')\n", (6691, 6700), False, 'import cv2\n'), ((10108, 10150), 'cv2.imwrite', 'cv2.imwrite', (['save_str', 'normalized[:, 300:]'], {}), '(save_str, normalized[:, 300:])\n', (10119, 10150), False, 'import cv2\n'), ((16231, 16273), 'numpy.mean', 'np.mean', (['[right_eye_XY[0], left_eye_XY[0]]'], {}), '([right_eye_XY[0], left_eye_XY[0]])\n', (16238, 16273), True, 'import numpy as np\n'), ((16298, 16340), 'numpy.mean', 'np.mean', (['[right_eye_XY[1], left_eye_XY[1]]'], {}), '([right_eye_XY[1], left_eye_XY[1]])\n', (16305, 16340), True, 'import numpy as np\n'), ((16532, 16578), 'helper_fn.point_to_screen', 'point_to_screen', (['face.center', 'face.gaze_vector'], {}), '(face.center, face.gaze_vector)\n', (16547, 16578), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((19435, 19571), 'helper_fn.dump_dict', 'dump_dict', (['str_name'], {'items': '[demo.face_cent, demo.face_gaze, demo.true_pos, demo.dist]', 'item_name': "['fcent', 'fgaze', 'tpos', 'fdist']"}), "(str_name, items=[demo.face_cent, demo.face_gaze, demo.true_pos,\n demo.dist], item_name=['fcent', 'fgaze', 'tpos', 'fdist'])\n", (19444, 19571), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((10923, 10982), 'numpy.array', 'np.array', (['[x[0] for x in self.right_eye_gaze[-num_frames:]]'], {}), '([x[0] for x in self.right_eye_gaze[-num_frames:]])\n', (10931, 10982), True, 'import numpy as np\n'), ((11011, 11070), 'numpy.array', 'np.array', (['[x[1] for x in self.right_eye_gaze[-num_frames:]]'], {}), '([x[1] for x in self.right_eye_gaze[-num_frames:]])\n', (11019, 11070), True, 'import numpy as np\n'), ((11099, 11158), 'numpy.array', 'np.array', (['[x[2] for x in self.right_eye_gaze[-num_frames:]]'], {}), '([x[2] for x in self.right_eye_gaze[-num_frames:]])\n', (11107, 11158), True, 'import numpy as np\n'), ((11189, 11247), 'numpy.array', 'np.array', (['[x[0] for x in self.left_eye_gaze[-num_frames:]]'], {}), '([x[0] for x in self.left_eye_gaze[-num_frames:]])\n', (11197, 11247), True, 'import numpy as np\n'), ((11276, 11334), 'numpy.array', 'np.array', (['[x[1] for x in self.left_eye_gaze[-num_frames:]]'], {}), '([x[1] for x in self.left_eye_gaze[-num_frames:]])\n', (11284, 11334), True, 'import numpy as np\n'), ((11363, 11421), 'numpy.array', 'np.array', (['[x[2] for x in self.left_eye_gaze[-num_frames:]]'], {}), '([x[2] for x in self.left_eye_gaze[-num_frames:]])\n', (11371, 11421), True, 'import numpy as np\n'), ((14579, 14623), 'helper_fn.point_to_screen', 'point_to_screen', (['eye.center', 'eye.gaze_vector'], {}), '(eye.center, eye.gaze_vector)\n', (14594, 14623), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((14801, 14845), 'helper_fn.point_to_screen', 'point_to_screen', (['eye.center', 'eye.gaze_vector'], {}), '(eye.center, eye.gaze_vector)\n', (14816, 14845), False, 'from helper_fn import point_to_screen, dump_dict, calc_metrics, round_tup\n'), ((3826, 3837), 'time.time', 'time.time', ([], {}), '()\n', (3835, 3837), False, 'import time\n'), ((4081, 4092), 'time.time', 'time.time', ([], {}), '()\n', (4090, 4092), False, 'import time\n'), ((10263, 10322), 'numpy.array', 'np.array', (['[x[0] for x in self.right_eye_cent[-num_frames:]]'], {}), '([x[0] for x in self.right_eye_cent[-num_frames:]])\n', (10271, 10322), True, 'import numpy as np\n'), ((10355, 10414), 'numpy.array', 'np.array', (['[x[1] for x in self.right_eye_cent[-num_frames:]]'], {}), '([x[1] for x in self.right_eye_cent[-num_frames:]])\n', (10363, 10414), True, 'import numpy as np\n'), ((10447, 10506), 'numpy.array', 'np.array', (['[x[2] for x in self.right_eye_cent[-num_frames:]]'], {}), '([x[2] for x in self.right_eye_cent[-num_frames:]])\n', (10455, 10506), True, 'import numpy as np\n'), ((10541, 10599), 'numpy.array', 'np.array', (['[x[0] for x in self.left_eye_cent[-num_frames:]]'], {}), '([x[0] for x in self.left_eye_cent[-num_frames:]])\n', (10549, 10599), True, 'import numpy as np\n'), ((10632, 10690), 'numpy.array', 'np.array', (['[x[1] for x in self.left_eye_cent[-num_frames:]]'], {}), '([x[1] for x in self.left_eye_cent[-num_frames:]])\n', (10640, 10690), True, 'import numpy as np\n'), ((10723, 10781), 'numpy.array', 'np.array', (['[x[2] for x in self.left_eye_cent[-num_frames:]]'], {}), '([x[2] for x in self.left_eye_cent[-num_frames:]])\n', (10731, 10781), True, 'import numpy as np\n')]
|
import sys
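# Make the bundled Package/parser, Package/vector and Package/solvercontrol
# directories importable by extending sys.path.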
path = sys.path[0] + "/Package/parser"
sys.path.append(path)
path = sys.path[0] + "/Package/vector"
sys.path.append(path)
path = sys.path[0] + "/Package/solvercontrol"
sys.path.append(path)
|
[
"sys.path.append"
] |
[((51, 72), 'sys.path.append', 'sys.path.append', (['path'], {}), '(path)\n', (66, 72), False, 'import sys\n'), ((113, 134), 'sys.path.append', 'sys.path.append', (['path'], {}), '(path)\n', (128, 134), False, 'import sys\n'), ((182, 203), 'sys.path.append', 'sys.path.append', (['path'], {}), '(path)\n', (197, 203), False, 'import sys\n')]
|
import functools
from keras import backend as K
import tensorflow as tf
def as_keras_metric(method):
@functools.wraps(method)
def wrapper(self, args, **kwargs):
""" Wrapper for turning tensorflow metrics into keras metrics """
value, update_op = method(self, args, **kwargs)
K.get_session().run(tf.local_variables_initializer())
with tf.control_dependencies([update_op]):
value = tf.identity(value)
return value
return wrapper
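# Wrap the TensorFlow streaming metrics so Keras can call them as metric functions.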
precision = as_keras_metric(tf.metrics.precision)
recall = as_keras_metric(tf.metrics.recall)
f1_score = as_keras_metric(tf.contrib.metrics.f1_score)
def rmse(y_true, y_pred):
return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
def recall2(y_true, y_pred):
"""Recall metric.
Only computes a batch-wise average of recall.
Computes the recall, a metric for multi-label classification of
how many relevant items are selected.
"""
true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
recall = true_positives / (possible_positives + K.epsilon())
return recall
def precision2(y_true, y_pred):
"""Precision metric.
Only computes a batch-wise average of precision.
Computes the precision, a metric for multi-label classification of
how many selected items are relevant.
"""
true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
precision = true_positives / (predicted_positives + K.epsilon())
return precision
def f1(y_true, y_pred):
precision = precision2(y_true, y_pred)
recall = recall2(y_true, y_pred)
return 2*((precision*recall)/(precision+recall+K.epsilon()))
|
[
"tensorflow.control_dependencies",
"tensorflow.identity",
"keras.backend.epsilon",
"keras.backend.get_session",
"tensorflow.local_variables_initializer",
"functools.wraps",
"keras.backend.clip",
"keras.backend.square"
] |
[((107, 130), 'functools.wraps', 'functools.wraps', (['method'], {}), '(method)\n', (122, 130), False, 'import functools\n'), ((328, 360), 'tensorflow.local_variables_initializer', 'tf.local_variables_initializer', ([], {}), '()\n', (358, 360), True, 'import tensorflow as tf\n'), ((375, 411), 'tensorflow.control_dependencies', 'tf.control_dependencies', (['[update_op]'], {}), '([update_op])\n', (398, 411), True, 'import tensorflow as tf\n'), ((433, 451), 'tensorflow.identity', 'tf.identity', (['value'], {}), '(value)\n', (444, 451), True, 'import tensorflow as tf\n'), ((697, 722), 'keras.backend.square', 'K.square', (['(y_pred - y_true)'], {}), '(y_pred - y_true)\n', (705, 722), True, 'from keras import backend as K\n'), ((992, 1021), 'keras.backend.clip', 'K.clip', (['(y_true * y_pred)', '(0)', '(1)'], {}), '(y_true * y_pred, 0, 1)\n', (998, 1021), True, 'from keras import backend as K\n'), ((1063, 1083), 'keras.backend.clip', 'K.clip', (['y_true', '(0)', '(1)'], {}), '(y_true, 0, 1)\n', (1069, 1083), True, 'from keras import backend as K\n'), ((1138, 1149), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (1147, 1149), True, 'from keras import backend as K\n'), ((1438, 1467), 'keras.backend.clip', 'K.clip', (['(y_true * y_pred)', '(0)', '(1)'], {}), '(y_true * y_pred, 0, 1)\n', (1444, 1467), True, 'from keras import backend as K\n'), ((1510, 1530), 'keras.backend.clip', 'K.clip', (['y_pred', '(0)', '(1)'], {}), '(y_pred, 0, 1)\n', (1516, 1530), True, 'from keras import backend as K\n'), ((1589, 1600), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (1598, 1600), True, 'from keras import backend as K\n'), ((308, 323), 'keras.backend.get_session', 'K.get_session', ([], {}), '()\n', (321, 323), True, 'from keras import backend as K\n'), ((1780, 1791), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (1789, 1791), True, 'from keras import backend as K\n')]
|
import unittest
from vehicle_info_after import VehicleInfo
class TestVehicleInfoMethods(unittest.TestCase):
pass
# def test_compute_tax_non_electric(self):
# v = VehicleInfo("BMW", False, 10000)
# self.assertEqual(v.compute_tax(), 500)
# def test_compute_tax_electric(self):
# v = VehicleInfo("BMW", True, 10000)
# self.assertEqual(v.compute_tax(), 200)
# def test_compute_tax_exemption(self):
# v = VehicleInfo("BMW", False, 10000)
# self.assertEqual(v.compute_tax(5000), 250)
# def test_compute_tax_exemption_negative(self):
# v = VehicleInfo("BMW", False, 10000)
# self.assertRaises(ValueError, v.compute_tax, -5000)
# def test_compute_tax_exemption_high(self):
# v = VehicleInfo("BMW", False, 10000)
# self.assertEqual(v.compute_tax(20000), 0)
# def test_can_lease_false(self):
# v = VehicleInfo("BMW", False, 10000)
# self.assertEqual(v.can_lease(5000), False)
# def test_can_lease_true(self):
# v = VehicleInfo("BMW", False, 10000)
# self.assertEqual(v.can_lease(15000), True)
# def test_can_lease_negative_income(self):
# v = VehicleInfo("BMW", False, 10000)
# self.assertRaises(ValueError, v.can_lease, -5000)
# run the actual unittests
unittest.main()
|
[
"unittest.main"
] |
[((1324, 1339), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1337, 1339), False, 'import unittest\n')]
|
""" Ping agent - checks up/down status of nodes."""
import socket
from alfmonitor.lib.alflogger import logger
from agents.lib.abs_agent import AbstractConnectionAgent
socket.setdefaulttimeout(10)
class PingAgent(AbstractConnectionAgent):
agent_name = 'Ping'
def __init__(self):
self.log = logger(
'{}.{}'.format(
__name__,
self.__class__.__name__,
)
)
def connect(self, profile):
""" Connects to profile's uri. """
try:
hostname, port = profile.uri.split(':')
except (IndexError, ValueError) as err:
hostname = profile.uri
port = 0
self.log.exception(err)
self.log.error('Port cannot be assigned. Attempting with port 0.')
self.log.debug(
f'Attempting connection to {hostname} '
f'at port {port} ...'
)
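        # Pick the socket type from the profile's protocol; default to TCP when unset.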
if profile.protocol == 'TCP':
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
elif profile.protocol == 'UDP':
connection = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
else:
            self.log.warning(
f'Protocol not set for profile: {profile.name}. Assuming TCP.'
)
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
address = (hostname, int(port))
is_connected = False
try:
connection.connect(address)
is_connected = True
except (socket.timeout, ConnectionRefusedError):
pass
except socket.gaierror as err:
self.log.exception(
'Check to see if this profile should use Http agent instead '
'of Ping agent.\n'
f'Profile uri is {profile.uri}'
)
finally:
connection.close()
return is_connected
if __name__ == '__main__':
agent = PingAgent()
agent.run()
|
[
"socket.setdefaulttimeout",
"socket.socket"
] |
[((169, 197), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(10)'], {}), '(10)\n', (193, 197), False, 'import socket\n'), ((983, 1032), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (996, 1032), False, 'import socket\n'), ((1098, 1146), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1111, 1146), False, 'import socket\n'), ((1306, 1355), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (1319, 1355), False, 'import socket\n')]
|
import json
import os
configName = os.path.dirname(os.path.realpath(__file__)) + '/config.json'
config = None
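# Load config.json from this file's directory and record whether the
# frontend build directory exists next to it.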
try:
with open(configName) as data:
try:
config = json.load(data)
config["build_exists"] = os.path.join(os.path.dirname(configName), "app", "frontend", "build")
config["build_exists"] = os.path.exists(config["build_exists"])
except Exception:
print("Error occured while parsing json, please check json validity")
except Exception:
print("Error occured while reading config, make sure config.json is present")
raise
|
[
"os.path.realpath",
"json.load",
"os.path.exists",
"os.path.dirname"
] |
[((52, 78), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (68, 78), False, 'import os\n'), ((185, 200), 'json.load', 'json.load', (['data'], {}), '(data)\n', (194, 200), False, 'import json\n'), ((345, 383), 'os.path.exists', 'os.path.exists', (["config['build_exists']"], {}), "(config['build_exists'])\n", (359, 383), False, 'import os\n'), ((251, 278), 'os.path.dirname', 'os.path.dirname', (['configName'], {}), '(configName)\n', (266, 278), False, 'import os\n')]
|
from dice_gen import dice_details
from monster_gen import monster_details
from name_gen import name_details
from player_gen import final_ran_stats
from weapon_gen import generate_random_weapon
from NPC_gen_vince import generate_random_race
def main():
def clear(arr):
for x in arr:
x.reset()
while True:
print("Welcome brave adventure.\nWhat is your request?")
print("[1] Dice Rolls\n[2] Stat Gen\n[3] Name Gen\n[4] Monster Gen\n[5] Loot Gen\n[6] NPC Gen\n[0] Exit")
try:
selector = int(input("Enter a number for what you want: "))
if selector == 0 or selector is None:
print("Goodbye")
break
if selector == 1: # Dice Roll
dice_details()
elif selector == 2: # Player/Stat Gen
final_ran_stats()
elif selector == 3:
name_details()
elif selector == 4:
monster_details()
elif selector == 5:
print(generate_random_weapon())
# weaponDetails()
elif selector >= 6:
print(generate_random_race())
        except (ValueError, IndexError):
print("Goodbye")
break
if __name__ == "__main__":
main()
|
[
"NPC_gen_vince.generate_random_race",
"name_gen.name_details",
"monster_gen.monster_details",
"player_gen.final_ran_stats",
"dice_gen.dice_details",
"weapon_gen.generate_random_weapon"
] |
[((724, 738), 'dice_gen.dice_details', 'dice_details', ([], {}), '()\n', (736, 738), False, 'from dice_gen import dice_details\n'), ((798, 815), 'player_gen.final_ran_stats', 'final_ran_stats', ([], {}), '()\n', (813, 815), False, 'from player_gen import final_ran_stats\n'), ((856, 870), 'name_gen.name_details', 'name_details', ([], {}), '()\n', (868, 870), False, 'from name_gen import name_details\n'), ((911, 928), 'monster_gen.monster_details', 'monster_details', ([], {}), '()\n', (926, 928), False, 'from monster_gen import monster_details\n'), ((975, 999), 'weapon_gen.generate_random_weapon', 'generate_random_weapon', ([], {}), '()\n', (997, 999), False, 'from weapon_gen import generate_random_weapon\n'), ((1077, 1099), 'NPC_gen_vince.generate_random_race', 'generate_random_race', ([], {}), '()\n', (1097, 1099), False, 'from NPC_gen_vince import generate_random_race\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-28 13:20
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import django_fsm
import markupfield.fields
import qraz.frontend.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Repository',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('github', models.PositiveIntegerField(verbose_name='Github repository ID')),
('name', models.CharField(max_length=256, verbose_name='Repository name')),
('state', django_fsm.FSMField(default='inactive', max_length=50)),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='Last modified')),
('comment', markupfield.fields.MarkupField(blank=True, null=True, rendered_field=True, verbose_name='Comment')),
('comment_markup_type', models.CharField(choices=[('', '--'), ('ReST', 'ReST')], default='ReST', max_length=30)),
('secret', models.CharField(default=qraz.frontend.models.get_secret, max_length=16, verbose_name='Shared secret for Github webhooks')),
('hook', models.PositiveIntegerField(null=True, verbose_name='ID of Github webhook')),
('_comment_rendered', models.TextField(editable=False, null=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"django.db.models.TextField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"django.db.models.AutoField",
"django_fsm.FSMField"
] |
[((416, 473), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (447, 473), False, 'from django.db import migrations, models\n'), ((608, 701), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (624, 701), False, 'from django.db import migrations, models\n'), ((727, 791), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'verbose_name': '"""Github repository ID"""'}), "(verbose_name='Github repository ID')\n", (754, 791), False, 'from django.db import migrations, models\n'), ((819, 883), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'verbose_name': '"""Repository name"""'}), "(max_length=256, verbose_name='Repository name')\n", (835, 883), False, 'from django.db import migrations, models\n'), ((912, 966), 'django_fsm.FSMField', 'django_fsm.FSMField', ([], {'default': '"""inactive"""', 'max_length': '(50)'}), "(default='inactive', max_length=50)\n", (931, 966), False, 'import django_fsm\n'), ((1268, 1359), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('', '--'), ('ReST', 'ReST')]", 'default': '"""ReST"""', 'max_length': '(30)'}), "(choices=[('', '--'), ('ReST', 'ReST')], default='ReST',\n max_length=30)\n", (1284, 1359), False, 'from django.db import migrations, models\n'), ((1385, 1511), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'qraz.frontend.models.get_secret', 'max_length': '(16)', 'verbose_name': '"""Shared secret for Github webhooks"""'}), "(default=qraz.frontend.models.get_secret, max_length=16,\n verbose_name='Shared secret for Github webhooks')\n", (1401, 1511), False, 'from django.db import migrations, models\n'), ((1535, 1610), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'null': '(True)', 'verbose_name': '"""ID of Github webhook"""'}), "(null=True, verbose_name='ID of Github webhook')\n", (1562, 1610), False, 'from django.db import migrations, models\n'), ((1651, 1694), 'django.db.models.TextField', 'models.TextField', ([], {'editable': '(False)', 'null': '(True)'}), '(editable=False, null=True)\n', (1667, 1694), False, 'from django.db import migrations, models\n'), ((1722, 1818), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (1739, 1818), False, 'from django.db import migrations, models\n')]
|
import numpy as np
from openmdao.main.api import Component
from openmdao.lib.datatypes.api import Float, Array
class KSfunction(object):
"""Helper class that can be used inside other components to aggregate constraint
vectors with a KS function."""
def compute(self, g, rho=50):
"""Gets the value of the KS function for the given array of constraints."""
self.rho = rho
self.g_max = np.max(g)
self.g_diff = g-self.g_max
self.exponents = np.exp(rho * self.g_diff)
self.summation = np.sum(self.exponents)
self.KS = self.g_max + 1.0/rho * np.log(self.summation)
return self.KS
def derivatives(self):
"""returns a row vector of [dKS_gd, dKS_drho]"""
dsum_dg = self.rho*self.exponents
dKS_dsum = 1.0/self.rho/self.summation
self.dKS_dg = dKS_dsum * dsum_dg
dsum_drho = np.sum(self.g_diff*self.exponents)
self.dKS_drho = dKS_dsum * dsum_drho
return self.dKS_dg, self.dKS_drho
class KSComp(Component):
"""Aggregates a number of functions to a single value via the
Kreisselmeier-Steinhauser Function."""
rho = Float(.1,
iotype="in",
desc="Hyperparameter for the KS function")
KS = Float(0,
iotype="out",
desc="Value of the aggregate KS function")
def __init__(self, n=2):
        super(KSComp, self).__init__()
self.n = n
        self.add('g', Array(np.zeros((n,)),
size=(n,1),
dtype=Float,
iotype="in",
desc="Array of function values to be aggregated"))
self._ks = KSfunction()
def execute(self):
self.KS = self._ks.compute(self.g, self.rho)
def linearize(self):
"""Linearize around the last executed point"""
        # use g_max, exponents, summation from the last executed point
self.J = np.hstack(self._ks.derivatives())
def provideDer(self):
ins = ('g','rho')
outs = ('KS', )
return ins, outs, self.J
|
[
"numpy.sum",
"numpy.log",
"openmdao.lib.datatypes.api.Float",
"numpy.max",
"numpy.exp"
] |
[((1169, 1235), 'openmdao.lib.datatypes.api.Float', 'Float', (['(0.1)'], {'iotype': '"""in"""', 'desc': '"""Hyperparameter for the KS function"""'}), "(0.1, iotype='in', desc='Hyperparameter for the KS function')\n", (1174, 1235), False, 'from openmdao.lib.datatypes.api import Float, Array\n'), ((1277, 1342), 'openmdao.lib.datatypes.api.Float', 'Float', (['(0)'], {'iotype': '"""out"""', 'desc': '"""Value of the aggregate KS function"""'}), "(0, iotype='out', desc='Value of the aggregate KS function')\n", (1282, 1342), False, 'from openmdao.lib.datatypes.api import Float, Array\n'), ((427, 436), 'numpy.max', 'np.max', (['g'], {}), '(g)\n', (433, 436), True, 'import numpy as np\n'), ((497, 522), 'numpy.exp', 'np.exp', (['(rho * self.g_diff)'], {}), '(rho * self.g_diff)\n', (503, 522), True, 'import numpy as np\n'), ((548, 570), 'numpy.sum', 'np.sum', (['self.exponents'], {}), '(self.exponents)\n', (554, 570), True, 'import numpy as np\n'), ((895, 931), 'numpy.sum', 'np.sum', (['(self.g_diff * self.exponents)'], {}), '(self.g_diff * self.exponents)\n', (901, 931), True, 'import numpy as np\n'), ((612, 634), 'numpy.log', 'np.log', (['self.summation'], {}), '(self.summation)\n', (618, 634), True, 'import numpy as np\n')]
|
from kivy.uix.textinput import TextInput
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import ObjectProperty
from kivy.uix.popup import Popup
from kivy.clock import Clock
from pandas import Timedelta
from frontur_gui.model.ComputeDialog import ComputeDialog
from frontur_gui.model.FileManagerLoad import FileManagerLoad
from frontur_gui.controller.SolverController import SolverController
import frontur_utilities.constants as const
import os
import json
class FileSolverMenu(BoxLayout):
container = ObjectProperty(None)
save_btn = ObjectProperty(None)
def __init__(self, **kwargs):
super(FileSolverMenu, self).__init__(**kwargs)
Clock.schedule_once(lambda dt: self.set_label_values())
@property
def is_save_disabled(self):
return not self.solver_df.fully_loaded
def set_label_values(self):
data = vars(const)
for child in reversed(self.container.children):
if isinstance(child, TextInput):
child.text = str(data[child.keyword])
elif isinstance(child, FileManagerLoad):
child.ids.text_input.text = str(data[child.keyword]) if child.keyword in data else ''
def get_dict_values(self):
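        # Collect the values entered in the form widgets and assemble the
        # keyword arguments expected by SolverController.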
self.interface_values = {}
for child in reversed(self.container.children):
if isinstance(child, TextInput):
self.interface_values[child.keyword] = float(child.text)
elif isinstance(child, FileManagerLoad):
self.interface_values[child.keyword] = child.text
with open(self.interface_values['REQ_INTERVIEWS_FILE_PATH']) as jfile:
data = json.load(jfile)
return {
"filename": self.interface_values['SOLVER_FILE_PATH'],
'solver_parameters': {
'workday_time': Timedelta(hours=float(self.interface_values['workday_time'])).seconds,
'rest_time': Timedelta(minutes=float(self.interface_values['rest_time'])).seconds,
'execution_time_limit': Timedelta(minutes=float(self.interface_values['execution_time_limit'])).seconds,
'country_kwargs': {
'plane_kwargs': {
'seats_used': float(self.interface_values['seats_used']),
'poll_success': float(self.interface_values['poll_success']),
'poll_time': Timedelta(seconds=float(self.interface_values['poll_time'])).seconds
},
'interviews': data
}
}
}
def save_configuration(self):
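        # Merge the current interface values into the packaged config.json
        # so they are used as defaults on the next run.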
import frontur_utilities
dir_path = os.path.dirname(frontur_utilities.__file__)
data = {}
with open(dir_path + '/data/config.json') as f:
data = json.load(f)
with open(dir_path + '/data/config.json', 'w') as f:
modified_data = {**data, **self.interface_values}
f.write(json.dumps(modified_data, indent=4, sort_keys=False))
def run_solver(self):
self.solver_df = SolverController(**self.get_dict_values())
content = ComputeDialog(loading_method=self.solver_df.run, cancel=self.dismiss_popup,
callback=self.callback)
self._popup = Popup(title="Solving file", content=content,
size_hint=(0.9, 0.9), auto_dismiss=False)
self._popup.bind(on_open=self._popup.content.compute)
self._popup.open()
def dismiss_popup(self):
self.save_configuration()
self._popup.dismiss()
def callback(self):
self.save_btn.disabled = self.is_save_disabled
if not self.is_save_disabled:
self.save_btn.dataframe = self.solver_df.data_frame
|
[
"json.load",
"kivy.uix.popup.Popup",
"os.path.dirname",
"json.dumps",
"kivy.properties.ObjectProperty",
"frontur_gui.model.ComputeDialog.ComputeDialog"
] |
[((521, 541), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (535, 541), False, 'from kivy.properties import ObjectProperty\n'), ((557, 577), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (571, 577), False, 'from kivy.properties import ObjectProperty\n'), ((2676, 2719), 'os.path.dirname', 'os.path.dirname', (['frontur_utilities.__file__'], {}), '(frontur_utilities.__file__)\n', (2691, 2719), False, 'import os\n'), ((3136, 3239), 'frontur_gui.model.ComputeDialog.ComputeDialog', 'ComputeDialog', ([], {'loading_method': 'self.solver_df.run', 'cancel': 'self.dismiss_popup', 'callback': 'self.callback'}), '(loading_method=self.solver_df.run, cancel=self.dismiss_popup,\n callback=self.callback)\n', (3149, 3239), False, 'from frontur_gui.model.ComputeDialog import ComputeDialog\n'), ((3290, 3380), 'kivy.uix.popup.Popup', 'Popup', ([], {'title': '"""Solving file"""', 'content': 'content', 'size_hint': '(0.9, 0.9)', 'auto_dismiss': '(False)'}), "(title='Solving file', content=content, size_hint=(0.9, 0.9),\n auto_dismiss=False)\n", (3295, 3380), False, 'from kivy.uix.popup import Popup\n'), ((1674, 1690), 'json.load', 'json.load', (['jfile'], {}), '(jfile)\n', (1683, 1690), False, 'import json\n'), ((2813, 2825), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2822, 2825), False, 'import json\n'), ((2969, 3021), 'json.dumps', 'json.dumps', (['modified_data'], {'indent': '(4)', 'sort_keys': '(False)'}), '(modified_data, indent=4, sort_keys=False)\n', (2979, 3021), False, 'import json\n')]
|
from urllib.parse import urlparse
import aiozk
import logging
L = logging.getLogger(__name__)
"""
This module builds ZooKeeper clients from Configs and urls
urls supported:
1. Absolute url.
Example: zookeeper://zookeeper:12181/etc/configs/file1
2. Relative url with full path
Example: zookeeper:///etc/configs/file1
In this case the relative url is expanded as follows:
zookeeper://{default_server}/etc/configs/file1
Where {default_server} is substituted with the server entry of the [asab:zookeeper] configuration file section.
3. Relative url with relative path
Example: zookeeper:./etc/configs/file1
In this case, the relative url is expanded as follows:
zookeeper://{default_server}/{default_path}/etc/configs/file1
Where {default_server} is substituted with the "server" entry of the [asab:zookeeper] configuration file section and
{default_path} is substituted with the "path" entry of the [asab:zookeeper] configuration file section.
Sample config file:
[asab:zookeeper]
servers=server1 server2 server3 <-- Default servers
path=/myfolder <-- Default path
"""
def build_client(Config, z_url):
	# initialize variables
url_netloc = ''
url_path = ''
# Parse URL
if z_url is not None:
url_pieces = urlparse(z_url)
url_netloc = url_pieces.netloc
url_path = url_pieces.path
# If there is no location, use implied
if url_netloc == '':
# if server entry is missing exit
if not Config.has_option("asab:zookeeper", "servers"):
L.error("Servers entry not passed in the configuration.")
return None, None
else:
url_netloc = Config["asab:zookeeper"]["servers"]
if url_path == '':
		# if the path entry is missing, return with only the netloc; the path is None.
if not Config.has_option("asab:zookeeper", "path"):
L.error("Path entry not passed in the configuration.")
return url_netloc, None
else:
url_path = Config["asab:zookeeper"]["path"]
if url_path.startswith("/"):
url_path = url_path.strip("/")
# Create and return the client and the url-path
client = aiozk.ZKClient(url_netloc)
return client, url_path
|
[
"aiozk.ZKClient",
"urllib.parse.urlparse",
"logging.getLogger"
] |
[((67, 94), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (84, 94), False, 'import logging\n'), ((2032, 2058), 'aiozk.ZKClient', 'aiozk.ZKClient', (['url_netloc'], {}), '(url_netloc)\n', (2046, 2058), False, 'import aiozk\n'), ((1244, 1259), 'urllib.parse.urlparse', 'urlparse', (['z_url'], {}), '(z_url)\n', (1252, 1259), False, 'from urllib.parse import urlparse\n')]
|
import os
import shutil
import requests
from hyp3lib.make_cogs import cogify_dir, cogify_file
def _is_cog(filename):
with open(filename, 'rb') as f:
response = requests.post('http://cog-validate.radiant.earth/api/validate', files={'file': f})
return response.status_code == 200
def test_make_cog(geotiff):
assert not _is_cog(geotiff)
cogify_file(geotiff)
assert _is_cog(geotiff)
def test_cogify_dir(geotiff):
base_dir = os.path.dirname(geotiff)
copy_names = [os.path.join(base_dir, '1.tif'), os.path.join(base_dir, '2.tif')]
for name in copy_names:
shutil.copy(geotiff, name)
# Only cogify our copied files
cogify_dir(base_dir, file_pattern='?.tif')
for name in copy_names:
assert _is_cog(name)
assert not _is_cog(geotiff)
|
[
"os.path.dirname",
"hyp3lib.make_cogs.cogify_dir",
"hyp3lib.make_cogs.cogify_file",
"requests.post",
"os.path.join",
"shutil.copy"
] |
[((363, 383), 'hyp3lib.make_cogs.cogify_file', 'cogify_file', (['geotiff'], {}), '(geotiff)\n', (374, 383), False, 'from hyp3lib.make_cogs import cogify_dir, cogify_file\n'), ((459, 483), 'os.path.dirname', 'os.path.dirname', (['geotiff'], {}), '(geotiff)\n', (474, 483), False, 'import os\n'), ((672, 714), 'hyp3lib.make_cogs.cogify_dir', 'cogify_dir', (['base_dir'], {'file_pattern': '"""?.tif"""'}), "(base_dir, file_pattern='?.tif')\n", (682, 714), False, 'from hyp3lib.make_cogs import cogify_dir, cogify_file\n'), ((175, 262), 'requests.post', 'requests.post', (['"""http://cog-validate.radiant.earth/api/validate"""'], {'files': "{'file': f}"}), "('http://cog-validate.radiant.earth/api/validate', files={\n 'file': f})\n", (188, 262), False, 'import requests\n'), ((502, 533), 'os.path.join', 'os.path.join', (['base_dir', '"""1.tif"""'], {}), "(base_dir, '1.tif')\n", (514, 533), False, 'import os\n'), ((535, 566), 'os.path.join', 'os.path.join', (['base_dir', '"""2.tif"""'], {}), "(base_dir, '2.tif')\n", (547, 566), False, 'import os\n'), ((605, 631), 'shutil.copy', 'shutil.copy', (['geotiff', 'name'], {}), '(geotiff, name)\n', (616, 631), False, 'import shutil\n')]
|
import csv
import pandas as pd
import numpy as np
# from stylegan.metrics import linear_separability
from collections import defaultdict
from glob import glob
from random import choice, sample
def get_data():
train_file_path = "/content/drive/MyDrive/ExplainedKinshipData/data/train-pairs.csv"
train_folders_path = "/content/drive/MyDrive/ExplainedKinshipData/data/train-faces/"
val_families = "F09"
all_images = glob(train_folders_path + "*/*/*.jpg")
train_images = [x for x in all_images if val_families not in x]
val_images = [x for x in all_images if val_families in x]
train_person_to_images_map = defaultdict(list)
ppl = [x.split("/")[-3] + "/" + x.split("/")[-2] for x in all_images]
for x in train_images:
train_person_to_images_map[x.split("/")[-3] + "/" + x.split("/")[-2]].append(x)
val_person_to_images_map = defaultdict(list)
for x in val_images:
val_person_to_images_map[x.split("/")[-3] + "/" + x.split("/")[-2]].append(x)
relationships = pd.read_csv(train_file_path)
relationships = list(zip(relationships.p1.values, relationships.p2.values))
relationships = [x for x in relationships if x[0] in ppl and x[1] in ppl]
train = [x for x in relationships if val_families not in x[0]]
val = [x for x in relationships if val_families in x[0]]
print(relationships)
print(train)
gen(train, train_person_to_images_map, batch_size=16)
def gen(list_tuples, person_to_images_map, batch_size=16):
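    # Yield batches that are half known kin pairs (label 1) and half randomly
    # sampled non-kin pairs (label 0), as ([X1, X2], labels) for training.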
ppl = list(person_to_images_map.keys())
while True:
batch_tuples = sample(list_tuples, batch_size // 2)
labels = [1] * len(batch_tuples)
while len(batch_tuples) < batch_size:
p1 = choice(ppl)
p2 = choice(ppl)
if p1 != p2 and (p1, p2) not in list_tuples and (p2, p1) not in list_tuples:
batch_tuples.append((p1, p2))
labels.append(0)
for x in batch_tuples:
if not len(person_to_images_map[x[0]]):
print(x[0])
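        # read_img is assumed to be defined elsewhere (loads an image into an array).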
X1 = [choice(person_to_images_map[x[0]]) for x in batch_tuples]
X1 = np.array([read_img(x) for x in X1])
X2 = [choice(person_to_images_map[x[1]]) for x in batch_tuples]
X2 = np.array([read_img(x) for x in X2])
print(X1, X2, labels)
yield [X1, X2], labels
# df_pairs = pd.read_csv("/content/drive/MyDrive/ExplainedKinshipData/data/train-pairs.csv")
# print(df_pairs)
# features = []
# for pair in df_pairs:
# current_features_1 = linear_separability.get_features(pair["p1"], pair["ptype"])
# current_features_2 = linear_separability.get_features(pair["p2"], pair["ptype"])
# features.append([current_features_1, current_features_2])
# df_pairs.insert(-1, "features", features, True)
# print(df_pairs)
# return df_pairs
get_data()
|
[
"random.sample",
"pandas.read_csv",
"random.choice",
"collections.defaultdict",
"glob.glob"
] |
[((413, 451), 'glob.glob', 'glob', (["(train_folders_path + '*/*/*.jpg')"], {}), "(train_folders_path + '*/*/*.jpg')\n", (417, 451), False, 'from glob import glob\n'), ((617, 634), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (628, 634), False, 'from collections import defaultdict\n'), ((858, 875), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (869, 875), False, 'from collections import defaultdict\n'), ((1009, 1037), 'pandas.read_csv', 'pd.read_csv', (['train_file_path'], {}), '(train_file_path)\n', (1020, 1037), True, 'import pandas as pd\n'), ((1571, 1607), 'random.sample', 'sample', (['list_tuples', '(batch_size // 2)'], {}), '(list_tuples, batch_size // 2)\n', (1577, 1607), False, 'from random import choice, sample\n'), ((1712, 1723), 'random.choice', 'choice', (['ppl'], {}), '(ppl)\n', (1718, 1723), False, 'from random import choice, sample\n'), ((1741, 1752), 'random.choice', 'choice', (['ppl'], {}), '(ppl)\n', (1747, 1752), False, 'from random import choice, sample\n'), ((2049, 2083), 'random.choice', 'choice', (['person_to_images_map[x[0]]'], {}), '(person_to_images_map[x[0]])\n', (2055, 2083), False, 'from random import choice, sample\n'), ((2171, 2205), 'random.choice', 'choice', (['person_to_images_map[x[1]]'], {}), '(person_to_images_map[x[1]])\n', (2177, 2205), False, 'from random import choice, sample\n')]
|
#!/usr/bin/env python3
import argparse
def _parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('input', help='program input')
return parser.parse_args()
def _solve_first(ipt):
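    # Part one: sum every digit that equals the digit before it, with the
    # list treated as circular (index -1 wraps to the last element).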
chars = list(str(ipt))
accum = 0
for item in enumerate(chars):
if item[1] == chars[item[0]-1]:
accum += int(item[1])
return accum
def _solve_second(ipt):
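    # Part two: compare each digit with the one halfway around the circular
    # sequence instead of its immediate predecessor.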
chars = list(str(ipt))
accum = 0
for item in enumerate(chars):
idx = (item[0] + len(chars) // 2) % len(chars)
if item[1] == chars[idx]:
accum += int(item[1])
return accum
def main():
args = _parse_args()
ans1 = _solve_first(args.input)
ans2 = _solve_second(args.input)
print('Part One:', ans1)
print('Part Two:', ans2)
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser"
] |
[((74, 99), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (97, 99), False, 'import argparse\n')]
|
import logging
import scrapelib
from lxml import html
from urllib import parse
from sqlalchemy.exc import IntegrityError
from document import Document
from scrapers.base_scraper import BaseScraper
from utils import ensure_absolute_url
log = logging.getLogger(__name__)
class KnoxCoTNAgendaScraper(BaseScraper):
SITE_ROOT_URL = 'https://destinyhosted.com/'
MEETING_SCHEDULE_URL = 'https://destinyhosted.com/agenda_publish.cfm?id=56691&mt=ALL'
def __init__(self) -> None:
self.scraper = scrapelib.Scraper()
def scrape(self, session):
page = self.scraper.get(self.MEETING_SCHEDULE_URL)
documents = self._get_docs_from_schedule(page.content)
log.debug("Found %d documents", len(documents))
new_docs = []
for doc in documents:
try:
with session.begin_nested():
session.add(doc)
except IntegrityError:
log.debug('Already have document %s', doc)
else:
new_docs.append(doc)
log.info("New Documents: %s", new_docs)
session.commit()
def _get_docs_from_schedule(self, page_str):
"""
Parse the contents of the meeting schedule page and extract document
links and title.
Parameters:
A string containing the page HTML
Returns:
list of Document objects (not yet persisted to database)
"""
doctree = html.fromstring(page_str)
tables = doctree.findall('body/div/form/table/tbody')
# There should be 3 tables on the page. The second one is the one we want
if len(tables) < 2:
log.error("Knox Agendas: required table not found in page")
raise ValueError('required table not found in page')
table = tables[1]
# We don't care about the table header, it contains the values Agendas and Meetings
rows = table.findall('tr')
documents = []
for row in rows:
# The first cell is a link to the Agenda doc and the anchor text is the date of the meeting
# The second cell is the name of the meeting
agenda, meeting = row.findall('td')[:2]
meeting_name = meeting.text.strip()
agenda_anchor = agenda.find('a')
if agenda_anchor is None:
log.error("Knox Agendas: no document link in the meetings table")
raise ValueError('no document link in the meetings table')
agenda_date = agenda_anchor.text.strip()
doc_url = agenda_anchor.get('href')
if not doc_url:
log.error("Knox Agendas: no href in the anchor tag for %s: %s", agenda_date, meeting_name)
raise ValueError('no href in document anchor')
doc_url = ensure_absolute_url(self.SITE_ROOT_URL, doc_url)
# The anchor title is useless, so use the meeting name in the doc name
doc_name = "{}: {}".format(agenda_date, meeting_name)
documents.append(
Document(
url=doc_url,
title=doc_name,
site=Document.Site.KNOX_CO_TN_AGENDAS.name,
)
)
return documents
|
[
"lxml.html.fromstring",
"utils.ensure_absolute_url",
"document.Document",
"logging.getLogger",
"scrapelib.Scraper"
] |
[((244, 271), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (261, 271), False, 'import logging\n'), ((511, 530), 'scrapelib.Scraper', 'scrapelib.Scraper', ([], {}), '()\n', (528, 530), False, 'import scrapelib\n'), ((1454, 1479), 'lxml.html.fromstring', 'html.fromstring', (['page_str'], {}), '(page_str)\n', (1469, 1479), False, 'from lxml import html\n'), ((2822, 2870), 'utils.ensure_absolute_url', 'ensure_absolute_url', (['self.SITE_ROOT_URL', 'doc_url'], {}), '(self.SITE_ROOT_URL, doc_url)\n', (2841, 2870), False, 'from utils import ensure_absolute_url\n'), ((3067, 3153), 'document.Document', 'Document', ([], {'url': 'doc_url', 'title': 'doc_name', 'site': 'Document.Site.KNOX_CO_TN_AGENDAS.name'}), '(url=doc_url, title=doc_name, site=Document.Site.KNOX_CO_TN_AGENDAS\n .name)\n', (3075, 3153), False, 'from document import Document\n')]
|
from selenium import webdriver
from selenium.webdriver.common.by import By
import time
import os
try:
link = "http://suninjuly.github.io/file_input.html"
browser = webdriver.Chrome()
browser.get(link)
    # Your code that fills in the required fields
input1 = browser.find_element(
By.XPATH, "//input[@name='firstname'][@required]")
input1.send_keys("Ivan")
input2 = browser.find_element(
By.XPATH, "//input[@name='lastname'][@required]")
input2.send_keys("Petrov")
input3 = browser.find_element(
By.XPATH, "//input[@name='email'][@required]")
input3.send_keys("<EMAIL>")
input4 = browser.find_element(
By.XPATH, "//input[@type='file']")
    # get the path to the directory of the currently executing file
current_dir = os.path.abspath(os.path.dirname(__file__))
    # append the file name to this path
file_path = os.path.join(current_dir, 'testfile.txt')
input4.send_keys(file_path)
    # Submit the completed form
button = browser.find_element(By.XPATH, "//button[@type='submit']")
button.click()
    # Check that the registration succeeded
    # wait for the page to load
time.sleep(2)
    # # find the element containing the text
    # welcome_text_elt = browser.find_element_by_tag_name("h1")
    # # store the text from the welcome_text_elt element in the welcome_text variable
    # welcome_text = welcome_text_elt.text
    # # use assert to check that the expected text matches the text on the page
    # assert "Congratulations! You have successfully registered!" == welcome_text
finally:
    # wait so that the script's results can be inspected visually
time.sleep(10)
    # close the browser after all the actions
browser.quit()
|
[
"os.path.dirname",
"os.path.join",
"selenium.webdriver.Chrome",
"time.sleep"
] |
[((173, 191), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (189, 191), False, 'from selenium import webdriver\n'), ((891, 932), 'os.path.join', 'os.path.join', (['current_dir', '"""testfile.txt"""'], {}), "(current_dir, 'testfile.txt')\n", (903, 932), False, 'import os\n'), ((1173, 1186), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1183, 1186), False, 'import time\n'), ((1678, 1692), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (1688, 1692), False, 'import time\n'), ((809, 834), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (824, 834), False, 'import os\n')]
|
import discord
import os
import json
import time
import shutil
# creating a dict to keep track of member details
MemberDict ={
# name:
# unique id
# number of reports
};
timeStr = time.strftime("%H%M%S")
MemberDict['people'] = [];
token = open("token.txt","r").read();
#start the bot client
client = discord.Client();
@client.event
async def on_ready(): # method expected by client. This runs once when connected
print(f'We have logged in as {client.user}') # notification of login.
#initiate guild once in this and then populate list for all the guild members that are not bots
# RCW_guild = client.get_guild(346399441475076097)
# for members in RCW_guild.members:
# print(members.name)
#assign unique member ids to a set variable name
@client.event
async def on_message(message): # event that happens per any message.
# each message has a bunch of attributes. Here are a few.
# check out more by print(dir(message)) for example.
print(f"{message.channel}: {message.author}: {message.content}")
RCW_guild = client.get_guild(346399441475076097)
if "!helpRCWbot" in message.content:
await message.channel.send("You have accessed the help menu");
elif "!UpdateMembers" == message.content:
for members in RCW_guild.members:
if members.bot == 0:
MemberDict['people'].append({'name:' : members.name, 'uniqueID:' : members.id, 'reportCount:' : '0'});
with open("data.txt", mode = "w") as outfile:
outfile.seek(0);
json.dump(MemberDict,outfile);
outfile.truncate();
outfile.close();
elif "!report" in message.content:
await message.channel.send("As no RCW member was mentioned ShadowKnight was reported");
elif "!m" == message.content:
# await message.channel.send(f"```{(RCW_guild.members[0].name)}```")
for members in RCW_guild.members:
if members.bot == 0:
print(f"{members.id} : {members.name}")
client.run(token) # recall my token was saved!
|
[
"json.dump",
"time.strftime",
"discord.Client"
] |
[((202, 225), 'time.strftime', 'time.strftime', (['"""%H%M%S"""'], {}), "('%H%M%S')\n", (215, 225), False, 'import time\n'), ((332, 348), 'discord.Client', 'discord.Client', ([], {}), '()\n', (346, 348), False, 'import discord\n'), ((1568, 1598), 'json.dump', 'json.dump', (['MemberDict', 'outfile'], {}), '(MemberDict, outfile)\n', (1577, 1598), False, 'import json\n')]
|
import uuid
class ClientModel:
"""Client models
Args:
name ([type]): [description]
company ([type]): [description]
mail ([type]): [description]
position ([type]): [description]
uid ([type], optional): [description]. Defaults to None.
"""
def __init__(self, name, company, email, position, uid=None):
self.name = name
self.company = company
self.email = email
self.position = position
self.uid = uid or uuid.uuid4()
def to_dict(self):
        return vars(self) # converts our object into a dictionary
    @staticmethod # creates a static method
def schema():
return ['name', 'company', 'email', 'position', 'uid']
|
[
"uuid.uuid4"
] |
[((547, 559), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (557, 559), False, 'import uuid\n')]
|
import argparse
from pprint import pprint
try:
import yaml
import sublist3r
import dns.resolver
except Exception as e:
print("Error loading libraries, please run following commands first:")
print("pip install pyyaml dnspython")
print("git clone https://github.com/aboul3la/Sublist3r")
print("cd Sublist3r")
print("python setup.py install")
exit(1)
from cisco_sdwan_policy.List.DataPrefix import DataPrefix
from cisco_sdwan_policy.List.Prefix import Prefix
from cisco_sdwan_policy import PolicyLoader
def config_reader(config_file):
'''
Read config from yaml file
:return: config in dict format
'''
with open(config_file) as file:
config = yaml.load(file.read())
# print(result)
return config
def parse_domain(domain,nameserver):
domain_list=[]
ip_list=set()
if "*" in domain and domain[0:2]!="*.":
raise Exception("Invalid domain: {}".format(domain))
elif "*" in domain:
sub_domains = sublist3r.main(domain[2:], 40, None, ports=None, silent=False, verbose=False,
enable_bruteforce=False, engines=None)
print(sub_domains)
domain_list.extend(sub_domains)
else:
domain_list.append(domain)
# Use DNSPYTHON to get info.
resolver = dns.resolver.Resolver()
resolver.lifetime = resolver.timeout = 20.0
for domain_name in domain_list:
print("Resolving: {}".format(domain_name))
try:
resolver.nameservers=[nameserver]
response =resolver.query(domain_name)
for answer in response.response.answer:
for ip in answer.items:
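                    # rdtype 1 is a DNS A record, i.e. an IPv4 address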
if ip.rdtype == 1:
ip_list.add(ip.address+"/32")
except:
pass
# try:
# response = dns.resolver.query(domain_name, "CNAME")
# for answer in response.response.answer:
# for ip in answer.items:
# if ip.rdtype == 1:
# ip_list.add(ip.address+"/32")
# except:
# pass
return ip_list
if __name__ == '__main__':
# First read all the configurations from config file.
    parser = argparse.ArgumentParser(description='App List Generator.')
parser.add_argument('config', metavar='config_file_path', type=str,
help='config yaml path')
args = parser.parse_args()
config_file=args.config
try:
config = config_reader(config_file)
print("Config file {} loaded".format(args.config))
app_ip_info ={}
assert type(config["sdwan_server"])==dict
assert type(config["apps"])==dict
assert type(config["dns_server"])==str
except Exception as e:
print("ERROR : Invalid config file.")
print(e)
exit(1)
for appname,domain_list in config["apps"].items():
app_ips=set()
for domain in domain_list:
ip_list = parse_domain(domain,config["dns_server"])
app_ips = app_ips | ip_list
app_ip_info[appname]=list(app_ips)
pprint(app_ip_info)
print("Start creating Prefix Lists")
pl = PolicyLoader.init(config["sdwan_server"])
pl.load()
existing_list=[i.name for i in pl.list_policies]
for appname,ip_list in app_ip_info.items():
if "{}_prefix".format(appname) not in existing_list:
Prefix("{}_prefix".format(appname),prefix_list=ip_list).save()
print("Created Prefix List: {}_prefix".format(appname))
else:
for i in pl.list_policies:
if i.name=="{}_prefix".format(appname):
i.set_entries(ip_list)
i.save()
print("Updated Prefix List: {}".format(i.name))
if "{}_dataprefix".format(appname) not in existing_list:
DataPrefix("{}_dataprefix".format(appname),prefix_list=ip_list).save()
print("Created Data Prefix List: {}_dataprefix".format(appname))
else:
for i in pl.list_policies:
if i.name=="{}_dataprefix".format(appname):
i.set_entries(ip_list)
i.save()
print("Updated Data Prefix List: {}".format(i.name))
|
[
"pprint.pprint",
"cisco_sdwan_policy.PolicyLoader.init",
"sublist3r.main",
"argparse.ArgumentParser"
] |
[((2235, 2294), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""App List Generator."""'}), "(description='App List Generator.')\n", (2258, 2294), False, 'import argparse\n'), ((3120, 3139), 'pprint.pprint', 'pprint', (['app_ip_info'], {}), '(app_ip_info)\n', (3126, 3139), False, 'from pprint import pprint\n'), ((3190, 3231), 'cisco_sdwan_policy.PolicyLoader.init', 'PolicyLoader.init', (["config['sdwan_server']"], {}), "(config['sdwan_server'])\n", (3207, 3231), False, 'from cisco_sdwan_policy import PolicyLoader\n'), ((1003, 1124), 'sublist3r.main', 'sublist3r.main', (['domain[2:]', '(40)', 'None'], {'ports': 'None', 'silent': '(False)', 'verbose': '(False)', 'enable_bruteforce': '(False)', 'engines': 'None'}), '(domain[2:], 40, None, ports=None, silent=False, verbose=\n False, enable_bruteforce=False, engines=None)\n', (1017, 1124), False, 'import sublist3r\n')]
|
import tensorflow as tf
from tf_polygon.primitives import get_edges, point_in_polygon, point_line_segment_distance
def minimal_distance(poly_a, poly_b):
x_a = tf.convert_to_tensor(poly_a)
x_b = tf.convert_to_tensor(poly_b)
e_a = get_edges(poly_a)
e_b = get_edges(poly_b)
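    # containment test: check whether any vertex of one polygon lies inside the other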
a_in_b = point_in_polygon(e_b[..., None, :, :, :], x_a)
a_in_b = tf.reduce_any(a_in_b, axis=-1)
b_in_a = point_in_polygon(e_a[..., None, :, :, :], x_b)
b_in_a = tf.reduce_any(b_in_a, axis=-1)
intersection = tf.logical_or(a_in_b, b_in_a)
# the minimal distance must occur between an edge of a and vertex of b or visa-versa
d_a_b = point_line_segment_distance(e_a[..., :, None, :, :], x_b[..., None, :])
d_a_b = tf.reduce_min(d_a_b, axis=[-2, -1])
d_b_a = point_line_segment_distance(e_b[..., :, None, :, :], x_a[..., None, :])
d_b_a = tf.reduce_min(d_b_a, axis=[-2, -1])
d = tf.minimum(d_a_b, d_b_a)
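    # intersecting polygons are assigned a distance of zero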
d = tf.where(intersection, tf.constant(0., d.dtype), d)
return d
|
[
"tensorflow.reduce_min",
"tf_polygon.primitives.point_line_segment_distance",
"tf_polygon.primitives.get_edges",
"tensorflow.convert_to_tensor",
"tensorflow.logical_or",
"tensorflow.constant",
"tensorflow.minimum",
"tf_polygon.primitives.point_in_polygon",
"tensorflow.reduce_any"
] |
[((166, 194), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['poly_a'], {}), '(poly_a)\n', (186, 194), True, 'import tensorflow as tf\n'), ((205, 233), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['poly_b'], {}), '(poly_b)\n', (225, 233), True, 'import tensorflow as tf\n'), ((245, 262), 'tf_polygon.primitives.get_edges', 'get_edges', (['poly_a'], {}), '(poly_a)\n', (254, 262), False, 'from tf_polygon.primitives import get_edges, point_in_polygon, point_line_segment_distance\n'), ((273, 290), 'tf_polygon.primitives.get_edges', 'get_edges', (['poly_b'], {}), '(poly_b)\n', (282, 290), False, 'from tf_polygon.primitives import get_edges, point_in_polygon, point_line_segment_distance\n'), ((305, 351), 'tf_polygon.primitives.point_in_polygon', 'point_in_polygon', (['e_b[..., None, :, :, :]', 'x_a'], {}), '(e_b[..., None, :, :, :], x_a)\n', (321, 351), False, 'from tf_polygon.primitives import get_edges, point_in_polygon, point_line_segment_distance\n'), ((365, 395), 'tensorflow.reduce_any', 'tf.reduce_any', (['a_in_b'], {'axis': '(-1)'}), '(a_in_b, axis=-1)\n', (378, 395), True, 'import tensorflow as tf\n'), ((410, 456), 'tf_polygon.primitives.point_in_polygon', 'point_in_polygon', (['e_a[..., None, :, :, :]', 'x_b'], {}), '(e_a[..., None, :, :, :], x_b)\n', (426, 456), False, 'from tf_polygon.primitives import get_edges, point_in_polygon, point_line_segment_distance\n'), ((470, 500), 'tensorflow.reduce_any', 'tf.reduce_any', (['b_in_a'], {'axis': '(-1)'}), '(b_in_a, axis=-1)\n', (483, 500), True, 'import tensorflow as tf\n'), ((521, 550), 'tensorflow.logical_or', 'tf.logical_or', (['a_in_b', 'b_in_a'], {}), '(a_in_b, b_in_a)\n', (534, 550), True, 'import tensorflow as tf\n'), ((653, 724), 'tf_polygon.primitives.point_line_segment_distance', 'point_line_segment_distance', (['e_a[..., :, None, :, :]', 'x_b[..., None, :]'], {}), '(e_a[..., :, None, :, :], x_b[..., None, :])\n', (680, 724), False, 'from tf_polygon.primitives import get_edges, point_in_polygon, point_line_segment_distance\n'), ((737, 772), 'tensorflow.reduce_min', 'tf.reduce_min', (['d_a_b'], {'axis': '[-2, -1]'}), '(d_a_b, axis=[-2, -1])\n', (750, 772), True, 'import tensorflow as tf\n'), ((786, 857), 'tf_polygon.primitives.point_line_segment_distance', 'point_line_segment_distance', (['e_b[..., :, None, :, :]', 'x_a[..., None, :]'], {}), '(e_b[..., :, None, :, :], x_a[..., None, :])\n', (813, 857), False, 'from tf_polygon.primitives import get_edges, point_in_polygon, point_line_segment_distance\n'), ((870, 905), 'tensorflow.reduce_min', 'tf.reduce_min', (['d_b_a'], {'axis': '[-2, -1]'}), '(d_b_a, axis=[-2, -1])\n', (883, 905), True, 'import tensorflow as tf\n'), ((915, 939), 'tensorflow.minimum', 'tf.minimum', (['d_a_b', 'd_b_a'], {}), '(d_a_b, d_b_a)\n', (925, 939), True, 'import tensorflow as tf\n'), ((971, 996), 'tensorflow.constant', 'tf.constant', (['(0.0)', 'd.dtype'], {}), '(0.0, d.dtype)\n', (982, 996), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
"""
@author: <NAME>
"""
from __future__ import division
import finite_difference as fd
import numpy as np
def rtm1d(v,seis,dt,dz):
nt = len(seis)
nx = len(v)
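    # a: per-cell step coefficient from finite_difference.alpha (presumably the Courant number v*dt/dz)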
a = fd.alpha(v,dt,dz)
ul, u, up = np.zeros((3,nx))
data = np.zeros((nt,nx))
g = np.zeros(u.shape)
g[0] = 1
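    # g selects the surface node; the recorded trace is injected there in reverse time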
ul += g*seis[nt-1]
u += g*seis[nt-2]
for i in xrange(nt-3,-1,-1):
src = g*seis[i]
ul[0]=2*u[0]-up[0]+a[0]**2*(u[1]-2*u[0]) + src[0]
ul[1:nx-1]=2*u[1:nx-1]-up[1:nx-1]+a[1:nx-1]**2*(u[2:nx]-2*u[1:nx-1]+ \
u[0:nx-2]) + src[1:nx-1]
ul = fd.abc1D(u, ul, a, src)
up = np.copy(u)
u = np.copy(ul)
data[i] = np.copy(u)
return data
def rtm2D(v,shotgt,dt,dx,dz):
# rtm 2D with different algorithm
nz,nx = v.shape
nt = shotgt[:,0].size
ul, u, up = np.zeros((3,nz,nx))
up[0,:], u[0,:], ul[0,:] = shotgt[nt-3:nt,:]
a = fd.alpha(v,dt,dx)**2
a2 = 2-4*a
data = np.zeros((nt, nz, nx))
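    # e: exponential damping taper used as a simple 20-cell absorbing sponge along the edges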
e = (np.exp(-((0.015*(20-np.arange(1,21)))**2) ))**10
c = 2
for i in xrange(nt-2,-1,-1):
c+=1
b = min(c,nz)
for iz in xrange(b):
ul[iz,0:20] = e*ul[iz,0:20]
u[iz,0:20] = e*u[iz,0:20]
ul[iz,nx-20:] = e[::-1]*ul[iz,nx-20:]
u[iz,nx-20:] = e[::-1]*u[iz,nx-20:]
if b >= (nz-20):
for iz in xrange(nz-20,nz):
ul[iz] = e[nz-iz-1]*ul[iz]
u[iz] = e[nz-iz-1]*u[iz]
if b == nz:
d = nz-2
else:
d = b
up[0:b,1:nx-1] = up[0:b,1:nx-1]-ul[0:b,1:nx-1]
u[1:d,1:nx-1] = a2[1:d,1:nx-1]*ul[1:d,1:nx-1]+u[1:d,1:nx-1]+a[1:d,2:nx]*ul[1:d,2:nx]\
+a[1:d,0:nx-2]*ul[1:d,0:nx-2]+a[2:d+1,1:nx-1]*ul[2:d+1,1:nx-1]+\
                a[0:d-1,1:nx-1]*ul[0:d-1,1:nx-1]
u[0,1:nx-1] = a2[0,1:nx-1]*ul[0,1:nx-1]+u[0,1:nx-1]+a[0,2:nx]*ul[0,2:nx]\
+a[0,0:nx-2]*ul[0,0:nx-2]+a[1,1:nx-1]*ul[1,1:nx-1]
if b == nz:
u[nz-1,1:nx-1] = a2[nz-1,1:nx-1]*ul[nz-1,1:nx-1]+u[nz-1,1:nx-1]\
+a[nz-1,2:nx]*ul[nz-1,2:nx]+a[nz-1,0:nx-2]*ul[nz-1,0:nx-2]\
            +a[nz-2,1:nx-1]*ul[nz-2,1:nx-1]
u[nz-1,0] = a2[nz-1,0]*ul[nz-1,0]+u[nz-1,0]+a[nz-1,1]*ul[nz-1,1]\
+a[nz-2,0]*ul[nz-2,0]
u[1:d,0] = a2[1:d,0]*ul[1:d,0]+u[1:d,0]+a[1:d,1]*ul[1:d,1]+a[2:d+1,0]\
*ul[2:d+1,0]+a[0:d-1,0]*ul[0:d-1,0]
u[1:d,nx-1] = a2[1:d,nx-1]*ul[1:d,nx-1]+u[1:d,nx-1]+a[1:d,nx-2]*ul[1:d,nx-2]\
+a[2:d+1,nx-1]*ul[2:d+1,nx-1]+a[0:d-1,nx-1]*ul[0:d-1,nx-1]
u[0,0] = a2[0,0]*ul[0,0]+u[0,0]+a[0,1]*ul[0,1]+a[1,0]*ul[1,0]
        u[0,nx-1] = a2[0,nx-1]*ul[0,nx-1]+u[0,nx-1]+a[0,nx-2]*ul[0,nx-2]+a[1,nx-1]*ul[1,nx-1]
ul = np.copy(u)
u = np.copy(up)
if i > 1:
up[1:nz-1] = 0;
up[0] = shotgt[i-3,:]
data[i] = ul
return data
|
[
"numpy.copy",
"finite_difference.abc1D",
"numpy.zeros",
"numpy.arange",
"finite_difference.alpha"
] |
[((202, 221), 'finite_difference.alpha', 'fd.alpha', (['v', 'dt', 'dz'], {}), '(v, dt, dz)\n', (210, 221), True, 'import finite_difference as fd\n'), ((236, 253), 'numpy.zeros', 'np.zeros', (['(3, nx)'], {}), '((3, nx))\n', (244, 253), True, 'import numpy as np\n'), ((264, 282), 'numpy.zeros', 'np.zeros', (['(nt, nx)'], {}), '((nt, nx))\n', (272, 282), True, 'import numpy as np\n'), ((290, 307), 'numpy.zeros', 'np.zeros', (['u.shape'], {}), '(u.shape)\n', (298, 307), True, 'import numpy as np\n'), ((868, 889), 'numpy.zeros', 'np.zeros', (['(3, nz, nx)'], {}), '((3, nz, nx))\n', (876, 889), True, 'import numpy as np\n'), ((992, 1014), 'numpy.zeros', 'np.zeros', (['(nt, nz, nx)'], {}), '((nt, nz, nx))\n', (1000, 1014), True, 'import numpy as np\n'), ((618, 641), 'finite_difference.abc1D', 'fd.abc1D', (['u', 'ul', 'a', 'src'], {}), '(u, ul, a, src)\n', (626, 641), True, 'import finite_difference as fd\n'), ((655, 665), 'numpy.copy', 'np.copy', (['u'], {}), '(u)\n', (662, 665), True, 'import numpy as np\n'), ((678, 689), 'numpy.copy', 'np.copy', (['ul'], {}), '(ul)\n', (685, 689), True, 'import numpy as np\n'), ((708, 718), 'numpy.copy', 'np.copy', (['u'], {}), '(u)\n', (715, 718), True, 'import numpy as np\n'), ((945, 964), 'finite_difference.alpha', 'fd.alpha', (['v', 'dt', 'dx'], {}), '(v, dt, dx)\n', (953, 964), True, 'import finite_difference as fd\n'), ((2958, 2968), 'numpy.copy', 'np.copy', (['u'], {}), '(u)\n', (2965, 2968), True, 'import numpy as np\n'), ((2981, 2992), 'numpy.copy', 'np.copy', (['up'], {}), '(up)\n', (2988, 2992), True, 'import numpy as np\n'), ((1044, 1060), 'numpy.arange', 'np.arange', (['(1)', '(21)'], {}), '(1, 21)\n', (1053, 1060), True, 'import numpy as np\n')]
|
from scipy.io import loadmat
import tables
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import os, os.path
import time
import scipy.signal
from scipy import signal
from lmfit import minimize, Parameters
import scipy.optimize as optimization
import operator
class temperature_preprocessing_extract_phase_amplitude():
def __init__(self,exp_setup,line_info,time_stamp):
self.exp_setup = exp_setup
# exp_setup = {'px':25/10**6,'f_heating':1,'gap':20}
self.line_info = line_info
# line_info = {'N_line_groups':N_line_groups,'N_horizontal_lines':N_horizontal_lines,'N_files':N_files}
self.time_stamp = time_stamp
def butter_highpass(self,cutoff, fs, order=5):
nyq = 0.5 * fs
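        # scipy's butter() expects the cutoff normalized by the Nyquist frequency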
normal_cutoff = cutoff / nyq
b, a = signal.butter(order, normal_cutoff, btype='high', analog=False)
return b, a
def butter_highpass_filter(self,data, cutoff, fs, order=4):
b, a = self.butter_highpass(cutoff, fs, order=order)
y = signal.filtfilt(b, a, data)
return y
def filter_signal(self,df_rec,f0):
cutoff = f0*0.5
fs = (df_rec.shape[0])/(max(df_rec['reltime'])-min(df_rec['reltime']))
# Plot the frequency response for a few different orders.
time = df_rec['reltime']
N = df_rec.shape[1]-1
df_filtered = pd.DataFrame(data = {'reltime':np.array(df_rec['reltime'])})
for i in range(N):
temp = (self.butter_highpass_filter(df_rec[i],cutoff,fs))
df_filtered[i] = np.array(temp)
return df_filtered
def sin_func(self,x,amplitude,phase,bias,f_heating):
return amplitude*np.sin(2*np.pi*f_heating*x + phase)+bias
def residual(self,params, x, data, eps_data):
amplitude = params['amplitude']
phase = params['phase']
bias = params['bias']
freq = params['frequency']
model = amplitude*np.sin(2*np.pi*freq*x + phase)+bias
return (data-model) / eps_data
def extract_phase_amplitude_sinusoidal_function(self,index,df_temperature):
px = self.exp_setup['px']
f_heating = self.exp_setup['f_heating']
gap = self.exp_setup['gap']
fitting_params_initial = {'amplitude':0.2,'phase':0.1,'bias':0.1}
n_col = df_temperature.shape[1]
tmin = df_temperature['reltime'][0]
time = df_temperature['reltime']-tmin
# A1 = df_temperature.iloc[:,index[0]+3]
# A2 = df_temperature.iloc[:,index[1]+3]
A1 = df_temperature[index[0]]
A2 = df_temperature[index[1]]
A1-= A1.mean()
A2-= A2.mean()
x0 = np.array([1,0,0]) # amplitude,phase,bias
sigma = np.ones(len(time))
params1 = Parameters()
params1.add('amplitude', value=fitting_params_initial['amplitude'])
params1.add('phase', value=fitting_params_initial['phase'])
params1.add('bias', value=fitting_params_initial['bias'])
params1.add('frequency', value=f_heating,vary=False)
res1 = minimize(self.residual, params1, args=(time, A1, sigma))
params2 = Parameters()
params2.add('amplitude', value=fitting_params_initial['amplitude'])
params2.add('phase', value=fitting_params_initial['phase'])
params2.add('bias', value=fitting_params_initial['bias'])
params2.add('frequency', value=f_heating,vary=False)
res2 = minimize(self.residual, params2, args=(time, A2, sigma))
amp1 = np.abs(res1.params['amplitude'].value)
amp2 = np.abs(res2.params['amplitude'].value)
p1 = res1.params['phase'].value
p2 = res2.params['phase'].value
amp_ratio = min(np.abs(amp1/amp2),np.abs(amp2/amp1))
phase_diff = np.abs(p1-p2)
if phase_diff>2*np.pi:
phase_diff = phase_diff - 2*np.pi
if phase_diff>np.pi/2:
phase_diff = np.pi - phase_diff
T_total = np.max(time)-np.min(time)
df = 1/T_total
L = abs(index[0]-index[1])*px*gap
w = 2*np.pi*f_heating
return L, phase_diff,amp_ratio
def extract_phase_amplitude_Fourier_transform(self,index,df_temperature):
px = self.exp_setup['px']
f_heating = self.exp_setup['f_heating']
gap = self.exp_setup['gap']
n_col = df_temperature.shape[1]
tmin = df_temperature['reltime'][0]
time = df_temperature['reltime']-tmin
fft_X1 = np.fft.fft(df_temperature.iloc[:,index[0]+3])
fft_X2 = np.fft.fft(df_temperature.iloc[:,index[1]+3])
T_total = np.max(time)-np.min(time)
df = 1/T_total
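        # N_0: index of the FFT bin closest to the heating frequency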
N_0 = int(f_heating/df)
magnitude_X1 = np.abs(fft_X1)
magnitude_X2 = np.abs(fft_X2)
phase_X1 = np.angle(fft_X1)
phase_X2 = np.angle(fft_X2)
N1, Amp1 = max(enumerate(magnitude_X1[N_0-5:N_0+5]), key=operator.itemgetter(1))
N2, Amp2 = max(enumerate(magnitude_X2[N_0-5:N_0+5]), key=operator.itemgetter(1))
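        # refine: take the strongest bin within +/-5 of the nominal heating-frequency bin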
Nf = N_0+N1-5
amp_ratio = magnitude_X1[Nf]/magnitude_X2[Nf]
phase_diff = phase_X1[Nf]-phase_X2[Nf]
if phase_diff<0:
phase_diff = phase_diff+np.pi*2
L = abs(index[0]-index[1])*px*gap
return L, phase_diff,amp_ratio
def fit_amp_phase_one_batch(self,df_temperature,method):
px = self.exp_setup['px']
f_heating = self.exp_setup['f_heating']
gap = self.exp_setup['gap']
N_lines = df_temperature.shape[1]-1
x_list = np.zeros(N_lines-1)
phase_diff_list = np.zeros(N_lines-1)
amp_ratio_list = np.zeros(N_lines-1)
for i in range(N_lines):
if i>0:
index = [0,i]
if method == 'fft':
x_list[i-1],phase_diff_list[i-1], amp_ratio_list[i-1] = self.extract_phase_amplitude_Fourier_transform(index,df_temperature)
else:
x_list[i-1],phase_diff_list[i-1], amp_ratio_list[i-1] = self.extract_phase_amplitude_sinusoidal_function(index,df_temperature)
return x_list,phase_diff_list,amp_ratio_list
def extract_temperature_from_IR(self,X0,Y0,rec_name,N_avg):
# this function takes the average of N pixels in Y0 direction, typically N = 100
gap = self.exp_setup['gap']
N_line_groups = self.line_info['N_line_groups']
N_horizontal_lines = self.line_info['N_horizontal_lines']
N_files = self.line_info['N_files']
T = np.zeros((N_line_groups,N_horizontal_lines,N_files))
for k in range(N_files):
temp = pd.read_csv(self.line_info['data_path']+rec_name+str(k)+'.csv')
for j in range(N_line_groups):
for i in range(N_horizontal_lines):
T[j,i,k] = temp.iloc[Y0-int(N_avg/2):Y0+int(N_avg/2),X0-j-gap*i].mean() # for T, first dim is line group, 2nd dimension is # of lines, 3rd dim is number of files
return T
def batch_process_horizontal_lines(self,T,method):
#T averaged temperature for N_lines and N_line_groups and N_frames
x_list_all = []
phase_diff_list_all = []
amp_ratio_list_all = []
N_horizontal_lines = self.line_info['N_horizontal_lines']
N_line_groups = self.line_info['N_line_groups']
px = self.exp_setup['px']
f_heating = self.exp_setup['f_heating']
gap = self.exp_setup['gap']
time_stamp = self.time_stamp
for j in range(N_line_groups):
horinzontal_temp = T[j,:,:].T
df = pd.DataFrame(horinzontal_temp)
df['reltime'] = time_stamp['reltime']
df_filtered = self.filter_signal(df,f_heating)
x_list,phase_diff_list,amp_ratio_list = self.fit_amp_phase_one_batch(df_filtered,method)
x_list_all = x_list_all+list(x_list)
phase_diff_list_all = phase_diff_list_all+list(phase_diff_list)
amp_ratio_list_all = amp_ratio_list_all+list(amp_ratio_list)
df_result_IR = pd.DataFrame(data = {'x':x_list_all,'amp_ratio':amp_ratio_list_all,'phase_diff':phase_diff_list_all})
return df_result_IR
|
[
"pandas.DataFrame",
"numpy.abs",
"scipy.signal.filtfilt",
"numpy.fft.fft",
"numpy.angle",
"numpy.zeros",
"numpy.max",
"lmfit.minimize",
"numpy.array",
"numpy.min",
"numpy.sin",
"operator.itemgetter",
"scipy.signal.butter",
"lmfit.Parameters"
] |
[((819, 882), 'scipy.signal.butter', 'signal.butter', (['order', 'normal_cutoff'], {'btype': '"""high"""', 'analog': '(False)'}), "(order, normal_cutoff, btype='high', analog=False)\n", (832, 882), False, 'from scipy import signal\n'), ((1041, 1068), 'scipy.signal.filtfilt', 'signal.filtfilt', (['b', 'a', 'data'], {}), '(b, a, data)\n', (1056, 1068), False, 'from scipy import signal\n'), ((2673, 2692), 'numpy.array', 'np.array', (['[1, 0, 0]'], {}), '([1, 0, 0])\n', (2681, 2692), True, 'import numpy as np\n'), ((2769, 2781), 'lmfit.Parameters', 'Parameters', ([], {}), '()\n', (2779, 2781), False, 'from lmfit import minimize, Parameters\n'), ((3069, 3125), 'lmfit.minimize', 'minimize', (['self.residual', 'params1'], {'args': '(time, A1, sigma)'}), '(self.residual, params1, args=(time, A1, sigma))\n', (3077, 3125), False, 'from lmfit import minimize, Parameters\n'), ((3145, 3157), 'lmfit.Parameters', 'Parameters', ([], {}), '()\n', (3155, 3157), False, 'from lmfit import minimize, Parameters\n'), ((3444, 3500), 'lmfit.minimize', 'minimize', (['self.residual', 'params2'], {'args': '(time, A2, sigma)'}), '(self.residual, params2, args=(time, A2, sigma))\n', (3452, 3500), False, 'from lmfit import minimize, Parameters\n'), ((3517, 3555), 'numpy.abs', 'np.abs', (["res1.params['amplitude'].value"], {}), "(res1.params['amplitude'].value)\n", (3523, 3555), True, 'import numpy as np\n'), ((3571, 3609), 'numpy.abs', 'np.abs', (["res2.params['amplitude'].value"], {}), "(res2.params['amplitude'].value)\n", (3577, 3609), True, 'import numpy as np\n'), ((3784, 3799), 'numpy.abs', 'np.abs', (['(p1 - p2)'], {}), '(p1 - p2)\n', (3790, 3799), True, 'import numpy as np\n'), ((4509, 4557), 'numpy.fft.fft', 'np.fft.fft', (['df_temperature.iloc[:, index[0] + 3]'], {}), '(df_temperature.iloc[:, index[0] + 3])\n', (4519, 4557), True, 'import numpy as np\n'), ((4572, 4620), 'numpy.fft.fft', 'np.fft.fft', (['df_temperature.iloc[:, index[1] + 3]'], {}), '(df_temperature.iloc[:, index[1] + 3])\n', (4582, 4620), True, 'import numpy as np\n'), ((4743, 4757), 'numpy.abs', 'np.abs', (['fft_X1'], {}), '(fft_X1)\n', (4749, 4757), True, 'import numpy as np\n'), ((4781, 4795), 'numpy.abs', 'np.abs', (['fft_X2'], {}), '(fft_X2)\n', (4787, 4795), True, 'import numpy as np\n'), ((4816, 4832), 'numpy.angle', 'np.angle', (['fft_X1'], {}), '(fft_X1)\n', (4824, 4832), True, 'import numpy as np\n'), ((4852, 4868), 'numpy.angle', 'np.angle', (['fft_X2'], {}), '(fft_X2)\n', (4860, 4868), True, 'import numpy as np\n'), ((5598, 5619), 'numpy.zeros', 'np.zeros', (['(N_lines - 1)'], {}), '(N_lines - 1)\n', (5606, 5619), True, 'import numpy as np\n'), ((5644, 5665), 'numpy.zeros', 'np.zeros', (['(N_lines - 1)'], {}), '(N_lines - 1)\n', (5652, 5665), True, 'import numpy as np\n'), ((5689, 5710), 'numpy.zeros', 'np.zeros', (['(N_lines - 1)'], {}), '(N_lines - 1)\n', (5697, 5710), True, 'import numpy as np\n'), ((6570, 6624), 'numpy.zeros', 'np.zeros', (['(N_line_groups, N_horizontal_lines, N_files)'], {}), '((N_line_groups, N_horizontal_lines, N_files))\n', (6578, 6624), True, 'import numpy as np\n'), ((8132, 8240), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': "{'x': x_list_all, 'amp_ratio': amp_ratio_list_all, 'phase_diff':\n phase_diff_list_all}"}), "(data={'x': x_list_all, 'amp_ratio': amp_ratio_list_all,\n 'phase_diff': phase_diff_list_all})\n", (8144, 8240), True, 'import pandas as pd\n'), ((1570, 1584), 'numpy.array', 'np.array', (['temp'], {}), '(temp)\n', (1578, 1584), True, 'import numpy as np\n'), ((3717, 3736), 'numpy.abs', 'np.abs', (['(amp1 / amp2)'], {}), '(amp1 / amp2)\n', (3723, 3736), True, 'import numpy as np\n'), ((3735, 3754), 'numpy.abs', 'np.abs', (['(amp2 / amp1)'], {}), '(amp2 / amp1)\n', (3741, 3754), True, 'import numpy as np\n'), ((3970, 3982), 'numpy.max', 'np.max', (['time'], {}), '(time)\n', (3976, 3982), True, 'import numpy as np\n'), ((3983, 3995), 'numpy.min', 'np.min', (['time'], {}), '(time)\n', (3989, 3995), True, 'import numpy as np\n'), ((4637, 4649), 'numpy.max', 'np.max', (['time'], {}), '(time)\n', (4643, 4649), True, 'import numpy as np\n'), ((4650, 4662), 'numpy.min', 'np.min', (['time'], {}), '(time)\n', (4656, 4662), True, 'import numpy as np\n'), ((7669, 7699), 'pandas.DataFrame', 'pd.DataFrame', (['horinzontal_temp'], {}), '(horinzontal_temp)\n', (7681, 7699), True, 'import pandas as pd\n'), ((1699, 1740), 'numpy.sin', 'np.sin', (['(2 * np.pi * f_heating * x + phase)'], {}), '(2 * np.pi * f_heating * x + phase)\n', (1705, 1740), True, 'import numpy as np\n'), ((1955, 1991), 'numpy.sin', 'np.sin', (['(2 * np.pi * freq * x + phase)'], {}), '(2 * np.pi * freq * x + phase)\n', (1961, 1991), True, 'import numpy as np\n'), ((4939, 4961), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (4958, 4961), False, 'import operator\n'), ((5028, 5050), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (5047, 5050), False, 'import operator\n'), ((1413, 1440), 'numpy.array', 'np.array', (["df_rec['reltime']"], {}), "(df_rec['reltime'])\n", (1421, 1440), True, 'import numpy as np\n')]
|
import argparse
import sys
import os
import glob
import simParse
import entityParse
parser = argparse.ArgumentParser(description='Process a report')
#parser.add_argument('--input_file', dest='input_file', default='/logs/EntityLog/', help='input file path')
#parser.add_argument('--report_type', dest='report_type', default='entity_report', help='input file type')
parser.add_argument('--input_file', dest='input_file', default='/logs/simReports/sim_report.txt', help='input file path')
parser.add_argument('--report_type', dest='report_type', default='sim_report', help='input file type')
args = parser.parse_args()
def parse_file(input_path, type_of_report, database='sim_test'):
print("parsing: " + input_path)
print("Type : " + type_of_report)
lines = []
''' a sim_report is a detailed report of population information for each turn '''
    if(type_of_report == 'sim_report' and '.txt' in input_path):
print('creating sim report parsed file')
with open(input_path, 'r') as file:
for line in file:
lines.append(line)
#
file.close()
simParse.parse_sim_report(lines, database)
    elif(type_of_report == 'entity_report' and input_path[-1] == '/'):
files = glob.glob('{}*.txt'.format(input_path))
files.sort()
print('found {} files'.format(len(files)))
for filename in files:
lines = []
with open(filename, 'r') as file:
for line in file:
lines.append(line)
#
file.close()
entityParse.parse_entity_report(lines, filename)
'''
MAIN
'''
if __name__ == "__main__":
path = sys.path[0]
path = path.split('/')
path = "/".join(path[0:len(path)-1])
#parse_file(path + args.input_file, args.report_type)
parse_file(path + '/logs/simReports/sim_report.txt', 'sim_report')
parse_file(path + '/logs/EntityLog/', 'entity_report')
#string = '{3340'
#print(string.split('{')[1])
|
[
"simParse.parse_sim_report",
"argparse.ArgumentParser",
"entityParse.parse_entity_report"
] |
[((95, 150), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process a report"""'}), "(description='Process a report')\n", (118, 150), False, 'import argparse\n'), ((1141, 1183), 'simParse.parse_sim_report', 'simParse.parse_sim_report', (['lines', 'database'], {}), '(lines, database)\n', (1166, 1183), False, 'import simParse\n'), ((1615, 1663), 'entityParse.parse_entity_report', 'entityParse.parse_entity_report', (['lines', 'filename'], {}), '(lines, filename)\n', (1646, 1663), False, 'import entityParse\n')]
|
#!/usr/bin/env python3
import argparse
try:
from search_engines.engines import search_engines_dict
from search_engines.multiple_search_engines import MultipleSearchEngines, AllSearchEngines
from search_engines import config
except ImportError as err:
MSG = '\nPlease install `search_engines` to resolve this error.'
raise ImportError(f'{MSG}\n') from err
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-q',
help='query', required=True)
parser.add_argument('-e',
help='search engine(s) - ' + ', '.join(search_engines_dict) + ', or "all"',
default='duckduckgo')
parser.add_argument('-o',
help='output file [html, csv, json]',
default='print')
parser.add_argument('-n',
help='filename for output file',
default=str(config.OUTPUT_DIR / 'output'))
parser.add_argument('-p',
help='number of pages',
default=config.SEARCH_ENGINE_RESULTS_PAGES,
type=int)
parser.add_argument('-f',
help='filter results [url, title, text, host]',
default=None)
parser.add_argument('-i',
help='ignore duplicates, useful when multiple search engines are used',
action='store_true')
parser.add_argument('-proxy',
help='use proxy (protocol://ip:port)',
default=config.PROXY)
args = parser.parse_args()
proxy = args.proxy
timeout = config.TIMEOUT + (10 * bool(proxy))
agent = config.FAKE_USER_AGENT
engines = [
e.strip() for e in args.e.lower().split(',')
if e.strip() in search_engines_dict or e.strip() == 'all'
]
if not engines:
print('Please choose a search engine: ' + ', '.join(search_engines_dict))
else:
if 'all' in engines:
engine = AllSearchEngines(agent, proxy, timeout)
elif len(engines) > 1:
engine = MultipleSearchEngines(engines, agent, proxy, timeout)
else:
engine = search_engines_dict[engines[0]](agent, proxy, timeout)
engine.ignore_duplicate_urls = args.i
if args.f:
engine.set_search_operator(args.f)
engine.search(args.q, args.p)
engine.output(args.o, args.n)
if __name__ == '__main__':
main()
|
[
"search_engines.multiple_search_engines.AllSearchEngines",
"argparse.ArgumentParser",
"search_engines.multiple_search_engines.MultipleSearchEngines"
] |
[((403, 428), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (426, 428), False, 'import argparse\n'), ((2054, 2093), 'search_engines.multiple_search_engines.AllSearchEngines', 'AllSearchEngines', (['agent', 'proxy', 'timeout'], {}), '(agent, proxy, timeout)\n', (2070, 2093), False, 'from search_engines.multiple_search_engines import MultipleSearchEngines, AllSearchEngines\n'), ((2146, 2199), 'search_engines.multiple_search_engines.MultipleSearchEngines', 'MultipleSearchEngines', (['engines', 'agent', 'proxy', 'timeout'], {}), '(engines, agent, proxy, timeout)\n', (2167, 2199), False, 'from search_engines.multiple_search_engines import MultipleSearchEngines, AllSearchEngines\n')]
|
n, q = map(int, input().split())
g = [[] for i in range(n)]
for i in range(n - 1):
ai, bi = map(int, input().split())
ai -= 1
bi -= 1
g[ai].append(bi)
g[bi].append(ai)
# N: number of vertices
# G[v]: child vertices of vertex v (the parent is not included)
#
# - construct
# prv[u] = v: v is the ancestor one step above vertex u
# - lca
# kprv[k][u] = v: v is the ancestor 2^k steps above vertex u
# depth[u]: depth of vertex u (the root has depth 0)
N = n
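# LV: number of doubling levels needed for binary lifting (covers any jump up to N-1)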
LV = (N - 1).bit_length()
def construct(prv):
kprv = [prv]
S = prv
for k in range(LV):
T = [0] * N
for i in range(N):
if S[i] is None:
continue
T[i] = S[S[i]]
kprv.append(T)
S = T
return kprv
def lca(u, v, kprv, depth):
dd = depth[v] - depth[u]
if dd < 0:
u, v = v, u
dd = -dd
# assert depth[u] <= depth[v]
for k in range(LV + 1):
if dd & 1:
v = kprv[k][v]
dd >>= 1
# assert depth[u] == depth[v]
if u == v:
return u
for k in range(LV - 1, -1, -1):
pu = kprv[k][u]
pv = kprv[k][v]
if pu != pv:
u = pu
v = pv
# assert kprv[0][u] == kprv[0][v]
return kprv[0][u]
# BFS
infty = 10**10
depth = [infty for i in range(n)]
prev = [infty for i in range(n)]
prev[0] = 0
depth[0] = 0
from collections import deque
dq = deque()
dq.append(0)
while len(dq):
u = dq.popleft()
for v in g[u]:
if depth[v] == infty:
depth[v] = depth[u] + 1
prev[v] = u
dq.append(v)
kprv = construct(prev)
for i in range(q):
ci, di = map(int, input().split())
ci -= 1
di -= 1
lc = lca(ci, di, kprv, depth)
dist = depth[ci] + depth[di] - depth[lc] * 2
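    # the midpoint of the ci-di path is a vertex when the distance is even ("Town"), mid-edge when odd ("Road")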
if dist % 2 == 0:
print("Town")
else:
print("Road")
|
[
"collections.deque"
] |
[((1289, 1296), 'collections.deque', 'deque', ([], {}), '()\n', (1294, 1296), False, 'from collections import deque\n')]
|
import pytest
from malt.parser.optionparser import parse, parse_all
from malt.exceptions import EmptyOptionString
def test_signature():
"""
Parsing takes an option string and creates a Signature. Basic operation test.
"""
option = "pow i:number i:power=2"
result = parse(option)
assert result.head == 'pow'
assert result.body[0].position == 0
assert result.body[0].key == 'number'
assert result.body[0].value == None
assert result.body[0].cast == 'i'
assert result.body[1].position == 1
assert result.body[1].key == 'power'
assert result.body[1].value == '2'
assert result.body[1].cast == 'i'
def test_parse_on_no_args():
"""
Parsing a command with no arguments should not raise any errors.
"""
result = parse("command")
assert result.head == "command"
assert result.body == []
def test_failure_empty_input():
"""
Raise EmptyOptionString when given empty input.
"""
with pytest.raises(EmptyOptionString):
parse('')
|
[
"pytest.raises",
"malt.parser.optionparser.parse"
] |
[((288, 301), 'malt.parser.optionparser.parse', 'parse', (['option'], {}), '(option)\n', (293, 301), False, 'from malt.parser.optionparser import parse, parse_all\n'), ((781, 797), 'malt.parser.optionparser.parse', 'parse', (['"""command"""'], {}), "('command')\n", (786, 797), False, 'from malt.parser.optionparser import parse, parse_all\n'), ((974, 1006), 'pytest.raises', 'pytest.raises', (['EmptyOptionString'], {}), '(EmptyOptionString)\n', (987, 1006), False, 'import pytest\n'), ((1016, 1025), 'malt.parser.optionparser.parse', 'parse', (['""""""'], {}), "('')\n", (1021, 1025), False, 'from malt.parser.optionparser import parse, parse_all\n')]
|
import requests
import urllib.request, json, re
def load_organizational_data(identification_number):
address = 'https://or.justice.cz/ias/ui/rejstrik-$firma?ico={0}'.format(identification_number)
with urllib.request.urlopen(address) as url:
page = url.read().decode('utf-8')
detail_page_pattern = r'subjektId=\d*'
result = re.search(detail_page_pattern, page)
zadost_string = result.group(0)
print(zadost_string)
load_organizational_data(22758518)
|
[
"re.search"
] |
[((356, 392), 're.search', 're.search', (['detail_page_pattern', 'page'], {}), '(detail_page_pattern, page)\n', (365, 392), False, 'import urllib.request, json, re\n')]
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/core/auth/signed_message.proto
"""Generated protocol buffer code."""
# third party
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# third party
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
# syft absolute
from syft.proto.core.common import (
common_object_pb2 as proto_dot_core_dot_common_dot_common__object__pb2,
)
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n$proto/core/auth/signed_message.proto\x12\x0esyft.core.auth\x1a%proto/core/common/common_object.proto\x1a\x1bgoogle/protobuf/empty.proto"\x80\x01\n\rSignedMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x10\n\x08obj_type\x18\x02 \x01(\t\x12\x11\n\tsignature\x18\x03 \x01(\x0c\x12\x12\n\nverify_key\x18\x04 \x01(\x0c\x12\x0f\n\x07message\x18\x05 \x01(\x0c"\x1f\n\tVerifyKey\x12\x12\n\nverify_key\x18\x01 \x01(\x0c"0\n\tVerifyAll\x12#\n\x03\x61ll\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Emptyb\x06proto3'
)
_SIGNEDMESSAGE = DESCRIPTOR.message_types_by_name["SignedMessage"]
_VERIFYKEY = DESCRIPTOR.message_types_by_name["VerifyKey"]
_VERIFYALL = DESCRIPTOR.message_types_by_name["VerifyAll"]
SignedMessage = _reflection.GeneratedProtocolMessageType(
"SignedMessage",
(_message.Message,),
{
"DESCRIPTOR": _SIGNEDMESSAGE,
"__module__": "proto.core.auth.signed_message_pb2"
# @@protoc_insertion_point(class_scope:syft.core.auth.SignedMessage)
},
)
_sym_db.RegisterMessage(SignedMessage)
VerifyKey = _reflection.GeneratedProtocolMessageType(
"VerifyKey",
(_message.Message,),
{
"DESCRIPTOR": _VERIFYKEY,
"__module__": "proto.core.auth.signed_message_pb2"
# @@protoc_insertion_point(class_scope:syft.core.auth.VerifyKey)
},
)
_sym_db.RegisterMessage(VerifyKey)
VerifyAll = _reflection.GeneratedProtocolMessageType(
"VerifyAll",
(_message.Message,),
{
"DESCRIPTOR": _VERIFYALL,
"__module__": "proto.core.auth.signed_message_pb2"
# @@protoc_insertion_point(class_scope:syft.core.auth.VerifyAll)
},
)
_sym_db.RegisterMessage(VerifyAll)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_SIGNEDMESSAGE._serialized_start = 125
_SIGNEDMESSAGE._serialized_end = 253
_VERIFYKEY._serialized_start = 255
_VERIFYKEY._serialized_end = 286
_VERIFYALL._serialized_start = 288
_VERIFYALL._serialized_end = 336
# @@protoc_insertion_point(module_scope)
|
[
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor_pool.Default",
"google.protobuf.reflection.GeneratedProtocolMessageType"
] |
[((514, 540), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (538, 540), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((1571, 1742), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""SignedMessage"""', '(_message.Message,)', "{'DESCRIPTOR': _SIGNEDMESSAGE, '__module__':\n 'proto.core.auth.signed_message_pb2'}"], {}), "('SignedMessage', (_message.Message\n ,), {'DESCRIPTOR': _SIGNEDMESSAGE, '__module__':\n 'proto.core.auth.signed_message_pb2'})\n", (1611, 1742), True, 'from google.protobuf import reflection as _reflection\n'), ((1900, 2062), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""VerifyKey"""', '(_message.Message,)', "{'DESCRIPTOR': _VERIFYKEY, '__module__': 'proto.core.auth.signed_message_pb2'}"], {}), "('VerifyKey', (_message.Message,),\n {'DESCRIPTOR': _VERIFYKEY, '__module__':\n 'proto.core.auth.signed_message_pb2'})\n", (1940, 2062), True, 'from google.protobuf import reflection as _reflection\n'), ((2213, 2375), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""VerifyAll"""', '(_message.Message,)', "{'DESCRIPTOR': _VERIFYALL, '__module__': 'proto.core.auth.signed_message_pb2'}"], {}), "('VerifyAll', (_message.Message,),\n {'DESCRIPTOR': _VERIFYALL, '__module__':\n 'proto.core.auth.signed_message_pb2'})\n", (2253, 2375), True, 'from google.protobuf import reflection as _reflection\n'), ((779, 805), 'google.protobuf.descriptor_pool.Default', '_descriptor_pool.Default', ([], {}), '()\n', (803, 805), True, 'from google.protobuf import descriptor_pool as _descriptor_pool\n')]
|
from pathlib import Path
def test_scrape(aqmesh_scraper_setup, tmpdir):
data = Path(aqmesh_scraper_setup["co2"]["data"])
metadata = Path(aqmesh_scraper_setup["co2"]["metadata"])
assert data.exists()
assert metadata.exists()
assert data.name == "20210515_20211024_CO2_AQMesh_Scaled_Dataset_PPM.csv"
assert metadata.name == "20210515_20211024_CO2_pod_metadata.csv"
|
[
"pathlib.Path"
] |
[((85, 126), 'pathlib.Path', 'Path', (["aqmesh_scraper_setup['co2']['data']"], {}), "(aqmesh_scraper_setup['co2']['data'])\n", (89, 126), False, 'from pathlib import Path\n'), ((142, 187), 'pathlib.Path', 'Path', (["aqmesh_scraper_setup['co2']['metadata']"], {}), "(aqmesh_scraper_setup['co2']['metadata'])\n", (146, 187), False, 'from pathlib import Path\n')]
|
# Copyright (c) 2020 Foundry.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import print_function
import sys
import os
import time
import scipy.misc
import numpy as np
import cv2
import tensorflow as tf
tf.compat.v1.disable_eager_execution() # For TF 2.x compatibility
from models.baseModel import BaseModel
from models.common.model_builder import baseline_model
from models.common.util import print_, get_ckpt_list, linear_to_srgb, srgb_to_linear
import message_pb2
class Model(BaseModel):
"""Load your trained model and do inference in Nuke"""
def __init__(self):
super(Model, self).__init__()
self.name = 'Regression Template TF'
self.n_levels = 3
self.scale = 0.5
dir_path = os.path.dirname(os.path.realpath(__file__))
self.checkpoints_dir = os.path.join(dir_path, 'checkpoints')
self.patch_size = 50
self.output_param_number = 1
# Initialise checkpoint name to the latest checkpoint
ckpt_names = get_ckpt_list(self.checkpoints_dir)
if not ckpt_names: # empty list
self.checkpoint_name = ''
else:
latest_ckpt = tf.compat.v1.train.latest_checkpoint(self.checkpoints_dir)
if latest_ckpt is not None:
self.checkpoint_name = latest_ckpt.split('/')[-1]
else:
self.checkpoint_name = ckpt_names[-1]
self.prev_ckpt_name = self.checkpoint_name
# Silence TF log when creating tf.Session()
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# Define options
self.gamma_to_predict = 1.0
self.predict = False
self.options = ('checkpoint_name', 'gamma_to_predict',)
self.buttons = ('predict',)
# Define inputs/outputs
self.inputs = {'input': 3}
self.outputs = {'output': 3}
def load(self, model):
# Check if empty or invalid checkpoint name
if self.checkpoint_name=='':
ckpt_names = get_ckpt_list(self.checkpoints_dir)
if not ckpt_names:
raise ValueError("No checkpoints found in {}".format(self.checkpoints_dir))
else:
raise ValueError("Empty checkpoint name, try an available checkpoint in {} (ex: {})"
.format(self.checkpoints_dir, ckpt_names[-1]))
print_("Loading trained model checkpoint...\n", 'm')
# Load from given checkpoint file name
self.saver.restore(self.sess, os.path.join(self.checkpoints_dir, self.checkpoint_name))
print_("...Checkpoint {} loaded\n".format(self.checkpoint_name), 'm')
def inference(self, image_list):
"""Do an inference on the model with a set of inputs.
# Arguments:
image_list: The input image list
Return the result of the inference.
"""
image = image_list[0]
image = linear_to_srgb(image).copy()
if not hasattr(self, 'sess'):
# Initialise tensorflow graph
tf.compat.v1.reset_default_graph()
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth=True
self.sess=tf.compat.v1.Session(config=config)
# Input is stacked histograms of original and gamma-graded images.
input_shape = [1, 2, 100]
# Initialise input placeholder size
self.input = tf.compat.v1.placeholder(tf.float32, shape=input_shape)
self.model = baseline_model(
input_shape=input_shape[1:],
output_param_number=self.output_param_number)
self.infer_op = self.model(self.input)
# Load latest model checkpoint
self.saver = tf.compat.v1.train.Saver()
self.load(self.model)
self.prev_ckpt_name = self.checkpoint_name
# If checkpoint name has changed, load new checkpoint
if self.prev_ckpt_name != self.checkpoint_name or self.checkpoint_name == '':
self.load(self.model)
# If checkpoint correctly loaded, update previous checkpoint name
self.prev_ckpt_name = self.checkpoint_name
# Preprocess image same way we preprocessed it for training
# Here for gamma correction compute histograms
def histogram(x, value_range=[0.0, 1.0], nbins=100):
"""Return histogram of tensor x"""
h, w, c = x.shape
hist = tf.histogram_fixed_width(x, value_range, nbins=nbins)
hist = tf.divide(hist, h * w * c)
return hist
with tf.compat.v1.Session() as sess:
# Convert to grayscale
img_gray = tf.image.rgb_to_grayscale(image)
img_gray = tf.image.resize(img_gray, [self.patch_size, self.patch_size])
# Apply gamma correction
img_gray_grade = tf.math.pow(img_gray, self.gamma_to_predict)
img_grade = tf.math.pow(image, self.gamma_to_predict)
# Compute histograms
img_hist = histogram(img_gray)
img_grade_hist = histogram(img_gray_grade)
hists_op = tf.stack([img_hist, img_grade_hist], axis=0)
hists, img_grade = sess.run([hists_op, img_grade])
res_img = srgb_to_linear(img_grade)
hists_batch = np.expand_dims(hists, 0)
start = time.time()
# Run model inference
inference = self.sess.run(self.infer_op, feed_dict={self.input: hists_batch})
duration = time.time() - start
print('Inference duration: {:4.3f}s'.format(duration))
res = inference[-1]
print("Predicted gamma: {}".format(res))
# If predict button is pressed in Nuke
if self.predict:
script_msg = message_pb2.FieldValuePairAttrib()
script_msg.name = "PythonScript"
# Create a Python script message to run in Nuke
python_script = self.nuke_script(res)
script_msg_val = script_msg.values.add()
script_msg_str = script_msg_val.string_attributes.add()
script_msg_str.values.extend([python_script])
return [res_img, script_msg]
return [res_img]
def nuke_script(self, res):
"""Return the Python script function to create a pop up window in Nuke."""
popup_msg = "Predicted gamma: {}".format(res)
script = "nuke.message('{}')\n".format(popup_msg)
return script
|
[
"tensorflow.image.rgb_to_grayscale",
"models.common.util.print_",
"tensorflow.compat.v1.disable_eager_execution",
"tensorflow.histogram_fixed_width",
"tensorflow.divide",
"os.path.join",
"message_pb2.FieldValuePairAttrib",
"tensorflow.compat.v1.placeholder",
"tensorflow.stack",
"tensorflow.compat.v1.Session",
"models.common.model_builder.baseline_model",
"tensorflow.math.pow",
"tensorflow.compat.v1.train.latest_checkpoint",
"models.common.util.srgb_to_linear",
"os.path.realpath",
"tensorflow.compat.v1.train.Saver",
"tensorflow.compat.v1.ConfigProto",
"models.common.util.linear_to_srgb",
"models.common.util.get_ckpt_list",
"numpy.expand_dims",
"time.time",
"tensorflow.compat.v1.reset_default_graph",
"tensorflow.image.resize"
] |
[((802, 840), 'tensorflow.compat.v1.disable_eager_execution', 'tf.compat.v1.disable_eager_execution', ([], {}), '()\n', (838, 840), True, 'import tensorflow as tf\n'), ((1405, 1442), 'os.path.join', 'os.path.join', (['dir_path', '"""checkpoints"""'], {}), "(dir_path, 'checkpoints')\n", (1417, 1442), False, 'import os\n'), ((1593, 1628), 'models.common.util.get_ckpt_list', 'get_ckpt_list', (['self.checkpoints_dir'], {}), '(self.checkpoints_dir)\n', (1606, 1628), False, 'from models.common.util import print_, get_ckpt_list, linear_to_srgb, srgb_to_linear\n'), ((2935, 2987), 'models.common.util.print_', 'print_', (['"""Loading trained model checkpoint...\n"""', '"""m"""'], {}), "('Loading trained model checkpoint...\\n', 'm')\n", (2941, 2987), False, 'from models.common.util import print_, get_ckpt_list, linear_to_srgb, srgb_to_linear\n'), ((5872, 5896), 'numpy.expand_dims', 'np.expand_dims', (['hists', '(0)'], {}), '(hists, 0)\n', (5886, 5896), True, 'import numpy as np\n'), ((5913, 5924), 'time.time', 'time.time', ([], {}), '()\n', (5922, 5924), False, 'import time\n'), ((1346, 1372), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1362, 1372), False, 'import os\n'), ((1747, 1805), 'tensorflow.compat.v1.train.latest_checkpoint', 'tf.compat.v1.train.latest_checkpoint', (['self.checkpoints_dir'], {}), '(self.checkpoints_dir)\n', (1783, 1805), True, 'import tensorflow as tf\n'), ((2582, 2617), 'models.common.util.get_ckpt_list', 'get_ckpt_list', (['self.checkpoints_dir'], {}), '(self.checkpoints_dir)\n', (2595, 2617), False, 'from models.common.util import print_, get_ckpt_list, linear_to_srgb, srgb_to_linear\n'), ((3073, 3129), 'os.path.join', 'os.path.join', (['self.checkpoints_dir', 'self.checkpoint_name'], {}), '(self.checkpoints_dir, self.checkpoint_name)\n', (3085, 3129), False, 'import os\n'), ((3601, 3635), 'tensorflow.compat.v1.reset_default_graph', 'tf.compat.v1.reset_default_graph', ([], {}), '()\n', (3633, 3635), True, 'import tensorflow as tf\n'), ((3657, 3683), 'tensorflow.compat.v1.ConfigProto', 'tf.compat.v1.ConfigProto', ([], {}), '()\n', (3681, 3683), True, 'import tensorflow as tf\n'), ((3755, 3790), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {'config': 'config'}), '(config=config)\n', (3775, 3790), True, 'import tensorflow as tf\n'), ((3981, 4036), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (['tf.float32'], {'shape': 'input_shape'}), '(tf.float32, shape=input_shape)\n', (4005, 4036), True, 'import tensorflow as tf\n'), ((4062, 4156), 'models.common.model_builder.baseline_model', 'baseline_model', ([], {'input_shape': 'input_shape[1:]', 'output_param_number': 'self.output_param_number'}), '(input_shape=input_shape[1:], output_param_number=self.\n output_param_number)\n', (4076, 4156), False, 'from models.common.model_builder import baseline_model\n'), ((4304, 4330), 'tensorflow.compat.v1.train.Saver', 'tf.compat.v1.train.Saver', ([], {}), '()\n', (4328, 4330), True, 'import tensorflow as tf\n'), ((5017, 5070), 'tensorflow.histogram_fixed_width', 'tf.histogram_fixed_width', (['x', 'value_range'], {'nbins': 'nbins'}), '(x, value_range, nbins=nbins)\n', (5041, 5070), True, 'import tensorflow as tf\n'), ((5090, 5116), 'tensorflow.divide', 'tf.divide', (['hist', '(h * w * c)'], {}), '(hist, h * w * c)\n', (5099, 5116), True, 'import tensorflow as tf\n'), ((5154, 5176), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (5174, 5176), True, 'import tensorflow as tf\n'), ((5244, 5276), 'tensorflow.image.rgb_to_grayscale', 'tf.image.rgb_to_grayscale', (['image'], {}), '(image)\n', (5269, 5276), True, 'import tensorflow as tf\n'), ((5300, 5361), 'tensorflow.image.resize', 'tf.image.resize', (['img_gray', '[self.patch_size, self.patch_size]'], {}), '(img_gray, [self.patch_size, self.patch_size])\n', (5315, 5361), True, 'import tensorflow as tf\n'), ((5428, 5472), 'tensorflow.math.pow', 'tf.math.pow', (['img_gray', 'self.gamma_to_predict'], {}), '(img_gray, self.gamma_to_predict)\n', (5439, 5472), True, 'import tensorflow as tf\n'), ((5497, 5538), 'tensorflow.math.pow', 'tf.math.pow', (['image', 'self.gamma_to_predict'], {}), '(image, self.gamma_to_predict)\n', (5508, 5538), True, 'import tensorflow as tf\n'), ((5693, 5737), 'tensorflow.stack', 'tf.stack', (['[img_hist, img_grade_hist]'], {'axis': '(0)'}), '([img_hist, img_grade_hist], axis=0)\n', (5701, 5737), True, 'import tensorflow as tf\n'), ((5823, 5848), 'models.common.util.srgb_to_linear', 'srgb_to_linear', (['img_grade'], {}), '(img_grade)\n', (5837, 5848), False, 'from models.common.util import print_, get_ckpt_list, linear_to_srgb, srgb_to_linear\n'), ((6060, 6071), 'time.time', 'time.time', ([], {}), '()\n', (6069, 6071), False, 'import time\n'), ((6318, 6352), 'message_pb2.FieldValuePairAttrib', 'message_pb2.FieldValuePairAttrib', ([], {}), '()\n', (6350, 6352), False, 'import message_pb2\n'), ((3479, 3500), 'models.common.util.linear_to_srgb', 'linear_to_srgb', (['image'], {}), '(image)\n', (3493, 3500), False, 'from models.common.util import print_, get_ckpt_list, linear_to_srgb, srgb_to_linear\n')]
|
import json
import numpy as np
import os
import skimage.io
def save_np_arrays(images, img_names, save_path):
for img, img_name in zip(images, img_names):
np.save(f'{save_path}/{img_name}', img)
def load_np_arrays(path, num=None):
images = []
img_names = sorted(os.listdir(path))
if num is None:
num = len(img_names)
for idx in range(num):
img_name = img_names[idx]
img = np.load(f'{path}/{img_name}')
images.append(img)
return np.array(images)
def load_images(path, img_names, num_images=None):
images = []
if num_images is None:
num_images = len(img_names)
for idx in range(num_images):
img_name = img_names[idx]
img_path = f'{path}/{img_name}'
img = skimage.io.imread(img_path) / 255.
images.append(img)
return images
def load_images_and_density_maps(path, num_images):
img_names = sorted(os.listdir(f'{path}/images'))[:num_images]
density_map_names = sorted(os.listdir(f'{path}/gt_density_maps'))[:num_images]
images = []
density_maps = []
for img_name, density_map_name in zip(img_names, density_map_names):
img = skimage.io.imread(f'{path}/images/{img_name}') / 255.
density_map = np.load(f'{path}/gt_density_maps/{density_map_name}')
images.append(img)
density_maps.append(density_map)
return images, density_maps
def save_gt_counts(counts, img_names, save_path):
for img_name, count in zip(img_names, counts):
txt_name = f'{img_name.split(".")[0]}.txt'
txt_path = f'{save_path}/{txt_name}'
with open(txt_path, 'w') as fo:
fo.write(str(int(count)))
def load_gt_counts(counts_path):
txt_names = sorted(os.listdir(counts_path))
    counts = np.empty(len(txt_names), dtype=int)
for i, txt_name in enumerate(txt_names):
txt_path = f'{counts_path}/{txt_name}'
with open(txt_path, 'r') as fi:
counts[i] = int(fi.read().split()[0])
return counts
def read_json(filename):
with open(filename, 'r') as fi:
data = json.load(fi)
return data
def write_json(data, filename):
dirname = os.path.dirname(filename)
if not os.path.isdir(dirname):
os.makedirs(dirname)
with open(filename, 'w') as fo:
json.dump(data, fo)
|
[
"json.dump",
"numpy.load",
"numpy.save",
"json.load",
"os.makedirs",
"os.path.isdir",
"os.path.dirname",
"numpy.array",
"os.listdir",
"skimage.io.imread"
] |
[((513, 529), 'numpy.array', 'np.array', (['images'], {}), '(images)\n', (521, 529), True, 'import numpy as np\n'), ((2242, 2267), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2257, 2267), False, 'import os\n'), ((164, 203), 'numpy.save', 'np.save', (['f"""{save_path}/{img_name}"""', 'img'], {}), "(f'{save_path}/{img_name}', img)\n", (171, 203), True, 'import numpy as np\n'), ((288, 304), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (298, 304), False, 'import os\n'), ((440, 469), 'numpy.load', 'np.load', (['f"""{path}/{img_name}"""'], {}), "(f'{path}/{img_name}')\n", (447, 469), True, 'import numpy as np\n'), ((1274, 1327), 'numpy.load', 'np.load', (['f"""{path}/gt_density_maps/{density_map_name}"""'], {}), "(f'{path}/gt_density_maps/{density_map_name}')\n", (1281, 1327), True, 'import numpy as np\n'), ((1784, 1807), 'os.listdir', 'os.listdir', (['counts_path'], {}), '(counts_path)\n', (1794, 1807), False, 'import os\n'), ((2165, 2178), 'json.load', 'json.load', (['fi'], {}), '(fi)\n', (2174, 2178), False, 'import json\n'), ((2279, 2301), 'os.path.isdir', 'os.path.isdir', (['dirname'], {}), '(dirname)\n', (2292, 2301), False, 'import os\n'), ((2311, 2331), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (2322, 2331), False, 'import os\n'), ((2385, 2404), 'json.dump', 'json.dump', (['data', 'fo'], {}), '(data, fo)\n', (2394, 2404), False, 'import json\n'), ((784, 811), 'skimage.io.imread', 'skimage.io.imread', (['img_path'], {}), '(img_path)\n', (801, 811), False, 'import skimage\n'), ((945, 973), 'os.listdir', 'os.listdir', (['f"""{path}/images"""'], {}), "(f'{path}/images')\n", (955, 973), False, 'import os\n'), ((1019, 1056), 'os.listdir', 'os.listdir', (['f"""{path}/gt_density_maps"""'], {}), "(f'{path}/gt_density_maps')\n", (1029, 1056), False, 'import os\n'), ((1198, 1244), 'skimage.io.imread', 'skimage.io.imread', (['f"""{path}/images/{img_name}"""'], {}), "(f'{path}/images/{img_name}')\n", (1215, 1244), False, 'import skimage\n')]
|
# The MIT License (MIT)
#
# Copyright (c) 2021 <NAME> (TG-Techie)
#
# See the file in the root directory of this project for the full license text
from tg_gui_std.all import *
import tg_gui_pyportal as setup
@setup.appwrapper
class Application(Layout):
some_data = State(0.5)
# now let's make the label show the value of the slider
our_label = Label(
text=DerivedState(some_data, lambda d: f"value: {round(d*100, 2)}")
)
our_slider = Slider(value=some_data)
def _any_(self):
our_label = self.our_label(top, (self.width, self.height // 2))
our_slider = self.our_slider(bottom, (9 * self.width // 10, self.height // 2))
setup.run_app_loop()
|
[
"tg_gui_pyportal.run_app_loop"
] |
[((676, 696), 'tg_gui_pyportal.run_app_loop', 'setup.run_app_loop', ([], {}), '()\n', (694, 696), True, 'import tg_gui_pyportal as setup\n')]
|
import logging
from easyprocess import EasyProcess
from pyscreenshot.plugins.backend import CBackend
from pyscreenshot.tempexport import RunProgError, read_func_img
from pyscreenshot.util import extract_version
log = logging.getLogger(__name__)
PROGRAM = "xwd"
# wikipedia: https://en.wikipedia.org/wiki/Xwd
# xwd | xwdtopnm | pnmtopng > Screenshot.png
# xwdtopnm is buggy: https://bugs.launchpad.net/ubuntu/+source/netpbm-free/+bug/1379480
# solution : imagemagick convert
# xwd -root -display :0 | convert xwd:- file.png
# TODO: xwd sometimes grabs the wrong window, so this backend will not be added for now
def read_xwd_img():
def run_prog(fpng, bbox=None):
fxwd = fpng + ".xwd"
pxwd = EasyProcess([PROGRAM, "-root", "-out", fxwd])
pxwd.call()
if pxwd.return_code != 0:
raise RunProgError(pxwd.stderr)
pconvert = EasyProcess(["convert", "xwd:" + fxwd, fpng])
pconvert.call()
if pconvert.return_code != 0:
raise RunProgError(pconvert.stderr)
im = read_func_img(run_prog)
return im
class XwdWrapper(CBackend):
name = "xwd"
is_subprocess = True
def grab(self, bbox=None):
im = read_xwd_img()
if bbox:
im = im.crop(bbox)
return im
def backend_version(self):
return extract_version(EasyProcess([PROGRAM, "-version"]).call().stdout)
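# --- Usage sketch (not part of the original module); it assumes a running X
# display plus the xwd and ImageMagick convert binaries on PATH, so it is left
# commented out.
# backend = XwdWrapper()
# full_screen = backend.grab()                   # PIL image of the whole screen
# region = backend.grab(bbox=(0, 0, 200, 100))   # cropped to a bounding box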
|
[
"easyprocess.EasyProcess",
"pyscreenshot.tempexport.read_func_img",
"pyscreenshot.tempexport.RunProgError",
"logging.getLogger"
] |
[((220, 247), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (237, 247), False, 'import logging\n'), ((1055, 1078), 'pyscreenshot.tempexport.read_func_img', 'read_func_img', (['run_prog'], {}), '(run_prog)\n', (1068, 1078), False, 'from pyscreenshot.tempexport import RunProgError, read_func_img\n'), ((725, 770), 'easyprocess.EasyProcess', 'EasyProcess', (["[PROGRAM, '-root', '-out', fxwd]"], {}), "([PROGRAM, '-root', '-out', fxwd])\n", (736, 770), False, 'from easyprocess import EasyProcess\n'), ((889, 934), 'easyprocess.EasyProcess', 'EasyProcess', (["['convert', 'xwd:' + fxwd, fpng]"], {}), "(['convert', 'xwd:' + fxwd, fpng])\n", (900, 934), False, 'from easyprocess import EasyProcess\n'), ((843, 868), 'pyscreenshot.tempexport.RunProgError', 'RunProgError', (['pxwd.stderr'], {}), '(pxwd.stderr)\n', (855, 868), False, 'from pyscreenshot.tempexport import RunProgError, read_func_img\n'), ((1015, 1044), 'pyscreenshot.tempexport.RunProgError', 'RunProgError', (['pconvert.stderr'], {}), '(pconvert.stderr)\n', (1027, 1044), False, 'from pyscreenshot.tempexport import RunProgError, read_func_img\n'), ((1354, 1388), 'easyprocess.EasyProcess', 'EasyProcess', (["[PROGRAM, '-version']"], {}), "([PROGRAM, '-version'])\n", (1365, 1388), False, 'from easyprocess import EasyProcess\n')]
|
from django import forms
from .models import Comments
class CommentForm(forms.ModelForm):
class Meta:
model = Comments
fields = ('name', 'body')
widgets = {
            'name': forms.TextInput(attrs={'class': 'form-control'}),
            'body': forms.Textarea(attrs={'class': 'form-control'})
}
class CF(forms.ModelForm):
class Meta:
model = Comments
fields = ('name', 'body')
widgets = {
            'name': forms.TextInput(attrs={'class': 'form-control'}),
            'body': forms.Textarea(attrs={'class': 'form-control'})
}
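# --- Usage sketch (not part of the original module). A minimal view, assuming a
# hypothetical 'post_detail.html' template; kept commented since it needs the
# surrounding Django project.
# from django.shortcuts import render
# def post_detail(request):
#     form = CommentForm(request.POST or None)
#     if request.method == 'POST' and form.is_valid():
#         form.save()  # ModelForm persists a Comments row
#     return render(request, 'post_detail.html', {'form': form})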
|
[
"django.forms.TextInput",
"django.forms.Textarea"
] |
[((207, 255), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (222, 255), False, 'from django import forms\n'), ((279, 326), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (293, 326), False, 'from django import forms\n'), ((482, 530), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (497, 530), False, 'from django import forms\n'), ((554, 601), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (568, 601), False, 'from django import forms\n')]
|
# -*- coding: utf-8 -*-
from collections import defaultdict
from os import listdir, path
from helpers.file_name import FileName
class Allocation(object):
def __init__(self, data_path):
files = sorted(listdir(data_path))
files = filter(lambda x: x[-4:] == '.csv', files)
alloc_files = filter(lambda x: x.startswith('alloc'), files)
user_needs_files = filter(lambda x: x.startswith('user_needs'), files)
user_needs_files = map(FileName, user_needs_files)
user_needs_files = {n.attributes['resource_type']: path.join(
data_path, n.name) for n in user_needs_files}
self._allocations = defaultdict(list)
for f in alloc_files:
file_name = FileName(f)
params_dict = file_name.attributes
resource_type = params_dict['resource_type']
params_dict['file_name'] = path.join(data_path, f)
if resource_type in user_needs_files:
params_dict['types_file_name'] = user_needs_files[resource_type]
self._allocations[resource_type].append(params_dict)
self.user_type_files = user_needs_files
def resource_types(self):
return self._allocations.keys()
def __getattr__(self, name):
return self._allocations[name]
def iteritems(self):
        # dict.iteritems() was removed in Python 3; items() behaves the same here.
        return self._allocations.items()
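# --- Usage sketch (not part of the original module). 'data/' is a hypothetical
# directory holding alloc_*.csv and user_needs_*.csv files named in the scheme
# FileName parses.
# alloc = Allocation('data/')
# print(alloc.resource_types())
# for resource_type, runs in alloc.iteritems():
#     print(resource_type, len(runs))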
|
[
"collections.defaultdict",
"helpers.file_name.FileName",
"os.path.join",
"os.listdir"
] |
[((658, 675), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (669, 675), False, 'from collections import defaultdict\n'), ((216, 234), 'os.listdir', 'listdir', (['data_path'], {}), '(data_path)\n', (223, 234), False, 'from os import listdir, path\n'), ((561, 589), 'os.path.join', 'path.join', (['data_path', 'n.name'], {}), '(data_path, n.name)\n', (570, 589), False, 'from os import listdir, path\n'), ((730, 741), 'helpers.file_name.FileName', 'FileName', (['f'], {}), '(f)\n', (738, 741), False, 'from helpers.file_name import FileName\n'), ((885, 908), 'os.path.join', 'path.join', (['data_path', 'f'], {}), '(data_path, f)\n', (894, 908), False, 'from os import listdir, path\n')]
|
#!/usr/bin/python3.8
# -*- coding: utf-8 -*-
"""
Runs all example scripts.
"""
import os
import subprocess
from flask import Flask, request
from flask_restful import Resource, Api
from subprocess import PIPE
python_command = "python"
processes = []
supervisor = None
supervisor_url = 'http://localhost:8070'
dirs = [
"control",
"data_feeder",
"emulator",
"processor",
"recorder",
"spoofer",
"tracker_matlab"
]
class Shutdown(Resource):
def get(self):
try:
for process in processes:
process.terminate()
print(f"supervisor halted, terminating all associated processes...")
finally:
            # The werkzeug development server exposes its shutdown hook under
            # this environ key; looking it up by the supervisor URL always
            # returned None.
            shutdown_hook = request.environ.get('werkzeug.server.shutdown')
if shutdown_hook is not None:
shutdown_hook()
return "terminate received"
# -----------------------------------------------------------------------------
# Execute requisite logic
if __name__ == '__main__':
supervisor = Flask(__name__)
api = Api(supervisor)
api.add_resource(Shutdown, '/shutdown')
print(f"initializing associated processes...")
for dir in dirs:
processes.append(subprocess.Popen([python_command, os.path.join(
os.path.dirname(__file__), dir + "/main.py")], stdout=PIPE, stderr=PIPE))
print(f"initialized {len(processes)} processes to supervisor (terminate by accessing {supervisor_url})...")
supervisor.run(port=8070)
print(f"supervisor launched")
|
[
"flask.request.environ.get",
"flask_restful.Api",
"os.path.dirname",
"flask.Flask"
] |
[((1001, 1016), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1006, 1016), False, 'from flask import Flask, request\n'), ((1027, 1042), 'flask_restful.Api', 'Api', (['supervisor'], {}), '(supervisor)\n', (1030, 1042), False, 'from flask_restful import Resource, Api\n'), ((700, 735), 'flask.request.environ.get', 'request.environ.get', (['supervisor_url'], {}), '(supervisor_url)\n', (719, 735), False, 'from flask import Flask, request\n'), ((1246, 1271), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1261, 1271), False, 'import os\n')]
|
#!/usr/bin/env python3
import datetime
import json
from pathlib import Path
import requests
import MySQLdb
db = MySQLdb.connect(
host="lxc-rrd",
port=3306,
user='sens',
passwd='<PASSWORD>',
db="observatory1")
db_cursor = db.cursor()
def check_db(minutes):
sql = """
SELECT sensors_id
,create_time
FROM sensors
ORDER BY create_time DESC
LIMIT 1;
"""
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
try:
sensors_id = db_result_tuple[0]
db_date = db_result_tuple[1]
except:
raise
    if db_date < datetime.datetime.utcnow() - datetime.timedelta(minutes=minutes):
print("DB last timestamp {db_date} is More than {minutes} minutes ago -> close".format(db_date=db_date, minutes=minutes))
print("Stop_Imaging (do not wait). Park (wait), Close_Roof")
quit(1)
else:
print("DB last timestamp {db_date} is Less than {minutes} minutes ago -> open".format(db_date=db_date, minutes=minutes))
return(sensors_id)
def check_sqm(sensors_id, sqm_min):
sql = """
SELECT sqm1_sqm
FROM sensors
WHERE sensors_id = {sensors_id}
LIMIT 1;
""".format(sensors_id=sensors_id)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
#print(db_result_tuple)
try:
sqm = db_result_tuple[0]
except:
raise
if sqm >= sqm_min:
print("SQM {sqm} >= minimum {sqm_min} -> open".format(sqm=sqm, sqm_min=sqm_min))
return(True)
else:
print("SQM {sqm} < minimum {sqm_min} -> close".format(sqm=sqm, sqm_min=sqm_min))
return(False)
def check_sqm_past(sqm_min, seconds, outlier_count_max):
sql = """
SELECT COUNT(*)
FROM sensors
WHERE create_time > DATE_SUB(UTC_TIMESTAMP(), INTERVAL {seconds} second)
AND sqm1_sqm < {sqm_min};
""".format(seconds=seconds, sqm_min=sqm_min)
#print(sql)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
#print(db_result_tuple)
try:
count = db_result_tuple[0]
except:
raise
if count <= outlier_count_max:
print("SQM < minimum {sqm_min} count over the last {seconds} seconds is {count} <= {outlier_count_max} -> open".format(sqm_min=sqm_min, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(True)
else:
print("SQM < minimum {sqm_min} count over the last {seconds} seconds is {count} > {outlier_count_max} -> close".format(sqm_min=sqm_min, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(False)
def check_rain(sensors_id, drops_min):
sql = """
SELECT rainsensor1_drops
FROM sensors
WHERE sensors_id = {sensors_id}
LIMIT 1;
""".format(sensors_id=sensors_id)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
#print(db_result_tuple)
try:
drops = db_result_tuple[0]
except:
raise
if drops <= drops_min:
print("Rain drops {drops} <= minimum {drops_min} -> open".format(drops=drops, drops_min=drops_min))
return(True)
else:
print("Rain drops {drops} > minimum {drops_min} -> close".format(drops=drops, drops_min=drops_min))
return(False)
def check_rain_past(drops_min, seconds, outlier_count_max):
sql = """
SELECT COUNT(*)
FROM sensors
WHERE create_time > DATE_SUB(UTC_TIMESTAMP(), INTERVAL {seconds} second)
AND rainsensor1_drops > {drops_min};
""".format(seconds=seconds, drops_min=drops_min)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
#print(db_result_tuple)
try:
count = db_result_tuple[0]
except:
raise
if count <= outlier_count_max:
print("Rain drops <= minimum {drops_min} count over the last {seconds} seconds is {count} <= {outlier_count_max} -> open".format(drops_min=drops_min, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(True)
else:
print("Rain drops > minimum {drops_min} count over the last {seconds} seconds is {count} > {outlier_count_max} -> close".format(drops_min=drops_min, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(False)
def check_ups_is_on_mains(sensors_id, min_ups_bcharge):
sql = """
SELECT ups1_status, ups1_bcharge
FROM sensors
WHERE sensors_id = {sensors_id}
LIMIT 1;
""".format(sensors_id=sensors_id)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
# print(db_result_tuple)
try:
ups_status = db_result_tuple[0]
ups_bcharge = db_result_tuple[1]
except:
raise
if ups_status == 1 and ups_bcharge >= min_ups_bcharge:
print("UPS is powered and battery charge {bcharge} >= {min_ups_bcharge} -> open".format(bcharge=ups_bcharge, min_ups_bcharge=min_ups_bcharge))
return True
else:
if ups_status != 1:
print("UPS is on battery (and battery charge is {bcharge}) -> close".format(bcharge=ups_bcharge))
else:
print("UPS is powered but battery charge {bcharge} < {min_ups_bcharge} -> open".format(bcharge=ups_bcharge, min_ups_bcharge=min_ups_bcharge))
return False
def check_infrared(sensors_id, sensor, minimum_delta_t):
sql = """
SELECT {sensor}_temperature_sensor
, {sensor}_temperature_sky
, {sensor}_temperature_sensor - {sensor}_temperature_sky
FROM sensors
WHERE sensors_id = {sensors_id}
LIMIT 1;
""".format(sensor=sensor, sensors_id=sensors_id)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
#print(db_result_tuple)
try:
temperature_sensor = db_result_tuple[0]
temperature_sky = db_result_tuple[1]
delta_t = db_result_tuple[2]
except:
raise
if delta_t >= minimum_delta_t:
print("Sensor {sensor} sky temperature delta ({temperature_sensor} - {temperature_sky} = {delta_t}) >= {minimum_delta_t} -> open".format(sensor=sensor, temperature_sensor=temperature_sensor, temperature_sky=temperature_sky, delta_t=delta_t, minimum_delta_t=minimum_delta_t))
return(True)
else:
print("Sensor {sensor} sky temperature delta ({temperature_sensor} - {temperature_sky} = {delta_t}) < {minimum_delta_t} -> close".format(sensor=sensor, temperature_sensor=temperature_sensor, temperature_sky=temperature_sky, delta_t=delta_t, minimum_delta_t=minimum_delta_t))
return(False)
def check_infrared_past(sensor, minimum_delta_t, seconds, outlier_count_max):
sql = """
SELECT COUNT(*)
FROM sensors
WHERE create_time > DATE_SUB(UTC_TIMESTAMP(), INTERVAL {seconds} second)
AND {sensor}_temperature_sensor - {sensor}_temperature_sky < {minimum_delta_t};
""".format(sensor=sensor, seconds=seconds, minimum_delta_t=minimum_delta_t)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
#print(db_result_tuple)
try:
count = db_result_tuple[0]
except:
raise
if count < outlier_count_max:
print("Sensor {sensor} sky temperature delta < {minimum_delta_t} count over the last {seconds} seconds is {count} <= {outlier_count_max} -> open".format(sensor=sensor, minimum_delta_t=minimum_delta_t, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(True)
else:
print("Sensor {sensor} sky temperature delta < {minimum_delta_t} count over the last {seconds} seconds is {count} > {outlier_count_max} -> close".format(sensor=sensor, minimum_delta_t=minimum_delta_t, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(False)
def last_event_long_enough_ago(event, seconds, outlier_count_max):
sql = """
SELECT COUNT(*)
FROM events
WHERE create_time > DATE_SUB(UTC_TIMESTAMP(), INTERVAL {seconds} second)
AND event = '{event}';
""".format(event=event, seconds=seconds)
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
#print(db_result_tuple)
try:
count = db_result_tuple[0]
except:
raise
if count < outlier_count_max:
print("Event {event} count over the last {seconds} seconds is {count} < {outlier_count_max} -> open".format(event=event, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(True)
else:
print("Event {event} count over the last {seconds} seconds is {count} >= {outlier_count_max} -> close".format(event=event, seconds=seconds, count=count, outlier_count_max=outlier_count_max))
return(False)
def retrieve_previous_open_ok():
sql = """
SELECT create_time,open_ok
FROM roof
ORDER BY roof_id DESC
LIMIT 1;
"""
db_cursor.execute(sql)
db_result_tuple = db_cursor.fetchone()
last_open_ok = bool(db_result_tuple[1])
print("Roof open status is {}".format(last_open_ok))
return(last_open_ok)
#def retrieve_previous_open(sensors_id):
# sql = """
# SELECT open
# FROM roof
# WHERE sensors_id = {}
# LIMIT 1;
# """.format(sensors_id)
# sql = """
# SELECT create_time, open_ok
# FROM roof
# ORDER BY roof_id DESC
# LIMIT 1;
# """
# db_cursor.execute(sql)
# db_result_tuple = db_cursor.fetchone()
# open = db_result_tuple[1]
# return(bool(open))
def store_roof_status(utcnow, sensors_id, open_ok, reasons):
sql_keys = []
sql_values = []
sql_keys.append("create_time")
sql_values.append('"' + str(utcnow) + '"')
sql_keys.append("sensors_id")
sql_values.append(str(sensors_id))
sql_keys.append("open_ok")
sql_values.append(str(open_ok))
sql_keys.append("reasons")
sql_values.append('"' + reasons + '"')
sql = """
INSERT INTO observatory1.roof ({keys})
VALUES ({values});
""".format(keys = ','.join(sql_keys),
values = ','.join(sql_values))
#print("{}".format(sql.lstrip().rstrip()))
try:
db_cursor.execute(sql)
db.commit()
#print(db_cursor.rowcount, "record inserted.")
except:
db.rollback()
raise
#def get_roof_status(minutes):
# sql = """
# SELECT open_ok
# ,create_time
# FROM roof
# ORDER BY roof_id DESC
# LIMIT 1;
# """
# db_cursor.execute(sql)
# db_result_tuple = db_cursor.fetchone()
# try:
# last_open_ok = db_result_tuple[0]
# db_date = db_result_tuple[1]
# except:
# raise
# if db_date < datetime.datetime.utcnow() - datetime.timedelta(minutes=minutes) :
# print("DB last timestamp {db_date} is More than {minutes} minutes ago -> close".format(db_date=db_date, minutes=minutes))
# print("Stop_Imaging (do not wait). Park (wait), Close_Roof")
# quit(1)
# else:
# return(last_open_ok)
def store_event(utcnow, event, reason = None):
sql_keys = []
sql_values = []
sql_keys.append("create_time")
sql_values.append('"' + str(utcnow) + '"')
sql_keys.append("event")
sql_values.append('"' + event + '"')
if reason:
sql_keys.append("reason")
sql_values.append('"' + reason + '"')
sql = """
INSERT INTO observatory1.events ({keys})
VALUES ({values});
""".format(keys = ','.join(sql_keys),
values = ','.join(sql_values))
#print("{}".format(sql.lstrip().rstrip()))
try:
db_cursor.execute(sql)
db.commit()
#print(db_cursor.rowcount, "record inserted.")
except:
db.rollback()
raise
def sendToMattermost(url, message):
print("Send to mattermost: {}".format(message))
payload = {}
payload['text'] = message
r = requests.post(url, data={'payload': json.dumps(payload, sort_keys=True, indent=4)})
if r.status_code != 200:
try:
r = json.loads(r.text)
except ValueError:
r = {'message': r.text, 'status_code': r.status_code}
raise RuntimeError("{} ({})".format(r['message'], r['status_code']))
def main():
home = str(Path.home())
mattermost_url_file = open(home + "/.mattermosturl", 'r')
url = mattermost_url_file.read().rstrip('\n')
mattermost_url_file.close()
sensors_id = check_db(minutes=2)
# roof_status = get_roof_status(minutes=2)
# if roof_status == 1:
last_open_ok = retrieve_previous_open_ok()
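    # Hysteresis: while the roof is open, relax the SQM and sky-temperature
    # thresholds so readings hovering at the limit do not make the roof
    # oscillate (e.g. opening needs SQM >= 17.5, but once open it only closes
    # below 17.5 - 6 = 11.5).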
if last_open_ok is True:
sqm_min_hysterese = 6
minimum_delta_t_hysterese = 7
else:
sqm_min_hysterese = 0
minimum_delta_t_hysterese = 0
sqm_now_ok = check_sqm(sensors_id, sqm_min=17.5 - sqm_min_hysterese)
rain_now_ok = check_rain(sensors_id, drops_min=1)
ups_now_ok1 = check_ups_is_on_mains(sensors_id, 99.0)
if ups_now_ok1 is False:
# might be self-test. check previous minute
ups_now_ok2 = check_ups_is_on_mains(sensors_id - 1, 99.0)
infrared1_now_ok = check_infrared(sensors_id, sensor='BAA1', minimum_delta_t=20 - minimum_delta_t_hysterese)
infrared2_now_ok = check_infrared(sensors_id, sensor='BCC1', minimum_delta_t=20 - minimum_delta_t_hysterese)
sqm_past_ok = check_sqm_past(sqm_min=17.5 - sqm_min_hysterese, seconds=3600, outlier_count_max=5)
rain_past_ok = check_rain_past(drops_min=1, seconds=3600, outlier_count_max=2)
infrared1_past_ok = check_infrared_past(sensor='BAA1', minimum_delta_t=20 - minimum_delta_t_hysterese, seconds=3600, outlier_count_max=5)
infrared2_past_ok = check_infrared_past(sensor='BCC1', minimum_delta_t=20 - minimum_delta_t_hysterese, seconds=3600, outlier_count_max=5)
closing_event_past_ok = last_event_long_enough_ago(event="closing", seconds=3600, outlier_count_max=1)
reason_open = []
reason_close = []
if sensors_id:
reason_open.append("DB ok")
else:
reason_close.append("DB not ok")
if sqm_now_ok:
if sqm_past_ok:
reason_open.append("Dark long enough")
else:
reason_close.append("Not dark long enough")
else:
if sqm_past_ok:
reason_close.append("Not dark enough anymore")
else:
reason_close.append("Still not dark enough")
if rain_now_ok:
if rain_past_ok:
reason_open.append("Dry long enough")
else:
reason_close.append("Not dry long enough")
else:
if rain_past_ok:
reason_close.append("Started raining")
else:
reason_close.append("Still raining")
if ups_now_ok1:
reason_open.append("UPS works")
ups_now_ok = True
else:
if ups_now_ok2:
reason_open.append("UPS selftest or on battery")
ups_now_ok = True
else:
reason_close.append("UPS on battery")
ups_now_ok = False
if infrared1_now_ok or infrared2_now_ok:
if infrared1_past_ok or infrared2_past_ok:
reason_open.append("Clear long enough")
else:
reason_close.append("Not clear long enough")
else:
if infrared1_past_ok or infrared2_past_ok:
reason_close.append("Too cloudy")
else:
reason_close.append("Still too cloudy")
if closing_event_past_ok:
reason_open.append("Roof has been closed long enough")
else:
reason_close.append("Roof was just closed")
#print(reason_open)
#print(reason_close)
if sensors_id and sqm_now_ok and sqm_past_ok and rain_now_ok and rain_past_ok and ups_now_ok and (infrared1_now_ok or infrared2_now_ok) and (infrared1_past_ok or infrared2_past_ok) and closing_event_past_ok:
open_ok = True
reasons = "All sensors are go"
#reasons = "{}".format(', '.join(reason_open))
# print("roof open ok, {}".format(', '.join(reason_open)))
else:
open_ok = False
reasons = "{}".format(', '.join(reason_close))
# print("roof open not ok: {}".format(', '.join(reason_close)))
# print(reasons)
utcnow = datetime.datetime.utcnow()
# last_open_ok = retrieve_previous_open_ok()
event = ''
roof_change = False
if last_open_ok is False:
if open_ok is True:
roof_change = True
event = "opening"
else:
event = "stays closed"
else:
if open_ok is False:
roof_change = True
event = "closing"
else:
event = "stays open"
print("Roof {}, {}".format(event, reasons))
if roof_change is True:
sendToMattermost(url, event + ", " + reasons)
store_event(utcnow, event, reasons)
# last_open_ok = retrieve_previous_open_ok()
# if last_open_ok != open_ok:
# sendToMattermost(url, open_ok_str + reasons)
store_roof_status(utcnow, sensors_id, open_ok, reasons)
print("")
if __name__ == "__main__":
main()
|
[
"MySQLdb.connect",
"pathlib.Path.home",
"json.loads",
"json.dumps",
"datetime.datetime.utcnow",
"datetime.timedelta"
] |
[((114, 213), 'MySQLdb.connect', 'MySQLdb.connect', ([], {'host': '"""lxc-rrd"""', 'port': '(3306)', 'user': '"""sens"""', 'passwd': '"""<PASSWORD>"""', 'db': '"""observatory1"""'}), "(host='lxc-rrd', port=3306, user='sens', passwd='<PASSWORD>',\n db='observatory1')\n", (129, 213), False, 'import MySQLdb\n'), ((16158, 16184), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (16182, 16184), False, 'import datetime\n'), ((12198, 12209), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (12207, 12209), False, 'from pathlib import Path\n'), ((619, 645), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (643, 645), False, 'import datetime\n'), ((648, 683), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': 'minutes'}), '(minutes=minutes)\n', (666, 683), False, 'import datetime\n'), ((11977, 11995), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (11987, 11995), False, 'import json\n'), ((11871, 11916), 'json.dumps', 'json.dumps', (['payload'], {'sort_keys': '(True)', 'indent': '(4)'}), '(payload, sort_keys=True, indent=4)\n', (11881, 11916), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import time
import numpy as np
import torch
import torch.utils.data
from optimization.training import evaluate, plot_samples
from utils.load_data import load_dataset
from os.path import join
parser = argparse.ArgumentParser(description='PyTorch Discrete Normalizing flows')
parser.add_argument('-d', '--dataset', type=str, default='cifar10',
choices=['cifar10', 'imagenet32', 'imagenet64', 'svhn'],
metavar='DATASET',
help='Dataset choice.')
parser.add_argument('-bs', '--batch_size', type=int, default=1000, metavar='BATCH_SIZE',
help='input batch size for training (default: 100)')
parser.add_argument('--snap_dir', type=str, default='')
def main():
args = parser.parse_args()
args.cuda = torch.cuda.is_available()
args.break_epoch = False
snap_dir = args.snap_dir = join('snapshots', args.snap_dir) + '/'
train_loader, val_loader, test_loader, args = load_dataset(args)
final_model = torch.load(snap_dir + 'a.model', map_location='cpu')
if args.cuda:
final_model = final_model.cuda()
# Just for timing at the moment.
with torch.no_grad():
final_model.eval()
timing_results = []
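        # CUDA kernels launch asynchronously; torch.cuda.synchronize() blocks
        # until queued work finishes, so time.time() measures the sampling
        # itself rather than just the launch latency. This block therefore
        # assumes a CUDA device is available.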
for i in range(100):
torch.cuda.synchronize()
start = time.time()
x_sample = final_model.sample(n_samples=100)
torch.cuda.synchronize()
duration = time.time() - start
timing_results.append(duration)
print('Timings: ', timing_results)
print('Mean time:', np.mean(timing_results))
plot_samples(final_model, args, epoch=9999, bpd=0.0)
if torch.cuda.device_count() > 1:
print("Let's use", torch.cuda.device_count(), "GPUs!")
final_model = torch.nn.DataParallel(final_model, dim=0)
test_bpd = evaluate(test_loader, final_model, args)
with open(snap_dir + 'log.txt', 'a') as ff:
msg = 'FINAL \ttest negative elbo bpd {:.4f}'.format(
test_bpd)
print(msg)
print(msg, file=ff)
test_bpd = evaluate(test_loader, final_model, args, iw_samples=1000)
with open(snap_dir + 'log.txt', 'a') as ff:
msg = 'FINAL \ttest negative log_px bpd {:.4f}'.format(
test_bpd)
print(msg)
print(msg, file=ff)
if __name__ == '__main__':
main()
|
[
"torch.cuda.synchronize",
"optimization.training.evaluate",
"argparse.ArgumentParser",
"torch.load",
"torch.cuda.device_count",
"time.time",
"numpy.mean",
"torch.cuda.is_available",
"optimization.training.plot_samples",
"torch.nn.DataParallel",
"torch.no_grad",
"os.path.join",
"utils.load_data.load_dataset"
] |
[((282, 355), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch Discrete Normalizing flows"""'}), "(description='PyTorch Discrete Normalizing flows')\n", (305, 355), False, 'import argparse\n'), ((869, 894), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (892, 894), False, 'import torch\n'), ((1047, 1065), 'utils.load_data.load_dataset', 'load_dataset', (['args'], {}), '(args)\n', (1059, 1065), False, 'from utils.load_data import load_dataset\n'), ((1085, 1137), 'torch.load', 'torch.load', (["(snap_dir + 'a.model')"], {'map_location': '"""cpu"""'}), "(snap_dir + 'a.model', map_location='cpu')\n", (1095, 1137), False, 'import torch\n'), ((1937, 1977), 'optimization.training.evaluate', 'evaluate', (['test_loader', 'final_model', 'args'], {}), '(test_loader, final_model, args)\n', (1945, 1977), False, 'from optimization.training import evaluate, plot_samples\n'), ((2178, 2235), 'optimization.training.evaluate', 'evaluate', (['test_loader', 'final_model', 'args'], {'iw_samples': '(1000)'}), '(test_loader, final_model, args, iw_samples=1000)\n', (2186, 2235), False, 'from optimization.training import evaluate, plot_samples\n'), ((957, 989), 'os.path.join', 'join', (['"""snapshots"""', 'args.snap_dir'], {}), "('snapshots', args.snap_dir)\n", (961, 989), False, 'from os.path import join\n'), ((1244, 1259), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1257, 1259), False, 'import torch\n'), ((1703, 1755), 'optimization.training.plot_samples', 'plot_samples', (['final_model', 'args'], {'epoch': '(9999)', 'bpd': '(0.0)'}), '(final_model, args, epoch=9999, bpd=0.0)\n', (1715, 1755), False, 'from optimization.training import evaluate, plot_samples\n'), ((1764, 1789), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (1787, 1789), False, 'import torch\n'), ((1880, 1921), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['final_model'], {'dim': '(0)'}), '(final_model, dim=0)\n', (1901, 1921), False, 'import torch\n'), ((1359, 1383), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (1381, 1383), False, 'import torch\n'), ((1404, 1415), 'time.time', 'time.time', ([], {}), '()\n', (1413, 1415), False, 'import time\n'), ((1485, 1509), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (1507, 1509), False, 'import torch\n'), ((1669, 1692), 'numpy.mean', 'np.mean', (['timing_results'], {}), '(timing_results)\n', (1676, 1692), True, 'import numpy as np\n'), ((1822, 1847), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (1845, 1847), False, 'import torch\n'), ((1533, 1544), 'time.time', 'time.time', ([], {}), '()\n', (1542, 1544), False, 'import time\n')]
|
# encoding: utf-8
import queue
from flow_detector import *
# Dict recording how many packets have been sniffed for each flow
flow_recorder = {}
# Queue used to buffer captured packets
q = queue.Queue()
def put_pkt_to_queue(pkts):
for pkt in pkts:
q.put(pkt)
def get_pkt_id(pkt):
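    # Build a flow identifier from the 5-tuple:
    # <src_ip>_<src_port>_<dst_ip>_<dst_port>_<protocol>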
if pkt.haslayer('IP'):
src_ip = pkt["IP"].src
dst_ip = pkt["IP"].dst
if pkt.haslayer('TCP'):
src_port = pkt['TCP'].sport
dst_port = pkt['TCP'].dport
protocol = 'TCP'
elif pkt.haslayer('UDP'):
src_port = pkt['UDP'].sport
dst_port = pkt['UDP'].dport
protocol = 'UDP'
        elif pkt.haslayer('ICMP'):  # the original ' ICMP' (leading space) never matched
src_port = "NULL"
dst_port = "NULL"
protocol = 'ICMP'
else:
src_port = "NULL"
dst_port = "NULL"
protocol = 'OTHERS'
else:
return ""
pkt_id = str(src_ip) + "_" + str(src_port) + "_" + str(dst_ip) + "_" + str(dst_port) + "_" + str(protocol)
return pkt_id
def make_dir(new_dir_path):
"""
create the new directory if it not exist
:param new_dir_path: absolutely path , the new directory
:return: absolutely path
"""
if os.path.exists(new_dir_path):
pass
else:
os.makedirs(new_dir_path)
return new_dir_path
# Per-packet trigger: save the packet and report whether its flow is full
def put_pkt_to_folder(pkt, pkt_id):
save_folder = make_dir(folder_root + os.sep + "flow" + os.sep + str(pkt_id))
wrpcap(save_folder + os.sep + str(random.randint(0, 1000)) + ".pcap", pkt)
# print(pkt_id, " dumped to ", save_folder, " success!")
    # Update the per-flow packet count
if pkt_id in flow_recorder:
flow_recorder[pkt_id] = flow_recorder[pkt_id] + 1
else:
flow_recorder[pkt_id] = 1
if is_full(pkt_id):
# flow_recorder.pop(pkt_id)
print(flow_recorder[pkt_id])
        flow_recorder[pkt_id] = 0  # reset the count
return 1
else:
return 0
def start_detect(pkt_id):
flow_array = transform_main(pkt_id)
label = detector_main(flow_array)
print(pkt_id, "is:", label)
return label
def dump_pkt_from_queue():
if q.empty():
# print("queue is empty!")
return "NULL", -1
else:
pkt = q.get()
pkt_id = get_pkt_id(pkt)
if pkt_id == "":
return "NULL", -1
else:
flag = put_pkt_to_folder(pkt, pkt_id)
return pkt_id, flag
# Run packet sniffing in a dedicated thread
def sniff_pkt():
while True:
        pkts = sniff(filter=sniff_filter, iface=sniff_iface, count=sniff_count)  # capture packets  # prn=lambda x: x.show()
put_pkt_to_queue(pkts)
def sniff_main(save_path):
    # Create a thread dedicated to listening for packets
thread1 = threading.Thread(target=sniff_pkt, name="thread1")
    # Start listening
thread1.start()
    # Keep trying to fetch packets from the queue
i = 0
while True:
pkt_id, flag = dump_pkt_from_queue()
if flag == 1:
label = start_detect(pkt_id)
save_label(save_path, label, i)
i += 1
def is_full(pkt_id):
if flow_recorder[pkt_id] >= pkt_num:
return True
else:
return False
if __name__ == '__main__':
    sniff_main("./labels")  # sniff_main requires a save_path; "./labels" is a hypothetical output directory
|
[
"queue.Queue"
] |
[((133, 146), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (144, 146), False, 'import queue\n')]
|
from discord.ext import commands
import difflib
from .message_generator import DefaultMessageGenerator, MessageGenerator
from typing import Optional, Mapping, Set, List
class DidYouMean(commands.Cog):
"""
Core class of this library.
Attributes
----------
bot
The bot object.
matcher_dict : Mapping[str, difflib.SequenceMatcher]
A dict for storing matchers.
max_suggest : int
Maximum number of suggestions.
"""
def __init__(self, bot) -> None:
self.bot = bot
self.matcher_dict: Mapping[str, difflib.SequenceMatcher] = {}
self.max_suggest = 3
self._command_names: Set[str] = set()
self._listup_commands(self.bot)
self._max_command_length = max((len(c) for c in self._command_names))
self._message_generator = DefaultMessageGenerator
def set_message_generator(self, generator) -> None:
"""
The function to set message generator.
Parameters
----------
generator
This class inherits from the `MessageGenerator` class.
Raises
------
TypeError
If the class does not inherit from `MessageGenerator`.
"""
if not isinstance(generator, MessageGenerator):
raise TypeError("Message generator must extend 'MessageGenerator'.")
self._message_generator = generator
def create_matcher(self, command_name: str) -> difflib.SequenceMatcher:
matcher = difflib.SequenceMatcher(None, command_name)
self.matcher_dict[command_name] = matcher
return matcher
def similar_factor_extraction(self, command: str) -> Optional[List[str]]:
matcher = self.matcher_dict.get(command) or self.create_matcher(command)
similar_cmd_list = []
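        # SequenceMatcher.ratio() returns a similarity in [0, 1]; the difflib
        # docs treat values above 0.6 as "close matches", hence the threshold.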
for name in self._command_names:
matcher.set_seq2(name)
ratio = matcher.ratio()
if ratio > 0.6:
similar_cmd_list.append((name, ratio))
similar_cmd_list.sort(key=lambda c: c[1], reverse=True)
if not similar_cmd_list:
return
return [c[0] for c in similar_cmd_list][:self.max_suggest]
def _listup_commands(self, group, prefix=None) -> None:
if prefix is None:
prefix = []
prefix_str = ' '.join(prefix) + ' ' if len(prefix) > 0 else ''
for command in group.commands:
if command.hidden:
continue
elif isinstance(command, commands.Group):
names = [command.name] + list(command.aliases)
for name in names:
self._command_names.add(prefix_str + name)
prefix.append(command.name)
self._listup_commands(command, prefix)
prefix.pop()
elif isinstance(command, commands.Command):
names = [command.name] + list(command.aliases)
for name in names:
self._command_names.add(prefix_str + name)
@commands.Cog.listener()
async def on_ready(self):
self._listup_commands(self.bot)
@commands.Cog.listener()
async def on_command_error(self, ctx, err) -> None:
if not isinstance(err, commands.CommandNotFound):
return
        # str.lstrip(prefix) strips a *character set*, not a prefix, which can
        # eat leading letters of the command name; slice the prefix off instead.
        invalid_command = ctx.message.content[len(ctx.prefix):][:self._max_command_length]
similar_list = self.similar_factor_extraction(invalid_command)
if similar_list is None:
return
await self._message_generator(invalid_command[:len(similar_list[0])], similar_list).send(ctx)
def setup(bot):
bot.add_cog(DidYouMean(bot))
|
[
"difflib.SequenceMatcher",
"discord.ext.commands.Cog.listener"
] |
[((3136, 3159), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (3157, 3159), False, 'from discord.ext import commands\n'), ((3240, 3263), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (3261, 3263), False, 'from discord.ext import commands\n'), ((1547, 1590), 'difflib.SequenceMatcher', 'difflib.SequenceMatcher', (['None', 'command_name'], {}), '(None, command_name)\n', (1570, 1590), False, 'import difflib\n')]
|
#!/usr/bin/env python3
import os,sys
import re
class Money():
def __init__(self):
self.resource_path = 'resource'
self.money_set = set()
self.digital_set = set()
self.get_money()
self.mon_r = self.get_money_r()
def get_money(self):
with open(self.resource_path+'/money_cn.txt') as f:
for line in f:
self.money_set.add(line.strip())
with open(self.resource_path+'/money_en.txt') as f:
for line in f:
self.money_set.add(line.strip())
with open(self.resource_path+'/digital.txt') as f:
for line in f:
self.digital_set.add(line.strip())
def get_money_r(self):
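        # Roughly: a leading digit/Chinese numeral, up to 30 more amount
        # characters, a currency unit (元/块/分/...), and a negative lookahead
        # rejecting false suffixes (儿/去/形/钟 plus entries from suf_error.txt);
        # the whole group may repeat 1-3 times for compound amounts.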
money_1 = u'([0-90-9两零一二三四五六七八九十百千万亿兆几壹贰叁肆伍陆柒捌玖拾]{1}[0-90-9,,两零一二三四五六七八九十百千万亿兆几壹贰叁肆伍陆柒捌玖拾\.]'
money_2 = u')){1,3}[0-90-9两零一二三四五六七八九]{0,1}'
mon_r = u'{0,30}(元|块|分|人民币|角|毛|RMB){1}(?!儿|去|形|钟|'
suf_error = []
with open(self.resource_path+'/suf_error.txt') as f:
for line in f:
suf_error.append(line.strip())
return money_1 + mon_r + '|'.join(suf_error) + money_2
def find_money(self, string):
T_list = ['O'] * len(string)
ite = re.finditer(self.mon_r, string)
if ite:
for _ in ite:
T_list[_.start()] = 'B-MNY'
for i in range(_.start() + 1, _.end() - 1):
T_list[i] = 'I-MNY'
T_list[_.end() - 1] = 'E-MNY'
stop = 0
for i in range(len(string)):
if i >= stop:
for j in range(len(string), i, -1):
if string[i:j] in self.money_set:
if i > 0 and string[i-1] in self.digital_set:
for k in range(i-1, -1, -1):
if string[k] in self.digital_set and k != 0:
T_list[k] = 'I-MNY'
elif string[k] in self.digital_set and k == 0:
T_list[k] = 'B-MNY'
else:
T_list[k+1] = 'B-MNY'
break
T_list[j-1] = 'E-MNY'
for k in range(i, j-1):
T_list[k] = 'I-MNY'
stop = j
break
return T_list
if __name__ == '__main__':
mny = Money()
string = '我要取1000块钱,存两万美元'
print(string)
print(mny.find_money(string))
while 1:
string = input('input:')
print(mny.find_money(string.strip()))
|
[
"re.finditer"
] |
[((1240, 1271), 're.finditer', 're.finditer', (['self.mon_r', 'string'], {}), '(self.mon_r, string)\n', (1251, 1271), False, 'import re\n')]
|
import click
import json
import csv
from toolz import get, get_in
@click.command()
@click.argument('report_file', type=click.File('r'))
@click.argument('weather_file', type=click.File('r'))
@click.argument('weather_join_file', type=click.File('w'))
def main(report_file, weather_file, weather_join_file):
weather_reader = csv.reader(weather_file)
# Load the weather into a dictionary.
weather_cache = {
# Extract the dict with the weather information.
(r[0], r[1]): get_in(["daily", "data", 0], json.loads(r[-1]), {})
for r in weather_reader
}
report_reader = csv.DictReader(report_file)
fieldnames = report_reader.fieldnames + [
"temperature_high",
"temperature_mid",
"temperature_low",
"dew_point",
"humidity",
"cloud_cover",
"moon_phase",
"precip_intensity",
"precip_probability",
"precip_type",
"pressure",
"summary",
"uv_index",
"visibility",
"wind_bearing",
"wind_speed"
]
writer = csv.DictWriter(weather_join_file, fieldnames=fieldnames)
writer.writeheader()
for line in report_reader:
weather = get((line["geohash"], line["date"]), weather_cache, {})
temperature_high = get("temperatureHigh", weather, None)
temperature_low = get("temperatureLow", weather, None)
line["temperature_high"] = temperature_high
line["temperature_mid"] = (
temperature_low + (temperature_high - temperature_low)/2
) if temperature_high and temperature_low else None
line["temperature_low"] = temperature_low
line["dew_point"] = get("dewPoint", weather, None)
line["humidity"] = get("humidity", weather, None)
line["cloud_cover"] = get("cloudCover", weather, None)
line["moon_phase"] = get("moonPhase", weather, None)
line["precip_intensity"] = get("precipIntensity", weather, None)
line["precip_probability"] = get("precipProbability", weather, None)
line["precip_type"] = get("precipType", weather, None)
line["pressure"] = get("pressure", weather, None)
line["summary"] = get("summary", weather, None)
line["uv_index"] = get("uvIndex", weather, None)
line["visibility"] = get("visibility", weather, None)
line["wind_bearing"] = get("windBearing", weather, None)
line["wind_speed"] = get("windSpeed", weather, None)
writer.writerow(line)
if __name__ == "__main__":
main()
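# --- Usage sketch (not part of the original script). File names are hypothetical;
# the weather CSV is keyed by (geohash, date) and carries a Dark-Sky-style JSON
# blob in its last column:
#   python join_weather.py report.csv weather.csv report_weather.csv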
|
[
"csv.reader",
"json.loads",
"toolz.get",
"csv.DictReader",
"click.File",
"click.command",
"csv.DictWriter"
] |
[((70, 85), 'click.command', 'click.command', ([], {}), '()\n', (83, 85), False, 'import click\n'), ((330, 354), 'csv.reader', 'csv.reader', (['weather_file'], {}), '(weather_file)\n', (340, 354), False, 'import csv\n'), ((610, 637), 'csv.DictReader', 'csv.DictReader', (['report_file'], {}), '(report_file)\n', (624, 637), False, 'import csv\n'), ((1080, 1136), 'csv.DictWriter', 'csv.DictWriter', (['weather_join_file'], {'fieldnames': 'fieldnames'}), '(weather_join_file, fieldnames=fieldnames)\n', (1094, 1136), False, 'import csv\n'), ((1213, 1268), 'toolz.get', 'get', (["(line['geohash'], line['date'])", 'weather_cache', '{}'], {}), "((line['geohash'], line['date']), weather_cache, {})\n", (1216, 1268), False, 'from toolz import get, get_in\n'), ((1297, 1334), 'toolz.get', 'get', (['"""temperatureHigh"""', 'weather', 'None'], {}), "('temperatureHigh', weather, None)\n", (1300, 1334), False, 'from toolz import get, get_in\n'), ((1361, 1397), 'toolz.get', 'get', (['"""temperatureLow"""', 'weather', 'None'], {}), "('temperatureLow', weather, None)\n", (1364, 1397), False, 'from toolz import get, get_in\n'), ((1702, 1732), 'toolz.get', 'get', (['"""dewPoint"""', 'weather', 'None'], {}), "('dewPoint', weather, None)\n", (1705, 1732), False, 'from toolz import get, get_in\n'), ((1760, 1790), 'toolz.get', 'get', (['"""humidity"""', 'weather', 'None'], {}), "('humidity', weather, None)\n", (1763, 1790), False, 'from toolz import get, get_in\n'), ((1821, 1853), 'toolz.get', 'get', (['"""cloudCover"""', 'weather', 'None'], {}), "('cloudCover', weather, None)\n", (1824, 1853), False, 'from toolz import get, get_in\n'), ((1883, 1914), 'toolz.get', 'get', (['"""moonPhase"""', 'weather', 'None'], {}), "('moonPhase', weather, None)\n", (1886, 1914), False, 'from toolz import get, get_in\n'), ((1950, 1987), 'toolz.get', 'get', (['"""precipIntensity"""', 'weather', 'None'], {}), "('precipIntensity', weather, None)\n", (1953, 1987), False, 'from toolz import get, get_in\n'), ((2025, 2064), 'toolz.get', 'get', (['"""precipProbability"""', 'weather', 'None'], {}), "('precipProbability', weather, None)\n", (2028, 2064), False, 'from toolz import get, get_in\n'), ((2095, 2127), 'toolz.get', 'get', (['"""precipType"""', 'weather', 'None'], {}), "('precipType', weather, None)\n", (2098, 2127), False, 'from toolz import get, get_in\n'), ((2155, 2185), 'toolz.get', 'get', (['"""pressure"""', 'weather', 'None'], {}), "('pressure', weather, None)\n", (2158, 2185), False, 'from toolz import get, get_in\n'), ((2212, 2241), 'toolz.get', 'get', (['"""summary"""', 'weather', 'None'], {}), "('summary', weather, None)\n", (2215, 2241), False, 'from toolz import get, get_in\n'), ((2269, 2298), 'toolz.get', 'get', (['"""uvIndex"""', 'weather', 'None'], {}), "('uvIndex', weather, None)\n", (2272, 2298), False, 'from toolz import get, get_in\n'), ((2328, 2360), 'toolz.get', 'get', (['"""visibility"""', 'weather', 'None'], {}), "('visibility', weather, None)\n", (2331, 2360), False, 'from toolz import get, get_in\n'), ((2392, 2425), 'toolz.get', 'get', (['"""windBearing"""', 'weather', 'None'], {}), "('windBearing', weather, None)\n", (2395, 2425), False, 'from toolz import get, get_in\n'), ((2455, 2486), 'toolz.get', 'get', (['"""windSpeed"""', 'weather', 'None'], {}), "('windSpeed', weather, None)\n", (2458, 2486), False, 'from toolz import get, get_in\n'), ((122, 137), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (132, 137), False, 'import click\n'), ((176, 191), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (186, 191), False, 'import click\n'), ((235, 250), 'click.File', 'click.File', (['"""w"""'], {}), "('w')\n", (245, 250), False, 'import click\n'), ((528, 545), 'json.loads', 'json.loads', (['r[-1]'], {}), '(r[-1])\n', (538, 545), False, 'import json\n')]
|
import sys
import optparse
from inspect import isclass
from scrapy.cmdline import (
_run_print_help,
_run_command,
_print_commands,
_print_unknown_command
)
class EntryPoint:
name = "scrapy-compose"
from scrapy.commands import ScrapyCommand as BaseCommand
_action = None
_cmd = None
_cmds = None
_parser = None
@staticmethod
def iscommand( obj ):
BaseCommand = EntryPoint.BaseCommand
return (
isclass( obj ) and
issubclass( obj, BaseCommand ) and
obj != BaseCommand
)
@property
def action( self ):
if not self._action:
argv = self.argv
if argv and not argv[0].startswith( "-" ):
self._action = argv.pop( 0 )
return self._action
@property
def commands( self ):
if not self._cmds:
from scrapy_compose.utils.load import package as load_package
cmds = {}
iscommand = self.iscommand
inproject = self.inproject
load_package(
"scrapy_compose.commands",
key = lambda c: (
iscommand( c ) and
( inproject or not c.requires_project ) and
cmds.update(
{ c.__module__.split(".")[-1]: c() }
)
)
)
self._cmds = cmds
return self._cmds
@property
def parser( self ):
if not self._parser:
self._parser = optparse.OptionParser(
conflict_handler = 'resolve',
formatter = optparse.TitledHelpFormatter(),
)
return self._parser
@property
def cmd( self ):
if not self._cmd:
from scrapy.crawler import CrawlerProcess
cmd = self.commands[ self.action ]
settings = self.settings
settings.setdict( cmd.default_settings, priority = "command" )
parser = self.parser
parser.usage = " ".join([ self.name, self.action, cmd.syntax() ])
parser.description = cmd.long_desc()
cmd.settings = settings
cmd.add_options( parser )
cmd.crawler_process = CrawlerProcess(settings)
self._cmd = cmd
return self._cmd
def __init__( self, argv = None, settings = None ):
from scrapy.utils.project import inside_project, get_project_settings
self.argv = ( sys.argv if argv is None else argv )[1:]
self.inproject = inside_project()
self.settings = get_project_settings() if settings is None else settings
self.settings.setmodule(
"scrapy_compose.compose_settings"
, priority = "default"
)
def print_header( self ):
import scrapy
p_str = "Scrapy " + scrapy.__version__ + " - "
if self.inproject:
p_str += "project : " + self.settings['BOT_NAME']
else:
p_str += "no active project"
print( "" )
def print_commands( self ):
self.print_header()
print("Usage:")
print(" " + self.name + " <command> [options] [args]\n")
print("Available commands:")
for c_name, cmd in sorted( self.commands.items() ):
print( " %-13s %s" % ( c_name, cmd.short_desc() ) )
if not self.inproject:
print( "" )
print( " [ more ] More commands available when run from project directory" )
print( "" )
print( 'Use "scrapy <command> -h" to see more info about a command' )
def print_unknown_command( self ):
self.print_header()
print( "Unknown or unavailable command: " + self.action )
print( 'Use "scrapy-compose" to see available commands' )
def __call__( self ):
action = self.action
if not action:
self.print_commands()
sys.exit(0)
elif (
action not in self.commands or
not self.inproject and self.cmd.requires_project
):
self.print_unknown_command()
sys.exit(2)
settings = self.settings
cmd = self.cmd
parser = self.parser
opts, args = parser.parse_args( args = self.argv )
_run_print_help(parser, cmd.process_options, args, opts)
_run_print_help(parser, _run_command, cmd, args, opts)
sys.exit(cmd.exitcode)
main = EntryPoint()
|
[
"inspect.isclass",
"scrapy.utils.project.inside_project",
"scrapy.utils.project.get_project_settings",
"scrapy.cmdline._run_print_help",
"optparse.TitledHelpFormatter",
"scrapy.crawler.CrawlerProcess",
"sys.exit"
] |
[((2074, 2090), 'scrapy.utils.project.inside_project', 'inside_project', ([], {}), '()\n', (2088, 2090), False, 'from scrapy.utils.project import inside_project, get_project_settings\n'), ((3528, 3584), 'scrapy.cmdline._run_print_help', '_run_print_help', (['parser', 'cmd.process_options', 'args', 'opts'], {}), '(parser, cmd.process_options, args, opts)\n', (3543, 3584), False, 'from scrapy.cmdline import _run_print_help, _run_command, _print_commands, _print_unknown_command\n'), ((3587, 3641), 'scrapy.cmdline._run_print_help', '_run_print_help', (['parser', '_run_command', 'cmd', 'args', 'opts'], {}), '(parser, _run_command, cmd, args, opts)\n', (3602, 3641), False, 'from scrapy.cmdline import _run_print_help, _run_command, _print_commands, _print_unknown_command\n'), ((3644, 3666), 'sys.exit', 'sys.exit', (['cmd.exitcode'], {}), '(cmd.exitcode)\n', (3652, 3666), False, 'import sys\n'), ((419, 431), 'inspect.isclass', 'isclass', (['obj'], {}), '(obj)\n', (426, 431), False, 'from inspect import isclass\n'), ((1806, 1830), 'scrapy.crawler.CrawlerProcess', 'CrawlerProcess', (['settings'], {}), '(settings)\n', (1820, 1830), False, 'from scrapy.crawler import CrawlerProcess\n'), ((2110, 2132), 'scrapy.utils.project.get_project_settings', 'get_project_settings', ([], {}), '()\n', (2130, 2132), False, 'from scrapy.utils.project import inside_project, get_project_settings\n'), ((3241, 3252), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3249, 3252), False, 'import sys\n'), ((3392, 3403), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (3400, 3403), False, 'import sys\n'), ((1302, 1332), 'optparse.TitledHelpFormatter', 'optparse.TitledHelpFormatter', ([], {}), '()\n', (1330, 1332), False, 'import optparse\n')]
|
import pytest
import datetime
from calendar import timegm
import jwt
from cryptography.hazmat.primitives.asymmetric import rsa, ec
from cryptography.hazmat.backends import default_backend
from pyspiffe.svid import INVALID_INPUT_ERROR
from pyspiffe.svid.jwt_svid import JwtSvid
from pyspiffe.bundle.jwt_bundle.jwt_bundle import JwtBundle
from pyspiffe.exceptions import ArgumentError
from pyspiffe.svid.exceptions import (
TokenExpiredError,
JwtSvidError,
InvalidTokenError,
MissingClaimError,
)
from pyspiffe.bundle.jwt_bundle.exceptions import AuthorityNotFoundError
from test.svid.test_utils import (
get_keys_pems,
create_jwt,
DEFAULT_SPIFFE_ID,
DEFAULT_AUDIENCE,
DEFAULT_KEY,
DEFAULT_TRUST_DOMAIN,
DEFAULT_EXPIRY,
)
JWT_BUNDLE = JwtBundle(DEFAULT_TRUST_DOMAIN, {'kid1': DEFAULT_KEY.public_key()})
"""
parse_insecure tests
"""
@pytest.mark.parametrize(
'test_input_token,test_input_audience, expected',
[
('', [], INVALID_INPUT_ERROR.format('token cannot be empty.')),
('', None, INVALID_INPUT_ERROR.format('token cannot be empty.')),
(None, [], INVALID_INPUT_ERROR.format('token cannot be empty.')),
(None, None, INVALID_INPUT_ERROR.format('token cannot be empty.')),
],
)
def test_parse_insecure_invalid_input(test_input_token, test_input_audience, expected):
with pytest.raises(ArgumentError) as exception:
JwtSvid.parse_insecure(test_input_token, test_input_audience)
assert str(exception.value) == expected
@pytest.mark.parametrize(
'test_input_token,test_input_audience, expected',
[
(
jwt.encode(
{
'sub': 'spiffeid://somewhere.over.the',
'exp': timegm(
(
datetime.datetime.utcnow() + datetime.timedelta(hours=72)
).utctimetuple()
),
},
'secret',
headers={'alg': 'RS256', 'typ': 'JOSE'},
),
["spire"],
str(MissingClaimError('aud')),
), # no aud
(
jwt.encode(
{
'aud': ['spire'],
'sub': 'spiffeid://somewhere.over.the',
},
'secret',
headers={'alg': 'ES384', 'typ': 'JWT'},
),
["spire"],
str(MissingClaimError('exp')),
), # no exp
(
jwt.encode(
{
'aud': ['spire'],
'exp': timegm(
(
datetime.datetime.utcnow() - datetime.timedelta(hours=1)
).utctimetuple()
),
},
'secret',
headers={'alg': 'RS512', 'typ': 'JWT'},
),
["spire"],
str(MissingClaimError('sub')),
), # no sub
(
jwt.encode(
{
'aud': ['spire'],
'sub': 'spiffeid://somewhere.over.the',
'exp': timegm(
(
datetime.datetime.utcnow() - datetime.timedelta(hours=1)
).utctimetuple()
),
},
'secret',
headers={'alg': 'PS512', 'typ': 'JOSE'},
),
['spire'],
str(TokenExpiredError()),
), # expired token
],
)
def test_parse_insecure_invalid_claims(test_input_token, test_input_audience, expected):
with pytest.raises(JwtSvidError) as exception:
JwtSvid.parse_insecure(test_input_token, test_input_audience)
assert str(exception.value) == expected
@pytest.mark.parametrize(
'test_input_token,test_input_audience',
[
(
'<KEY>',
['spire'],
), # middle
(
'<KEY>',
['spire'],
), # first
],
)
def test_parse_insecure_invalid_token(test_input_token, test_input_audience):
with pytest.raises(InvalidTokenError):
JwtSvid.parse_insecure(test_input_token, test_input_audience)
@pytest.mark.parametrize(
'test_input_token,test_input_audience, expected',
[
(
jwt.encode(
{
'aud': ['spire'],
'sub': 'spiffe://test.org',
'exp': timegm(
(
datetime.datetime.utcnow() + datetime.timedelta(hours=100)
).utctimetuple()
),
},
'secret',
headers={'alg': 'RS256', 'typ': 'JWT'},
),
['spire'],
'spiffe://test.org',
),
(
jwt.encode(
{
'aud': ['spire', 'test', 'valid'],
'sub': 'spiffe://test.com.br',
'exp': timegm(
(
datetime.datetime.utcnow() + datetime.timedelta(hours=1)
).utctimetuple()
),
},
'secret key',
headers={'alg': 'PS384', 'typ': 'JOSE'},
),
{'spire', 'test'},
"spiffe://test.com.br",
),
],
)
def test_parse_insecure_valid(test_input_token, test_input_audience, expected):
result = JwtSvid.parse_insecure(test_input_token, test_input_audience)
assert result.token == test_input_token
assert str(result.spiffe_id) == expected
"""
parse_and_validate tests
"""
@pytest.mark.parametrize(
'test_input_token,test_input_jwt_bundle, test_input_audience, expected',
[
(
'',
None,
['spire'],
INVALID_INPUT_ERROR.format('token cannot be empty.'),
),
(
'eyJhbGciOiJFUzI1NiIsImtpZCI6Imd1eTdsOWZSQzhkQW1IUmFtaFpQbktRa3lId2FHQzR0IiwidHlwIjoiSldUIn0.eyJhdWQiOlsib3RoZXItc2VydmljZSJdLCJleHAiOjE2MTIyOTAxODMsImlhdCI6MTYxMjI4OTg4Mywic3ViIjoic3hthrtmZlOi8vZXhhbXBsZS5vcmcvc2VydmljZSJ9.W7CLQvYVBQ8Zg3ELcuB1K9hE4I9wyCMB_8PJTZXbjnlMBcgd0VDbSm5OjoqcGQF975eaVl_AdkryJ_lzxsEQ4A',
None,
['spire'],
INVALID_INPUT_ERROR.format('jwt_bundle cannot be empty.'),
),
],
)
def test_parse_and_validate_invalid_parameters(
test_input_token, test_input_jwt_bundle, test_input_audience, expected
):
with pytest.raises(ArgumentError) as exception:
JwtSvid.parse_and_validate(
test_input_token, test_input_jwt_bundle, test_input_audience
)
assert str(exception.value) == expected
def test_parse_and_validate_invalid_missing_kid_header():
token = create_jwt(kid='')
with pytest.raises(InvalidTokenError) as exception:
JwtSvid.parse_and_validate(token, JWT_BUNDLE, ['spire'])
assert str(exception.value) == 'key_id cannot be empty.'
def test_parse_and_validate_invalid_missing_sub():
token = create_jwt(spiffe_id='')
with pytest.raises(InvalidTokenError) as exception:
JwtSvid.parse_and_validate(token, JWT_BUNDLE, ['spire'])
assert str(exception.value) == 'SPIFFE ID cannot be empty.'
def test_parse_and_validate_invalid_missing_kid():
key_id = 'kid10'
token = create_jwt(kid=key_id)
with pytest.raises(AuthorityNotFoundError) as exception:
JwtSvid.parse_and_validate(token, JWT_BUNDLE, ['spire'])
assert str(exception.value) == 'Key (' + key_id + ') not found in authorities.'
def test_parse_and_validate_invalid_kid_mismatch():
rsa_key2 = rsa.generate_private_key(public_exponent=65537, key_size=2048)
jwt_bundle = JwtBundle(
DEFAULT_TRUST_DOMAIN,
{'kid1': DEFAULT_KEY.public_key(), 'kid10': rsa_key2.public_key()},
)
token = create_jwt(kid='kid10')
with pytest.raises(InvalidTokenError) as exception:
JwtSvid.parse_and_validate(token, jwt_bundle, ['spire'])
assert str(exception.value) == 'Signature verification failed.'
def test_parse_and_validate_valid_token_RSA():
token = create_jwt()
jwt_svid = JwtSvid.parse_and_validate(token, JWT_BUNDLE, ['spire'])
assert jwt_svid.audience == DEFAULT_AUDIENCE
assert str(jwt_svid.spiffe_id) == DEFAULT_SPIFFE_ID
assert jwt_svid.expiry == DEFAULT_EXPIRY
assert jwt_svid.token == token
def test_parse_and_validate_valid_token_EC():
ec_key = ec.generate_private_key(ec.SECP384R1(), default_backend())
jwt_bundle = JwtBundle(DEFAULT_TRUST_DOMAIN, {'kid_ec': ec_key.public_key()})
ec_key_pem, _ = get_keys_pems(ec_key)
token = create_jwt(ec_key_pem, 'kid_ec', alg='ES512')
jwt_svid = JwtSvid.parse_and_validate(token, jwt_bundle, ['spire'])
assert jwt_svid.audience == DEFAULT_AUDIENCE
assert str(jwt_svid.spiffe_id) == DEFAULT_SPIFFE_ID
assert jwt_svid.expiry == DEFAULT_EXPIRY
assert jwt_svid.token == token
def test_parse_and_validate_valid_token_multiple_keys_bundle():
ec_key = ec.generate_private_key(ec.SECP521R1(), default_backend())
jwt_bundle = JwtBundle(
DEFAULT_TRUST_DOMAIN,
{'kid_rsa': DEFAULT_KEY.public_key(), 'kid_ec': ec_key.public_key()},
)
ec_key_pem, _ = get_keys_pems(ec_key)
token = create_jwt(ec_key_pem, kid='kid_ec', alg='ES512')
jwt_svid1 = JwtSvid.parse_and_validate(token, jwt_bundle, ['spire'])
assert jwt_svid1.audience == DEFAULT_AUDIENCE
assert str(jwt_svid1.spiffe_id) == DEFAULT_SPIFFE_ID
assert jwt_svid1.expiry == DEFAULT_EXPIRY
assert jwt_svid1.token == token
token2 = create_jwt(kid='kid_rsa')
jwt_svid2 = JwtSvid.parse_and_validate(token2, jwt_bundle, ['spire'])
assert jwt_svid2.audience == DEFAULT_AUDIENCE
assert str(jwt_svid2.spiffe_id) == DEFAULT_SPIFFE_ID
assert jwt_svid2.expiry == DEFAULT_EXPIRY
assert jwt_svid2.token == token2
|
[
"pyspiffe.svid.INVALID_INPUT_ERROR.format",
"cryptography.hazmat.primitives.asymmetric.ec.SECP521R1",
"test.svid.test_utils.DEFAULT_KEY.public_key",
"pyspiffe.svid.exceptions.MissingClaimError",
"cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key",
"jwt.encode",
"datetime.datetime.utcnow",
"test.svid.test_utils.get_keys_pems",
"pytest.raises",
"datetime.timedelta",
"cryptography.hazmat.primitives.asymmetric.ec.SECP384R1",
"pytest.mark.parametrize",
"cryptography.hazmat.backends.default_backend",
"test.svid.test_utils.create_jwt",
"pyspiffe.svid.jwt_svid.JwtSvid.parse_insecure",
"pyspiffe.svid.exceptions.TokenExpiredError",
"pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate"
] |
[((3833, 3946), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_input_token,test_input_audience"""', "[('<KEY>', ['spire']), ('<KEY>', ['spire'])]"], {}), "('test_input_token,test_input_audience', [('<KEY>',\n ['spire']), ('<KEY>', ['spire'])])\n", (3856, 3946), False, 'import pytest\n'), ((5547, 5608), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_insecure', 'JwtSvid.parse_insecure', (['test_input_token', 'test_input_audience'], {}), '(test_input_token, test_input_audience)\n', (5569, 5608), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((6873, 6891), 'test.svid.test_utils.create_jwt', 'create_jwt', ([], {'kid': '""""""'}), "(kid='')\n", (6883, 6891), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((7140, 7164), 'test.svid.test_utils.create_jwt', 'create_jwt', ([], {'spiffe_id': '""""""'}), "(spiffe_id='')\n", (7150, 7164), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((7437, 7459), 'test.svid.test_utils.create_jwt', 'create_jwt', ([], {'kid': 'key_id'}), '(kid=key_id)\n', (7447, 7459), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((7740, 7802), 'cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key', 'rsa.generate_private_key', ([], {'public_exponent': '(65537)', 'key_size': '(2048)'}), '(public_exponent=65537, key_size=2048)\n', (7764, 7802), False, 'from cryptography.hazmat.primitives.asymmetric import rsa, ec\n'), ((7955, 7978), 'test.svid.test_utils.create_jwt', 'create_jwt', ([], {'kid': '"""kid10"""'}), "(kid='kid10')\n", (7965, 7978), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((8230, 8242), 'test.svid.test_utils.create_jwt', 'create_jwt', ([], {}), '()\n', (8240, 8242), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((8258, 8314), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token', 'JWT_BUNDLE', "['spire']"], {}), "(token, JWT_BUNDLE, ['spire'])\n", (8284, 8314), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((8723, 8744), 'test.svid.test_utils.get_keys_pems', 'get_keys_pems', (['ec_key'], {}), '(ec_key)\n', (8736, 8744), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((8757, 8802), 'test.svid.test_utils.create_jwt', 'create_jwt', (['ec_key_pem', '"""kid_ec"""'], {'alg': '"""ES512"""'}), "(ec_key_pem, 'kid_ec', alg='ES512')\n", (8767, 8802), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((8818, 8874), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token', 'jwt_bundle', "['spire']"], {}), "(token, jwt_bundle, ['spire'])\n", (8844, 8874), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((9360, 9381), 'test.svid.test_utils.get_keys_pems', 'get_keys_pems', (['ec_key'], {}), '(ec_key)\n', (9373, 9381), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, 
DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((9395, 9444), 'test.svid.test_utils.create_jwt', 'create_jwt', (['ec_key_pem'], {'kid': '"""kid_ec"""', 'alg': '"""ES512"""'}), "(ec_key_pem, kid='kid_ec', alg='ES512')\n", (9405, 9444), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((9461, 9517), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token', 'jwt_bundle', "['spire']"], {}), "(token, jwt_bundle, ['spire'])\n", (9487, 9517), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((9721, 9746), 'test.svid.test_utils.create_jwt', 'create_jwt', ([], {'kid': '"""kid_rsa"""'}), "(kid='kid_rsa')\n", (9731, 9746), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((9763, 9820), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token2', 'jwt_bundle', "['spire']"], {}), "(token2, jwt_bundle, ['spire'])\n", (9789, 9820), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((819, 843), 'test.svid.test_utils.DEFAULT_KEY.public_key', 'DEFAULT_KEY.public_key', ([], {}), '()\n', (841, 843), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((1371, 1399), 'pytest.raises', 'pytest.raises', (['ArgumentError'], {}), '(ArgumentError)\n', (1384, 1399), False, 'import pytest\n'), ((1422, 1483), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_insecure', 'JwtSvid.parse_insecure', (['test_input_token', 'test_input_audience'], {}), '(test_input_token, test_input_audience)\n', (1444, 1483), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((3673, 3700), 'pytest.raises', 'pytest.raises', (['JwtSvidError'], {}), '(JwtSvidError)\n', (3686, 3700), False, 'import pytest\n'), ((3723, 3784), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_insecure', 'JwtSvid.parse_insecure', (['test_input_token', 'test_input_audience'], {}), '(test_input_token, test_input_audience)\n', (3745, 3784), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((4153, 4185), 'pytest.raises', 'pytest.raises', (['InvalidTokenError'], {}), '(InvalidTokenError)\n', (4166, 4185), False, 'import pytest\n'), ((4195, 4256), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_insecure', 'JwtSvid.parse_insecure', (['test_input_token', 'test_input_audience'], {}), '(test_input_token, test_input_audience)\n', (4217, 4256), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((6595, 6623), 'pytest.raises', 'pytest.raises', (['ArgumentError'], {}), '(ArgumentError)\n', (6608, 6623), False, 'import pytest\n'), ((6646, 6738), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['test_input_token', 'test_input_jwt_bundle', 'test_input_audience'], {}), '(test_input_token, test_input_jwt_bundle,\n test_input_audience)\n', (6672, 6738), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((6902, 6934), 'pytest.raises', 'pytest.raises', (['InvalidTokenError'], {}), '(InvalidTokenError)\n', (6915, 6934), False, 'import pytest\n'), ((6957, 7013), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token', 'JWT_BUNDLE', "['spire']"], {}), "(token, JWT_BUNDLE, ['spire'])\n", (6983, 7013), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((7175, 7207), 
'pytest.raises', 'pytest.raises', (['InvalidTokenError'], {}), '(InvalidTokenError)\n', (7188, 7207), False, 'import pytest\n'), ((7230, 7286), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token', 'JWT_BUNDLE', "['spire']"], {}), "(token, JWT_BUNDLE, ['spire'])\n", (7256, 7286), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((7470, 7507), 'pytest.raises', 'pytest.raises', (['AuthorityNotFoundError'], {}), '(AuthorityNotFoundError)\n', (7483, 7507), False, 'import pytest\n'), ((7530, 7586), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token', 'JWT_BUNDLE', "['spire']"], {}), "(token, JWT_BUNDLE, ['spire'])\n", (7556, 7586), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((7989, 8021), 'pytest.raises', 'pytest.raises', (['InvalidTokenError'], {}), '(InvalidTokenError)\n', (8002, 8021), False, 'import pytest\n'), ((8044, 8100), 'pyspiffe.svid.jwt_svid.JwtSvid.parse_and_validate', 'JwtSvid.parse_and_validate', (['token', 'jwt_bundle', "['spire']"], {}), "(token, jwt_bundle, ['spire'])\n", (8070, 8100), False, 'from pyspiffe.svid.jwt_svid import JwtSvid\n'), ((8585, 8599), 'cryptography.hazmat.primitives.asymmetric.ec.SECP384R1', 'ec.SECP384R1', ([], {}), '()\n', (8597, 8599), False, 'from cryptography.hazmat.primitives.asymmetric import rsa, ec\n'), ((8601, 8618), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (8616, 8618), False, 'from cryptography.hazmat.backends import default_backend\n'), ((9163, 9177), 'cryptography.hazmat.primitives.asymmetric.ec.SECP521R1', 'ec.SECP521R1', ([], {}), '()\n', (9175, 9177), False, 'from cryptography.hazmat.primitives.asymmetric import rsa, ec\n'), ((9179, 9196), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (9194, 9196), False, 'from cryptography.hazmat.backends import default_backend\n'), ((986, 1038), 'pyspiffe.svid.INVALID_INPUT_ERROR.format', 'INVALID_INPUT_ERROR.format', (['"""token cannot be empty."""'], {}), "('token cannot be empty.')\n", (1012, 1038), False, 'from pyspiffe.svid import INVALID_INPUT_ERROR\n'), ((1060, 1112), 'pyspiffe.svid.INVALID_INPUT_ERROR.format', 'INVALID_INPUT_ERROR.format', (['"""token cannot be empty."""'], {}), "('token cannot be empty.')\n", (1086, 1112), False, 'from pyspiffe.svid import INVALID_INPUT_ERROR\n'), ((1134, 1186), 'pyspiffe.svid.INVALID_INPUT_ERROR.format', 'INVALID_INPUT_ERROR.format', (['"""token cannot be empty."""'], {}), "('token cannot be empty.')\n", (1160, 1186), False, 'from pyspiffe.svid import INVALID_INPUT_ERROR\n'), ((1210, 1262), 'pyspiffe.svid.INVALID_INPUT_ERROR.format', 'INVALID_INPUT_ERROR.format', (['"""token cannot be empty."""'], {}), "('token cannot be empty.')\n", (1236, 1262), False, 'from pyspiffe.svid import INVALID_INPUT_ERROR\n'), ((2166, 2290), 'jwt.encode', 'jwt.encode', (["{'aud': ['spire'], 'sub': 'spiffeid://somewhere.over.the'}", '"""secret"""'], {'headers': "{'alg': 'ES384', 'typ': 'JWT'}"}), "({'aud': ['spire'], 'sub': 'spiffeid://somewhere.over.the'},\n 'secret', headers={'alg': 'ES384', 'typ': 'JWT'})\n", (2176, 2290), False, 'import jwt\n'), ((5928, 5980), 'pyspiffe.svid.INVALID_INPUT_ERROR.format', 'INVALID_INPUT_ERROR.format', (['"""token cannot be empty."""'], {}), "('token cannot be empty.')\n", (5954, 5980), False, 'from pyspiffe.svid import INVALID_INPUT_ERROR\n'), ((6381, 6438), 'pyspiffe.svid.INVALID_INPUT_ERROR.format', 'INVALID_INPUT_ERROR.format', (['"""jwt_bundle cannot be 
empty."""'], {}), "('jwt_bundle cannot be empty.')\n", (6407, 6438), False, 'from pyspiffe.svid import INVALID_INPUT_ERROR\n'), ((7878, 7902), 'test.svid.test_utils.DEFAULT_KEY.public_key', 'DEFAULT_KEY.public_key', ([], {}), '()\n', (7900, 7902), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((9276, 9300), 'test.svid.test_utils.DEFAULT_KEY.public_key', 'DEFAULT_KEY.public_key', ([], {}), '()\n', (9298, 9300), False, 'from test.svid.test_utils import get_keys_pems, create_jwt, DEFAULT_SPIFFE_ID, DEFAULT_AUDIENCE, DEFAULT_KEY, DEFAULT_TRUST_DOMAIN, DEFAULT_EXPIRY\n'), ((2096, 2120), 'pyspiffe.svid.exceptions.MissingClaimError', 'MissingClaimError', (['"""aud"""'], {}), "('aud')\n", (2113, 2120), False, 'from pyspiffe.svid.exceptions import TokenExpiredError, JwtSvidError, InvalidTokenError, MissingClaimError\n'), ((2449, 2473), 'pyspiffe.svid.exceptions.MissingClaimError', 'MissingClaimError', (['"""exp"""'], {}), "('exp')\n", (2466, 2473), False, 'from pyspiffe.svid.exceptions import TokenExpiredError, JwtSvidError, InvalidTokenError, MissingClaimError\n'), ((2952, 2976), 'pyspiffe.svid.exceptions.MissingClaimError', 'MissingClaimError', (['"""sub"""'], {}), "('sub')\n", (2969, 2976), False, 'from pyspiffe.svid.exceptions import TokenExpiredError, JwtSvidError, InvalidTokenError, MissingClaimError\n'), ((3516, 3535), 'pyspiffe.svid.exceptions.TokenExpiredError', 'TokenExpiredError', ([], {}), '()\n', (3533, 3535), False, 'from pyspiffe.svid.exceptions import TokenExpiredError, JwtSvidError, InvalidTokenError, MissingClaimError\n'), ((1818, 1844), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1842, 1844), False, 'import datetime\n'), ((1847, 1875), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(72)'}), '(hours=72)\n', (1865, 1875), False, 'import datetime\n'), ((2676, 2702), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2700, 2702), False, 'import datetime\n'), ((2705, 2732), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (2723, 2732), False, 'import datetime\n'), ((3239, 3265), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3263, 3265), False, 'import datetime\n'), ((3268, 3295), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (3286, 3295), False, 'import datetime\n'), ((4572, 4598), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4596, 4598), False, 'import datetime\n'), ((4601, 4630), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(100)'}), '(hours=100)\n', (4619, 4630), False, 'import datetime\n'), ((5125, 5151), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (5149, 5151), False, 'import datetime\n'), ((5154, 5181), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (5172, 5181), False, 'import datetime\n')]
|
import falcon
import json
import pathlib
from helper.log import logger
from src.middleware import HttpMethodValidator
from helper.utils import Constants
from src.query_builder import RunQuery
from src.db import DataBase
from falcon_swagger_ui import register_swaggerui_app
try:
logger.info("Connecting to Database")
database = DataBase(True)
logger.info(database)
logger.info("Connection successful")
except Exception as ex:
logger.info("Error " + str(ex))
raise Exception("Error Couldn't connect to %s Database" % (Constants.database.value))
class Home:
def on_get(self, req, resp):
logger.info("Sending response")
resp.status = falcon.HTTP_200
resp.body = json.dumps([{Constants.message.value: "server works"}], ensure_ascii=False)
SWAGGERUI_URL = '/swagger'
SCHEMA_URL = '/static/v1/swagger.json'
STATIC_PATH = pathlib.Path(__file__).parent / 'static'
home = Home()
search = RunQuery(database)
api = falcon.API(middleware=[HttpMethodValidator()])
api.add_static_route('/static', str(STATIC_PATH))
api.add_route('/', home)
api.add_route('/api/v1/embl/search', search)
page_title = 'EMBL search API doc'
register_swaggerui_app(
api, SWAGGERUI_URL, SCHEMA_URL,
page_title=page_title,
config={'supportedSubmitMethods': ['get'], }
)
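# Editor's note: `api` is a plain WSGI callable, so the app can be served by any
# WSGI server, e.g. `gunicorn <module>:api` (the module path depends on this file's name).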
|
[
"helper.log.logger.info",
"json.dumps",
"src.query_builder.RunQuery",
"src.db.DataBase",
"pathlib.Path",
"falcon_swagger_ui.register_swaggerui_app",
"src.middleware.HttpMethodValidator"
] |
[((936, 954), 'src.query_builder.RunQuery', 'RunQuery', (['database'], {}), '(database)\n', (944, 954), False, 'from src.query_builder import RunQuery\n'), ((1165, 1291), 'falcon_swagger_ui.register_swaggerui_app', 'register_swaggerui_app', (['api', 'SWAGGERUI_URL', 'SCHEMA_URL'], {'page_title': 'page_title', 'config': "{'supportedSubmitMethods': ['get']}"}), "(api, SWAGGERUI_URL, SCHEMA_URL, page_title=\n page_title, config={'supportedSubmitMethods': ['get']})\n", (1187, 1291), False, 'from falcon_swagger_ui import register_swaggerui_app\n'), ((283, 320), 'helper.log.logger.info', 'logger.info', (['"""Connecting to Database"""'], {}), "('Connecting to Database')\n", (294, 320), False, 'from helper.log import logger\n'), ((336, 350), 'src.db.DataBase', 'DataBase', (['(True)'], {}), '(True)\n', (344, 350), False, 'from src.db import DataBase\n'), ((355, 376), 'helper.log.logger.info', 'logger.info', (['database'], {}), '(database)\n', (366, 376), False, 'from helper.log import logger\n'), ((381, 417), 'helper.log.logger.info', 'logger.info', (['"""Connection successful"""'], {}), "('Connection successful')\n", (392, 417), False, 'from helper.log import logger\n'), ((623, 654), 'helper.log.logger.info', 'logger.info', (['"""Sending response"""'], {}), "('Sending response')\n", (634, 654), False, 'from helper.log import logger\n'), ((713, 788), 'json.dumps', 'json.dumps', (["[{Constants.message.value: 'server works'}]"], {'ensure_ascii': '(False)'}), "([{Constants.message.value: 'server works'}], ensure_ascii=False)\n", (723, 788), False, 'import json\n'), ((871, 893), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (883, 893), False, 'import pathlib\n'), ((984, 1005), 'src.middleware.HttpMethodValidator', 'HttpMethodValidator', ([], {}), '()\n', (1003, 1005), False, 'from src.middleware import HttpMethodValidator\n')]
|
import numpy as np
from manimlib import *
class Quaternion:
def __init__(self, x=None, y=0, z=0, w=1):
"""Quaternion style [x, y, z, w]"""
if issubclass(type(x), (np.ndarray, list, tuple)):
self._x = x[0]
self._y = x[1]
self._z = x[2]
self._w = x[3]
else:
if x is None:
x = 0
self._x = x
self._y = y
self._z = z
self._w = w
self._vec = np.array([self._x, self._y, self._z])
self._q = np.array([*self._vec, self._w])
def _set_q(self):
self._vec = np.array([self._x, self._y, self._z])
self._q = np.array([*self._vec, self._w])
def to_array(self):
return self._q
def normalise(self):
        L = np.linalg.norm(self._q)  # norm of the full quaternion; the vector norm alone would not yield a unit q
self._x /= L
self._y /= L
self._z /= L
self._w /= L
self._set_q()
def slerp(self):
"""TODO"""
pass
def multi(self, *quats):
q = self
for qi in quats:
q = Quaternion.multiply_quat_2(q, qi)
self._vec = q._vec
self._q = q._q
# self._set_q()
return q
@staticmethod
def multiply_quat(q1, q2):
"""reference http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/code/index.htm"""
x = q1.x * q2.w + q1.y * q2.z - q1.z * q2.y + q1.w * q2.x
y = -q1.x * q2.z + q1.y * q2.w + q1.z * q2.x + q1.w * q2.y
z = q1.x * q2.y - q1.y * q2.x + q1.z * q2.w + q1.w * q2.z
w = -q1.x * q2.x - q1.y * q2.y - q1.z * q2.z + q1.w * q2.w
new_q = object.__new__(Quaternion)
new_q.__init__(x, y, z, w)
return new_q
@staticmethod
def multiply_quat_2(q1, q2):
"""Graßmann Product"""
v1 = q1._vec
v2 = q2._vec
w1 = q1._w
w2 = q2._w
vec = w1 * v2 + w2 * v1 + np.cross(v1, v2)
w = w1 * w2 - v1.dot(v2)
new_q = object.__new__(Quaternion)
new_q.__init__([*vec, w])
return new_q
def __new__(cls, *args, **kwargs):
return object.__new__(cls)
def copy(self):
obj = object.__new__(Quaternion)
obj.__init__(*self._q)
return obj
def set_x(self, value):
self._x = value
self._set_q()
def set_y(self, value):
self._y = value
self._set_q()
def set_z(self, value):
self._z = value
self._set_q()
def set_w(self, value):
self._w = value
self._set_q()
def set_from_euler(self):
"""TODO"""
pass
def set_from_axis_angle(self, axis: np.ndarray, angle):
axis = normalize(np.array(axis))
half_angle = angle / 2
s = np.sin(half_angle)
self._x = axis[0] * s
self._y = axis[1] * s
self._z = axis[2] * s
self._w = np.cos(half_angle)
self._set_q()
return self
def conjugate(self, in_place=True):
if in_place:
self._vec *= -1
self._set_q()
return self
else:
q = self.copy()
q._vec *= -1
q._set_q()
return q
def invert(self):
return self.conjugate()
def dot(self, v):
return self._q.dot(v)
def __str__(self):
return self._q.__str__()
@property
def x(self):
return self._vec[0]
@property
def y(self):
return self._vec[1]
@property
def z(self):
return self._vec[2]
@property
def w(self):
return self._w
if __name__ == "__main__":
axis = np.array([1, 1, 1])
q1 = Quaternion().set_from_axis_angle(axis, 20 * DEGREES)
q2 = Quaternion().set_from_axis_angle(axis, 30 * DEGREES)
print(Quaternion.multiply_quat(q1, q2))
print(Quaternion.multiply_quat_2(q1, q2))
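    # Editor's sanity check (not in the original script): rotations about the same
    # axis compose by adding angles, so q1 * q2 should match a single 50-degree turn.
    q12 = Quaternion().set_from_axis_angle(axis, 50 * DEGREES)
    print(np.allclose(Quaternion.multiply_quat_2(q1, q2).to_array(), q12.to_array()))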
|
[
"numpy.cross",
"numpy.sin",
"numpy.linalg.norm",
"numpy.array",
"numpy.cos"
] |
[((3676, 3695), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (3684, 3695), True, 'import numpy as np\n'), ((499, 536), 'numpy.array', 'np.array', (['[self._x, self._y, self._z]'], {}), '([self._x, self._y, self._z])\n', (507, 536), True, 'import numpy as np\n'), ((555, 586), 'numpy.array', 'np.array', (['[*self._vec, self._w]'], {}), '([*self._vec, self._w])\n', (563, 586), True, 'import numpy as np\n'), ((630, 667), 'numpy.array', 'np.array', (['[self._x, self._y, self._z]'], {}), '([self._x, self._y, self._z])\n', (638, 667), True, 'import numpy as np\n'), ((686, 717), 'numpy.array', 'np.array', (['[*self._vec, self._w]'], {}), '([*self._vec, self._w])\n', (694, 717), True, 'import numpy as np\n'), ((804, 829), 'numpy.linalg.norm', 'np.linalg.norm', (['self._vec'], {}), '(self._vec)\n', (818, 829), True, 'import numpy as np\n'), ((2795, 2813), 'numpy.sin', 'np.sin', (['half_angle'], {}), '(half_angle)\n', (2801, 2813), True, 'import numpy as np\n'), ((2923, 2941), 'numpy.cos', 'np.cos', (['half_angle'], {}), '(half_angle)\n', (2929, 2941), True, 'import numpy as np\n'), ((1951, 1967), 'numpy.cross', 'np.cross', (['v1', 'v2'], {}), '(v1, v2)\n', (1959, 1967), True, 'import numpy as np\n'), ((2736, 2750), 'numpy.array', 'np.array', (['axis'], {}), '(axis)\n', (2744, 2750), True, 'import numpy as np\n')]
|
import matplotlib.pyplot as plt
import cv2
#import imutils
import requests
import base64
import json
import numpy as np
from PIL import Image
from PIL import ImageEnhance
from skimage import color, data, restoration
from scipy.signal import convolve2d
import pytesseract
import PIL.ImageOps
pytesseract.pytesseract.tesseract_cmd = r'C:\Program Files (x86)\Tesseract-OCR\tesseract.exe'
plate=None
def main(img):
    img = cv2.imread(img, cv2.IMREAD_COLOR)
    img = cv2.resize(img, (600, 400))
threshold = 180 # to be determined
_, img_binarized = cv2.threshold(img, threshold, 255, cv2.THRESH_BINARY)
pil_img = Image.fromarray(img_binarized)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray = cv2.bilateralFilter(gray, 13, 15, 15)
edged = cv2.Canny(gray, 30, 200)
thresh = cv2.adaptiveThreshold(gray, 255, 1, 1, 11, 2)
#contours = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours, hierarchy = cv2.findContours(thresh, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
#contours = imutils.grab_contours(contours)
contours = sorted(contours, key = cv2.contourArea, reverse = True)[:30]
screenCnt = None
gaussian_blur_license_plate = cv2.GaussianBlur(
img, (5, 5), 0)
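    # Walk the contours (already sorted largest-first) and stop at the first one
    # whose polygonal approximation has exactly 4 vertices -- assumed to be the plate.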
for c in contours:
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, 0.02 * peri, True)
if len(approx) == 4:
screenCnt = approx
break
if screenCnt is None:
detected = 0
print ("No contour detected")
else:
detected = 1
if detected == 1:
cv2.drawContours(img, [screenCnt], -1, (0, 0, 255), 3)
mask = np.zeros(gray.shape,np.uint8)
new_image = cv2.drawContours(mask,[screenCnt],0,255,-1,)
new_image = cv2.bitwise_and(img,img,mask=mask)
(x, y) = np.where(mask == 255)
(topx, topy) = (np.min(x), np.min(y))
(bottomx, bottomy) = (np.max(x), np.max(y))
Cropped = gray[topx:bottomx+1, topy:bottomy+1]
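    # Run Tesseract's English model on the cropped plate region.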
text = pytesseract.image_to_string(Cropped, lang='eng')
print("programming_fever's License Plate Recognition\n")
print("Detected license plate Number is:",text)
img = cv2.resize(img,(500,300))
Cropped = cv2.resize(Cropped,(400,200))
im = Image.fromarray(Cropped)
im.save('test.png')
image = Image.open('test.png')
enh_bri = ImageEnhance.Brightness(image )
brightness = 1.0
image_brightened = enh_bri.enhance(brightness)
imwhole = np.array(image_brightened)
cv2.imshow('car',img)
cv2.imshow('Cropped',imwhole)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
[
"cv2.GaussianBlur",
"PIL.ImageEnhance.Brightness",
"cv2.bitwise_and",
"cv2.approxPolyDP",
"cv2.arcLength",
"cv2.adaptiveThreshold",
"cv2.bilateralFilter",
"cv2.imshow",
"cv2.cvtColor",
"numpy.max",
"cv2.drawContours",
"cv2.destroyAllWindows",
"cv2.resize",
"cv2.Canny",
"cv2.waitKey",
"numpy.min",
"cv2.threshold",
"numpy.zeros",
"pytesseract.image_to_string",
"PIL.Image.open",
"cv2.imread",
"numpy.where",
"numpy.array",
"PIL.Image.fromarray",
"cv2.findContours"
] |
[((428, 461), 'cv2.imread', 'cv2.imread', (['img', 'cv2.IMREAD_COLOR'], {}), '(img, cv2.IMREAD_COLOR)\n', (438, 461), False, 'import cv2\n'), ((472, 499), 'cv2.resize', 'cv2.resize', (['img', '(600, 400)'], {}), '(img, (600, 400))\n', (482, 499), False, 'import cv2\n'), ((510, 537), 'cv2.resize', 'cv2.resize', (['img', '(600, 400)'], {}), '(img, (600, 400))\n', (520, 537), False, 'import cv2\n'), ((600, 653), 'cv2.threshold', 'cv2.threshold', (['img', 'threshold', '(255)', 'cv2.THRESH_BINARY'], {}), '(img, threshold, 255, cv2.THRESH_BINARY)\n', (613, 653), False, 'import cv2\n'), ((668, 698), 'PIL.Image.fromarray', 'Image.fromarray', (['img_binarized'], {}), '(img_binarized)\n', (683, 698), False, 'from PIL import Image\n'), ((712, 749), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (724, 749), False, 'import cv2\n'), ((761, 798), 'cv2.bilateralFilter', 'cv2.bilateralFilter', (['gray', '(13)', '(15)', '(15)'], {}), '(gray, 13, 15, 15)\n', (780, 798), False, 'import cv2\n'), ((812, 836), 'cv2.Canny', 'cv2.Canny', (['gray', '(30)', '(200)'], {}), '(gray, 30, 200)\n', (821, 836), False, 'import cv2\n'), ((850, 895), 'cv2.adaptiveThreshold', 'cv2.adaptiveThreshold', (['gray', '(255)', '(1)', '(1)', '(11)', '(2)'], {}), '(gray, 255, 1, 1, 11, 2)\n', (871, 895), False, 'import cv2\n'), ((1009, 1073), 'cv2.findContours', 'cv2.findContours', (['thresh', 'cv2.RETR_LIST', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(thresh, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)\n', (1025, 1073), False, 'import cv2\n'), ((1254, 1286), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['img', '(5, 5)', '(0)'], {}), '(img, (5, 5), 0)\n', (1270, 1286), False, 'import cv2\n'), ((1711, 1741), 'numpy.zeros', 'np.zeros', (['gray.shape', 'np.uint8'], {}), '(gray.shape, np.uint8)\n', (1719, 1741), True, 'import numpy as np\n'), ((1757, 1804), 'cv2.drawContours', 'cv2.drawContours', (['mask', '[screenCnt]', '(0)', '(255)', '(-1)'], {}), '(mask, [screenCnt], 0, 255, -1)\n', (1773, 1804), False, 'import cv2\n'), ((1818, 1854), 'cv2.bitwise_and', 'cv2.bitwise_and', (['img', 'img'], {'mask': 'mask'}), '(img, img, mask=mask)\n', (1833, 1854), False, 'import cv2\n'), ((1867, 1888), 'numpy.where', 'np.where', (['(mask == 255)'], {}), '(mask == 255)\n', (1875, 1888), True, 'import numpy as np\n'), ((2042, 2090), 'pytesseract.image_to_string', 'pytesseract.image_to_string', (['Cropped'], {'lang': '"""eng"""'}), "(Cropped, lang='eng')\n", (2069, 2090), False, 'import pytesseract\n'), ((2214, 2241), 'cv2.resize', 'cv2.resize', (['img', '(500, 300)'], {}), '(img, (500, 300))\n', (2224, 2241), False, 'import cv2\n'), ((2255, 2286), 'cv2.resize', 'cv2.resize', (['Cropped', '(400, 200)'], {}), '(Cropped, (400, 200))\n', (2265, 2286), False, 'import cv2\n'), ((2296, 2320), 'PIL.Image.fromarray', 'Image.fromarray', (['Cropped'], {}), '(Cropped)\n', (2311, 2320), False, 'from PIL import Image\n'), ((2359, 2381), 'PIL.Image.open', 'Image.open', (['"""test.png"""'], {}), "('test.png')\n", (2369, 2381), False, 'from PIL import Image\n'), ((2396, 2426), 'PIL.ImageEnhance.Brightness', 'ImageEnhance.Brightness', (['image'], {}), '(image)\n', (2419, 2426), False, 'from PIL import ImageEnhance\n'), ((2516, 2542), 'numpy.array', 'np.array', (['image_brightened'], {}), '(image_brightened)\n', (2524, 2542), True, 'import numpy as np\n'), ((2549, 2571), 'cv2.imshow', 'cv2.imshow', (['"""car"""', 'img'], {}), "('car', img)\n", (2559, 2571), False, 'import cv2\n'), ((2575, 2605), 'cv2.imshow', 'cv2.imshow', 
(['"""Cropped"""', 'imwhole'], {}), "('Cropped', imwhole)\n", (2585, 2605), False, 'import cv2\n'), ((2610, 2624), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (2621, 2624), False, 'import cv2\n'), ((2629, 2652), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (2650, 2652), False, 'import cv2\n'), ((1336, 1358), 'cv2.arcLength', 'cv2.arcLength', (['c', '(True)'], {}), '(c, True)\n', (1349, 1358), False, 'import cv2\n'), ((1376, 1414), 'cv2.approxPolyDP', 'cv2.approxPolyDP', (['c', '(0.02 * peri)', '(True)'], {}), '(c, 0.02 * peri, True)\n', (1392, 1414), False, 'import cv2\n'), ((1644, 1698), 'cv2.drawContours', 'cv2.drawContours', (['img', '[screenCnt]', '(-1)', '(0, 0, 255)', '(3)'], {}), '(img, [screenCnt], -1, (0, 0, 255), 3)\n', (1660, 1698), False, 'import cv2\n'), ((1909, 1918), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (1915, 1918), True, 'import numpy as np\n'), ((1920, 1929), 'numpy.min', 'np.min', (['y'], {}), '(y)\n', (1926, 1929), True, 'import numpy as np\n'), ((1957, 1966), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (1963, 1966), True, 'import numpy as np\n'), ((1968, 1977), 'numpy.max', 'np.max', (['y'], {}), '(y)\n', (1974, 1977), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
###########################################################
# WARNING: Generated code! #
# ************************** #
# Manual changes may get lost if file is generated again. #
# Only code inside the [MANUAL] tags will be kept. #
###########################################################
from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger
from flex_nav_flexbe_states.get_pose_state import GetPoseState
from flex_nav_flexbe_states.get_path_state import GetPathState
from flex_nav_flexbe_states.follow_path_state import FollowPathState
from flex_nav_flexbe_states.clear_costmaps_state import ClearCostmapsState
from flex_nav_flexbe_states.rotate_angle_state import RotateAngleState
from flexbe_states.operator_decision_state import OperatorDecisionState
from flexbe_states.log_state import LogState
# Additional imports can be added inside the following tags
# [MANUAL_IMPORT]
# [/MANUAL_IMPORT]
'''
Created on Fri Aug 19 2016
@author: <NAME>
'''
class CreateMoveBaseSM(Behavior):
'''
    A drop-in replacement for move_base that works with the CHRISLab Flexible Navigation system and iRobot Create
'''
def __init__(self):
super(CreateMoveBaseSM, self).__init__()
self.name = 'Create Move Base'
# parameters of this behavior
# references to used behaviors
# Additional initialization code can be added inside the following tags
# [MANUAL_INIT]
# [/MANUAL_INIT]
# Behavior comments:
def create(self):
# x:175 y:115, x:629 y:210
_state_machine = OperatableStateMachine(outcomes=['finished', 'failed'])
# Additional creation code can be added inside the following tags
# [MANUAL_CREATE]
# [/MANUAL_CREATE]
with _state_machine:
# x:56 y:20
OperatableStateMachine.add('Get Pose',
GetPoseState(topic='/move_base_simple/goal'),
transitions={'done': 'Global Planner'},
autonomy={'done': Autonomy.Off},
remapping={'goal': 'goal'})
# x:52 y:155
OperatableStateMachine.add('Global Planner',
GetPathState(planner_topic='global_planner'),
transitions={'planned': 'Local Planner', 'empty': 'Log Fail', 'failed': 'Log Fail'},
autonomy={'planned': Autonomy.Off, 'empty': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'goal': 'goal', 'plan': 'plan'})
# x:251 y:155
OperatableStateMachine.add('Local Planner',
FollowPathState(topic='local_planner'),
transitions={'done': 'Continue', 'failed': 'Log Fail', 'preempted': 'Continue'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off, 'preempted': Autonomy.Off},
remapping={'plan': 'plan'})
# x:441 y:128
OperatableStateMachine.add('Clear Costmaps',
ClearCostmapsState(costmap_topics=['global_planner/clear_costmap','local_planner/clear_costmap']),
transitions={'done': 'Rotate Recovery', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off})
# x:448 y:28
OperatableStateMachine.add('Rotate Recovery',
RotateAngleState(target_time=5.0, cmd_topic='/create_node/cmd_vel', odometry_topic='/create_node/odom'),
transitions={'done': 'Get Pose'},
autonomy={'done': Autonomy.Off})
# x:237 y:59
OperatableStateMachine.add('Continue',
OperatorDecisionState(outcomes=['yes', 'no'], hint=None, suggestion=None),
transitions={'yes': 'Get Pose', 'no': 'finished'},
autonomy={'yes': Autonomy.Off, 'no': Autonomy.Off})
# x:167 y:270
OperatableStateMachine.add('Log Fail',
LogState(text='Navigation failed!', severity=Logger.logerr),
transitions={'done': 'Autonomy'},
autonomy={'done': Autonomy.Off})
# x:437 y:272
OperatableStateMachine.add('Autonomy',
OperatorDecisionState(outcomes=['yes', 'no'], hint=None, suggestion='yes'),
transitions={'yes': 'Clear Costmaps', 'no': 'failed'},
autonomy={'yes': Autonomy.High, 'no': Autonomy.Off})
return _state_machine
# Private functions can be added inside the following tags
# [MANUAL_FUNC]
# [/MANUAL_FUNC]
|
[
"flex_nav_flexbe_states.get_pose_state.GetPoseState",
"flex_nav_flexbe_states.follow_path_state.FollowPathState",
"flex_nav_flexbe_states.clear_costmaps_state.ClearCostmapsState",
"flexbe_states.operator_decision_state.OperatorDecisionState",
"flex_nav_flexbe_states.rotate_angle_state.RotateAngleState",
"flexbe_core.OperatableStateMachine",
"flex_nav_flexbe_states.get_path_state.GetPathState",
"flexbe_states.log_state.LogState"
] |
[((1701, 1756), 'flexbe_core.OperatableStateMachine', 'OperatableStateMachine', ([], {'outcomes': "['finished', 'failed']"}), "(outcomes=['finished', 'failed'])\n", (1723, 1756), False, 'from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger\n'), ((2026, 2070), 'flex_nav_flexbe_states.get_pose_state.GetPoseState', 'GetPoseState', ([], {'topic': '"""/move_base_simple/goal"""'}), "(topic='/move_base_simple/goal')\n", (2038, 2070), False, 'from flex_nav_flexbe_states.get_pose_state import GetPoseState\n'), ((2416, 2460), 'flex_nav_flexbe_states.get_path_state.GetPathState', 'GetPathState', ([], {'planner_topic': '"""global_planner"""'}), "(planner_topic='global_planner')\n", (2428, 2460), False, 'from flex_nav_flexbe_states.get_path_state import GetPathState\n'), ((2917, 2955), 'flex_nav_flexbe_states.follow_path_state.FollowPathState', 'FollowPathState', ([], {'topic': '"""local_planner"""'}), "(topic='local_planner')\n", (2932, 2955), False, 'from flex_nav_flexbe_states.follow_path_state import FollowPathState\n'), ((3394, 3496), 'flex_nav_flexbe_states.clear_costmaps_state.ClearCostmapsState', 'ClearCostmapsState', ([], {'costmap_topics': "['global_planner/clear_costmap', 'local_planner/clear_costmap']"}), "(costmap_topics=['global_planner/clear_costmap',\n 'local_planner/clear_costmap'])\n", (3412, 3496), False, 'from flex_nav_flexbe_states.clear_costmaps_state import ClearCostmapsState\n'), ((3815, 3922), 'flex_nav_flexbe_states.rotate_angle_state.RotateAngleState', 'RotateAngleState', ([], {'target_time': '(5.0)', 'cmd_topic': '"""/create_node/cmd_vel"""', 'odometry_topic': '"""/create_node/odom"""'}), "(target_time=5.0, cmd_topic='/create_node/cmd_vel',\n odometry_topic='/create_node/odom')\n", (3831, 3922), False, 'from flex_nav_flexbe_states.rotate_angle_state import RotateAngleState\n'), ((4184, 4257), 'flexbe_states.operator_decision_state.OperatorDecisionState', 'OperatorDecisionState', ([], {'outcomes': "['yes', 'no']", 'hint': 'None', 'suggestion': 'None'}), "(outcomes=['yes', 'no'], hint=None, suggestion=None)\n", (4205, 4257), False, 'from flexbe_states.operator_decision_state import OperatorDecisionState\n'), ((4560, 4619), 'flexbe_states.log_state.LogState', 'LogState', ([], {'text': '"""Navigation failed!"""', 'severity': 'Logger.logerr'}), "(text='Navigation failed!', severity=Logger.logerr)\n", (4568, 4619), False, 'from flexbe_states.log_state import LogState\n'), ((4886, 4960), 'flexbe_states.operator_decision_state.OperatorDecisionState', 'OperatorDecisionState', ([], {'outcomes': "['yes', 'no']", 'hint': 'None', 'suggestion': '"""yes"""'}), "(outcomes=['yes', 'no'], hint=None, suggestion='yes')\n", (4907, 4960), False, 'from flexbe_states.operator_decision_state import OperatorDecisionState\n')]
|
#!/usr/bin/env python3
import os
import sys
import time
import json
import argparse
# filter wheel
import hid
# relay switch
import serial
# camera
import gphoto2 as gp
DEBUG = True
config_dict = {
'camera': {
'aWHITE': {
'exp': '1/4',
'iso': '200',
'f_val': '2',
},
'bGFP': {
'exp': '1/8',
'iso': '200',
'f_val': '1',
},
'cCFP': {
'exp': '1/8',
'iso': '400',
'f_val': '2',
},
'mCherry': {
'exp': '1/4',
'iso': '400',
'f_val': '1',
},
},
'wheel': {
'ven_id': 0x1278,
'pro_id': 0x0920,
'filters': ['aWHITE', 'bGFP', 'cCFP', 'mCherry'],
},
'relay': {
'path': '/dev/tty.usbmodem1421',
},
'interval': 5,
'work_dir': '.',
'out_fmt': 'arw',
}
# class CameraException(Exception):
# pass
#
# class WheelException(Exception):
# pass
#
# class RelayException(Exception):
# pass
def clean_env(camera, wheel, relay):
if DEBUG:
print('Closing camera connection...', file=sys.stderr)
gp.check_result(gp.gp_camera_exit(camera))
if DEBUG:
print('Camera connection closed', file=sys.stderr)
print('Closing filter wheel connection...', file=sys.stderr)
wheel.close()
if DEBUG:
print('Filter wheel connection closed', file=sys.stderr)
print('Closing relay switch connection...', file=sys.stderr)
relay.close()
if DEBUG:
print('Relay switch connection closed', file=sys.stderr)
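# gphoto2 configuration flow used below: fetch the camera's config tree, look up
# the named child widgets, set their values, then write the whole tree back.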
def set_camera_config(camera, exp, iso, f_val):
if DEBUG:
print('Getting previous camera configuration...', file=sys.stderr)
camera_config = camera.get_config()
error, exp_conf = gp.gp_widget_get_child_by_name(camera_config, 'shutterspeed')
assert error == 0, "ERROR while retrieving current exposure"
error, iso_conf = gp.gp_widget_get_child_by_name(camera_config, 'iso')
assert error == 0, "ERROR while retrieving current ISO"
error, f_conf = gp.gp_widget_get_child_by_name(camera_config, 'f-number')
assert error == 0, "ERROR while retrieving current aperture"
error = gp.check_result(gp.gp_widget_set_value(exp_conf, exp))
assert error == 0, "ERROR while setting exposure to {}".format(exp)
error = gp.check_result(gp.gp_widget_set_value(iso_conf, iso))
assert error == 0, "ERROR while setting ISO to {}".format(iso)
error = gp.check_result(gp.gp_widget_set_value(f_conf, f_val))
assert error == 0, "ERROR while setting aperture to {}".format(f_val)
if DEBUG:
print("Setting new camera configuration (exp {}, iso {}, f {})...".format(exp, iso, f_val), file=sys.stderr)
error = gp.check_result(gp.gp_camera_set_config(camera, camera_config))
assert error == 0, "ERROR while setting camera configuration"
if DEBUG:
print('New camera configuration set', file=sys.stderr)
def timelapse(camera, wheel, relay, config_dict):
ch_idx = 0
# TODO: detect pictures already present in work_dir and continue numbering
capture_idx = -1
work_dir = config_dict['work_dir']
interval = config_dict['interval']
out_fmt = config_dict['out_fmt']
channels = config_dict['wheel']['filters']
assert len(config_dict['camera']) == len(channels), "ERROR: Different number of channels for camera and filter wheel"
ch = channels[0]
# INIT
exp = str(config_dict['camera'][ch]['exp'])
iso = str(config_dict['camera'][ch]['iso'])
f_val = float(config_dict['camera'][ch]['f_val'])
set_camera_config(camera, exp, iso, f_val)
relay.write("reset\n\r".encode('utf-8'))
wheel.write([1, 0])
try:
while True:
if ch_idx == 0:
capture_idx += 1
if DEBUG:
print("CHANNEL {} (ch) [IT {}]".format(ch_idx, capture_idx), file=sys.stderr)
# LIGHTS UP AND CAPTURE
if DEBUG:
print("Lights up...", file=sys.stderr)
relay_cmd = "relay on {}\n\r".format(ch_idx).encode('utf-8')
relay.write(relay_cmd)
if DEBUG:
print("Lights up! Relay status:", file=sys.stderr)
relay_cmd = "relay readall\n\r".encode('utf-8')
relay.write(relay_cmd)
print(relay.readlines(), file=sys.stderr)
print("Shoot...", file=sys.stderr)
camera_fn = gp.check_result(gp.gp_camera_capture(camera, gp.GP_CAPTURE_IMAGE))
if DEBUG:
print("Shoot!", file=sys.stderr)
print("Lights down...", file=sys.stderr)
relay_cmd = "relay off {}\n\r".format(ch_idx).encode('utf-8')
relay.write(relay_cmd)
if DEBUG:
print("Lights down! Relay status:", file=sys.stderr)
relay_cmd = "relay readall\n\r".encode('utf-8')
relay.write(relay_cmd)
print(relay.readlines(), file=sys.stderr)
# SAVE PICTURE
# TODO: save all channels' pictures during sleep
if DEBUG:
print('Saving picture...', file=sys.stderr)
camera_f = gp.check_result(gp.gp_camera_file_get(camera, camera_fn.folder, camera_fn.name, gp.GP_FILE_TYPE_NORMAL))
out_fn = os.path.join(work_dir, "{}_{}.{}".format(str(capture_idx).zfill(10), ch, out_fmt))
gp.check_result(gp.gp_file_save(camera_f, out_fn))
if DEBUG:
print('Picture saved', file=sys.stderr)
if ch_idx == 0:
time_first_shot = time.time()
# GET READY FOR NEXT SHOT
ch_idx = (ch_idx+1) % len(channels)
ch = channels[ch_idx]
# TODO: multithreaded/asynchronous config
exp = str(config_dict['camera'][ch]['exp'])
iso = str(config_dict['camera'][ch]['iso'])
f_val = float(config_dict['camera'][ch]['f_val'])
set_camera_config(camera, exp, iso, f_val)
# TODO: check that the wheel is on the right position
if DEBUG:
print('Rotating filter wheel...', file=sys.stderr)
wheel.write([ch_idx+1, 0])
time.sleep(1)
if DEBUG:
print('Filter wheel rotated', file=sys.stderr)
if ch_idx == 0:
# just to be sure...if relay off command lost, screw up only one shot
relay.write("reset\n\r".encode('utf-8'))
if DEBUG:
print("Relay switch reset! Relay status:", file=sys.stderr)
relay_cmd = "relay readall\n\r".encode('utf-8')
relay.write(relay_cmd)
print(relay.readlines(), file=sys.stderr)
# TODO: sleep the diff between time of first shot and now
# (so that same channel has ~ interval)
print("Going to sleep", file=sys.stderr)
time.sleep(interval)
except KeyboardInterrupt:
clean_env(camera, wheel, relay)
def init_camera(**kwd_args):
context = gp.gp_context_new()
error, camera = gp.gp_camera_new()
error = gp.gp_camera_init(camera, context)
if DEBUG:
error, summary = gp.gp_camera_get_summary(camera, context)
print('Summary', file=sys.stderr)
print('=======', file=sys.stderr)
print(summary.text, file=sys.stderr)
return camera
def init_wheel(ven_id, pro_id, **kwd_args):
wheel = hid.device()
wheel.open(ven_id, pro_id)
if DEBUG:
# TODO: check filter total positions
if not wheel:
print("Error", file=sys.stderr)
return wheel
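# The relay board below speaks a simple line protocol over serial:
# "reset", "relay on <n>", "relay off <n>", and "relay readall" (as used in timelapse()).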
def init_relay(path, **kwd_args):
relay = serial.Serial(path, 19200, timeout=1)
relay.write(b'reset\n\r')
if DEBUG:
relay.readlines()
relay.write(b'relay readall\n\r')
res = relay.readlines()
# TODO: check that all relays are off
if not res:
print("Error", file=sys.stderr)
return relay
def parse_args():
desc = "Script for running fluorescent timelapses"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
'config_fn',
metavar='conf.json',
type=str,
help='json file containing the channel configurations'
)
args = parser.parse_args()
return args
def main(config_dict):
if DEBUG:
print('Initializing camera connection...', file=sys.stderr)
camera = init_camera(**config_dict['camera'])
if DEBUG:
print('Camera connection initialized', file=sys.stderr)
print('Initializing filter wheel connection...', file=sys.stderr)
wheel = init_wheel(**config_dict['wheel'])
if DEBUG:
print('Filter wheel connection initialized', file=sys.stderr)
print('Initializing relay switch connection...', file=sys.stderr)
relay = init_relay(**config_dict['relay'])
if DEBUG:
print('Relay switch connection initialized', file=sys.stderr)
print('Starting timelapse', file=sys.stderr)
timelapse(camera, wheel, relay, config_dict)
if __name__ == '__main__':
if len(sys.argv) > 1:
args = parse_args()
with open(args.config_fn, 'r') as config_f:
config_dict = json.load(config_f)
if DEBUG:
print(config_dict, file=sys.stderr)
main(config_dict)
|
[
"serial.Serial",
"hid.device",
"json.load",
"argparse.ArgumentParser",
"gphoto2.gp_camera_set_config",
"gphoto2.gp_widget_get_child_by_name",
"gphoto2.gp_camera_file_get",
"gphoto2.gp_camera_get_summary",
"gphoto2.gp_file_save",
"gphoto2.gp_camera_new",
"gphoto2.gp_context_new",
"time.sleep",
"time.time",
"gphoto2.gp_widget_set_value",
"gphoto2.gp_camera_init",
"gphoto2.gp_camera_capture",
"gphoto2.gp_camera_exit"
] |
[((1838, 1899), 'gphoto2.gp_widget_get_child_by_name', 'gp.gp_widget_get_child_by_name', (['camera_config', '"""shutterspeed"""'], {}), "(camera_config, 'shutterspeed')\n", (1868, 1899), True, 'import gphoto2 as gp\n'), ((1987, 2039), 'gphoto2.gp_widget_get_child_by_name', 'gp.gp_widget_get_child_by_name', (['camera_config', '"""iso"""'], {}), "(camera_config, 'iso')\n", (2017, 2039), True, 'import gphoto2 as gp\n'), ((2120, 2177), 'gphoto2.gp_widget_get_child_by_name', 'gp.gp_widget_get_child_by_name', (['camera_config', '"""f-number"""'], {}), "(camera_config, 'f-number')\n", (2150, 2177), True, 'import gphoto2 as gp\n'), ((7191, 7210), 'gphoto2.gp_context_new', 'gp.gp_context_new', ([], {}), '()\n', (7208, 7210), True, 'import gphoto2 as gp\n'), ((7231, 7249), 'gphoto2.gp_camera_new', 'gp.gp_camera_new', ([], {}), '()\n', (7247, 7249), True, 'import gphoto2 as gp\n'), ((7262, 7296), 'gphoto2.gp_camera_init', 'gp.gp_camera_init', (['camera', 'context'], {}), '(camera, context)\n', (7279, 7296), True, 'import gphoto2 as gp\n'), ((7586, 7598), 'hid.device', 'hid.device', ([], {}), '()\n', (7596, 7598), False, 'import hid\n'), ((7823, 7860), 'serial.Serial', 'serial.Serial', (['path', '(19200)'], {'timeout': '(1)'}), '(path, 19200, timeout=1)\n', (7836, 7860), False, 'import serial\n'), ((8224, 8265), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (8247, 8265), False, 'import argparse\n'), ((1199, 1224), 'gphoto2.gp_camera_exit', 'gp.gp_camera_exit', (['camera'], {}), '(camera)\n', (1216, 1224), True, 'import gphoto2 as gp\n'), ((2272, 2309), 'gphoto2.gp_widget_set_value', 'gp.gp_widget_set_value', (['exp_conf', 'exp'], {}), '(exp_conf, exp)\n', (2294, 2309), True, 'import gphoto2 as gp\n'), ((2411, 2448), 'gphoto2.gp_widget_set_value', 'gp.gp_widget_set_value', (['iso_conf', 'iso'], {}), '(iso_conf, iso)\n', (2433, 2448), True, 'import gphoto2 as gp\n'), ((2545, 2582), 'gphoto2.gp_widget_set_value', 'gp.gp_widget_set_value', (['f_conf', 'f_val'], {}), '(f_conf, f_val)\n', (2567, 2582), True, 'import gphoto2 as gp\n'), ((2818, 2864), 'gphoto2.gp_camera_set_config', 'gp.gp_camera_set_config', (['camera', 'camera_config'], {}), '(camera, camera_config)\n', (2841, 2864), True, 'import gphoto2 as gp\n'), ((7337, 7378), 'gphoto2.gp_camera_get_summary', 'gp.gp_camera_get_summary', (['camera', 'context'], {}), '(camera, context)\n', (7361, 7378), True, 'import gphoto2 as gp\n'), ((6300, 6313), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (6310, 6313), False, 'import time\n'), ((9394, 9413), 'json.load', 'json.load', (['config_f'], {}), '(config_f)\n', (9403, 9413), False, 'import json\n'), ((4531, 4580), 'gphoto2.gp_camera_capture', 'gp.gp_camera_capture', (['camera', 'gp.GP_CAPTURE_IMAGE'], {}), '(camera, gp.GP_CAPTURE_IMAGE)\n', (4551, 4580), True, 'import gphoto2 as gp\n'), ((5281, 5373), 'gphoto2.gp_camera_file_get', 'gp.gp_camera_file_get', (['camera', 'camera_fn.folder', 'camera_fn.name', 'gp.GP_FILE_TYPE_NORMAL'], {}), '(camera, camera_fn.folder, camera_fn.name, gp.\n GP_FILE_TYPE_NORMAL)\n', (5302, 5373), True, 'import gphoto2 as gp\n'), ((5502, 5535), 'gphoto2.gp_file_save', 'gp.gp_file_save', (['camera_f', 'out_fn'], {}), '(camera_f, out_fn)\n', (5517, 5535), True, 'import gphoto2 as gp\n'), ((5678, 5689), 'time.time', 'time.time', ([], {}), '()\n', (5687, 5689), False, 'import time\n'), ((7053, 7073), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (7063, 7073), False, 'import time\n')]
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
def get_data():
base_cond=[[18,20,19,18,13,4,1],
[20,17,12,9,3,0,0],
[20,20,20,12,5,3,0]]
cond1=[[18,19,18,19,20,15,14],
[19,20,18,16,20,15,9],
[19,20,20,20,17,10,0],
[20,20,20,20,7,9,1]]
cond2=[[20,20,20,20,19,17,4],
[20,20,20,20,20,19,7],
[19,20,20,19,19,15,2]]
cond3=[[20,20,20,20,19,17,12],
[18,20,19,18,13,4,1],
[20,19,18,17,13,2,0],
[19,18,20,20,15,6,0]]
return base_cond,cond1,cond2,cond3
def main():
    # load the data
results=get_data()
print(results[0], len(results[0]), len(results[0][0]))
fig=plt.figure()
xdata = np.array(range(0, 7))
sns.tsplot(time=xdata, data=results[0], color='r', linestyle='-')
sns.tsplot(time=xdata, data=results[1], color='b', linestyle='--')
sns.tsplot(time=xdata, data=results[2], color='g', linestyle='-.')
sns.tsplot(time=xdata, data=results[3], color='k', linestyle=':')
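    # Editor's note: sns.tsplot is deprecated and removed from current seaborn
    # releases; an equivalent with the modern API would reshape each condition to
    # long form and use lineplot, which aggregates repeated x values with a mean
    # and confidence band, e.g. (sketch only; assumes `import pandas as pd`):
    #   df = pd.DataFrame(results[0]).melt(var_name='t', value_name='y')
    #   sns.lineplot(data=df, x='t', y='y', color='r')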
    plt.ylabel('Success rate', fontsize=25)
plt.xlabel('Iteration num', fontsize=25, labelpad=-4)
plt.title('Robot performance', fontsize=25)
    plt.legend(loc='lower left')  # 'bottom left' is not a valid matplotlib legend loc
plt.show()
if __name__ == '__main__':
main()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"seaborn.tsplot",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((675, 687), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (685, 687), True, 'import matplotlib.pyplot as plt\n'), ((726, 791), 'seaborn.tsplot', 'sns.tsplot', ([], {'time': 'xdata', 'data': 'results[0]', 'color': '"""r"""', 'linestyle': '"""-"""'}), "(time=xdata, data=results[0], color='r', linestyle='-')\n", (736, 791), True, 'import seaborn as sns\n'), ((796, 862), 'seaborn.tsplot', 'sns.tsplot', ([], {'time': 'xdata', 'data': 'results[1]', 'color': '"""b"""', 'linestyle': '"""--"""'}), "(time=xdata, data=results[1], color='b', linestyle='--')\n", (806, 862), True, 'import seaborn as sns\n'), ((867, 933), 'seaborn.tsplot', 'sns.tsplot', ([], {'time': 'xdata', 'data': 'results[2]', 'color': '"""g"""', 'linestyle': '"""-."""'}), "(time=xdata, data=results[2], color='g', linestyle='-.')\n", (877, 933), True, 'import seaborn as sns\n'), ((938, 1003), 'seaborn.tsplot', 'sns.tsplot', ([], {'time': 'xdata', 'data': 'results[3]', 'color': '"""k"""', 'linestyle': '""":"""'}), "(time=xdata, data=results[3], color='k', linestyle=':')\n", (948, 1003), True, 'import seaborn as sns\n'), ((1009, 1047), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sucess rate"""'], {'fontsize': '(25)'}), "('Sucess rate', fontsize=25)\n", (1019, 1047), True, 'import matplotlib.pyplot as plt\n'), ((1052, 1105), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Iteration num"""'], {'fontsize': '(25)', 'labelpad': '(-4)'}), "('Iteration num', fontsize=25, labelpad=-4)\n", (1062, 1105), True, 'import matplotlib.pyplot as plt\n'), ((1110, 1153), 'matplotlib.pyplot.title', 'plt.title', (['"""Robot performance"""'], {'fontsize': '(25)'}), "('Robot performance', fontsize=25)\n", (1119, 1153), True, 'import matplotlib.pyplot as plt\n'), ((1158, 1187), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""bottom left"""'}), "(loc='bottom left')\n", (1168, 1187), True, 'import matplotlib.pyplot as plt\n'), ((1192, 1202), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1200, 1202), True, 'import matplotlib.pyplot as plt\n')]
|
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import torch
from sparsetorch.dD_basis_functions import Tensorprod, Elemprod, Sparse
from sparsetorch.oneD_basis_functions import Hat, Gauss, Fourier, Chebyshev, Legendre
from sparsetorch.plotter import plot_3D_all
from sparsetorch.utils import get_equidist_coord, get_rand_coord
from sparsetorch.solver import Model, Solver
def f_dD(x):
"""Simple example function defined on interval `[0, 1]`
Parameters
----------
x : torch.Tensor
coordinates for evaluation
Returns
-------
torch.Tensor
function evaluations
"""
result = 4 * x[0] * (x[0] - 1)
for x_i in x[1:]:
result *= 4 * x_i * (x_i - 1)
result *= torch.exp(2 * torch.prod(x, dim=0))
return result
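# Note: each 4 * x_i * (x_i - 1) factor vanishes at x_i in {0, 1}, so f_dD is zero
# on the whole boundary of the unit cube; the later examples that need nonzero
# boundary values approximate step_dD instead and pass boundary=True.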
def g_dD(x):
"""Complicated example function defined on interval `[0, 6]`
Parameters
----------
x : torch.Tensor
coordinates for evaluation
Returns
-------
torch.Tensor
function evaluations
"""
result = x[0] * (x[0] - 6) / 9
for x_i in x[1:]:
result *= x_i * (x_i - 6) / 9
result *= torch.exp(torch.sin(torch.prod(x, dim=0)))
return result
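# Note: x_i * (x_i - 6) / 9 also vanishes at the interval ends 0 and 6 and reaches
# -1 at x_i = 3; the exp(sin(prod x)) factor adds oscillation across [0, 6].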
def step_dD(x):
"""Another example function defined on interval `[0, 1]`, discontinuous
Parameters
----------
x : torch.Tensor
coordinates for evaluation
Returns
-------
torch.Tensor
function evaluations
"""
result = 1.0
for x_i in x:
result *= torch.round(2 * x_i)
return result
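# Note: torch.round(2 * x_i) is piecewise constant with values 0, 1, 2 and jumps
# near x_i = 0.25 and 0.75, so the product is discontinuous and equals 2**d at the
# corner x = (1, ..., 1) -- i.e. nonzero on part of the boundary.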
def example_1():
"""Example with same equidistant basis functions in 2D and tensorprod combination"""
#############
# settings: #
#############
# basis function settings
basis = Gauss # Hat or Gauss
bf_num = 30 # number of basis functions in one dimension
BF_dD = Tensorprod # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num = 100 # number of function evaluations in one dimension
input = get_equidist_coord(torch.zeros(2), torch.ones(2),
torch.ones(2) * eval_num)
# function evaluations
target = f_dD(input)
#############
# create 1D basis with equidistant basis functions
bf_1D = basis.equidist(bf_num)
bfs_1D = [bf_1D] * 2
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares
solver.le()
# plot
plot_3D_all(model, f_dD, "Example 1")
def example_2():
"""Example with different equidistant basis functions in 2D,
tensorprod combination and different number of basis functions
in different dimensions"""
#############
# settings: #
#############
# basis function settings
basis_x = Hat # Hat or Gauss
basis_y = Gauss # Hat or Gauss
bf_num_x = 7 # number of basis functions in x direction
bf_num_y = 3 # number of basis functions in y direction
BF_dD = Tensorprod # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num_x = 50 # number of function evaluations in x direction
eval_num_y = 60 # number of function evaluations in y direction
input = get_equidist_coord(torch.zeros(2), torch.ones(2),
torch.tensor([eval_num_x, eval_num_y]))
# function evaluations
target = f_dD(input)
#############
# create 1D basis with equidistant basis functions
bf_1D_x = basis_x.equidist(bf_num_x)
bf_1D_y = basis_y.equidist(bf_num_y)
bfs_1D = [bf_1D_x, bf_1D_y]
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares
solver.le()
# plot
plot_3D_all(model, f_dD, "Example 2")
def example_3():
"""Example with custom basis functions and elemprod combination"""
#############
# settings: #
#############
# basis function settings
basis_x = Hat # Hat or Gauss
basis_y = Gauss # Hat or Gauss
bf_num = 50 # number of basis functions
torch.manual_seed(332)
# position and width parameters of basis functions
mu_x = torch.rand(bf_num)
h_x = torch.rand(bf_num)
mu_y = torch.rand(bf_num)
h_y = torch.rand(bf_num)
BF_dD = Elemprod # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num = 60 # number of function evaluations in one dimension
input = get_equidist_coord(torch.zeros(2), torch.ones(2),
torch.ones(2) * eval_num)
# function evaluations
target = f_dD(input)
#############
# create 1D basis with equidistant basis functions
bf_1D_x = basis_x(mu_x, h_x)
bf_1D_y = basis_y(mu_y, h_y)
bfs_1D = [bf_1D_x, bf_1D_y]
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares
solver.le()
# plot
plot_3D_all(model, f_dD, "Example 3")
def example_4():
"""Example with same hierarchical basis functions in 2D, sparse combination
and approximated function nonzero on boundary
"""
#############
# settings: #
#############
# basis function settings
basis = Hat # Hat or Gauss
level = 5 # highest level of basis functions in one dimension
BF_dD = Sparse # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num = 100 # number of function evaluations in one dimension
input = get_equidist_coord(torch.zeros(2), torch.ones(2),
torch.ones(2) * eval_num)
# function evaluations
target = step_dD(input)
#############
# create 1D basis with hierarchical basis functions
bf_1D = basis.hierarchical(level, boundary=True)
bfs_1D = [bf_1D] * 2
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares
solver.le()
# plot
plot_3D_all(model, step_dD, "Example 4")
def example_5():
"""Example with hierarchical basis functions in 2D, sparse combination
and approximated function nonzero on boundary
"""
#############
# settings: #
#############
# basis function settings
basis = Hat # Hat or Gauss
level_x = 4 # highest level of basis functions in x direction
level_y = 5 # highest level of basis functions in y direction
BF_dD = Sparse # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num = 100 # number of function evaluations in one dimension
input = get_equidist_coord(torch.zeros(2), torch.ones(2),
torch.ones(2) * eval_num)
# function evaluations
target = step_dD(input)
#############
# create 1D basis with hierarchical basis functions
bf_1D_x = basis.hierarchical(level_x, boundary=True)
bf_1D_y = basis.hierarchical(level_y, boundary=True)
bfs_1D = [bf_1D_x, bf_1D_y]
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares
solver.le()
# plot
plot_3D_all(model, step_dD, "Example 5")
def example_6():
"""Example with orthogonal basis functionsin 2D, sparse combination
and approximated function nonzero on boundary
"""
#############
# settings: #
#############
# basis function settings
basis = Chebyshev # Fourier, Chebyshev, or Legendre
n_max = 40 # maximum level of basis functions
BF_dD = Sparse # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num = 100 # number of function evaluations in one dimension
input = get_equidist_coord(torch.zeros(2), torch.ones(2),
torch.ones(2) * eval_num)
# function evaluations
target = step_dD(input)
#############
# create 1D basis with orthogonal basis functions
bfs_1D = [basis(n_max)] * 2
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares
solver.le()
# plot
plot_3D_all(model, step_dD, "Example 6")
def example_7():
"""Example with challenging function, orthogonal basis functions,
sparse combination and approximated function nonzero on boundary
"""
#############
# settings: #
#############
# basis function settings
basis = Fourier # Fourier, Chebyshev, or Legendre
n_max = 16 # maximum level of basis functions
BF_dD = Sparse # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num = 100 # number of function evaluations in one dimension
input = get_equidist_coord(torch.zeros(2), 6 * torch.ones(2),
torch.ones(2) * eval_num)
# function evaluations
target = g_dD(input)
#############
# create 1D basis with orthogonal basis functions
bfs_1D = [basis(n_max, a=0.0, b=6.0)] * 2
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares with regularization
solver.le()
# plot
plot_3D_all(
model,
g_dD,
"Example 7",
x_min=0,
x_max=6,
y_min=0,
y_max=6,
steps=2 * eval_num,
)
def example_8():
"""Example with challenging function, hierarchical basis functions,
sparse combination and approximated function nonzero on boundary
"""
#############
# settings: #
#############
# basis function settings
basis = Hat # Hat or Gauss
level = 8 # highest level of basis functions in one dimension
BF_dD = Sparse # Tensorprod, Elemprod, or Sparse
# evaluation coordinates
eval_num = 150 # number of function evaluations in one dimension
input = get_equidist_coord(torch.zeros(2), 6 * torch.ones(2),
torch.ones(2) * eval_num)
# function evaluations
target = g_dD(input)
# create 1D basis with hierarchical basis functions
bf_1D = basis.hierarchical(level, boundary=False, a=0, b=6)
bfs_1D = [bf_1D] * 2
# create dD basis with above declared 1D basis functions
bf_dD = BF_dD(bfs_1D)
# create model
model = Model(bf_dD, bf_dD.bf_num)
# create solver
solver = Solver(model, input, target)
# solve linear equation / least squares with regularization
solver.le()
# plot
plot_3D_all(
model,
g_dD,
"Example 8",
x_min=0,
x_max=6,
y_min=0,
y_max=6,
steps=2 * eval_num,
)
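def make_equidist_coord_sketch(lower, upper, counts):
    """Hypothetical stand-in for get_equidist_coord, whose definition is
    not shown in this file: a minimal sketch of an equidistant grid
    builder returning a (dim, n_points) coordinate tensor, the layout
    f_dD/step_dD/g_dD consume (they reduce over dim=0)."""
    axes = [torch.linspace(float(lower[i]), float(upper[i]), int(counts[i]))
            for i in range(len(counts))]
    # the 'indexing' keyword requires torch >= 1.10
    grid = torch.meshgrid(*axes, indexing='ij')
    return torch.stack([g.reshape(-1) for g in grid], dim=0)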
if __name__ == "__main__":
example_1()
example_2()
example_3()
example_5()
example_6()
example_7()
example_8()
|
[
"torch.ones",
"sparsetorch.solver.Model",
"sparsetorch.solver.Solver",
"torch.manual_seed",
"torch.prod",
"sparsetorch.plotter.plot_3D_all",
"torch.rand",
"torch.zeros",
"torch.round",
"torch.tensor"
] |
[((2440, 2466), 'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (2445, 2466), False, 'from sparsetorch.solver import Model, Solver\n'), ((2501, 2529), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (2507, 2529), False, 'from sparsetorch.solver import Model, Solver\n'), ((2607, 2644), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'f_dD', '"""Example 1"""'], {}), "(model, f_dD, 'Example 1')\n", (2618, 2644), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((3822, 3848), 'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (3827, 3848), False, 'from sparsetorch.solver import Model, Solver\n'), ((3883, 3911), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (3889, 3911), False, 'from sparsetorch.solver import Model, Solver\n'), ((3989, 4026), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'f_dD', '"""Example 2"""'], {}), "(model, f_dD, 'Example 2')\n", (4000, 4026), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((4320, 4342), 'torch.manual_seed', 'torch.manual_seed', (['(332)'], {}), '(332)\n', (4337, 4342), False, 'import torch\n'), ((4409, 4427), 'torch.rand', 'torch.rand', (['bf_num'], {}), '(bf_num)\n', (4419, 4427), False, 'import torch\n'), ((4438, 4456), 'torch.rand', 'torch.rand', (['bf_num'], {}), '(bf_num)\n', (4448, 4456), False, 'import torch\n'), ((4468, 4486), 'torch.rand', 'torch.rand', (['bf_num'], {}), '(bf_num)\n', (4478, 4486), False, 'import torch\n'), ((4497, 4515), 'torch.rand', 'torch.rand', (['bf_num'], {}), '(bf_num)\n', (4507, 4515), False, 'import torch\n'), ((5133, 5159), 'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (5138, 5159), False, 'from sparsetorch.solver import Model, Solver\n'), ((5194, 5222), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (5200, 5222), False, 'from sparsetorch.solver import Model, Solver\n'), ((5300, 5337), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'f_dD', '"""Example 3"""'], {}), "(model, f_dD, 'Example 3')\n", (5311, 5337), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((6284, 6310), 'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (6289, 6310), False, 'from sparsetorch.solver import Model, Solver\n'), ((6345, 6373), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (6351, 6373), False, 'from sparsetorch.solver import Model, Solver\n'), ((6451, 6491), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'step_dD', '"""Example 4"""'], {}), "(model, step_dD, 'Example 4')\n", (6462, 6491), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((7562, 7588), 'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (7567, 7588), False, 'from sparsetorch.solver import Model, Solver\n'), ((7623, 7651), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (7629, 7651), False, 'from sparsetorch.solver import Model, Solver\n'), ((7729, 7769), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'step_dD', '"""Example 5"""'], {}), "(model, step_dD, 'Example 5')\n", (7740, 7769), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((8669, 8695), 
'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (8674, 8695), False, 'from sparsetorch.solver import Model, Solver\n'), ((8730, 8758), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (8736, 8758), False, 'from sparsetorch.solver import Model, Solver\n'), ((8836, 8876), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'step_dD', '"""Example 6"""'], {}), "(model, step_dD, 'Example 6')\n", (8847, 8876), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((9800, 9826), 'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (9805, 9826), False, 'from sparsetorch.solver import Model, Solver\n'), ((9861, 9889), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (9867, 9889), False, 'from sparsetorch.solver import Model, Solver\n'), ((9987, 10084), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'g_dD', '"""Example 7"""'], {'x_min': '(0)', 'x_max': '(6)', 'y_min': '(0)', 'y_max': '(6)', 'steps': '(2 * eval_num)'}), "(model, g_dD, 'Example 7', x_min=0, x_max=6, y_min=0, y_max=6,\n steps=2 * eval_num)\n", (9998, 10084), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((11097, 11123), 'sparsetorch.solver.Model', 'Model', (['bf_dD', 'bf_dD.bf_num'], {}), '(bf_dD, bf_dD.bf_num)\n', (11102, 11123), False, 'from sparsetorch.solver import Model, Solver\n'), ((11158, 11186), 'sparsetorch.solver.Solver', 'Solver', (['model', 'input', 'target'], {}), '(model, input, target)\n', (11164, 11186), False, 'from sparsetorch.solver import Model, Solver\n'), ((11284, 11381), 'sparsetorch.plotter.plot_3D_all', 'plot_3D_all', (['model', 'g_dD', '"""Example 8"""'], {'x_min': '(0)', 'x_max': '(6)', 'y_min': '(0)', 'y_max': '(6)', 'steps': '(2 * eval_num)'}), "(model, g_dD, 'Example 8', x_min=0, x_max=6, y_min=0, y_max=6,\n steps=2 * eval_num)\n", (11295, 11381), False, 'from sparsetorch.plotter import plot_3D_all\n'), ((1531, 1551), 'torch.round', 'torch.round', (['(2 * x_i)'], {}), '(2 * x_i)\n', (1542, 1551), False, 'import torch\n'), ((2046, 2060), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (2057, 2060), False, 'import torch\n'), ((2062, 2075), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (2072, 2075), False, 'import torch\n'), ((3360, 3374), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (3371, 3374), False, 'import torch\n'), ((3376, 3389), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (3386, 3389), False, 'import torch\n'), ((3422, 3460), 'torch.tensor', 'torch.tensor', (['[eval_num_x, eval_num_y]'], {}), '([eval_num_x, eval_num_y])\n', (3434, 3460), False, 'import torch\n'), ((4701, 4715), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (4712, 4715), False, 'import torch\n'), ((4717, 4730), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (4727, 4730), False, 'import torch\n'), ((5868, 5882), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (5879, 5882), False, 'import torch\n'), ((5884, 5897), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (5894, 5897), False, 'import torch\n'), ((7078, 7092), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (7089, 7092), False, 'import torch\n'), ((7094, 7107), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (7104, 7107), False, 'import torch\n'), ((8301, 8315), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (8312, 8315), False, 'import torch\n'), ((8317, 8330), 'torch.ones', 
'torch.ones', (['(2)'], {}), '(2)\n', (8327, 8330), False, 'import torch\n'), ((9417, 9431), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (9428, 9431), False, 'import torch\n'), ((10687, 10701), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (10698, 10701), False, 'import torch\n'), ((762, 782), 'torch.prod', 'torch.prod', (['x'], {'dim': '(0)'}), '(x, dim=0)\n', (772, 782), False, 'import torch\n'), ((1177, 1197), 'torch.prod', 'torch.prod', (['x'], {'dim': '(0)'}), '(x, dim=0)\n', (1187, 1197), False, 'import torch\n'), ((2108, 2121), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (2118, 2121), False, 'import torch\n'), ((4763, 4776), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (4773, 4776), False, 'import torch\n'), ((5930, 5943), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (5940, 5943), False, 'import torch\n'), ((7140, 7153), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (7150, 7153), False, 'import torch\n'), ((8363, 8376), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (8373, 8376), False, 'import torch\n'), ((9437, 9450), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (9447, 9450), False, 'import torch\n'), ((9483, 9496), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (9493, 9496), False, 'import torch\n'), ((10707, 10720), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (10717, 10720), False, 'import torch\n'), ((10753, 10766), 'torch.ones', 'torch.ones', (['(2)'], {}), '(2)\n', (10763, 10766), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
import gevent
from gevent import Greenlet
def foo(message, n):
gevent.sleep(n)
print(message)
thread1 = Greenlet.spawn(foo, "Hello", 1)  # spawn via the Greenlet class
thread2 = gevent.spawn(foo, "I live!", 2)  # module-level shortcut
thread3 = gevent.spawn(lambda x: (x + 1), 2)  # any callable works
threads = [thread1, thread2, thread3]
gevent.joinall(threads)
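# Follow-up sketch: once joinall has waited, a Greenlet's return value
# can be read back with .get() (part of gevent's documented API).
print(thread3.get())  # -> 3, the result of (lambda x: x + 1)(2)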
|
[
"gevent.Greenlet.spawn",
"gevent.spawn",
"gevent.sleep",
"gevent.joinall"
] |
[((140, 171), 'gevent.Greenlet.spawn', 'Greenlet.spawn', (['foo', '"""Hello"""', '(1)'], {}), "(foo, 'Hello', 1)\n", (154, 171), False, 'from gevent import Greenlet\n'), ((183, 214), 'gevent.spawn', 'gevent.spawn', (['foo', '"""I live!"""', '(2)'], {}), "(foo, 'I live!', 2)\n", (195, 214), False, 'import gevent\n'), ((226, 258), 'gevent.spawn', 'gevent.spawn', (['(lambda x: x + 1)', '(2)'], {}), '(lambda x: x + 1, 2)\n', (238, 258), False, 'import gevent\n'), ((301, 324), 'gevent.joinall', 'gevent.joinall', (['threads'], {}), '(threads)\n', (315, 324), False, 'import gevent\n'), ((93, 108), 'gevent.sleep', 'gevent.sleep', (['n'], {}), '(n)\n', (105, 108), False, 'import gevent\n')]
|
import hashlib
import json
import os
import logging
import re
logger = logging.getLogger(__name__)
def get_hash(backup_name, backup_sources):
sha256_hash = hashlib.sha256()
for source in backup_sources:
source_path = source['Path']
exclusions = '(?:%s)' % '|'.join(source['Exclude'])
try:
for root, dirs, files in os.walk(source_path):
for name in files:
filepath = os.path.join(root, name)
if not re.search(exclusions, filepath):
logger.debug(
f'Calculating checksum: {json.dumps({"File": filepath})}')
# hash the whole file in fixed-size chunks; a single
# read(8192) would only cover the first 8 KiB
with open(filepath, 'rb') as file_object:
for chunk in iter(lambda: file_object.read(8192), b''):
sha256_hash.update(chunk)
except IOError as error:
if 'Permission denied' in error.strerror:
logger.error(
f'Failed to calculate checksum. Permission Denied: {json.dumps({"Backup": backup_name, "Source": source})}')
raise error
return sha256_hash.hexdigest()
class CalculateChecksum():
def __init__(self, backup_config):
self.name = backup_config['Name']
self.sources = backup_config['Sources']
logger.debug(
f'Attempting to calculate checksum of backup: {json.dumps({"Backup": self.name})}')
self.hash = get_hash(self.name, self.sources)
logger.debug(
f'Successfully calculated checksum of backup: {json.dumps({"Backup": self.name, "Checksum": self.hash})}')
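if __name__ == '__main__':
    # Usage sketch with a hypothetical config; the keys mirror exactly
    # what the code above reads ('Name', 'Sources', and per-source
    # 'Path'/'Exclude' regex fragments).
    logging.basicConfig(level=logging.DEBUG)
    demo = CalculateChecksum({
        'Name': 'demo',
        'Sources': [{'Path': '.', 'Exclude': [r'\.git']}],
    })
    print(demo.hash)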
|
[
"os.walk",
"json.dumps",
"hashlib.sha256",
"re.search",
"os.path.join",
"logging.getLogger"
] |
[((72, 99), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (89, 99), False, 'import logging\n'), ((163, 179), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (177, 179), False, 'import hashlib\n'), ((365, 385), 'os.walk', 'os.walk', (['source_path'], {}), '(source_path)\n', (372, 385), False, 'import os\n'), ((454, 478), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (466, 478), False, 'import os\n'), ((1451, 1484), 'json.dumps', 'json.dumps', (["{'Backup': self.name}"], {}), "({'Backup': self.name})\n", (1461, 1484), False, 'import json\n'), ((1625, 1681), 'json.dumps', 'json.dumps', (["{'Backup': self.name, 'Checksum': self.hash}"], {}), "({'Backup': self.name, 'Checksum': self.hash})\n", (1635, 1681), False, 'import json\n'), ((507, 538), 're.search', 're.search', (['exclusions', 'filepath'], {}), '(exclusions, filepath)\n', (516, 538), False, 'import re\n'), ((1093, 1146), 'json.dumps', 'json.dumps', (["{'Backup': backup_name, 'Source': source}"], {}), "({'Backup': backup_name, 'Source': source})\n", (1103, 1146), False, 'import json\n'), ((631, 661), 'json.dumps', 'json.dumps', (["{'File': filepath}"], {}), "({'File': filepath})\n", (641, 661), False, 'import json\n')]
|
#### ====================================================================================================================== ####
############# IMPORTS #############
#### ====================================================================================================================== ####
import csv
#### ====================================================================================================================== ####
############# CSV_LOADER #############
#### ====================================================================================================================== ####
def csv_loader(filename, readall=False):
''' Helper function that reads in a CSV file. The optional readall flag
keeps the header row in the output.
Input: filename (string), readall (bool, optional; default False)
Output: list of rows, each row a list of field strings
'''
returnList = []
with open(filename) as csvfile:
for row in csv.reader(csvfile):
returnList.append(row)
if readall:
return returnList
else:
return returnList[1:]
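if __name__ == '__main__':
    # Tiny self-contained demo: write a CSV with a header row, then load
    # it both with and without that row.
    with open('demo.csv', 'w', newline='') as f:
        f.write('name,score\nalice,10\nbob,7\n')
    print(csv_loader('demo.csv'))                # [['alice', '10'], ['bob', '7']]
    print(csv_loader('demo.csv', readall=True))  # header row included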
|
[
"csv.reader"
] |
[((1097, 1116), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (1107, 1116), False, 'import csv\n')]
|
import numpy as np
from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE
from pyquil.noise_gates import _get_qvm_noise_supported_gates, THETA
def test_get_qvm_noise_supported_gates_from_compiler_isa(compiler_isa):
gates = _get_qvm_noise_supported_gates(compiler_isa)
for q in [0, 1, 2]:
for g in [
I(q),
RX(np.pi / 2, q),
RX(-np.pi / 2, q),
RX(np.pi, q),
RX(-np.pi, q),
RZ(THETA, q),
]:
assert g in gates
assert CZ(0, 1) in gates
assert CZ(1, 0) in gates
assert ISWAP(1, 2) in gates
assert ISWAP(2, 1) in gates
assert CPHASE(THETA, 2, 0) in gates
assert CPHASE(THETA, 0, 2) in gates
ASPEN_8_QUBITS_NO_RX = {8, 9, 10, 18, 19, 28, 29, 31}
ASPEN_8_QUBITS_NO_RZ = {8, 9, 10, 18, 19, 28, 29, 31}
ASPEN_8_EDGES_NO_CZ = {(0, 1), (10, 11), (1, 2), (21, 22), (17, 10), (12, 25)}
def test_get_qvm_noise_supported_gates_from_aspen8_isa(qcs_aspen8_quantum_processor, noise_model_dict):
gates = _get_qvm_noise_supported_gates(qcs_aspen8_quantum_processor.to_compiler_isa())
for q in range(len(qcs_aspen8_quantum_processor._isa.architecture.nodes)):
if q not in ASPEN_8_QUBITS_NO_RX:
for g in [
RX(np.pi / 2, q),
RX(-np.pi / 2, q),
RX(np.pi, q),
RX(-np.pi, q),
]:
assert g in gates
if q not in ASPEN_8_QUBITS_NO_RZ:
assert RZ(THETA, q) in gates
for edge in qcs_aspen8_quantum_processor._isa.architecture.edges:
if (
edge.node_ids[0],
edge.node_ids[1],
) in ASPEN_8_EDGES_NO_CZ:
continue
assert CZ(edge.node_ids[0], edge.node_ids[1]) in gates
assert CZ(edge.node_ids[1], edge.node_ids[0]) in gates
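def demo_gate_equality():
    # The membership assertions above work because pyquil gate objects
    # compare structurally, so freshly constructed gates match the ones
    # stored in the supported-gates list:
    assert CZ(0, 1) == CZ(0, 1)
    assert RX(np.pi, 0) != RX(np.pi / 2, 0)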
|
[
"pyquil.gates.I",
"pyquil.gates.CPHASE",
"pyquil.gates.CZ",
"pyquil.gates.ISWAP",
"pyquil.gates.RX",
"pyquil.noise_gates._get_qvm_noise_supported_gates",
"pyquil.gates.RZ"
] |
[((228, 272), 'pyquil.noise_gates._get_qvm_noise_supported_gates', '_get_qvm_noise_supported_gates', (['compiler_isa'], {}), '(compiler_isa)\n', (258, 272), False, 'from pyquil.noise_gates import _get_qvm_noise_supported_gates, THETA\n'), ((527, 535), 'pyquil.gates.CZ', 'CZ', (['(0)', '(1)'], {}), '(0, 1)\n', (529, 535), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((556, 564), 'pyquil.gates.CZ', 'CZ', (['(1)', '(0)'], {}), '(1, 0)\n', (558, 564), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((585, 596), 'pyquil.gates.ISWAP', 'ISWAP', (['(1)', '(2)'], {}), '(1, 2)\n', (590, 596), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((617, 628), 'pyquil.gates.ISWAP', 'ISWAP', (['(2)', '(1)'], {}), '(2, 1)\n', (622, 628), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((649, 668), 'pyquil.gates.CPHASE', 'CPHASE', (['THETA', '(2)', '(0)'], {}), '(THETA, 2, 0)\n', (655, 668), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((689, 708), 'pyquil.gates.CPHASE', 'CPHASE', (['THETA', '(0)', '(2)'], {}), '(THETA, 0, 2)\n', (695, 708), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((328, 332), 'pyquil.gates.I', 'I', (['q'], {}), '(q)\n', (329, 332), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((346, 362), 'pyquil.gates.RX', 'RX', (['(np.pi / 2)', 'q'], {}), '(np.pi / 2, q)\n', (348, 362), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((376, 393), 'pyquil.gates.RX', 'RX', (['(-np.pi / 2)', 'q'], {}), '(-np.pi / 2, q)\n', (378, 393), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((407, 419), 'pyquil.gates.RX', 'RX', (['np.pi', 'q'], {}), '(np.pi, q)\n', (409, 419), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((433, 446), 'pyquil.gates.RX', 'RX', (['(-np.pi)', 'q'], {}), '(-np.pi, q)\n', (435, 446), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((460, 472), 'pyquil.gates.RZ', 'RZ', (['THETA', 'q'], {}), '(THETA, q)\n', (462, 472), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((1725, 1763), 'pyquil.gates.CZ', 'CZ', (['edge.node_ids[0]', 'edge.node_ids[1]'], {}), '(edge.node_ids[0], edge.node_ids[1])\n', (1727, 1763), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((1788, 1826), 'pyquil.gates.CZ', 'CZ', (['edge.node_ids[1]', 'edge.node_ids[0]'], {}), '(edge.node_ids[1], edge.node_ids[0])\n', (1790, 1826), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((1265, 1281), 'pyquil.gates.RX', 'RX', (['(np.pi / 2)', 'q'], {}), '(np.pi / 2, q)\n', (1267, 1281), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((1299, 1316), 'pyquil.gates.RX', 'RX', (['(-np.pi / 2)', 'q'], {}), '(-np.pi / 2, q)\n', (1301, 1316), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((1334, 1346), 'pyquil.gates.RX', 'RX', (['np.pi', 'q'], {}), '(np.pi, q)\n', (1336, 1346), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((1364, 1377), 'pyquil.gates.RX', 'RX', (['(-np.pi)', 'q'], {}), '(-np.pi, q)\n', (1366, 1377), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n'), ((1489, 1501), 'pyquil.gates.RZ', 'RZ', (['THETA', 'q'], {}), '(THETA, q)\n', (1491, 1501), False, 'from pyquil.gates import RZ, RX, I, CZ, ISWAP, CPHASE\n')]
|
#!/usr/bin/python3
# coding: UTF-8
#-
# Copyright © 2018, 2020 mirabilos <<EMAIL>>
#
# Provided that these terms and disclaimer and all copyright notices
# are retained or reproduced in an accompanying document, permission
# is granted to deal in this work without restriction, including un‐
# limited rights to use, publicly perform, distribute, sell, modify,
# merge, give away, or sublicence.
#
# This work is provided “AS IS” and WITHOUT WARRANTY of any kind, to
# the utmost extent permitted by applicable law, neither express nor
# implied; without malicious intent or gross negligence. In no event
# may a licensor, author or contributor be held liable for indirect,
# direct, other damage, loss, or other issues arising in any way out
# of dealing in the work, even if advised of the possibility of such
# damage or existence of a defect, except proven that it results out
# of said person’s immediate fault when using the work as intended.
#-
# python3 riffedit.py -d src.sf2 # dump info only
# python3 riffedit.py -i src.sf2 # identify metadata (see below)
# python3 riffedit.py src.sf2 dst.sf2 { [-az] 'chnk' 'content' } ...
# where -a means to align with NULs and -z to NUL-terminate
# chnk means the RIFF chunk, LIST<chnk>/chnk is also supported
# Chunks currently need to exist in the input, insertion and deletion
# is missing for some later version to add.
# The comment field is limited to 65535 ASCII bytes, the others to 255.
#
# Metadata from a soundfont only includes chunks useful in copyright
# tracking. It outputs the INFO chunks, using input ordering, in the
# format “chunk_name \xFE chunk_body \xFF”, where both name and body
# (properly UTF-8 encoded) have all characters not valid for XML re‐
# moved or replaced with the OPTU-16 value or U+FFFD.
#
# You may also use this under the same terms as the Fluid (R3) soundfont.
from io import SEEK_SET, SEEK_CUR
import os
import struct
import sys
assert sys.version_info[0] >= 3
class RIFFChunk(object):
def __init__(self, parent):
self.parent = parent
self.file = parent
while isinstance(self.file, RIFFChunk):
self.file = self.file.parent
cn = self.file.read(4)
cs = self.file.read(4)
ct = None
cf = cn
if (len(cn) != 4) or (len(cs) != 4):
raise EOFError
co = self.file.tell()
try:
cs = struct.unpack_from('<L', cs)[0]
except struct.error:
raise EOFError
if cn in (b'RIFF', b'LIST'):
ct = self.file.read(4)
if len(ct) != 4:
raise EOFError
cf = cn + b'<' + ct + b'>'
self.chunkname = cn
self.chunksize = cs
self.chunk_pad = cs & 1
self.container = ct
self.children = []
self.chunkfmt = cf
self.data_ofs = co
self.data_mem = None
self.justpast = self.data_ofs + self.chunksize + self.chunk_pad
if isinstance(self.parent, RIFFChunk) and \
self.justpast > self.parent.justpast:
raise IndexError('End of this %s chunk %d > end of parent %s chunk %d' % \
(self.chunkfmt, self.justpast, self.parent.chunkfmt, self.parent.justpast))
if self.container is not None:
while True:
try:
child = RIFFChunk(self)
except EOFError:
break
self.children.append(child)
if child.skip_past():
break
def __str__(self):
s = '<RIFFChunk(%s)' % self.chunkfmt
if self.container is not None:
q = '['
for child in self.children:
s += q + str(child)
q = ', '
s += ']'
return s + '>'
def skip_past(self):
self.file.seek(self.justpast, SEEK_SET)
return isinstance(self.parent, RIFFChunk) and \
self.justpast == self.parent.justpast
def __getitem__(self, key):
if self.container is None:
raise IndexError('Chunk %s is not of a container type' % self.chunkname)
for child in self.children:
if child.chunkfmt == key:
return child
raise IndexError('Chunk %s does not have a child %s' % (self.chunkname, key))
def print(self):
if self.container is not None:
raise IndexError('Chunk %s is of a container type' % self.chunkname)
if self.data_mem is not None:
return self.data_mem
self.file.seek(self.data_ofs, SEEK_SET)
s = self.file.read(self.chunksize)
if len(s) != self.chunksize:
raise IOError('Could not read %d data bytes (got %d)' % (self.chunksize, len(s)))
return s
def write(self, file):
if not isinstance(self.chunkname, bytes):
raise ValueError('Chunk name %s is not of type bytes' % self.chunkname)
if len(self.chunkname) != 4:
raise ValueError('Chunk name %s is not of length 4')
if file.write(self.chunkname + struct.pack('<L', self.chunksize)) != 8:
raise IOError('Could not write header bytes to destination file at chunk %s' % \
self.chunkfmt)
if self.container is not None:
cld = file.tell()
if not isinstance(self.container, bytes):
raise ValueError('Container type %s is not of type bytes' % self.container)
if len(self.container) != 4:
raise ValueError('Container type %s is not of length 4')
if file.write(self.container) != 4:
raise IOError('Could not write container bytes to destination file at chunk %s' % \
self.chunkfmt)
for child in self.children:
child.write(file)
cld = file.tell() - cld
if cld != self.chunksize:
raise ValueError('Children wrote %d bytes (expected %d) file at chunk %s' % \
(cld, self.chunksize, self.chunkfmt))
else:
if self.data_mem is not None:
if file.write(self.data_mem) != self.chunksize:
raise IOError('Could not write %d data bytes to destination file at chunk %s' % \
(self.chunksize, self.chunkfmt))
else:
self.file.seek(self.data_ofs, SEEK_SET)
total = self.chunksize
while total > 0:
n = 65536
if n > total:
n = total
buf = self.file.read(n)
n = len(buf)
total -= n
if file.write(buf) != n:
raise IOError('Could not write %d data bytes to destination file at chunk %s' % \
(n, self.chunkfmt))
if self.chunk_pad > 0:
file.write(b'\0')
if file.tell() & 1:
raise ValueError('Misaligned file after chunk %s' % self.chunkfmt)
def set_length(self, newlen):
old = self.chunksize + self.chunk_pad
self.chunksize = newlen
self.chunk_pad = self.chunksize & 1
new = self.chunksize + self.chunk_pad
if isinstance(self.parent, RIFFChunk):
self.parent.adjust_length(new - old)
def set_content(self, content, nul_pad=False):
if self.container is not None:
raise ValueError('Cannot set content of container type %s' % self.chunkfmt)
if isinstance(content, str):
content = content.encode('UTF-8')
if not isinstance(content, bytes):
raise ValueError('New content is not of type bytes')
if nul_pad and (len(content) & 1):
content += b'\0'
self.data_mem = content
self.set_length(len(content))
def adjust_length(self, delta):
self.set_length(self.chunksize + delta)
class RIFFFile(RIFFChunk):
def __init__(self, file):
self.file = file
self.container = True
self.children = []
child = None
while True:
try:
child = RIFFChunk(self.file)
except EOFError:
break
self.children.append(child)
if child is None:
raise IndexError('No RIFF chunks found')
self.justpast = child.justpast
def __str__(self):
s = '<RIFFFile'
q = '['
for child in self.children:
s += q + str(child)
q = ', '
return s + ']>'
def __getitem__(self, key):
return self.children[key]
def write(self, file):
for child in self.children:
child.write(file)
def dumpriff(container, level=0, isinfo=False):
indent = ' ' * (2 * level)
print(indent + 'BEGIN level=%d' % level)
for chunk in container.children:
#print(indent + ' CHUNK %s of size %d, data at %d, next at %d' % (chunk.chunkfmt, chunk.chunksize, chunk.data_ofs, chunk.justpast))
if isinfo:
print(indent + ' CHUNK %s(%d): %s' % (chunk.chunkfmt, chunk.chunksize, chunk.print()))
else:
print(indent + ' CHUNK %s of size %d' % (chunk.chunkfmt, chunk.chunksize))
if chunk.container is not None:
dumpriff(chunk, level+1, chunk.chunkfmt == b'LIST<INFO>')
print(indent + 'END level=%d' % level)
if sys.argv[1] == '-i':
encode_table = {}
# bad characters in XML
for i in range(0, 32):
if i not in (0x09, 0x0A, 0x0D):
encode_table[i] = None
encode_table[0x7F] = 0xFFFD
for i in range(0x80, 0xA0):
encode_table[i] = 0xEF00 + i
for i in range(0xD800, 0xE000):
encode_table[i] = 0xFFFD
for i in range(0, 0x110000, 0x10000):
encode_table[i + 0xFFFE] = 0xFFFD
encode_table[i + 0xFFFF] = 0xFFFD
for i in range(0xFDD0, 0xFDF0):
encode_table[i] = 0xFFFD
# surrogateescape to OPTU-16
for i in range(128, 256):
encode_table[0xDC00 + i] = 0xEF00 + i
ident_encode_table = str.maketrans(encode_table)
del encode_table
def ident_encode(s):
return s.rstrip(b'\x00').\
decode(encoding='utf-8', errors='surrogateescape').\
translate(ident_encode_table).\
encode(encoding='utf-8', errors='replace')
if sys.argv[2] == '-':
f = sys.stdin.buffer
else:
f = open(sys.argv[2], 'rb')
riff = RIFFFile(f)
for chunk in riff[0][b'LIST<INFO>'].children:
if chunk.chunkname not in (b'ifil', b'isng', b'IPRD', b'ISFT'):
for x in (ident_encode(chunk.chunkname), b'\xFE',
ident_encode(chunk.print()), b'\xFF'):
sys.stdout.buffer.write(x)
sys.exit(0)
print('START')
if sys.argv[1] == '-d':
with open(sys.argv[2], 'rb') as f:
riff = RIFFFile(f)
dumpriff(riff)
else:
with open(sys.argv[1], 'rb') as f, open(sys.argv[2], 'wb', buffering=65536) as dst:
riff = RIFFFile(f)
dumpriff(riff)
i = 3
_flags = { '-a': 1, '-z': 2, '-az': 3 }
while i < len(sys.argv):
flags = 0
if sys.argv[i] in _flags:
flags = _flags[sys.argv[i]]
i += 1
if i >= len(sys.argv):
break
chunks = sys.argv[i].split('/')
if chunks[0].isnumeric():
chnk = riff
else:
chnk = riff[0]
for cur in chunks:
# numeric path components index children by position, names by chunk id
chnk = chnk[int(cur)] if cur.isnumeric() else chnk[os.fsencode(cur)]
val = os.fsencode(sys.argv[i + 1])
if flags & 2:
val += b'\0'
chnk.set_content(val, bool(flags & 1))
i += 2
print("=> after processing:")
dumpriff(riff)
riff.write(dst)
print('OUT')
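def _demo_parse_header(raw):
    # Standalone sketch of the header layout RIFFChunk consumes: a 4-byte
    # chunk name followed by a little-endian uint32 size (format '<L').
    name = raw[:4]
    size = struct.unpack_from('<L', raw, 4)[0]
    return name, size
# _demo_parse_header(b'RIFF$\x00\x00\x00WAVE') -> (b'RIFF', 36)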
|
[
"struct.pack",
"sys.stdout.buffer.write",
"os.fsencode",
"sys.exit",
"struct.unpack_from"
] |
[((10761, 10772), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10769, 10772), False, 'import sys\n'), ((11587, 11615), 'os.fsencode', 'os.fsencode', (['sys.argv[i + 1]'], {}), '(sys.argv[i + 1])\n', (11598, 11615), False, 'import os\n'), ((2393, 2421), 'struct.unpack_from', 'struct.unpack_from', (['"""<L"""', 'cs'], {}), "('<L', cs)\n", (2411, 2421), False, 'import struct\n'), ((10730, 10756), 'sys.stdout.buffer.write', 'sys.stdout.buffer.write', (['x'], {}), '(x)\n', (10753, 10756), False, 'import sys\n'), ((5073, 5106), 'struct.pack', 'struct.pack', (['"""<L"""', 'self.chunksize'], {}), "('<L', self.chunksize)\n", (5084, 5106), False, 'import struct\n'), ((11551, 11567), 'os.fsencode', 'os.fsencode', (['cur'], {}), '(cur)\n', (11562, 11567), False, 'import os\n')]
|
import sys
import gym
import numpy as np
import gym.spaces
import math
import pandas as pd
df = pd.read_csv('./logs.csv', sep=',')
df = df.sample(frac=1)
def getData(line, keyNum):
if keyNum == 0: # vec
vec = str(df.iloc[line, 0])  # vec column stores '9' followed by 11 binary digits
v = np.zeros(11, dtype=np.float32)
for i in range(11):
v[i] = float(vec[i+1])
return v
else: # where, angle, power, reward
if keyNum==4:
ans=df.iloc[line+1, keyNum]
else:
ans = df.iloc[line, keyNum]
return ans
class MyEnv(gym.core.Env):
def __init__(self):
self.board = np.zeros(11, dtype=np.float32)
self.action_space = gym.spaces.Discrete(30)
low_bound = 0
high_bound = 1
self.observation_space = gym.spaces.Box(
low=low_bound, high=high_bound, shape=self.board.shape, dtype=np.float32)
self.time = 0
self.obs = getData(0,0)
def step(self, action):
st = "9"
for i in range(len(self.obs)):
st += str(int(self.obs[i]))
power = math.floor(action/6)
action = action-power*6
angle = math.floor(action/3)
action = action-angle*3
where = action
df2 = df[(df['vec']==st)&(df['where']==where)&(df['angle']==angle)&(df['power']==power)]
df2 = df2.sample(frac=1)
reward = float(df2.iloc[0,4])
self.time+=1
observation = getData(self.time, 0)
self.obs = observation
done = True
return observation, reward, done, {}
def reset(self):
self.obs = getData(self.time, 0)
return self.obs
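if __name__ == '__main__':
    # Smoke-test sketch (assumes ./logs.csv contains a row matching the
    # sampled action; otherwise the lookup in step() finds nothing).
    env = MyEnv()
    obs = env.reset()
    obs, reward, done, _ = env.step(env.action_space.sample())
    print(reward, done)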
|
[
"pandas.read_csv",
"gym.spaces.Discrete",
"math.floor",
"numpy.zeros",
"gym.spaces.Box"
] |
[((97, 131), 'pandas.read_csv', 'pd.read_csv', (['"""./logs.csv"""'], {'sep': '""","""'}), "('./logs.csv', sep=',')\n", (108, 131), True, 'import pandas as pd\n'), ((259, 289), 'numpy.zeros', 'np.zeros', (['(11)'], {'dtype': 'np.float32'}), '(11, dtype=np.float32)\n', (267, 289), True, 'import numpy as np\n'), ((620, 650), 'numpy.zeros', 'np.zeros', (['(11)'], {'dtype': 'np.float32'}), '(11, dtype=np.float32)\n', (628, 650), True, 'import numpy as np\n'), ((679, 702), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(30)'], {}), '(30)\n', (698, 702), False, 'import gym\n'), ((781, 873), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': 'low_bound', 'high': 'high_bound', 'shape': 'self.board.shape', 'dtype': 'np.float32'}), '(low=low_bound, high=high_bound, shape=self.board.shape,\n dtype=np.float32)\n', (795, 873), False, 'import gym\n'), ((1083, 1105), 'math.floor', 'math.floor', (['(action / 6)'], {}), '(action / 6)\n', (1093, 1105), False, 'import math\n'), ((1152, 1174), 'math.floor', 'math.floor', (['(action / 3)'], {}), '(action / 3)\n', (1162, 1174), False, 'import math\n')]
|
import io
import re
from glob import glob
from os.path import basename, dirname, join, splitext
from setuptools import find_packages, setup
def read(*names, **kwargs):
with io.open(
join(dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8')
) as fh:
return fh.read()
setup(
name='cloudstorage',
version='0.10.0',
license='MIT',
description='Unified cloud storage API for storage services.',
long_description='%s\n%s' % (
re.compile('^.. start-badges.*^.. end-badges', re.M | re.S).sub(
'', read('README.rst')),
re.sub(':[a-z]+:`~?(.*?)`', r'``\1``', read('CHANGELOG.rst'))
),
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/scottwernervt/cloudstorage/',
packages=find_packages('src'),
package_dir={'': 'src'},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=' '.join([
'storage',
'amazon',
'aws',
's3',
'azure',
'rackspace',
'cloudfiles',
'google',
'cloudstorage',
'gcs',
'minio',
]),
install_requires=[
'inflection>=0.3.1', # MIT
'python-dateutil>=2.7.3', # Simplified BSD
'python-magic>=0.4.15', # MIT
# Python 3.4 needs backports
'typing;python_version<"3.5"', # PSF
'httpstatus35;python_version<"3.5"', # PSF
],
extras_require={
'amazon': [
'boto3>=1.8.00', # Apache 2.0
],
'google': [
'google-cloud-storage>=1.18.0', # Apache 2.0
'requests>=2.19.1', # Apache 2.0
],
'local': [
'filelock>=3.0.0', # Public Domain
'itsdangerous>=1.1.0', # BSD License
'xattr>=0.9.6', # MIT
],
'microsoft': [
'azure>=4.0.0', # MIT
],
'minio': [
'minio>=4.0.0', # Apache 2.0
],
'rackspace': [
'openstacksdk<=0.17.2', # Apache 2.0
'rackspacesdk>=0.7.5', # Apache 2.0
'requests>=2.19.1', # Apache 2.0
],
'docs': [
'sphinx', # BSD
'sphinx_rtd_theme', # MIT
'sphinx_autodoc_typehints', # MIT
'Pygments', # BSD
],
},
setup_requires=[
'pytest-runner', # MIT
],
tests_require=[
'flake8', # MIT
'pytest', # MIT
'prettyconf', # MIT
'requests>=2.19.1',
'tox', # MIT
],
test_suite='tests',
)
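def _list_py_modules(pattern='src/*.py'):
    # Illustrative helper (name is ours, not part of the package): it
    # mirrors the py_modules expression above, mapping 'src/foo.py' -> 'foo'.
    return [splitext(basename(p))[0] for p in glob(pattern)]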
|
[
"os.path.basename",
"os.path.dirname",
"glob.glob",
"setuptools.find_packages",
"re.compile"
] |
[((807, 827), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {}), "('src')\n", (820, 827), False, 'from setuptools import find_packages, setup\n'), ((206, 223), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (213, 223), False, 'from os.path import basename, dirname, join, splitext\n'), ((914, 930), 'glob.glob', 'glob', (['"""src/*.py"""'], {}), "('src/*.py')\n", (918, 930), False, 'from glob import glob\n'), ((883, 897), 'os.path.basename', 'basename', (['path'], {}), '(path)\n', (891, 897), False, 'from os.path import basename, dirname, join, splitext\n'), ((508, 567), 're.compile', 're.compile', (['"""^.. start-badges.*^.. end-badges"""', '(re.M | re.S)'], {}), "('^.. start-badges.*^.. end-badges', re.M | re.S)\n", (518, 567), False, 'import re\n')]
|
import time
import pyaudio
import wave
from google_speech import Speech
class Call_APP():
def __init__(self, appname):
self.chunk = 1024
self.appname = appname
self.f = wave.open(r"/home/kimsoohyun/00-Research/02-Graph/06-appexecute/하이빅스비_2.wav","rb")
self.p = pyaudio.PyAudio()
self.stream = self.p.open(format =self.p.get_format_from_width(self.f.getsampwidth()),
channels = self.f.getnchannels(),
rate = self.f.getframerate(),
output = True)
self.lang = 'ko'
self.sox_effects = ("speed", "1.1")
def call_bixby(self):
data = self.f.readframes(self.chunk)
while data:
self.stream.write(data)
data = self.f.readframes(self.chunk)
self.stream.stop_stream()
self.p.terminate()
def call_appname(self):
text = f'{self.appname}실행'  # Korean voice command: '<app name> launch'
speech = Speech(text, self.lang)
speech.play(self.sox_effects)
def exit_appname(self):
text = f'{self.appname}종료'  # Korean voice command: '<app name> quit'
speech = Speech(text, self.lang)
speech.play(self.sox_effects)
def start_main(self):
#self.call_bixby()
#time.sleep(0.5)
self.call_appname()
def end_main(self):
self.call_bixby()
time.sleep(0.5)
self.exit_appname()
def main(self, startend):
if startend == 'start':
self.start_main()
elif startend == "end":
self.end_main()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('--appname','-a',
type=str,
required=True,
help='input appname')
parser.add_argument('--startend','-s',
type=str,
required=True,
help='input start or end message')
args = parser.parse_args()
c = Call_APP(args.appname)
c.main(args.startend)
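# Invocation sketch (script and app names are hypothetical; the spoken
# strings above are Korean, glossed in the comments where they appear):
#   python call_app.py --appname YouTube --startend start
#   python call_app.py -a YouTube -s end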
|
[
"wave.open",
"argparse.ArgumentParser",
"google_speech.Speech",
"time.sleep",
"pyaudio.PyAudio"
] |
[((1599, 1660), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process some integers."""'}), "(description='Process some integers.')\n", (1622, 1660), False, 'import argparse\n'), ((198, 284), 'wave.open', 'wave.open', (['"""/home/kimsoohyun/00-Research/02-Graph/06-appexecute/하이빅스비_2.wav"""', '"""rb"""'], {}), "('/home/kimsoohyun/00-Research/02-Graph/06-appexecute/하이빅스비_2.wav',\n 'rb')\n", (207, 284), False, 'import wave\n'), ((298, 315), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (313, 315), False, 'import pyaudio\n'), ((970, 993), 'google_speech.Speech', 'Speech', (['text', 'self.lang'], {}), '(text, self.lang)\n', (976, 993), False, 'from google_speech import Speech\n'), ((1113, 1136), 'google_speech.Speech', 'Speech', (['text', 'self.lang'], {}), '(text, self.lang)\n', (1119, 1136), False, 'from google_speech import Speech\n'), ((1341, 1356), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1351, 1356), False, 'import time\n')]
|
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
#
# Texture a sphere.
#
# renderer and interactor
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# read the volume
reader = vtk.vtkJPEGReader()
reader.SetFileName(VTK_DATA_ROOT + "/Data/beach.jpg")
#---------------------------------------------------------
# Do the surface rendering
sphereSource = vtk.vtkSphereSource()
sphereSource.SetRadius(100)
textureSphere = vtk.vtkTextureMapToSphere()
textureSphere.SetInputConnection(sphereSource.GetOutputPort())
sphereStripper = vtk.vtkStripper()
sphereStripper.SetInputConnection(textureSphere.GetOutputPort())
sphereStripper.SetMaximumLength(5)
sphereMapper = vtk.vtkPolyDataMapper()
sphereMapper.SetInputConnection(sphereStripper.GetOutputPort())
sphereMapper.ScalarVisibilityOff()
sphereTexture = vtk.vtkTexture()
sphereTexture.SetInputConnection(reader.GetOutputPort())
sphereProperty = vtk.vtkProperty()
# sphereProperty.BackfaceCullingOn()
sphere = vtk.vtkActor()
sphere.SetMapper(sphereMapper)
sphere.SetTexture(sphereTexture)
sphere.SetProperty(sphereProperty)
#---------------------------------------------------------
ren.AddViewProp(sphere)
camera = ren.GetActiveCamera()
camera.SetFocalPoint(0, 0, 0)
camera.SetPosition(100, 400, -100)
camera.SetViewUp(0, 0, -1)
ren.ResetCameraClippingRange()
renWin.Render()
#---------------------------------------------------------
# test-related code
def TkCheckAbort(object_binding, event_name):
foo = renWin.GetEventPending()
if (foo != 0):
renWin.SetAbortRender(1)
iren.Initialize()
#iren.Start()
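# TkCheckAbort above is defined but never registered; hooking it up uses
# the standard VTK observer mechanism:
renWin.AddObserver("AbortCheckEvent", TkCheckAbort)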
|
[
"vtk.vtkRenderWindow",
"vtk.vtkRenderer",
"vtk.util.misc.vtkGetDataRoot",
"vtk.vtkProperty",
"vtk.vtkStripper",
"vtk.vtkActor",
"vtk.vtkTexture",
"vtk.vtkRenderWindowInteractor",
"vtk.vtkSphereSource",
"vtk.vtkTextureMapToSphere",
"vtk.vtkJPEGReader",
"vtk.vtkPolyDataMapper"
] |
[((119, 135), 'vtk.util.misc.vtkGetDataRoot', 'vtkGetDataRoot', ([], {}), '()\n', (133, 135), False, 'from vtk.util.misc import vtkGetDataRoot\n'), ((194, 211), 'vtk.vtkRenderer', 'vtk.vtkRenderer', ([], {}), '()\n', (209, 211), False, 'import vtk\n'), ((221, 242), 'vtk.vtkRenderWindow', 'vtk.vtkRenderWindow', ([], {}), '()\n', (240, 242), False, 'import vtk\n'), ((274, 305), 'vtk.vtkRenderWindowInteractor', 'vtk.vtkRenderWindowInteractor', ([], {}), '()\n', (303, 305), False, 'import vtk\n'), ((363, 382), 'vtk.vtkJPEGReader', 'vtk.vtkJPEGReader', ([], {}), '()\n', (380, 382), False, 'import vtk\n'), ((539, 560), 'vtk.vtkSphereSource', 'vtk.vtkSphereSource', ([], {}), '()\n', (558, 560), False, 'import vtk\n'), ((606, 633), 'vtk.vtkTextureMapToSphere', 'vtk.vtkTextureMapToSphere', ([], {}), '()\n', (631, 633), False, 'import vtk\n'), ((715, 732), 'vtk.vtkStripper', 'vtk.vtkStripper', ([], {}), '()\n', (730, 732), False, 'import vtk\n'), ((849, 872), 'vtk.vtkPolyDataMapper', 'vtk.vtkPolyDataMapper', ([], {}), '()\n', (870, 872), False, 'import vtk\n'), ((989, 1005), 'vtk.vtkTexture', 'vtk.vtkTexture', ([], {}), '()\n', (1003, 1005), False, 'import vtk\n'), ((1080, 1097), 'vtk.vtkProperty', 'vtk.vtkProperty', ([], {}), '()\n', (1095, 1097), False, 'import vtk\n'), ((1145, 1159), 'vtk.vtkActor', 'vtk.vtkActor', ([], {}), '()\n', (1157, 1159), False, 'import vtk\n')]
|
import unittest
import tempfile
import os
import pathlib
import shutil
from staroid import Staroid
def integration_test_ready():
return "STAROID_ACCESS_TOKEN" in os.environ and "STAROID_ACCOUNT" in os.environ
class TestStaroid(unittest.TestCase):
def test_initialize(self):
s = Staroid()
def test_read_config(self):
# unset env
at = None
ac = None
if "STAROID_ACCESS_TOKEN" in os.environ:
at = os.environ["STAROID_ACCESS_TOKEN"]
del os.environ["STAROID_ACCESS_TOKEN"]
if "STAROID_ACCOUNT" in os.environ:
ac = os.environ["STAROID_ACCOUNT"]
del os.environ["STAROID_ACCOUNT"]
# given
fp = tempfile.NamedTemporaryFile()
fp.write(b"access_token: abc\naccount: GITHUB/user1")
fp.flush()
# when
s = Staroid(config_path=fp.name)
# then
self.assertEqual("abc", s.get_access_token())
self.assertEqual("GITHUB/user1", s.get_account())
# restore env
if at is not None:
os.environ["STAROID_ACCESS_TOKEN"] = at
if ac is not None:
os.environ["STAROID_ACCOUNT"] = ac
def test_download_chisel(self):
# given
tmp_dir = tempfile.mkdtemp()
s = Staroid(cache_dir=tmp_dir)
# when
chisel_path = s.get_chisel_path()
# then
self.assertIsNotNone(chisel_path)
self.assertTrue(os.path.isfile(chisel_path))
# clean up
shutil.rmtree(pathlib.Path(tmp_dir))
@unittest.skipUnless(integration_test_ready(), "Integration test environment is not configured")
def test_read_default_account(self):
# given access_token is set but account is not set
ac = None
if "STAROID_ACCOUNT" in os.environ:
ac = os.environ["STAROID_ACCOUNT"]
del os.environ["STAROID_ACCOUNT"]
# when
s = Staroid()
# then
self.assertNotEqual(None, s.get_account())
# restore env
if ac is not None:
os.environ["STAROID_ACCOUNT"] = ac
|
[
"tempfile.NamedTemporaryFile",
"os.path.isfile",
"tempfile.mkdtemp",
"pathlib.Path",
"staroid.Staroid"
] |
[((297, 306), 'staroid.Staroid', 'Staroid', ([], {}), '()\n', (304, 306), False, 'from staroid import Staroid\n'), ((719, 748), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (746, 748), False, 'import tempfile\n'), ((858, 886), 'staroid.Staroid', 'Staroid', ([], {'config_path': 'fp.name'}), '(config_path=fp.name)\n', (865, 886), False, 'from staroid import Staroid\n'), ((1254, 1272), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1270, 1272), False, 'import tempfile\n'), ((1285, 1311), 'staroid.Staroid', 'Staroid', ([], {'cache_dir': 'tmp_dir'}), '(cache_dir=tmp_dir)\n', (1292, 1311), False, 'from staroid import Staroid\n'), ((1940, 1949), 'staroid.Staroid', 'Staroid', ([], {}), '()\n', (1947, 1949), False, 'from staroid import Staroid\n'), ((1452, 1479), 'os.path.isfile', 'os.path.isfile', (['chisel_path'], {}), '(chisel_path)\n', (1466, 1479), False, 'import os\n'), ((1531, 1552), 'pathlib.Path', 'pathlib.Path', (['tmp_dir'], {}), '(tmp_dir)\n', (1543, 1552), False, 'import pathlib\n')]
|
from scipy.stats import truncnorm
import numpy as np
import torch
def truncnorm_like(x):
size = [int(s) for s in x.shape]
eps = truncnorm.rvs(-3.001, 3.001, size=size) / 3.
return torch.from_numpy(eps)
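# Usage sketch: noise shaped like an existing tensor. Values fall in
# roughly [-1, 1] because rvs(-3.001, 3.001) is divided by 3, and the
# result is float64 (SciPy's default) rather than x's dtype.
x = torch.empty(4, 3)
eps = truncnorm_like(x)
assert eps.shape == (4, 3)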
|
[
"scipy.stats.truncnorm.rvs",
"torch.from_numpy"
] |
[((194, 215), 'torch.from_numpy', 'torch.from_numpy', (['eps'], {}), '(eps)\n', (210, 215), False, 'import torch\n'), ((138, 177), 'scipy.stats.truncnorm.rvs', 'truncnorm.rvs', (['(-3.001)', '(3.001)'], {'size': 'size'}), '(-3.001, 3.001, size=size)\n', (151, 177), False, 'from scipy.stats import truncnorm\n')]
|
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see
# <http://www.gnu.org/licenses/>.
"""
Some abstraction of changes. Useful for the classdiff and jardiff
modules.
:author: <NAME> <<EMAIL>>
:license: LGPL
"""
from functools import wraps
__all__ = (
"squash",
"collect_by_typename", "collect_by_type",
"iterate_by_type", "yield_sorted_by_type",
"Change", "Addition", "Removal",
"GenericChange", "SuperChange",
"SquashedChange", "SquashedAddition", "SquashedRemoval", )
def collect_by_typename(obj_sequence, cache=None):
"""
collects objects from obj_sequence and stores them into buckets by
type name. cache is an optional dict into which we collect the
results.
"""
if cache is None:
cache = {}
for val in obj_sequence:
key = type(val).__name__
bucket = cache.get(key, None)
if bucket is not None:
bucket.append(val)
else:
cache[key] = [val]
return cache
def collect_by_type(obj_sequence, cache=None):
"""
collects objects from obj_sequence and stores them into buckets by
type. cache is an optional dict into which we collect the results.
"""
if cache is None:
cache = {}
for val in obj_sequence:
key = type(val)
bucket = cache.get(key, None)
if bucket is not None:
bucket.append(val)
else:
cache[key] = [val]
return cache
def iterate_by_type(objs, typelist):
"""
collects a sequence of objs into buckets by type, then re-emits
objs from the buckets, sorting through the buckets in the order
specified by typelist. Any objects of a type not specified in
typelist will be emitted last in no guaranteed order (but still
grouped by type).
"""
cache = collect_by_type(objs)
for t in typelist:
for val in cache.pop(t, tuple()):
yield val
for tl in cache.values():
for val in tl:
yield val
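def _demo_iterate_by_type():
    # Quick illustration: listed types are emitted first, in order (ints,
    # then strs); the unlisted float comes last. Relies on the insertion
    # ordering of dicts (Python 3.7+).
    ordered = list(iterate_by_type([1, 'a', 2.0, 'b', 2], (int, str)))
    assert ordered == [1, 2, 'a', 'b', 2.0]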
def yield_sorted_by_type(*typelist):
"""
a useful decorator for the collect_impl method of SuperChange
subclasses. Caches the yielded changes, and re-emits them
collected by their type. The order of the types can be specified
by listing the types as arguments to this decorator. Unlisted
types will be yielded last in no guaranteed order.
Grouping happens by exact type match only. Inheritance is not
taken into consideration for grouping.
"""
def decorate(fun):
@wraps(fun)
def decorated(*args, **kwds):
return iterate_by_type(fun(*args, **kwds), typelist)
return decorated
return decorate
class Change(object):
"""
Base class for representing a specific change between two objects
"""
label = "Change"
def __init__(self, ldata, rdata):
self.ldata = ldata
self.rdata = rdata
self.description = None
self.changed = False
self.entry = None
def __del__(self):
self.clear()
def clear(self):
self.ldata = None
self.rdata = None
self.description = None
self.changed = False
self.entry = None
def check(self):
pass
def get_ldata(self):
return self.ldata
def get_rdata(self):
return self.rdata
def is_change(self):
return self.changed
def is_ignored(self, options):
"""
is this change ignorable, given parameters on the options
object.
"""
return False
def get_description(self):
return self.description or \
(self.label + (" changed" if self.is_change() else " unchanged"))
def collect(self, force=False):
return tuple()
def simplify(self, options=None):
"""
returns a dict describing a simple snapshot of this change, and
its children if any.
"""
simple = {
"class": type(self).__name__,
"is_change": self.is_change(),
"description": self.get_description(),
"label": self.label,
}
if options:
simple["is_ignored"] = self.is_ignored(options)
if isinstance(self, Addition):
simple["is_addition"] = True
if isinstance(self, Removal):
simple["is_removal"] = True
if self.entry:
simple["entry"] = self.entry
return simple
class Removal(Change):
"""
A type of change indicating that something was removed
"""
label = "Removal"
def is_change(self):
return True
class Addition(Change):
"""
A type of change indicating that something was added
"""
label = "Addition"
def is_change(self):
return True
class GenericChange(Change):
"""
A generalized test for a single change on two objects: a left and
a right. Subclasses should override the label and the check_impl
method at a minimum.
"""
label = "Generic Change"
def fn_data(self, side_data):
"""
Get the data to be used in fn_differ from side_data. By default,
this method is the identity
"""
return side_data
def fn_pretty(self, side_data):
"""
override to provide a way to show the pretty version of the left
or right data. Defaults to fn_data
"""
return self.fn_data(side_data)
def fn_pretty_desc(self, side_data):
"""
override to provide a way to describe the data left or right
data. Defaults to fn_pretty
"""
return self.fn_pretty(side_data)
def fn_differ(self, left_data, right_data):
"""
override to provide the check for whether get_ldata() and
get_rdata() differ. defaults to an inequality (!=) check
"""
return left_data != right_data
def get_ldata(self):
"""
returns fn_data of ldata
"""
return self.fn_data(self.ldata)
def get_rdata(self):
"""
returns fn_data of rdata
"""
return self.fn_data(self.rdata)
def pretty_ldata(self):
"""
returns fn_pretty of ldata (NOT the fn_pretty of get_ldata)
"""
return self.fn_pretty(self.ldata)
def pretty_rdata(self):
"""
returns fn_pretty of rdata (NOT the fn_pretty of get_rdata)
"""
return self.fn_pretty(self.rdata)
def pretty_ldata_desc(self):
"""
returns fn_pretty_desc of ldata (NOT the fn_pretty_desc of
get_ldata)
"""
return self.fn_pretty_desc(self.ldata)
def pretty_rdata_desc(self):
"""
returns fn_pretty_desc of rdata (NOT the fn_pretty_desc of
get_rdata)
"""
return self.fn_pretty_desc(self.rdata)
def check_impl(self):
"""
returns a tuple of (is_change,description) which are then stored
in self.changed and self.description
The default implementation will get the data from the left and
right sides by calling self.fn_data, then compare them via
self.fn_differ. If they do differ, a message will be
constructed using self.fn_pretty to create human-readable
versions of the data that changed.
"""
if self.fn_differ(self.get_ldata(), self.get_rdata()):
left = self.pretty_ldata_desc()
right = self.pretty_rdata_desc()
msg = "%s changed: %s to %s" % (self.label, left, right)
return True, msg
else:
return False, None
def check(self):
"""
if necessary, override check_impl to change the behaviour of
subclasses of GenericChange.
"""
self.changed, self.description = self.check_impl()
def simplify(self, options=None):
"""
provide a simple representation of this change as a dictionary
"""
# TODO: we might want to get rid of this method and just move
# it into the JSONEncoder in report.py
simple = super(GenericChange, self).simplify(options)
ld = self.pretty_ldata()
if ld is not None:
simple["old_data"] = ld
rd = self.pretty_rdata()
if rd is not None:
simple["new_data"] = rd
return simple
class SuperChange(GenericChange):
"""
A collection of changes.
For simplest use, override the change_types class field with a
list of Change subclasses. When the default collect_impl is called
from collect, an instance of each type will be created with the
same left and right data as the SuperChange instance was created
with. The check_impl (called from check) will iterate over the
instances and call their check method in-turn.
An instance of SuperChange is considered unchanged if all of its
sub-changes are also unchanged (or if there were no sub-changes).
An instance of SuperChange is considered ignored if it was a
change and all of its changed children were also ignored.
"""
label = "Super Change"
# override with change classes
change_types = tuple()
def __init__(self, ldata, rdata):
super(SuperChange, self).__init__(ldata, rdata)
self.changes = tuple()
def fn_pretty(self, c):
return None
def clear(self):
"""
clears all child changes and drops the reference to them
"""
super(SuperChange, self).clear()
for c in self.changes:
c.clear()
self.changes = tuple()
def collect_impl(self):
"""
        instantiates each of the entries in the overridden change_types
field with the left and right data
"""
ldata = self.get_ldata()
rdata = self.get_rdata()
for change_type in self.change_types:
yield change_type(ldata, rdata)
def collect(self, force=False):
"""
calls collect_impl and stores the results as the child changes of
this super-change. Returns a tuple of the data generated from
collect_impl. Caches the result rather than re-computing each
time, unless force is True
"""
if force or not self.changes:
self.changes = tuple(self.collect_impl())
return self.changes
def check_impl(self):
"""
        populates self.changes via self.collect(), then checks each member;
        returns (True, None) if any member shows as a change, and
        (False, None) otherwise
"""
c = False
for change in self.collect():
change.check()
c = c or change.is_change()
return c, None
def is_ignored(self, options):
"""
If we have changed children and all the children which are changes
are ignored, then we are ignored. Otherwise, we are not
ignored
"""
if not self.is_change():
return False
changes = self.collect()
if not changes:
return False
for change in changes:
if change.is_change() and not change.is_ignored(options):
return False
return True
def simplify(self, options=None):
"""
generate a simple dict representing this change data, and
collecting all of the sub-change instances (which are NOT
immediately simplified themselves)
"""
data = super(SuperChange, self).simplify(options)
show_ignored = False
show_unchanged = False
if options:
show_ignored = getattr(options, "show_ignored", show_ignored)
show_unchanged = getattr(options, "show_unchanged", show_unchanged)
# build a list of sub-changes honoring show-ignored and
# show-unchanged
subs = list()
for s in self.collect():
if s.is_change():
if show_ignored or not s.is_ignored(options):
subs.append(s)
elif show_unchanged:
subs.append(s)
data["children"] = subs
return data
def squash_children(self, options):
"""
reduces the memory footprint of this super-change by converting
all child changes into squashed changes
"""
oldsubs = self.collect()
self.changes = tuple(squash(c, options=options) for c in oldsubs)
for change in oldsubs:
change.clear()
class SquashedChange(Change):
"""
For when you want to keep just the overall data from a change,
including whether it was ignored, but want to discard the more
in-depth information.
"""
label = "SquashedChange"
def __init__(self, change, is_ignored=False):
super(SquashedChange, self).__init__(None, None)
self.label = change.label
self.description = change.get_description()
self.changed = change.is_change()
self.ignored = is_ignored
self.origclass = type(change)
self.entry = getattr(change, "entry", None)
def is_ignored(self, options):
return self.ignored
def is_change(self):
return self.changed
def simplify(self, options=None):
simple = super(SquashedChange, self).simplify(options)
simple["original_class"] = self.origclass.__name__
return simple
def clear(self):
pass
class SquashedRemoval(SquashedChange, Removal):
"""
Squashed change indicating something was removed
"""
label = "SquashedRemoval"
class SquashedAddition(SquashedChange, Addition):
"""
Squashed change indicating something was added
"""
label = "SquashedAddition"
def squash(change, is_ignored=False, options=None):
"""
squashes the in-depth information of a change to a simplified (and
less memory-intensive) form
"""
if options:
is_ignored = change.is_ignored(options)
result = None
if isinstance(change, Removal):
result = SquashedRemoval(change, is_ignored)
elif isinstance(change, Addition):
result = SquashedAddition(change, is_ignored)
else:
result = SquashedChange(change, is_ignored)
return result
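# A minimal usage sketch (the NameChange subclass and the dict payloads below
# are hypothetical, not part of this module):
#
#     class NameChange(GenericChange):
#         label = "Name"
#         def fn_data(self, side_data):
#             return side_data["name"]
#
#     change = NameChange({"name": "left"}, {"name": "right"})
#     change.check()              # sets change.changed and change.description
#     if change.is_change():
#         compact = squash(change)   # keeps the verdict, drops the details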
#
# The end.
|
[
"functools.wraps"
] |
[((3089, 3099), 'functools.wraps', 'wraps', (['fun'], {}), '(fun)\n', (3094, 3099), False, 'from functools import wraps\n')]
|
from django.db import models
from django_extensions.db import fields
from pixelpuncher.player.models import Player
class GameState(object):
PLAY = 'PLAY'
GAME_OVER = 'OVER'
CLOSED = 'CLSD'
STATE = (
('PLAY', 'Playing',),
('OVER', 'Game Over',),
('CLSD', 'Closed',),
)
class CardType(object):
BOMB = 'B'
ONE = '1'
TWO = '2'
FOUR = '4'
EIGHT = '8'
SPECIAL = 'S'
CARDS = (
('B', 'Bomb'),
('1', 'One'),
('2', 'Two'),
('4', 'Four'),
('8', 'Eight'),
('S', 'Special'),
)
class GameMessage(models.Model):
player = models.ForeignKey(Player)
message = models.TextField()
shown = models.BooleanField(default=False)
date_created = fields.CreationDateTimeField(editable=True)
def __unicode__(self):
return self.message
class MatchGame(models.Model):
player = models.ForeignKey(Player)
date_created = fields.CreationDateTimeField(editable=True)
state = models.CharField(max_length=4, choices=GameState.STATE, default='PLAY')
points = models.IntegerField(default=0)
multiplier = models.IntegerField(default=1)
class MatchCard(models.Model):
game = models.ForeignKey(MatchGame, related_name='cards')
card_type = models.CharField(max_length=1, choices=CardType.CARDS)
flipped = models.BooleanField(default=False)
position = models.IntegerField(default=0)
@property
def image(self):
if self.flipped:
return "images/cards/{}.png".format(self.card_type)
else:
return "images/cards/card_back.png"
class CheatCode(models.Model):
code = models.CharField(max_length=64, unique=True)
menu_text = models.CharField(max_length=32)
cheat_class = models.CharField(max_length=128)
description = models.TextField(null=True, blank=True)
date_created = fields.CreationDateTimeField(editable=True)
players = models.ManyToManyField(Player, related_name="cheatcodes", blank=True)
admin_only = models.BooleanField(default=False)
show_on_menu = models.BooleanField(default=True)
def __unicode__(self):
return self.code
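# A minimal usage sketch (assumes an existing Player instance named player;
# not part of the original module):
#
#     game = MatchGame.objects.create(player=player)
#     for position, (card_type, _label) in enumerate(CardType.CARDS):
#         MatchCard.objects.create(game=game, card_type=card_type, position=position)
#     unflipped = game.cards.filter(flipped=False).count()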
|
[
"django.db.models.TextField",
"django.db.models.ManyToManyField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.IntegerField",
"django_extensions.db.fields.CreationDateTimeField"
] |
[((643, 668), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Player'], {}), '(Player)\n', (660, 668), False, 'from django.db import models\n'), ((683, 701), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (699, 701), False, 'from django.db import models\n'), ((714, 748), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (733, 748), False, 'from django.db import models\n'), ((768, 811), 'django_extensions.db.fields.CreationDateTimeField', 'fields.CreationDateTimeField', ([], {'editable': '(True)'}), '(editable=True)\n', (796, 811), False, 'from django_extensions.db import fields\n'), ((914, 939), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Player'], {}), '(Player)\n', (931, 939), False, 'from django.db import models\n'), ((959, 1002), 'django_extensions.db.fields.CreationDateTimeField', 'fields.CreationDateTimeField', ([], {'editable': '(True)'}), '(editable=True)\n', (987, 1002), False, 'from django_extensions.db import fields\n'), ((1015, 1086), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(4)', 'choices': 'GameState.STATE', 'default': '"""PLAY"""'}), "(max_length=4, choices=GameState.STATE, default='PLAY')\n", (1031, 1086), False, 'from django.db import models\n'), ((1100, 1130), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1119, 1130), False, 'from django.db import models\n'), ((1148, 1178), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1167, 1178), False, 'from django.db import models\n'), ((1223, 1273), 'django.db.models.ForeignKey', 'models.ForeignKey', (['MatchGame'], {'related_name': '"""cards"""'}), "(MatchGame, related_name='cards')\n", (1240, 1273), False, 'from django.db import models\n'), ((1290, 1344), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'CardType.CARDS'}), '(max_length=1, choices=CardType.CARDS)\n', (1306, 1344), False, 'from django.db import models\n'), ((1359, 1393), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1378, 1393), False, 'from django.db import models\n'), ((1409, 1439), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1428, 1439), False, 'from django.db import models\n'), ((1671, 1715), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'unique': '(True)'}), '(max_length=64, unique=True)\n', (1687, 1715), False, 'from django.db import models\n'), ((1732, 1763), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)'}), '(max_length=32)\n', (1748, 1763), False, 'from django.db import models\n'), ((1782, 1814), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (1798, 1814), False, 'from django.db import models\n'), ((1833, 1872), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1849, 1872), False, 'from django.db import models\n'), ((1892, 1935), 'django_extensions.db.fields.CreationDateTimeField', 'fields.CreationDateTimeField', ([], {'editable': '(True)'}), '(editable=True)\n', (1920, 1935), False, 'from django_extensions.db import fields\n'), ((1950, 2019), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Player'], {'related_name': '"""cheatcodes"""', 'blank': '(True)'}), "(Player, related_name='cheatcodes', blank=True)\n", (1972, 2019), False, 'from django.db import models\n'), ((2037, 2071), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2056, 2071), False, 'from django.db import models\n'), ((2091, 2124), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2110, 2124), False, 'from django.db import models\n')]
|
# -*- coding: utf-8 -*-
"""
This module declares the AWS::LookoutEquipment::InferenceScheduler resource.
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
#--- Resource declaration ---
@attr.s
class InferenceScheduler(Resource):
"""
AWS Object Type = "AWS::LookoutEquipment::InferenceScheduler"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html
Property Document:
- ``rp_DataInputConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-datainputconfiguration
- ``rp_DataOutputConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-dataoutputconfiguration
- ``rp_DataUploadFrequency``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-datauploadfrequency
- ``rp_ModelName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-modelname
- ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-rolearn
- ``p_DataDelayOffsetInMinutes``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-datadelayoffsetinminutes
- ``p_InferenceSchedulerName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-inferenceschedulername
- ``p_ServerSideKmsKeyId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-serversidekmskeyid
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-tags
"""
AWS_OBJECT_TYPE = "AWS::LookoutEquipment::InferenceScheduler"
rp_DataInputConfiguration: dict = attr.ib(
default=None,
validator=attr.validators.instance_of(dict),
metadata={AttrMeta.PROPERTY_NAME: "DataInputConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-datainputconfiguration"""
rp_DataOutputConfiguration: dict = attr.ib(
default=None,
validator=attr.validators.instance_of(dict),
metadata={AttrMeta.PROPERTY_NAME: "DataOutputConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-dataoutputconfiguration"""
rp_DataUploadFrequency: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DataUploadFrequency"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-datauploadfrequency"""
rp_ModelName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ModelName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-modelname"""
rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-rolearn"""
p_DataDelayOffsetInMinutes: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "DataDelayOffsetInMinutes"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-datadelayoffsetinminutes"""
p_InferenceSchedulerName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "InferenceSchedulerName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-inferenceschedulername"""
p_ServerSideKmsKeyId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ServerSideKmsKeyId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-serversidekmskeyid"""
p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
default=None,
converter=Tag.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#cfn-lookoutequipment-inferencescheduler-tags"""
@property
def rv_InferenceSchedulerArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutequipment-inferencescheduler.html#aws-resource-lookoutequipment-inferencescheduler-return-values"""
return GetAtt(resource=self, attr_name="InferenceSchedulerArn")
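# A minimal construction sketch (all values are hypothetical, and it is
# assumed that Resource requires no additional constructor arguments):
#
#     scheduler = InferenceScheduler(
#         rp_DataInputConfiguration={"S3InputConfiguration": {"Bucket": "my-bucket"}},
#         rp_DataOutputConfiguration={"S3OutputConfiguration": {"Bucket": "my-bucket"}},
#         rp_DataUploadFrequency="PT5M",
#         rp_ModelName="my-model",
#         rp_RoleArn="arn:aws:iam::111122223333:role/my-lookout-role",
#     )
#     arn = scheduler.rv_InferenceSchedulerArn   # a GetAtt reference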
|
[
"attr.validators.instance_of"
] |
[((2555, 2588), 'attr.validators.instance_of', 'attr.validators.instance_of', (['dict'], {}), '(dict)\n', (2582, 2588), False, 'import attr\n'), ((2947, 2980), 'attr.validators.instance_of', 'attr.validators.instance_of', (['dict'], {}), '(dict)\n', (2974, 2980), False, 'import attr\n'), ((3355, 3412), 'attr.validators.instance_of', 'attr.validators.instance_of', (['TypeCheck.intrinsic_str_type'], {}), '(TypeCheck.intrinsic_str_type)\n', (3382, 3412), False, 'import attr\n'), ((3769, 3826), 'attr.validators.instance_of', 'attr.validators.instance_of', (['TypeCheck.intrinsic_str_type'], {}), '(TypeCheck.intrinsic_str_type)\n', (3796, 3826), False, 'import attr\n'), ((4161, 4218), 'attr.validators.instance_of', 'attr.validators.instance_of', (['TypeCheck.intrinsic_str_type'], {}), '(TypeCheck.intrinsic_str_type)\n', (4188, 4218), False, 'import attr\n'), ((4571, 4603), 'attr.validators.instance_of', 'attr.validators.instance_of', (['int'], {}), '(int)\n', (4598, 4603), False, 'import attr\n'), ((5008, 5065), 'attr.validators.instance_of', 'attr.validators.instance_of', (['TypeCheck.intrinsic_str_type'], {}), '(TypeCheck.intrinsic_str_type)\n', (5035, 5065), False, 'import attr\n'), ((5462, 5519), 'attr.validators.instance_of', 'attr.validators.instance_of', (['TypeCheck.intrinsic_str_type'], {}), '(TypeCheck.intrinsic_str_type)\n', (5489, 5519), False, 'import attr\n'), ((5988, 6020), 'attr.validators.instance_of', 'attr.validators.instance_of', (['Tag'], {}), '(Tag)\n', (6015, 6020), False, 'import attr\n'), ((6041, 6074), 'attr.validators.instance_of', 'attr.validators.instance_of', (['list'], {}), '(list)\n', (6068, 6074), False, 'import attr\n')]
|
#!/usr/local/bin/python3
# -*- coding: utf-8 -*-
"""
Firmware Password Manager: manages the firmware passwords of Macintosh computers.
"""
# Copyright (c) 2020 University of Utah Student Computing Labs. ################
# All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appears in all copies and
# that both that copyright notice and this permission notice appear
# in supporting documentation, and that the name of The University
# of Utah not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission. This software is supplied as is without expressed or
# implied warranties of any kind.
################################################################################
# firmware_password_manager.py #################################################
#
# A Python script to help Macintosh administrators manage the firmware passwords
# of their computers.
#
#
# 2.0.0 2015.11.05 Initial python rewrite. tjm
#
# 2.1.0 2016.03.07 "Now with spinning rims"
# bug fixes, obfuscation features,
# additional tools and examples. tjm
#
# 2.1.1 2016.03.16 slack identifier customization,
# logic clarifications. tjm
#
# 2.1.2 2016.03.16 cleaned up argparse. tjm
#
# 2.1.3 2016.04.04 remove obsolete flag logic. tjm
#
# 2.1.4 2017.10.23 using rm -P for secure delete,
# added additional alerting, additional pylint cleanup. tjm
#
# 2.5.0 2017.11.14 removed flags, uses configuration file,
# reintroduced setregproptool functionality,
# removed management_tools, ported to
# python3, added testing functionality. tjm
#
# 2.5.0 2020.01.23 2.5 actually finished and committed. tjm
#
#
#
# keyfile format:
#
# | comment:passwords <-- comments are ignored, except for new.
# | new:newpassword <-- the new password to be installed.
#
################################################################################
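#
# a hypothetical example keyfile:
#
# | previous:oldpassword    <-- a password that may currently be in place.
# | new:newpassword         <-- the password to be installed.
#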
# notes: #######################################################################
#
# ./firmware_password_manager_cfg_v2.5b3.py -c private.INI -t
#
#
# sudo pyinstaller --onefile firmware_password_manager.py
#
#
#
################################################################################
# external tool documentation ##################################################
#
# firmwarepasswd v 1.0
# Copyright (C) 2014 Apple Inc. All Rights Reserved.
#
#
# Usage: firmwarepasswd [OPTION]
#
# ? Show usage
# -h Show usage
# -setpasswd Set a firmware password. You will be prompted for passwords as needed.
# NOTE: if this is the first password set, and no mode is
# in place, the mode will automatically be set to "command"
# -setmode [mode] Set mode to:
# "command" - password required to change boot disk
# "full" - password required on all startups
# NOTE: cannot set a mode without having set a password
# -mode Prints out the current mode setting
# -check Prints out whether there is / isn't a firmware password is set
# -delete Delete current firmware password and mode setting
# -verify Verify current firmware password
# -unlockseed Generates a firmware password recovery key
# NOTE: Machine must be stable for this command to generate
# a valid seed. No pending changes that need a restart.
# NOTE: Seed is only valid until the next time a firmware password
# command occurs.
#
#
#
# setregproptool v 2.0 (9) Aug 24 2013
# Copyright (C) 2001-2010 Apple Inc.
# All Rights Reserved.
#
# Usage: setregproptool [-c] [-d [-o <old password>]] [[-m <mode> -p <password>] -o <old password>]
#
# -c Check whether password is enabled.
# Sets return status of 0 if set, 1 otherwise.
# -d Delete current password/mode.
# Requires current password on some machines.
# -p Set password.
# Requires current password on some machines.
# -m Set security mode.
# Requires current password on some machines.
# Mode can be either "full" or "command".
# Full mode requires entry of the password on
# every boot, command mode only requires entry
# of the password if the boot picker is invoked
# to select a different boot device.
#
# When enabling the Firmware Password for the first
# time, both the password and mode must be provided.
# Once the firmware password has been enabled, providing
# the mode or password alone will change that parameter
# only.
#
# -o Old password.
# Only required on certain machines to disable
# or change password or mode. Optional, if not
# provided the tool will prompt for the password.
#
################################################################################
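#
# example session (run as root; the exact output text is assumed from the
# usage notes above):
#
#   $ firmwarepasswd -check
#   Password Enabled: Yes
#   $ firmwarepasswd -mode
#   Mode: command
#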
#
# imports
from argparse import RawTextHelpFormatter
import argparse
import base64
import configparser
import hashlib
import inspect
import json
import logging
import os
import platform
import plistlib
import re
import socket
import subprocess
import sys
import pexpect
import requests
class FWPM_Object(object):
"""
    Encapsulates the state and workflow for managing a machine's firmware password.
"""
def __init__(self, args, logger, master_version):
"""
        Read configuration and keyfile, detect available tools, and run master_control.
"""
self.args = args
self.logger = logger
self.master_version = master_version
self.srp_path = None
self.fwpwd_path = None
self.config_options = {}
self.local_identifier = None
self.passwords_raw = None
self.fwpw_managed_string = None
self.new_password = None
self.other_password_list = []
self.current_fwpw_state = False
self.current_fwpm_hash = None
self.clean_exit = False
self.read_config = False
self.read_keyfile = False
self.modify_fwpw = False
self.modify_nvram = False
self.matching_hashes = False
self.matching_passwords = False
self.configuration_path = None
self.system_version = platform.mac_ver()[0].split(".")
self.srp_check()
self.fwpwd_check()
if self.fwpwd_path:
self.current_fwpw_state = self.fwpwd_current_state()
elif self.srp_path:
self.current_fwpw_state = self.srp_current_state()
self.injest_config()
if self.config_options["slack"]["use_slack"]:
self.slack_optionator()
self.injest_keyfile()
self.hash_current_state()
self.hash_incoming()
#
# What if the string isn't a hash?!?
if (self.current_fwpm_hash == self.fwpw_managed_string) and self.config_options["flags"]["management_string_type"] == 'hash':
self.matching_hashes = True
self.master_control()
def master_control(self):
"""
        Compare the current and incoming hashes and apply any required password change.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
if self.current_fwpm_hash == self.fwpw_managed_string:
if self.logger:
self.logger.info("Hashes match. No change required.")
else:
if self.logger:
self.logger.info("Hashes DO NOT match. Change required.")
if self.fwpwd_path:
self.fwpwd_change()
self.secure_delete()
elif self.srp_path:
self.srp_change()
self.secure_delete()
else:
print("No FW tool found.")
quit()
#
# nvram maintenance
#
self.nvram_manager()
#
# some kind of post action reporting.
# handle reboot flag here?
#
self.exit_manager()
def hash_current_state(self):
"""
        Read the existing fwpw-hash entry, if any, from nvram.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
        existing_keyfile_hash = None
        if self.logger:
            self.logger.info("Checking existing hash.")
        try:
            existing_keyfile_hash_raw = subprocess.check_output(["/usr/sbin/nvram", "-p"]).decode('utf-8')
            for item in existing_keyfile_hash_raw.split('\n'):
                if "fwpw-hash" in item:
                    existing_keyfile_hash = item
                    break
            if existing_keyfile_hash is None:
                self.current_fwpm_hash = None
            else:
                self.current_fwpm_hash = existing_keyfile_hash.split("\t")[1]
            if self.args.testmode:
                print("Existing hash: %s" % self.current_fwpm_hash)
        except Exception:
            pass
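    # Assumed format of the nvram entry parsed above: "nvram -p" prints one
    # "name<TAB>value" pair per line, so the managed hash appears as
    #   fwpw-hash<TAB>2:<sha256 hexdigest>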
def hash_incoming(self):
"""
        Build the management string (hash) from the incoming keyfile passwords.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
if self.logger:
self.logger.info("Checking incoming hash.")
if self.config_options["flags"]["management_string_type"] == "custom":
#
# ?!?!?!?!?!?!?
#
self.fwpw_managed_string = self.config_options["flags"]["management_string_type"]
elif self.config_options["flags"]["management_string_type"] == "hash":
hashed_key = hashlib.new('sha256')
# hashed_key.update(self.passwords_raw.encode('utf-8'))
hashed_key.update(self.new_password.encode('utf-8'))
for entry in sorted(self.other_password_list):
hashed_key.update(entry.encode('utf-8'))
self.fwpw_managed_string = hashed_key.hexdigest()
# prepend '2:' to denote hash created with v2 of script, will force a password change from v1
self.fwpw_managed_string = '2:' + self.fwpw_managed_string
else:
self.fwpw_managed_string = None
if self.args.testmode:
print("Incoming hash: %s" % self.fwpw_managed_string)
def secure_delete(self):
"""
attempts to securely delete the keyfile with medium overwrite and zeroing settings
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
if self.logger:
self.logger.info("Deleting keyfile")
use_srm = bool(os.path.exists("/usr/bin/srm"))
if self.args.testmode:
if self.logger:
self.logger.info("Test mode, keyfile not deleted.")
return
if use_srm:
try:
subprocess.call(["/usr/bin/srm", "-mz", self.config_options["keyfile"]["path"]])
if self.logger:
self.logger.info("keyfile deleted successfuly.")
except Exception as exception_message:
if self.logger:
self.logger.critical("Issue with attempt to remove keyfile. %s" % exception_message)
else:
try:
deleted_keyfile = subprocess.call(["/bin/rm", "-Pf", self.config_options["keyfile"]["path"]])
print("return: %r" % deleted_keyfile)
if self.logger:
self.logger.info("keyfile deleted successfuly.")
except Exception as exception_message:
if self.logger:
self.logger.critical("Issue with attempt to remove keyfile. %s" % exception_message)
# is this really needed?
if os.path.exists(self.config_options["keyfile"]["path"]):
if self.logger:
self.logger.critical("Failure to remove keyfile.")
else:
if self.logger:
self.logger.info("Keyfile removed.")
return
def injest_config(self):
"""
attempts to consume and format configuration file
"""
# handle parsing errors in cfg?!?
# where to handle looking for cfg in specific locations?!?
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
try:
if os.path.exists(self.args.configfile):
# firmware_password_manager_cfg_v2.5b8.py:434: DeprecationWarning: The SafeConfigParser class has been renamed to ConfigParser in Python 3.2. This alias will be removed in future versions. Use ConfigParser directly instead.
config = configparser.ConfigParser(allow_no_value=True)
config.read(self.args.configfile)
self.config_options["flags"] = {}
self.config_options["keyfile"] = {}
self.config_options["logging"] = {}
self.config_options["slack"] = {}
self.config_options["os"] = {}
self.config_options["fwpm"] = {}
for section in ["flags", "keyfile", "logging", "slack"]:
for item in config.options(section):
if "use_" in item:
try:
self.config_options[section][item] = config.getboolean(section, item)
except:
self.config_options[section][item] = False
elif "path" in item:
self.config_options[section][item] = config.get(section, item)
else:
self.config_options[section][item] = config.get(section, item)
if self.args.testmode:
print("Configuration file variables:")
for key, value in self.config_options.items():
print(key)
for sub_key, sub_value in value.items():
print("\t%s %r" % (sub_key, sub_value))
else:
if self.logger:
self.logger.critical("Issue locating configuration file, exiting.")
sys.exit()
except Exception as exception_message:
if self.logger:
self.logger.critical("Issue reading configuration file, exiting. %s" % exception_message)
sys.exit()
self.read_config = True
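    # A hypothetical configuration file matching the sections and option
    # names read above (any options beyond these are assumptions):
    #
    #   [flags]
    #   use_fwpw: True
    #   management_string_type: hash
    #   use_reboot_on_exit: False
    #
    #   [keyfile]
    #   path: /tmp/private.keyfile
    #   use_obfuscation: False
    #
    #   [logging]
    #   use_logging: True
    #
    #   [slack]
    #   use_slack: False
    #   slack_identifier: serial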
def sanity_check(self):
"""
        Placeholder for future sanity checks of keyfile and configuration contents.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
def srp_check(self):
"""
full setregproptool support later, if ever.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
if os.path.exists('/usr/local/bin/setregproptool'):
self.srp_path = '/usr/local/bin/setregproptool'
elif os.path.exists(os.path.dirname(os.path.abspath(__file__)) + '/setregproptool'):
self.srp_path = os.path.dirname(os.path.abspath(__file__)) + '/setregproptool'
else:
print("SRP #3a")
if self.logger:
self.logger.info("SRP path: %s" % self.srp_path)
def srp_current_state(self):
"""
full setregproptool support later, if ever.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
try:
existing_fw_pw = subprocess.call([self.srp_path, "-c"])
if self.logger:
self.logger.info("srp says %r" % existing_fw_pw)
if existing_fw_pw:
return False
# it's weird, I know. Blame Apple.
else:
return True
        except Exception as exception_message:
            # existing_fw_pw may be unbound if the call itself failed,
            # so report the exception rather than the variable
            if self.logger:
                self.logger.info("ERROR running srp check: %s" % exception_message)
            return False
def srp_change(self):
"""
full setregproptool support later, if ever.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
print("Using srp tool!")
print("%r" % self.current_fwpw_state)
def fwpwd_check(self):
"""
        Locate the firmwarepasswd tool.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
if os.path.exists('/usr/sbin/firmwarepasswd'):
self.fwpwd_path = '/usr/sbin/firmwarepasswd'
else:
print("FWPWD #2b")
if self.logger:
self.logger.info("FWPWD path: %s" % self.fwpwd_path)
def fwpwd_current_state(self):
"""
        Return True if a firmware password is currently set.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
existing_fw_pw = subprocess.check_output([self.fwpwd_path, "-check"])
        return b'Yes' in existing_fw_pw
def fwpwd_change(self):
"""
        Set, change or remove the firmware password using firmwarepasswd.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
known_current_password = False
current_password = ''
# is this really needed?!?
new_fw_tool_cmd = [self.fwpwd_path, '-verify']
if self.current_fwpw_state:
if self.logger:
self.logger.info("Verifying current FW password")
for index in reversed(range(len(self.other_password_list))):
child = pexpect.spawn(' '.join(new_fw_tool_cmd))
child.expect('Enter password:')
child.sendline(self.other_password_list[index])
result = child.expect(['Correct', 'Incorrect'])
if result == 0:
#
# correct password, exit loop
current_password = self.other_password_list[index]
known_current_password = True
break
else:
#
# wrong password, keep going
continue
#
# We've discovered the currently set firmware password
if known_current_password:
#
# Deleting firmware password
if not self.config_options["flags"]["use_fwpw"]:
if self.logger:
self.logger.info("Deleting FW password")
new_fw_tool_cmd = [self.fwpwd_path, '-delete']
if self.logger:
self.logger.info(' '.join(new_fw_tool_cmd))
child = pexpect.spawn(' '.join(new_fw_tool_cmd))
child.expect('Enter password:')
child.sendline(current_password)
result = child.expect(['removed', 'incorrect'])
if result == 0:
#
# password accepted, log result and exit
if self.logger:
self.logger.info("Finished. Password should be removed. Restart required. [%i]" % (index + 1))
self.clean_exit = True
else:
if self.logger:
self.logger.critical("Asked to delete, current password not accepted. Exiting.")
# secure_delete_keyfile(logger, args, config_options)
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Asked to delete, current password not accepted.", '', 'error')
# self.error_bot.send_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Asked to delete, current password not accepted.")
sys.exit(1)
#
# Current and new password are identical
#
#
# WAIT. How (is/would) this possible, clearly the hashes don't match!!! What if they aren't using hashes?
#
#
elif current_password == self.new_password:
self.matching_passwords = True
self.clean_exit = True
#
# Change current firmware password to new password
else:
if self.logger:
self.logger.info("Updating FW password")
new_fw_tool_cmd = [self.fwpwd_path, '-setpasswd']
if self.logger:
self.logger.info(' '.join(new_fw_tool_cmd))
child = pexpect.spawn(' '.join(new_fw_tool_cmd))
result = child.expect('Enter password:')
if result == 0:
pass
else:
if self.logger:
self.logger.error("bad response from firmwarepasswd. Exiting.")
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Bad response from firmwarepasswd.", '', 'error')
sys.exit(1)
child.sendline(current_password)
result = child.expect('Enter new password:')
if result == 0:
pass
else:
if self.logger:
self.logger.error("bad response from firmwarepasswd. Exiting.")
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Bad response from firmwarepasswd.", '', 'error')
sys.exit(1)
child.sendline(self.new_password)
result = child.expect('Re-enter new password:')
if result == 0:
pass
else:
if self.logger:
self.logger.error("bad response from firmwarepasswd. Exiting.")
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Bad response from firmwarepasswd.", '', 'error')
sys.exit(1)
child.sendline(self.new_password)
child.expect(pexpect.EOF)
child.close()
if self.logger:
self.logger.info("Updated FW Password.")
self.clean_exit = True
#
# Unable to match current password with contents of keyfile
else:
if self.logger:
self.logger.critical("Current FW password not in keyfile. Quitting.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Current FW password not in keyfile.", '', 'error')
self.secure_delete()
sys.exit(1)
#
# No current firmware password, setting it
else:
new_fw_tool_cmd = [self.fwpwd_path, '-setpasswd']
if self.logger:
self.logger.info(' '.join(new_fw_tool_cmd))
child = pexpect.spawn(' '.join(new_fw_tool_cmd))
result = child.expect('Enter new password:')
print(child.before)
if result == 0:
pass
else:
if self.logger:
self.logger.error("bad response from firmwarepasswd. Exiting.")
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Bad response from firmwarepasswd.", '', 'error')
sys.exit(1)
child.sendline(self.new_password)
result = child.expect('Re-enter new password:')
if result == 0:
pass
else:
if self.logger:
self.logger.error("bad response from firmwarepasswd. Exiting.")
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Bad response from firmwarepasswd.", '', 'error')
sys.exit(1)
child.sendline(self.new_password)
child.expect(pexpect.EOF)
child.close()
if self.logger:
self.logger.info("Added FW Password.")
self.clean_exit = True
def slack_optionator(self):
"""
        Build the Slack identifier for this machine from the configured
        option: ip, mac, hostname, computername, or serial.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
if self.verify_network():
try:
full_ioreg = subprocess.check_output(['ioreg', '-l']).decode('utf-8')
serial_number_raw = re.findall('\"IOPlatformSerialNumber\" = \"(.*)\"', full_ioreg)
serial_number = serial_number_raw[0]
if self.args.testmode:
print("Serial number: %r" % serial_number)
if self.config_options["slack"]["slack_identifier"].lower() == 'ip' or self.config_options["slack"]["slack_identifier"].lower() == 'mac' or self.config_options["slack"]["slack_identifier"].lower() == 'hostname':
processed_device_list = []
# Get ordered list of network devices
base_network_list = subprocess.check_output(["/usr/sbin/networksetup", "-listnetworkserviceorder"]).decode('utf-8')
network_device_list = re.findall(r'\) (.*)\n\(.*Device: (.*)\)', base_network_list)
ether_up_list = subprocess.check_output(["/sbin/ifconfig", "-au", "ether"]).decode('utf-8')
for device in network_device_list:
device_name = device[0]
port_name = device[1]
try:
if self.args.testmode:
print(device_name, port_name)
if port_name in ether_up_list:
device_info_raw = subprocess.check_output(["/sbin/ifconfig", port_name]).decode('utf-8')
mac_address = re.findall('ether (.*) \n', device_info_raw)
if self.args.testmode:
print("%r" % mac_address)
ether_address = re.findall('inet (.*) netmask', device_info_raw)
if self.args.testmode:
print("%r" % ether_address)
if len(ether_address) and len(mac_address):
processed_device_list.append([device_name, port_name, ether_address[0], mac_address[0]])
except Exception as this_exception:
print(this_exception)
if processed_device_list:
if self.logger:
self.logger.info("1 or more active IP addresses. Choosing primary.")
if self.args.testmode:
print("Processed devices: ", processed_device_list)
if self.config_options["slack"]["slack_identifier"].lower() == 'ip':
self.local_identifier = processed_device_list[0][2] + " (" + processed_device_list[0][0] + ":" + processed_device_list[0][1] + ")"
elif self.config_options["slack"]["slack_identifier"].lower() == 'mac':
self.local_identifier = processed_device_list[0][3] + " (" + processed_device_list[0][0] + ":" + processed_device_list[0][1] + ")"
elif self.config_options["slack"]["slack_identifier"].lower() == 'hostname':
try:
self.local_identifier = socket.getfqdn()
except:
if self.logger:
self.logger.error("error discovering hostname info.")
self.local_identifier = serial_number
else:
if self.logger:
self.logger.error("error discovering IP info.")
self.local_identifier = serial_number
elif self.config_options["slack"]["slack_identifier"].lower() == 'computername':
try:
cname_identifier_raw = subprocess.check_output(['/usr/sbin/scutil', '--get', 'ComputerName'])
                        self.local_identifier = cname_identifier_raw.decode('utf-8').split('\n')[0]
if self.logger:
self.logger.info("Computername: %r" % self.local_identifier)
except:
if self.logger:
self.logger.info("error discovering computername.")
self.local_identifier = serial_number
elif self.config_options["slack"]["slack_identifier"].lower() == 'serial':
self.local_identifier = serial_number
if self.logger:
self.logger.info("Serial number: %r" % self.local_identifier)
else:
if self.logger:
self.logger.info("bad or no identifier flag, defaulting to serial number.")
self.local_identifier = serial_number
if self.args.testmode:
print("Local identifier: %r" % self.local_identifier)
except Exception as this_exception:
print(this_exception)
self.config_options["slack"]["use_slack"] = False
else:
self.config_options["slack"]["use_slack"] = False
if self.logger:
self.logger.info("No network detected.")
def slack_message(self, message, icon, type):
"""
        Post a message to the configured Slack info or error webhook.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
slack_info_channel = False
slack_error_channel = False
if self.config_options["slack"]["use_slack"] and self.config_options["slack"]["slack_info_url"]:
slack_info_channel = True
if self.config_options["slack"]["use_slack"] and self.config_options["slack"]["slack_error_url"]:
slack_error_channel = True
if slack_error_channel and type == 'error':
slack_url = self.config_options["slack"]["slack_error_url"]
elif slack_info_channel:
slack_url = self.config_options["slack"]["slack_info_url"]
else:
return
payload = {'text': message, 'username': 'FWPM ' + self.master_version, 'icon_emoji': ':key:'}
response = requests.post(slack_url, data=json.dumps(payload), headers={'Content-Type': 'application/json'})
self.logger.info('Response: ' + str(response.text))
self.logger.info('Response code: ' + str(response.status_code))
def reboot_exit(self):
"""
        Placeholder for reboot-on-exit handling.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
def injest_keyfile(self):
"""
        Read the keyfile (plain text or obfuscated plist) and build the password lists.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
path_to_keyfile_exists = os.path.exists(self.config_options["keyfile"]["path"])
if not path_to_keyfile_exists:
if self.logger:
self.logger.critical("%r does not exist. Exiting." % self.config_options["keyfile"]["path"])
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Keyfile does not exist.", '', 'error')
sys.exit(2)
if self.logger:
self.logger.info("Reading password file")
if self.config_options["keyfile"]["use_obfuscation"]:
#
# unobfuscate plist
if self.logger:
self.logger.info("Reading plist")
passwords = []
if "plist" in self.config_options["keyfile"]["path"]:
try:
keyfile_plist = plistlib.readPlist(self.config_options["keyfile"]["path"])
content_raw = keyfile_plist["data"]
except:
if self.logger:
self.logger.critical("Error reading plist. Exiting.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Error reading plist.", '', 'error')
sys.exit(1)
else:
try:
with open(self.config_options["keyfile"]["path"], 'r') as reader:
content_raw = reader.read()
                except:
                    if self.logger:
                        self.logger.critical("Error reading obfuscated keyfile. Exiting.")
                    if self.config_options["slack"]["use_slack"]:
                        self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Error reading obfuscated keyfile.", '', 'error')
                    sys.exit(1)
content_raw = base64.b64decode(content_raw)
content_raw = content_raw.decode('utf-8').split(",")
content_raw = [x for x in content_raw if x]
output_string = ""
for item in content_raw:
label, pword = item.split(':')
pword = base64.b64decode(pword)
try:
commented = label.split('#')[1]
commented = base64.b64decode(commented)
is_commented = True
except:
is_commented = False
if is_commented:
output_string = "#" + commented.decode('utf-8') + ":" + pword.decode('utf-8')
passwords.append(output_string)
else:
uncommented = base64.b64decode(label)
output_string = uncommented.decode('utf-8') + ":" + pword.decode('utf-8')
passwords.append(output_string)
else:
#
# read keyfile
if self.logger:
self.logger.info("Reading plain text")
try:
with open(self.config_options["keyfile"]["path"], "r") as keyfile:
self.passwords_raw = keyfile.read()
passwords = self.passwords_raw.splitlines()
except:
if self.logger:
self.logger.critical("Error reading keyfile. Exiting.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Error reading keyfile.", '', 'error')
sys.exit(1)
if self.logger:
self.logger.info("Closed password file")
# new_password = <PASSWORD>
# other_password_list = []
#
# parse data from keyfile and build list of passwords
for entry in passwords:
try:
key, value = entry.split(":", 1)
except Exception as this_exception:
if self.logger:
self.logger.critical("Malformed keyfile, key:value format required. %r. Quitting." % this_exception)
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Malformed keyfile.", '', 'error')
sys.exit(1)
if key.lower() == 'new':
if self.new_password is not None:
if self.logger:
self.logger.critical("Malformed keyfile, multiple new keys. Quitting.")
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Malformed keyfile.", '', 'error')
sys.exit(1)
else:
self.new_password = value
self.other_password_list.append(value)
else:
self.other_password_list.append(value)
if self.logger:
self.logger.info("Sanity checking password file contents")
if self.new_password is None and self.config_options["flags"]["use_fwpw"]:
if self.logger:
self.logger.critical("Malformed keyfile, no \'new\' key. Quitting.")
self.secure_delete()
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "Malformed keyfile.", '', 'error')
sys.exit(1)
self.read_keyfile = True
try:
self.other_password_list.remove(self.new_password)
except:
pass
def nvram_manager(self):
"""
        Create, update or remove the fwpw-hash nvram entry and report the outcome.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
if self.clean_exit:
if not self.config_options["flags"]["use_fwpw"]:
try:
subprocess.call(["/usr/sbin/nvram", "-d", "fwpw-hash"])
if self.logger:
self.logger.info("nvram entry pruned.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :unlock:\n" + "FWPW and nvram entry removed.", '', 'info')
#
# Should we return here?
#
except Exception as exception_message:
if self.logger:
self.logger.warning("nvram reported error attempting to remove hash. Exiting. %s" % exception_message)
#
# Slack?
#
sys.exit(1)
if self.config_options["flags"]["management_string_type"] == "None":
try:
# ?
# existing_keyfile_hash = subprocess.check_output(["/usr/sbin/nvram", "fwpw-hash"])
try:
subprocess.call(["/usr/sbin/nvram", "-d", "fwpw-hash"])
if self.logger:
self.logger.info("nvram entry pruned.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :closed_lock_with_key:\n" + "FWPW updated.", '', 'info')
except Exception as exception_message:
if self.logger:
self.logger.warning("nvram reported error attempting to remove hash. Exiting. %s" % exception_message)
sys.exit(1)
except:
# assuming hash doesn't exist.
if self.logger:
self.logger.info("Assuming nvram entry doesn't exist.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :closed_lock_with_key:\n" + "FWPW updated.", '', 'info')
elif self.config_options["flags"]["management_string_type"] == "custom" or self.config_options["flags"]["management_string_type"] == "hash":
if self.matching_hashes:
if self.matching_passwords:
if self.logger:
self.logger.info("Hashes and Passwords match. No changes needed.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :white_check_mark::white_check_mark:\n" + "FWPM hashes and FW passwords match.", '', 'info')
else:
if self.logger:
self.logger.info("Hashes match, password modified.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :white_check_mark::heavy_exclamation_mark:\n" + "FWPM hashes and FW passwords match.", '', 'info')
else:
try:
subprocess.call(["/usr/sbin/nvram", "fwpw-hash=" + self.fwpw_managed_string])
if self.logger:
self.logger.info("nvram modified.")
except Exception as exception_message:
if self.logger:
self.logger.warning("nvram modification failed. nvram reported error. %s" % exception_message)
#
# slack error message?
#
sys.exit(1)
if self.matching_passwords:
if self.logger:
self.logger.info("Hash mismatch, Passwords match. Correcting hash.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :heavy_exclamation_mark: :white_check_mark:\n" + "Hash mismatch, Passwords match. Correcting hash.", '', 'info')
else:
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :closed_lock_with_key:\n" + "FWPW and hash updated.", '', 'info')
else:
if self.logger:
self.logger.critical("An error occured. Failed to modify firmware password.")
if self.config_options["slack"]["use_slack"]:
self.slack_message("_*" + self.local_identifier + "*_ :no_entry:\n" + "An error occured. Failed to modify firmware password.", '', 'error')
sys.exit(1)
def exit_manager(self):
"""
        Report the results of the run and reboot if configured to do so.
"""
if self.logger:
self.logger.info("%s: activated" % inspect.stack()[0][3])
#
# check the new booleans, etc to find out what we accomplished...
#
# self.clean_exit = False
#
# self.read_config = False
# self.read_keyfile = False
# self.modify_fwpw = False
# self.modify_nvram = False
#
if self.config_options["flags"]["use_reboot_on_exit"]:
if self.args.testmode:
if self.logger:
self.logger.info("Test mode, cancelling reboot.")
else:
if self.logger:
self.logger.warning("Normal completion. Rebooting.")
os.system('reboot')
else:
if self.logger:
self.logger.info("FWPM exiting normally.")
sys.exit(0)
def verify_network(self):
"""
        Confirm basic network connectivity by requesting https://8.8.8.8
        (Google's public DNS host) with a short timeout.
"""
try:
_ = requests.get("https://8.8.8.8", timeout=3)
return True
except requests.ConnectionError as exception_message:
print(exception_message)
return False
def main():
"""
    Parse arguments, configure logging, and run the Firmware Password Manager.
"""
master_version = "2.5"
logo = """
/_ _/ /_ _/ University of Utah
_/ _/ Marriott Library
_/ _/ Mac Group
_/ _/ https://apple.lib.utah.edu/
_/_/ https://github.com/univ-of-utah-marriott-library-apple
"""
desc = "Manages the firmware password on Apple Macintosh computers."
#
# require root to run.
if os.geteuid():
print("Must be root to run script.")
sys.exit(2)
#
# parse option definitions
parser = argparse.ArgumentParser(description=logo+desc, formatter_class=RawTextHelpFormatter)
#
# required, mutually exclusive commands
prime_group = parser.add_argument_group('Required management settings', 'Choosing one of these options is required to run FWPM. They tell FWPM how you want to manage the firmware password.')
subprime = prime_group.add_mutually_exclusive_group(required=True)
subprime.add_argument('-c', '--configfile', help='Read configuration file')
parser.add_argument('-b', '--reboot', action="store_true", default=False, help='Reboots the computer after the script completes successfully.')
parser.add_argument('-t', '--testmode', action="store_true", default=False, help='Test mode. Verbose logging, will not delete keyfile.')
parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + master_version)
args = parser.parse_args()
if args.testmode:
print(args)
#
# Open log file
try:
log_path = '/var/log/' + 'FWPW_Manager_' + master_version
logging.basicConfig(filename=log_path, level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
logger.info("Running Firmware Password Manager " + master_version)
except:
logger = None
FWPM_Object(args, logger, master_version)
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser",
"base64.b64decode",
"json.dumps",
"os.path.abspath",
"os.path.exists",
"re.findall",
"requests.get",
"configparser.ConfigParser",
"subprocess.check_output",
"os.system",
"subprocess.call",
"sys.exit",
"logging.basicConfig",
"plistlib.readPlist",
"socket.getfqdn",
"hashlib.new",
"platform.mac_ver",
"os.geteuid",
"inspect.stack",
"logging.getLogger"
] |
[((46122, 46134), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (46132, 46134), False, 'import os\n'), ((46252, 46343), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '(logo + desc)', 'formatter_class': 'RawTextHelpFormatter'}), '(description=logo + desc, formatter_class=\n    RawTextHelpFormatter)\n', (46275, 46343), False, 'import argparse\n'), ((12480, 12534), 'os.path.exists', 'os.path.exists', (["self.config_options['keyfile']['path']"], {}), "(self.config_options['keyfile']['path'])\n", (12494, 12534), False, 'import os\n'), ((15620, 15667), 'os.path.exists', 'os.path.exists', (['"""/usr/local/bin/setregproptool"""'], {}), "('/usr/local/bin/setregproptool')\n", (15634, 15667), False, 'import os\n'), ((17438, 17480), 'os.path.exists', 'os.path.exists', (['"""/usr/sbin/firmwarepasswd"""'], {}), "('/usr/sbin/firmwarepasswd')\n", (17452, 17480), False, 'import os\n'), ((17888, 17940), 'subprocess.check_output', 'subprocess.check_output', (["[self.fwpwd_path, '-check']"], {}), "([self.fwpwd_path, '-check'])\n", (17911, 17940), False, 'import subprocess\n'), ((33421, 33475), 'os.path.exists', 'os.path.exists', (["self.config_options['keyfile']['path']"], {}), "(self.config_options['keyfile']['path'])\n", (33435, 33475), False, 'import os\n'), ((46189, 46200), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (46197, 46200), False, 'import sys\n'), ((47308, 47423), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': 'log_path', 'level': 'logging.INFO', 'format': '"""%(asctime)s - %(levelname)s - %(message)s"""'}), "(filename=log_path, level=logging.INFO, format=\n    '%(asctime)s - %(levelname)s - %(message)s')\n", (47327, 47423), False, 'import logging\n'), ((47436, 47463), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (47453, 47463), False, 'import logging\n'), ((11352, 11382), 'os.path.exists', 'os.path.exists', (['"""/usr/bin/srm"""'], {}), "('/usr/bin/srm')\n", (11366, 11382), False, 'import os\n'), ((13116, 13152), 'os.path.exists', 'os.path.exists', (['self.args.configfile'], {}), '(self.args.configfile)\n', (13130, 13152), False, 'import os\n'), ((16289, 16327), 'subprocess.call', 'subprocess.call', (["[self.srp_path, '-c']"], {}), "([self.srp_path, '-c'])\n", (16304, 16327), False, 'import subprocess\n'), ((33849, 33860), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (33857, 33860), False, 'import sys\n'), ((35331, 35360), 'base64.b64decode', 'base64.b64decode', (['content_raw'], {}), '(content_raw)\n', (35347, 35360), False, 'import base64\n'), ((38996, 39007), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (39004, 39007), False, 'import sys\n'), ((44262, 44273), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (44270, 44273), False, 'import sys\n'), ((45262, 45273), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (45270, 45273), False, 'import sys\n'), ((45473, 45515), 'requests.get', 'requests.get', (['"""https://8.8.8.8"""'], {'timeout': '(3)'}), "('https://8.8.8.8', timeout=3)\n", (45485, 45515), False, 'import requests\n'), ((10344, 10365), 'hashlib.new', 'hashlib.new', (['"""sha256"""'], {}), "('sha256')\n", (10355, 10365), False, 'import hashlib\n'), ((11585, 11670), 'subprocess.call', 'subprocess.call', (["['/usr/bin/srm', '-mz', self.config_options['keyfile']['path']]"], {}), "(['/usr/bin/srm', '-mz', self.config_options['keyfile']['path']]\n    )\n", (11600, 11670), False, 'import subprocess\n'), ((12020, 12095), 'subprocess.call', 'subprocess.call', (["['/bin/rm', '-Pf', self.config_options['keyfile']['path']]"], {}), "(['/bin/rm', '-Pf', self.config_options['keyfile']['path']])\n", (12035, 12095), False, 'import subprocess\n'), ((13419, 13465), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {'allow_no_value': '(True)'}), '(allow_no_value=True)\n', (13444, 13465), False, 'import configparser\n'), ((14982, 14992), 'sys.exit', 'sys.exit', ([], {}), '()\n', (14990, 14992), False, 'import sys\n'), ((15186, 15196), 'sys.exit', 'sys.exit', ([], {}), '()\n', (15194, 15196), False, 'import sys\n'), ((24660, 24671), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (24668, 24671), False, 'import sys\n'), ((25489, 25500), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (25497, 25500), False, 'import sys\n'), ((26046, 26057), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (26054, 26057), False, 'import sys\n'), ((26677, 26736), 're.findall', 're.findall', (['""""IOPlatformSerialNumber" = "(.*)\\""""', 'full_ioreg'], {}), '(\'"IOPlatformSerialNumber" = "(.*)"\', full_ioreg)\n', (26687, 26736), False, 'import re\n'), ((32824, 32843), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (32834, 32843), False, 'import json\n'), ((35622, 35645), 'base64.b64decode', 'base64.b64decode', (['pword'], {}), '(pword)\n', (35638, 35645), False, 'import base64\n'), ((45129, 45148), 'os.system', 'os.system', (['"""reboot"""'], {}), "('reboot')\n", (45138, 45148), False, 'import os\n'), ((7140, 7158), 'platform.mac_ver', 'platform.mac_ver', ([], {}), '()\n', (7156, 7158), False, 'import platform\n'), ((9175, 9225), 'subprocess.check_output', 'subprocess.check_output', (["['/usr/sbin/nvram', '-p']"], {}), "(['/usr/sbin/nvram', '-p'])\n", (9198, 9225), False, 'import subprocess\n'), ((27409, 27473), 're.findall', 're.findall', (['"""\\\\) (.*)\\\\n\\\\(.*Device: (.*)\\\\)"""', 'base_network_list'], {}), "('\\\\) (.*)\\\\n\\\\(.*Device: (.*)\\\\)', base_network_list)\n", (27419, 27473), False, 'import re\n'), ((34277, 34335), 'plistlib.readPlist', 'plistlib.readPlist', (["self.config_options['keyfile']['path']"], {}), "(self.config_options['keyfile']['path'])\n", (34295, 34335), False, 'import plistlib\n'), ((35751, 35778), 'base64.b64decode', 'base64.b64decode', (['commented'], {}), '(commented)\n', (35767, 35778), False, 'import base64\n'), ((36125, 36148), 'base64.b64decode', 'base64.b64decode', (['label'], {}), '(label)\n', (36141, 36148), False, 'import base64\n'), ((36989, 37000), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (36997, 37000), False, 'import sys\n'), ((37779, 37790), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (37787, 37790), False, 'import sys\n'), ((38267, 38278), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (38275, 38278), False, 'import sys\n'), ((39465, 39520), 'subprocess.call', 'subprocess.call', (["['/usr/sbin/nvram', '-d', 'fwpw-hash']"], {}), "(['/usr/sbin/nvram', '-d', 'fwpw-hash'])\n", (39480, 39520), False, 'import subprocess\n'), ((15773, 15798), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (15788, 15798), False, 'import os\n'), ((15866, 15891), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (15881, 15891), False, 'import os\n'), ((21134, 21145), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (21142, 21145), False, 'import sys\n'), ((26584, 26624), 'subprocess.check_output', 'subprocess.check_output', (["['ioreg', '-l']"], {}), "(['ioreg', '-l'])\n", (26607, 26624), False, 'import subprocess\n'), ((34747, 34758), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (34755, 34758), False, 'import sys\n'), ((35292, 35303), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (35300, 35303), False, 'import sys\n'), ((40236, 40247), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (40244, 40247), False, 'import sys\n'), ((40528, 40583), 'subprocess.call', 'subprocess.call', (["['/usr/sbin/nvram', '-d', 'fwpw-hash']"], {}), "(['/usr/sbin/nvram', '-d', 'fwpw-hash'])\n", (40543, 40583), False, 'import subprocess\n'), ((8047, 8062), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (8060, 8062), False, 'import inspect\n'), ((8980, 8995), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (8993, 8995), False, 'import inspect\n'), ((9904, 9919), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (9917, 9919), False, 'import inspect\n'), ((11231, 11246), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (11244, 11246), False, 'import inspect\n'), ((13064, 13079), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (13077, 13079), False, 'import inspect\n'), ((15388, 15403), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (15401, 15403), False, 'import inspect\n'), ((15585, 15600), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (15598, 15600), False, 'import inspect\n'), ((16223, 16238), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (16236, 16238), False, 'import inspect\n'), ((17143, 17158), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (17156, 17158), False, 'import inspect\n'), ((17403, 17418), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (17416, 17418), False, 'import inspect\n'), ((17839, 17854), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (17852, 17854), False, 'import inspect\n'), ((18354, 18369), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (18367, 18369), False, 'import inspect\n'), ((22598, 22609), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (22606, 22609), False, 'import sys\n'), ((23239, 23250), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (23247, 23250), False, 'import sys\n'), ((23884, 23895), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (23892, 23895), False, 'import sys\n'), ((26480, 26495), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (26493, 26495), False, 'import inspect\n'), ((27271, 27350), 'subprocess.check_output', 'subprocess.check_output', (["['/usr/sbin/networksetup', '-listnetworkserviceorder']"], {}), "(['/usr/sbin/networksetup', '-listnetworkserviceorder'])\n", (27294, 27350), False, 'import subprocess\n'), ((27507, 27566), 'subprocess.check_output', 'subprocess.check_output', (["['/sbin/ifconfig', '-au', 'ether']"], {}), "(['/sbin/ifconfig', '-au', 'ether'])\n", (27530, 27566), False, 'import subprocess\n'), ((30435, 30505), 'subprocess.check_output', 'subprocess.check_output', (["['/usr/sbin/scutil', '--get', 'ComputerName']"], {}), "(['/usr/sbin/scutil', '--get', 'ComputerName'])\n", (30458, 30505), False, 'import subprocess\n'), ((32024, 32039), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (32037, 32039), False, 'import inspect\n'), ((33181, 33196), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (33194, 33196), False, 'import inspect\n'), ((33364, 33379), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (33377, 33379), False, 'import inspect\n'), ((39311, 39326), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (39324, 39326), False, 'import inspect\n'), ((41155, 41166), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (41163, 41166), False, 'import sys\n'), ((42653, 42730), 'subprocess.call', 'subprocess.call', (["['/usr/sbin/nvram', 'fwpw-hash=' + self.fwpw_managed_string]"], {}), "(['/usr/sbin/nvram', 'fwpw-hash=' + self.fwpw_managed_string])\n", (42668, 42730), False, 'import subprocess\n'), ((44432, 44447), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (44445, 44447), False, 'import inspect\n'), ((28101, 28145), 're.findall', 're.findall', (['"""ether (.*) \n"""', 'device_info_raw'], {}), "('ether (.*) \\n', device_info_raw)\n", (28111, 28145), False, 'import re\n'), ((28311, 28359), 're.findall', 're.findall', (['"""inet (.*) netmask"""', 'device_info_raw'], {}), "('inet (.*) netmask', device_info_raw)\n", (28321, 28359), False, 'import re\n'), ((43192, 43203), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (43200, 43203), False, 'import sys\n'), ((27984, 28038), 'subprocess.check_output', 'subprocess.check_output', (["['/sbin/ifconfig', port_name]"], {}), "(['/sbin/ifconfig', port_name])\n", (28007, 28038), False, 'import subprocess\n'), ((29799, 29815), 'socket.getfqdn', 'socket.getfqdn', ([], {}), '()\n', (29813, 29815), False, 'import socket\n')]
|
from django.shortcuts import render
from django.http import JsonResponse
# Build a fresh response per request: Django mutates response objects while
# rendering them, so sharing one instance across requests is unsafe, and the
# original module-level name shadowed the built-in NotImplemented constant.
def not_implemented_response():
    return JsonResponse({"error": "NotImplemented"})
# Create your views here.
def slugs(request):
    return not_implemented_response()
def last_updated(request):
    return not_implemented_response()
def streamer(request):
    return not_implemented_response()
def endpoints(request):
    return not_implemented_response()
|
[
"django.http.JsonResponse"
] |
[((92, 133), 'django.http.JsonResponse', 'JsonResponse', (["{'error': 'NotImplemented'}"], {}), "({'error': 'NotImplemented'})\n", (104, 133), False, 'from django.http import JsonResponse\n')]
|
# -*- coding: utf-8 -*-
"""
@file
@brief Generates random answers for challenges.
"""
import os
import numpy
import pandas
def random_answers_2020_images():
"""
    Generates random answers for the deep learning challenge of
hackathons :ref:`l-hackathon-2020`.
"""
name = os.path.join(os.path.split(__file__)[0], "labels_2020_random.csv")
df = pandas.read_csv(name)[['file_name']]
df['label'] = numpy.random.randint(low=0, high=2, size=(df.shape[0], ))
df['score'] = numpy.random.random((df.shape[0], ))
return df
def random_answers_2020_ml():
"""
    Generates random answers for the machine learning challenge of
hackathons :ref:`l-hackathon-2020`.
"""
df = pandas.DataFrame({"index": numpy.arange(473333)})
df['label'] = numpy.random.randint(low=0, high=2, size=(df.shape[0], ))
df['score'] = numpy.random.random((df.shape[0], ))
return df
|
[
"pandas.read_csv",
"numpy.random.randint",
"numpy.arange",
"numpy.random.random",
"os.path.split"
] |
[((417, 473), 'numpy.random.randint', 'numpy.random.randint', ([], {'low': '(0)', 'high': '(2)', 'size': '(df.shape[0],)'}), '(low=0, high=2, size=(df.shape[0],))\n', (437, 473), False, 'import numpy\n'), ((493, 528), 'numpy.random.random', 'numpy.random.random', (['(df.shape[0],)'], {}), '((df.shape[0],))\n', (512, 528), False, 'import numpy\n'), ((772, 828), 'numpy.random.randint', 'numpy.random.randint', ([], {'low': '(0)', 'high': '(2)', 'size': '(df.shape[0],)'}), '(low=0, high=2, size=(df.shape[0],))\n', (792, 828), False, 'import numpy\n'), ((848, 883), 'numpy.random.random', 'numpy.random.random', (['(df.shape[0],)'], {}), '((df.shape[0],))\n', (867, 883), False, 'import numpy\n'), ((362, 383), 'pandas.read_csv', 'pandas.read_csv', (['name'], {}), '(name)\n', (377, 383), False, 'import pandas\n'), ((299, 322), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (312, 322), False, 'import os\n'), ((731, 751), 'numpy.arange', 'numpy.arange', (['(473333)'], {}), '(473333)\n', (743, 751), False, 'import numpy\n')]
|
"""logging utils for the downloader"""
import wandb
import time
from collections import Counter
import fsspec
import json
from multiprocessing import Process, Queue
import queue
class CappedCounter:
"""Maintain a counter with a capping to avoid memory issues"""
def __init__(self, max_size=10 ** 5):
self.max_size = max_size
self.counter = Counter()
def increment(self, key):
if len(self.counter) >= self.max_size:
self._keep_most_frequent()
self.counter[key] += 1
def _keep_most_frequent(self):
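        # Evict the long tail: keep only the max_size/2 most frequent keys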
self.counter = Counter(dict(self.counter.most_common(int(self.max_size / 2))))
def most_common(self, k):
return self.counter.most_common(k)
def update(self, counter):
self.counter.update(counter.counter)
if len(self.counter) >= self.max_size:
self._keep_most_frequent()
def dump(self):
return self.counter
@classmethod
def load(cls, d, max_size=10 ** 5):
c = CappedCounter(max_size)
c.counter = Counter(d)
return c
class Logger:
"""logger which logs when number of calls reaches a value or a time interval has passed"""
def __init__(self, processes_count=1, min_interval=0):
"""Log only every processes_count and if min_interval (seconds) have elapsed since last log"""
# wait for all processes to return
self.processes_count = processes_count
self.processes_returned = 0
# min time (in seconds) before logging a new table (avoids too many logs)
self.min_interval = min_interval
self.last = time.perf_counter()
# keep track of whether we logged the last call
self.last_call_logged = False
self.last_args = None
self.last_kwargs = None
def __call__(self, *args, **kwargs):
self.processes_returned += 1
if self.processes_returned % self.processes_count == 0 and time.perf_counter() - self.last > self.min_interval:
self.do_log(*args, **kwargs)
self.last = time.perf_counter()
self.last_call_logged = True
else:
self.last_call_logged = False
self.last_args = args
self.last_kwargs = kwargs
def do_log(self, *args, **kwargs):
raise NotImplementedError()
def sync(self):
"""Ensure last call is logged"""
if not self.last_call_logged:
self.do_log(*self.last_args, **self.last_kwargs)
# reset for next file
self.processes_returned = 0
class SpeedLogger(Logger):
"""Log performance metrics"""
def __init__(self, prefix, enable_wandb, **logger_args):
super().__init__(**logger_args)
self.prefix = prefix
self.start = time.perf_counter()
self.count = 0
self.success = 0
self.failed_to_download = 0
self.failed_to_resize = 0
self.enable_wandb = enable_wandb
def __call__(
self, duration, count, success, failed_to_download, failed_to_resize
): # pylint: disable=arguments-differ
self.count += count
self.success += success
self.failed_to_download += failed_to_download
self.failed_to_resize += failed_to_resize
super().__call__(duration, self.count, self.success, self.failed_to_download, self.failed_to_resize)
def do_log(
self, duration, count, success, failed_to_download, failed_to_resize
): # pylint: disable=arguments-differ
img_per_sec = count / duration
success_ratio = 1.0 * success / count
failed_to_download_ratio = 1.0 * failed_to_download / count
failed_to_resize_ratio = 1.0 * failed_to_resize / count
print(
" - ".join(
[
f"{self.prefix:<7}",
f"success: {success_ratio:.3f}",
f"failed to download: {failed_to_download_ratio:.3f}",
f"failed to resize: {failed_to_resize_ratio:.3f}",
f"images per sec: {img_per_sec:.0f}",
f"count: {count}",
]
)
)
if self.enable_wandb:
wandb.log(
{
f"{self.prefix}/img_per_sec": img_per_sec,
f"{self.prefix}/success": success_ratio,
f"{self.prefix}/failed_to_download": failed_to_download_ratio,
f"{self.prefix}/failed_to_resize": failed_to_resize_ratio,
f"{self.prefix}/count": count,
}
)
class StatusTableLogger(Logger):
"""Log status table to W&B, up to `max_status` most frequent items"""
def __init__(self, max_status=100, min_interval=60, enable_wandb=False, **logger_args):
super().__init__(min_interval=min_interval, **logger_args)
# avoids too many errors unique to a specific website (SSL certificates, etc)
self.max_status = max_status
self.enable_wandb = enable_wandb
def do_log(self, status_dict, count): # pylint: disable=arguments-differ
if self.enable_wandb:
status_table = wandb.Table(
columns=["status", "frequency", "count"],
data=[[k, 1.0 * v / count, v] for k, v in status_dict.most_common(self.max_status)],
)
wandb.run.log({"status": status_table})
def write_stats(
output_folder,
shard_id,
count,
successes,
failed_to_download,
failed_to_resize,
start_time,
end_time,
status_dict,
oom_shard_count,
):
"""Write stats to disk"""
stats = {
"count": count,
"successes": successes,
"failed_to_download": failed_to_download,
"failed_to_resize": failed_to_resize,
"duration": end_time - start_time,
"status_dict": status_dict.dump(),
}
fs, output_path = fsspec.core.url_to_fs(output_folder)
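    # Zero-pad the shard id to oom_shard_count digits so stats file names sort consistently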
shard_name = "{shard_id:0{oom_shard_count}d}".format(shard_id=shard_id, oom_shard_count=oom_shard_count)
json_file = f"{output_path}/{shard_name}_stats.json"
with fs.open(json_file, "w") as f:
json.dump(stats, f, indent=4)
# https://docs.python.org/3/library/multiprocessing.html
# logger process that reads stats files regularly, aggregates and send to wandb / print to terminal
class LoggerProcess(Process):
"""Logger process that reads stats files regularly, aggregates and send to wandb / print to terminal"""
def __init__(self, output_folder, enable_wandb, wandb_project, config_parameters, processes_count, log_interval=60):
super().__init__()
self.log_interval = log_interval
self.enable_wandb = enable_wandb
self.fs, self.output_path = fsspec.core.url_to_fs(output_folder)
self.stats_files = set()
self.wandb_project = wandb_project
self.config_parameters = config_parameters
self.processes_count = processes_count
self.q = Queue()
def run(self):
"""Run logger process"""
if self.enable_wandb:
self.current_run = wandb.init(project=self.wandb_project, config=self.config_parameters, anonymous="allow")
else:
self.current_run = None
self.total_speed_logger = SpeedLogger(
"total", processes_count=self.processes_count, enable_wandb=self.enable_wandb
)
self.status_table_logger = StatusTableLogger(
processes_count=self.processes_count, enable_wandb=self.enable_wandb
)
start_time = time.perf_counter()
last_check = 0
total_status_dict = CappedCounter()
while True:
time.sleep(0.1)
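            # A message on the queue (put there by join()) marks the final iteration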
try:
self.q.get(False)
last_one = True
            except queue.Empty:
last_one = False
if not last_one and time.perf_counter() - last_check < self.log_interval:
continue
try:
# read stats files
stats_files = self.fs.glob(self.output_path + "/*.json")
# get new stats files
new_stats_files = set(stats_files) - self.stats_files
if len(new_stats_files) == 0:
if last_one:
self.finish()
return
# read new stats files
for stats_file in new_stats_files:
with self.fs.open(stats_file, "r") as f:
stats = json.load(f)
SpeedLogger("worker", enable_wandb=self.enable_wandb)(
duration=stats["duration"],
count=stats["count"],
success=stats["successes"],
failed_to_download=stats["failed_to_download"],
failed_to_resize=stats["failed_to_resize"],
)
self.total_speed_logger(
duration=time.perf_counter() - start_time,
count=stats["count"],
success=stats["successes"],
failed_to_download=stats["failed_to_download"],
failed_to_resize=stats["failed_to_resize"],
)
status_dict = CappedCounter.load(stats["status_dict"])
total_status_dict.update(status_dict)
self.status_table_logger(total_status_dict, self.total_speed_logger.count)
self.stats_files.add(stats_file)
last_check = time.perf_counter()
if last_one:
self.finish()
return
except Exception as e: # pylint: disable=broad-except
print(e)
self.finish()
return
def finish(self):
"""Finish logger process"""
self.total_speed_logger.sync()
self.status_table_logger.sync()
if self.current_run is not None:
self.current_run.finish()
def join(self, timeout=None):
"""Stop logger process"""
self.q.put("stop")
super().join()
self.q.close()
|
[
"wandb.log",
"json.dump",
"wandb.run.log",
"json.load",
"fsspec.core.url_to_fs",
"time.perf_counter",
"time.sleep",
"wandb.init",
"multiprocessing.Queue",
"collections.Counter"
] |
[((5910, 5946), 'fsspec.core.url_to_fs', 'fsspec.core.url_to_fs', (['output_folder'], {}), '(output_folder)\n', (5931, 5946), False, 'import fsspec\n'), ((368, 377), 'collections.Counter', 'Counter', ([], {}), '()\n', (375, 377), False, 'from collections import Counter\n'), ((1049, 1059), 'collections.Counter', 'Counter', (['d'], {}), '(d)\n', (1056, 1059), False, 'from collections import Counter\n'), ((1620, 1639), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1637, 1639), False, 'import time\n'), ((2775, 2794), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2792, 2794), False, 'import time\n'), ((6160, 6189), 'json.dump', 'json.dump', (['stats', 'f'], {'indent': '(4)'}), '(stats, f, indent=4)\n', (6169, 6189), False, 'import json\n'), ((6754, 6790), 'fsspec.core.url_to_fs', 'fsspec.core.url_to_fs', (['output_folder'], {}), '(output_folder)\n', (6775, 6790), False, 'import fsspec\n'), ((6982, 6989), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (6987, 6989), False, 'from multiprocessing import Process, Queue\n'), ((7557, 7576), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (7574, 7576), False, 'import time\n'), ((2060, 2079), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2077, 2079), False, 'import time\n'), ((4200, 4464), 'wandb.log', 'wandb.log', (["{f'{self.prefix}/img_per_sec': img_per_sec, f'{self.prefix}/success':\n success_ratio, f'{self.prefix}/failed_to_download':\n failed_to_download_ratio, f'{self.prefix}/failed_to_resize':\n failed_to_resize_ratio, f'{self.prefix}/count': count}"], {}), "({f'{self.prefix}/img_per_sec': img_per_sec,\n f'{self.prefix}/success': success_ratio,\n f'{self.prefix}/failed_to_download': failed_to_download_ratio,\n f'{self.prefix}/failed_to_resize': failed_to_resize_ratio,\n f'{self.prefix}/count': count})\n", (4209, 4464), False, 'import wandb\n'), ((5365, 5404), 'wandb.run.log', 'wandb.run.log', (["{'status': status_table}"], {}), "({'status': status_table})\n", (5378, 5404), False, 'import wandb\n'), ((7105, 7197), 'wandb.init', 'wandb.init', ([], {'project': 'self.wandb_project', 'config': 'self.config_parameters', 'anonymous': '"""allow"""'}), "(project=self.wandb_project, config=self.config_parameters,\n anonymous='allow')\n", (7115, 7197), False, 'import wandb\n'), ((7676, 7691), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (7686, 7691), False, 'import time\n'), ((9674, 9693), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9691, 9693), False, 'import time\n'), ((1942, 1961), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1959, 1961), False, 'import time\n'), ((7877, 7896), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (7894, 7896), False, 'import time\n'), ((8523, 8535), 'json.load', 'json.load', (['f'], {}), '(f)\n', (8532, 8535), False, 'import json\n'), ((9037, 9056), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9054, 9056), False, 'import time\n')]
|
# Generated by Django 2.1.7 on 2019-03-28 03:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('materials', '0054_auto_20190318_1704'),
]
operations = [
migrations.AlterField(
model_name='dataset',
name='dimensionality',
field=models.PositiveSmallIntegerField(choices=[(3, 3), (2, 2)]),
),
]
|
[
"django.db.models.PositiveSmallIntegerField"
] |
[((347, 405), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'choices': '[(3, 3), (2, 2)]'}), '(choices=[(3, 3), (2, 2)])\n', (379, 405), False, 'from django.db import migrations, models\n')]
|
import os
import pytest
import simulators
from simulators.tcm_module import (tcm_helper_function, setup_tcm_dirs)
from simulators.tcm_script import parse_args
@pytest.mark.parametrize("star, obs, chip", [
("HD30501", 1, 1),
("HD4747", "a", 4)])
def test_tcm_helper_function(sim_config, star, obs, chip):
simulators = sim_config
obs_name, params, output_prefix = tcm_helper_function(star, obs, chip)
assert isinstance(obs_name, str)
assert isinstance(output_prefix, str)
assert simulators.paths["spectra"] in obs_name
assert "-mixavg-tellcorr_" in obs_name
assert str(star) in obs_name
assert str(obs) in obs_name
assert str(chip) in obs_name
assert os.path.join(star, "tcm", star) in output_prefix
assert "tcm_chisqr_results" in output_prefix
assert params["name"] == star.lower()
def test_setup_tcm_dirs_creates_dirs(sim_config, tmpdir):
simulators = sim_config
simulators.paths["output_dir"] = str(tmpdir)
star = "TestStar"
assert not tmpdir.join(star.upper()).check()
assert not tmpdir.join(star.upper(), "tcm", "plots").check()
result = setup_tcm_dirs(star)
assert tmpdir.join(star.upper()).check(dir=True)
assert tmpdir.join(star.upper(), "tcm", "plots").check(dir=True)
assert result is None
def test_tcm_script_parser():
parsed = parse_args([])
assert parsed.chip is None
assert parsed.error_off is False
assert parsed.disable_wav_scale is False
def test_tcm_script_parser_toggle():
args = ["--chip", "2", "--error_off", "--disable_wav_scale"]
parsed = parse_args(args)
    assert parsed.chip == "2"  # compare strings by value, not identity
assert parsed.error_off is True
assert parsed.disable_wav_scale is True
|
[
"simulators.tcm_module.setup_tcm_dirs",
"simulators.tcm_script.parse_args",
"simulators.tcm_module.tcm_helper_function",
"pytest.mark.parametrize",
"os.path.join"
] |
[((164, 251), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""star, obs, chip"""', "[('HD30501', 1, 1), ('HD4747', 'a', 4)]"], {}), "('star, obs, chip', [('HD30501', 1, 1), ('HD4747',\n 'a', 4)])\n", (187, 251), False, 'import pytest\n'), ((382, 418), 'simulators.tcm_module.tcm_helper_function', 'tcm_helper_function', (['star', 'obs', 'chip'], {}), '(star, obs, chip)\n', (401, 418), False, 'from simulators.tcm_module import tcm_helper_function, setup_tcm_dirs\n'), ((1128, 1148), 'simulators.tcm_module.setup_tcm_dirs', 'setup_tcm_dirs', (['star'], {}), '(star)\n', (1142, 1148), False, 'from simulators.tcm_module import tcm_helper_function, setup_tcm_dirs\n'), ((1343, 1357), 'simulators.tcm_script.parse_args', 'parse_args', (['[]'], {}), '([])\n', (1353, 1357), False, 'from simulators.tcm_script import parse_args\n'), ((1588, 1604), 'simulators.tcm_script.parse_args', 'parse_args', (['args'], {}), '(args)\n', (1598, 1604), False, 'from simulators.tcm_script import parse_args\n'), ((702, 733), 'os.path.join', 'os.path.join', (['star', '"""tcm"""', 'star'], {}), "(star, 'tcm', star)\n", (714, 733), False, 'import os\n')]
|
import datetime
import diary
import itsdangerous
ROLE_USER = 0
ROLE_ADMIN = 1
"""
User-diary many-to-many relationship
"""
dairy_user_table = diary.db.Table(
"dairy_user",
diary.db.Model.metadata,
diary.db.Column("diary_id",
diary.db.Integer,
diary.db.ForeignKey("diary.id")),
diary.db.Column("user_id",
diary.db.Integer,
diary.db.ForeignKey("user.id")))
class User(diary.db.Model):
"""
The User object
"""
__tablename__ = "user"
id = diary.db.Column(diary.db.Integer, primary_key=True)
firstname = diary.db.Column(diary.db.Unicode(256), nullable=False)
lastname = diary.db.Column(diary.db.Unicode(256), nullable=False, index=True)
emailaddress = diary.db.Column(diary.db.Unicode(1024), nullable=False, index=True, unique=True)
facebook_id = diary.db.Column(diary.db.Unicode, nullable=True)
role = diary.db.Column(diary.db.SmallInteger, default=ROLE_USER)
active = diary.db.Column(diary.db.Boolean, default=True)
created = diary.db.Column(diary.db.DateTime, default=datetime.datetime.utcnow)
# relations
diaries = diary.db.relationship("Diary",
secondary=dairy_user_table,
lazy="dynamic",
backref="users")
tokens = diary.db.relationship("Auth", lazy="dynamic")
posts = diary.db.relationship("Post", lazy="dynamic")
def has_access(self, diary_id):
        return len(self.diaries.filter(Diary.id == diary_id).all()) == 1
def generate_auth_token(self, expiration=600):
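        # Issue a signed, time-limited token that embeds the user's identity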
sec_key = diary.app.config["SECRET_KEY"]
s = itsdangerous.TimedJSONWebSignatureSerializer(sec_key, expires_in=expiration)
user_data = {
"user_id": self.id,
"user_facebook_id": self.facebook_id,
"user_email": self.emailaddress,
}
        return s.dumps(user_data).decode("utf-8")  # unicode() is Python 2-only
@staticmethod
def verify_auth_token(token):
sec_key = diary.app.config["SECRET_KEY"]
s = itsdangerous.TimedJSONWebSignatureSerializer(sec_key)
try:
data = s.loads(token)
except itsdangerous.SignatureExpired:
return None # valid token, but expired
except itsdangerous.BadSignature:
return None # invalid token
user = User.query.get(data["user_id"])
return user if data["user_facebook_id"] == user.facebook_id else None
class Auth(diary.db.Model):
"""
Auth tokens
"""
__tablename__ = "auth_token"
id = diary.db.Column(diary.db.Integer, primary_key=True)
owner_id = diary.db.Column(diary.db.Integer, diary.db.ForeignKey("user.id"))
facebook_token = diary.db.Column(diary.db.Unicode, nullable=False)
token = diary.db.Column(diary.db.Unicode, nullable=False)
modified = diary.db.Column(diary.db.DateTime, default=datetime.datetime.utcnow)
class Diary(diary.db.Model):
"""
The Diary object
"""
__tablename__ = "diary"
id = diary.db.Column(diary.db.Integer, primary_key=True)
owner_id = diary.db.Column(diary.db.Integer, diary.db.ForeignKey("user.id"))
title = diary.db.Column(diary.db.Unicode(1024), nullable=False, index=True)
created = diary.db.Column(diary.db.DateTime, default=datetime.datetime.utcnow)
# relations
posts = diary.db.relationship("Post", lazy="dynamic")
def sorted_posts(self, page):
return self.posts.order_by(Post.date.desc(), Post.id.desc()).paginate(page, 10, False).items
def to_dict(self):
return {
"id": self.id,
"owner_id": self.owner_id,
"title": self.title,
"created": self.created,
}
class Post(diary.db.Model):
"""
The Post object
"""
__tablename__ = "post"
id = diary.db.Column(diary.db.Integer, primary_key=True)
user_id = diary.db.Column(diary.db.Integer, diary.db.ForeignKey("user.id"))
diary_id = diary.db.Column(diary.db.Integer, diary.db.ForeignKey("diary.id"))
title = diary.db.Column(diary.db.Unicode(1024), nullable=False, index=True)
body = diary.db.Column(diary.db.Text, nullable=False)
date = diary.db.Column(diary.db.Date, default=datetime.datetime.utcnow)
created = diary.db.Column(diary.db.DateTime, default=datetime.datetime.utcnow)
modified = diary.db.Column(diary.db.DateTime, default=datetime.datetime.utcnow)
# relations
pictures = diary.db.relationship("Picture", lazy="dynamic")
def to_dict(self):
pics = []
for pic in self.pictures.all():
pics.append(pic.to_dict())
return {
"id": self.id,
"user_id": self.user_id,
"diary_id": self.diary_id,
"title": self.title,
"body": self.body,
"date": self.date.isoformat(),
"created": self.created.isoformat(),
"modified": self.modified.isoformat(),
"pictures": pics,
}
class Picture(diary.db.Model):
"""
The Picture object
"""
__tablename__ = "picture"
id = diary.db.Column(diary.db.Integer, primary_key=True)
post_id = diary.db.Column(diary.db.Integer, diary.db.ForeignKey("post.id"))
title = diary.db.Column(diary.db.Unicode(1024), nullable=False, index=True)
file_url = diary.db.Column(diary.db.Unicode(1024), nullable=False)
thumb_url = diary.db.Column(diary.db.Unicode(1024), nullable=True)
def to_dict(self):
return {
"id": self.id,
"title": self.title,
"file_url": self.file_url,
"thumb_url": self.thumb_url,
}
|
[
"diary.db.relationship",
"diary.db.Unicode",
"diary.db.ForeignKey",
"itsdangerous.TimedJSONWebSignatureSerializer",
"diary.db.Column"
] |
[((530, 581), 'diary.db.Column', 'diary.db.Column', (['diary.db.Integer'], {'primary_key': '(True)'}), '(diary.db.Integer, primary_key=True)\n', (545, 581), False, 'import diary\n'), ((845, 893), 'diary.db.Column', 'diary.db.Column', (['diary.db.Unicode'], {'nullable': '(True)'}), '(diary.db.Unicode, nullable=True)\n', (860, 893), False, 'import diary\n'), ((903, 960), 'diary.db.Column', 'diary.db.Column', (['diary.db.SmallInteger'], {'default': 'ROLE_USER'}), '(diary.db.SmallInteger, default=ROLE_USER)\n', (918, 960), False, 'import diary\n'), ((972, 1019), 'diary.db.Column', 'diary.db.Column', (['diary.db.Boolean'], {'default': '(True)'}), '(diary.db.Boolean, default=True)\n', (987, 1019), False, 'import diary\n'), ((1032, 1100), 'diary.db.Column', 'diary.db.Column', (['diary.db.DateTime'], {'default': 'datetime.datetime.utcnow'}), '(diary.db.DateTime, default=datetime.datetime.utcnow)\n', (1047, 1100), False, 'import diary\n'), ((1128, 1223), 'diary.db.relationship', 'diary.db.relationship', (['"""Diary"""'], {'secondary': 'dairy_user_table', 'lazy': '"""dynamic"""', 'backref': '"""users"""'}), "('Diary', secondary=dairy_user_table, lazy='dynamic',\n backref='users')\n", (1149, 1223), False, 'import diary\n'), ((1333, 1378), 'diary.db.relationship', 'diary.db.relationship', (['"""Auth"""'], {'lazy': '"""dynamic"""'}), "('Auth', lazy='dynamic')\n", (1354, 1378), False, 'import diary\n'), ((1389, 1434), 'diary.db.relationship', 'diary.db.relationship', (['"""Post"""'], {'lazy': '"""dynamic"""'}), "('Post', lazy='dynamic')\n", (1410, 1434), False, 'import diary\n'), ((2469, 2520), 'diary.db.Column', 'diary.db.Column', (['diary.db.Integer'], {'primary_key': '(True)'}), '(diary.db.Integer, primary_key=True)\n', (2484, 2520), False, 'import diary\n'), ((2619, 2668), 'diary.db.Column', 'diary.db.Column', (['diary.db.Unicode'], {'nullable': '(False)'}), '(diary.db.Unicode, nullable=False)\n', (2634, 2668), False, 'import diary\n'), ((2679, 2728), 'diary.db.Column', 'diary.db.Column', (['diary.db.Unicode'], {'nullable': '(False)'}), '(diary.db.Unicode, nullable=False)\n', (2694, 2728), False, 'import diary\n'), ((2742, 2810), 'diary.db.Column', 'diary.db.Column', (['diary.db.DateTime'], {'default': 'datetime.datetime.utcnow'}), '(diary.db.DateTime, default=datetime.datetime.utcnow)\n', (2757, 2810), False, 'import diary\n'), ((2907, 2958), 'diary.db.Column', 'diary.db.Column', (['diary.db.Integer'], {'primary_key': '(True)'}), '(diary.db.Integer, primary_key=True)\n', (2922, 2958), False, 'import diary\n'), ((3128, 3196), 'diary.db.Column', 'diary.db.Column', (['diary.db.DateTime'], {'default': 'datetime.datetime.utcnow'}), '(diary.db.DateTime, default=datetime.datetime.utcnow)\n', (3143, 3196), False, 'import diary\n'), ((3222, 3267), 'diary.db.relationship', 'diary.db.relationship', (['"""Post"""'], {'lazy': '"""dynamic"""'}), "('Post', lazy='dynamic')\n", (3243, 3267), False, 'import diary\n'), ((3644, 3695), 'diary.db.Column', 'diary.db.Column', (['diary.db.Integer'], {'primary_key': '(True)'}), '(diary.db.Integer, primary_key=True)\n', (3659, 3695), False, 'import diary\n'), ((3941, 3987), 'diary.db.Column', 'diary.db.Column', (['diary.db.Text'], {'nullable': '(False)'}), '(diary.db.Text, nullable=False)\n', (3956, 3987), False, 'import diary\n'), ((3997, 4061), 'diary.db.Column', 'diary.db.Column', (['diary.db.Date'], {'default': 'datetime.datetime.utcnow'}), '(diary.db.Date, default=datetime.datetime.utcnow)\n', (4012, 4061), False, 'import diary\n'), ((4074, 4142), 'diary.db.Column', 'diary.db.Column', (['diary.db.DateTime'], {'default': 'datetime.datetime.utcnow'}), '(diary.db.DateTime, default=datetime.datetime.utcnow)\n', (4089, 4142), False, 'import diary\n'), ((4156, 4224), 'diary.db.Column', 'diary.db.Column', (['diary.db.DateTime'], {'default': 'datetime.datetime.utcnow'}), '(diary.db.DateTime, default=datetime.datetime.utcnow)\n', (4171, 4224), False, 'import diary\n'), ((4253, 4301), 'diary.db.relationship', 'diary.db.relationship', (['"""Picture"""'], {'lazy': '"""dynamic"""'}), "('Picture', lazy='dynamic')\n", (4274, 4301), False, 'import diary\n'), ((4815, 4866), 'diary.db.Column', 'diary.db.Column', (['diary.db.Integer'], {'primary_key': '(True)'}), '(diary.db.Integer, primary_key=True)\n', (4830, 4866), False, 'import diary\n'), ((287, 318), 'diary.db.ForeignKey', 'diary.db.ForeignKey', (['"""diary.id"""'], {}), "('diary.id')\n", (306, 318), False, 'import diary\n'), ((404, 434), 'diary.db.ForeignKey', 'diary.db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (423, 434), False, 'import diary\n'), ((612, 633), 'diary.db.Unicode', 'diary.db.Unicode', (['(256)'], {}), '(256)\n', (628, 633), False, 'import diary\n'), ((680, 701), 'diary.db.Unicode', 'diary.db.Unicode', (['(256)'], {}), '(256)\n', (696, 701), False, 'import diary\n'), ((764, 786), 'diary.db.Unicode', 'diary.db.Unicode', (['(1024)'], {}), '(1024)\n', (780, 786), False, 'import diary\n'), ((1642, 1718), 'itsdangerous.TimedJSONWebSignatureSerializer', 'itsdangerous.TimedJSONWebSignatureSerializer', (['sec_key'], {'expires_in': 'expiration'}), '(sec_key, expires_in=expiration)\n', (1686, 1718), False, 'import itsdangerous\n'), ((2003, 2056), 'itsdangerous.TimedJSONWebSignatureSerializer', 'itsdangerous.TimedJSONWebSignatureSerializer', (['sec_key'], {}), '(sec_key)\n', (2047, 2056), False, 'import itsdangerous\n'), ((2568, 2598), 'diary.db.ForeignKey', 'diary.db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (2587, 2598), False, 'import diary\n'), ((3006, 3036), 'diary.db.ForeignKey', 'diary.db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (3025, 3036), False, 'import diary\n'), ((3064, 3086), 'diary.db.Unicode', 'diary.db.Unicode', (['(1024)'], {}), '(1024)\n', (3080, 3086), False, 'import diary\n'), ((3742, 3772), 'diary.db.ForeignKey', 'diary.db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (3761, 3772), False, 'import diary\n'), ((3821, 3852), 'diary.db.ForeignKey', 'diary.db.ForeignKey', (['"""diary.id"""'], {}), "('diary.id')\n", (3840, 3852), False, 'import diary\n'), ((3880, 3902), 'diary.db.Unicode', 'diary.db.Unicode', (['(1024)'], {}), '(1024)\n', (3896, 3902), False, 'import diary\n'), ((4913, 4943), 'diary.db.ForeignKey', 'diary.db.ForeignKey', (['"""post.id"""'], {}), "('post.id')\n", (4932, 4943), False, 'import diary\n'), ((4971, 4993), 'diary.db.Unicode', 'diary.db.Unicode', (['(1024)'], {}), '(1024)\n', (4987, 4993), False, 'import diary\n'), ((5052, 5074), 'diary.db.Unicode', 'diary.db.Unicode', (['(1024)'], {}), '(1024)\n', (5068, 5074), False, 'import diary\n'), ((5122, 5144), 'diary.db.Unicode', 'diary.db.Unicode', (['(1024)'], {}), '(1024)\n', (5138, 5144), False, 'import diary\n')]
|
import os
import sys
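# Make the parent directory importable when this test file is run directly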
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import unittest
from eng_to_kana.morae_kana_converter import MoraeKanaConverter
class TestMoraeKanaConverter(unittest.TestCase):
def setUp(self):
self.func = MoraeKanaConverter().convertMorae
def test_1(self):
expected_pairs = {
'fa.za.a': 'ファザー',
'a.a.mu': 'アーム',
'ko.ma.N.da.a': 'コマンダー',
'shi.i': 'シー',
'pi.g.gu': 'ピッグ',
'be.d.do': 'ベッド',
'ba.a.do': 'バード',
'ha.N.ba.a.ga.a': 'ハンバーガー',
'kya.t.to': 'キャット',
'gya.N.bu.ru': 'ギャンブル',
'gya.ru': 'ギャル',
'ka.N.to.ri.i': 'カントリー',
'mo.N.ki.i': 'モンキー',
'fu.ro.N.to': 'フロント',
'ro.N.do.N': 'ロンドン',
'bo.k.ku.su': 'ボックス',
'su.to.ro': 'ストロ',
'po.o.to': 'ポート',
'bu.k.ku': 'ブック',
'ba.ru.u.N': 'バルーン',
'a.ba.u.to': 'アバウト',
'pa.i.ro.t.to': 'パイロット',
'wi.na.a': 'ウィナー',
'ma.ma': 'ママ',
'pu.u.ma': 'プーマ',
'de.i': 'デイ',
'de.i.bi.d.do': 'デイビッド',
'ma.i': 'マイ',
'bo.i': 'ボイ',
'to.i': 'トイ',
'fo.o.N': 'フォーン',
'no.o': 'ノー',
'na.u': 'ナウ',
'kwi.a.a': 'クィアー',
'he.a.a': 'ヘアー',
'tu.a.a': 'ツアー',
'kyu.u.bu': 'キューブ',
'a.ma.zo.N': 'アマゾン',
'bo.k.ku.si.N.gu': 'ボックシング',
'gu.u.gu.ru': 'グーグル',
'ma.i.ku.ro.o.so.fu.to': 'マイクローソフト',
'ne.i.sho.N': 'ネイション',
'ro.o.ma': 'ローマ',
'wu.d.do': 'ウッド',
'wu.z.zu': 'ウッズ',
'si.N': 'シン',
're.fu.to': 'レフト',
'mi.ru.ku': 'ミルク',
'so.N.gu': 'ソング',
'da.a.ri.N.gu': 'ダーリング',
'i.i.su.to': 'イースト',
'i.e.su': 'イエス',
'hu.u.pu': 'フープ',
'po.p.pu': 'ポップ',
'ka.t.to': 'カット',
'pa.k.ku': 'パック',
'ki.su': 'キス',
'pa.c.chi': 'パッチ',
'me.s.shu': 'メッシュ'
}
for key, value in expected_pairs.items():
self.assertEqual(value, self.func(key))
def test_2(self):
expected_pairs = {
'wa.t.to': 'ワット',
'ho.wa.t.to': 'ホワット',
'wi.i.to': 'ウィート',
'ho.wi.i.to': 'ホウィート',
'wi.i.za.zu': 'ウィーザズ',
'ho.wi.i.za.zu': 'ホウィーザズ',
'wi.i.zi.zu': 'ウィージズ',
'we.N': 'ウェン',
'ho.we.N': 'ホウェン',
'wi.N': 'ウィン',
'ho.wi.N': 'ホウィン',
'wu.u': 'ウー',
'ho.wu.u': 'ホウー',
'hyu.u': 'ヒュー',
'wi.c.chi': 'ウィッチ',
'ho.wi.c.chi': 'ホウィッチ',
'wa.i.ti.i': 'ワイティー',
'ho.wa.i.ti.i': 'ホワイティー',
'wo.o': 'ウォー',
'ho.wo.o': 'ホウォー',
'ho.o': 'ホー',
'wa.i': 'ワイ',
'ho.wa.i': 'ホワイ',
'wi.zu': 'ウィズ',
'wi.su': 'ウィス'
}
for key, value in expected_pairs.items():
self.assertEqual(value, self.func(key))
def test_3(self):
expected_pairs = {
'a.jo.i.N': 'アジョイン',
'e.ri.i.a': 'エリーア',
'ba.N.kwe.t.to': 'バンクェット',
'kya.ri.j.ji': 'キャリッジ',
'ke.a.a.ji': 'ケアージ',
'e.j.ji': 'エッジ',
'da.i.e.t.to': 'ダイエット',
'jya.j.ji': 'ジャッジ',
'ma.jo.ri.ti.i': 'マジョリティー',
'myu.u.to': 'ミュート',
'o.ra.N.ji': 'オランジ',
'o.ri.N.ji': 'オリンジ',
'po.o.e.t.to': 'ポーエット',
'kwa.k.ku': 'クァック',
'kwe.i.ku': 'クェイク',
'kwi.i.ri.i': 'クィーリー',
'kwi.i.N': 'クィーン',
'kwa.i.e.t.to': 'クァイエット',
'kwi.p.pu': 'クィップ',
'kwo.o.ta': 'クォータ',
'kwo.o.to': 'クォート',
'so.ro.o': 'ソロー',
'wi.ji.t.to': 'ウィジット'
}
for key, value in expected_pairs.items():
self.assertEqual(value, self.func(key))
def test_4(self):
expected_pairs = {
'ko.ru': 'コル',
'di.d.do': 'ディッド',
'fi.N.ga.a': 'フィンガー',
'hi.a.a': 'ヒアー',
'a.i.do.ru': 'アイドル',
're.i.di.i': 'レイディー',
'ri.to.ru': 'リトル',
'ma.za.a': 'マザー',
'pu.re.ja.a': 'プレジャー',
'pyu.a.a': 'ピュアー',
'tu.ri.su.to': 'ツリスト',
'bi.jo.N': 'ビジョン'
}
for key, value in expected_pairs.items():
self.assertEqual(value, self.func(key))
def test_5(self):
expected_pairs = {
'a.be.i.jya.a': 'アベイジャー',
'a.kyu.a.si.i.zu': 'アキュアシーズ',
'e.i.na.a': 'エイナー',
'e.nye.i': 'エニェイ',
'e.i.ku.na.a': 'エイクナー',
'a.ki.i.ya.ma': 'アキーヤマ',
'a.re.i.yo.o': 'アレイヨー',
'a.u.ga.su.ti.ni.i.a.k.ku': 'アウガスティニーアック',
'a.bi.nyo.N': 'アビニョン',
'a.i.ya.s.shu': 'アイヤッシュ',
'bo.o.ryu': 'ボーリュ',
'bi.i.di.e.N.to': 'ビーディエント',
'ka.mo.N': 'カモン',
'cho.i.na.t.to.su.ki.i': 'チョイナットスキー',
'do.ra.i.bu': 'ドライブ',
'da.k.ku.ta.a': 'ダックター',
'fi.ri.i.a.bu': 'フィリーアブ',
'fi.ryo.o': 'フィリョー',
'fa.i.ti.k.ku': 'ファイティック',
'ho.re.i.ji': 'ホレイジ'
}
for key, value in expected_pairs.items():
self.assertEqual(value, self.func(key))
def test_6(self):
expected_pairs = {
'a.pu.ro.o.do': 'アプロード',
'a.p.pu.ru': 'アップル',
'a.p.pu.ru.bi.i': 'アップルビー',
'a.pu.re.t.to': 'アプレット',
'a.pu.ri.ke.i.sho.N': 'アプリケイション',
'a.pu.ra.i': 'アプライ',
'pa.i.na.p.pu.ru': 'パイナップル'
}
for key, value in expected_pairs.items():
self.assertEqual(value, self.func(key))
def test_7(self):
expected_pairs = {
'a.bu.ri.j.ji.do': 'アブリッジド',
'a.bu.ri.j.ji.me.N.to': 'アブリッジメント',
'a.byu.u.ra.z.zi.i': 'アビューラッジー',
'a.byu.u.su': 'アビュース',
'a.byu.u.zu': 'アビューズ',
'ko.N.fyu.u.zu': 'コンフューズ',
'fyu.u': 'フュー',
'ye.i': 'イェイ'
}
for key, value in expected_pairs.items():
self.assertEqual(value, self.func(key))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"os.path.dirname",
"eng_to_kana.morae_kana_converter.MoraeKanaConverter"
] |
[((6548, 6563), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6561, 6563), False, 'import unittest\n'), ((50, 75), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (65, 75), False, 'import os\n'), ((255, 275), 'eng_to_kana.morae_kana_converter.MoraeKanaConverter', 'MoraeKanaConverter', ([], {}), '()\n', (273, 275), False, 'from eng_to_kana.morae_kana_converter import MoraeKanaConverter\n')]
|
from django import template
from django.core.cache import cache
register = template.Library()
@register.simple_tag
def revision():
rev = cache.get('current_revision')
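    # Cache miss: read the revision via the hook and store it for later calls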
    if rev is None:
from lib.revision_hook import get_revision, set_cache
rev = get_revision()
set_cache(rev)
return "Revision: <a href=\"https://github.com/upTee/upTee/commit/{0}\">{1}</a>".format(rev, rev[:7])
|
[
"lib.revision_hook.set_cache",
"django.template.Library",
"lib.revision_hook.get_revision",
"django.core.cache.cache.get"
] |
[((76, 94), 'django.template.Library', 'template.Library', ([], {}), '()\n', (92, 94), False, 'from django import template\n'), ((144, 173), 'django.core.cache.cache.get', 'cache.get', (['"""current_revision"""'], {}), "('current_revision')\n", (153, 173), False, 'from django.core.cache import cache\n'), ((270, 284), 'lib.revision_hook.get_revision', 'get_revision', ([], {}), '()\n', (282, 284), False, 'from lib.revision_hook import get_revision, set_cache\n'), ((293, 307), 'lib.revision_hook.set_cache', 'set_cache', (['rev'], {}), '(rev)\n', (302, 307), False, 'from lib.revision_hook import get_revision, set_cache\n')]
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn as nn
import paddle_crf as crf
import paddle.nn.functional as F
class JointModel(paddle.nn.Layer):
def __init__(self, vocab_size, embedding_size, hidden_size, num_intents, num_slots, num_layers=1, drop_p=0.1):
super(JointModel, self).__init__()
self.vocab_size = vocab_size
self.embedding_size = embedding_size
self.hidden_size = hidden_size
self.drop_p = drop_p
self.num_intents = num_intents
self.num_slots = num_slots
self.embedding = nn.Embedding(vocab_size, embedding_size)
self.dropout = nn.Dropout(p=drop_p)
self.layer_norm = nn.LayerNorm(2*hidden_size)
self.bilstm = nn.LSTM(input_size=embedding_size, hidden_size=hidden_size, direction="bidirectional", num_layers=num_layers, dropout=drop_p)
self.ner_classifier = nn.Linear(hidden_size*2, num_slots+2)
self.intent_classifier = nn.Linear(hidden_size*2, num_intents)
self.crf = crf.LinearChainCrf(num_slots, crf_lr=0.001, with_start_stop_tag=True)
self.crf_loss = crf.LinearChainCrfLoss(self.crf)
self.viterbi_decoder = crf.ViterbiDecoder(self.crf.transitions)
def forward(self, inputs, lens):
batch_size, seq_len = inputs.shape
inputs_embedding = self.embedding(inputs)
if self.drop_p:
inputs_embedding = self.dropout(inputs_embedding)
lstm_outputs, _ = self.bilstm(inputs_embedding)
lstm_outputs = self.layer_norm(lstm_outputs)
emissions = self.ner_classifier(lstm_outputs)
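        # Pick each sequence's hidden state at its last valid timestep (lens - 1)
        # and use it for utterance-level intent classification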
indices = paddle.stack([paddle.arange(batch_size), lens-1], axis=1)
last_step_hiddens = paddle.gather_nd(lstm_outputs, indices)
intent_logits = self.intent_classifier(last_step_hiddens)
return emissions, intent_logits
def get_slot_loss(self, features, lens, tags):
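        # Per-sequence CRF negative log-likelihood, averaged over the batch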
slot_loss = self.crf_loss(features, lens, tags)
slot_loss = paddle.mean(slot_loss)
return slot_loss
def get_intent_loss(self, intent_logits, intent_labels):
return F.cross_entropy(intent_logits, intent_labels)
|
[
"paddle.nn.Dropout",
"paddle_crf.LinearChainCrfLoss",
"paddle.nn.Linear",
"paddle.gather_nd",
"paddle.mean",
"paddle.arange",
"paddle.nn.Embedding",
"paddle_crf.LinearChainCrf",
"paddle_crf.ViterbiDecoder",
"paddle.nn.LayerNorm",
"paddle.nn.functional.cross_entropy",
"paddle.nn.LSTM"
] |
[((1157, 1197), 'paddle.nn.Embedding', 'nn.Embedding', (['vocab_size', 'embedding_size'], {}), '(vocab_size, embedding_size)\n', (1169, 1197), True, 'import paddle.nn as nn\n'), ((1221, 1241), 'paddle.nn.Dropout', 'nn.Dropout', ([], {'p': 'drop_p'}), '(p=drop_p)\n', (1231, 1241), True, 'import paddle.nn as nn\n'), ((1268, 1297), 'paddle.nn.LayerNorm', 'nn.LayerNorm', (['(2 * hidden_size)'], {}), '(2 * hidden_size)\n', (1280, 1297), True, 'import paddle.nn as nn\n'), ((1318, 1448), 'paddle.nn.LSTM', 'nn.LSTM', ([], {'input_size': 'embedding_size', 'hidden_size': 'hidden_size', 'direction': '"""bidirectional"""', 'num_layers': 'num_layers', 'dropout': 'drop_p'}), "(input_size=embedding_size, hidden_size=hidden_size, direction=\n 'bidirectional', num_layers=num_layers, dropout=drop_p)\n", (1325, 1448), True, 'import paddle.nn as nn\n'), ((1474, 1515), 'paddle.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', '(num_slots + 2)'], {}), '(hidden_size * 2, num_slots + 2)\n', (1483, 1515), True, 'import paddle.nn as nn\n'), ((1545, 1584), 'paddle.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', 'num_intents'], {}), '(hidden_size * 2, num_intents)\n', (1554, 1584), True, 'import paddle.nn as nn\n'), ((1603, 1672), 'paddle_crf.LinearChainCrf', 'crf.LinearChainCrf', (['num_slots'], {'crf_lr': '(0.001)', 'with_start_stop_tag': '(True)'}), '(num_slots, crf_lr=0.001, with_start_stop_tag=True)\n', (1621, 1672), True, 'import paddle_crf as crf\n'), ((1697, 1729), 'paddle_crf.LinearChainCrfLoss', 'crf.LinearChainCrfLoss', (['self.crf'], {}), '(self.crf)\n', (1719, 1729), True, 'import paddle_crf as crf\n'), ((1761, 1801), 'paddle_crf.ViterbiDecoder', 'crf.ViterbiDecoder', (['self.crf.transitions'], {}), '(self.crf.transitions)\n', (1779, 1801), True, 'import paddle_crf as crf\n'), ((2288, 2327), 'paddle.gather_nd', 'paddle.gather_nd', (['lstm_outputs', 'indices'], {}), '(lstm_outputs, indices)\n', (2304, 2327), False, 'import paddle\n'), ((2564, 2586), 'paddle.mean', 'paddle.mean', (['slot_loss'], {}), '(slot_loss)\n', (2575, 2586), False, 'import paddle\n'), ((2694, 2739), 'paddle.nn.functional.cross_entropy', 'F.cross_entropy', (['intent_logits', 'intent_labels'], {}), '(intent_logits, intent_labels)\n', (2709, 2739), True, 'import paddle.nn.functional as F\n'), ((2216, 2241), 'paddle.arange', 'paddle.arange', (['batch_size'], {}), '(batch_size)\n', (2229, 2241), False, 'import paddle\n')]
|
from django.conf.urls import url, include
import django.contrib.auth.views
import tracker.views
urlpatterns = [
url(r'^$', tracker.views.tracker_page, name='tracker-page'),
url(r'^door-opener$', tracker.views.door_opener, name='door-opener'),
url(r'^login$', django.contrib.auth.views.login, {'template_name': 'login.html'}, name='login'),
url(r'^logout$', django.contrib.auth.views.logout, {'next_page': '/'}, name='logout'),
#url('^', include('django.contrib.auth.urls')),
]
|
[
"django.conf.urls.url"
] |
[((121, 179), 'django.conf.urls.url', 'url', (['"""^$"""', 'tracker.views.tracker_page'], {'name': '"""tracker-page"""'}), "('^$', tracker.views.tracker_page, name='tracker-page')\n", (124, 179), False, 'from django.conf.urls import url, include\n'), ((190, 257), 'django.conf.urls.url', 'url', (['"""^door-opener$"""', 'tracker.views.door_opener'], {'name': '"""door-opener"""'}), "('^door-opener$', tracker.views.door_opener, name='door-opener')\n", (193, 257), False, 'from django.conf.urls import url, include\n'), ((268, 366), 'django.conf.urls.url', 'url', (['"""^login$"""', 'django.contrib.auth.views.login', "{'template_name': 'login.html'}"], {'name': '"""login"""'}), "('^login$', django.contrib.auth.views.login, {'template_name':\n 'login.html'}, name='login')\n", (271, 366), False, 'from django.conf.urls import url, include\n'), ((373, 462), 'django.conf.urls.url', 'url', (['"""^logout$"""', 'django.contrib.auth.views.logout', "{'next_page': '/'}"], {'name': '"""logout"""'}), "('^logout$', django.contrib.auth.views.logout, {'next_page': '/'}, name=\n 'logout')\n", (376, 462), False, 'from django.conf.urls import url, include\n')]
|
#!/usr/bin/env python
# Copyright (c) 2021 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
"""
Here are defined all the CARLA sensors
"""
import copy
import math
import numpy as np
import carla
# ==================================================================================================
# -- BaseSensor -----------------------------------------------------------------------------------
# ==================================================================================================
class BaseSensor(object):
def __init__(self, name, attributes, interface, parent):
self.name = name
self.attributes = attributes
self.interface = interface
self.parent = parent
self.interface.register(self.name, self)
def is_event_sensor(self):
return False
def parse(self):
raise NotImplementedError
def update_sensor(self, data, frame):
if not self.is_event_sensor():
self.interface._data_buffers.put((self.name, frame, self.parse(data)))
else:
self.interface._event_data_buffers.put((self.name, frame, self.parse(data)))
def callback(self, data):
self.update_sensor(data, data.frame)
def destroy(self):
raise NotImplementedError
class CarlaSensor(BaseSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
world = self.parent.get_world()
type_ = self.attributes.pop("type", "")
transform = self.attributes.pop("transform", "0,0,0,0,0,0")
if isinstance(transform, str):
transform = [float(x) for x in transform.split(",")]
assert len(transform) == 6
blueprint = world.get_blueprint_library().find(type_)
blueprint.set_attribute("role_name", name)
for key, value in attributes.items():
blueprint.set_attribute(str(key), str(value))
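        # Config order is x, y, z, roll, pitch, yaw, while carla.Rotation takes
        # (pitch, yaw, roll), hence the reordered indices below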
transform = carla.Transform(
carla.Location(transform[0], transform[1], transform[2]),
carla.Rotation(transform[4], transform[5], transform[3])
)
self.sensor = world.spawn_actor(blueprint, transform, attach_to=self.parent)
self.sensor.listen(self.callback)
def destroy(self):
if self.sensor is not None:
self.sensor.destroy()
self.sensor = None
class PseudoSensor(BaseSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def callback(self, data, frame):
self.update_sensor(data, frame)
# ==================================================================================================
# -- Cameras -----------------------------------------------------------------------------------
# ==================================================================================================
class BaseCamera(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def parse(self, sensor_data):
"""Parses the Image into an numpy array"""
# sensor_data: [fov, height, width, raw_data]
array = np.frombuffer(sensor_data.raw_data, dtype=np.dtype("uint8"))
array = np.reshape(array, (sensor_data.height, sensor_data.width, 4))
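        # Drop the alpha channel, then reverse the BGR channel order to get RGB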
array = array[:, :, :3]
array = array[:, :, ::-1]
return array
class CameraRGB(BaseCamera):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
class CameraDepth(BaseCamera):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
class CameraSemanticSegmentation(BaseCamera):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
class CameraDVS(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def is_event_sensor(self):
return True
def parse(self, sensor_data):
"""Parses the DVSEvents into an RGB image"""
# sensor_data: [x, y, t, polarity]
dvs_events = np.frombuffer(sensor_data.raw_data, dtype=np.dtype([
('x', np.uint16), ('y', np.uint16), ('t', np.int64), ('pol', np.bool)]))
dvs_img = np.zeros((sensor_data.height, sensor_data.width, 3), dtype=np.uint8)
dvs_img[dvs_events[:]['y'], dvs_events[:]['x'], dvs_events[:]['pol'] * 2] = 255 # Blue is positive, red is negative
return dvs_img
# ==================================================================================================
# -- LIDAR -----------------------------------------------------------------------------------
# ==================================================================================================
class Lidar(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def parse(self, sensor_data):
"""Parses the LidarMeasurememt into an numpy array"""
# sensor_data: [x, y, z, intensity]
points = np.frombuffer(sensor_data.raw_data, dtype=np.dtype('f4'))
points = copy.deepcopy(points)
points = np.reshape(points, (int(points.shape[0] / 4), 4))
return points
class SemanticLidar(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def parse(self, sensor_data):
"""Parses the SemanticLidarMeasurememt into an numpy array"""
# sensor_data: [x, y, z, cos(angle), actor index, semantic tag]
points = np.frombuffer(sensor_data.raw_data, dtype=np.dtype('f4'))
points = copy.deepcopy(points)
points = np.reshape(points, (int(points.shape[0] / 6), 6))
return points
# ==================================================================================================
# -- Others -----------------------------------------------------------------------------------
# ==================================================================================================
class Radar(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def parse(self, sensor_data):
"""Parses the RadarMeasurement into an numpy array"""
# sensor_data: [depth, azimuth, altitute, velocity]
points = np.frombuffer(sensor_data.raw_data, dtype=np.dtype('f4'))
points = copy.deepcopy(points)
points = np.reshape(points, (int(points.shape[0] / 4), 4))
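        # Reverse the column order so each row reads [velocity, altitude, azimuth, depth]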
points = np.flip(points, 1)
return points
class Gnss(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def parse(self, sensor_data):
"""Parses the GnssMeasurement into an numpy array"""
# sensor_data: [latitude, longitude, altitude]
return np.array([sensor_data.latitude, sensor_data.longitude, sensor_data.altitude], dtype=np.float64)
class Imu(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def parse(self, sensor_data):
"""Parses the IMUMeasurement into an numpy array"""
# sensor_data: [accelerometer, gyroscope, compass]
return np.array([sensor_data.accelerometer.x, sensor_data.accelerometer.y, sensor_data.accelerometer.z,
sensor_data.gyroscope.x, sensor_data.gyroscope.y, sensor_data.gyroscope.z,
sensor_data.compass,
], dtype=np.float64)
class LaneInvasion(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def is_event_sensor(self):
return True
def parse(self, sensor_data):
"""Parses the IMUMeasurement into a list"""
# sensor_data: [transform, lane marking]
return [sensor_data.transform, sensor_data.crossed_lane_markings]
class Collision(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
self._last_event_frame = 0
super().__init__(name, attributes, interface, parent)
def callback(self, data):
# The collision sensor can have multiple callbacks per tick. Get only the first one
if self._last_event_frame != data.frame:
self._last_event_frame = data.frame
self.update_sensor(data, data.frame)
def is_event_sensor(self):
return True
def parse(self, sensor_data):
"""Parses the ObstacleDetectionEvent into a list"""
# sensor_data: [other actor, distance]
impulse = sensor_data.normal_impulse
impulse_value = math.sqrt(impulse.x ** 2 + impulse.y ** 2 + impulse.z ** 2)
return [sensor_data.other_actor, impulse_value]
class Obstacle(CarlaSensor):
def __init__(self, name, attributes, interface, parent):
super().__init__(name, attributes, interface, parent)
def is_event_sensor(self):
return True
def parse(self, sensor_data):
"""Parses the ObstacleDetectionEvent into a list"""
# sensor_data: [other actor, distance]
return [sensor_data.other_actor, sensor_data.distance]
|
[
"copy.deepcopy",
"numpy.flip",
"math.sqrt",
"numpy.dtype",
"numpy.zeros",
"numpy.array",
"numpy.reshape",
"carla.Rotation",
"carla.Location"
] |
[((3460, 3521), 'numpy.reshape', 'np.reshape', (['array', '(sensor_data.height, sensor_data.width, 4)'], {}), '(array, (sensor_data.height, sensor_data.width, 4))\n', (3470, 3521), True, 'import numpy as np\n'), ((4610, 4678), 'numpy.zeros', 'np.zeros', (['(sensor_data.height, sensor_data.width, 3)'], {'dtype': 'np.uint8'}), '((sensor_data.height, sensor_data.width, 3), dtype=np.uint8)\n', (4618, 4678), True, 'import numpy as np\n'), ((5507, 5528), 'copy.deepcopy', 'copy.deepcopy', (['points'], {}), '(points)\n', (5520, 5528), False, 'import copy\n'), ((6046, 6067), 'copy.deepcopy', 'copy.deepcopy', (['points'], {}), '(points)\n', (6059, 6067), False, 'import copy\n'), ((6856, 6877), 'copy.deepcopy', 'copy.deepcopy', (['points'], {}), '(points)\n', (6869, 6877), False, 'import copy\n'), ((6962, 6980), 'numpy.flip', 'np.flip', (['points', '(1)'], {}), '(points, 1)\n', (6969, 6980), True, 'import numpy as np\n'), ((7319, 7419), 'numpy.array', 'np.array', (['[sensor_data.latitude, sensor_data.longitude, sensor_data.altitude]'], {'dtype': 'np.float64'}), '([sensor_data.latitude, sensor_data.longitude, sensor_data.altitude\n ], dtype=np.float64)\n', (7327, 7419), True, 'import numpy as np\n'), ((7734, 7959), 'numpy.array', 'np.array', (['[sensor_data.accelerometer.x, sensor_data.accelerometer.y, sensor_data.\n accelerometer.z, sensor_data.gyroscope.x, sensor_data.gyroscope.y,\n sensor_data.gyroscope.z, sensor_data.compass]'], {'dtype': 'np.float64'}), '([sensor_data.accelerometer.x, sensor_data.accelerometer.y,\n sensor_data.accelerometer.z, sensor_data.gyroscope.x, sensor_data.\n gyroscope.y, sensor_data.gyroscope.z, sensor_data.compass], dtype=np.\n float64)\n', (7742, 7959), True, 'import numpy as np\n'), ((9166, 9225), 'math.sqrt', 'math.sqrt', (['(impulse.x ** 2 + impulse.y ** 2 + impulse.z ** 2)'], {}), '(impulse.x ** 2 + impulse.y ** 2 + impulse.z ** 2)\n', (9175, 9225), False, 'import math\n'), ((2146, 2202), 'carla.Location', 'carla.Location', (['transform[0]', 'transform[1]', 'transform[2]'], {}), '(transform[0], transform[1], transform[2])\n', (2160, 2202), False, 'import carla\n'), ((2216, 2272), 'carla.Rotation', 'carla.Rotation', (['transform[4]', 'transform[5]', 'transform[3]'], {}), '(transform[4], transform[5], transform[3])\n', (2230, 2272), False, 'import carla\n'), ((3425, 3442), 'numpy.dtype', 'np.dtype', (['"""uint8"""'], {}), "('uint8')\n", (3433, 3442), True, 'import numpy as np\n'), ((4495, 4581), 'numpy.dtype', 'np.dtype', (["[('x', np.uint16), ('y', np.uint16), ('t', np.int64), ('pol', np.bool)]"], {}), "([('x', np.uint16), ('y', np.uint16), ('t', np.int64), ('pol', np.\n bool)])\n", (4503, 4581), True, 'import numpy as np\n'), ((5474, 5488), 'numpy.dtype', 'np.dtype', (['"""f4"""'], {}), "('f4')\n", (5482, 5488), True, 'import numpy as np\n'), ((6013, 6027), 'numpy.dtype', 'np.dtype', (['"""f4"""'], {}), "('f4')\n", (6021, 6027), True, 'import numpy as np\n'), ((6823, 6837), 'numpy.dtype', 'np.dtype', (['"""f4"""'], {}), "('f4')\n", (6831, 6837), True, 'import numpy as np\n')]
|
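A hedged usage sketch for the row above: the CarlaSensor base class and the real carla.ObstacleDetectionEvent are not shown here, so a stand-in event object is used to exercise the [other_actor, distance] layout that Obstacle.parse() returns.

class FakeObstacleEvent:
    # Stand-in for carla.ObstacleDetectionEvent (assumption, not shown above).
    def __init__(self, other_actor, distance):
        self.other_actor = other_actor
        self.distance = distance

def on_obstacle(parsed):
    other_actor, distance = parsed  # list layout produced by Obstacle.parse()
    print('obstacle %r at %.2f m' % (other_actor, distance))

event = FakeObstacleEvent('walker.pedestrian.0001', 3.5)
on_obstacle([event.other_actor, event.distance])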
from sqlalchemy import (
Index,
Column,
Integer,
String,
ForeignKey,
)
# from sqlalchemy.orm import relationship
from .meta import Base
class Junction(Base):
__tablename__ = 'user_portfolios'
id = Column(Integer, primary_key=True)
stock_id = Column(String, ForeignKey('stocks.symbol'), nullable=False)
account_id = Column(String, ForeignKey('accounts.username'), nullable=False)
|
[
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] |
[((228, 261), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (234, 261), False, 'from sqlalchemy import Index, Column, Integer, String, ForeignKey\n'), ((292, 319), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""stocks.symbol"""'], {}), "('stocks.symbol')\n", (302, 319), False, 'from sqlalchemy import Index, Column, Integer, String, ForeignKey\n'), ((369, 400), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""accounts.username"""'], {}), "('accounts.username')\n", (379, 400), False, 'from sqlalchemy import Index, Column, Integer, String, ForeignKey\n')]
|
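The row above leaves relationship commented out; below is a hedged sketch of how this junction table could back a many-to-many link. The Account model's columns and the 'Stock' class name are assumptions, not part of the row.

from sqlalchemy import Column, String
from sqlalchemy.orm import relationship
from .meta import Base

class Account(Base):
    __tablename__ = 'accounts'
    username = Column(String, primary_key=True)
    # Traverses user_portfolios by table name, without touching Junction.
    stocks = relationship('Stock', secondary='user_portfolios')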
import pandas as pd
from scipy.io import arff
from sklearn.preprocessing import OneHotEncoder, LabelEncoder, OrdinalEncoder, StandardScaler
from sklearn.impute import SimpleImputer
from sklearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from sklearn.neural_network import MLPClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split, cross_val_score, KFold, GridSearchCV
dataset = pd.read_csv(r'..\..\data\breast-cancer-wisconsin\wdbc.data', header=None) # header=None, usecols=[3,6]
# print(dataset[1].value_counts())
dataset.pop(0)
y = LabelEncoder().fit_transform(dataset.pop(1).values)
si_step = ('si', SimpleImputer(strategy='constant', fill_value='MISSING'))
ohe_step = ('ohe', OneHotEncoder(sparse=False, handle_unknown='ignore'))
oe_step = ('le', OrdinalEncoder())
num_si_step = ('si', SimpleImputer(strategy='mean'))
sc_step = ('sc', StandardScaler())
cat_pipe = Pipeline([si_step, ohe_step])
num_pipe = Pipeline([num_si_step, sc_step])
bin_pipe = Pipeline([oe_step])
transformers = [
# ('cat', cat_pipe, ['DGN', 'PRE6', 'PRE14']),
('num', num_pipe, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
28, 29]),
# ('bin', bin_pipe, ['PRE7', 'PRE8', 'PRE9', 'PRE10', 'PRE11', 'PRE17', 'PRE19', 'PRE25', 'PRE30', 'PRE32']),
]
ct = ColumnTransformer(transformers=transformers)
# X_transformed = ct.fit_transform(dataset)
ml_pipe = Pipeline([
('X_transform', ct),
('mlp', MLPClassifier(solver='adam', alpha=1e-5, hidden_layer_sizes=(4, 3))),
])
kf = KFold(n_splits=5, shuffle=True)
# cv_score = cross_val_score(ml_pipe, dataset, y, cv=kf).mean()
param_grid = {
'X_transform__num__si__strategy': ['mean', 'median'],
'mlp__solver': ['sgd', 'adam', 'lbfgs'],
'mlp__alpha': [1e-1, 1e-3, 1e-5],
'mlp__hidden_layer_sizes': [(5, 2), (4, 3), (4, 4), (5, 5)],
'mlp__activation': ['identity', 'logistic', 'tanh', 'relu'],
}
knn_pipe = Pipeline([
('X_transform', ct),
('knn', KNeighborsClassifier(n_neighbors=8)),
])
ml_pipe.fit(dataset, y)
print(f'All data score: {ml_pipe.score(dataset, y)}')
knn_param_grid = {
'X_transform__num__si__strategy': ['mean', 'median'],
'knn__n_neighbors': range(1, 10),
}
gs = GridSearchCV(ml_pipe, param_grid, cv=kf)
gs.fit(dataset, y)
print(gs.best_params_)
print(gs.best_score_)
print(pd.DataFrame(gs.cv_results_))
|
[
"pandas.DataFrame",
"sklearn.model_selection.GridSearchCV",
"sklearn.impute.SimpleImputer",
"sklearn.preprocessing.StandardScaler",
"pandas.read_csv",
"sklearn.preprocessing.OneHotEncoder",
"sklearn.model_selection.KFold",
"sklearn.preprocessing.LabelEncoder",
"sklearn.compose.ColumnTransformer",
"sklearn.neighbors.KNeighborsClassifier",
"sklearn.neural_network.MLPClassifier",
"sklearn.pipeline.Pipeline",
"sklearn.preprocessing.OrdinalEncoder"
] |
[((467, 543), 'pandas.read_csv', 'pd.read_csv', (['"""..\\\\..\\\\data\\\\breast-cancer-wisconsin\\\\wdbc.data"""'], {'header': 'None'}), "('..\\\\..\\\\data\\\\breast-cancer-wisconsin\\\\wdbc.data', header=None)\n", (478, 543), True, 'import pandas as pd\n'), ((963, 992), 'sklearn.pipeline.Pipeline', 'Pipeline', (['[si_step, ohe_step]'], {}), '([si_step, ohe_step])\n', (971, 992), False, 'from sklearn.pipeline import Pipeline\n'), ((1004, 1036), 'sklearn.pipeline.Pipeline', 'Pipeline', (['[num_si_step, sc_step]'], {}), '([num_si_step, sc_step])\n', (1012, 1036), False, 'from sklearn.pipeline import Pipeline\n'), ((1048, 1067), 'sklearn.pipeline.Pipeline', 'Pipeline', (['[oe_step]'], {}), '([oe_step])\n', (1056, 1067), False, 'from sklearn.pipeline import Pipeline\n'), ((1416, 1460), 'sklearn.compose.ColumnTransformer', 'ColumnTransformer', ([], {'transformers': 'transformers'}), '(transformers=transformers)\n', (1433, 1460), False, 'from sklearn.compose import ColumnTransformer\n'), ((1644, 1675), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': '(5)', 'shuffle': '(True)'}), '(n_splits=5, shuffle=True)\n', (1649, 1675), False, 'from sklearn.model_selection import train_test_split, cross_val_score, KFold, GridSearchCV\n'), ((2334, 2374), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['ml_pipe', 'param_grid'], {'cv': 'kf'}), '(ml_pipe, param_grid, cv=kf)\n', (2346, 2374), False, 'from sklearn.model_selection import train_test_split, cross_val_score, KFold, GridSearchCV\n'), ((697, 753), 'sklearn.impute.SimpleImputer', 'SimpleImputer', ([], {'strategy': '"""constant"""', 'fill_value': '"""MISSING"""'}), "(strategy='constant', fill_value='MISSING')\n", (710, 753), False, 'from sklearn.impute import SimpleImputer\n'), ((774, 826), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {'sparse': '(False)', 'handle_unknown': '"""ignore"""'}), "(sparse=False, handle_unknown='ignore')\n", (787, 826), False, 'from sklearn.preprocessing import OneHotEncoder, LabelEncoder, OrdinalEncoder, StandardScaler\n'), ((845, 861), 'sklearn.preprocessing.OrdinalEncoder', 'OrdinalEncoder', ([], {}), '()\n', (859, 861), False, 'from sklearn.preprocessing import OneHotEncoder, LabelEncoder, OrdinalEncoder, StandardScaler\n'), ((884, 914), 'sklearn.impute.SimpleImputer', 'SimpleImputer', ([], {'strategy': '"""mean"""'}), "(strategy='mean')\n", (897, 914), False, 'from sklearn.impute import SimpleImputer\n'), ((933, 949), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (947, 949), False, 'from sklearn.preprocessing import OneHotEncoder, LabelEncoder, OrdinalEncoder, StandardScaler\n'), ((2445, 2473), 'pandas.DataFrame', 'pd.DataFrame', (['gs.cv_results_'], {}), '(gs.cv_results_)\n', (2457, 2473), True, 'import pandas as pd\n'), ((627, 641), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (639, 641), False, 'from sklearn.preprocessing import OneHotEncoder, LabelEncoder, OrdinalEncoder, StandardScaler\n'), ((1564, 1632), 'sklearn.neural_network.MLPClassifier', 'MLPClassifier', ([], {'solver': '"""adam"""', 'alpha': '(1e-05)', 'hidden_layer_sizes': '(4, 3)'}), "(solver='adam', alpha=1e-05, hidden_layer_sizes=(4, 3))\n", (1577, 1632), False, 'from sklearn.neural_network import MLPClassifier\n'), ((2090, 2125), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {'n_neighbors': '(8)'}), '(n_neighbors=8)\n', (2110, 2125), False, 'from sklearn.neighbors import KNeighborsClassifier\n')]
|
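Note that the code above builds knn_pipe and knn_param_grid but only ever grid-searches ml_pipe; a sketch of the presumably intended second search, reusing names defined in the row:

gs_knn = GridSearchCV(knn_pipe, knn_param_grid, cv=kf)
gs_knn.fit(dataset, y)
print(gs_knn.best_params_, gs_knn.best_score_)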
#!/usr/bin/env python
import os
import re
import sys
from setuptools import (setup, find_packages)
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload -r pypi')
sys.exit()
install_requires = ['colorama',
'colorlog',
'PyYAML>=3.11',
'file-magic']
try:
import concurrent.futures
except ImportError:
install_requires.append('futures')
if sys.version_info < (2, 7):
exit('Python version 2.7 or above is required.')
test_requirements = ['pytest>=3.0.3', 'pytest-cov>=2.4.0']
with open('fval/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
readme = open('README.rst').read()
long_description = readme
setup(
name='fval',
version=version,
description='A file validator.',
long_description=long_description,
author='<NAME>',
author_email='<EMAIL>',
url='http://github.com/jonhadfield/fval',
packages=find_packages(),
data_files=[
('{0}/.fval'.format(os.path.expanduser('~')),
['samples/fval.cfg'])
],
entry_points={
'console_scripts': [
'fval = fval:main'
],
},
include_package_data=True,
install_requires=install_requires,
license='MIT',
scripts=['bin/fval'],
zip_safe=False,
classifiers=(
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: BSD :: Linux',
'Operating System :: POSIX :: BSD :: FreeBSD',
'Operating System :: POSIX :: BSD :: OpenBSD',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy'
),
tests_require=install_requires + test_requirements,
)
|
[
"os.path.expanduser",
"setuptools.find_packages",
"os.system",
"sys.exit"
] |
[((136, 185), 'os.system', 'os.system', (['"""python setup.py sdist upload -r pypi"""'], {}), "('python setup.py sdist upload -r pypi')\n", (145, 185), False, 'import os\n'), ((190, 200), 'sys.exit', 'sys.exit', ([], {}), '()\n', (198, 200), False, 'import sys\n'), ((1108, 1123), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1121, 1123), False, 'from setuptools import setup, find_packages\n'), ((1170, 1193), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (1188, 1193), False, 'import os\n')]
|
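A standalone check of the version-extraction regex used in the setup script above (the sample string is fabricated for the test):

import re
sample = '__version__ = "1.2.3"\n'
match = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', sample, re.MULTILINE)
assert match and match.group(1) == '1.2.3'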
import pygame
from screens import AnswerScreen, FinishScreen, InfoScreen, MenuScreen, QuestionScreen, TestSceen
from time import sleep
class Game:
# Game constants
WIDTH = 1024
HEIGHT = 768
GAME_NAME = '<NAME>'
INTRO_TEXT = ''
# Game states
running = True
__screens = {}
current_screen = MenuScreen.ID
CORRECT_ANSWER = 1
WRONG_ANSWER = 2
TIMES_UP = 3
state_question = CORRECT_ANSWER
graphs = []
standard_graphs = []
current_graph = None
current_question = 0
max_questions = 0
correct_ans = 0
wrong_ans = 0
def __init__(self):
self.screen = pygame.display.set_mode((self.WIDTH, self.HEIGHT))
pygame.display.set_caption(self.GAME_NAME)
icon = pygame.image.load('icon.png')
pygame.display.set_icon(icon)
self.__screens[MenuScreen.ID] = MenuScreen(self)
self.__screens[InfoScreen.ID] = InfoScreen(self)
self.__screens[QuestionScreen.ID] = QuestionScreen(self)
self.__screens[AnswerScreen.ID] = AnswerScreen(self)
self.__screens[FinishScreen.ID] = FinishScreen(self)
#self.__screens[TestSceen.ID] = TestSceen(self)
self.clock = pygame.time.Clock()
def run(self, graphs=[]):
pygame.init()
self.standard_graphs = graphs
self.max_questions = len(graphs)
self.current_graph = graphs[0]
while self.running:
self.__screens[self.current_screen].run()
def exit(self):
self.running = False
def start_game(self):
self.current_question = 0
self.wrong_ans = 0
self.correct_ans = 0
self.graphs = self.standard_graphs
self.max_questions = len(self.graphs)
self.change_screen(QuestionScreen)
def change_screen(self, screen):
self.current_screen = screen.ID
def no_answer_question(self):
#print('path', self.current_graph.path)
self.current_graph.path
self.state_question = self.TIMES_UP
self.change_screen(AnswerScreen)
def answer_question(self, user_answer):
#print('path', self.current_graph.path)
#print(user_answer)
if self.current_graph.path == user_answer:
self.correct_ans+=1
self.state_question = self.CORRECT_ANSWER
else:
self.wrong_ans+=1
self.state_question = self.WRONG_ANSWER
self.change_screen(AnswerScreen)
def next_question(self):
self.current_question = self.current_question+1
if self.current_question>=self.max_questions:
self.current_question = 0
self.change_screen(FinishScreen)
else:
self.change_screen(QuestionScreen)
|
[
"pygame.display.set_icon",
"screens.QuestionScreen",
"pygame.display.set_mode",
"pygame.init",
"screens.AnswerScreen",
"screens.MenuScreen",
"screens.InfoScreen",
"pygame.image.load",
"pygame.display.set_caption",
"pygame.time.Clock",
"screens.FinishScreen"
] |
[((642, 692), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(self.WIDTH, self.HEIGHT)'], {}), '((self.WIDTH, self.HEIGHT))\n', (665, 692), False, 'import pygame\n'), ((710, 752), 'pygame.display.set_caption', 'pygame.display.set_caption', (['self.GAME_NAME'], {}), '(self.GAME_NAME)\n', (736, 752), False, 'import pygame\n'), ((768, 797), 'pygame.image.load', 'pygame.image.load', (['"""icon.png"""'], {}), "('icon.png')\n", (785, 797), False, 'import pygame\n'), ((806, 835), 'pygame.display.set_icon', 'pygame.display.set_icon', (['icon'], {}), '(icon)\n', (829, 835), False, 'import pygame\n'), ((876, 892), 'screens.MenuScreen', 'MenuScreen', (['self'], {}), '(self)\n', (886, 892), False, 'from screens import AnswerScreen, FinishScreen, InfoScreen, MenuScreen, QuestionScreen, TestSceen\n'), ((933, 949), 'screens.InfoScreen', 'InfoScreen', (['self'], {}), '(self)\n', (943, 949), False, 'from screens import AnswerScreen, FinishScreen, InfoScreen, MenuScreen, QuestionScreen, TestSceen\n'), ((994, 1014), 'screens.QuestionScreen', 'QuestionScreen', (['self'], {}), '(self)\n', (1008, 1014), False, 'from screens import AnswerScreen, FinishScreen, InfoScreen, MenuScreen, QuestionScreen, TestSceen\n'), ((1057, 1075), 'screens.AnswerScreen', 'AnswerScreen', (['self'], {}), '(self)\n', (1069, 1075), False, 'from screens import AnswerScreen, FinishScreen, InfoScreen, MenuScreen, QuestionScreen, TestSceen\n'), ((1118, 1136), 'screens.FinishScreen', 'FinishScreen', (['self'], {}), '(self)\n', (1130, 1136), False, 'from screens import AnswerScreen, FinishScreen, InfoScreen, MenuScreen, QuestionScreen, TestSceen\n'), ((1214, 1233), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (1231, 1233), False, 'import pygame\n'), ((1273, 1286), 'pygame.init', 'pygame.init', ([], {}), '()\n', (1284, 1286), False, 'import pygame\n')]
|
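A hedged launch sketch for the Game class above; build_graphs() is a hypothetical factory, and the only interface the class requires of each graph is a path attribute (compared in answer_question()).

if __name__ == '__main__':
    graphs = build_graphs()  # hypothetical: must yield objects with a .path
    Game().run(graphs=graphs)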
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for wstl.magics._location."""
import json
from os import path
from absl.testing import absltest
from IPython.testing.globalipapp import get_ipython
from unittest import mock
from google.cloud import storage
from wstl.magics import _constants
from wstl.magics import _location
_ip = get_ipython()
class LocationTest(absltest.TestCase):
def test_parse_location_json_prefix_object_success(self):
shell = mock.MagicMock()
input_wstl_arg = """json://{"hello":"world"}"""
locations = _location.parse_location(shell, input_wstl_arg, file_ext=None)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json, "{\"hello\":\"world\"}")
def test_parse_location_json_prefix_list_success(self):
shell = mock.MagicMock()
input_wstl_arg = """json://[{"first": "world"},{"second": "world"}]"""
locations = _location.parse_location(shell, input_wstl_arg, file_ext=None)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json,
"""[{"first": "world"},{"second": "world"}]""")
@mock.patch.object(storage, "Client", autospec=True)
@mock.patch.object(storage, "Bucket", autospec=True)
def test_parse_location_gs_prefix_success(self, mock_bucket, mock_client):
class Item(object):
def __init__(self, bucket_name, name):
self.bucket = bucket_name
self.name = name
class FakeBucket(object):
def __init__(self, bucket_name):
self.name = bucket_name
bucket = FakeBucket("dummy_bucket")
items = [
Item(bucket, "file1.wstl"),
Item(bucket, "lib_folder/file2.wstl"),
Item(bucket, "lib_folder/file3.txt"),
Item(bucket, "input.json")
]
mock_bucket.list_blobs.return_value = iter(items)
mock_client.return_value.get_bucket.return_value = mock_bucket
shell = mock.MagicMock()
input_wstl_arg = "gs://dummy_bucket/input.json"
locations = _location.parse_location(
shell, input_wstl_arg, file_ext=_constants.JSON_FILE_EXT)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("gcs_location"))
self.assertEqual(locations[0].gcs_location, input_wstl_arg)
@mock.patch.object(storage, "Client", autospec=True)
@mock.patch.object(storage, "Bucket", autospec=True)
def test_parse_location_gs_prefix_wildcard_success(self, mock_bucket,
mock_client):
class Item(object):
def __init__(self, bucket, name):
self.bucket = bucket
self.name = name
class FakeBucket(object):
def __init__(self, bucket_name):
self.name = bucket_name
bucket = FakeBucket("dummy_bucket")
items = [
Item(bucket, "file1.txt"),
Item(bucket, "lib_folder/file2.wstl"),
Item(bucket, "lib_folder/file3.wstl"),
Item(bucket, "lib_folder/file4.json"),
Item(bucket, "input.json")
]
mock_bucket.list_blobs.return_value = iter(items)
mock_client.return_value.get_bucket.return_value = mock_bucket
shell = mock.MagicMock()
input_wstl_arg = "gs://dummy_bucket/lib_folder/*"
locations = _location.parse_location(
shell,
input_wstl_arg,
file_ext=_constants.WSTL_FILE_EXT,
load_contents=False)
self.assertLen(locations, 2)
self.assertTrue(locations[0].HasField("gcs_location"))
self.assertEqual(locations[0].gcs_location,
"gs://dummy_bucket/lib_folder/file2.wstl")
self.assertTrue(locations[1].HasField("gcs_location"))
self.assertEqual(locations[1].gcs_location,
"gs://dummy_bucket/lib_folder/file3.wstl")
@mock.patch.object(storage, "Client", autospec=True)
@mock.patch.object(storage, "Bucket", autospec=True)
def test_parse_location_gs_prefix_wildcard_unsupported_ext(
self, mock_bucket, mock_client):
class Item(object):
def __init__(self, bucket, name):
self.bucket = bucket
self.name = name
class FakeBucket(object):
def __init__(self, bucket_name):
self.name = bucket_name
bucket = FakeBucket("dummy_bucket")
items = [
Item(bucket, "file1.txt"),
Item(bucket, "lib_folder/file2.wstl"),
Item(bucket, "lib_folder/file3.wstl"),
Item(bucket, "lib_folder/file4.json"),
Item(bucket, "input.json")
]
mock_bucket.list_blobs.return_value = iter(items)
mock_client.return_value.get_bucket.return_value = mock_bucket
shell = mock.MagicMock()
input_wstl_arg = "gs://dummy_bucket/*.txt"
locations = _location.parse_location(
shell,
input_wstl_arg,
file_ext=_constants.WSTL_FILE_EXT,
load_contents=False)
self.assertEmpty(locations)
def test_parse_location_file_prefix_file_exists_success(self):
shell = mock.MagicMock()
content = """{"hello": "world"}"""
tmp_file = self.create_tempfile(
file_path="dummy.json", content=content, mode="w")
input_wstl_arg = "file://{}".format(tmp_file.full_path)
locations = _location.parse_location(
shell, input_wstl_arg, file_ext=_constants.JSON_FILE_EXT)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json, content)
def test_parse_location_file_prefix_wstl_suffix_success(self):
shell = mock.MagicMock()
content = """Result: $ToUpper("a")"""
tmp_file = self.create_tempfile(
file_path="dummy.wstl", content=content, mode="w")
input_wstl_arg = "file://{}".format(tmp_file.full_path)
locations = _location.parse_location(
shell,
input_wstl_arg,
file_ext=_constants.WSTL_FILE_EXT,
load_contents=False)
self.assertTrue(locations[0].HasField("local_path"))
self.assertEqual(locations[0].local_path, tmp_file.full_path)
def test_parse_location_file_prefix_wstl_wildcard_success(self):
shell = mock.MagicMock()
content = """Result: $ToUpper("a")"""
tmp_file = self.create_tempfile(
file_path="dummy.wstl", content=content, mode="w")
input_wstl_arg = "file://{}/*".format(path.dirname(tmp_file.full_path))
locations = _location.parse_location(
shell,
input_wstl_arg,
file_ext=_constants.WSTL_FILE_EXT,
load_contents=False)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("local_path"))
self.assertEqual(locations[0].local_path, tmp_file.full_path)
def test_parse_location_file_prefix_wildcard_success(self):
shell = mock.MagicMock()
content = """{"hello": "world"}"""
tmp_file = self.create_tempfile(
file_path="dummy.json", content=content, mode="w")
input_wstl_arg = "file://{}/*".format(path.dirname(tmp_file.full_path))
locations = _location.parse_location(
shell, input_wstl_arg, file_ext=_constants.JSON_FILE_EXT)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json, content)
def test_parse_location_file_suffix_ndjson_success(self):
shell = mock.MagicMock()
content = """{"first": "item"}\n{"second": "item"}"""
tmp_file = self.create_tempfile(
file_path="dummy.ndjson", content=content, mode="w")
input_wstl_arg = "file://{}".format(tmp_file.full_path)
locations = _location.parse_location(
shell, input_wstl_arg, file_ext=_constants.JSON_FILE_EXT)
self.assertLen(locations, 2)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json, "{\"first\": \"item\"}")
self.assertTrue(locations[1].HasField("inline_json"))
self.assertEqual(locations[1].inline_json, "{\"second\": \"item\"}")
def test_parse_location_file_prefix_textproto_suffix_success(self):
shell = mock.MagicMock()
content = """dummy_field: true"""
tmp_file = self.create_tempfile(
file_path="dummy.textproto", content=content, mode="w")
input_wstl_arg = "file://{}".format(tmp_file.full_path)
locations = _location.parse_location(
shell,
input_wstl_arg,
file_ext=_constants.TEXTPROTO_FILE_EXT,
load_contents=False)
self.assertTrue(locations[0].HasField("local_path"))
self.assertEqual(locations[0].local_path, tmp_file.full_path)
def test_parse_location_file_prefix_textproto_suffix_load_content_success(
self):
shell = mock.MagicMock()
content = """dummy_field: true"""
tmp_file = self.create_tempfile(
file_path="dummy.textproto", content=content, mode="w")
input_wstl_arg = "file://{}".format(tmp_file.full_path)
locations = _location.parse_location(
shell,
input_wstl_arg,
file_ext=_constants.TEXTPROTO_FILE_EXT,
load_contents=True)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json, "dummy_field: true")
def test_parse_location_file_prefix_no_load_content_success(self):
shell = mock.MagicMock()
content = """{"hello": "world"}"""
tmp_file = self.create_tempfile(
file_path="dummy.json", content=content, mode="w")
input_wstl_arg = "file://{}/*".format(path.dirname(tmp_file.full_path))
locations = _location.parse_location(
shell,
input_wstl_arg,
file_ext=_constants.JSON_FILE_EXT,
load_contents=False)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("local_path"))
self.assertEqual(locations[0].local_path, tmp_file.full_path)
def test_parse_location_file_prefix_invalid_path(self):
shell = mock.MagicMock()
content = """{"hello": "world"}"""
tmp_file = self.create_tempfile(content=content, mode="w")
input_wstl_arg = "file://invalid-{}".format(tmp_file.full_path)
locations = _location.parse_location(
shell, input_wstl_arg, file_ext=_constants.JSON_FILE_EXT)
self.assertEmpty(locations)
def test_parse_location_file_prefix_missing_extension(self):
shell = mock.MagicMock()
input_wstl_arg = "file://placeholder.json"
with self.assertRaises(ValueError):
_location.parse_location(shell, input_wstl_arg, file_ext=None)
def test_parse_location_python_prefix_string_success(self):
str_content = """{"hello": "world"}"""
_ip.push("str_content")
input_wstl_arg = "py://str_content"
locations = _location.parse_location(_ip, input_wstl_arg, file_ext=None)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json, str_content)
def test_parse_location_python_prefix_dict_success(self):
dict_content = {"hello": "world"}
_ip.push("dict_content")
input_wstl_arg = "py://dict_content"
locations = _location.parse_location(_ip, input_wstl_arg, file_ext=None)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json, json.dumps(dict_content))
def test_parse_location_python_prefix_list_success(self):
list_content = [{"first": "item"}, {"second": "item"}]
_ip.push("list_content")
input_wstl_arg = "py://list_content"
locations = _location.parse_location(_ip, input_wstl_arg, file_ext=None)
self.assertLen(locations, 1)
self.assertTrue(locations[0].HasField("inline_json"))
self.assertEqual(locations[0].inline_json,
json.dumps(list_content, sort_keys=True))
def test_parse_location_unknown_prefix_failure(self):
shell = mock.MagicMock()
input_wstl_arg = "invalid://blah"
with self.assertRaises(ValueError):
_location.parse_location(shell, input_wstl_arg, file_ext=None)
if __name__ == "__main__":
absltest.main()
|
[
"unittest.mock.patch.object",
"absl.testing.absltest.main",
"wstl.magics._location.parse_location",
"unittest.mock.MagicMock",
"os.path.dirname",
"json.dumps",
"IPython.testing.globalipapp.get_ipython"
] |
[((868, 881), 'IPython.testing.globalipapp.get_ipython', 'get_ipython', ([], {}), '()\n', (879, 881), False, 'from IPython.testing.globalipapp import get_ipython\n'), ((1760, 1811), 'unittest.mock.patch.object', 'mock.patch.object', (['storage', '"""Client"""'], {'autospec': '(True)'}), "(storage, 'Client', autospec=True)\n", (1777, 1811), False, 'from unittest import mock\n'), ((1815, 1866), 'unittest.mock.patch.object', 'mock.patch.object', (['storage', '"""Bucket"""'], {'autospec': '(True)'}), "(storage, 'Bucket', autospec=True)\n", (1832, 1866), False, 'from unittest import mock\n'), ((2874, 2925), 'unittest.mock.patch.object', 'mock.patch.object', (['storage', '"""Client"""'], {'autospec': '(True)'}), "(storage, 'Client', autospec=True)\n", (2891, 2925), False, 'from unittest import mock\n'), ((2929, 2980), 'unittest.mock.patch.object', 'mock.patch.object', (['storage', '"""Bucket"""'], {'autospec': '(True)'}), "(storage, 'Bucket', autospec=True)\n", (2946, 2980), False, 'from unittest import mock\n'), ((4352, 4403), 'unittest.mock.patch.object', 'mock.patch.object', (['storage', '"""Client"""'], {'autospec': '(True)'}), "(storage, 'Client', autospec=True)\n", (4369, 4403), False, 'from unittest import mock\n'), ((4407, 4458), 'unittest.mock.patch.object', 'mock.patch.object', (['storage', '"""Bucket"""'], {'autospec': '(True)'}), "(storage, 'Bucket', autospec=True)\n", (4424, 4458), False, 'from unittest import mock\n'), ((12397, 12412), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (12410, 12412), False, 'from absl.testing import absltest\n'), ((996, 1012), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1010, 1012), False, 'from unittest import mock\n'), ((1081, 1143), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': 'None'}), '(shell, input_wstl_arg, file_ext=None)\n', (1105, 1143), False, 'from wstl.magics import _location\n'), ((1378, 1394), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1392, 1394), False, 'from unittest import mock\n'), ((1486, 1548), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': 'None'}), '(shell, input_wstl_arg, file_ext=None)\n', (1510, 1548), False, 'from wstl.magics import _location\n'), ((2537, 2553), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2551, 2553), False, 'from unittest import mock\n'), ((2622, 2709), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.JSON_FILE_EXT'}), '(shell, input_wstl_arg, file_ext=_constants.\n JSON_FILE_EXT)\n', (2646, 2709), False, 'from wstl.magics import _location\n'), ((3749, 3765), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3763, 3765), False, 'from unittest import mock\n'), ((3836, 3944), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.WSTL_FILE_EXT', 'load_contents': '(False)'}), '(shell, input_wstl_arg, file_ext=_constants.\n WSTL_FILE_EXT, load_contents=False)\n', (3860, 3944), False, 'from wstl.magics import _location\n'), ((5189, 5205), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (5203, 5205), False, 'from unittest import mock\n'), ((5269, 5377), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.WSTL_FILE_EXT', 'load_contents': '(False)'}), '(shell, input_wstl_arg, file_ext=_constants.\n WSTL_FILE_EXT, load_contents=False)\n', (5293, 5377), False, 'from wstl.magics import _location\n'), ((5516, 5532), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (5530, 5532), False, 'from unittest import mock\n'), ((5744, 5831), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.JSON_FILE_EXT'}), '(shell, input_wstl_arg, file_ext=_constants.\n JSON_FILE_EXT)\n', (5768, 5831), False, 'from wstl.magics import _location\n'), ((6028, 6044), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (6042, 6044), False, 'from unittest import mock\n'), ((6259, 6367), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.WSTL_FILE_EXT', 'load_contents': '(False)'}), '(shell, input_wstl_arg, file_ext=_constants.\n WSTL_FILE_EXT, load_contents=False)\n', (6283, 6367), False, 'from wstl.magics import _location\n'), ((6599, 6615), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (6613, 6615), False, 'from unittest import mock\n'), ((6846, 6954), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.WSTL_FILE_EXT', 'load_contents': '(False)'}), '(shell, input_wstl_arg, file_ext=_constants.\n WSTL_FILE_EXT, load_contents=False)\n', (6870, 6954), False, 'from wstl.magics import _location\n'), ((7214, 7230), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (7228, 7230), False, 'from unittest import mock\n'), ((7458, 7545), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.JSON_FILE_EXT'}), '(shell, input_wstl_arg, file_ext=_constants.\n JSON_FILE_EXT)\n', (7482, 7545), False, 'from wstl.magics import _location\n'), ((7770, 7786), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (7784, 7786), False, 'from unittest import mock\n'), ((8019, 8106), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.JSON_FILE_EXT'}), '(shell, input_wstl_arg, file_ext=_constants.\n JSON_FILE_EXT)\n', (8043, 8106), False, 'from wstl.magics import _location\n'), ((8488, 8504), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (8502, 8504), False, 'from unittest import mock\n'), ((8720, 8833), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.TEXTPROTO_FILE_EXT', 'load_contents': '(False)'}), '(shell, input_wstl_arg, file_ext=_constants.\n TEXTPROTO_FILE_EXT, load_contents=False)\n', (8744, 8833), False, 'from wstl.magics import _location\n'), ((9088, 9104), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (9102, 9104), False, 'from unittest import mock\n'), ((9320, 9432), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.TEXTPROTO_FILE_EXT', 'load_contents': '(True)'}), '(shell, input_wstl_arg, file_ext=_constants.\n TEXTPROTO_FILE_EXT, load_contents=True)\n', (9344, 9432), False, 'from wstl.magics import _location\n'), ((9669, 9685), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (9683, 9685), False, 'from unittest import mock\n'), ((9913, 10021), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.JSON_FILE_EXT', 'load_contents': '(False)'}), '(shell, input_wstl_arg, file_ext=_constants.\n JSON_FILE_EXT, load_contents=False)\n', (9937, 10021), False, 'from wstl.magics import _location\n'), ((10277, 10293), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (10291, 10293), False, 'from unittest import mock\n'), ((10480, 10567), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': '_constants.JSON_FILE_EXT'}), '(shell, input_wstl_arg, file_ext=_constants.\n JSON_FILE_EXT)\n', (10504, 10567), False, 'from wstl.magics import _location\n'), ((10680, 10696), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (10694, 10696), False, 'from unittest import mock\n'), ((11043, 11103), 'wstl.magics._location.parse_location', '_location.parse_location', (['_ip', 'input_wstl_arg'], {'file_ext': 'None'}), '(_ip, input_wstl_arg, file_ext=None)\n', (11067, 11103), False, 'from wstl.magics import _location\n'), ((11440, 11500), 'wstl.magics._location.parse_location', '_location.parse_location', (['_ip', 'input_wstl_arg'], {'file_ext': 'None'}), '(_ip, input_wstl_arg, file_ext=None)\n', (11464, 11500), False, 'from wstl.magics import _location\n'), ((11871, 11931), 'wstl.magics._location.parse_location', '_location.parse_location', (['_ip', 'input_wstl_arg'], {'file_ext': 'None'}), '(_ip, input_wstl_arg, file_ext=None)\n', (11895, 11931), False, 'from wstl.magics import _location\n'), ((12202, 12218), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (12216, 12218), False, 'from unittest import mock\n'), ((6796, 6828), 'os.path.dirname', 'path.dirname', (['tmp_file.full_path'], {}), '(tmp_file.full_path)\n', (6808, 6828), False, 'from os import path\n'), ((7408, 7440), 'os.path.dirname', 'path.dirname', (['tmp_file.full_path'], {}), '(tmp_file.full_path)\n', (7420, 7440), False, 'from os import path\n'), ((9863, 9895), 'os.path.dirname', 'path.dirname', (['tmp_file.full_path'], {}), '(tmp_file.full_path)\n', (9875, 9895), False, 'from os import path\n'), ((10790, 10852), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': 'None'}), '(shell, input_wstl_arg, file_ext=None)\n', (10814, 10852), False, 'from wstl.magics import _location\n'), ((11639, 11663), 'json.dumps', 'json.dumps', (['dict_content'], {}), '(dict_content)\n', (11649, 11663), False, 'import json\n'), ((12091, 12131), 'json.dumps', 'json.dumps', (['list_content'], {'sort_keys': '(True)'}), '(list_content, sort_keys=True)\n', (12101, 12131), False, 'import json\n'), ((12303, 12365), 'wstl.magics._location.parse_location', '_location.parse_location', (['shell', 'input_wstl_arg'], {'file_ext': 'None'}), '(shell, input_wstl_arg, file_ext=None)\n', (12327, 12365), False, 'from wstl.magics import _location\n')]
|
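A minimal, self-contained illustration of the mock.patch.object() pattern used throughout the tests above; the Service class is an invented stand-in.

from unittest import mock

class Service:
    def fetch(self):
        return 'real'

with mock.patch.object(Service, 'fetch', return_value='fake') as fake_fetch:
    assert Service().fetch() == 'fake'
fake_fetch.assert_called_once()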
#encoding:utf-8
import requests
ID = '137442'
url = 'http://tu.duowan.cn/gallery/%s.html' % ID
headers = {
'User-Agent': 'Mozilla/5.0 (Linux; U; Android 8.1.0; zh-cn; OE106 Build/OPM1.171019.026) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.132 MQQBrowser/9.2 Mobile Safari/537.36',
'Referer': 'http://tu.duowan.com/gallery/%s.html' % ID,
}
import re
r = requests.get(url,headers=headers)
if r.status_code == 200: # ok
s = r.content
html = s.decode()
print(html)
# strs = str(s)
# print(strs.replace(r"\\\\",r"\\"))
# html = r.content.decode('utf-8')
# print(html)
a = re.findall('imgJson = ([\s\S]*?);',html)
# print(a)
# exit()
# print(a)
# print(type(a))
# print(a[0])
import json
jsonp = json.loads(a[0])
folder = jsonp['gallery_title']
picInfo = jsonp['picInfo']
print(len(picInfo))
print(folder)
for i in picInfo:
add_intro = i['add_intro']
url = i['url']
print(add_intro,url)
|
[
"re.findall",
"json.loads",
"requests.get"
] |
[((388, 422), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (400, 422), False, 'import requests\n'), ((640, 683), 're.findall', 're.findall', (['"""imgJson = ([\\\\s\\\\S]*?);"""', 'html'], {}), "('imgJson = ([\\\\s\\\\S]*?);', html)\n", (650, 683), False, 'import re\n'), ((791, 807), 'json.loads', 'json.loads', (['a[0]'], {}), '(a[0])\n', (801, 807), False, 'import json\n')]
|
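A standalone check of the imgJson extraction pattern the scraper above relies on (the HTML snippet is fabricated for the test):

import re
import json

html = 'var imgJson = {"gallery_title": "demo", "picInfo": []};'
blob = re.findall(r'imgJson = ([\s\S]*?);', html)[0]
data = json.loads(blob)
assert data['gallery_title'] == 'demo'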
#!/usr/bin/env python
from __future__ import print_function
import pyeapi
connection = pyeapi.connect(host='192.168.1.16')
output = connection.execute(['enable', 'show version'])
print(('My system MAC address is', output['result'][1]['systemMacAddress']))
|
[
"pyeapi.connect"
] |
[((88, 123), 'pyeapi.connect', 'pyeapi.connect', ([], {'host': '"""192.168.1.16"""'}), "(host='192.168.1.16')\n", (102, 123), False, 'import pyeapi\n')]
|
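A hedged variant of the connection above: pyeapi can also load host and credentials from an eapi.conf profile via connect_to(); the profile name 'veos01' is a placeholder.

import pyeapi

# Assumes a [connection:veos01] section exists in ~/.eapi.conf.
node = pyeapi.connect_to('veos01')
output = node.enable('show version')
print(output[0]['result']['systemMacAddress'])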
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
#http://www.pasteall.org/47943/python
# <pep8-80 compliant>
import bpy
import os
import mathutils
from mathutils import Vector
from contextlib import redirect_stdout
import io
stdout = io.StringIO()
# TODO, make options
PREF_SCALE = 1
PREF_FACE_THICK = 0.1
PREF_GRID_SNAP = False
# Quake 1/2?
# Quake 3+?
PREF_DEF_TEX_OPTS = '0 0 0 1 1 0 0 0' # not user settable yet
PREF_NULL_TEX = 'NULL' # not user settable yet
PREF_INVIS_TEX = 'common/caulk'
PREF_DOOM3_FORMAT = True
def face_uv_image_get(me, face):
uv_faces = me.uv_textures.active
if uv_faces:
return uv_faces.data[face.index].image
else:
return None
def face_uv_coords_get(me, face):
tf_uv_faces = me.tessface_uv_textures.active
if tf_uv_faces:
return tf_uv_faces.data[face.index].uv_raw[:]
else:
return None
def face_material_get(me, face):
idx = face.material_index
return me.materials[idx] if idx < len(me.materials) else None
def poly_to_doom(me, p, radius):
"""
Convert a face into Doom3 representation (infinite plane defined by its normal
and distance from origin along that normal).
"""
# Compute the distance to the mesh from the origin to the plane.
# Line from origin in the direction of the face normal.
origin = Vector((0, 0, 0))
target = Vector(p.normal) * radius
# Find the target point.
intersect = mathutils.geometry.intersect_line_plane(origin, target, Vector(p.center), Vector(p.normal))
# We have to handle cases where intersection with face happens on the "negative" part of the vector!
length = intersect.length
nor = p.normal.copy()
if (nor.dot(intersect.normalized()) > 0):
length *= -1
nor.resize_4d()
nor.w = length
return nor
def doom_are_same_planes(p1, p2):
"""
To avoid writing two planes that are nearly the same!
"""
# XXX Is sign of the normal/length important in Doom for plane definition??? For now, assume that no!
if p1.w < 0:
p1 = p1 * -1.0
if p2.w < 0:
p2 = p2 * -1.0
threshold = 0.0001
if abs(p1.w - p2.w) > threshold:
return False
# Distances are the same, check orientations!
if p1.xyz.normalized().dot(p2.xyz.normalized()) < (1 - threshold):
return False
# Same plane!
return True
def doom_check_plane(done_planes, plane):
"""
Check if plane as already been handled, or is similar enough to an already handled one.
Return True if it has already been handled somehow.
done_planes is expected to be a dict {written_plane: {written_plane, similar_plane_1, similar_plane_2, ...}, ...}.
"""
p_key = tuple(plane)
if p_key in done_planes:
return True
for p, dp in done_planes.items():
if p_key in dp:
return True
elif doom_are_same_planes(Vector(p), plane):
done_planes[p].add(p_key)
return True
done_planes[p_key] = {p_key}
return False
def ob_to_radius(ob):
radius = max(Vector(pt).length for pt in ob.bound_box)
# Make the ray casts, go just outside the bounding sphere.
return radius * 1.1
def is_cube_facegroup(faces):
"""
Returns a bool, true if the faces make up a cube
"""
# cube must have 6 faces
if len(faces) != 6:
# print('1')
return False
# Check for quads and that there are 6 unique verts
verts = {}
for f in faces:
f_v = f.vertices[:]
if len(f_v) != 4:
return False
for v in f_v:
verts[v] = 0
if len(verts) != 8:
return False
# Now check that each vert has 3 face users
for f in faces:
f_v = f.vertices[:]
for v in f_v:
verts[v] += 1
for v in verts.values():
if v != 3: # vert has 3 users?
return False
# Could we check for 12 unique edges??, probably not needed.
return True
def is_tricyl_facegroup(faces):
"""
is the face group a tri cylinder
Returns a bool, true if the faces make an extruded tri solid
"""
# tricyl must have 5 faces
if len(faces) != 5:
# print('1')
return False
# Check for quads and that there are 6 unique verts
verts = {}
tottri = 0
for f in faces:
if len(f.vertices) == 3:
tottri += 1
for vi in f.vertices:
verts[vi] = 0
if len(verts) != 6 or tottri != 2:
return False
# Now check that each vert has 3 face users
for f in faces:
for vi in f.vertices:
verts[vi] += 1
for v in verts.values():
if v != 3: # vert has 3 users?
return False
# Could we check for 9 unique edges??, probably not needed.
return True
def split_mesh_in_convex_parts(me):
"""
Not implemented yet. Should split given mesh into manifold convex meshes.
For now simply always returns the given mesh.
"""
# TODO.
return (me,)
def round_vec(v):
if PREF_GRID_SNAP:
return v.to_tuple(0)
else:
return v[:]
def write_quake_brush_cube(fw, ob, faces):
"""
Takes 6 faces and writes a brush,
these faces can be from 1 mesh, 1 cube within a mesh of larger cubes
Faces could even come from different meshes or be contrived.
"""
format_vec = '( %d %d %d ) ' if PREF_GRID_SNAP else '( %.9g %.9g %.9g ) '
fw('// brush from cube\n{\n')
for f in faces:
# from 4 verts this gets them in reversed order and only 3 of them
# 0,1,2,3 -> 2,1,0
me = f.id_data # XXX25
for v in f.vertices[:][2::-1]:
fw(format_vec % round_vec(me.vertices[v].co))
material = face_material_get(me, f)
if material and material.game_settings.invisible:
fw(PREF_INVIS_TEX)
else:
image = face_uv_image_get(me, f)
if image:
fw(os.path.splitext(bpy.path.basename(image.filepath))[0])
else:
fw(PREF_NULL_TEX)
fw(" %s\n" % PREF_DEF_TEX_OPTS) # Texture stuff ignored for now
fw('}\n')
def write_quake_brush_face(fw, ob, face):
"""
takes a face and writes it as a brush
each face is a cube/brush.
"""
format_vec = '( %d %d %d ) ' if PREF_GRID_SNAP else '( %.9g %.9g %.9g ) '
image_text = PREF_NULL_TEX
me = face.id_data
material = face_material_get(me, face)
if material and material.game_settings.invisible:
image_text = PREF_INVIS_TEX
else:
image = face_uv_image_get(me, face)
if image:
image_text = os.path.splitext(bpy.path.basename(image.filepath))[0]
# reuse face vertices
f_vertices = [me.vertices[vi] for vi in face.vertices]
# original verts as tuples for writing
orig_vco = tuple(round_vec(v.co) for v in f_vertices)
# new verts that give the face a thickness
dist = PREF_SCALE * PREF_FACE_THICK
new_vco = tuple(round_vec(v.co - (v.normal * dist)) for v in f_vertices)
#new_vco = [round_vec(v.co - (face.no * dist)) for v in face]
fw('// brush from face\n{\n')
# front
for co in orig_vco[2::-1]:
fw(format_vec % co)
fw(image_text)
fw(" %s\n" % PREF_DEF_TEX_OPTS) # Texture stuff ignored for now
for co in new_vco[:3]:
fw(format_vec % co)
if image and not material.game_settings.use_backface_culling: #uf.use_twoside:
fw(image_text)
else:
fw(PREF_INVIS_TEX)
fw(" %s\n" % PREF_DEF_TEX_OPTS) # Texture stuff ignored for now
# sides.
if len(orig_vco) == 3: # Tri, it seemms tri brushes are supported.
index_pairs = ((0, 1), (1, 2), (2, 0))
else:
index_pairs = ((0, 1), (1, 2), (2, 3), (3, 0))
for i1, i2 in index_pairs:
for co in orig_vco[i1], orig_vco[i2], new_vco[i2]:
fw(format_vec % co)
fw(PREF_INVIS_TEX)
fw(" %s\n" % PREF_DEF_TEX_OPTS) # Texture stuff ignored for now
fw('}\n')
def write_doom_brush(fw, ob, me):
"""
Takes a mesh object and writes its convex parts.
"""
format_vec = '( {} {} {} {} ) '
format_vec_uv = '( ( {} {} {} ) ( {} {} {} ) ) '
# Get the bounding sphere for the object for ray-casting
radius = ob_to_radius(ob)
fw('// brush from faces\n{\n'
'brushDef3\n{\n'
)
done_planes = {} # Store already written plane, to avoid writing the same one (or a similar-enough one) again.
for p in me.polygons:
image_text = PREF_NULL_TEX
material = face_material_get(me, p)
if material:
if material.game_settings.invisible:
image_text = PREF_INVIS_TEX
else:
image_text = material.name
# reuse face vertices
plane = poly_to_doom(me, p, radius)
if plane is None:
print(" ERROR: Could not create the plane from polygon!");
elif doom_check_plane(done_planes, plane):
#print(" WARNING: Polygon too similar to another one!");
pass
else:
fw(format_vec.format(*plane.to_tuple(6)))
fw(format_vec_uv.format(0.015625, 0, 1, 0, 0.015625, 1)) # TODO insert UV stuff here
fw('"%s" ' % image_text)
fw("%s\n" % PREF_DEF_TEX_OPTS) # Texture stuff ignored for now
fw('}\n}\n')
def write_node_map(fw, ob):
"""
Writes the properties of an object (empty in this case)
as a MAP node as long as it has the property name - classname
returns True/False based on weather a node was written
"""
props = [(p.name, p.value) for p in ob.game.properties]
IS_MAP_NODE = False
for name, value in props:
if name == "classname":
IS_MAP_NODE = True
break
if not IS_MAP_NODE:
return False
# Write a node
fw('{\n')
for name_value in props:
fw('"%s" "%s"\n' % name_value)
fw('"origin" "%.9g %.9g %.9g"\n' % round_vec(ob.matrix_world.to_translation()))
fw('}\n')
return True
def split_objects(context, objects):
scene = context.scene
final_objects = []
bpy.ops.object.select_all(action='DESELECT')
for ob in objects:
ob.select = True
bpy.ops.object.duplicate()
objects = bpy.context.selected_objects
bpy.ops.object.select_all(action='DESELECT')
tot_ob = len(objects)
for i, ob in enumerate(objects):
print("Splitting object: %d/%d" % (i, tot_ob))
ob.select = True
if ob.type == "MESH":
scene.objects.active = ob
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.mesh.select_mode(type='EDGE')
bpy.ops.object.mode_set(mode='OBJECT')
for edge in ob.data.edges:
if edge.use_seam:
edge.select = True
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.edge_split()
bpy.ops.mesh.separate(type='LOOSE')
bpy.ops.object.mode_set(mode='OBJECT')
split_objects = context.selected_objects
for split_ob in split_objects:
assert(split_ob.type == "MESH")
scene.objects.active = split_ob
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_mode(type='EDGE')
bpy.ops.mesh.select_all(action="SELECT")
bpy.ops.mesh.region_to_loop()
bpy.ops.mesh.fill_holes(sides=8)
slot_idx = 0
for slot_idx, m in enumerate(split_ob.material_slots):
if m.name == "textures/common/caulk":
break
#if m.name != "textures/common/caulk":
# mat = bpy.data.materials.new("textures/common/caulk")
# bpy.context.object.data.materials.append(mat)
split_ob.active_material_index = slot_idx # we need to use either actual material name or custom property instead of index
bpy.ops.object.material_slot_assign()
with redirect_stdout(stdout):
bpy.ops.mesh.remove_doubles()
bpy.ops.mesh.quads_convert_to_tris()
bpy.ops.mesh.tris_convert_to_quads()
bpy.ops.object.mode_set(mode='OBJECT')
final_objects += split_objects
ob.select = False
print(final_objects)
return final_objects
def export_map(context, filepath):
"""
pup_block = [\
('Scale:', PREF_SCALE, 1, 1000,
'Scale the blender scene by this value.'),\
('Face Width:', PREF_FACE_THICK, 0.01, 10,
'Thickness of faces exported as brushes.'),\
('Grid Snap', PREF_GRID_SNAP,
'snaps floating point values to whole numbers.'),\
'Null Texture',\
('', PREF_NULL_TEX, 1, 128,
'Export textureless faces with this texture'),\
'Unseen Texture',\
('', PREF_INVIS_TEX, 1, 128,
'Export invisible faces with this texture'),\
]
if not Draw.PupBlock('map export', pup_block):
return
"""
import time
from mathutils import Matrix
from bpy_extras import mesh_utils
t = time.time()
print("Map Exporter 0.0")
scene = context.scene
objects = context.selected_objects
obs_mesh = []
obs_lamp = []
obs_surf = []
obs_empty = []
SCALE_MAT = Matrix()
SCALE_MAT[0][0] = SCALE_MAT[1][1] = SCALE_MAT[2][2] = PREF_SCALE
TOTBRUSH = TOTLAMP = TOTNODE = 0
for ob in objects:
type = ob.type
if type == 'MESH':
obs_mesh.append(ob)
elif type == 'SURFACE':
obs_surf.append(ob)
elif type == 'LAMP':
obs_lamp.append(ob)
elif type == 'EMPTY':
obs_empty.append(ob)
obs_mesh = split_objects(context, obs_mesh)
with open(filepath, 'w') as fl:
fw = fl.write
if obs_mesh or obs_surf:
if PREF_DOOM3_FORMAT:
fw('Version 2')
# brushes and surf's must be under worldspan
fw('\n// entity 0\n')
fw('{\n')
fw('"classname" "worldspawn"\n')
print("\twriting cubes from meshes")
tot_ob = len(obs_mesh)
for i, ob in enumerate(obs_mesh):
print("Exporting object: %d/%d" % (i, tot_ob))
dummy_mesh = ob.to_mesh(scene, True, 'PREVIEW')
#print len(mesh_split2connected(dummy_mesh))
# 1 to tx the normals also
dummy_mesh.transform(ob.matrix_world * SCALE_MAT)
# High quality normals
#XXX25: BPyMesh.meshCalcNormals(dummy_mesh)
if PREF_DOOM3_FORMAT:
for me in split_mesh_in_convex_parts(dummy_mesh):
write_doom_brush(fw, ob, me)
TOTBRUSH += 1
if (me is not dummy_mesh):
bpy.data.meshes.remove(me)
else:
# We need tessfaces
dummy_mesh.update(calc_tessface=True)
# Split mesh into connected regions
for face_group in mesh_utils.mesh_linked_tessfaces(dummy_mesh):
if is_cube_facegroup(face_group):
write_quake_brush_cube(fw, ob, face_group)
TOTBRUSH += 1
elif is_tricyl_facegroup(face_group):
write_quake_brush_cube(fw, ob, face_group)
TOTBRUSH += 1
else:
for f in face_group:
write_quake_brush_face(fw, ob, f)
TOTBRUSH += 1
#print 'warning, not exporting "%s" it is not a cube' % ob.name
bpy.data.meshes.remove(dummy_mesh)
valid_dims = 3, 5, 7, 9, 11, 13, 15
for ob in obs_surf:
'''
Surf, patches
'''
data = ob.data
surf_name = data.name
mat = ob.matrix_world * SCALE_MAT
# This is what a valid patch looks like
"""
// brush 0
{
patchDef2
{
NULL
( 3 3 0 0 0 )
(
( ( -64 -64 0 0 0 ) ( -64 0 0 0 -2 ) ( -64 64 0 0 -4 ) )
( ( 0 -64 0 2 0 ) ( 0 0 0 2 -2 ) ( 0 64 0 2 -4 ) )
( ( 64 -64 0 4 0 ) ( 64 0 0 4 -2 ) ( 80 88 0 4 -4 ) )
)
}
}
"""
for i, nurb in enumerate(data.splines):
u = nurb.point_count_u
v = nurb.point_count_v
if u in valid_dims and v in valid_dims:
fw('// brush %d surf_name\n' % i)
fw('{\n')
fw('patchDef2\n')
fw('{\n')
fw('NULL\n')
fw('( %d %d 0 0 0 )\n' % (u, v))
fw('(\n')
u_iter = 0
for p in nurb.points:
if u_iter == 0:
fw('(')
u_iter += 1
# add nmapping 0 0 ?
if PREF_GRID_SNAP:
fw(" ( %d %d %d 0 0 )" %
round_vec(mat * p.co.xyz))
else:
fw(' ( %.6f %.6f %.6f 0 0 )' %
(mat * p.co.xyz)[:])
# Move to next line
if u_iter == u:
fw(' )\n')
u_iter = 0
fw(')\n')
fw('}\n')
fw('}\n')
# Debugging
# for p in nurb: print 'patch', p
else:
print("Warning: not exporting patch",
surf_name, u, v, 'Unsupported')
if obs_mesh or obs_surf:
fw('}\n') # end worldspan
print("\twriting lamps")
for ob in obs_lamp:
print("\t\t%s" % ob.name)
lamp = ob.data
fw('{\n')
fw('"classname" "light"\n')
fw('"light" "%.6f"\n' % (lamp.distance * PREF_SCALE))
if PREF_GRID_SNAP:
fw('"origin" "%d %d %d"\n' %
tuple([round(axis * PREF_SCALE)
for axis in ob.matrix_world.to_translation()]))
else:
fw('"origin" "%.6f %.6f %.6f"\n' %
tuple([axis * PREF_SCALE
for axis in ob.matrix_world.to_translation()]))
fw('"_color" "%.6f %.6f %.6f"\n' % tuple(lamp.color))
fw('"style" "0"\n')
fw('}\n')
TOTLAMP += 1
print("\twriting empty objects as nodes")
for ob in obs_empty:
if write_node_map(fw, ob):
print("\t\t%s" % ob.name)
TOTNODE += 1
else:
print("\t\tignoring %s" % ob.name)
for ob in obs_mesh:
scene.objects.unlink(ob)
bpy.data.objects.remove(ob)
print("Exported Map in %.4fsec" % (time.time() - t))
print("Brushes: %d Nodes: %d Lamps %d\n" % (TOTBRUSH, TOTNODE, TOTLAMP))
def save(operator,
context,
filepath=None,
global_scale=1.0,
face_thickness=0.1,
texture_null="NULL",
texture_opts='0 0 0 1 1 0 0 0',
grid_snap=False,
doom3_format=True,
):
global PREF_SCALE
global PREF_FACE_THICK
global PREF_NULL_TEX
global PREF_DEF_TEX_OPTS
global PREF_GRID_SNAP
global PREF_DOOM3_FORMAT
PREF_SCALE = global_scale
PREF_FACE_THICK = face_thickness
PREF_NULL_TEX = texture_null
PREF_DEF_TEX_OPTS = texture_opts
PREF_GRID_SNAP = grid_snap
PREF_DOOM3_FORMAT = doom3_format
if (PREF_DOOM3_FORMAT):
PREF_DEF_TEX_OPTS = '0 0 0'
else:
PREF_DEF_TEX_OPTS = '0 0 0 1 1 0 0 0'
export_map(context, filepath)
return {'FINISHED'}
|
[
"bpy.data.objects.remove",
"bpy.ops.mesh.edge_split",
"bpy.ops.mesh.region_to_loop",
"bpy.ops.mesh.tris_convert_to_quads",
"bpy.ops.object.duplicate",
"bpy.path.basename",
"bpy.ops.object.select_all",
"bpy.ops.mesh.separate",
"bpy_extras.mesh_utils.mesh_linked_tessfaces",
"bpy.ops.mesh.select_mode",
"bpy.ops.mesh.remove_doubles",
"io.StringIO",
"bpy.data.meshes.remove",
"contextlib.redirect_stdout",
"bpy.ops.mesh.fill_holes",
"time.time",
"bpy.ops.mesh.select_all",
"mathutils.Vector",
"bpy.ops.mesh.quads_convert_to_tris",
"bpy.ops.object.mode_set",
"bpy.ops.object.material_slot_assign",
"mathutils.Matrix"
] |
[((983, 996), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (994, 996), False, 'import io\n'), ((2084, 2101), 'mathutils.Vector', 'Vector', (['(0, 0, 0)'], {}), '((0, 0, 0))\n', (2090, 2101), False, 'from mathutils import Vector\n'), ((10926, 10970), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (10951, 10970), False, 'import bpy\n'), ((11024, 11050), 'bpy.ops.object.duplicate', 'bpy.ops.object.duplicate', ([], {}), '()\n', (11048, 11050), False, 'import bpy\n'), ((11099, 11143), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (11124, 11143), False, 'import bpy\n'), ((14065, 14076), 'time.time', 'time.time', ([], {}), '()\n', (14074, 14076), False, 'import time\n'), ((14264, 14272), 'mathutils.Matrix', 'Matrix', ([], {}), '()\n', (14270, 14272), False, 'from mathutils import Matrix\n'), ((2115, 2131), 'mathutils.Vector', 'Vector', (['p.normal'], {}), '(p.normal)\n', (2121, 2131), False, 'from mathutils import Vector\n'), ((2242, 2258), 'mathutils.Vector', 'Vector', (['p.center'], {}), '(p.center)\n', (2248, 2258), False, 'from mathutils import Vector\n'), ((2260, 2276), 'mathutils.Vector', 'Vector', (['p.normal'], {}), '(p.normal)\n', (2266, 2276), False, 'from mathutils import Vector\n'), ((20087, 20114), 'bpy.data.objects.remove', 'bpy.data.objects.remove', (['ob'], {}), '(ob)\n', (20110, 20114), False, 'import bpy\n'), ((11377, 11413), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""EDIT"""'}), "(mode='EDIT')\n", (11400, 11413), False, 'import bpy\n'), ((11426, 11468), 'bpy.ops.mesh.select_all', 'bpy.ops.mesh.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (11449, 11468), False, 'import bpy\n'), ((11481, 11518), 'bpy.ops.mesh.select_mode', 'bpy.ops.mesh.select_mode', ([], {'type': '"""EDGE"""'}), "(type='EDGE')\n", (11505, 11518), False, 'import bpy\n'), ((11531, 11569), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""'}), "(mode='OBJECT')\n", (11554, 11569), False, 'import bpy\n'), ((11694, 11730), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""EDIT"""'}), "(mode='EDIT')\n", (11717, 11730), False, 'import bpy\n'), ((11743, 11768), 'bpy.ops.mesh.edge_split', 'bpy.ops.mesh.edge_split', ([], {}), '()\n', (11766, 11768), False, 'import bpy\n'), ((11781, 11816), 'bpy.ops.mesh.separate', 'bpy.ops.mesh.separate', ([], {'type': '"""LOOSE"""'}), "(type='LOOSE')\n", (11802, 11816), False, 'import bpy\n'), ((11829, 11867), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""'}), "(mode='OBJECT')\n", (11852, 11867), False, 'import bpy\n'), ((3638, 3647), 'mathutils.Vector', 'Vector', (['p'], {}), '(p)\n', (3644, 3647), False, 'from mathutils import Vector\n'), ((3810, 3820), 'mathutils.Vector', 'Vector', (['pt'], {}), '(pt)\n', (3816, 3820), False, 'from mathutils import Vector\n'), ((12078, 12114), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""EDIT"""'}), "(mode='EDIT')\n", (12101, 12114), False, 'import bpy\n'), ((12131, 12168), 'bpy.ops.mesh.select_mode', 'bpy.ops.mesh.select_mode', ([], {'type': '"""EDGE"""'}), "(type='EDGE')\n", (12155, 12168), False, 'import bpy\n'), ((12185, 12225), 'bpy.ops.mesh.select_all', 'bpy.ops.mesh.select_all', ([], {'action': '"""SELECT"""'}), "(action='SELECT')\n", (12208, 12225), False, 'import bpy\n'), ((12242, 12271), 'bpy.ops.mesh.region_to_loop', 'bpy.ops.mesh.region_to_loop', ([], {}), '()\n', (12269, 12271), False, 'import bpy\n'), ((12288, 12320), 'bpy.ops.mesh.fill_holes', 'bpy.ops.mesh.fill_holes', ([], {'sides': '(8)'}), '(sides=8)\n', (12311, 12320), False, 'import bpy\n'), ((12889, 12926), 'bpy.ops.object.material_slot_assign', 'bpy.ops.object.material_slot_assign', ([], {}), '()\n', (12924, 12926), False, 'import bpy\n'), ((13038, 13074), 'bpy.ops.mesh.quads_convert_to_tris', 'bpy.ops.mesh.quads_convert_to_tris', ([], {}), '()\n', (13072, 13074), False, 'import bpy\n'), ((13091, 13127), 'bpy.ops.mesh.tris_convert_to_quads', 'bpy.ops.mesh.tris_convert_to_quads', ([], {}), '()\n', (13125, 13127), False, 'import bpy\n'), ((13144, 13182), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""'}), "(mode='OBJECT')\n", (13167, 13182), False, 'import bpy\n'), ((16021, 16065), 'bpy_extras.mesh_utils.mesh_linked_tessfaces', 'mesh_utils.mesh_linked_tessfaces', (['dummy_mesh'], {}), '(dummy_mesh)\n', (16053, 16065), False, 'from bpy_extras import mesh_utils\n'), ((16661, 16695), 'bpy.data.meshes.remove', 'bpy.data.meshes.remove', (['dummy_mesh'], {}), '(dummy_mesh)\n', (16683, 16695), False, 'import bpy\n'), ((20155, 20166), 'time.time', 'time.time', ([], {}), '()\n', (20164, 20166), False, 'import time\n'), ((7419, 7452), 'bpy.path.basename', 'bpy.path.basename', (['image.filepath'], {}), '(image.filepath)\n', (7436, 7452), False, 'import bpy\n'), ((12948, 12971), 'contextlib.redirect_stdout', 'redirect_stdout', (['stdout'], {}), '(stdout)\n', (12963, 12971), False, 'from contextlib import redirect_stdout\n'), ((12992, 13021), 'bpy.ops.mesh.remove_doubles', 'bpy.ops.mesh.remove_doubles', ([], {}), '()\n', (13019, 13021), False, 'import bpy\n'), ((15800, 15826), 'bpy.data.meshes.remove', 'bpy.data.meshes.remove', (['me'], {}), '(me)\n', (15822, 15826), False, 'import bpy\n'), ((6726, 6759), 'bpy.path.basename', 'bpy.path.basename', (['image.filepath'], {}), '(image.filepath)\n', (6743, 6759), False, 'import bpy\n')]
|
import subprocess
from datetime import datetime, timedelta
from i3pystatus import IntervalModule
from i3pystatus.core.desktop import DesktopNotification
STOPPED = 0
RUNNING = 1
BREAK = 2
class Pomodoro(IntervalModule):
"""
This plugin shows Pomodoro timer.
Left click starts/restarts timer.
Right click stops it.
Example color settings.
.. code-block:: python
color_map = {
'stopped': '#2ECCFA',
'running': '#FFFF00',
'break': '#37FF00'
}
"""
settings = (
('sound',
'Path to sound file to play as alarm. Played by "aplay" utility'),
('pomodoro_duration',
'Working (pomodoro) interval duration in seconds'),
('break_duration', 'Short break duration in seconds'),
('long_break_duration', 'Long break duration in seconds'),
('short_break_count', 'Short break count before first long break'),
('format', 'format string, available formatters: current_pomodoro, '
'total_pomodoro, time'),
('inactive_format', 'format string to display when no timer is running'),
('color', 'dictionary containing a mapping of statuses to colours')
)
inactive_format = 'Start Pomodoro'
color_map = {
'stopped': '#2ECCFA',
'running': '#FFFF00',
'break': '#37FF00'
}
color = None
sound = None
interval = 1
short_break_count = 3
format = '☯ {current_pomodoro}/{total_pomodoro} {time}'
pomodoro_duration = 25 * 60
break_duration = 5 * 60
long_break_duration = 15 * 60
on_rightclick = "stop"
on_leftclick = "start"
def init(self):
# state could be either running/break or stopped
self.state = STOPPED
self.current_pomodoro = 0
self.total_pomodoro = self.short_break_count + 1 # and 1 long break
self.time = None
        if isinstance(self.color, dict):
self.color_map.update(self.color)
def run(self):
if self.time and datetime.utcnow() >= self.time:
if self.state == RUNNING:
self.state = BREAK
if self.current_pomodoro == self.short_break_count:
self.time = datetime.utcnow() + \
timedelta(seconds=self.long_break_duration)
else:
self.time = datetime.utcnow() + \
timedelta(seconds=self.break_duration)
text = 'Go for a break!'
else:
self.state = RUNNING
self.time = datetime.utcnow() + \
timedelta(seconds=self.pomodoro_duration)
text = 'Back to work!'
self.current_pomodoro = (self.current_pomodoro + 1) % self.total_pomodoro
self._alarm(text)
if self.state == RUNNING or self.state == BREAK:
min, sec = divmod((self.time - datetime.utcnow()).total_seconds(), 60)
text = '{:02}:{:02}'.format(int(min), int(sec))
sdict = {
'time': text,
'current_pomodoro': self.current_pomodoro + 1,
'total_pomodoro': self.total_pomodoro
}
color = self.color_map['running'] if self.state == RUNNING else self.color_map['break']
text = self.format.format(**sdict)
else:
text = self.inactive_format
color = self.color_map['stopped']
self.output = {
'full_text': text,
'color': color
}
def start(self):
self.state = RUNNING
self.time = datetime.utcnow() + timedelta(seconds=self.pomodoro_duration)
self.current_pomodoro = 0
def stop(self):
self.state = STOPPED
self.time = None
def _alarm(self, text):
notification = DesktopNotification(title='Alarm!', body=text)
notification.display()
if self.sound is not None:
subprocess.Popen(['aplay',
self.sound,
'-q'],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
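# Example i3pystatus wiring (a sketch; assumes this module is registered
# under the name "pomodoro" in your i3pystatus configuration, and the sound
# path below is hypothetical):
#
#     from i3pystatus import Status
#     status = Status()
#     status.register("pomodoro",
#                     sound="/usr/share/sounds/alarm.wav",
#                     pomodoro_duration=25 * 60)
#     status.run()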
|
[
"datetime.datetime.utcnow",
"subprocess.Popen",
"datetime.timedelta",
"i3pystatus.core.desktop.DesktopNotification"
] |
[((3886, 3932), 'i3pystatus.core.desktop.DesktopNotification', 'DesktopNotification', ([], {'title': '"""Alarm!"""', 'body': 'text'}), "(title='Alarm!', body=text)\n", (3905, 3932), False, 'from i3pystatus.core.desktop import DesktopNotification\n'), ((3663, 3680), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3678, 3680), False, 'from datetime import datetime, timedelta\n'), ((3683, 3724), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'self.pomodoro_duration'}), '(seconds=self.pomodoro_duration)\n', (3692, 3724), False, 'from datetime import datetime, timedelta\n'), ((4012, 4115), 'subprocess.Popen', 'subprocess.Popen', (["['aplay', self.sound, '-q']"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['aplay', self.sound, '-q'], stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n", (4028, 4115), False, 'import subprocess\n'), ((2066, 2083), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2081, 2083), False, 'from datetime import datetime, timedelta\n'), ((2624, 2641), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2639, 2641), False, 'from datetime import datetime, timedelta\n'), ((2666, 2707), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'self.pomodoro_duration'}), '(seconds=self.pomodoro_duration)\n', (2675, 2707), False, 'from datetime import datetime, timedelta\n'), ((2271, 2288), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2286, 2288), False, 'from datetime import datetime, timedelta\n'), ((2317, 2360), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'self.long_break_duration'}), '(seconds=self.long_break_duration)\n', (2326, 2360), False, 'from datetime import datetime, timedelta\n'), ((2415, 2432), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2430, 2432), False, 'from datetime import datetime, timedelta\n'), ((2461, 2499), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'self.break_duration'}), '(seconds=self.break_duration)\n', (2470, 2499), False, 'from datetime import datetime, timedelta\n'), ((2968, 2985), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2983, 2985), False, 'from datetime import datetime, timedelta\n')]
|
import os
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from scipy.interpolate import interp2d
from scipy.ndimage import convolve1d
from PIL import Image
c = 299792.0 # um/ns
class Ray:
def __init__(self, lambda0: "um" = .532,
pulse_length: "ns" = 10, radius: "um" = 100):
"""
Parameters
----------
        lambda0 : float
            wavelength of the light in um
        pulse_length : float
            duration of the pulse in ns
        radius : float
            radius of the beam in um
"""
self.radius = radius
self.lambda0 = lambda0
self._t = np.linspace(0, pulse_length, 2048)
self._y = np.linspace(-2, 2, 2048)
self._tt, self._yy = np.meshgrid(self._t, self._y)
self._lambda0 = lambda0*np.ones_like(self._tt)
self._delta = 0
@property
def pulse_length(self):
return self._t
@property
def beam_width(self):
return self._y
@property
def _k(self):
return 2*np.pi/self._lambda
@property
def _k0(self):
return 2*np.pi/self._lambda0
@property
def phi(self):
return self._k*self.dz + self._k0*self.dz
def E(self, t):
E = np.exp(1j*(self.phi + self._delta))
E_real = np.real(E)
E_imag = np.imag(E)
fE_real = interp2d(self._t, self._y, E_real)
fE_imag = interp2d(self._t, self._y, E_imag)
return fE_real(t, self._y) + 1j*fE_imag(t, self._y)
def set_lambda(self, lambda_):
self._lambda = lambda_
def propogate(self, dz):
self.dz = dz
def add_delta(self, delta):
self._delta = delta
class Target:
def __init__(self, velocity_equation):
"""
Parameters
----------
        velocity_equation : str or fn
            either "step", "sigmoid" or "stationary" to use a default
            velocity profile, or a function that accepts a t and y meshgrid
"""
self._t = np.linspace(-5, 15, 2048)
self._y = np.linspace(-3, 3, 2048)
self.tau = 0
self._tt, self._yy = np.meshgrid(self._t, self._y)
if velocity_equation == "step":
self.velocity_equation = self.step
elif velocity_equation == "sigmoid":
self.velocity_equation = self.sigmoid
elif velocity_equation == "stationary":
self.velocity_equation = self.stationary
else:
self.velocity_equation = velocity_equation
@property
def _zz(self):
dt = np.diff(self._t).mean()
return np.cumsum(self._vv, axis=1)*dt
@property
def zz(self):
return interp2d(self._t, self._y, self._zz)
@property
def _dz(self):
"""Path the light travels to the target and back
"""
dzz = self._zz[..., -1, np.newaxis] - self._zz
return dzz
@property
def dz(self):
return interp2d(self._t, self._y, self._dz)
@property
def _vv(self):
return self.velocity_equation(self._tt, self._yy)
@property
def vv(self):
return interp2d(self._t, self._y, self._vv)
@staticmethod
def sigmoid(t: "ns", y: "um", max_velocity: "um/ns" = 5):
"""A velocity profile that follows a sigmoid like shape
"""
return max_velocity*np.exp(-y**4)/(1 + np.exp(-5*(t-3)))
@staticmethod
def step(t: "ns", y: "um", max_velocity: "um/ns" = 1):
"""A discontinuous jump velocity profile
"""
assert t.shape == y.shape
v = np.zeros_like(t)
v[t > 3] = max_velocity
return v
@staticmethod
def stationary(t: "ns", y: "um"):
"""A static target, not moving
"""
return np.zeros_like(t)
def reflect_off_target(self, ray):
ray = self._doppler_shift(ray)
dz = self.dz(ray.pulse_length, ray.beam_width)
ray.propogate(dz)
return ray
def _doppler_shift(self, ray):
vv = self.vv(ray.pulse_length, ray.beam_width)
ray.set_lambda(ray.lambda0*(1 - 2*vv/c))
return ray
def reflection_intensity(self, ray):
dy = np.diff(ray.beam_width).mean()
dz = np.diff(self.zz(ray.pulse_length, ray.beam_width), axis=0)
theta = np.arctan(dz/dy)
Idot = np.vstack(
(np.ones(shape=(2048)), np.apply_along_axis(np.cos, 0, theta))
)
return Idot
def plot_velocity(self):
fig = plt.figure()
ax = fig.add_subplot(111, projection="3d")
Axes3D.plot_surface(ax, self._tt, self._yy, self._vv)
ax.set_xlabel("Time [ns]")
ax.set_ylabel("x [mm]")
ax.set_zlabel("Velocity [km s-1]")
fig = plt.figure()
im = plt.pcolormesh(self._tt, self._yy, self._vv)
cb = fig.colorbar(im)
plt.xlabel("Time [ns]")
plt.ylabel("x [mm]")
cb.set_label("Velocity [km s-1]")
class Etalon:
def __init__(self, thickness: "mm", n):
"""Initial an etalon object
Parameters
-----------
d : float
thickness of the etalon
n : float
ndex of refraction of the etalon
"""
self._n = n
self._d = thickness
@property
def tau(self) -> "ns":
return 2*self._d/c*(self._n - 1/self._n)
def VPF(self, lambda0=.532):
return lambda0/(2*self.tau)
def set_VPF(self, VPF, lambda0: "um"):
tau = lambda0/(2*VPF)
self.set_tau(tau)
def set_tau(self, tau: "ns"):
"""Change the thickness of the etalon to match a
"""
self._d = c*tau/(2*(self._n - 1/self._n))
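# Quick check of the etalon relations (a sketch; the numbers are the same
# illustrative ones used in __main__ below):
#
#     etalon = Etalon(1, 1.5195)
#     print(etalon.tau)                  # delay: tau = 2*d/c*(n - 1/n)
#     print(etalon.VPF(lambda0=.532))    # velocity per fringe = lambda0/(2*tau)
#     etalon.set_VPF(2., lambda0=.532)   # resize the etalon for a target VPF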
class Interferometer:
def __init__(self, etalon, tau: "ns" = .1):
"""
Parameters
----------
etalon : Etalon
the etalon used in the interferometer, provides VPF
tau : float
the time resolution of the streak camera, determined by the
width of the streak slit
"""
self.etalon = etalon
self.tau = tau
def _interfear_ray(self, ray):
"""Generate the interference pattern
"""
# get the electric field over the pulse length
E1 = ray.E(ray.pulse_length)
# generate the offset for the second ray
_delta_shape = len(ray.beam_width)
ray.add_delta(
np.linspace(0, 100, _delta_shape).reshape(_delta_shape, 1)
)
# generate the second ray, which is delayed by the etalon thickness
E2 = ray.E(ray.pulse_length - self.etalon.tau)
# Super position of the rays
E = E1 + E2
# only take the real component of the inner product (intensity)
Icos = np.real(E*E.conj())
return Icos
    def _add_noise(self, im, ray, target, noise_level=1., signal_level=1.):
        """Add detector noise to the generated fringe pattern
        (defaults added for noise_level/signal_level so the call in
        output() works)
        """
        print("...Including noise")
        # noise = np.load("noise.npy")
        sig = im[:, 500]
        sig_fft = np.fft.rfft(sig)
        noise_fft = np.zeros_like(sig_fft)
        noise_fft[3] = 50000
        noise_fft[50] = 20000
        noise_fft[200] = 5000
        noise = np.fft.irfft(noise_fft)
        noise /= noise.max()
        # NOTE: `i` was undefined and the random term was assigned to an
        # unused variable in the original; assume `i` indexes the noise
        # samples and fold the decaying random component into the noise.
        i = np.arange(len(noise))
        nenv = noise_level*signal_level*np.exp(-i/40)
        noise = noise + nenv*(2*np.random.random(size=len(i)) - 1)
        im = (im.T*noise.real).T
        im /= im.max()
        im += np.random.random(size=im.shape)*im.std()/3
        return im
def _convolve_streak_slit(self, im, t):
"""Blur in the time-domain to account for the width of the streak
camera
Parameters
-----------
im : 2d np array
generated sweep
t : np array
array corresponding to the time of the sweep
"""
print("...Convolving streak slit")
dt = np.diff(t).mean()
tpx = int(self.tau//dt)
window = np.ones(shape=tpx)
return convolve1d(im, window, axis=1)
def output(self, ray, target, noise=False):
"""Generate the simulated data
Parameters
----------
ray : Ray class
the input ray
target :Target class
target containing the velocity profile
noise : bool (optional)
add in detector noise to the generated image
"""
I = self._interfear_ray(ray)
I = self._convolve_streak_slit(I, ray.pulse_length)
if noise:
I = self._add_noise(I, ray, target)
return I
def spatial_var_step(a: "angle", t: "ns", y: "um", max_velocity: "um/ns" = 1):
"""A velocity step-profile which varies linearly in space
Parameters
----------
a : float
the slope of the spatially varying profile
t : float
the time (in ns) at which to evaluate the velocity
y : float
the spatial location at which to evaluate the velocity
max_velocity : float
the maximum velocity of the shock
Returns
-------
the velocity determined by the argument parameters
"""
assert t.shape == y.shape
v = np.zeros_like(t)
v[t > -y/a + 3] = max_velocity
return v
def sin_step(freq, amp, t: "ns", y: "um", max_velocity: "um/ns" = 1):
"""A sinusoidally varying velocity profile in space
Parameters
----------
freq : float
the frequency of the spatially varying profile
amp : float
the amplitude of oscillations
t : float
the time (in ns) at which to evaluate the velocity
y : float
the spatial location at which to evaluate the velocity
max_velocity : float
the maximum velocity of the shock
Returns
-------
the velocity determined by the argument parameters
"""
v = np.zeros_like(t)
v[t > -amp*np.sin(freq*y/(2*np.pi)) + 3] = max_velocity
return v
def reference_shot(save=False, noise=False):
"""Generate a reference image
Parameters
    ----------
save : bool (optional)
save the generated image
noise : bool (optional)
add in detector noise
Returns
-------
    2d uint8 numpy array containing the reference sweep
"""
stationary_target = Target(velocity_equation="stationary")
ray = Ray(pulse_length=10)
ray = stationary_target.reflect_off_target(ray)
etalon = Etalon(1, 1.5195) # VPF doesn't matter
interferometer = Interferometer(etalon=etalon)
ref = interferometer.output(ray, stationary_target, noise)
ref *= 256/ref.max()
ref = ref.astype(np.uint8)
refim = Image.fromarray(ref, mode="L")
plt.figure()
plt.imshow(refim, aspect='auto', cmap="gray", extent=(0, 10, -2, 2))
plt.xlabel("Time [ns]")
if save:
        refim.save(os.path.expanduser("~/Desktop/ref.jpg"), "JPEG")
return ref
if __name__ == "__main__":
plt.close("all")
velocity_equation = lambda t, y: sin_step(20, .5, t, y, max_velocity=1)
etalon = Etalon(1, 1.5195)
etalon.set_VPF(2., lambda0=.532)
# target = Target(velocity_equation="step")
target = Target(velocity_equation=velocity_equation)
ray = Ray(pulse_length=10)
ray = target.reflect_off_target(ray)
interferometer = Interferometer(etalon=etalon)
sweep = interferometer.output(ray, target, noise=False)
plt.figure()
plt.imshow(sweep, aspect='auto', cmap="gray", extent=(0, 10, -2, 2))
plt.xlabel("Time [ns]")
sweep *= 256/sweep.max()
sweep = sweep.astype(np.uint8)
im = Image.fromarray(sweep, mode="L")
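    # Persisting the simulated sweep (a sketch; the path is illustrative):
    # im.save("sweep.jpg", "JPEG")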
|
[
"numpy.fft.rfft",
"numpy.ones",
"matplotlib.pyplot.figure",
"numpy.imag",
"numpy.sin",
"numpy.exp",
"numpy.zeros_like",
"numpy.meshgrid",
"numpy.fft.irfft",
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.close",
"numpy.cumsum",
"numpy.apply_along_axis",
"numpy.linspace",
"numpy.real",
"mpl_toolkits.mplot3d.Axes3D.plot_surface",
"numpy.ones_like",
"scipy.ndimage.convolve1d",
"scipy.interpolate.interp2d",
"matplotlib.pyplot.pcolormesh",
"matplotlib.pyplot.ylabel",
"numpy.arctan",
"numpy.diff",
"numpy.random.random",
"PIL.Image.fromarray",
"matplotlib.pyplot.xlabel"
] |
[((9115, 9131), 'numpy.zeros_like', 'np.zeros_like', (['t'], {}), '(t)\n', (9128, 9131), True, 'import numpy as np\n'), ((9778, 9794), 'numpy.zeros_like', 'np.zeros_like', (['t'], {}), '(t)\n', (9791, 9794), True, 'import numpy as np\n'), ((10530, 10560), 'PIL.Image.fromarray', 'Image.fromarray', (['ref'], {'mode': '"""L"""'}), "(ref, mode='L')\n", (10545, 10560), False, 'from PIL import Image\n'), ((10566, 10578), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (10576, 10578), True, 'import matplotlib.pyplot as plt\n'), ((10583, 10651), 'matplotlib.pyplot.imshow', 'plt.imshow', (['refim'], {'aspect': '"""auto"""', 'cmap': '"""gray"""', 'extent': '(0, 10, -2, 2)'}), "(refim, aspect='auto', cmap='gray', extent=(0, 10, -2, 2))\n", (10593, 10651), True, 'import matplotlib.pyplot as plt\n'), ((10656, 10679), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [ns]"""'], {}), "('Time [ns]')\n", (10666, 10679), True, 'import matplotlib.pyplot as plt\n'), ((10791, 10807), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (10800, 10807), True, 'import matplotlib.pyplot as plt\n'), ((11247, 11259), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (11257, 11259), True, 'import matplotlib.pyplot as plt\n'), ((11264, 11332), 'matplotlib.pyplot.imshow', 'plt.imshow', (['sweep'], {'aspect': '"""auto"""', 'cmap': '"""gray"""', 'extent': '(0, 10, -2, 2)'}), "(sweep, aspect='auto', cmap='gray', extent=(0, 10, -2, 2))\n", (11274, 11332), True, 'import matplotlib.pyplot as plt\n'), ((11337, 11360), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [ns]"""'], {}), "('Time [ns]')\n", (11347, 11360), True, 'import matplotlib.pyplot as plt\n'), ((11435, 11467), 'PIL.Image.fromarray', 'Image.fromarray', (['sweep'], {'mode': '"""L"""'}), "(sweep, mode='L')\n", (11450, 11467), False, 'from PIL import Image\n'), ((646, 680), 'numpy.linspace', 'np.linspace', (['(0)', 'pulse_length', '(2048)'], {}), '(0, pulse_length, 2048)\n', (657, 680), True, 'import numpy as np\n'), ((699, 723), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(2048)'], {}), '(-2, 2, 2048)\n', (710, 723), True, 'import numpy as np\n'), ((753, 782), 'numpy.meshgrid', 'np.meshgrid', (['self._t', 'self._y'], {}), '(self._t, self._y)\n', (764, 782), True, 'import numpy as np\n'), ((1249, 1288), 'numpy.exp', 'np.exp', (['(1.0j * (self.phi + self._delta))'], {}), '(1.0j * (self.phi + self._delta))\n', (1255, 1288), True, 'import numpy as np\n'), ((1302, 1312), 'numpy.real', 'np.real', (['E'], {}), '(E)\n', (1309, 1312), True, 'import numpy as np\n'), ((1330, 1340), 'numpy.imag', 'np.imag', (['E'], {}), '(E)\n', (1337, 1340), True, 'import numpy as np\n'), ((1359, 1393), 'scipy.interpolate.interp2d', 'interp2d', (['self._t', 'self._y', 'E_real'], {}), '(self._t, self._y, E_real)\n', (1367, 1393), False, 'from scipy.interpolate import interp2d\n'), ((1412, 1446), 'scipy.interpolate.interp2d', 'interp2d', (['self._t', 'self._y', 'E_imag'], {}), '(self._t, self._y, E_imag)\n', (1420, 1446), False, 'from scipy.interpolate import interp2d\n'), ((1990, 2015), 'numpy.linspace', 'np.linspace', (['(-5)', '(15)', '(2048)'], {}), '(-5, 15, 2048)\n', (2001, 2015), True, 'import numpy as np\n'), ((2034, 2058), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(2048)'], {}), '(-3, 3, 2048)\n', (2045, 2058), True, 'import numpy as np\n'), ((2110, 2139), 'numpy.meshgrid', 'np.meshgrid', (['self._t', 'self._y'], {}), '(self._t, self._y)\n', (2121, 2139), True, 'import numpy as np\n'), ((2657, 2693), 
'scipy.interpolate.interp2d', 'interp2d', (['self._t', 'self._y', 'self._zz'], {}), '(self._t, self._y, self._zz)\n', (2665, 2693), False, 'from scipy.interpolate import interp2d\n'), ((2919, 2955), 'scipy.interpolate.interp2d', 'interp2d', (['self._t', 'self._y', 'self._dz'], {}), '(self._t, self._y, self._dz)\n', (2927, 2955), False, 'from scipy.interpolate import interp2d\n'), ((3096, 3132), 'scipy.interpolate.interp2d', 'interp2d', (['self._t', 'self._y', 'self._vv'], {}), '(self._t, self._y, self._vv)\n', (3104, 3132), False, 'from scipy.interpolate import interp2d\n'), ((3540, 3556), 'numpy.zeros_like', 'np.zeros_like', (['t'], {}), '(t)\n', (3553, 3556), True, 'import numpy as np\n'), ((3729, 3745), 'numpy.zeros_like', 'np.zeros_like', (['t'], {}), '(t)\n', (3742, 3745), True, 'import numpy as np\n'), ((4258, 4276), 'numpy.arctan', 'np.arctan', (['(dz / dy)'], {}), '(dz / dy)\n', (4267, 4276), True, 'import numpy as np\n'), ((4450, 4462), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4460, 4462), True, 'import matplotlib.pyplot as plt\n'), ((4522, 4575), 'mpl_toolkits.mplot3d.Axes3D.plot_surface', 'Axes3D.plot_surface', (['ax', 'self._tt', 'self._yy', 'self._vv'], {}), '(ax, self._tt, self._yy, self._vv)\n', (4541, 4575), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((4701, 4713), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4711, 4713), True, 'import matplotlib.pyplot as plt\n'), ((4727, 4771), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (['self._tt', 'self._yy', 'self._vv'], {}), '(self._tt, self._yy, self._vv)\n', (4741, 4771), True, 'import matplotlib.pyplot as plt\n'), ((4810, 4833), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [ns]"""'], {}), "('Time [ns]')\n", (4820, 4833), True, 'import matplotlib.pyplot as plt\n'), ((4842, 4862), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""x [mm]"""'], {}), "('x [mm]')\n", (4852, 4862), True, 'import matplotlib.pyplot as plt\n'), ((7025, 7041), 'numpy.fft.rfft', 'np.fft.rfft', (['sig'], {}), '(sig)\n', (7036, 7041), True, 'import numpy as np\n'), ((7062, 7084), 'numpy.zeros_like', 'np.zeros_like', (['sig_fft'], {}), '(sig_fft)\n', (7075, 7084), True, 'import numpy as np\n'), ((7190, 7213), 'numpy.fft.irfft', 'np.fft.irfft', (['noise_fft'], {}), '(noise_fft)\n', (7202, 7213), True, 'import numpy as np\n'), ((7930, 7948), 'numpy.ones', 'np.ones', ([], {'shape': 'tpx'}), '(shape=tpx)\n', (7937, 7948), True, 'import numpy as np\n'), ((7964, 7994), 'scipy.ndimage.convolve1d', 'convolve1d', (['im', 'window'], {'axis': '(1)'}), '(im, window, axis=1)\n', (7974, 7994), False, 'from scipy.ndimage import convolve1d\n'), ((815, 837), 'numpy.ones_like', 'np.ones_like', (['self._tt'], {}), '(self._tt)\n', (827, 837), True, 'import numpy as np\n'), ((2578, 2605), 'numpy.cumsum', 'np.cumsum', (['self._vv'], {'axis': '(1)'}), '(self._vv, axis=1)\n', (2587, 2605), True, 'import numpy as np\n'), ((7284, 7299), 'numpy.exp', 'np.exp', (['(-i / 40)'], {}), '(-i / 40)\n', (7290, 7299), True, 'import numpy as np\n'), ((2539, 2555), 'numpy.diff', 'np.diff', (['self._t'], {}), '(self._t)\n', (2546, 2555), True, 'import numpy as np\n'), ((3318, 3333), 'numpy.exp', 'np.exp', (['(-y ** 4)'], {}), '(-y ** 4)\n', (3324, 3333), True, 'import numpy as np\n'), ((3337, 3357), 'numpy.exp', 'np.exp', (['(-5 * (t - 3))'], {}), '(-5 * (t - 3))\n', (3343, 3357), True, 'import numpy as np\n'), ((4139, 4162), 'numpy.diff', 'np.diff', (['ray.beam_width'], {}), '(ray.beam_width)\n', (4146, 4162), True, 'import numpy as np\n'), 
((4314, 4333), 'numpy.ones', 'np.ones', ([], {'shape': '(2048)'}), '(shape=2048)\n', (4321, 4333), True, 'import numpy as np\n'), ((4337, 4374), 'numpy.apply_along_axis', 'np.apply_along_axis', (['np.cos', '(0)', 'theta'], {}), '(np.cos, 0, theta)\n', (4356, 4374), True, 'import numpy as np\n'), ((7427, 7458), 'numpy.random.random', 'np.random.random', ([], {'size': 'im.shape'}), '(size=im.shape)\n', (7443, 7458), True, 'import numpy as np\n'), ((7862, 7872), 'numpy.diff', 'np.diff', (['t'], {}), '(t)\n', (7869, 7872), True, 'import numpy as np\n'), ((6351, 6384), 'numpy.linspace', 'np.linspace', (['(0)', '(100)', '_delta_shape'], {}), '(0, 100, _delta_shape)\n', (6362, 6384), True, 'import numpy as np\n'), ((9810, 9840), 'numpy.sin', 'np.sin', (['(freq * y / (2 * np.pi))'], {}), '(freq * y / (2 * np.pi))\n', (9816, 9840), True, 'import numpy as np\n')]
|
import json
class Settings:
def __init__(self, filename):
self.json_file = None
with open(filename) as f:
self.json_file = json.load(f)
self.hh_sheet_id = self.json_file['hh_sheet_id']
self.ha_sheet_id = self.json_file['ha_sheet_id']
# List of str: names of the individual sheets in the Google Sheet we want to use
self.hh_sheet_names = self.json_file['hh_sheet_names']
self.ha_sheet_names = self.json_file['ha_sheet_names']
self.columns_list = self.json_file['columns_list']
self.dialogue_column = self.json_file['dialogue_column']
self.dialogue_act_column = self.json_file['dialogue_act_column']
self.intent_column = self.json_file['intent_column']
self.delivery_column = self.json_file['delivery_column']
self.who_column = self.json_file['who_column']
self.action_column = self.json_file['action_column']
self.tone_column = self.json_file['tone_column']
self.driver_dict = self.json_file['driver_dict']
self.creativity_dict = self.json_file['creativity_dict']
self.dialogue_act_dict = self.json_file['dialogue_act_dict']
self.intent_dict = self.json_file['intent_dict']
self.delivery_dict = self.json_file['delivery_dict']
self.action_dict = self.json_file['action_dict']
self.who_dict = self.json_file['who_dict']
self.tone_dict = self.json_file['tone_dict']
self.root_column_name = self.json_file['root_column_name']
self.root_encode_dict = self.json_file[self.json_file['root_encode_dict']]
self.root_hierarchy = self.json_file['root_hierarchy']
self.stagger_training = self.json_file['stagger_training']
self.num_runs = self.json_file['num_runs']
self.num_train_epochs = self.json_file['num_train_epochs']
self.per_device_train_batch_size = self.json_file['per_device_train_batch_size']
self.per_device_eval_batch_size = self.json_file['per_device_eval_batch_size']
self.warmup_steps = self.json_file['warmup_steps']
self.weight_decay = self.json_file['weight_decay']
self.evaluation_strategy = self.json_file['evaluation_strategy']
self.eval_accumulation_steps = self.json_file['eval_accumulation_steps']
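# Typical usage (a sketch; "settings.json" is a hypothetical file that must
# contain every key read above):
#
#     settings = Settings("settings.json")
#     print(settings.hh_sheet_id, settings.num_train_epochs)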
|
[
"json.load"
] |
[((157, 169), 'json.load', 'json.load', (['f'], {}), '(f)\n', (166, 169), False, 'import json\n')]
|
import tensorflow as tf
def train_input_fn(filepath, example_parser, batch_size, num_epochs, shuffle_buffer_size):
"""
    input_fn for the model training phase
    Args:
        filepath (str): path to the training/validation set
        example_parser (function): function that parses a serialized example
        batch_size (int): number of samples per batch
        num_epochs (int): number of training epochs
        shuffle_buffer_size (int): buffer size used when shuffling
Returns:
dataset
"""
dataset = tf.data.TFRecordDataset(filepath)
if shuffle_buffer_size > 0:
dataset = dataset.shuffle(shuffle_buffer_size)
dataset = dataset.repeat(num_epochs)
dataset = dataset.batch(batch_size)
dataset = dataset.map(example_parser, num_parallel_calls=tf.data.experimental.AUTOTUNE)
dataset = dataset.prefetch(1)
return dataset
def eval_input_fn(filepath, example_parser, batch_size):
"""
    input_fn for the model evaluation phase
    Args:
        filepath (str): path to the training/validation set
        example_parser (function): function that parses a serialized example
        batch_size (int): number of samples per batch
Returns:
dataset
"""
dataset = tf.data.TFRecordDataset(filepath)
dataset = dataset.batch(batch_size)
dataset = dataset.map(example_parser, num_parallel_calls=tf.data.experimental.AUTOTUNE)
dataset = dataset.prefetch(1)
return dataset
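# Wiring the input_fns into an Estimator (a sketch; `model_fn` and the
# parser `parse_example` are assumed to be defined elsewhere):
#
#     estimator = tf.estimator.Estimator(model_fn=model_fn)
#     estimator.train(input_fn=lambda: train_input_fn(
#         "train.tfrecord", parse_example, batch_size=256,
#         num_epochs=10, shuffle_buffer_size=10000))
#     estimator.evaluate(input_fn=lambda: eval_input_fn(
#         "eval.tfrecord", parse_example, batch_size=256))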
def to_sparse_tensor(one_hot_tensor):
"""
    Convert a one-hot/multi-hot input into a sparse tensor, to be used as
    the input of tf.nn.safe_embedding_lookup_sparse
    Args:
        one_hot_tensor (tensor): one-hot/multi-hot input
Returns:
tf.SparseTensor
"""
one_hot_indices = tf.where(tf.not_equal(one_hot_tensor, 0))
one_hot_values = one_hot_indices[:, 1]
return tf.SparseTensor(
indices=one_hot_indices,
values=one_hot_values,
dense_shape=tf.shape(one_hot_tensor, out_type=tf.int64))
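# Example (illustrative): turning a multi-hot row into sparse ids for a
# safe embedding lookup; the embedding matrix here is a placeholder.
#
#     one_hot = tf.constant([[0, 1, 0, 1], [1, 0, 0, 0]], dtype=tf.int64)
#     sparse_ids = to_sparse_tensor(one_hot)   # values are column indices
#     embeddings = tf.Variable(tf.random.normal([4, 8]))
#     vecs = tf.nn.safe_embedding_lookup_sparse(embeddings, sparse_ids)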
|
[
"tensorflow.shape",
"tensorflow.not_equal",
"tensorflow.data.TFRecordDataset"
] |
[((417, 450), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['filepath'], {}), '(filepath)\n', (440, 450), True, 'import tensorflow as tf\n'), ((1042, 1075), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['filepath'], {}), '(filepath)\n', (1065, 1075), True, 'import tensorflow as tf\n'), ((1527, 1558), 'tensorflow.not_equal', 'tf.not_equal', (['one_hot_tensor', '(0)'], {}), '(one_hot_tensor, 0)\n', (1539, 1558), True, 'import tensorflow as tf\n'), ((1716, 1759), 'tensorflow.shape', 'tf.shape', (['one_hot_tensor'], {'out_type': 'tf.int64'}), '(one_hot_tensor, out_type=tf.int64)\n', (1724, 1759), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python3
import binascii
import threading
from time import sleep
import socketserver
from string import hexdigits
from Crypto.Util.number import getPrime, inverse, bytes_to_long, long_to_bytes
banner = """
Welcome to my supreme signing server!
Send me a signed command, I will verify and do it for you, I will also sign your commands, but don't tinker too much with them though!
I'm not Blind, I can see through your cunning ruse, sometimes!
"""
FLAG_FILE = "flag.txt"
class RSA:
def __init__(self):
self.e = 0x10001
p = getPrime(1024)
q = getPrime(1024)
self.n = p * q
phi = (p - 1) * (q - 1)
self.d = inverse(self.e, phi)
def get_public_key(self):
return (self.e, self.n)
def sign(self, msg):
hex_str_of_peek = binascii.hexlify("peek".encode()).decode()
if msg.startswith(hex_str_of_peek):
return -1
msg = bytes_to_long(binascii.unhexlify(msg.encode()))
return pow(msg, self.d, self.n)
def verify(self, msg):
msg = bytes_to_long(binascii.unhexlify(msg.encode()))
return pow(msg, self.e, self.n)
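# Note: sign() only rejects messages that *start with* hex("peek"), so the
# classic RSA blinding attack still applies (a sketch of the math, not part
# of the service itself):
#
#     s_blind = sign(m * r**e % n)        # the blinded message passes the filter
#     s = s_blind * inverse(r, n) % n     # unblind: s == m**d % n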
class Service(socketserver.BaseRequestHandler):
#handle() will always run first
def handle(self):
self.get_flag()
rsa = RSA()
self.send(banner)
while True:
choice = self.receive("1. Sign\n2. Verify\nYour choice: ").decode()
if choice == "1":
cmd = self.receive("Command to sign: ").decode()
if not self.assure_hex(cmd):
self.send("Please send a hex string!\n")
continue
signed_msg = rsa.sign(cmd)
if signed_msg != -1:
self.send("Message signed successfully!\n" + self.num_to_hex_str(signed_msg))
else:
self.send("Ah ah, don't tinker with the commands!")
elif choice == "2":
cmd = self.receive("Command to verify: ").decode()
if not self.assure_hex(cmd):
self.send("Please send a hex string!\n")
continue
verified_cmd = rsa.verify(cmd)
verified_cmd = long_to_bytes(verified_cmd)
try:
                    # could be gibberish ¯\_(ツ)_/¯
verified_cmd = verified_cmd.decode()
if verified_cmd == "peek flag":
self.send("Here is the flag!\n" + self.flag)
break
elif verified_cmd == "get pubkey":
self.send("Here is the public key!\n" + str(rsa.get_public_key()) + "\n")
else:
self.send("Command executed!")
break
except:
self.send("There's something wrong with your command!")
break
else:
break
def num_to_hex_str(self, num):
return binascii.hexlify(long_to_bytes(num)).decode()
def hex_str_to_num(self, string):
return bytes_to_long(binascii.unhexlify(string.encode()))
def assure_hex(self, string):
return all(c in hexdigits for c in string)
def get_flag(self):
with open(FLAG_FILE, "r") as f:
self.flag = f.read()
def send(self, string, newline=True):
if type(string) is str:
string = string.encode("utf-8")
if newline:
string = string + b"\n"
self.request.sendall(string)
def receive(self, prompt=": "):
self.send(prompt, newline=False)
return self.request.recv(1000).strip()
class ThreadedService(
socketserver.ThreadingMixIn,
socketserver.TCPServer,
socketserver.DatagramRequestHandler,
):
pass
def main():
port = 20314
host = "103.245.249.107"
service = Service
server = ThreadedService((host, port), service)
server.allow_reuse_address = True
server_thread = threading.Thread(target=server.serve_forever)
server_thread.daemon = True
server_thread.start()
print("Server started on " + str(server.server_address) + "!")
# Now let the main thread just wait...
while True:
sleep(10)
if __name__ == "__main__":
main()
|
[
"threading.Thread",
"Crypto.Util.number.getPrime",
"Crypto.Util.number.long_to_bytes",
"Crypto.Util.number.inverse"
] |
[((4100, 4145), 'threading.Thread', 'threading.Thread', ([], {'target': 'server.serve_forever'}), '(target=server.serve_forever)\n', (4116, 4145), False, 'import threading\n'), ((554, 568), 'Crypto.Util.number.getPrime', 'getPrime', (['(1024)'], {}), '(1024)\n', (562, 568), False, 'from Crypto.Util.number import getPrime, inverse, bytes_to_long, long_to_bytes\n'), ((581, 595), 'Crypto.Util.number.getPrime', 'getPrime', (['(1024)'], {}), '(1024)\n', (589, 595), False, 'from Crypto.Util.number import getPrime, inverse, bytes_to_long, long_to_bytes\n'), ((668, 688), 'Crypto.Util.number.inverse', 'inverse', (['self.e', 'phi'], {}), '(self.e, phi)\n', (675, 688), False, 'from Crypto.Util.number import getPrime, inverse, bytes_to_long, long_to_bytes\n'), ((2280, 2307), 'Crypto.Util.number.long_to_bytes', 'long_to_bytes', (['verified_cmd'], {}), '(verified_cmd)\n', (2293, 2307), False, 'from Crypto.Util.number import getPrime, inverse, bytes_to_long, long_to_bytes\n'), ((3110, 3128), 'Crypto.Util.number.long_to_bytes', 'long_to_bytes', (['num'], {}), '(num)\n', (3123, 3128), False, 'from Crypto.Util.number import getPrime, inverse, bytes_to_long, long_to_bytes\n')]
|
import base64
import json
import re
import unittest
import boto3
from tests import get_version, get_function_name, is_local
from tests.sam import LocalLambdaServer, start_local_lambda
class TestRuntimeLayer(unittest.TestCase):
lambda_server: LocalLambdaServer = None
@classmethod
def setUpClass(cls):
if is_local():
cls.lambda_server = start_local_lambda(template_path="test-template.yaml",
parameter_overrides={'Version': get_version()},
)
def get_client(self):
return self.lambda_server.get_client() if is_local() else boto3.client('lambda')
def test_script(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("ExampleFunction"),
Payload=json.dumps({'x': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
result = json.loads(raw_payload)
self.assertEqual(2, result)
def test_lowercase_extension(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("LowerCaseExtensionFunction"),
Payload=json.dumps({'x': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
result = json.loads(raw_payload)
self.assertEqual(2, result)
def test_multiple_arguments(self):
lambda_client = self.get_client()
payload = {'x': 'bar', 'y': 1}
response = lambda_client.invoke(FunctionName=get_function_name("MultipleArgumentsFunction"),
Payload=json.dumps(payload),
)
raw_payload = response['Payload'].read().decode('utf-8')
result = json.loads(raw_payload)
self.assertDictEqual(payload, result)
@unittest.skipIf(is_local(), 'Lambda local does not support log retrieval')
def test_debug_logging(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("LoggingFunction"),
LogType='Tail',
Payload=json.dumps({'x': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
result = json.loads(raw_payload)
self.assertEqual(1, result)
log = base64.b64decode(response['LogResult']).decode('utf-8')
self.assertIn("runtime:Sourcing 'script.R'", log)
self.assertIn("runtime:Invoking function 'handler_with_debug_logging' with parameters:\n$x\n[1] 1", log)
self.assertIn("runtime:Function returned:\n[1] 1", log)
self.assertIn("runtime:Posted result:\n", log)
@unittest.skipIf(is_local(), 'Lambda local does not support log retrieval')
def test_no_debug_logging(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("ExampleFunction"),
LogType='Tail',
Payload=json.dumps({'x': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
result = json.loads(raw_payload)
self.assertEqual(2, result)
log = base64.b64decode(response['LogResult']).decode('utf-8')
self.assertNotIn("Sourcing ", log)
self.assertNotIn("Invoking function ", log)
self.assertNotIn("Function returned:", log)
self.assertNotIn("Posted result:", log)
@unittest.skipIf(is_local(), 'Lambda local does not pass errors properly')
def test_missing_source_file(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("MissingSourceFileFunction"),
Payload=json.dumps({'y': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
json_payload = json.loads(raw_payload)
self.assertEqual('Unhandled', response['FunctionError'])
self.assertIn('Source file does not exist: missing.[R|r]', json_payload['errorMessage'])
self.assertEqual('simpleError', json_payload['errorType'])
@unittest.skipIf(is_local(), 'Lambda local does not pass errors properly')
def test_missing_function(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("MissingFunctionFunction"),
Payload=json.dumps({'y': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
json_payload = json.loads(raw_payload)
self.assertEqual('Unhandled', response['FunctionError'])
self.assertIn('Function "handler_missing" does not exist', json_payload['errorMessage'])
self.assertEqual('simpleError', json_payload['errorType'])
@unittest.skipIf(is_local(), 'Lambda local does not pass errors properly')
def test_function_as_variable(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("HandlerAsVariableFunction"),
Payload=json.dumps({'y': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
json_payload = json.loads(raw_payload)
self.assertEqual('Unhandled', response['FunctionError'])
self.assertIn('Function "handler_as_variable" does not exist', json_payload['errorMessage'])
self.assertEqual('simpleError', json_payload['errorType'])
@unittest.skipIf(is_local(), 'Lambda local does not pass errors properly')
def test_missing_argument(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("ExampleFunction"))
raw_payload = response['Payload'].read().decode('utf-8')
json_payload = json.loads(raw_payload)
self.assertEqual('Unhandled', response['FunctionError'])
self.assertIn('argument "x" is missing, with no default', json_payload['errorMessage'])
self.assertEqual('simpleError', json_payload['errorType'])
@unittest.skipIf(is_local(), 'Lambda local does not pass errors properly')
def test_unused_argument(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("ExampleFunction"),
Payload=json.dumps({'x': 1, 'y': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
json_payload = json.loads(raw_payload)
self.assertEqual('Unhandled', response['FunctionError'])
self.assertIn('unused argument (y = 1)', json_payload['errorMessage'])
self.assertEqual('simpleError', json_payload['errorType'])
# @unittest.skipIf(is_local(), 'Fails locally with "argument list too long"')
@unittest.skip('Fails with timeout')
def test_long_argument(self):
lambda_client = self.get_client()
payload = {x: x for x in range(0, 100000)}
response = lambda_client.invoke(FunctionName=get_function_name("VariableArgumentsFunction"),
Payload=json.dumps(payload),
)
raw_payload = response['Payload'].read().decode('utf-8')
result = json.loads(raw_payload)
self.assertEqual(1, result)
@unittest.skipIf(is_local(), 'Lambda local does not pass errors properly')
def test_missing_library(self):
lambda_client = self.get_client()
response = lambda_client.invoke(FunctionName=get_function_name("MissingLibraryFunction"),
Payload=json.dumps({'y': 1}),
)
raw_payload = response['Payload'].read().decode('utf-8')
json_payload = json.loads(raw_payload)
self.assertEqual('Unhandled', response['FunctionError'])
self.assertIn('there is no package called ‘Matrix’', json_payload['errorMessage'])
error_type = 'packageNotFoundError' if get_version() == '3_6_0' else 'simpleError'
self.assertEqual(error_type, json_payload['errorType'])
@classmethod
def tearDownClass(cls):
if is_local():
cls.lambda_server.kill()
|
[
"json.loads",
"boto3.client",
"json.dumps",
"base64.b64decode",
"unittest.skip",
"tests.get_version",
"tests.get_function_name",
"tests.is_local"
] |
[((7296, 7331), 'unittest.skip', 'unittest.skip', (['"""Fails with timeout"""'], {}), "('Fails with timeout')\n", (7309, 7331), False, 'import unittest\n'), ((329, 339), 'tests.is_local', 'is_local', ([], {}), '()\n', (337, 339), False, 'from tests import get_version, get_function_name, is_local\n'), ((1051, 1074), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (1061, 1074), False, 'import json\n'), ((1490, 1513), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (1500, 1513), False, 'import json\n'), ((1965, 1988), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (1975, 1988), False, 'import json\n'), ((2533, 2556), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (2543, 2556), False, 'import json\n'), ((2057, 2067), 'tests.is_local', 'is_local', ([], {}), '()\n', (2065, 2067), False, 'from tests import get_version, get_function_name, is_local\n'), ((3454, 3477), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (3464, 3477), False, 'import json\n'), ((2975, 2985), 'tests.is_local', 'is_local', ([], {}), '()\n', (2983, 2985), False, 'from tests import get_version, get_function_name, is_local\n'), ((4242, 4265), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (4252, 4265), False, 'import json\n'), ((3801, 3811), 'tests.is_local', 'is_local', ([], {}), '()\n', (3809, 3811), False, 'from tests import get_version, get_function_name, is_local\n'), ((4953, 4976), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (4963, 4976), False, 'import json\n'), ((4517, 4527), 'tests.is_local', 'is_local', ([], {}), '()\n', (4525, 4527), False, 'from tests import get_version, get_function_name, is_local\n'), ((5670, 5693), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (5680, 5693), False, 'import json\n'), ((5228, 5238), 'tests.is_local', 'is_local', ([], {}), '()\n', (5236, 5238), False, 'from tests import get_version, get_function_name, is_local\n'), ((6265, 6288), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (6275, 6288), False, 'import json\n'), ((5949, 5959), 'tests.is_local', 'is_local', ([], {}), '()\n', (5957, 5959), False, 'from tests import get_version, get_function_name, is_local\n'), ((6974, 6997), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (6984, 6997), False, 'import json\n'), ((6539, 6549), 'tests.is_local', 'is_local', ([], {}), '()\n', (6547, 6549), False, 'from tests import get_version, get_function_name, is_local\n'), ((7753, 7776), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (7763, 7776), False, 'import json\n'), ((8269, 8292), 'json.loads', 'json.loads', (['raw_payload'], {}), '(raw_payload)\n', (8279, 8292), False, 'import json\n'), ((7835, 7845), 'tests.is_local', 'is_local', ([], {}), '()\n', (7843, 7845), False, 'from tests import get_version, get_function_name, is_local\n'), ((8661, 8671), 'tests.is_local', 'is_local', ([], {}), '()\n', (8669, 8671), False, 'from tests import get_version, get_function_name, is_local\n'), ((657, 667), 'tests.is_local', 'is_local', ([], {}), '()\n', (665, 667), False, 'from tests import get_version, get_function_name, is_local\n'), ((673, 695), 'boto3.client', 'boto3.client', (['"""lambda"""'], {}), "('lambda')\n", (685, 695), False, 'import boto3\n'), ((819, 855), 'tests.get_function_name', 'get_function_name', (['"""ExampleFunction"""'], {}), "('ExampleFunction')\n", (836, 855), False, 'from tests 
import get_version, get_function_name, is_local\n'), ((905, 925), 'json.dumps', 'json.dumps', (["{'x': 1}"], {}), "({'x': 1})\n", (915, 925), False, 'import json\n'), ((1247, 1294), 'tests.get_function_name', 'get_function_name', (['"""LowerCaseExtensionFunction"""'], {}), "('LowerCaseExtensionFunction')\n", (1264, 1294), False, 'from tests import get_version, get_function_name, is_local\n'), ((1344, 1364), 'json.dumps', 'json.dumps', (["{'x': 1}"], {}), "({'x': 1})\n", (1354, 1364), False, 'import json\n'), ((1724, 1770), 'tests.get_function_name', 'get_function_name', (['"""MultipleArgumentsFunction"""'], {}), "('MultipleArgumentsFunction')\n", (1741, 1770), False, 'from tests import get_version, get_function_name, is_local\n'), ((1820, 1839), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (1830, 1839), False, 'import json\n'), ((2245, 2281), 'tests.get_function_name', 'get_function_name', (['"""LoggingFunction"""'], {}), "('LoggingFunction')\n", (2262, 2281), False, 'from tests import get_version, get_function_name, is_local\n'), ((2387, 2407), 'json.dumps', 'json.dumps', (["{'x': 1}"], {}), "({'x': 1})\n", (2397, 2407), False, 'import json\n'), ((2607, 2646), 'base64.b64decode', 'base64.b64decode', (["response['LogResult']"], {}), "(response['LogResult'])\n", (2623, 2646), False, 'import base64\n'), ((3166, 3202), 'tests.get_function_name', 'get_function_name', (['"""ExampleFunction"""'], {}), "('ExampleFunction')\n", (3183, 3202), False, 'from tests import get_version, get_function_name, is_local\n'), ((3308, 3328), 'json.dumps', 'json.dumps', (["{'x': 1}"], {}), "({'x': 1})\n", (3318, 3328), False, 'import json\n'), ((3528, 3567), 'base64.b64decode', 'base64.b64decode', (["response['LogResult']"], {}), "(response['LogResult'])\n", (3544, 3567), False, 'import base64\n'), ((3994, 4040), 'tests.get_function_name', 'get_function_name', (['"""MissingSourceFileFunction"""'], {}), "('MissingSourceFileFunction')\n", (4011, 4040), False, 'from tests import get_version, get_function_name, is_local\n'), ((4090, 4110), 'json.dumps', 'json.dumps', (["{'y': 1}"], {}), "({'y': 1})\n", (4100, 4110), False, 'import json\n'), ((4707, 4751), 'tests.get_function_name', 'get_function_name', (['"""MissingFunctionFunction"""'], {}), "('MissingFunctionFunction')\n", (4724, 4751), False, 'from tests import get_version, get_function_name, is_local\n'), ((4801, 4821), 'json.dumps', 'json.dumps', (["{'y': 1}"], {}), "({'y': 1})\n", (4811, 4821), False, 'import json\n'), ((5422, 5468), 'tests.get_function_name', 'get_function_name', (['"""HandlerAsVariableFunction"""'], {}), "('HandlerAsVariableFunction')\n", (5439, 5468), False, 'from tests import get_version, get_function_name, is_local\n'), ((5518, 5538), 'json.dumps', 'json.dumps', (["{'y': 1}"], {}), "({'y': 1})\n", (5528, 5538), False, 'import json\n'), ((6139, 6175), 'tests.get_function_name', 'get_function_name', (['"""ExampleFunction"""'], {}), "('ExampleFunction')\n", (6156, 6175), False, 'from tests import get_version, get_function_name, is_local\n'), ((6728, 6764), 'tests.get_function_name', 'get_function_name', (['"""ExampleFunction"""'], {}), "('ExampleFunction')\n", (6745, 6764), False, 'from tests import get_version, get_function_name, is_local\n'), ((6814, 6842), 'json.dumps', 'json.dumps', (["{'x': 1, 'y': 1}"], {}), "({'x': 1, 'y': 1})\n", (6824, 6842), False, 'import json\n'), ((7512, 7558), 'tests.get_function_name', 'get_function_name', (['"""VariableArgumentsFunction"""'], {}), "('VariableArgumentsFunction')\n", (7529, 
7558), False, 'from tests import get_version, get_function_name, is_local\n'), ((7608, 7627), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (7618, 7627), False, 'import json\n'), ((8024, 8067), 'tests.get_function_name', 'get_function_name', (['"""MissingLibraryFunction"""'], {}), "('MissingLibraryFunction')\n", (8041, 8067), False, 'from tests import get_version, get_function_name, is_local\n'), ((8117, 8137), 'json.dumps', 'json.dumps', (["{'y': 1}"], {}), "({'y': 1})\n", (8127, 8137), False, 'import json\n'), ((8496, 8509), 'tests.get_version', 'get_version', ([], {}), '()\n', (8507, 8509), False, 'from tests import get_version, get_function_name, is_local\n'), ((511, 524), 'tests.get_version', 'get_version', ([], {}), '()\n', (522, 524), False, 'from tests import get_version, get_function_name, is_local\n')]
|
from collections import OrderedDict
from vendors.models import Vendor
from .models import StaffingRequest
def requests_as_choices():
choices = OrderedDict()
requests = StaffingRequest.objects.all().order_by('-id')
for request in requests:
choices[request.project] = choices.get(request.project, [])
choices[request.project].append((request.id, request))
return choices.items()
def vendors_as_choices():
choices = OrderedDict()
vendors = Vendor.objects.all().order_by('-avg_score')
choices['Suggested vendors'] = []
for vendor in vendors:
choices['All vendors'] = choices.get('All vendors', [])
choices['All vendors'].append((vendor.id, vendor))
return choices.items()
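# Both helpers return grouped choices, which Django form fields accept
# directly (a sketch; the form class is hypothetical):
#
#     class AssignVendorForm(forms.Form):
#         request = forms.ChoiceField(choices=requests_as_choices)
#         vendor = forms.ChoiceField(choices=vendors_as_choices)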
|
[
"collections.OrderedDict",
"vendors.models.Vendor.objects.all"
] |
[((150, 163), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (161, 163), False, 'from collections import OrderedDict\n'), ((454, 467), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (465, 467), False, 'from collections import OrderedDict\n'), ((482, 502), 'vendors.models.Vendor.objects.all', 'Vendor.objects.all', ([], {}), '()\n', (500, 502), False, 'from vendors.models import Vendor\n')]
|
#! /usr/bin/env python3
"""
Copyright 2021 <NAME>.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
#
# Classify applications into 104 classes given their raw code.
#
# The representation (graph) is created from IR.
#
import os
import sys
import glob
import numpy as np
from absl import app, flags, logging
from yacos.info import compy as R
from yacos.info.compy.extractors import LLVMDriver
def execute(argv):
"""Extract a graph representation."""
del argv
FLAGS = flags.FLAGS
# Instantiate the LLVM driver.
driver = LLVMDriver([])
# Instantiate the builder.
builder = R.LLVMIR2VecBuilder(driver)
    # Verify dataset directory.
if not os.path.isdir(FLAGS.dataset_directory):
logging.error('Dataset directory {} does not exist.'.format(
FLAGS.dataset_directory)
)
sys.exit(1)
folders = [
os.path.join(FLAGS.dataset_directory, subdir)
for subdir in os.listdir(FLAGS.dataset_directory)
if os.path.isdir(os.path.join(FLAGS.dataset_directory, subdir))
]
idx = FLAGS.dataset_directory.rfind('/')
last_folder = FLAGS.dataset_directory[idx+1:]
# Load data from all folders
for folder in folders:
# Create the output directory.
outdir = os.path.join(folder.replace(last_folder,
'{}_ir2vec'.format(last_folder)))
os.makedirs(outdir, exist_ok=True)
# Extract "ir2vec" from the file
sources = glob.glob('{}/*.ll'.format(folder))
for source in sources:
try:
extractionInfo = builder.ir_to_info(source)
except Exception:
logging.error('Error {}.'.format(source))
continue
filename = source.replace(folder, outdir)
filename = filename[:-3]
np.savez_compressed(filename,
values=extractionInfo.moduleInfo.ir2vec)
# Execute
if __name__ == '__main__':
# app
flags.DEFINE_string('dataset_directory',
None,
'Dataset directory')
flags.mark_flag_as_required('dataset_directory')
app.run(execute)
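# Usage from a shell (a sketch; the script name is hypothetical):
#   python extract_ir2vec.py --dataset_directory /path/to/llvm_ir_dataset
# Each .ll file under <dataset>/<subdir>/ yields a compressed .npz holding the
# module-level ir2vec vector, written to a parallel <dataset>_ir2vec tree.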
|
[
"os.makedirs",
"os.path.isdir",
"absl.flags.mark_flag_as_required",
"yacos.info.compy.LLVMIR2VecBuilder",
"absl.flags.DEFINE_string",
"numpy.savez_compressed",
"absl.app.run",
"yacos.info.compy.extractors.LLVMDriver",
"os.path.join",
"os.listdir",
"sys.exit"
] |
[((1023, 1037), 'yacos.info.compy.extractors.LLVMDriver', 'LLVMDriver', (['[]'], {}), '([])\n', (1033, 1037), False, 'from yacos.info.compy.extractors import LLVMDriver\n'), ((1083, 1110), 'yacos.info.compy.LLVMIR2VecBuilder', 'R.LLVMIR2VecBuilder', (['driver'], {}), '(driver)\n', (1102, 1110), True, 'from yacos.info import compy as R\n'), ((2511, 2578), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""dataset_directory"""', 'None', '"""Dataset directory"""'], {}), "('dataset_directory', None, 'Dataset directory')\n", (2530, 2578), False, 'from absl import app, flags, logging\n'), ((2631, 2679), 'absl.flags.mark_flag_as_required', 'flags.mark_flag_as_required', (['"""dataset_directory"""'], {}), "('dataset_directory')\n", (2658, 2679), False, 'from absl import app, flags, logging\n'), ((2685, 2701), 'absl.app.run', 'app.run', (['execute'], {}), '(execute)\n', (2692, 2701), False, 'from absl import app, flags, logging\n'), ((1154, 1192), 'os.path.isdir', 'os.path.isdir', (['FLAGS.dataset_directory'], {}), '(FLAGS.dataset_directory)\n', (1167, 1192), False, 'import os\n'), ((1318, 1329), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1326, 1329), False, 'import sys\n'), ((1363, 1408), 'os.path.join', 'os.path.join', (['FLAGS.dataset_directory', 'subdir'], {}), '(FLAGS.dataset_directory, subdir)\n', (1375, 1408), False, 'import os\n'), ((1898, 1932), 'os.makedirs', 'os.makedirs', (['outdir'], {'exist_ok': '(True)'}), '(outdir, exist_ok=True)\n', (1909, 1932), False, 'import os\n'), ((1439, 1474), 'os.listdir', 'os.listdir', (['FLAGS.dataset_directory'], {}), '(FLAGS.dataset_directory)\n', (1449, 1474), False, 'import os\n'), ((2355, 2425), 'numpy.savez_compressed', 'np.savez_compressed', (['filename'], {'values': 'extractionInfo.moduleInfo.ir2vec'}), '(filename, values=extractionInfo.moduleInfo.ir2vec)\n', (2374, 2425), True, 'import numpy as np\n'), ((1508, 1553), 'os.path.join', 'os.path.join', (['FLAGS.dataset_directory', 'subdir'], {}), '(FLAGS.dataset_directory, subdir)\n', (1520, 1553), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
from route4me import Route4Me
API_KEY = "11111111111111111111111111111111"
def main():
r4m = Route4Me(API_KEY)
route = r4m.route
response = route.get_routes(limit=1, offset=0)
if isinstance(response, dict) and 'errors' in response.keys():
print('. '.join(response['errors']))
else:
route_id = response[0]['route_id']
print('Route ID: {}'.format(route_id))
response = route.get_route(route_id=route_id)
if isinstance(response, dict) and 'errors' in response.keys():
print('. '.join(response['errors']))
else:
print('Original Route')
print('Route ID: {}'.format(response['route_id']))
for i, address in enumerate(response['addresses']):
print('Address #{}'.format(i + 1))
print('\tAddress: {0}'.format(address['address']))
print('\tRoute Destination ID: {0}'.format(
address['route_destination_id']))
route_destination_id = response['addresses'][1]['route_destination_id']
route_destination_id2 = response['addresses'][2]['route_destination_id']
data = {
"route_destination_id": route_destination_id,
"route_id": route_id,
"addresses": [{
"route_destination_id": route_destination_id2,
"sequence_no": 6,
}]
}
print('After Re-sequence Route')
response = route.resequence_route(**data)
print('Route ID: {}'.format(response['route_id']))
for i, address in enumerate(response['addresses']):
print('Address #{}'.format(i + 1))
print('\tAddress: {0}'.format(address['address']))
print('\tRoute Destination ID: {0}'.format(
address['route_destination_id']))
if __name__ == '__main__':
main()
|
[
"route4me.Route4Me"
] |
[((125, 142), 'route4me.Route4Me', 'Route4Me', (['API_KEY'], {}), '(API_KEY)\n', (133, 142), False, 'from route4me import Route4Me\n')]
|
from pyramid.events import subscriber
from phoenix.events import JobFinished, JobStarted
import logging
LOGGER = logging.getLogger("PHOENIX")
@subscriber(JobStarted)
def notify_job_started(event):
event.request.session.flash(
'<h4><img src="/static/phoenix/img/ajax-loader.gif"></img> Job Created. Please wait ...</h4>', queue='success')
@subscriber(JobFinished)
def notify_job_finished(event):
if event.succeeded():
LOGGER.info("job %s succeded.", event.job.get('title'))
# event.request.session.flash("Job <b>{0}</b> succeded.".format(event.job.get('title')), queue='success')
else:
LOGGER.warn("job %s failed.", event.job.get('title'))
# logger.warn("status = %s", event.job.get('status'))
# event.request.session.flash("Job <b>{0}</b> failed.".format(event.job.get('title')), queue='danger')
|
[
"pyramid.events.subscriber",
"logging.getLogger"
] |
[((115, 143), 'logging.getLogger', 'logging.getLogger', (['"""PHOENIX"""'], {}), "('PHOENIX')\n", (132, 143), False, 'import logging\n'), ((147, 169), 'pyramid.events.subscriber', 'subscriber', (['JobStarted'], {}), '(JobStarted)\n', (157, 169), False, 'from pyramid.events import subscriber\n'), ((357, 380), 'pyramid.events.subscriber', 'subscriber', (['JobFinished'], {}), '(JobFinished)\n', (367, 380), False, 'from pyramid.events import subscriber\n')]
|
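A note on the pyramid record above: pyramid.events.subscriber is a venusian-style decorator, so notify_job_started and notify_job_finished are only registered once their module is scanned during application configuration. A minimal sketch of the wiring such a snippet assumes; the package name "phoenix" comes from the imports above, while the factory itself is illustrative, not the project's actual startup code.

from pyramid.config import Configurator

def main(global_config, **settings):
    config = Configurator(settings=settings)
    # @subscriber registrations stay inert until their module is scanned
    config.scan("phoenix")
    return config.make_wsgi_app()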
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Author <NAME>
# E-mail <EMAIL>
# License MIT
# Created 03/11/2016
# Updated 11/12/2016
# Version 1.0.0
#
"""
Description of classify.py
======================
save train & test in files
read train & test
for list of classifier
train/test
gather results prec/rec/f
print best clf and results
:Example:
source activate py27
ipython
run classify.py
Only for 100 percent precision
run classify.py --train /media/sf_DATA/Datasets/Simbals/yann/train.csv --test /media/sf_DATA/Datasets/Simbals/yann/test.csv
notes
RandomForest complexity
https://www.quora.com/What-is-in-general-time-complexity-of-random-forest-What-are-the-important-parameters-that-affect-this-complexity
n instances and m attributes
computational cost of building a tree is O(mn log n).
RandomForest done in 135939ms (3mn) for 13 attributes and 192 instances
mn log n = 13*192*math.log(192) = 13122 ( 135939ms)
mn log n = 39*186*math.log(186) = 37907 (~ms)
To know the element available
        print(clf.get_params().keys())
..todo::
Add
AdaBoostClassifier
BaggingClassifier
BernoulliNB
CalibratedClassifierCV
DPGMM
http://scikit-learn.org/stable/modules/generated/sklearn.mixture.DPGMM.html
Deprecated since version 0.18: This class will be removed in 0.20.
Use sklearn.mixture.BayesianGaussianMixture with parameter
weight_concentration_prior_type='dirichlet_process' instead.
DecisionTreeClassifier
ExtraTreeClassifier
ExtraTreesClassifier
GMM
GaussianNB
GradientBoostingClassifier
KNeighborsClassifier
LDA
LabelPropagation
LabelSpreading
LinearDiscriminantAnalysis
LogisticRegression
LogisticRegressionCV
MultinomialNB
NuSVC
QDA
QuadraticDiscriminantAnalysis
RandomForestClassifier
SGDClassifier
SVC
VBGMM
_ConstantPredictor
"""
import os
import sys
import time
import json
import utils
import joblib
import argparse
import webbrowser
import multiprocessing
import numpy as np
import matplotlib.pyplot as plt
from statistics import stdev
from functools import partial
from sklearn import metrics
from sklearn.model_selection import StratifiedKFold
from sklearn.neural_network import MLPClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis, LinearDiscriminantAnalysis
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score
from sklearn.utils.testing import all_estimators
from sklearn import linear_model
from sklearn.metrics import roc_curve
from sklearn.metrics import roc_auc_score
from sklearn.metrics import average_precision_score
def list_clf():
"""
..todo::
Do the same for:
class_weight
predict
predict_log_proba
"""
estimators = all_estimators()
for name, class_ in estimators:
if hasattr(class_, 'predict_proba'):
print(name)
def plot_clf(indir="res/"):
indir = utils.abs_path_dir(indir) + "/"
algos = []
measure = []
with open(indir + "global.csv", "r") as filep:
for line in filep:
line = line.split(",")
algos.append(line[0])
measure.append(tuple(map(float, line[1:4])))
n_groups = 3
fig, ax = plt.subplots(figsize=(10, 6))
index = np.arange(n_groups)
bar_width = 0.2
opacity = 0.4
error_config = {'ecolor': '0.3'}
color = utils.rand_color(len(algos))
rects = {}
offset = 0.15
for ind, algo in enumerate(algos):
print(ind)
print(tuple(measure[ind]))
rects[ind] = plt.bar(index + bar_width*ind + offset, tuple(measure[ind]), bar_width,
alpha=opacity,
color=color[ind],
label=algo)
plt.ylabel('Scores (in %)')
plt.xticks(index + bar_width*ind + offset, ('Precision', 'Recall', 'F-Measure'))
plt.legend()
plt.ylim(0, 1)
# spines & axis
ax = plt.gca()
ax.spines['right'].set_color('none')
ax.spines['top'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
art = []
lgd = ax.legend(loc=9, bbox_to_anchor=(1.1, 1.), frameon=False)
# lgd = pylab.legend(loc=9, bbox_to_anchor=(0.5, -0.1), ncol=2)
art.append(lgd)
# ax.legend()
plt.tight_layout()
img_name = "global.png"
plt.savefig(img_name, dpi=200, additional_artists=art, bbox_inches="tight")
# webbrowser.open(img_name)
# plt.show()
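# plot_clf() consumes the res/global.csv rows that classify() appends when
# disp=False, i.e. one "<classifier>,<precision>,<recall>,<f1>," line per run.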
def read_file(filename):
"""Description of read_file
train/test example line:
filename,feat1,feat2,...,featn,tag
"""
filename = utils.abs_path_file(filename)
groundtruths = []
features = []
with open(filename, "r") as filep:
for row in filep:
line = row.split(",")
groundtruths.append(line[-1][:-1])
features.append([float(i) for i in line[1:-1]])
return features, groundtruths
def read_preds(filename):
"""Description of read_file
ex file:
ISRC,tag
"""
filename = utils.abs_path_file(filename)
isrcs = {}
with open(filename, "r") as filep:
for row in filep:
line = row.split(",")
# print(line)
isrcs[line[0]] = float(line[1])
# isrcs[line[0]] = 1.0-float(line[1])
return isrcs
def read_item_tag(filename):
"""Description of read_file
example line:
filename,tag
"""
filename = utils.abs_path_file(filename)
groundtruths = {}
with open(filename, "r") as filep:
for row in filep:
line = row.split(",")
groundtruths[line[0]] = line[1][:-1]
return groundtruths
def precision_100percent(train, test):
"""Description of precision_100percent
..todo::
1 Find best clf with default param
2 vary param of best clf and find best param
3 use best param and best clf to find recall for 100 percent precision
"""
utils.print_success("Find Recall for best Precision for each tag")
train = utils.abs_path_file(train)
test = utils.abs_path_file(test)
train_features, train_groundtruths = read_file(train)
test_features, test_groundtruths = read_file(test)
classifiers = {
# "RandomForest": RandomForestClassifier(),#n_estimators=5
"DecisionTree":DecisionTreeClassifier()#,#max_depth=10
# "SVM":SVC(kernel="linear", C=0.0205),
# "ExtraTreesClassifier":ExtraTreesClassifier(n_estimators=5, criterion="entropy", max_features="log2", max_depth=9),
# "LogisticRegression":LogisticRegression()
}
tags = list(set(test_groundtruths))
nb_tag = len(tags)
step = 0.01
# for index, tag in enumerate(["i"]):
for index, tag in enumerate(tags):
utils.print_success("Tag " + tag)
max_precision = 0
max_recall = 0
max_f_measure = 0
max_clf = ""
max_weight = 0
for key in classifiers:
clf = classifiers[key]
# for weight in np.arange(0., 0.01, 0.000001):
# for weight in np.arange(step, 1-step, step):
for weight in np.arange(0.0, 1.0, step):
print("Classifier " + key + " & Weight " + str(weight))
sys.stdout.write("\033[F")
sys.stdout.write("\033[K")
clf.set_params(class_weight={"i":weight, "s":1-weight})
clf.fit(train_features, train_groundtruths)
predictions = clf.predict(test_features)
precision = precision_score(test_groundtruths, predictions, average=None)[index]
if precision >= max_precision:
recall = recall_score(test_groundtruths, predictions, average=None)[index]
# if recall > max_recall:
max_precision = precision
max_recall = recall
max_f_measure = f1_score(test_groundtruths, predictions, average=None)[index]
max_weight = weight
max_clf = key
sys.stdout.write("\033[K")
utils.print_info("\tClassifier " + str(max_clf))
utils.print_info("\tPrecision " + str(max_precision))
utils.print_info("\tRecall " + str(max_recall))
utils.print_info("\tF-Measure " + str(max_f_measure))
utils.print_info("\tWeight " + str(max_weight))
def train_test(train, test, res_dir="res/", disp=True, outfilename=None):
"""Description of compare
compare multiple classifier and display the best one
"""
utils.print_success("Comparison of differents classifiers")
if train is not None and test is not None:
train_features = []
test_features = []
train_groundtruths = []
test_groundtruths = []
for elem in train:
train_groundtruths.append(elem)
train_features.append(train[elem])
for elem in test:
test_groundtruths.append(elem)
test_features.append(test[elem])
else:
utils.print_error("No valid data provided.")
res_dir = utils.create_dir(res_dir)
classifiers = {
# "RandomForest": RandomForestClassifier(n_estimators=5),
"KNeighbors":KNeighborsClassifier(1),
# "GaussianProcess":GaussianProcessClassifier(1.0 * RBF(1.0), warm_start=True),
# "DecisionTree":DecisionTreeClassifier(max_depth=5),
# "MLP":MLPClassifier(),
# "AdaBoost":AdaBoostClassifier(),
# "GaussianNB":GaussianNB(),
# "QDA":QuadraticDiscriminantAnalysis(),
# "SVM":SVC(kernel="linear", C=0.025),
# "GradientBoosting":GradientBoostingClassifier(),
# "ExtraTrees":ExtraTreesClassifier(),
# "LogisticRegression":LogisticRegression(),
# "LinearDiscriminantAnalysis":LinearDiscriminantAnalysis()
}
for key in classifiers:
utils.print_success(key)
clf = classifiers[key]
utils.print_info("\tFit")
clf.fit(train_features, train_groundtruths)
utils.print_info("\tPredict")
predictions = clf.predict(test_features)
print("Precision weighted\t" + str(precision_score(test_groundtruths, predictions, average='weighted')))
print("Recall weighted\t" + str(recall_score(test_groundtruths, predictions, average='weighted')))
print("F1 weighted\t" + str(f1_score(test_groundtruths, predictions, average='weighted')))
# print("Precision weighted\t" + str(precision_score(test_groundtruths, predictions, average=None)))
# print("Recall weighted\t" + str(recall_score(test_groundtruths, predictions, average=None)))
# print("f1 weighted\t" + str(f1_score(test_groundtruths, predictions, average=None)))
def classify(train=None, test=None, data=None, res_dir="res/", disp=True, outfilename=None):
"""Description of compare
compare multiple classifier and display the best one
"""
utils.print_success("Comparison of differents classifiers")
if data is not None:
train_features = data["train_features"]
train_groundtruths = data["train_groundtruths"]
test_features = data["test_features"]
test_groundtruths = data["test_groundtruths"]
else:
train = utils.abs_path_file(train)
test = utils.abs_path_file(test)
train_features, train_groundtruths = read_file(train)
test_features, test_groundtruths = read_file(test)
if not utils.create_dir(res_dir):
res_dir = utils.abs_path_dir(res_dir)
classifiers = {
"RandomForest": RandomForestClassifier(n_jobs=-1)
# "RandomForest": RandomForestClassifier(n_estimators=5),
# "KNeighbors":KNeighborsClassifier(3),
# "GaussianProcess":GaussianProcessClassifier(1.0 * RBF(1.0), warm_start=True),
# "DecisionTree":DecisionTreeClassifier(max_depth=5),
# "MLP":MLPClassifier(),
# "AdaBoost":AdaBoostClassifier(),
# "GaussianNB":GaussianNB(),
# "QDA":QuadraticDiscriminantAnalysis(),
# "SVM":SVC(kernel="linear", C=0.025),
# "GradientBoosting":GradientBoostingClassifier(),
# "ExtraTrees":ExtraTreesClassifier(),
# "LogisticRegression":LogisticRegression(),
# "LinearDiscriminantAnalysis":LinearDiscriminantAnalysis()
}
for key in classifiers:
utils.print_success(key)
clf = classifiers[key]
utils.print_info("\tFit")
clf.fit(train_features, train_groundtruths)
utils.print_info("\tPredict")
predictions = clf.predict(test_features)
if outfilename is not None:
with open(outfilename, "w") as filep:
for gt, pred in zip(test_groundtruths, predictions):
filep.write(gt + "," + pred + "\n")
# Global
data = [key]
data.append(str(precision_score(test_groundtruths, predictions, average='weighted')))
data.append(str(recall_score(test_groundtruths, predictions, average='weighted')))
data.append(str(f1_score(test_groundtruths, predictions, average='weighted')))
data = ",".join(data)
if disp:
print(data)
else:
with open(res_dir + "global.csv", "a") as filep:
filep.write(data + ",\n")
# Local
for index, tag in enumerate(list(set(train_groundtruths))):
precision = precision_score(test_groundtruths, predictions, average=None)
recall = recall_score(test_groundtruths, predictions, average=None)
f1 = f1_score(test_groundtruths, predictions, average=None)
line = key + "," + str(precision[index]) + "," + str(recall[index]) + "," + str(f1[index])
if disp:
print(line)
else:
with open(res_dir + "tag_" + tag + ".csv", "a") as filep:
filep.write(line + ",\n")
return predictions
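# Example call, mirroring the argparse defaults declared under __main__ at the
# bottom of this file (the sample paths are not guaranteed to exist):
#   classify(train="data/proba_hist_train.csv",
#            test="data/proba_hist_test.csv",
#            res_dir="res/")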
def read_train_files(indir, separator=" "):
"""Description of read_train_files
Gather local features and GT from every individual train songs
"""
utils.print_success("Reading multiple train files")
indir = utils.abs_path_dir(indir) + "/"
groundtruths = []
features = []
included_extenstions = ["csv"]
filenames = [fn for fn in os.listdir(indir)
if any(fn.endswith(ext) for ext in included_extenstions)]
for index, filename in enumerate(filenames):
print(str(index + 1) + "/" + str(len(filenames)) + " " + filename)
sys.stdout.write("\033[F") # Cursor up one line
sys.stdout.write("\033[K") # Clear line
with open(indir + filename, "r") as filep:
for row in filep:
line = row.split(separator)
features.append([float(i) for i in line[:-1]])
groundtruths.append(line[-1][:-1])
sys.stdout.write("\033[K") # Clear line
return features, groundtruths
def read_train_file(filename):
"""
Read ONE train file
"""
groundtruths = []
features = []
filename = utils.abs_path_file(filename)
with open(filename, "r") as filep:
for line in filep:
line = line.split(",")
groundtruths.append(line[-1][:-1])
features.append(line[1:-1])
return features, groundtruths
def create_model(clf_name, features, groundtruths, outdir, classifiers):
begin = int(round(time.time() * 1000))
utils.print_success("Starting " + clf_name)
clf_dir = outdir + clf_name + "/"
utils.create_dir(clf_dir)
clf = classifiers[clf_name]
clf.fit(features, groundtruths)
joblib.dump(clf, clf_dir + clf_name + ".pkl")
utils.print_info(clf_name + " done in " + str(int(round(time.time() * 1000)) - begin) + "ms")
def create_models(outdir, train_features=None, train_groundtruths=None, train_file=None, train_dir=None, separator=" ", classifiers=None):
"""Description of create_models
Generate models for train data for different clf
In order to test later
..notes::
train_file must be formatted like:
item_name_1,feat1,feat2,...,featN,tag_or_class
item_name_2,feat1,feat2,...,featN,tag_or_class
...
item_name_N,feat1,feat2,...,featN,tag_or_class
..todo::
Manage when Provide train feat and gts or train_file
Find why commented clf cannot be used
        for train_dir = /media/sf_github/yann/train/
20h04m49s Creating models
20h04m49s Reading multiple train files
20h05m04s Starting SVM
20h05m07s Starting RandomForest
20h05m11s Starting GradientBoosting
20h05m16s Starting DecisionTree
20h05m22s Starting ExtraTrees
20h05m27s Starting AdaBoost
20h05m34s Starting KNeighbors
20h05m50s KNeighbors done in 60836ms
20h06m18s ExtraTrees done in 89147ms
20h06m29s DecisionTree done in 100211ms
20h07m05s RandomForest done in 135939ms
20h08m56s AdaBoost done in 246550ms
20h13m40s GradientBoosting done in 530909ms
00h43m29s SVM done in 16719954ms
"""
utils.print_success("Creating models")
outdir = utils.abs_path_dir(outdir) + "/"
if train_file is not None:
features, groundtruths = read_train_file(train_file)
elif train_dir is not None:
features, groundtruths = read_train_files(train_dir, separator=separator)
else:
utils.print_warning("TODO Manage train feat and gts")
if classifiers is None:
classifiers = {
"RandomForest": RandomForestClassifier(),
"LogisticRegression":LogisticRegression(),
"KNeighbors":KNeighborsClassifier(),
"DecisionTree":DecisionTreeClassifier(),
"AdaBoost":AdaBoostClassifier(),
"GradientBoosting":GradientBoostingClassifier(),
"ExtraTrees":ExtraTreesClassifier(),
"SVM":SVC(kernel="linear", C=0.025, probability=True)
# "GaussianProcess":GaussianProcessClassifier(),
# "MLP":MLPClassifier(),
# "GaussianNB":GaussianNB(),
# "QDA":QuadraticDiscriminantAnalysis(),
# "LinearDiscriminantAnalysis":LinearDiscriminantAnalysis()
}
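        # NOTE: nothing below fits this default dict; training for it is meant
        # to go through create_model() or the parallel block commented out at
        # the end of this function. Only the RandomForest branch in the else
        # clause below trains a model inline.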
else:
if "RandomForest" in classifiers:
clf_name = "RandomForest"
begin = int(round(time.time() * 1000))
utils.print_success("Starting " + clf_name)
clf_dir = outdir + clf_name + "/"
utils.create_dir(clf_dir)
clf = RandomForestClassifier(n_jobs=-1)
# clf = RandomForestClassifier(verbose=100)
clf.fit(features, groundtruths)
joblib.dump(clf, clf_dir + clf_name + ".pkl")
utils.print_info(clf_name + " done in " + str(int(round(time.time() * 1000)) - begin) + "ms")
# # Parallel computing
# clf = []
# for key in classifiers:
# clf.append(key)
# partial_create_model = partial(create_model, features=features, groundtruths=groundtruths, outdir=outdir, classifiers=classifiers)
# # pool = multiprocessing.Pool(4)
# pool = multiprocessing.Pool(len(classifiers))
# pool.map(partial_create_model, clf) #make our results with a map call
# pool.close() #we are not adding any more processes
# pool.join() #tell it to wait until all threads are done before going on
def read_test_file(filename):
"""
Read ONE test file with content like:
feat1 feat2 ... featN
feat1 feat2 ... featN
...
feat1 feat2 ... featN
"""
features = []
filename = utils.abs_path_file(filename)
with open(filename, "r") as filep:
for line in filep:
line = line.split(" ")
line[-1] = line[-1][:-1]
feat = []
for tmp_feat in line:
feat.append(float(tmp_feat))
features.append(feat)
return features
def column(matrix, i):
return [row[i] for row in matrix]
def test_models(models_dir, test_dir, out_dir):
models_dir = utils.abs_path_dir(models_dir) + "/"
test_dir = utils.abs_path_dir(test_dir) + "/"
utils.create_dir(out_dir)
test_files = os.listdir(test_dir)
models = os.listdir(models_dir)
for model in models:
utils.print_success(model)
pred_dir = out_dir + model + "/"
utils.create_dir(pred_dir)
clf = joblib.load(models_dir + model + "/" + model + ".pkl")
for index, test_file in enumerate(test_files):
print(str(index) + "\t" + test_file)
sys.stdout.write("\033[F")
sys.stdout.write("\033[K")
test_features = read_test_file(test_dir + test_file)
predictions = clf.predict_proba(test_features)
with open(pred_dir + test_file, "w") as filep:
for pred in predictions:
filep.write(str(pred[0]) + "\n")
sys.stdout.write("\033[K")
def test_model(model, models_dir, test_dir, out_dir, test_files=None, test_file=None):
"""Description of test_model
Use one model previously fitted in order to predict_proba() or predict()
the tag for a bunch of test_files
..todo::
To enhance computation time: only compute file which are in groundtruths
if file already computed, do not recompute
"""
begin = int(round(time.time() * 1000))
utils.print_success("Testing " + model)
pred_dir = out_dir + model
clf = joblib.load(models_dir + model + "/" + model + ".pkl")
if test_files is not None:
pred_dir = pred_dir + "/"
utils.create_dir(pred_dir)
for index, test_file in enumerate(test_files):
# Check if isrc is in groundtruths to speed up computation time
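            # NOTE: 'groundtruths' is not defined anywhere in this function or
            # module scope; callers must provide it as a module-level mapping
            # (see the todo in the docstring above).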
if test_file[:12] in groundtruths:
test_features = read_test_file(test_dir + test_file)
try:
predictions = clf.predict_proba(test_features)
except AttributeError:
utils.print_warning("predict_proba does not exists for " + model + "\nRegular predict function is used.")
predictions = clf.predict(test_features)
with open(pred_dir + test_file, "w") as filep:
for pred in predictions:
filep.write(str(pred[0]) + "\n")
elif test_file is not None:
pred_dir = pred_dir + "_"
test_features = []
filename = []
with open(test_file, "r") as filep:
for index, line in enumerate(filep):
line = line.split(",")
# print(str(index) + " " + line[0])
test_features.append(line[1:-1])
filename.append(line[0])
try:
predictions = clf.predict_proba(test_features)
with open(pred_dir + "predict_proba.csv", "a") as filep2:
for filen, pred in zip(filename, predictions):
filep2.write(filen + "," + str(pred[0]) + "\n")
        except:  # models without predict_proba fall through to plain predict()
pass
predictions = clf.predict(test_features)
with open(pred_dir + "predict.csv", "a") as filep2:
for filen, pred in zip(filename, predictions):
filep2.write(filen + "," + str(pred[0]) + "\n")
else:
utils.print_error("Error in arg for test_model() function")
utils.print_info(model + " done in " + str(int(round(time.time() * 1000)) - begin) + "ms")
def test_models_parallel(models_dir, out_dir, test_dir=None, test_file=None):
"""Description of test_models_parallel
17h16m12s DecisionTree done in 16135373ms
17h25m08s GradientBoosting done in 16671109ms
18h59m05s RandomForest done in 22307811ms
18h59m07s AdaBoost done in 22310633ms
19h18m12s ExtraTrees done in 23455779ms
"""
models_dir = utils.abs_path_dir(models_dir) + "/"
models = os.listdir(models_dir)
utils.create_dir(out_dir)
if test_dir is not None:
test_dir = utils.abs_path_dir(test_dir) + "/"
test_files = os.listdir(test_dir)
test_file = None
elif test_file is not None:
test_files = None
else:
utils.print_warning("TODO Error in arg for test_models_parallel() function")
partial_test_model = partial(test_model, models_dir=models_dir, test_dir=test_dir, out_dir=out_dir, test_files=test_files, test_file=test_file)
pool = multiprocessing.Pool(len(models))
pool.map(partial_test_model, models) #make our results with a map call
pool.close() #we are not adding any more processes
pool.join() #tell it to wait until all threads are done before going on
def cross_validation(train_filename, n_folds, outfilename):
filename = utils.abs_path_file(train_filename)
features = []
groundtruths = []
with open(filename, "r") as filep:
for line in filep:
line = line.split(",")
features.append([float(x) for x in line[1:-1]])
groundtruths.append(line[-1][:-1])
features = np.array(features)
groundtruths = np.array(groundtruths)
# Init
# if os.path.exists(outfilename):
try:
with open(outfilename, "r") as filep:
data = json.load(filep)
    except:  # no previous results file (or invalid JSON); start fresh
data = {}
# else:
# data = {}
algo_name = "Method 1"
data[algo_name] = {}
data[algo_name]["uneven"] = {}
data[algo_name]["balanced"] = {}
for distribution in data[algo_name]:
data[algo_name][distribution]["precision"] = {}
data[algo_name][distribution]["recall"] = {}
data[algo_name][distribution]["f1"] = {}
for tmp in data[algo_name][distribution]:
data[algo_name][distribution][tmp]["instru"] = []
data[algo_name][distribution][tmp]["song"] = []
skf = StratifiedKFold(n_splits=n_folds)
for i in range(0, 10):
utils.print_warning("TODO for i in range")
song_precis = []
song_recall = []
song_fmeasu = []
inst_precis = []
inst_recall = []
inst_fmeasu = []
cur_fold = 0
for train, test in skf.split(features, groundtruths):
cur_fold += 1
utils.print_success("Iteration " + str(i) + "\tFold " + str(cur_fold))
dataset = {}
dataset["train_features"] = features[train]
dataset["train_groundtruths"] = groundtruths[train]
dataset["test_features"] = features[test]
dataset["test_groundtruths"] = groundtruths[test]
predictions = classify(data=dataset)
song_precis.append(precision_score(dataset["test_groundtruths"], predictions, average=None)[1])
song_recall.append(recall_score(dataset["test_groundtruths"], predictions, average=None)[1])
song_fmeasu.append(f1_score(dataset["test_groundtruths"], predictions, average=None)[1])
inst_precis.append(precision_score(dataset["test_groundtruths"], predictions, average=None)[0])
inst_recall.append(recall_score(dataset["test_groundtruths"], predictions, average=None)[0])
inst_fmeasu.append(f1_score(dataset["test_groundtruths"], predictions, average=None)[0])
song_precis = sum(song_precis) / float(len(song_precis))
song_recall = sum(song_recall) / float(len(song_recall))
song_fmeasu = sum(song_fmeasu) / float(len(song_fmeasu))
inst_precis = sum(inst_precis) / float(len(inst_precis))
inst_recall = sum(inst_recall) / float(len(inst_recall))
inst_fmeasu = sum(inst_fmeasu) / float(len(inst_fmeasu))
# Song
data[algo_name]["balanced"]["precision"]["song"].append(song_precis)
data[algo_name]["balanced"]["recall"]["song"].append(song_recall)
data[algo_name]["balanced"]["f1"]["song"].append(song_fmeasu)
# Instru
data[algo_name]["balanced"]["precision"]["instru"].append(inst_precis)
data[algo_name]["balanced"]["recall"]["instru"].append(inst_recall)
data[algo_name]["balanced"]["f1"]["instru"].append(inst_fmeasu)
with open(outfilename, "w") as outfile:
json.dump(data, outfile, indent=2)
def split(features, groundtruths, n_split):
"""Description of split
1 tmp array containing all item for each tag
2 random split of array for each tag
..todo::
manage possible errors
randomize split selection
"""
if n_split == 1:
return features, groundtruths
tags = list(set(groundtruths))
new_index = {}
for tag in tags:
new_index[tag] = []
for index, gt in enumerate(groundtruths):
new_index[gt].append(index)
new_feats = []
new_gts = []
for i in range(0, n_split):
indexes = []
for tag in tags:
            ref = len(new_index[tag]) // n_split  # floor division: slice bounds must be ints
indexes.append(new_index[tag][ref*i:ref*(i+1)])
"""
..todo:: manage multiple tags!
"""
indexes = indexes[0] + indexes[1]
# print(features[:5])
# print(len(indexes))
# print(len(indexes[0]))
# print(len(indexes[1]))
# sys.exit()
indexes.sort()
new_gts.append([groundtruths[j] for j in indexes])
new_feats.append([features[j] for j in indexes])
return new_feats, new_gts
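# Worked example of split(): with groundtruths ["i", "s", "i", "s"] and
# n_split=2, new_index is {"i": [0, 2], "s": [1, 3]} and the two folds come
# out as indexes [0, 1] and [2, 3], so each fold keeps the tag balance of
# the full set (slices are contiguous, not randomised -- see the todo above).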
def increasing_test(groundtruths_file, predictions_file, metric, tag):
gts = read_item_tag(groundtruths_file)
preds = read_item_tag(predictions_file)
test_groundtruths = []
predictions = []
for isrc in preds:
if isrc in gts:
test_groundtruths.append(gts[isrc])
predictions.append(preds[isrc])
res = []
if "accuracy" in metric:
res.append(accuracy_score(test_groundtruths, predictions))
elif "precision" in metric:
res.append(precision_score(test_groundtruths, predictions, average=None)[tag])
elif "recall" in metric:
res.append(recall_score(test_groundtruths, predictions, average=None)[tag])
elif "f1_score" in metric:
res.append(f1_score(test_groundtruths, predictions, average=None)[tag])
else:
utils.print_error("classify.py line 735 metric argument error")
# print("Accuracy : " + str(accuracy_score(test_groundtruths, predictions)))
# print("Precision: " + str(precision_score(test_groundtruths, predictions, average=None)))
# print("Recall : " + str(recall_score(test_groundtruths, predictions, average=None)))
# print("F-score : " + str(f1_score(test_groundtruths, predictions, average=None)))
n_splits = 10
# for n_split in range(2, n_splits+1):
for n_split in [2, 10, 100]:
print("\t" + str(n_split))
feats_array, gts_array = split(predictions, test_groundtruths, n_split)
tmp_acc = []
for feats, gts in zip(feats_array, gts_array):
if "accuracy" in metric:
cur_acc = accuracy_score(gts, feats)
elif "precision" in metric:
cur_acc = precision_score(gts, feats, average=None)[tag]
elif "recall" in metric:
cur_acc = recall_score(gts, feats, average=None)[tag]
elif "f1_score" in metric:
cur_acc = f1_score(gts, feats, average=None)[tag]
tmp_acc.append(cur_acc)
print("\t\t" + str(stdev(tmp_acc)))
accuracy = sum(tmp_acc) / float(len(tmp_acc))
res.append(accuracy)
return res
def growing_testset(train_filename, test_filename, clf, clf_name=None):
"""Description of growing_testset
1 Generate accuracy graph for global
2 Create precision / recall / f-measure figures for each tag
..todo::
intermediate file which stores predictions for each ISRC
param for number of steps
        repeat N times
        division problem! it currently does N, N/2, ..., N/10 but we want:
1*N/10 2*N/10 ... 10*N/10
"""
train_features, train_groundtruths = read_file(train_filename)
test_features, test_groundtruths = read_file(test_filename)
if clf_name is not None and "RANSAC" in clf_name:
train_groundtruths = [True if i =="s" else False for i in train_groundtruths]
test_groundtruths = [True if i =="s" else False for i in test_groundtruths]
clf.fit(train_features, train_groundtruths)
if clf_name is not None and "RANSAC" in clf_name:
preds_float = clf.predict(test_features)
predictions = [True if i > 0.5 else False for i in preds_float]
else:
predictions = clf.predict(test_features)
test_acc = []
# test_acc.append(accuracy_score(test_groundtruths, predictions))
test_acc.append(precision_score(test_groundtruths, predictions, average=None)[0])
print("Accuracy : " + str(test_acc))
print("Precision: " + str(precision_score(test_groundtruths, predictions, average=None)))
print("Recall : " + str(recall_score(test_groundtruths, predictions, average=None)))
print("F-score : " + str(f1_score(test_groundtruths, predictions, average=None)))
n_splits = 10
for n_split in range(2, n_splits+1):
print(n_split)
feats_array, gts_array = split(test_features, test_groundtruths, n_split)
tmp_acc = []
for feats, gts in zip(feats_array, gts_array):
if clf_name is not None and "RANSAC" in clf_name:
preds_float = clf.predict(feats)
predictions = [True if i > 0.5 else False for i in preds_float]
else:
predictions = clf.predict(feats)
# cur_acc = accuracy_score(gts, predictions)
cur_acc = precision_score(gts, predictions, average=None)[0]
tmp_acc.append(cur_acc)
print("\t" + str(cur_acc))
accuracy = sum(tmp_acc) / float(len(tmp_acc))
test_acc.append(accuracy)
return test_acc
def plot_roc(indir, gts_file, outdir):
groundtruths = read_item_tag(gts_file)
plt.figure(1)
plt.plot([0, 1], [0, 1], 'k--', label="Random (0.5)")
indir = utils.abs_path_dir(indir)
for item in os.listdir(indir):
if ".csv" in item:
isrcs = read_preds(indir + "/" + item)
test_groundtruths = []
predictions = []
for isrc in isrcs:
if isrc in groundtruths:
test_groundtruths.append(groundtruths[isrc])
predictions.append(isrcs[isrc])
test_groundtruths = [tag=="s" for tag in test_groundtruths]
fpr_rf, tpr_rf, _ = roc_curve(test_groundtruths, predictions)
label = item[:-4] + " (" + str(round(roc_auc_score(test_groundtruths, predictions), 3)) + ")"
color = ""
if "VQMM" in item:
color = "ro"
elif "SVMBFF" in item:
color = "g-"
elif "GA" in item:
color = "b:"
plt.plot(fpr_rf, tpr_rf, color, label=label)
ax = plt.gca()
ax.spines['right'].set_color('none')
ax.spines['top'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
# plt.title('ROC curve for Algo (AUC)')
plt.legend(loc='best')
outdir = utils.abs_path_dir(outdir)
roc_fn = outdir + "Figure_3_ROC.png"
plt.savefig(roc_fn, dpi=200, bbox_inches="tight")
plt.savefig(outdir + "Figure_3_ROC.eps")
# plt.show()
plt.close()
utils.print_success("ROC curve successfully created in " + roc_fn)
def plot_precision_recall(indir, gts_file, outdir):
groundtruths = read_item_tag(gts_file)
plt.figure(1)
indir = utils.abs_path_dir(indir)
for item in os.listdir(indir):
if ".csv" in item:
isrcs = read_preds(indir + "/" + item)
test_groundtruths = []
predictions = []
for isrc in isrcs:
if isrc in groundtruths:
test_groundtruths.append(groundtruths[isrc])
predictions.append(isrcs[isrc])
test_groundtruths = [tag=="s" for tag in test_groundtruths]
precision, recall, _ = precision_recall_curve(test_groundtruths, predictions)
plt.plot(recall, precision, label=item[:-4] + " (" + str(round(average_precision_score(test_groundtruths, predictions), 3)) + ")")
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([-0.05, 1.05])
plt.title('Precision-Recall curve for Algo (AUC)')
plt.legend(loc='best')
plt.savefig(outdir + "precision_recall.png", dpi=200, bbox_inches="tight")
# plt.show()
plt.close()
utils.print_success("Precision-Recall curve created in " + outdir)
if __name__ == "__main__":
PARSER = argparse.ArgumentParser(description="Compare classifiers")
PARSER.add_argument(
"--train",
help="path to train file",
type=str,
default="data/proba_hist_train.csv",
metavar="train")
PARSER.add_argument(
"--test",
help="path to test file",
type=str,
default="data/proba_hist_test.csv",
metavar="test")
PARSER.add_argument(
"-o",
"--outdir",
help="path to output directory",
type=str,
default="res/",
metavar="outdir")
plot_roc("roc_curve/")
# plot_precision_recall("/media/sf_github/classifiers/roc_curve/")
# # models_dir = "models_paral/"
# # utils.create_dir(models_dir)
# # train_file_1 = "/media/sf_DATA/Datasets/Simbals/yann/train.csv"
# # train_dir_1 = "/media/sf_github/yann/train/"
# # create_models(train_file=train_file_1)
# # create_models(outdir=models_dir, train_dir=train_dir_1)
# # test_models_parallel(models_dir, "/media/sf_DATA/Datasets/Simbals/yaafe/results/processed/", "/media/sf_DATA/Datasets/Simbals/yaafe/proba_preds/")
# # classify(PARSER.parse_args().train, PARSER.parse_args().test, PARSER.parse_args().outdir)
# # precision_100percent(PARSER.parse_args().train, PARSER.parse_args().test)
# # plot_clf()
# """
    # Saturday 26 November 2016: final tests for my algo
    # requested by <NAME> Matthias
# """
# train_file = "/media/sf_github/yann/2_local_predictions/method_3_trainset_normalized.txt"
# models_dir = "final_models/"
# utils.create_dir(models_dir)
# # create_models(outdir=models_dir, train_file=train_file)
# out_dir = "/media/sf_DATA/Datasets/Simbals/yann/algo_final/"
# utils.create_dir(out_dir)
# test_file="/media/sf_github/yann/2_local_predictions/method_3_testset_normalized_with_tag.txt"
# # test_models_parallel(
# # models_dir=models_dir,
# # test_file=test_file,
# # out_dir=out_dir)
# test_features = []
# isrc_order = []
# utils.print_info("Loading clf")
# clf = joblib.load("/media/sf_github/classifiers/final_modelsRandomForest/RandomForest.pkl")
# with open(test_file, "r") as filep:
# for index, line in enumerate(filep):
# line = line.split(",")
# utils.print_info(str(index) + "\t" + line[0])
# test_features.append(line[1:-1])
# isrc_order.append(line[0])
# utils.print_info("Predict_proba")
# predictions = clf.predict(test_features)
# # predictions = clf.predict_proba(test_features)
# utils.print_info("Writing results")
# with open("/media/sf_DATA/Datasets/Simbals/yann/algo_final/RF.txt" , "w") as filep2:
# for index, pred in enumerate(predictions):
# filep2.write(isrc_order[index] + "," + str(pred[0]) + "\n")
# utils.print_info("Done")
# test_groundtruths = {}
# with open("/media/sf_github/repro/groundtruths.csv", "r") as filep:
# for row in filep:
# line = row.split(",")
# test_groundtruths[line[0]] = line[1][:-1]
# for i in np.arange(0.1, 1.0, 0.1):
# outfile = open("results/Bayle2_"+str(i)+".csv", "w")
# utils.print_progress_start(str(i))
# with open("/media/sf_DATA/Datasets/Simbals/yann/algo_final/RFproba.txt", "r") as filep:
# for line in filep:
# line = line.split(",")
# if line[0] in test_groundtruths:
# if float(line[-1][:-1]) > i:
# prediction = "i"
# else:
# prediction = "s"
# outfile.write(line[0] + "," + prediction + "\n")
# utils.print_progress_end()
# outfile.close()
# # groundtruths = []
# # predictions = []
# outfile = open("results/Bayle.csv", "w")
# with open("/media/sf_DATA/Datasets/Simbals/yann/algo_final/RF.txt", "r") as filep:
# for line in filep:
# line = line.split(",")
# if line[0] in test_groundtruths:
# outfile.write(line[0] + "," + line[-1][:-1] + "\n")
# # groundtruths.append(test_groundtruths[line[0]])
# # predictions.append(line[-1][:-1])
# outfile.close()
# # utils.scores("bayle", predictions, groundtruths)
|
[
"sys.stdout.write",
"matplotlib.pyplot.title",
"utils.print_info",
"argparse.ArgumentParser",
"sklearn.metrics.accuracy_score",
"joblib.dump",
"sklearn.tree.DecisionTreeClassifier",
"matplotlib.pyplot.figure",
"sklearn.metrics.f1_score",
"numpy.arange",
"matplotlib.pyplot.gca",
"sklearn.svm.SVC",
"matplotlib.pyplot.tight_layout",
"utils.create_dir",
"matplotlib.pyplot.close",
"utils.abs_path_dir",
"sklearn.ensemble.ExtraTreesClassifier",
"sklearn.metrics.average_precision_score",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.subplots",
"sklearn.ensemble.RandomForestClassifier",
"functools.partial",
"json.dump",
"sklearn.ensemble.AdaBoostClassifier",
"matplotlib.pyplot.ylim",
"sklearn.utils.testing.all_estimators",
"utils.print_error",
"matplotlib.pyplot.legend",
"statistics.stdev",
"utils.print_success",
"sklearn.metrics.recall_score",
"sklearn.metrics.roc_auc_score",
"sklearn.metrics.precision_recall_curve",
"sklearn.linear_model.LogisticRegression",
"utils.print_warning",
"utils.abs_path_file",
"matplotlib.pyplot.ylabel",
"os.listdir",
"matplotlib.pyplot.xlim",
"json.load",
"matplotlib.pyplot.plot",
"sklearn.metrics.roc_curve",
"time.time",
"sklearn.ensemble.GradientBoostingClassifier",
"sklearn.neighbors.KNeighborsClassifier",
"numpy.array",
"sklearn.model_selection.StratifiedKFold",
"sklearn.metrics.precision_score",
"joblib.load",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((3293, 3309), 'sklearn.utils.testing.all_estimators', 'all_estimators', ([], {}), '()\n', (3307, 3309), False, 'from sklearn.utils.testing import all_estimators\n'), ((3757, 3786), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (3769, 3786), True, 'import matplotlib.pyplot as plt\n'), ((3800, 3819), 'numpy.arange', 'np.arange', (['n_groups'], {}), '(n_groups)\n', (3809, 3819), True, 'import numpy as np\n'), ((4270, 4297), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Scores (in %)"""'], {}), "('Scores (in %)')\n", (4280, 4297), True, 'import matplotlib.pyplot as plt\n'), ((4302, 4388), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(index + bar_width * ind + offset)', "('Precision', 'Recall', 'F-Measure')"], {}), "(index + bar_width * ind + offset, ('Precision', 'Recall',\n 'F-Measure'))\n", (4312, 4388), True, 'import matplotlib.pyplot as plt\n'), ((4387, 4399), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4397, 4399), True, 'import matplotlib.pyplot as plt\n'), ((4404, 4418), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(1)'], {}), '(0, 1)\n', (4412, 4418), True, 'import matplotlib.pyplot as plt\n'), ((4449, 4458), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4456, 4458), True, 'import matplotlib.pyplot as plt\n'), ((4814, 4832), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4830, 4832), True, 'import matplotlib.pyplot as plt\n'), ((4865, 4940), 'matplotlib.pyplot.savefig', 'plt.savefig', (['img_name'], {'dpi': '(200)', 'additional_artists': 'art', 'bbox_inches': '"""tight"""'}), "(img_name, dpi=200, additional_artists=art, bbox_inches='tight')\n", (4876, 4940), True, 'import matplotlib.pyplot as plt\n'), ((5140, 5169), 'utils.abs_path_file', 'utils.abs_path_file', (['filename'], {}), '(filename)\n', (5159, 5169), False, 'import utils\n'), ((5559, 5588), 'utils.abs_path_file', 'utils.abs_path_file', (['filename'], {}), '(filename)\n', (5578, 5588), False, 'import utils\n'), ((5962, 5991), 'utils.abs_path_file', 'utils.abs_path_file', (['filename'], {}), '(filename)\n', (5981, 5991), False, 'import utils\n'), ((6470, 6536), 'utils.print_success', 'utils.print_success', (['"""Find Recall for best Precision for each tag"""'], {}), "('Find Recall for best Precision for each tag')\n", (6489, 6536), False, 'import utils\n'), ((6549, 6575), 'utils.abs_path_file', 'utils.abs_path_file', (['train'], {}), '(train)\n', (6568, 6575), False, 'import utils\n'), ((6587, 6612), 'utils.abs_path_file', 'utils.abs_path_file', (['test'], {}), '(test)\n', (6606, 6612), False, 'import utils\n'), ((9069, 9128), 'utils.print_success', 'utils.print_success', (['"""Comparison of differents classifiers"""'], {}), "('Comparison of differents classifiers')\n", (9088, 9128), False, 'import utils\n'), ((9603, 9628), 'utils.create_dir', 'utils.create_dir', (['res_dir'], {}), '(res_dir)\n', (9619, 9628), False, 'import utils\n'), ((11438, 11497), 'utils.print_success', 'utils.print_success', (['"""Comparison of differents classifiers"""'], {}), "('Comparison of differents classifiers')\n", (11457, 11497), False, 'import utils\n'), ((14585, 14636), 'utils.print_success', 'utils.print_success', (['"""Reading multiple train files"""'], {}), "('Reading multiple train files')\n", (14604, 14636), False, 'import utils\n'), ((15352, 15378), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[K"""'], {}), "('\\x1b[K')\n", (15368, 15378), False, 'import sys\n'), ((15553, 15582), 'utils.abs_path_file', 
'utils.abs_path_file', (['filename'], {}), '(filename)\n', (15572, 15582), False, 'import utils\n'), ((15926, 15969), 'utils.print_success', 'utils.print_success', (["('Starting ' + clf_name)"], {}), "('Starting ' + clf_name)\n", (15945, 15969), False, 'import utils\n'), ((16012, 16037), 'utils.create_dir', 'utils.create_dir', (['clf_dir'], {}), '(clf_dir)\n', (16028, 16037), False, 'import utils\n'), ((16110, 16155), 'joblib.dump', 'joblib.dump', (['clf', "(clf_dir + clf_name + '.pkl')"], {}), "(clf, clf_dir + clf_name + '.pkl')\n", (16121, 16155), False, 'import joblib\n'), ((17583, 17621), 'utils.print_success', 'utils.print_success', (['"""Creating models"""'], {}), "('Creating models')\n", (17602, 17621), False, 'import utils\n'), ((20069, 20098), 'utils.abs_path_file', 'utils.abs_path_file', (['filename'], {}), '(filename)\n', (20088, 20098), False, 'import utils\n'), ((20611, 20636), 'utils.create_dir', 'utils.create_dir', (['out_dir'], {}), '(out_dir)\n', (20627, 20636), False, 'import utils\n'), ((20654, 20674), 'os.listdir', 'os.listdir', (['test_dir'], {}), '(test_dir)\n', (20664, 20674), False, 'import os\n'), ((20688, 20710), 'os.listdir', 'os.listdir', (['models_dir'], {}), '(models_dir)\n', (20698, 20710), False, 'import os\n'), ((21847, 21886), 'utils.print_success', 'utils.print_success', (["('Testing ' + model)"], {}), "('Testing ' + model)\n", (21866, 21886), False, 'import utils\n'), ((21928, 21982), 'joblib.load', 'joblib.load', (["(models_dir + model + '/' + model + '.pkl')"], {}), "(models_dir + model + '/' + model + '.pkl')\n", (21939, 21982), False, 'import joblib\n'), ((24336, 24358), 'os.listdir', 'os.listdir', (['models_dir'], {}), '(models_dir)\n', (24346, 24358), False, 'import os\n'), ((24363, 24388), 'utils.create_dir', 'utils.create_dir', (['out_dir'], {}), '(out_dir)\n', (24379, 24388), False, 'import utils\n'), ((24719, 24846), 'functools.partial', 'partial', (['test_model'], {'models_dir': 'models_dir', 'test_dir': 'test_dir', 'out_dir': 'out_dir', 'test_files': 'test_files', 'test_file': 'test_file'}), '(test_model, models_dir=models_dir, test_dir=test_dir, out_dir=\n out_dir, test_files=test_files, test_file=test_file)\n', (24726, 24846), False, 'from functools import partial\n'), ((25174, 25209), 'utils.abs_path_file', 'utils.abs_path_file', (['train_filename'], {}), '(train_filename)\n', (25193, 25209), False, 'import utils\n'), ((25473, 25491), 'numpy.array', 'np.array', (['features'], {}), '(features)\n', (25481, 25491), True, 'import numpy as np\n'), ((25511, 25533), 'numpy.array', 'np.array', (['groundtruths'], {}), '(groundtruths)\n', (25519, 25533), True, 'import numpy as np\n'), ((26250, 26283), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': 'n_folds'}), '(n_splits=n_folds)\n', (26265, 26283), False, 'from sklearn.model_selection import StratifiedKFold\n'), ((34342, 34355), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (34352, 34355), True, 'import matplotlib.pyplot as plt\n'), ((34360, 34413), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 1]', '[0, 1]', '"""k--"""'], {'label': '"""Random (0.5)"""'}), "([0, 1], [0, 1], 'k--', label='Random (0.5)')\n", (34368, 34413), True, 'import matplotlib.pyplot as plt\n'), ((34431, 34456), 'utils.abs_path_dir', 'utils.abs_path_dir', (['indir'], {}), '(indir)\n', (34449, 34456), False, 'import utils\n'), ((34473, 34490), 'os.listdir', 'os.listdir', (['indir'], {}), '(indir)\n', (34483, 34490), False, 'import os\n'), ((35362, 35371), 
'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (35369, 35371), True, 'import matplotlib.pyplot as plt\n'), ((35538, 35571), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""False positive rate"""'], {}), "('False positive rate')\n", (35548, 35571), True, 'import matplotlib.pyplot as plt\n'), ((35576, 35608), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True positive rate"""'], {}), "('True positive rate')\n", (35586, 35608), True, 'import matplotlib.pyplot as plt\n'), ((35657, 35679), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (35667, 35679), True, 'import matplotlib.pyplot as plt\n'), ((35693, 35719), 'utils.abs_path_dir', 'utils.abs_path_dir', (['outdir'], {}), '(outdir)\n', (35711, 35719), False, 'import utils\n'), ((35765, 35814), 'matplotlib.pyplot.savefig', 'plt.savefig', (['roc_fn'], {'dpi': '(200)', 'bbox_inches': '"""tight"""'}), "(roc_fn, dpi=200, bbox_inches='tight')\n", (35776, 35814), True, 'import matplotlib.pyplot as plt\n'), ((35819, 35859), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(outdir + 'Figure_3_ROC.eps')"], {}), "(outdir + 'Figure_3_ROC.eps')\n", (35830, 35859), True, 'import matplotlib.pyplot as plt\n'), ((35881, 35892), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (35890, 35892), True, 'import matplotlib.pyplot as plt\n'), ((35897, 35963), 'utils.print_success', 'utils.print_success', (["('ROC curve successfully created in ' + roc_fn)"], {}), "('ROC curve successfully created in ' + roc_fn)\n", (35916, 35963), False, 'import utils\n'), ((36064, 36077), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (36074, 36077), True, 'import matplotlib.pyplot as plt\n'), ((36095, 36120), 'utils.abs_path_dir', 'utils.abs_path_dir', (['indir'], {}), '(indir)\n', (36113, 36120), False, 'import utils\n'), ((36137, 36154), 'os.listdir', 'os.listdir', (['indir'], {}), '(indir)\n', (36147, 36154), False, 'import os\n'), ((36797, 36817), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Recall"""'], {}), "('Recall')\n", (36807, 36817), True, 'import matplotlib.pyplot as plt\n'), ((36822, 36845), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Precision"""'], {}), "('Precision')\n", (36832, 36845), True, 'import matplotlib.pyplot as plt\n'), ((36850, 36871), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0.0, 1.05]'], {}), '([0.0, 1.05])\n', (36858, 36871), True, 'import matplotlib.pyplot as plt\n'), ((36876, 36899), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[-0.05, 1.05]'], {}), '([-0.05, 1.05])\n', (36884, 36899), True, 'import matplotlib.pyplot as plt\n'), ((36904, 36954), 'matplotlib.pyplot.title', 'plt.title', (['"""Precision-Recall curve for Algo (AUC)"""'], {}), "('Precision-Recall curve for Algo (AUC)')\n", (36913, 36954), True, 'import matplotlib.pyplot as plt\n'), ((36959, 36981), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (36969, 36981), True, 'import matplotlib.pyplot as plt\n'), ((36986, 37060), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(outdir + 'precision_recall.png')"], {'dpi': '(200)', 'bbox_inches': '"""tight"""'}), "(outdir + 'precision_recall.png', dpi=200, bbox_inches='tight')\n", (36997, 37060), True, 'import matplotlib.pyplot as plt\n'), ((37082, 37093), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (37091, 37093), True, 'import matplotlib.pyplot as plt\n'), ((37098, 37164), 'utils.print_success', 'utils.print_success', (["('Precision-Recall curve created in ' + outdir)"], {}), "('Precision-Recall curve created in ' + 
outdir)\n", (37117, 37164), False, 'import utils\n'), ((37206, 37264), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Compare classifiers"""'}), "(description='Compare classifiers')\n", (37229, 37264), False, 'import argparse\n'), ((3457, 3482), 'utils.abs_path_dir', 'utils.abs_path_dir', (['indir'], {}), '(indir)\n', (3475, 3482), False, 'import utils\n'), ((6836, 6860), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (6858, 6860), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((7276, 7309), 'utils.print_success', 'utils.print_success', (["('Tag ' + tag)"], {}), "('Tag ' + tag)\n", (7295, 7309), False, 'import utils\n'), ((8565, 8591), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[K"""'], {}), "('\\x1b[K')\n", (8581, 8591), False, 'import sys\n'), ((9544, 9588), 'utils.print_error', 'utils.print_error', (['"""No valid data provided."""'], {}), "('No valid data provided.')\n", (9561, 9588), False, 'import utils\n'), ((9736, 9759), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', (['(1)'], {}), '(1)\n', (9756, 9759), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((10389, 10413), 'utils.print_success', 'utils.print_success', (['key'], {}), '(key)\n', (10408, 10413), False, 'import utils\n'), ((10453, 10478), 'utils.print_info', 'utils.print_info', (['"""\tFit"""'], {}), "('\\tFit')\n", (10469, 10478), False, 'import utils\n'), ((10539, 10568), 'utils.print_info', 'utils.print_info', (['"""\tPredict"""'], {}), "('\\tPredict')\n", (10555, 10568), False, 'import utils\n'), ((11753, 11779), 'utils.abs_path_file', 'utils.abs_path_file', (['train'], {}), '(train)\n', (11772, 11779), False, 'import utils\n'), ((11795, 11820), 'utils.abs_path_file', 'utils.abs_path_file', (['test'], {}), '(test)\n', (11814, 11820), False, 'import utils\n'), ((11953, 11978), 'utils.create_dir', 'utils.create_dir', (['res_dir'], {}), '(res_dir)\n', (11969, 11978), False, 'import utils\n'), ((11998, 12025), 'utils.abs_path_dir', 'utils.abs_path_dir', (['res_dir'], {}), '(res_dir)\n', (12016, 12025), False, 'import utils\n'), ((12070, 12103), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_jobs': '(-1)'}), '(n_jobs=-1)\n', (12092, 12103), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier\n'), ((12846, 12870), 'utils.print_success', 'utils.print_success', (['key'], {}), '(key)\n', (12865, 12870), False, 'import utils\n'), ((12910, 12935), 'utils.print_info', 'utils.print_info', (['"""\tFit"""'], {}), "('\\tFit')\n", (12926, 12935), False, 'import utils\n'), ((12996, 13025), 'utils.print_info', 'utils.print_info', (['"""\tPredict"""'], {}), "('\\tPredict')\n", (13012, 13025), False, 'import utils\n'), ((14649, 14674), 'utils.abs_path_dir', 'utils.abs_path_dir', (['indir'], {}), '(indir)\n', (14667, 14674), False, 'import utils\n'), ((15006, 15032), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[F"""'], {}), "('\\x1b[F')\n", (15022, 15032), False, 'import sys\n'), ((15069, 15095), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[K"""'], {}), "('\\x1b[K')\n", (15085, 15095), False, 'import sys\n'), ((17636, 17662), 'utils.abs_path_dir', 'utils.abs_path_dir', (['outdir'], {}), '(outdir)\n', (17654, 17662), False, 'import utils\n'), ((20520, 20550), 'utils.abs_path_dir', 'utils.abs_path_dir', (['models_dir'], {}), '(models_dir)\n', (20538, 20550), False, 'import utils\n'), ((20572, 20600), 
'utils.abs_path_dir', 'utils.abs_path_dir', (['test_dir'], {}), '(test_dir)\n', (20590, 20600), False, 'import utils\n'), ((20744, 20770), 'utils.print_success', 'utils.print_success', (['model'], {}), '(model)\n', (20763, 20770), False, 'import utils\n'), ((20820, 20846), 'utils.create_dir', 'utils.create_dir', (['pred_dir'], {}), '(pred_dir)\n', (20836, 20846), False, 'import utils\n'), ((20861, 20915), 'joblib.load', 'joblib.load', (["(models_dir + model + '/' + model + '.pkl')"], {}), "(models_dir + model + '/' + model + '.pkl')\n", (20872, 20915), False, 'import joblib\n'), ((21383, 21409), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[K"""'], {}), "('\\x1b[K')\n", (21399, 21409), False, 'import sys\n'), ((22056, 22082), 'utils.create_dir', 'utils.create_dir', (['pred_dir'], {}), '(pred_dir)\n', (22072, 22082), False, 'import utils\n'), ((24286, 24316), 'utils.abs_path_dir', 'utils.abs_path_dir', (['models_dir'], {}), '(models_dir)\n', (24304, 24316), False, 'import utils\n'), ((24494, 24514), 'os.listdir', 'os.listdir', (['test_dir'], {}), '(test_dir)\n', (24504, 24514), False, 'import os\n'), ((26319, 26361), 'utils.print_warning', 'utils.print_warning', (['"""TODO for i in range"""'], {}), "('TODO for i in range')\n", (26338, 26361), False, 'import utils\n'), ((28568, 28602), 'json.dump', 'json.dump', (['data', 'outfile'], {'indent': '(2)'}), '(data, outfile, indent=2)\n', (28577, 28602), False, 'import json\n'), ((7640, 7665), 'numpy.arange', 'np.arange', (['(0.0)', '(1.0)', 'step'], {}), '(0.0, 1.0, step)\n', (7649, 7665), True, 'import numpy as np\n'), ((13894, 13955), 'sklearn.metrics.precision_score', 'precision_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (13909, 13955), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((13977, 14035), 'sklearn.metrics.recall_score', 'recall_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (13989, 14035), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((14053, 14107), 'sklearn.metrics.f1_score', 'f1_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (14061, 14107), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((14786, 14803), 'os.listdir', 'os.listdir', (['indir'], {}), '(indir)\n', (14796, 14803), False, 'import os\n'), ((17894, 17947), 'utils.print_warning', 'utils.print_warning', (['"""TODO Manage train feat and gts"""'], {}), "('TODO Manage train feat and gts')\n", (17913, 17947), False, 'import utils\n'), ((18029, 18053), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {}), '()\n', (18051, 18053), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier\n'), ((18088, 18108), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (18106, 18108), False, 'from sklearn.linear_model import LogisticRegression\n'), ((18135, 18157), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {}), '()\n', (18155, 18157), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((18186, 18210), 
'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (18208, 18210), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((18235, 18255), 'sklearn.ensemble.AdaBoostClassifier', 'AdaBoostClassifier', ([], {}), '()\n', (18253, 18255), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier\n'), ((18288, 18316), 'sklearn.ensemble.GradientBoostingClassifier', 'GradientBoostingClassifier', ([], {}), '()\n', (18314, 18316), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier\n'), ((18343, 18365), 'sklearn.ensemble.ExtraTreesClassifier', 'ExtraTreesClassifier', ([], {}), '()\n', (18363, 18365), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier\n'), ((18385, 18432), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""', 'C': '(0.025)', 'probability': '(True)'}), "(kernel='linear', C=0.025, probability=True)\n", (18388, 18432), False, 'from sklearn.svm import SVC\n'), ((18861, 18904), 'utils.print_success', 'utils.print_success', (["('Starting ' + clf_name)"], {}), "('Starting ' + clf_name)\n", (18880, 18904), False, 'import utils\n'), ((18963, 18988), 'utils.create_dir', 'utils.create_dir', (['clf_dir'], {}), '(clf_dir)\n', (18979, 18988), False, 'import utils\n'), ((19007, 19040), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_jobs': '(-1)'}), '(n_jobs=-1)\n', (19029, 19040), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier\n'), ((19153, 19198), 'joblib.dump', 'joblib.dump', (['clf', "(clf_dir + clf_name + '.pkl')"], {}), "(clf, clf_dir + clf_name + '.pkl')\n", (19164, 19198), False, 'import joblib\n'), ((21032, 21058), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[F"""'], {}), "('\\x1b[F')\n", (21048, 21058), False, 'import sys\n'), ((21071, 21097), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[K"""'], {}), "('\\x1b[K')\n", (21087, 21097), False, 'import sys\n'), ((23754, 23813), 'utils.print_error', 'utils.print_error', (['"""Error in arg for test_model() function"""'], {}), "('Error in arg for test_model() function')\n", (23771, 23813), False, 'import utils\n'), ((24438, 24466), 'utils.abs_path_dir', 'utils.abs_path_dir', (['test_dir'], {}), '(test_dir)\n', (24456, 24466), False, 'import utils\n'), ((24616, 24692), 'utils.print_warning', 'utils.print_warning', (['"""TODO Error in arg for test_models_parallel() function"""'], {}), "('TODO Error in arg for test_models_parallel() function')\n", (24635, 24692), False, 'import utils\n'), ((25665, 25681), 'json.load', 'json.load', (['filep'], {}), '(filep)\n', (25674, 25681), False, 'import json\n'), ((30150, 30196), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['test_groundtruths', 'predictions'], {}), '(test_groundtruths, predictions)\n', (30164, 30196), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((33064, 33125), 'sklearn.metrics.precision_score', 'precision_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (33079, 33125), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((34927, 34968), 
'sklearn.metrics.roc_curve', 'roc_curve', (['test_groundtruths', 'predictions'], {}), '(test_groundtruths, predictions)\n', (34936, 34968), False, 'from sklearn.metrics import roc_curve\n'), ((35307, 35351), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr_rf', 'tpr_rf', 'color'], {'label': 'label'}), '(fpr_rf, tpr_rf, color, label=label)\n', (35315, 35351), True, 'import matplotlib.pyplot as plt\n'), ((36594, 36648), 'sklearn.metrics.precision_recall_curve', 'precision_recall_curve', (['test_groundtruths', 'predictions'], {}), '(test_groundtruths, predictions)\n', (36616, 36648), False, 'from sklearn.metrics import precision_recall_curve, average_precision_score\n'), ((7755, 7781), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[F"""'], {}), "('\\x1b[F')\n", (7771, 7781), False, 'import sys\n'), ((7798, 7824), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[K"""'], {}), "('\\x1b[K')\n", (7814, 7824), False, 'import sys\n'), ((13350, 13417), 'sklearn.metrics.precision_score', 'precision_score', (['test_groundtruths', 'predictions'], {'average': '"""weighted"""'}), "(test_groundtruths, predictions, average='weighted')\n", (13365, 13417), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((13444, 13508), 'sklearn.metrics.recall_score', 'recall_score', (['test_groundtruths', 'predictions'], {'average': '"""weighted"""'}), "(test_groundtruths, predictions, average='weighted')\n", (13456, 13508), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((13535, 13595), 'sklearn.metrics.f1_score', 'f1_score', (['test_groundtruths', 'predictions'], {'average': '"""weighted"""'}), "(test_groundtruths, predictions, average='weighted')\n", (13543, 13595), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((15901, 15912), 'time.time', 'time.time', ([], {}), '()\n', (15910, 15912), False, 'import time\n'), ((21822, 21833), 'time.time', 'time.time', ([], {}), '()\n', (21831, 21833), False, 'import time\n'), ((31331, 31357), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['gts', 'feats'], {}), '(gts, feats)\n', (31345, 31357), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((33201, 33262), 'sklearn.metrics.precision_score', 'precision_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (33216, 33262), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((33295, 33353), 'sklearn.metrics.recall_score', 'recall_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (33307, 33353), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((33386, 33440), 'sklearn.metrics.f1_score', 'f1_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (33394, 33440), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((34021, 34068), 'sklearn.metrics.precision_score', 
'precision_score', (['gts', 'predictions'], {'average': 'None'}), '(gts, predictions, average=None)\n', (34036, 34068), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((8042, 8103), 'sklearn.metrics.precision_score', 'precision_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (8057, 8103), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((10662, 10729), 'sklearn.metrics.precision_score', 'precision_score', (['test_groundtruths', 'predictions'], {'average': '"""weighted"""'}), "(test_groundtruths, predictions, average='weighted')\n", (10677, 10729), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((10772, 10836), 'sklearn.metrics.recall_score', 'recall_score', (['test_groundtruths', 'predictions'], {'average': '"""weighted"""'}), "(test_groundtruths, predictions, average='weighted')\n", (10784, 10836), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((10875, 10935), 'sklearn.metrics.f1_score', 'f1_score', (['test_groundtruths', 'predictions'], {'average': '"""weighted"""'}), "(test_groundtruths, predictions, average='weighted')\n", (10883, 10935), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((27046, 27118), 'sklearn.metrics.precision_score', 'precision_score', (["dataset['test_groundtruths']", 'predictions'], {'average': 'None'}), "(dataset['test_groundtruths'], predictions, average=None)\n", (27061, 27118), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((27154, 27223), 'sklearn.metrics.recall_score', 'recall_score', (["dataset['test_groundtruths']", 'predictions'], {'average': 'None'}), "(dataset['test_groundtruths'], predictions, average=None)\n", (27166, 27223), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((27259, 27324), 'sklearn.metrics.f1_score', 'f1_score', (["dataset['test_groundtruths']", 'predictions'], {'average': 'None'}), "(dataset['test_groundtruths'], predictions, average=None)\n", (27267, 27324), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((27360, 27432), 'sklearn.metrics.precision_score', 'precision_score', (["dataset['test_groundtruths']", 'predictions'], {'average': 'None'}), "(dataset['test_groundtruths'], predictions, average=None)\n", (27375, 27432), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((27468, 27537), 'sklearn.metrics.recall_score', 'recall_score', (["dataset['test_groundtruths']", 'predictions'], {'average': 'None'}), "(dataset['test_groundtruths'], predictions, average=None)\n", (27480, 27537), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((27573, 27638), 'sklearn.metrics.f1_score', 'f1_score', 
(["dataset['test_groundtruths']", 'predictions'], {'average': 'None'}), "(dataset['test_groundtruths'], predictions, average=None)\n", (27581, 27638), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((30249, 30310), 'sklearn.metrics.precision_score', 'precision_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (30264, 30310), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((30559, 30622), 'utils.print_error', 'utils.print_error', (['"""classify.py line 735 metric argument error"""'], {}), "('classify.py line 735 metric argument error')\n", (30576, 30622), False, 'import utils\n'), ((31746, 31760), 'statistics.stdev', 'stdev', (['tmp_acc'], {}), '(tmp_acc)\n', (31751, 31760), False, 'from statistics import stdev\n'), ((8187, 8245), 'sklearn.metrics.recall_score', 'recall_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (8199, 8245), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((8421, 8475), 'sklearn.metrics.f1_score', 'f1_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (8429, 8475), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((18828, 18839), 'time.time', 'time.time', ([], {}), '()\n', (18837, 18839), False, 'import time\n'), ((22477, 22589), 'utils.print_warning', 'utils.print_warning', (['(\'predict_proba does not exists for \' + model +\n """\nRegular predict function is used.""")'], {}), '(\'predict_proba does not exists for \' + model +\n """\nRegular predict function is used.""")\n', (22496, 22589), False, 'import utils\n'), ((30365, 30423), 'sklearn.metrics.recall_score', 'recall_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (30377, 30423), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((31424, 31465), 'sklearn.metrics.precision_score', 'precision_score', (['gts', 'feats'], {'average': 'None'}), '(gts, feats, average=None)\n', (31439, 31465), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((30480, 30534), 'sklearn.metrics.f1_score', 'f1_score', (['test_groundtruths', 'predictions'], {'average': 'None'}), '(test_groundtruths, predictions, average=None)\n', (30488, 30534), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((31534, 31572), 'sklearn.metrics.recall_score', 'recall_score', (['gts', 'feats'], {'average': 'None'}), '(gts, feats, average=None)\n', (31546, 31572), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((35018, 35063), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['test_groundtruths', 'predictions'], {}), '(test_groundtruths, predictions)\n', (35031, 35063), False, 'from sklearn.metrics import roc_auc_score\n'), 
((31643, 31677), 'sklearn.metrics.f1_score', 'f1_score', (['gts', 'feats'], {'average': 'None'}), '(gts, feats, average=None)\n', (31651, 31677), False, 'from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score\n'), ((16216, 16227), 'time.time', 'time.time', ([], {}), '()\n', (16225, 16227), False, 'import time\n'), ((23871, 23882), 'time.time', 'time.time', ([], {}), '()\n', (23880, 23882), False, 'import time\n'), ((36724, 36779), 'sklearn.metrics.average_precision_score', 'average_precision_score', (['test_groundtruths', 'predictions'], {}), '(test_groundtruths, predictions)\n', (36747, 36779), False, 'from sklearn.metrics import precision_recall_curve, average_precision_score\n'), ((19267, 19278), 'time.time', 'time.time', ([], {}), '()\n', (19276, 19278), False, 'import time\n')]
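The serialized call records above are plain Python literals, so they can be loaded without a custom parser. The sketch below is illustrative only and is not part of the dataset tooling: it assumes each record is an 8-tuple of (call span, fully qualified API name, the name as written in the source, an (args, kwargs) pair, the argument text, the span of the called name, a boolean flag, and the originating import statement). These field meanings are inferred from the records themselves rather than from a documented schema, and parse_call_records / count_libraries are hypothetical helper names chosen for illustration.

import ast
from collections import Counter


def parse_call_records(cell):
    """Parse one serialized record list (a Python literal) into tuples."""
    return ast.literal_eval(cell)


def count_libraries(records):
    """Tally the top-level package of each call, e.g. 'sklearn' or 'utils'."""
    return Counter(rec[1].split('.')[0] for rec in records)


# Usage with a single record quoted verbatim from the list above:
sample = (
    "[((20744, 20770), 'utils.print_success', 'utils.print_success', "
    "(['model'], {}), '(model)\\n', (20763, 20770), False, 'import utils\\n')]"
)
records = parse_call_records(sample)
print(count_libraries(records))  # Counter({'utils': 1})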