code
stringlengths 1
199k
|
|---|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``Contact`` model to the
    ``storybase_user`` app.

    The ``models`` attribute below is the ORM state frozen by South at the
    time this migration was generated; it is data, not live model code, and
    should not be edited by hand.
    """

    def forwards(self, orm):
        # Adding model 'Contact'
        db.create_table('storybase_user_contact', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('storybase.fields.ShortTextField')(blank=True)),
            ('info', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        # Let other apps react to the new model (South convention).
        db.send_create_signal('storybase_user', ['Contact'])

    def backwards(self, orm):
        # Deleting model 'Contact'
        db.delete_table('storybase_user_contact')

    # Frozen model definitions (auto-generated by South).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'storybase_asset.asset': {
            'Meta': {'object_name': 'Asset'},
            'asset_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'asset_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
            'attribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'datasets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'assets'", 'blank': 'True', 'to': "orm['storybase_asset.DataSet']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'license': ('django.db.models.fields.CharField', [], {'default': "'CC BY-NC-SA'", 'max_length': '25'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'assets'", 'null': 'True', 'to': "orm['auth.User']"}),
            'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'section_specific': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'source_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '10'})
        },
        'storybase_asset.dataset': {
            'Meta': {'object_name': 'DataSet'},
            'attribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'dataset_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'dataset_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'datasets'", 'null': 'True', 'to': "orm['auth.User']"}),
            'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'source': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'})
        },
        'storybase_story.story': {
            'Meta': {'object_name': 'Story'},
            'assets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_asset.Asset']"}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'stories'", 'null': 'True', 'to': "orm['auth.User']"}),
            'byline': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'featured_assets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'featured_in_stories'", 'blank': 'True', 'to': "orm['storybase_asset.Asset']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'license': ('django.db.models.fields.CharField', [], {'default': "'CC BY-NC-SA'", 'max_length': '25'}),
            'on_homepage': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_user.Organization']"}),
            'projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_user.Project']"}),
            'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'}),
            'story_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
            'structure_type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
        },
        'storybase_user.contact': {
            'Meta': {'object_name': 'Contact'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'name': ('storybase.fields.ShortTextField', [], {'blank': 'True'})
        },
        'storybase_user.organization': {
            'Meta': {'object_name': 'Organization'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'curated_stories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'curated_in_organizations'", 'blank': 'True', 'through': "orm['storybase_user.OrganizationStory']", 'to': "orm['storybase_story.Story']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'organizations'", 'blank': 'True', 'to': "orm['auth.User']"}),
            'organization_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
            'website_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'storybase_user.organizationstory': {
            'Meta': {'object_name': 'OrganizationStory'},
            'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Organization']"}),
            'story': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_story.Story']"}),
            'weight': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'storybase_user.organizationtranslation': {
            'Meta': {'unique_together': "(('organization', 'language'),)", 'object_name': 'OrganizationTranslation'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '15'}),
            'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('storybase.fields.ShortTextField', [], {}),
            'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Organization']"}),
            'translation_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
        },
        'storybase_user.project': {
            'Meta': {'object_name': 'Project'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'curated_stories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'curated_in_projects'", 'blank': 'True', 'through': "orm['storybase_user.ProjectStory']", 'to': "orm['storybase_story.Story']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'projects'", 'blank': 'True', 'to': "orm['auth.User']"}),
            'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'projects'", 'blank': 'True', 'to': "orm['storybase_user.Organization']"}),
            'project_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
            'website_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'storybase_user.projectstory': {
            'Meta': {'object_name': 'ProjectStory'},
            'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Project']"}),
            'story': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_story.Story']"}),
            'weight': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'storybase_user.projecttranslation': {
            'Meta': {'unique_together': "(('project', 'language'),)", 'object_name': 'ProjectTranslation'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '15'}),
            'name': ('storybase.fields.ShortTextField', [], {}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Project']"}),
            'translation_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
        }
    }

    complete_apps = ['storybase_user']
|
from setuptools import setup, find_packages
# Packaging metadata for the ``monsql`` distribution.
setup(name='monsql',
      version='0.1.7',
      packages = find_packages(),
      author='firstprayer',
      author_email='zhangty10@gmail.com',
      description='MonSQL - Mongodb-style way for using mysql.',
      url='https://github.com/firstprayer/monsql.git',
      # Runtime dependency: the MySQLdb bindings.
      install_requires=[
          'MySQL-python'
      ],
      )
|
u"""
.. module:: organizations
"""
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.shortcuts import render
from django.utils.text import slugify
from django.views.generic import View
from apps.volontulo.forms import VolounteerToOrganizationContactForm
from apps.volontulo.lib.email import send_mail
from apps.volontulo.models import Offer
from apps.volontulo.models import Organization
from apps.volontulo.models import UserProfile
from apps.volontulo.utils import correct_slug
def organizations_list(request):
    u"""View responsible for listing all organizations.

    :param request: WSGIRequest instance
    """
    context = {'organizations': Organization.objects.all()}
    return render(request, "organizations/list.html", context)
class OrganizationsCreate(View):
    u"""Class view supporting creation of new organization."""

    @staticmethod
    @login_required
    def get(request):
        u"""Method responsible for rendering form for new organization.

        :param request: WSGIRequest instance
        """
        return render(
            request,
            "organizations/organization_form.html",
            {'organization': Organization()}
        )

    @staticmethod
    @login_required
    def post(request):
        u"""Method responsible for saving new organization.

        :param request: WSGIRequest instance
        """
        # All three fields are required; if any is missing, re-render the
        # empty form with an error message.
        if not (
            request.POST.get('name') and
            request.POST.get('address') and
            request.POST.get('description')
        ):
            messages.error(
                request,
                u"Należy wypełnić wszystkie pola formularza."
            )
            return render(
                request,
                "organizations/organization_form.html",
                {'organization': Organization()}
            )
        organization = Organization(
            name=request.POST.get('name'),
            address=request.POST.get('address'),
            description=request.POST.get('description'),
        )
        organization.save()
        # Attach the creating user to the new organization.
        request.user.userprofile.organizations.add(organization)
        messages.success(
            request,
            u"Organizacja została dodana."
        )
        return redirect(
            'organization_view',
            slug=slugify(organization.name),
            id_=organization.id,
        )
@correct_slug(Organization, 'organization_form', 'name')
@login_required
def organization_form(request, slug, id_):  # pylint: disable=unused-argument
    u"""View responsible for editing organization.

    Edition will only work, if logged user has been registered as organization.

    :param request: WSGIRequest instance
    :param slug: slugified organization name (used only for URL correction)
    :param id_: primary key of the organization to edit
    """
    org = Organization.objects.get(pk=id_)
    users = [profile.user.email for profile in org.userprofiles.all()]
    # Only users registered to this organization may edit it.
    if (
        request.user.is_authenticated() and
        request.user.email not in users
    ):
        messages.error(
            request,
            u'Nie masz uprawnień do edycji tej organizacji.'
        )
        return redirect(
            reverse(
                'organization_view',
                args=[slugify(org.name), org.id]
            )
        )
    if not (
        request.user.is_authenticated() and
        # Bug fix: a related manager is always truthy, so the original
        # check (`...organizations` without a call) could never fail for an
        # authenticated user. Use .exists() to actually test membership.
        UserProfile.objects.get(user=request.user).organizations.exists()
    ):
        return redirect('homepage')
    if request.method == 'POST':
        if (
            request.POST.get('name') and
            request.POST.get('address') and
            request.POST.get('description')
        ):
            org.name = request.POST.get('name')
            org.address = request.POST.get('address')
            org.description = request.POST.get('description')
            org.save()
            messages.success(
                request,
                u'Oferta została dodana/zmieniona.'
            )
            return redirect(
                reverse(
                    'organization_view',
                    args=[slugify(org.name), org.id]
                )
            )
        else:
            messages.error(
                request,
                u"Należy wypełnić wszystkie pola formularza."
            )
    return render(
        request,
        "organizations/organization_form.html",
        {'organization': org},
    )
@correct_slug(Organization, 'organization_view', 'name')
def organization_view(request, slug, id_):  # pylint: disable=unused-argument
    u"""View responsible for viewing organization.

    :param request: WSGIRequest instance
    :param slug: slugified organization name (used only for URL correction)
    :param id_: primary key of the organization to show
    """
    org = get_object_or_404(Organization, id=id_)
    offers = Offer.objects.filter(organization_id=id_)
    # Default flags for an anonymous visitor: may contact, may not edit.
    allow_contact = True
    allow_edit = False
    allow_offer_create = False
    # Members of the organization may edit and create offers, but the
    # contact form is hidden from them.
    if (
        request.user.is_authenticated() and
        request.user.userprofile in org.userprofiles.all()
    ):
        allow_contact = False
        allow_edit = True
        allow_offer_create = True
    if request.method == 'POST':
        form = VolounteerToOrganizationContactForm(request.POST)
        if form.is_valid():
            # send email to first organization user (I assume it's main user)
            profile = Organization.objects.get(id=id_).userprofiles.all()[0]
            send_mail(
                request,
                'volunteer_to_organisation',
                [
                    profile.user.email,
                    request.POST.get('email'),
                ],
                {k: v for k, v in request.POST.items()},
            )
            messages.success(request, u'Email został wysłany.')
        else:
            messages.error(
                request,
                u"Formularz zawiera nieprawidłowe dane: {}".format(form.errors)
            )
            # Re-render with the bound (invalid) form so errors are shown.
            return render(
                request,
                "organizations/organization_view.html",
                {
                    'organization': org,
                    'contact_form': form,
                    'offers': offers,
                    'allow_contact': allow_contact,
                    'allow_edit': allow_edit,
                    'allow_offer_create': allow_offer_create,
                },
            )
    # GET request, or POST with a valid form: show a fresh contact form.
    return render(
        request,
        "organizations/organization_view.html",
        {
            'organization': org,
            'contact_form': VolounteerToOrganizationContactForm(),
            'offers': offers,
            'allow_contact': allow_contact,
            'allow_edit': allow_edit,
            'allow_offer_create': allow_offer_create,
        }
    )
|
import _plotly_utils.basevalidators
class TicktextsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``layout.xaxis.ticktextsrc`` source attribute."""

    def __init__(self, plotly_name="ticktextsrc", parent_name="layout.xaxis", **kwargs):
        # Callers may override edit_type; otherwise it defaults to "none".
        edit_type = kwargs.pop("edit_type", "none")
        super(TicktextsrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs
        )
|
"""
Example to show scheduling messages to and cancelling messages from a Service Bus Queue.
"""
import os
import datetime
from azure.servicebus import ServiceBusClient, ServiceBusMessage
# Connection settings come from the environment; a KeyError here means the
# variable is not set.
CONNECTION_STR = os.environ["SERVICE_BUS_CONNECTION_STR"]
TOPIC_NAME = os.environ["SERVICE_BUS_TOPIC_NAME"]
def schedule_single_message(sender):
    """Schedule one message ~30 seconds out; return its sequence number."""
    scheduled_time_utc = datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
    msg = ServiceBusMessage("Message to be scheduled")
    return sender.schedule_messages(msg, scheduled_time_utc)
def schedule_multiple_messages(sender):
    """Schedule a batch of ten messages ~30 seconds out; return their sequence numbers."""
    scheduled_time_utc = datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
    batch = [ServiceBusMessage("Message to be scheduled") for _ in range(10)]
    return sender.schedule_messages(batch, scheduled_time_utc)
def main():
    """Schedule single and batch messages on the topic, then cancel them all."""
    servicebus_client = ServiceBusClient.from_connection_string(
        conn_str=CONNECTION_STR, logging_enable=True
    )
    with servicebus_client:
        sender = servicebus_client.get_topic_sender(topic_name=TOPIC_NAME)
        with sender:
            sequence_number = schedule_single_message(sender)
            print(
                "Single message is scheduled and sequence number is {}".format(
                    sequence_number
                )
            )
            sequence_numbers = schedule_multiple_messages(sender)
            print(
                "Multiple messages are scheduled and sequence numbers are {}".format(
                    sequence_numbers
                )
            )
            # Cancel both the single message and the batch before they fire.
            sender.cancel_scheduled_messages(sequence_number)
            sender.cancel_scheduled_messages(sequence_numbers)
            print("All scheduled messages are cancelled.")


if __name__ == "__main__":
    main()
|
import pytest
# Each entry: (adjacency mapping, expected node list, expected edge list).
GRAPHS = [
    ({},
     [],
     []),
    ({'nodeA': {}},
     ['nodeA'],
     []),
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {}},
     ['nodeA', 'nodeB'],
     [('nodeA', 'nodeB')]),
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {'nodeA': 'weight'}},
     ['nodeA', 'nodeB'],
     [('nodeA', 'nodeB'), ('nodeB', 'nodeA')]),
    ({'nodeA': {'nodeB': 'weight', 'nodeC': 'weight'},
      'nodeB': {'nodeA': 'weight'},
      'nodeC': {'nodeA': 'weight', 'nodeC': 'weight'}},
     ['nodeA', 'nodeB', 'nodeC'],
     [('nodeA', 'nodeB'),
      ('nodeA', 'nodeC'),
      ('nodeB', 'nodeA'),
      ('nodeC', 'nodeA'),
      ('nodeC', 'nodeC')]),
]
# Each entry: (starting graph, node to insert, expected graph afterwards).
GRAPHS_FOR_NODE_INSERT = [
    ({},
     'nodeN',
     {'nodeN': {}}),
    ({'nodeA': {'nodeB': 'weight', 'nodeC': 'weight'}},
     'nodeN',
     {'nodeA': {'nodeB': 'weight', 'nodeC': 'weight'},
      'nodeN': {}}),
    ({'nodeA': {'nodeA': 'weight', 'nodeB': 'weight'},
      'nodeB': {'nodeC': 'weight', 'nodeA': 'weight'}},
     'nodeN',
     {'nodeA': {'nodeA': 'weight', 'nodeB': 'weight'},
      'nodeB': {'nodeC': 'weight', 'nodeA': 'weight'},
      'nodeN': {}}),
]
# Each entry: (starting graph, edge tail, edge head, expected graph afterwards).
GRAPHS_ADD_EDGE = [
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {'nodeA': 'weight'}},
     "nodeX",
     "nodeY",
     {'nodeA': {'nodeB': 'weight'},
      'nodeB': {'nodeA': 'weight'},
      'nodeX': {'nodeY': 'weight'},
      'nodeY': {}}),
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {'nodeA': 'weight'}},
     'nodeA',
     'nodeB',
     {'nodeA': {'nodeB': 'weight'},
      'nodeB': {'nodeA': 'weight'}}),
    ({'nodeA': {'nodeB': 'weight', 'nodeC': 'weight'},
      'nodeB': {'nodeA': 'weight'},
      'nodeC': {'nodeA': 'weight', 'nodeC': 'weight'}},
     'nodeB',
     'nodeC',
     {'nodeA': {'nodeB': 'weight', 'nodeC': 'weight'},
      'nodeB': {'nodeA': 'weight', 'nodeC': 'weight'},
      'nodeC': {'nodeA': 'weight', 'nodeC': 'weight'}}),
]
# Each entry: (starting graph, node to delete, expected graph afterwards).
GRAPHS_DEL_NODE = [
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {'nodeA': 'weight'},
      'nodeX': {'nodeY': 'weight'},
      'nodeY': {}},
     'nodeA',
     {'nodeB': {},
      'nodeX': {'nodeY': 'weight'},
      'nodeY': {}}),
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {'nodeA': 'weight'}},
     'nodeB',
     {'nodeA': {}}),
]
# Each entry: (starting graph, edge tail, edge head, expected graph afterwards).
GRAPHS_DEL_EDGE = [
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {}},
     'nodeA',
     'nodeB',
     {'nodeA': {},
      'nodeB': {}}),
    ({'nodeA': {'nodeB': 'weight', 'nodeC': 'weight'},
      'nodeB': {},
      'nodeC': {}},
     'nodeA',
     'nodeB',
     {'nodeA': {'nodeC': 'weight'},
      'nodeB': {},
      'nodeC': {}})
]
# Each entry: (graph, node, expected neighbors of that node).
NEIGHBORS = [
    ({'nodeA': {},
      'nodeB': {'nodeA': 'weight'}},
     'nodeB',
     ['nodeA']),
    ({'nodeA': {},
      'nodeB': {'nodeA': 'weight'}},
     'nodeA',
     []),
    ({'nodeA': {'nodeB': 'weight', 'nodeC': 'weight'},
      'nodeB': {'nodeA': 'weight'},
      'nodeC': {'nodeA': 'weight'}},
     'nodeA',
     ['nodeB', 'nodeC']),
]
# Each entry: (graph, n1, n2, whether a directed edge n1 -> n2 exists).
ADJACENT = [
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {}},
     'nodeA',
     'nodeB',
     True),
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {}},
     'nodeB',
     'nodeA',
     False),
]
# Each entry: (graph, n1, n2) where at least one endpoint is missing.
ADJACENT_NODES_GONE = [
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {}},
     'nodeX', 'nodeB'),
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {}},
     'nodeX', 'nodeY'),
    ({'nodeA': {'nodeB': 'weight'},
      'nodeB': {}},
     'nodeA', 'nodeY'),
]
# Each entry: (graph, start node, expected breadth-first visit order).
NODE_TRAVERSAL_BREADTH = [
    ({'A': {'B': 'weight', 'C': 'weight'},
      'B': {'A': 'weight', 'D': 'weight', 'E': 'weight'},
      'C': {'A': 'weight', 'F': 'weight', 'G': 'weight'},
      'D': {'B': 'weight', 'H': 'weight'},
      'E': {'B': 'weight'},
      'F': {'C': 'weight'},
      'G': {'C': 'weight'},
      'H': {'D': 'weight'}},
     'A',
     ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']),
    ({'A': {'B': 'weight', 'C': 'weight'},
      'B': {'C': 'weight', 'D': 'weight'},
      'C': {},
      'D': {}},
     'A',
     ['A', 'B', 'C', 'D']),
    ({'a': {}}, 'a', ['a']),
]
# Each entry: (graph, start node, expected depth-first visit order).
NODE_TRAVERSAL_DEPTH = [
    ({'A': {'B': 'weight', 'E': 'weight'},
      "B": {'C': 'weight', 'D': 'weight'},
      'E': {},
      'C': {},
      'D': {}},
     'A',
     ['A', 'E', 'B', 'D', 'C']),
    ({'A': {'B': 'weight', 'E': 'weight'},
      "B": {'C': 'weight', 'D': 'weight'},
      'E': {},
      'C': {'A': 'weight', 'E': 'weight'},
      'D': {}},
     'A',
     ['A', 'E', 'B', 'D', 'C']),
    ({'a': {'b': 'weight', 'g': 'weight'},
      'b': {'c': 'weight'},
      'g': {'h': 'weight', 'j': 'weight'},
      'c': {'d': 'weight'},
      'h': {'i': 'weight'},
      'j': {'k': 'weight'},
      'd': {'e': 'weight', 'f': 'weight'},
      'i': {},
      'k': {},
      'e': {},
      'f': {}},
     'a',
     ['a', 'g', 'j', 'k', 'h', 'i', 'b', 'c', 'd', 'f', 'e']),
    ({'a': {}}, 'a', ['a']),
]
# Each entry: (graph, edge tail, edge head, expected stored weight).
GET_WEIGHT = [
    ({'A': {'B': 'weight1', 'E': 'weight2'},
      "B": {'C': 'weight3', 'D': 'weight4'},
      'E': {},
      'C': {},
      'D': {}},
     'A',
     'B',
     'weight1',),
    ({'A': {'B': 'weight1', 'E': 'weight2'},
      "B": {'C': 'weight3', 'D': 'weight4'},
      'E': {},
      'C': {},
      'D': {}},
     'B',
     'C',
     'weight3',),
    ({'A': {'B': 'weight1', 'E': 'weight2'},
      "B": {'C': 'weight3', 'D': 'weight4'},
      'E': {},
      'C': {},
      'D': {}},
     'B',
     'D',
     'weight4',),
]
@pytest.fixture(scope='function')
def graph_fixture():
    """Return a fresh Graph instance for each test.

    Bug fix: ``scope`` was previously a *parameter of the fixture function*,
    which pytest interprets as a fixture request (causing a
    "fixture 'scope' not found" error). Fixture scope belongs in the
    decorator.
    """
    from graph import Graph
    return Graph()
@pytest.mark.parametrize(("built_graph", "node", "expected"), GRAPHS_DEL_NODE)
def test_del_node_exists(graph_fixture, built_graph, node, expected):
    """del_node removes the node and every edge pointing at it."""
    graph_fixture._container = built_graph
    graph_fixture.del_node(node)
    assert expected == graph_fixture._container


@pytest.mark.parametrize(("built_graph", "node_list", "edge_list"), GRAPHS)
def test_nodes(graph_fixture, built_graph, node_list, edge_list):
    """nodes() reports every node in the container."""
    graph_fixture._container = built_graph
    assert set(node_list) == set(graph_fixture.nodes())


@pytest.mark.parametrize(("built_graph", "node_list", "edge_list"), GRAPHS)
def test_edges(graph_fixture, built_graph, node_list, edge_list):
    """edges() reports every directed edge in the container."""
    graph_fixture._container = built_graph
    returned = graph_fixture.edges()
    assert set(returned) == set(edge_list)
@pytest.mark.parametrize(("built_graph", "new_node", "expected"),
                         GRAPHS_FOR_NODE_INSERT)
def test_add_node(graph_fixture, built_graph, new_node, expected):
    """add_node inserts an isolated node without touching existing ones."""
    graph_fixture._container = built_graph
    graph_fixture.add_node(new_node)
    assert expected == graph_fixture._container


@pytest.mark.parametrize(("built_graph", "n1", "n2", "expected"),
                         GRAPHS_ADD_EDGE)
def test_add_edge(graph_fixture, built_graph, n1, n2, expected):
    """add_edge creates missing endpoints and the directed edge n1 -> n2."""
    graph_fixture._container = built_graph
    graph_fixture.add_edge(n1, n2)
    assert expected == graph_fixture._container
def test_del_node_not_exists(graph_fixture):
    """Deleting an unknown node raises KeyError."""
    graph_fixture._container = {'nodeA': {'nodeA': 'weight'}, 'nodeB': {}}
    with pytest.raises(KeyError):
        graph_fixture.del_node('nodeX')


@pytest.mark.parametrize(("built_graph", "node1", "node2", "expected"),
                         GRAPHS_DEL_EDGE)
def test_del_edge(graph_fixture, built_graph, node1, node2, expected):
    """del_edge removes only the named directed edge."""
    graph_fixture._container = built_graph
    graph_fixture.del_edge(node1, node2)
    assert expected == graph_fixture._container


def test_del_edge_not_exists(graph_fixture):
    """Deleting a missing edge raises ValueError."""
    graph_fixture._container = {'nodeA': {}}
    with pytest.raises(ValueError):
        graph_fixture.del_edge('nodeA', 'nodeB')
def test_has_node_true(graph_fixture):
    """has_node is truthy for a node present in the container."""
    graph_fixture._container = {'nodeA': {}}
    assert graph_fixture.has_node('nodeA')


def test_has_node_false(graph_fixture):
    """has_node is falsy for a node absent from the container."""
    graph_fixture._container = {'nodeA': {}}
    assert not graph_fixture.has_node('nodeB')


@pytest.mark.parametrize(("built_graph", 'node', 'expected'), NEIGHBORS)
def test_neighbors(graph_fixture, built_graph, node, expected):
    """neighbors() yields exactly the out-neighbors of the node."""
    graph_fixture._container = built_graph
    assert set(expected) == set(graph_fixture.neighbors(node))


def test_neighbors_none(graph_fixture):
    """Asking for neighbors of an unknown node raises KeyError."""
    graph_fixture._container = {'nodeA': {}}
    with pytest.raises(KeyError):
        graph_fixture.neighbors('nodeB')
@pytest.mark.parametrize(('built_graph', 'n1', 'n2', 'expected'), ADJACENT)
def test_adjacent(graph_fixture, built_graph, n1, n2, expected):
    """adjacent() is True only when the directed edge n1 -> n2 exists."""
    graph_fixture._container = built_graph
    assert expected == graph_fixture.adjacent(n1, n2)


@pytest.mark.parametrize(('built_graph', 'n1', 'n2'), ADJACENT_NODES_GONE)
def test_adjacent_not_exists(graph_fixture, built_graph, n1, n2):
    """adjacent() raises KeyError when either endpoint is missing."""
    graph_fixture._container = built_graph
    with pytest.raises(KeyError):
        graph_fixture.adjacent(n1, n2)
@pytest.mark.parametrize(('built_graph', 'node', 'expected'), NODE_TRAVERSAL_BREADTH)
def test_traverse_breadth(graph_fixture, built_graph, node, expected):
    """Breadth-first traversal visits nodes level by level."""
    graph_fixture._container = built_graph
    assert expected == graph_fixture.breadth_first_traversal(node)


def test_empty_graph_breadth(graph_fixture):
    """Breadth-first traversal of an empty graph raises IndexError."""
    graph_fixture._container = {}
    with pytest.raises(IndexError):
        graph_fixture.breadth_first_traversal('X')


@pytest.mark.parametrize(('built_graph', 'node', 'expected'), NODE_TRAVERSAL_DEPTH)
def test_traverse_depth(graph_fixture, built_graph, node, expected):
    """Depth-first traversal follows each branch before backtracking."""
    graph_fixture._container = built_graph
    assert expected == graph_fixture.depth_first_traversal(node)


def test_traverse_depth_empty(graph_fixture):
    """Depth-first traversal of an empty graph raises IndexError."""
    graph_fixture._container = {}
    with pytest.raises(IndexError):
        graph_fixture.depth_first_traversal('node')
@pytest.mark.parametrize(('built_graph', 'n1', 'n2', 'expected'), GET_WEIGHT)
def test_get_weight(graph_fixture, built_graph, n1, n2, expected):
    """get_weight returns the weight stored on the edge n1 -> n2."""
    graph_fixture._container = built_graph
    assert expected == graph_fixture.get_weight(n1, n2)
|
'''
Crea un programa que analice el fichero y muestre:
- Los años y sus temperaturas (máxima, mínima y media), ordenados por año
- Los años y su tempertura media, ordenados por temperatura en orden descendente
- Crea un fichero html:
Encabezado: Temperaturas de Zaragoza
Fuente: (la url, como un enlace)
Tabla con las temperaturas (media, máxima y mínima)
Los encabezados de la tabla serán claros.
'''
def obtener_listado(f):
    """Return one split-field record per line of *f*, skipping the header.

    :param f: any iterable of strings (typically an open text file)
    """
    lineas = iter(f)
    next(lineas, None)  # discard the header line, if any
    return [linea.split() for linea in lineas]
def listado_anio(f):
    """Print the first four fields of every record, sorted by year."""
    registros = sorted(obtener_listado(f))
    for registro in registros:
        print(registro[0:4])
def listado_temp(f):
    """Print records ordered by mean temperature (column 1), descending,
    skipping records with a missing value ('-') in columns 1-3."""
    # Bug fix: itemgetter was only imported inside crear_html, so calling
    # this function raised NameError. Import it locally here.
    from operator import itemgetter
    listado = obtener_listado(f)
    listado.sort(key=itemgetter(1), reverse=True)
    for x in listado:
        if not '-' in x[1:4]:
            print(x[0:4])
def crear_html(f):
    """Create the HTML report file.

    NOTE(review): this chunk's indentation was lost, so the original
    nesting is unknown; the imports, file-open guard and interactive menu
    below may have lived at module level rather than inside this function.
    Confirm against the original file before relying on this structure.
    """
    import sys
    from operator import itemgetter
    try:
        # Risky operation: the data file may not exist.
        f = open('temperaturas_zaragoza.txt')
    except IOError:
        print 'Error, el fichero temperaturas_zaragoza no existe'
        sys.exit()
    # Interactive menu (Python 2 raw_input).
    opcion = int(raw_input('''¿Qué quieres hacer?:
1 - Listado ordenado por año (1)
2 - Listado ordenado por temperatura media (2)
3 - Crear archivo html (3)
>> '''))
    if opcion == 1:
        listado_anio(f)
    if opcion == 2:
        listado_temp(f)
    if opcion == 3:
        crear_html(f)
|
'''
Scripts for automatically setting clean parameters
'''
import numpy as np
from warnings import warn
import os
from taskinit import tb
from cleanhelper import cleanhelper
def set_imagermode(vis, source):
    """Return "mosaic" when more than one field name contains *source*,
    otherwise "csclean"."""
    tb.open(os.path.join(vis, 'FIELD'))
    field_names = tb.getcol('NAME')
    tb.close()
    # Count the fields whose name contains the source string.
    matches = sum(1 for field in field_names if source in field)
    return "mosaic" if matches > 1 else "csclean"
def has_field(vis, source):
    '''
    Check if source is contained in at least one of the field names.
    '''
    tb.open(os.path.join(vis, 'FIELD'))
    field_names = tb.getcol('NAME')
    tb.close()
    return any(source in field for field in field_names)
try:
import analysisUtils as au
    def set_cellsize(vis, spw, sample_factor=6., baseline_percentile=95,
                     return_type="str"):
        '''
        Choose a CLEAN cell size that samples the expected synthesized beam.

        Parameters
        ----------
        vis : str
            Name of MS.
        spw : int
            Which SPW in the MS to consider.
        sample_factor : float
            Number of cells across the synthesized beam.
        baseline_percentile : int or float between 0 and 100
            Passed through to find_expected_beams.
        return_type : str
            "str" returns e.g. "0.5arcsec"; anything else returns the
            bare numeric value.
        '''
        syn_beam, prim_beam = \
            find_expected_beams(vis, spw,
                                baseline_percentile=baseline_percentile)
        # Round the cell size to some fraction, which becomes finer if it was
        # previously rounded to 0
        round_factor = 10
        while True:
            sel_cell_value = \
                round((syn_beam / sample_factor) * round_factor) / round_factor
            if sel_cell_value == 0:
                round_factor += 5
            else:
                break
        if return_type == "str":
            return str(sel_cell_value) + 'arcsec'
        else:
            return sel_cell_value
def set_imagesize(vis, spw, source, sample_factor=6., pblevel=0.1,
max_size=15000, **kwargs):
'''
Set the image size for CLEAN to be a multiple of 2, 3, 5
based on the maximum baseline in the MS.
Parameters
----------
'''
if isinstance(max_size, (int, np.integer)):
max_size = [max_size] * 2
syn_beam, prim_beam = find_expected_beams(vis, spw)
cellsize = set_cellsize(vis, spw, sample_factor=sample_factor,
return_type='value', **kwargs)
if set_imagermode(vis, source) == "mosaic":
mosaic_props = get_mosaic_info(vis, spw, sourceid=source,
pblevel=pblevel)
sel_imsize = [int(np.ceil(mosaic_props["Size_RA"] / cellsize)),
int(np.ceil(mosaic_props["Size_Dec"] / cellsize))]
else:
sel_imsize = [int(round(prim_beam / cellsize))] * 2
# Check if this falls into the maximum allowed size. Otherwise, just
# use the max.
if sel_imsize[0] > max_size[0]:
warn("Shape in first dimension exceeds maximum. Using maximum"
" given.")
sel_imsize[0] = max_size[0]
if sel_imsize[1] > max_size[1]:
warn("Shape in second dimension exceeds maximum. Using maximum"
" given.")
sel_imsize[1] = max_size[1]
# The image size should be factorizable into some combo of
# 2, 3, 5 and 7 to work with clean so:
sel_imsize = [cleanhelper.getOptimumSize(size) for size in sel_imsize]
# Return the rounded value nearest to the original image size chosen.
return sel_imsize
def find_expected_beams(vis, spw, baseline_percentile=95):
'''
Return the expected synthesized beam (approximately) and the primary
beam size based on the baselines.
Parameters
----------
vis : str
Name of MS.
spw : int
Which SPW in the MS to consider.
baseline_percentile : int or float between 0 and 100
The percentile of the longest baseline to estimate the synthesized
beam with.
Returns
-------
syn_beam : float
Approximate Synthesized beam in arcseconds
prim_beam : float
Primary beam size in arcseconds.
'''
# Get percentile of max baseline and dish size
bline_max = getBaselinePercentile(vis, baseline_percentile)
tb.open(os.path.join(vis, 'ANTENNA'))
dishs = tb.getcol('DISH_DIAMETER')
dish_min = min(dishs)
tb.close()
tb.open(os.path.join(vis, 'SPECTRAL_WINDOW'))
ref_freqs = tb.getcol('REF_FREQUENCY')
try:
freq = ref_freqs[spw]
except IndexError:
raise IndexError("Given SPW ({0}) is not within the range of SPWs"
"found ({1})".format(spw, len(ref_freqs)))
# Find the beam
# XXX
# When astropy is easier to install (CASA 4.7??), switch to using the
# defined constants.
centre_lambda = 299792458.0 / freq
syn_beam = (centre_lambda / bline_max) * 180 / np.pi * 3600
prim_beam = (centre_lambda / dish_min) * 180 / np.pi * 3600
return syn_beam, prim_beam
def getBaselinePercentile(msFile, percentile):
"""
Based on getBaselineExtrema from analysisUtils
"""
return np.percentile(getBaselines(msFile), percentile)
def getBaselines(msFile):
'''
Return all baselines
'''
tb.open(msFile + '/ANTENNA')
positions = np.transpose(tb.getcol('POSITION'))
tb.close()
all_lengths = []
for i in range(len(positions)):
for j in range(i + 1, len(positions)):
length = au.computeBaselineLength(positions[i],
positions[j])
if length != 0.0:
all_lengths.append(length)
all_lengths = np.array(all_lengths)
return all_lengths
    def get_mosaic_info(vis, spw, sourceid=None, intent='TARGET', pblevel=0.1):
        '''
        Return image size based on mosaic fields

        Parameters
        ----------
        vis : str
            MS Name
        spw : str or int
            If str, searches for an exact match with the names in the MS. If
            int, is the index of SPW in the MS.
        sourceid : str, optional
            The field names used will contain sourceid.
        intent : str, optional
            Use every field with the given intent (wildcards used by default).
        pblevel : float between 0 and 1
            PB level that defines the edges of the mosaic.

        Returns
        -------
        dict
            Central field id/name, centre RA/Dec, and the mosaic extent
            ("Size_RA"/"Size_Dec", in arcsec) down to the given pblevel.
        '''
        mytb = au.createCasaTool(au.tbtool)
        # Check SPWs to make sure given choice is valid
        mytb.open(vis + '/SPECTRAL_WINDOW')
        spwNames = mytb.getcol('NAME')
        if isinstance(spw, str):
            match = False
            for spw_name in spwNames:
                if spw == spw_name:
                    match = True
            if not match:
                raise ValueError("The given SPW ({0}) is not in the MS SPW"
                                 " names ({1})".format(spw, spwNames))
        elif isinstance(spw, (int, np.integer)):
            try:
                spwNames[spw]
            except IndexError:
                raise IndexError("The given SPW index {0} is not in the range"
                                 " of SPWs in the MS ({1})."
                                 .format(spw, len(spwNames)))
        else:
            raise TypeError("spw must be a str or int.")
        refFreq = mytb.getcol("REF_FREQUENCY")[spw]
        lambdaMeters = au.c_mks / refFreq
        mytb.close()
        # Get field info
        mytb.open(vis + '/FIELD')
        delayDir = mytb.getcol('DELAY_DIR')
        # Convert RA from radians to hours, wrap negatives, then to degrees.
        ra = delayDir[0, :][0] * 12 / np.pi
        for i in range(len(ra)):
            if ra[i] < 0:
                ra[i] += 24
        ra *= 15
        dec = np.degrees(delayDir[1, :][0])
        # First choose fields by given sourceid
        if sourceid is not None:
            names = mytb.getcol('NAME')
            fields = mytb.getcol("SOURCE_ID")
            good_names = []
            good_fields = []
            for name, field in zip(names, fields):
                # Check if it has sourceid
                if name.find(sourceid) != -1:
                    good_names.append(name)
                    good_fields.append(field)
            names = good_names
            fields = good_fields
        # Then try choosing all fields based on the given intent.
        elif intent is not None:
            # Ensure a string
            intent = str(intent)
            mymsmd = au.createCasaTool(au.msmdtool)
            mymsmd.open(vis)
            intentsToSearch = '*' + intent + '*'
            fields = mymsmd.fieldsforintent(intentsToSearch)
            names = mymsmd.namesforfields(fields)
            mymsmd.close()
        # One or the other must be given
        else:
            raise ValueError("Either sourceid or intent must be given.")
        mytb.close()
        # Restrict coordinates to the selected fields only.
        ra = ra[fields]
        dec = dec[fields]
        raAverageDegrees = np.mean(ra)
        decAverageDegrees = np.mean(dec)
        # Offsets from the mosaic centre in arcsec (RA scaled by cos(dec)).
        raRelativeArcsec = 3600 * (ra - raAverageDegrees) * \
            np.cos(np.deg2rad(decAverageDegrees))
        decRelativeArcsec = 3600 * (dec - decAverageDegrees)
        centralField = au.findNearestField(ra, dec,
                                           raAverageDegrees,
                                           decAverageDegrees)[0]
        # This next step is crucial, as it converts from the field number
        # determined from a subset list back to the full list.
        centralFieldName = names[centralField]
        centralField = fields[centralField]
        # Find which antenna have data
        mytb.open(vis)
        antennasWithData = np.sort(np.unique(mytb.getcol('ANTENNA1')))
        mytb.close()
        if antennasWithData.size == 0:
            raise Warning("No antennas with data found.")
        # Now we need the dish diameters
        mytb.open(vis + "/ANTENNA")
        # These are in m
        dish_diameters = \
            np.unique(mytb.getcol("DISH_DIAMETER")[antennasWithData])
        mytb.close()
        # Find maxradius: largest primary-beam radius (deg) over dish sizes.
        maxradius = 0
        for diam in dish_diameters:
            # NOTE(review): wavelength is passed as lambdaMeters * 1000 —
            # presumably primaryBeamArcsec expects mm; confirm the unit.
            arcsec = 0.5 * \
                au.primaryBeamArcsec(wavelength=lambdaMeters * 1000,
                                     diameter=diam, showEquation=False)
            radius = arcsec / 3600.0
            if radius > maxradius:
                maxradius = radius
        # Border about each point, down to the given pblevel
        border = 2 * maxradius * au.gaussianBeamOffset(pblevel) * 3600.
        size_ra = np.ptp(raRelativeArcsec) + 2 * border
        size_dec = np.ptp(decRelativeArcsec) + 2 * border
        mosaicInfo = {"Central_Field_ID": centralField,
                      "Central_Field_Name": centralFieldName,
                      "Center_RA": ra,
                      "Center_Dec": dec,
                      "Size_RA": size_ra,
                      "Size_Dec": size_dec}
        return mosaicInfo
def append_to_cube(folder, prefix, suffix, num_imgs,
cube_name, chunk_size=250,
delete_chunk_cubes=True,
concat_kwargs={'relax': True, 'reorder': False,
'overwrite': True}):
'''
Append single images into a cube. Must be continuous along
spectral dimension.
Individual images in the folder must be sequentially numbered.
'''
from os.path import join as osjoin
import os
try:
import casatools
ia = casatools.image()
except ImportError:
try:
from taskinit import iatool
ia = iatool()
except ImportError:
raise ImportError("Cannot import iatool.")
imgs = [osjoin(folder, "{0}_{1}.{2}".format(prefix, chan, suffix))
for chan in range(num_imgs)]
# Make sure these all exist
for img in imgs:
if not os.path.exists(img):
raise OSError("{} does not exist.".format(img))
# Concatenate in chunks (Default of 250) b/c CASA doesn't like
# having too many images open at once.
num_chunks = (num_imgs // chunk_size) + 1
chunk_cubes = []
for i in range(num_chunks):
start = chunk_size * i
stop = min(chunk_size * (i + 1), num_imgs)
imgs_chunk = imgs[start:stop]
chunk_cube_name = "{0}_{1}".format(cube_name, i)
chunk_cubes.append(chunk_cube_name)
ia.imageconcat(outfile=chunk_cube_name,
infiles=imgs_chunk,
**concat_kwargs)
ia.done()
ia.close()
# Concat the chunks together
ia.imageconcat(outfile=cube_name,
infiles=chunk_cubes,
**concat_kwargs)
ia.done()
ia.close()
if delete_chunk_cubes:
for chunk_cube_name in chunk_cubes:
os.system("rm -rf {}".format(chunk_cube_name))
except ImportError:
warn("Could not import analysisUtils.")
|
"""
Tests for the Woopra template tags and filters.
"""
import pytest
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from utils import TagTestCase
from analytical.templatetags.woopra import WoopraNode
from analytical.utils import AnalyticalException
@override_settings(WOOPRA_DOMAIN='example.com')
class WoopraTagTestCase(TagTestCase):
    """
    Tests for the ``woopra`` template tag.
    """

    def test_tag(self):
        r = self.render_tag('woopra', 'woopra')
        assert 'var woo_settings = {"domain": "example.com"};' in r

    def test_node(self):
        r = WoopraNode().render(Context({}))
        assert 'var woo_settings = {"domain": "example.com"};' in r

    @override_settings(WOOPRA_DOMAIN=None)
    def test_no_domain(self):
        with pytest.raises(AnalyticalException):
            WoopraNode()

    @override_settings(WOOPRA_DOMAIN='this is not a domain')
    def test_wrong_domain(self):
        with pytest.raises(AnalyticalException):
            WoopraNode()

    @override_settings(WOOPRA_IDLE_TIMEOUT=1234)
    def test_idle_timeout(self):
        r = WoopraNode().render(Context({}))
        assert 'var woo_settings = {"domain": "example.com", "idle_timeout": "1234"};' in r

    def test_custom(self):
        r = WoopraNode().render(Context({
            'woopra_var1': 'val1',
            'woopra_var2': 'val2',
        }))
        assert 'var woo_visitor = {"var1": "val1", "var2": "val2"};' in r

    @override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
    def test_identify_name_and_email(self):
        r = WoopraNode().render(Context({
            'user': User(username='test',
                         first_name='Firstname',
                         last_name='Lastname',
                         email="test@example.com"),
        }))
        # BUG FIX: this assertion was previously split across two statements,
        # so it only asserted a non-empty string literal (always true) and the
        # actual membership check was a discarded expression.
        assert ('var woo_visitor = '
                '{"email": "test@example.com", "name": "Firstname Lastname"};') in r

    @override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
    def test_identify_username_no_email(self):
        r = WoopraNode().render(Context({'user': User(username='test')}))
        assert 'var woo_visitor = {"name": "test"};' in r

    @override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
    def test_no_identify_when_explicit_name(self):
        r = WoopraNode().render(Context({
            'woopra_name': 'explicit',
            'user': User(username='implicit'),
        }))
        assert 'var woo_visitor = {"name": "explicit"};' in r

    @override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
    def test_no_identify_when_explicit_email(self):
        r = WoopraNode().render(Context({
            'woopra_email': 'explicit',
            'user': User(username='implicit'),
        }))
        assert 'var woo_visitor = {"email": "explicit"};' in r

    @override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
    def test_identify_anonymous_user(self):
        r = WoopraNode().render(Context({'user': AnonymousUser()}))
        assert 'var woo_visitor = {};' in r

    @override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
    def test_render_internal_ip(self):
        req = HttpRequest()
        req.META['REMOTE_ADDR'] = '1.1.1.1'
        context = Context({'request': req})
        r = WoopraNode().render(context)
        assert r.startswith('<!-- Woopra disabled on internal IP address')
        assert r.endswith('-->')
|
from .StateBase import StateBase
from neo.Core.Fixed8 import Fixed8
from neo.Core.IO.BinaryReader import BinaryReader
from neo.IO.MemoryStream import StreamManager
from neo.Core.AssetType import AssetType
from neo.Core.UInt160 import UInt160
from neo.Core.Cryptography.Crypto import Crypto
from neo.Core.Cryptography.ECCurve import EllipticCurve, ECDSA
from neo.Core.Size import Size as s
from neo.Core.Size import GetVarSize
class AssetState(StateBase):
    """Blockchain state of a registered asset (governing/utility token or user asset)."""
    def Size(self):
        # Serialized byte size: base state + asset id (uint256) + type byte +
        # var-length name + Amount + Available + precision byte + fee-mode
        # byte + Fee + fee address + owner pubkey + admin + issuer +
        # expiration (uint32) + frozen flag.
        return super(AssetState, self).Size() + s.uint256 + s.uint8 + GetVarSize(
            self.Name) + self.Amount.Size() + self.Available.Size() + s.uint8 + s.uint8 + self.Fee.Size() + s.uint160 + self.Owner.Size() + s.uint160 + s.uint160 + s.uint32 + s.uint8
    def __init__(self, asset_id=None, asset_type=None, name=None, amount=None, available=None,
                 precision=0, fee_mode=0, fee=None, fee_addr=None, owner=None,
                 admin=None, issuer=None, expiration=None, is_frozen=False):
        """
        Create an instance.
        Args:
            asset_id (UInt256):
            asset_type (neo.Core.AssetType):
            name (str): the asset name.
            amount (Fixed8):
            available (Fixed8):
            precision (int): number of decimals the asset has.
            fee_mode (int):
            fee (Fixed8):
            fee_addr (UInt160): where the fee will be send to.
            owner (EllipticCurve.ECPoint):
            admin (UInt160): the administrator of the asset.
            issuer (UInt160): the issuer of the asset.
            expiration (UInt32): the block number on which the asset expires.
            is_frozen (bool):
        Raises:
            Exception: if owner is given but is not an ECPoint instance.
        """
        self.AssetId = asset_id
        self.AssetType = asset_type
        self.Name = name
        # Fixed8 values default to zero instead of None so serialization and
        # arithmetic never have to special-case missing values.
        self.Amount = Fixed8(0) if amount is None else amount
        self.Available = Fixed8(0) if available is None else available
        self.Precision = precision
        self.FeeMode = fee_mode
        self.Fee = Fixed8(0) if fee is None else fee
        # Default fee address is the all-zero script hash.
        self.FeeAddress = UInt160(data=bytearray(20)) if fee_addr is None else fee_addr
        if owner is not None and type(owner) is not EllipticCurve.ECPoint:
            raise Exception("Owner must be ECPoint Instance")
        self.Owner = owner
        self.Admin = admin
        self.Issuer = issuer
        self.Expiration = expiration
        self.IsFrozen = is_frozen
    # def Size(self):
    #     return super(AssetState, self).Size()
    @staticmethod
    def DeserializeFromDB(buffer):
        """
        Deserialize full object.
        Args:
            buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.
        Returns:
            AssetState:
        """
        m = StreamManager.GetStream(buffer)
        reader = BinaryReader(m)
        account = AssetState()
        account.Deserialize(reader)
        StreamManager.ReleaseStream(m)
        return account
    def Deserialize(self, reader):
        """
        Deserialize full object.
        Args:
            reader (neo.Core.IO.BinaryReader):
        """
        super(AssetState, self).Deserialize(reader)
        self.AssetId = reader.ReadUInt256()
        self.AssetType = ord(reader.ReadByte())
        self.Name = reader.ReadVarString()
        position = reader.stream.tell()
        try:
            self.Amount = reader.ReadFixed8()
        except Exception:
            # NOTE(review): the fallback rewinds and retries the exact same
            # read, which will fail again unless ReadFixed8 has first-failure
            # side effects — confirm the intent before simplifying.
            reader.stream.seek(position)
            self.Amount = reader.ReadFixed8()
        self.Available = reader.ReadFixed8()
        self.Precision = ord(reader.ReadByte())
        # fee mode byte: read but discarded (always written as 0x00 below).
        reader.ReadByte()
        self.Fee = reader.ReadFixed8()
        self.FeeAddress = reader.ReadUInt160()
        self.Owner = ECDSA.Deserialize_Secp256r1(reader)
        self.Admin = reader.ReadUInt160()
        self.Issuer = reader.ReadUInt160()
        self.Expiration = reader.ReadUInt32()
        self.IsFrozen = reader.ReadBool()
    def Serialize(self, writer):
        """
        Serialize full object.
        Args:
            writer (neo.IO.BinaryWriter):
        """
        super(AssetState, self).Serialize(writer)
        writer.WriteUInt256(self.AssetId)
        writer.WriteByte(self.AssetType)
        writer.WriteVarString(self.Name)
        # Non-negative amounts are written unsigned; negatives keep the
        # signed encoding.
        if self.Amount.value > -1:
            writer.WriteFixed8(self.Amount, unsigned=True)
        else:
            writer.WriteFixed8(self.Amount)
        if type(self.Available) is not Fixed8:
            raise Exception("AVAILABLE IS NOT FIXED 8!")
        writer.WriteFixed8(self.Available, unsigned=True)
        writer.WriteByte(self.Precision)
        # Fee-mode placeholder byte; mirrors the discarded byte in Deserialize.
        writer.WriteByte(b'\x00')
        writer.WriteFixed8(self.Fee)
        writer.WriteUInt160(self.FeeAddress)
        self.Owner.Serialize(writer)
        writer.WriteUInt160(self.Admin)
        writer.WriteUInt160(self.Issuer)
        writer.WriteUInt32(self.Expiration)
        writer.WriteBool(self.IsFrozen)
    def GetName(self):
        """
        Get the asset name based on its type.
        Returns:
            str: 'NEO' or 'NEOGas'
        """
        if self.AssetType == AssetType.GoverningToken:
            return "NEO"
        elif self.AssetType == AssetType.UtilityToken:
            return "NEOGas"
        # Other assets: fall back to the stored name, decoding raw bytes.
        if type(self.Name) is bytes:
            return self.Name.decode('utf-8')
        return self.Name
    def ToJson(self):
        """
        Convert object members to a dictionary that can be parsed as JSON.
        Returns:
            dict:
        """
        return {
            'assetId': self.AssetId.To0xString(),
            'assetType': self.AssetType,
            'name': self.GetName(),
            'amount': self.Amount.value,
            'available': self.Available.value,
            'precision': self.Precision,
            'fee': self.Fee.value,
            'address': self.FeeAddress.ToString(),
            'owner': self.Owner.ToString(),
            'admin': Crypto.ToAddress(self.Admin),
            'issuer': Crypto.ToAddress(self.Issuer),
            'expiration': self.Expiration,
            'is_frozen': self.IsFrozen
        }
    def Clone(self):
        # NOTE(review): FeeMode is not forwarded, so a clone always gets the
        # default fee_mode of 0 — confirm whether that is intentional.
        return AssetState(asset_id=self.AssetId, asset_type=self.AssetType, name=self.Name, amount=self.Amount, available=self.Available, precision=self.Precision, fee=self.Fee, fee_addr=self.FeeAddress, owner=self.Owner, admin=self.Admin, issuer=self.Issuer, expiration=self.Expiration, is_frozen=self.IsFrozen)
|
from Devices.Input import Input
from Devices.Timer import Timer
from Devices.AnalogInput import AnalogInput
from Devices.Output import Output
class DeviceManager:
    """Registry of named input and output devices.

    Inputs (simple inputs, timers, analog inputs) and outputs live in
    separate name-keyed dictionaries; duplicate names within a dictionary
    are rejected with KeyError before the device is constructed.
    """

    def __init__(self):
        self.inputs = {}
        self.outputs = {}

    def _check_new_input(self, name):
        # Reject duplicate input names before registration.
        if name in self.inputs:
            raise KeyError('Cannot create device with name %s because input with that name already exists' % name)

    def _check_new_output(self, name):
        # Reject duplicate output names before registration.
        if name in self.outputs:
            raise KeyError('Cannot create device with name %s because output with that name already exists' % name)

    def addSimpleInput(self, name, location, invert = False):
        """Register a digital Input device under *name*."""
        self._check_new_input(name)
        self.inputs[name] = Input(name, location, invert)

    def addTimer(self, name, interval = 's'):
        """Register a Timer input under *name*."""
        self._check_new_input(name)
        self.inputs[name] = Timer(name, interval)

    def addAnalogInput(self, name, location):
        """Register an AnalogInput device under *name*."""
        self._check_new_input(name)
        self.inputs[name] = AnalogInput(name, location)

    def addOutput(self, name, location, invert = False):
        """Register an Output device under *name*."""
        self._check_new_output(name)
        self.outputs[name] = Output(name, location, invert)

    def read(self, name):
        """Return the current value of the named input."""
        if name not in self.inputs:
            raise KeyError('Cannot find input with name %s, unable to read' % name)
        return self.inputs[name].read()

    def turnOn(self, name):
        """Switch the named output on."""
        if name not in self.outputs:
            raise KeyError('Cannot find output with name %s, unable to turn on' % name)
        self.outputs[name].on()

    def turnOff(self, name):
        """Switch the named output off."""
        if name not in self.outputs:
            raise KeyError('Cannot find output with name %s, unable to turn off' % name)
        self.outputs[name].off()
|
# Idiom: detect whether `x` is already bound in this scope. Referencing an
# unbound name raises NameError, which is translated into a None default.
try:
    x
except NameError:
    x = None
if x is None:
    # NOTE(review): some_fallback_operation/some_operation are not defined in
    # this view — confirm they exist at runtime.
    some_fallback_operation()
else:
    some_operation(x)
|
# Grid geometry for the exploration map: square cells of 20x20 pixels
# covering a 400x400 window.
dCellSize = 20
WindowWidth = 400
WindowHeight = 400
class SCell(object):
    """One map grid cell; counts how many ticks an agent has spent inside it."""
    def __init__(self, xmin, xmax, ymin, ymax):
        # Tick counter for time spent in this cell.
        self._iTicksSpentHere = 0
        self._left = xmin
        self._right = xmax
        self._top = ymin
        # NOTE(review): inconsistent naming — every other bound is
        # underscore-prefixed; confirm nothing relies on the public name
        # `bottom` before renaming to `_bottom`.
        self.bottom = ymax
    def Update(self):
        """Record one tick spent in this cell."""
        self._iTicksSpentHere += 1
    def Reset(self):
        """Clear the tick counter."""
        self._iTicksSpentHere = 0
class CMapper(object):
    """Occupancy grid over the window; tracks time spent in each cell.

    Python 2 module (xrange, print statements).
    """
    def __init__(self, MaxRangeX, MaxRangeY):
        self._dCellSize = dCellSize
        # +1 so coordinates exactly on the far edge still map into a cell
        # (integer division under Python 2).
        self._NumCellsX = (MaxRangeX/self._dCellSize) + 1
        self._NumCellsY = (MaxRangeY/self._dCellSize) + 1
        # Grid of SCell objects, indexed [x][y].
        self._2DvecCells = []
        for x in xrange(self._NumCellsX):
            temp = []
            for y in xrange(self._NumCellsY):
                temp.append(SCell(x*self._dCellSize, (x+1)*self._dCellSize, y*self._dCellSize, (y+1)*self._dCellSize))
            self._2DvecCells.append(temp)
        self._iTotalCells = self._NumCellsX * self._NumCellsY
    def Update(self, xPos, yPos):
        """Record a visit at window position (xPos, yPos); out-of-window points are ignored."""
        if ((xPos < 0) or (xPos > WindowWidth) or (yPos < 0) or (yPos > WindowHeight)):
            return
        cellX = int(xPos/self._dCellSize)
        cellY = int(yPos/self._dCellSize)
        self._2DvecCells[cellX][cellY].Update()
    def TicksLingered(self, xPos, yPos):
        """Return ticks spent in the cell at (xPos, yPos); 999 for out-of-window points."""
        if ((xPos < 0) or (xPos > WindowWidth) or (yPos < 0) or (yPos > WindowHeight)):
            return 999
        cellX = int(xPos/self._dCellSize)
        cellY = int(yPos/self._dCellSize)
        return self._2DvecCells[cellX][cellY]._iTicksSpentHere
    def BeenVisited(self, xPos, yPos):
        # Stub: not yet implemented.
        print "Not implemented!"
    def Render(self):
        # Stub: not yet implemented.
        print "To be implemented"
    def Reset(self):
        """Reset every cell's tick counter."""
        for i in xrange(self._NumCellsX):
            for j in xrange(self._NumCellsY):
                self._2DvecCells[i][j].Reset()
    def NumCellsVisited(self):
        """Return the number of cells with at least one recorded tick."""
        total = 0
        for i in xrange(self._NumCellsX):
            for j in xrange(self._NumCellsY):
                if self._2DvecCells[i][j]._iTicksSpentHere > 0:
                    total += 1
        return total
|
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
import sklearn
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sef_dr.classification import evaluate_svm
from sef_dr.datasets import load_mnist
from sef_dr.linear import LinearSEF
def supervised_reduction(method=None):
    """Fit a supervised projection on MNIST and report linear-SVM accuracy.

    Parameters
    ----------
    method : str
        One of 'lda' (classical LDA), 's-lda' (SEF mimicking LDA), or
        's-lda-2x' (SEF with twice the LDA dimensionality).

    Raises
    ------
    ValueError
        If *method* is not one of the supported options.
    """
    # Load data and init seeds
    train_data, train_labels, test_data, test_labels = load_mnist(dataset_path='data')
    np.random.seed(1)
    sklearn.utils.check_random_state(1)
    n_train = 5000
    n_classes = len(np.unique(train_labels))
    if method == 'lda':
        proj = LinearDiscriminantAnalysis(n_components=n_classes - 1)
        proj.fit(train_data[:n_train, :], train_labels[:n_train])
    elif method == 's-lda':
        proj = LinearSEF(train_data.shape[1], output_dimensionality=(n_classes - 1))
        proj.cuda()
        proj.fit(data=train_data[:n_train, :], target_labels=train_labels[:n_train], epochs=50,
                 target='supervised', batch_size=128, regularizer_weight=1, learning_rate=0.001, verbose=True)
    elif method == 's-lda-2x':
        # SEF output dimensions are not limited
        proj = LinearSEF(train_data.shape[1], output_dimensionality=2 * (n_classes - 1))
        proj.cuda()
        proj.fit(data=train_data[:n_train, :], target_labels=train_labels[:n_train], epochs=50,
                 target='supervised', batch_size=128, regularizer_weight=1, learning_rate=0.001, verbose=True)
    else:
        # BUG FIX: an unrecognized method previously fell through and crashed
        # below with UnboundLocalError on `proj`.
        raise ValueError("Unknown method: {0}".format(method))
    acc = evaluate_svm(proj.transform(train_data[:n_train, :]), train_labels[:n_train],
                       proj.transform(test_data), test_labels)
    print("Method: ", method, " Test accuracy: ", 100 * acc, " %")
if __name__ == '__main__':
    # Run all three reduction methods back to back for comparison.
    print("LDA: ")
    supervised_reduction('lda')
    print("S-LDA: ")
    supervised_reduction('s-lda')
    print("S-LDA (2x): ")
    supervised_reduction('s-lda-2x')
|
# Django project settings.
# SECURITY NOTE(review): DEBUG must be False in production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# Default database: local SQLite file named 'db'.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'db', # Or path to database file if using sqlite3.
        'USER': '', # Not used with sqlite3.
        'PASSWORD': '', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}
# django-rq queue configuration: local Redis on the default port, DB 0.
RQ_QUEUES = {
    'default': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 0,
        'PASSWORD': '',
    }
}
# Email unhandled request errors to ADMINS when DEBUG is off.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
|
'''
Yescoin base58 encoding and decoding.
Based on https://yescointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
class SHA256:
    # Thin namespace alias so the code reads like the PyCrypto API
    # (SHA256.new(...).digest()); backed by hashlib.
    new = hashlib.sha256
if str != bytes:
    # Python 3.x
    # Iterating bytes already yields ints on Python 3, so ord becomes a
    # no-op and chr must produce a one-byte bytes object. These shadow the
    # builtins for the rest of this module.
    def ord(c):
        return c
    def chr(n):
        return bytes( (n,) )
# Base-58 alphabet: excludes the visually ambiguous characters 0, O, I, l.
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars
def b58encode(v):
    """Encode v, a string of bytes, to a base58 string."""
    # Fold the bytes into a single big-endian integer.
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += (256**i) * ord(c)
    # Peel off base-58 digits, least significant first, then restore order.
    digits = []
    while long_value >= __b58base:
        long_value, remainder = divmod(long_value, __b58base)
        digits.append(__b58chars[remainder])
    digits.append(__b58chars[long_value])
    encoded = ''.join(reversed(digits))
    # Yescoin leading-zero compression: each leading 0-byte of the input
    # becomes a leading '1' character.
    nPad = 0
    for c in v:
        if c == '\0':
            nPad += 1
        else:
            break
    return (__b58chars[0] * nPad) + encoded
def b58decode(v, length = None):
    """Decode base58 string v into a byte string; None when the length check fails."""
    # Fold the base-58 digits into a single integer.
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += __b58chars.find(c) * (__b58base**i)
    # Peel off bytes, least significant first.
    decoded = bytes()
    while long_value >= 256:
        long_value, remainder = divmod(long_value, 256)
        decoded = chr(remainder) + decoded
    decoded = chr(long_value) + decoded
    # Each leading '1' character expands back into a leading zero byte.
    nPad = 0
    for c in v:
        if c == __b58chars[0]:
            nPad += 1
        else:
            break
    decoded = chr(0) * nPad + decoded
    if length is not None and len(decoded) != length:
        return None
    return decoded
def checksum(v):
    """Return 32-bit checksum based on SHA256"""
    first_round = SHA256.new(v).digest()
    return SHA256.new(first_round).digest()[0:4]
def b58encode_chk(v):
    """b58encode a string, with 32-bit checksum"""
    payload = v + checksum(v)
    return b58encode(payload)
def b58decode_chk(v):
    """Decode a base58 string, verify and strip the 32-bit checksum.

    Returns the payload bytes, or None when decoding fails or the
    trailing checksum does not match.
    """
    result = b58decode(v)
    if result is None:
        return None
    # Last four bytes are the checksum of the payload.
    # FIX: the checksum was previously computed twice, with the first
    # result (`h3`) left unused; compute it once and reuse it.
    expected = checksum(result[:-4])
    if result[-4:] == expected:
        return result[:-4]
    else:
        return None
def get_bcaddress_version(strAddress):
    """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """
    addr = b58decode_chk(strAddress)
    # A valid address decodes to exactly 21 bytes: 1 version byte + 20 hash bytes.
    if addr is None or len(addr) != 21:
        return None
    return ord(addr[0])
if __name__ == '__main__':
    # Test case (from http://gitorious.org/yescoin/python-base58.git)
    # BUG FIX: `is 0` relied on CPython's small-integer identity caching and
    # emits a SyntaxWarning on Python 3.8+; use value equality instead.
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
    _ohai = 'o hai'.encode('ascii')
    _tmp = b58encode(_ohai)
    assert _tmp == 'DYB3oMS'
    assert b58decode(_tmp, 5) == _ohai
    print("Tests passed")
|
from __future__ import unicode_literals
from ..symbols import *
# Mapping from Unidic part-of-speech tags (comma-joined fields, optionally
# suffixed with a UD disambiguation hint) to Universal Dependencies POS tags.
TAG_MAP = {
    # Explanation of Unidic tags:
    # https://www.gavo.t.u-tokyo.ac.jp/~mine/japanese/nlp+slp/UNIDIC_manual.pdf
    # Universal Dependencies Mapping:
    # http://universaldependencies.org/ja/overview/morphology.html
    # http://universaldependencies.org/ja/pos/all.html
    "記号,一般,*,*":{POS: PUNCT}, # this includes characters used to represent sounds like ドレミ
    "記号,文字,*,*":{POS: PUNCT}, # this is for Greek and Latin characters used as symbols, as in math
    "感動詞,フィラー,*,*": {POS: INTJ},
    "感動詞,一般,*,*": {POS: INTJ},
    # this is specifically for unicode full-width space
    "空白,*,*,*": {POS: X},
    "形状詞,一般,*,*":{POS: ADJ},
    "形状詞,タリ,*,*":{POS: ADJ},
    "形状詞,助動詞語幹,*,*":{POS: ADJ},
    "形容詞,一般,*,*":{POS: ADJ},
    "形容詞,非自立可能,*,*":{POS: AUX}, # XXX ADJ if alone, AUX otherwise
    "助詞,格助詞,*,*":{POS: ADP},
    "助詞,係助詞,*,*":{POS: ADP},
    "助詞,終助詞,*,*":{POS: PART},
    "助詞,準体助詞,*,*":{POS: SCONJ}, # の as in 走るのが速い
    "助詞,接続助詞,*,*":{POS: SCONJ}, # verb ending て
    "助詞,副助詞,*,*":{POS: PART}, # ばかり, つつ after a verb
    "助動詞,*,*,*":{POS: AUX},
    "接続詞,*,*,*":{POS: SCONJ}, # XXX: might need refinement
    "接頭辞,*,*,*":{POS: NOUN},
    "接尾辞,形状詞的,*,*":{POS: ADJ}, # がち, チック
    "接尾辞,形容詞的,*,*":{POS: ADJ}, # -らしい
    "接尾辞,動詞的,*,*":{POS: NOUN}, # -じみ
    "接尾辞,名詞的,サ変可能,*":{POS: NOUN}, # XXX see 名詞,普通名詞,サ変可能,*
    "接尾辞,名詞的,一般,*":{POS: NOUN},
    "接尾辞,名詞的,助数詞,*":{POS: NOUN},
    "接尾辞,名詞的,副詞可能,*":{POS: NOUN}, # -後, -過ぎ
    "代名詞,*,*,*":{POS: PRON},
    "動詞,一般,*,*":{POS: VERB},
    "動詞,非自立可能,*,*":{POS: VERB}, # XXX VERB if alone, AUX otherwise
    "動詞,非自立可能,*,*,AUX":{POS: AUX},
    "動詞,非自立可能,*,*,VERB":{POS: VERB},
    "副詞,*,*,*":{POS: ADV},
    "補助記号,AA,一般,*":{POS: SYM}, # text art
    "補助記号,AA,顔文字,*":{POS: SYM}, # kaomoji
    "補助記号,一般,*,*":{POS: SYM},
    "補助記号,括弧開,*,*":{POS: PUNCT}, # open bracket
    "補助記号,括弧閉,*,*":{POS: PUNCT}, # close bracket
    "補助記号,句点,*,*":{POS: PUNCT}, # period or other EOS marker
    "補助記号,読点,*,*":{POS: PUNCT}, # comma
    "名詞,固有名詞,一般,*":{POS: PROPN}, # general proper noun
    "名詞,固有名詞,人名,一般":{POS: PROPN}, # person's name
    "名詞,固有名詞,人名,姓":{POS: PROPN}, # surname
    "名詞,固有名詞,人名,名":{POS: PROPN}, # first name
    "名詞,固有名詞,地名,一般":{POS: PROPN}, # place name
    "名詞,固有名詞,地名,国":{POS: PROPN}, # country name
    "名詞,助動詞語幹,*,*":{POS: AUX},
    "名詞,数詞,*,*":{POS: NUM}, # includes Chinese numerals
    "名詞,普通名詞,サ変可能,*":{POS: NOUN}, # XXX: sometimes VERB in UDv2; suru-verb noun
    "名詞,普通名詞,サ変可能,*,NOUN":{POS: NOUN},
    "名詞,普通名詞,サ変可能,*,VERB":{POS: VERB},
    "名詞,普通名詞,サ変形状詞可能,*":{POS: NOUN}, # ex: 下手
    "名詞,普通名詞,一般,*":{POS: NOUN},
    "名詞,普通名詞,形状詞可能,*":{POS: NOUN}, # XXX: sometimes ADJ in UDv2
    "名詞,普通名詞,形状詞可能,*,NOUN":{POS: NOUN},
    "名詞,普通名詞,形状詞可能,*,ADJ":{POS: ADJ},
    "名詞,普通名詞,助数詞可能,*":{POS: NOUN}, # counter / unit
    "名詞,普通名詞,副詞可能,*":{POS: NOUN},
    "連体詞,*,*,*":{POS: ADJ}, # XXX this has exceptions based on literal token
    "連体詞,*,*,*,ADJ":{POS: ADJ},
    "連体詞,*,*,*,PRON":{POS: PRON},
    "連体詞,*,*,*,DET":{POS: DET},
}
|
# Downloader configuration constants.
DOWNLOADER_VERSION = "0.0.1"
# Rotating log settings: file name, max size in bytes (10 MiB), backup count.
DOWNLOADER_LOG_FILE = "downloader.log"
DOWNLOADER_LOG_SIZE = 10485760
DOWNLOADER_LOG_COUNT = 10
DOWNLOADER_LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
DOWNLOADER_REQUIREMENTS_PATH = "requirements.txt"
|
class mmpmon(object):
    """Field-name mappings for GPFS mmpmon I/O statistics.

    Maps mmpmon's underscore-delimited output keys (e.g. ``_br_``) to
    human-readable field names for per-node and per-filesystem records.
    """
    def __init__(self):
        self.name = 'mmpmon'
        # Per-node record: mmpmon key -> readable field name.
        self.nodefields = { '_n_': 'nodeip', '_nn_': 'nodename',
            '_rc_': 'status', '_t_': 'seconds', '_tu_': 'microsecs',
            '_br_': 'bytes_read', '_bw_': 'bytes_written',
            '_oc_': 'opens', '_cc_': 'closes', '_rdc_': 'reads',
            '_wc_': 'writes', '_dir_': 'readdir', '_iu_': 'inode_updates' }
        self.nodelabels = {}
        # Per-filesystem record: same keys plus cluster/filesystem/disks.
        self.fsfields = { '_n_': 'nodeip', '_nn_': 'nodename',
            '_rc_': 'status', '_t_': 'seconds', '_tu_': 'microsecs',
            '_cl_': 'cluster', '_fs_': 'filesystem', '_d_': 'disks',
            '_br_': 'bytes_read', '_bw_': 'bytes_written',
            '_oc_': 'opens', '_cc_': 'closes', '_rdc_': 'reads',
            '_wc_': 'writes', '_dir_': 'readdir', '_iu_': 'inode_updates' }
        self.fslabels = {}
    def _add_nodes(self, nodelist):
        """Add nodes to the mmpmon nodelist"""
        # Stub: not yet implemented.
        return
    def _reset_stats(self):
        """Reset the IO stats"""
        # Stub: not yet implemented.
        return
|
from keras.models import model_from_json
import theano.tensor as T
from utils.readImgFile import readImg
from utils.crop import crop_detection
from utils.ReadPascalVoc2 import prepareBatch
import os
import numpy as np
def Acc(imageList,model,sample_number=5000,thresh=0.3):
    """Classification accuracy over cells predicted to contain an object.

    For every 7x7 grid cell whose confidence exceeds *thresh*, checks
    whether the predicted class matches any ground-truth object in the
    corresponding cell.

    Returns the fraction of confident cells with a matching class
    (0.0 when no cell exceeds the threshold).
    """
    correct = 0
    object_num = 0
    for image in imageList:
        # Get prediction from neural network
        img = crop_detection(image.imgPath,new_width=448,new_height=448)
        img = np.expand_dims(img, axis=0)
        out = model.predict(img)[0]
        # Each of the 49 cells carries 25 values: x, y, h, w, 20 class
        # scores, and an objectness confidence at index 24.
        for i in range(49):
            preds = out[i*25:(i+1)*25]
            if preds[24] > thresh:
                object_num += 1
                # BUG FIX: floor division; `i/7` yields a float on Python 3
                # and cannot be used as a list index.
                row = i // 7
                col = i % 7
                class_num = np.argmax(preds[4:24])
                # Ground truth for the same grid cell.
                box = image.boxes[row][col]
                if box.has_obj:
                    for obj in box.objs:
                        if obj.class_num == class_num:
                            correct += 1
                            break
    if object_num == 0:
        # Nothing exceeded the threshold; avoid ZeroDivisionError.
        return 0.0
    return correct * 1.0 / object_num
def Recall(imageList,model,sample_number=5000,thresh=0.3):
    """Recall over ground-truth objects.

    For every ground-truth object, counts a hit when its grid cell
    predicts an object above *thresh* with the matching class.

    Returns hits / total ground-truth objects (0.0 when there are none).
    """
    correct = 0
    obj_num = 0
    for image in imageList:
        # Get prediction from neural network
        img = crop_detection(image.imgPath,new_width=448,new_height=448)
        img = np.expand_dims(img, axis=0)
        out = model.predict(img)[0]
        # For each ground truth, see whether we predicted a matching result.
        for i in range(49):
            preds = out[i*25:i*25+25]
            # BUG FIX: floor division; `i/7` yields a float on Python 3 and
            # cannot be used as a list index.
            row = i // 7
            col = i % 7
            box = image.boxes[row][col]
            if box.has_obj:
                for obj in box.objs:
                    obj_num += 1
                    true_class = obj.class_num
                    # See what we predicted for this cell.
                    if preds[24] > thresh:
                        # FIX: corrected `predcit_class` typo.
                        predict_class = np.argmax(preds[4:24])
                        if predict_class == true_class:
                            correct += 1
    if obj_num == 0:
        # No ground-truth objects at all; avoid ZeroDivisionError.
        return 0.0
    return correct * 1.0 / obj_num
def MeasureAcc(model,sample_number,vocPath,imageNameFile):
    """Compute (accuracy, recall) for *model* over a prepared batch of images."""
    batch = prepareBatch(0, sample_number, imageNameFile, vocPath)
    return Acc(batch, model), Recall(batch, model)
|
from supriya.tools import osctools
from supriya.tools.requesttools.Request import Request
class GroupQueryTreeRequest(Request):
    r'''A /g_queryTree request.

    ::

        >>> from supriya.tools import requesttools
        >>> request = requesttools.GroupQueryTreeRequest(
        ...     node_id=0,
        ...     include_controls=True,
        ...     )
        >>> request
        GroupQueryTreeRequest(
            include_controls=True,
            node_id=0
            )

    ::

        >>> message = request.to_osc_message()
        >>> message
        OscMessage(57, 0, 1)

    ::

        >>> message.address == requesttools.RequestId.GROUP_QUERY_TREE
        True

    '''

    ### CLASS VARIABLES ###

    __slots__ = (
        '_include_controls',
        '_node_id',
        )

    ### INITIALIZER ###

    def __init__(
        self,
        include_controls=False,
        node_id=None,
        ):
        Request.__init__(self)
        self._node_id = node_id
        # Normalize to bool so to_osc_message can emit a clean 0/1 flag.
        self._include_controls = bool(include_controls)

    ### PUBLIC METHODS ###

    def to_osc_message(self):
        """Build the OSC message (request id, node id, controls flag as 0/1)."""
        request_id = int(self.request_id)
        node_id = int(self.node_id)
        include_controls = int(self.include_controls)
        message = osctools.OscMessage(
            request_id,
            node_id,
            include_controls,
            )
        return message

    ### PUBLIC PROPERTIES ###

    @property
    def include_controls(self):
        # Whether the reply should include synth control values.
        return self._include_controls

    @property
    def node_id(self):
        # Root of the subtree to query.
        return self._node_id

    @property
    def response_specification(self):
        # Maps the expected response class for this request.
        from supriya.tools import responsetools
        return {
            responsetools.QueryTreeResponse: None,
            }

    @property
    def request_id(self):
        from supriya.tools import requesttools
        return requesttools.RequestId.GROUP_QUERY_TREE
|
"""
The pyligadb module is a small python wrapper for the OpenLigaDB webservice.
The pyligadb module has been released as open source under the MIT License.
Copyright (c) 2014 Patrick Dehn
Due to suds, the wrapper is very thin, but the docstrings may be helpful.
Most of the methods of pyligadb return a list containing the requested data as
objects. So the attributes of the list items are accessible via the dot notation
(see example below). For a more detailed description of the return values see
the original documentation: http://www.openligadb.de/Webservices/Sportsdata.asmx
Example use (prints all matches at round 14 in season 2010 from the Bundesliga):
>>> from pyligadb.pyligadb import API
>>> matches = API().getMatchdataByGroupLeagueSaison(14, 'bl1', 2010)
>>> for match in matches:
...     print u"{} vs. {}".format(match.nameTeam1, match.nameTeam2)
1. FSV Mainz 05 vs. 1. FC Nuernberg
1899 Hoffenheim vs. Bayer Leverkusen
...
...
"""
__version__ = "0.1.1"

# suds is a hard dependency; fail fast at import time with a pointer to the
# project page when it is missing.  ImportError (a subclass of Exception, so
# existing ``except Exception`` callers still work) is more precise than the
# bare Exception the original raised.
try:
    from suds.client import Client
except ImportError:
    raise ImportError("pyligadb requires the suds library to work. "
                      "https://fedorahosted.org/suds/")
class API:
    """Thin wrapper around the OpenLigaDB SOAP webservice.

    Every method delegates to the matching remote operation on the suds
    service client created in __init__; most unwrap the SOAP result with
    ``[0]`` to return a plain list of result objects.
    """
    def __init__(self):
        self.client = Client('http://www.openligadb.de/Webservices/'
                             'Sportsdata.asmx?WSDL').service
    def getAvailGroups(self, leagueShortcut, leagueSaison):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer)
        @return: A list of available groups (half-final, final, etc.) for the
            specified league and season.
        """
        return self.client.GetAvailGroups(leagueShortcut, leagueSaison)[0]
    def getAvailLeagues(self):
        """
        @return: A list of all in OpenLigaDB available leagues.
        """
        return self.client.GetAvailLeagues()[0]
    def getAvailLeaguesBySports(self, sportID):
        """
        @param sportID: The id related to a specific sport.
            Use getAvailSports() to get all IDs.
        @return: A list of all in OpenLigaDB available leagues of the specified
            sport.
        """
        return self.client.GetAvailLeaguesBySports(sportID)[0]
    def getAvailSports(self):
        """
        @return: An object containing all in OpenLigaDB available sports.
        """
        return self.client.GetAvailSports()[0]
    def getCurrentGroup(self, leagueShortcut):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @return: An object containing information about the current group for
            the specified league (i.e. the round ("Spieltag") of the German
            Bundesliga).
        """
        return self.client.GetCurrentGroup(leagueShortcut)
    def getCurrentGroupOrderID(self, leagueShortcut):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @return: The current group-ID for the specified league
            (see getCurrentGroup()) as int value.
        """
        return self.client.GetCurrentGroupOrderID(leagueShortcut)
    def getGoalGettersByLeagueSaison(self, leagueShortcut, leagueSaison):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: A list of scorers from the specified league and season, sorted
            by goals scored.
        """
        return self.client.GetGoalGettersByLeagueSaison(leagueShortcut,
                                                        leagueSaison)[0]
    def getGoalsByLeagueSaison(self, leagueShortcut, leagueSaison):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: A list of all goals from the specified league and season.
        """
        return self.client.GetGoalsByLeagueSaison(leagueShortcut,
                                                  leagueSaison)[0]
    def getGoalsByMatch(self, matchID):
        """
        @param matchID: The ID of a specific Match. Use i.e. getLastMatch() to
            obtain an ID.
        @return: A list of all goals from the specified match or None.
        """
        # The service returns an empty string (not an empty list) when the
        # match has no goals; normalize that to None.
        result = self.client.GetGoalsByMatch(matchID)
        if result == "":
            return None
        else:
            return result[0]
    def getLastChangeDateByGroupLeagueSaison(self, groupOrderID, leagueShortcut,
                                             leagueSaison):
        """
        @param groupOrderID: The id of a specific group.
            Use i.e. getCurrentGroupOrderID() to obtain an ID.
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: The date of the last change as datetime object.
        """
        return self.client.GetLastChangeDateByGroupLeagueSaison(groupOrderID,
            leagueShortcut, leagueSaison)
    def getLastChangeDateByLeagueSaison(self, leagueShortcut, leagueSaison):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: The date of the last change as datetime object.
        """
        return self.client.GetLastChangeDateByLeagueSaison(leagueShortcut,
                                                           leagueSaison)
    def getLastMatch(self, leagueShortcut):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @return: An object containing information about the last match from the
            specified league.
        """
        return self.client.GetLastMatch(leagueShortcut)
    def getLastMatchByLeagueTeam(self, leagueID, teamID):
        """
        @param leagueID: Shortcut for a specific league.
            Use getAvailLeagues() to get all IDs.
        @param teamID: The ID of a team, which can be obtained by using
            getTeamsByLeagueSaison()
        @return: An object containing information about the last played match
        """
        return self.client.GetLastMatchByLeagueTeam(leagueID, teamID)
    def getMatchByMatchID(self, matchID):
        """
        @param matchID: The ID of a specific Match. Use i.e. getNextMatch()
            to obtain an ID.
        @return: An object containing information about the specified match.
        """
        return self.client.GetMatchByMatchID(matchID)
    def getMatchdataByGroupLeagueSaison(self, groupOrderID, leagueShortcut,
                                        leagueSaison):
        """
        @param groupOrderID: The ID of a specific group.
            Use i.e. getCurrentGroupOrderID() to obtain an ID.
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: A list of matches. Each list item is an object containing
            detailed information about the specified group/round.
        """
        return self.client.GetMatchdataByGroupLeagueSaison(groupOrderID,
            leagueShortcut, leagueSaison)[0]
    def getMatchdataByGroupLeagueSaisonJSON(self, groupOrderID, leagueShortcut,
                                            leagueSaison):
        """
        @param groupOrderID: The ID of a specific group.
            Use i.e. getCurrentGroupOrderID() to obtain an ID.
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: A JSON-Object containing detailed information about the
            specified group/round.

        NOTE(review): this delegates to GetMatchdataByGroupLeagueSaison, not
        to a JSON-specific SOAP operation -- verify against the webservice
        documentation whether a dedicated JSON endpoint was intended.
        """
        return self.client.GetMatchdataByGroupLeagueSaison(groupOrderID,
            leagueShortcut, leagueSaison)
    def getMatchdataByLeagueDateTime(self, fromDateTime, toDateTime,
                                     leagueShortcut):
        """
        @param fromDateTime: limit the result to matches later than fromDateTime
        @type fromDateTime: datetime.datetime
        @param toDateTime: limit the result to matches earlier than toDateTime
        @type toDateTime: datetime.datetime
        @return: A list of matches in the specified period.
        """
        return self.client.GetMatchdataByLeagueDateTime(fromDateTime,
            toDateTime, leagueShortcut)[0]
    def getMatchdataByLeagueSaison(self, leagueShortcut, leagueSaison):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: A list of all matches in the specified league and season.
        @note: May take some time...
        """
        return self.client.GetMatchdataByLeagueSaison(leagueShortcut,
                                                      leagueSaison)[0]
    def getMatchdataByTeams(self, teamID1, teamID2):
        """
        @param teamID1: ID of the first team. Use i.e. getTeamsByLeagueSaison()
            to obtain team IDs.
        @param teamID2: ID of the second team.
        @return: A list of matches, at which the specified teams play against
            each other.
        """
        return self.client.GetMatchdataByTeams(teamID1, teamID2)[0]
    def getNextMatch(self, leagueShortcut):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @return: An object containing information about the next match from the
            specified league.
        """
        return self.client.GetNextMatch(leagueShortcut)
    def getNextMatchByLeagueTeam(self, leagueID, teamID):
        """
        @param leagueID: Shortcut for a specific league.
            Use getAvailLeagues() to get all IDs.
        @param teamID: The ID of a team, which can be obtained by using
            getTeamsByLeagueSaison()
        @return: An object containing information about the next match
        """
        return self.client.GetNextMatchByLeagueTeam(leagueID, teamID)
    def getTeamsByLeagueSaison(self, leagueShortcut, leagueSaison):
        """
        @param leagueShortcut: Shortcut for a specific league.
            Use getAvailLeagues() to get all shortcuts.
        @param leagueSaison: A specific season (i.e. the date 2011 as integer).
        @return: A list of all teams playing in the specified league and season.
        """
        return self.client.GetTeamsByLeagueSaison(leagueShortcut,
                                                  leagueSaison)[0]
|
from django.contrib import admin
from app.models import Home, Room, Thermostat, Door, Light, Refrigerator
"""
Administrator interface customization
This module contains customization classes to the admin interface
rendered by Django. This file is interpreted at run time to serve
the custom administrator actions that correspond to the application's
custom models.
"""
class ThermostatAdmin(admin.ModelAdmin):
    """
    Admin change list for Thermostat: shows name, home, current/set
    temperatures and pk; searchable by name and home.
    """
    list_display = ('name','home','current_temp','set_temp','pk')
    search_fields = ('name','home')
class ThermostatInline(admin.StackedInline):
    """
    Stacked inline so thermostats can be edited on their parent's page.
    """
    model = Thermostat
class DoorAdmin(admin.ModelAdmin):
    """
    Admin change list for Door: shows name, room, lock/open state and pk;
    searchable by name and room.
    """
    list_display = ('name','room','is_locked','is_open','pk')
    search_fields = ('name','room')
class DoorInline(admin.StackedInline):
    """
    Stacked inline so doors can be edited on their parent's page.
    """
    model = Door
class LightAdmin(admin.ModelAdmin):
    """
    Admin change list for Light: shows name, room, on/off state and pk;
    searchable by name and room.
    """
    list_display = ('name','room','is_on','pk')
    search_fields = ('name','room')
class LightInline(admin.StackedInline):
    """
    Stacked inline so lights can be edited on their parent's page.
    """
    model = Light
class RefrigeratorAdmin(admin.ModelAdmin):
    """
    Admin change list for Refrigerator: shows name, room, fridge and
    freezer set/current temperatures and pk; searchable by name and room.
    """
    list_display = ('name','room','fridge_set_temp','fridge_current_temp','freezer_set_temp','freezer_current_temp','pk')
    search_fields = ('name','room')
class RefrigeratorInline(admin.StackedInline):
    """
    Stacked inline so refrigerators can be edited on their parent's page.
    """
    model = Refrigerator
class RoomAdmin(admin.ModelAdmin):
    """
    Admin change list for Room: shows name, home, room type and pk;
    doors, lights and refrigerators are edited inline on the room page.
    """
    list_display = ('name','home','room_type','pk')
    search_fields = ('name','home')
    inlines = (DoorInline, LightInline, RefrigeratorInline,)
class RoomInline(admin.StackedInline):
    """
    Stacked inline so rooms can be edited on their parent home's page.
    """
    model = Room
class HomeAdmin(admin.ModelAdmin):
    """
    Admin change list for Home: shows name, owner, position, secret key
    (read-only) and pk; thermostats and rooms are edited inline.
    """
    list_display = ('name','owner','position','secret_key','pk')
    search_fields = ('name',)
    readonly_fields=('secret_key',)
    inlines = (ThermostatInline, RoomInline, )
# Register each model with its customized admin class.
admin.site.register(Home, HomeAdmin)
admin.site.register(Thermostat, ThermostatAdmin)
admin.site.register(Room, RoomAdmin)
admin.site.register(Door, DoorAdmin)
admin.site.register(Light, LightAdmin)
admin.site.register(Refrigerator, RefrigeratorAdmin)
|
"""
Annotates Old Bird call detections in the BirdVox-70k archive.
The annotations classify clips detected by the Old Bird Tseep and Thrush
detectors according to the archive's ground truth call clips.
This script must be run from the archive directory.
"""
from django.db.models import F
from django.db.utils import IntegrityError
import pandas as pd
import vesper.util.django_utils as django_utils
django_utils.set_up_django()
from vesper.django.app.models import (
AnnotationInfo, Clip, Processor, Recording, StringAnnotation, User)
import vesper.django.app.model_utils as model_utils
import scripts.old_bird_detector_eval.utils as utils
ANNOTATE = True
GROUND_TRUTH_DETECTOR_NAME = 'BirdVox-70k'
DETECTOR_DATA = (
('Old Bird Tseep Detector Redux 1.1', 'Call.High'),
('Old Bird Thrush Detector Redux 1.1', 'Call.Low'),
)
CLASSIFICATION_ANNOTATION_NAME = 'Classification'
CENTER_INDEX_ANNOTATION_NAME = 'Call Center Index'
CENTER_FREQ_ANNOTATION_NAME = 'Call Center Freq'
SAMPLE_RATE = 24000
def main():
    """Annotate Old Bird clips, then print raw and aggregate CSV reports."""
    rows = annotate_old_bird_calls()
    raw_df = create_raw_df(rows)
    aggregate_df = create_aggregate_df(raw_df)
    # Precision/recall/F1 columns are added to both tables in place.
    add_precision_recall_f1(raw_df)
    add_precision_recall_f1(aggregate_df)
    print(raw_df.to_csv())
    print(aggregate_df.to_csv())
def annotate_old_bird_calls():
    """Match Old Bird detector clips against ground truth calls.

    For every (detector, station/mic) combination, matches Old Bird clips
    to ground-truth call centers within a per-detector window and, when
    ANNOTATE is set, copies classification/center-index/center-frequency
    annotations onto the matched Old Bird clips.

    Returns:
        A list of rows [short detector name, station number, ground truth
        call count, matched Old Bird call count, Old Bird clip count].
    """
    center_index_annotation_info = \
        AnnotationInfo.objects.get(name=CENTER_INDEX_ANNOTATION_NAME)
    center_freq_annotation_info = \
        AnnotationInfo.objects.get(name=CENTER_FREQ_ANNOTATION_NAME)
    classification_annotation_info = \
        AnnotationInfo.objects.get(name=CLASSIFICATION_ANNOTATION_NAME)
    user = User.objects.get(username='Vesper')
    sm_pairs = model_utils.get_station_mic_output_pairs_list()
    ground_truth_detector = Processor.objects.get(
        name=GROUND_TRUTH_DETECTOR_NAME)
    rows = []
    for detector_name, annotation_value in DETECTOR_DATA:
        # e.g. 'Old Bird Tseep Detector Redux 1.1' -> 'Tseep'
        short_detector_name = detector_name.split()[2]
        old_bird_detector = Processor.objects.get(name=detector_name)
        window = utils.OLD_BIRD_CLIP_CALL_CENTER_WINDOWS[short_detector_name]
        for station, mic_output in sm_pairs:
            # Station names are assumed to end in a number, e.g. 'Unit 02'
            # -- TODO confirm against the archive's station naming.
            station_num = int(station.name.split()[1])
            print('{} {}...'.format(short_detector_name, station_num))
            ground_truth_clips = list(model_utils.get_clips(
                station=station,
                mic_output=mic_output,
                detector=ground_truth_detector,
                annotation_name=CLASSIFICATION_ANNOTATION_NAME,
                annotation_value=annotation_value))
            # Call centers are taken as the midpoint of each truth clip.
            ground_truth_call_center_indices = \
                [c.start_index + c.length // 2 for c in ground_truth_clips]
            ground_truth_call_count = len(ground_truth_clips)
            old_bird_clips = list(model_utils.get_clips(
                station=station,
                mic_output=mic_output,
                detector=old_bird_detector))
            old_bird_clip_count = len(old_bird_clips)
            clips = [(c.start_index, c.length) for c in old_bird_clips]
            matches = utils.match_clips_with_calls(
                clips, ground_truth_call_center_indices, window)
            old_bird_call_count = len(matches)
            rows.append([
                short_detector_name, station_num, ground_truth_call_count,
                old_bird_call_count, old_bird_clip_count])
            if ANNOTATE:
                # Clear any existing annotations.
                for clip in old_bird_clips:
                    model_utils.unannotate_clip(
                        clip, classification_annotation_info,
                        creating_user=user)
                # Create new annotations.
                for i, j in matches:
                    old_bird_clip = old_bird_clips[i]
                    call_center_index = ground_truth_call_center_indices[j]
                    ground_truth_clip = ground_truth_clips[j]
                    # Annotate Old Bird clip call center index.
                    model_utils.annotate_clip(
                        old_bird_clip, center_index_annotation_info,
                        str(call_center_index), creating_user=user)
                    # Get ground truth clip call center frequency.
                    annotations = \
                        model_utils.get_clip_annotations(ground_truth_clip)
                    call_center_freq = annotations[CENTER_FREQ_ANNOTATION_NAME]
                    # Annotate Old Bird clip call center frequency.
                    model_utils.annotate_clip(
                        old_bird_clip, center_freq_annotation_info,
                        call_center_freq, creating_user=user)
                    model_utils.annotate_clip(
                        old_bird_clip, classification_annotation_info,
                        annotation_value, creating_user=user)
    return rows
def create_raw_df(rows):
    """Build the per-detector/per-station counts table."""
    return pd.DataFrame(
        rows,
        columns=[
            'Detector', 'Station', 'Ground Truth Calls', 'Old Bird Calls',
            'Old Bird Clips'])
def create_aggregate_df(df):
    """Aggregate the raw counts per detector, plus an 'All' total row."""
    columns = [
        'Detector', 'Ground Truth Calls', 'Old Bird Calls', 'Old Bird Clips']
    data = [sum_counts(df, name) for name in ('Tseep', 'Thrush', 'All')]
    return pd.DataFrame(data, columns=columns)
def sum_counts(df, detector):
    """Sum the three count columns, restricted to one detector unless 'All'."""
    subset = df if detector == 'All' else df.loc[df['Detector'] == detector]
    return [
        detector,
        subset['Ground Truth Calls'].sum(),
        subset['Old Bird Calls'].sum(),
        subset['Old Bird Clips'].sum()]
def add_precision_recall_f1(df):
    """Append Precision, Recall and F1 percentage columns in place."""
    precision = df['Old Bird Calls'] / df['Old Bird Clips']
    recall = df['Old Bird Calls'] / df['Ground Truth Calls']
    f1 = 2 * precision * recall / (precision + recall)
    df['Precision'] = to_percent(precision)
    df['Recall'] = to_percent(recall)
    df['F1'] = to_percent(f1)
def to_percent(x):
    """Convert a fraction to a percentage with one decimal place."""
    scaled = round(1000 * x)
    return scaled / 10
# Script entry point.
if __name__ == '__main__':
    main()
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
import datetime, time, requests, re, os
import bs4
from django.contrib.admin.views.decorators import staff_member_required
from decimal import *
from .models import Gas, Region, Station, Site, Ship, Harvester, Setup, APICheck
from .forms import GasForm, SiteForm, SiteAnalyzer
def about(request):
    """Render the static about page."""
    return render(request, 'home/about.html')
def home(request):
    """Render the per-gas ISK/minute calculator.

    GET renders with default harvester values; a valid POST recalculates
    from the selected harvester, ship and gas-harvesting skill.  An
    invalid POST re-renders the bound form using the defaults (the
    original code raised UnboundLocalError on that path).
    """
    # Defaults, used for GET and for invalid POSTs.
    form = GasForm()
    cycle = 40
    yld = 20
    cycle_bonus = 0.25
    yld_bonus = 1
    if request.method == "POST":
        form = GasForm(data=request.POST)
        if form.is_valid():
            data = form.cleaned_data
            harv = data['harvester']
            cycle = harv.cycle
            yld = harv.yld
            ship = data['ship']
            yld_bonus = ship.yld_bonus
            # BUG FIX: clamp skill to [1, 5] with if/elif. The original
            # if/if/else overwrote the upper clamp with the raw value
            # (skill > 5 fell through into the else branch).
            skill = data['skill']
            if skill > 5:
                skill = 5
            elif skill < 1:
                skill = 1
            cycle_bonus = skill * .05
    # Cycle-time bonus caps at 25% (skill V).
    if cycle_bonus > .25:
        cycle_bonus = Decimal(0.25)
    c = cycle * (1 - cycle_bonus)  # effective cycle time
    y = yld * (1 + yld_bonus)      # effective yield per cycle
    gases = Gas.objects.all()
    isk_min = {}
    for gas in gases:
        # Two harvesters, 60/c cycles per minute, valued at the last price.
        isk_min_val = ((Decimal(y) / Decimal(gas.volume)) * 2) * (60 / Decimal(c)) * Decimal(gas.last_price)
        isk_mthree = Decimal(gas.last_price) / Decimal(gas.volume)
        isk_min[gas.name] = [isk_min_val, isk_mthree]
    u = APICheck.objects.get(id=1)
    context = {'isk_min': isk_min, 'form': form, 'updated': str(u.updated)}
    return render(request, "home/home.html", context)
def sites(request):
    """Render the per-site ISK/minute calculator.

    Same structure (and same fixes) as home(): defaults are set up front
    so GET and invalid POSTs render, and the skill clamp uses if/elif
    instead of the original broken if/if/else.
    """
    # Defaults, used for GET and for invalid POSTs.
    form = SiteForm()
    cycle = Decimal(40)
    yld = Decimal(20)
    cycle_bonus = Decimal(0.25)
    yld_bonus = Decimal(1)
    num = Decimal(1)
    cargo = 10000
    extra_data = False
    if request.method == "POST":
        form = SiteForm(data=request.POST)
        if form.is_valid():
            data = form.cleaned_data
            harv = data['harvester']
            cycle = Decimal(harv.cycle)
            yld = Decimal(harv.yld)
            ship = data['ship']
            yld_bonus = Decimal(ship.yld_bonus)
            cargo = Decimal(ship.cargo)
            num = Decimal(data['num'])
            # BUG FIX: clamp skill to [1, 5] with if/elif (see home()).
            skill = data['skill']
            if skill > 5:
                skill = 5
            elif skill < 1:
                skill = 1
            cycle_bonus = skill * .05
            extra_data = data['extra_data']
    c = cycle * (Decimal(1) - Decimal(cycle_bonus))  # effective cycle time
    y = yld * (Decimal(1) + Decimal(yld_bonus))      # effective yield
    sites = Site.objects.all()
    sites_calc = {}
    for site in sites:
        p_price = site.p_gas.last_price
        s_price = site.s_gas.last_price
        p_vol = site.p_gas.volume
        s_vol = site.s_gas.volume
        p_isk_min = ((Decimal(y) / Decimal(p_vol)) * 2) * (60 / Decimal(c)) * Decimal(p_price) * num
        s_isk_min = ((Decimal(y) / Decimal(s_vol)) * 2) * (60 / Decimal(c)) * Decimal(s_price) * num
        # Pick whichever cloud is worth more ISK/minute as the "best" gas.
        if p_isk_min < s_isk_min:
            best_gas = site.s_gas
            best_gas_isk_min = s_isk_min
            best_qty = site.s_qty
            other_gas = site.p_gas
            other_gas_isk_min = p_isk_min
            other_qty = site.p_qty
        else:
            best_gas = site.p_gas
            best_gas_isk_min = p_isk_min
            best_qty = site.p_qty
            other_gas = site.s_gas
            other_gas_isk_min = s_isk_min
            other_qty = site.s_qty
        p_units_min = ((y / best_gas.volume) * 2) * (60 / c) * num
        s_units_min = ((y / other_gas.volume) * 2) * (60 / c) * num
        time_to_clear = (best_qty / p_units_min) + (other_qty / s_units_min)
        isk_pres = (p_price * site.p_qty) + (s_price * site.s_qty)
        site_isk_min = Decimal(isk_pres) / Decimal(time_to_clear)
        # Extra data calculations.
        primary_time_to_clear = (best_qty / p_units_min)
        secondary_time_to_clear = (other_qty / s_units_min)
        ships_needed = ((site.p_qty * p_vol) + (site.s_qty * s_vol)) / (cargo)
        sites_calc[site.name] = [isk_pres, best_gas, best_gas_isk_min, other_gas, other_gas_isk_min, site_isk_min, time_to_clear, primary_time_to_clear, secondary_time_to_clear, ships_needed]
    u = APICheck.objects.get(id=1)
    context = {'form': form, 'sites_calc': sites_calc, 'updated': str(u.updated), 'extra_data': extra_data}
    return render(request, "home/sites.html", context)
def site_an(request):
    """Analyze a pasted d-scan: per-site ISK value and 15-minute 'ninja'
    harvesting estimates.

    Defaults are established up front so GET and invalid POSTs render (the
    original raised UnboundLocalError when a POSTed form failed
    validation).  The calculation itself is unchanged.
    """
    # Defaults, used for GET and for invalid POSTs.
    form = SiteAnalyzer()
    show_data = False
    skill = 0
    num = 1
    ship = Ship.objects.get(id=1)
    harvester = Harvester.objects.get(id=1)
    if request.method == 'POST':
        form = SiteAnalyzer(data=request.POST)
        if form.is_valid():
            data = form.cleaned_data
            scan = data['scan']
            num = Decimal(data['num'])
            ship = data['ship']
            harvester = data['harvester']
            skill = Decimal(data['skill'])
            show_data = True
    cycle_bonus = skill * Decimal(0.05)
    yld = harvester.yld
    c = harvester.cycle * (1 - cycle_bonus)
    y = yld * (1 + ship.yld_bonus) * num
    #parse Dscan
    sites = []
    proc_sites = []
    if show_data:
        scan_re = re.compile(r'Gas Site *(\S* \S* \S*) *')
        scan_re_b = re.compile(r'(Instrumental Core Reservoir|Ordinary Perimeter Reservoir|Minor Perimeter Reservoir|Bountiful Frontier Reservoir|Barren Perimeter Reservoir|Token Perimeter Reservoir|Sizable Perimeter Reservoir|Vast Frontier Reservoir|Vital Core Reservoir)')
        scan_results = scan_re.findall(scan)
        if scan_results == []:
            scan_results = scan_re_b.findall(scan)
        print(scan_results)
        for res in scan_results:
            sites.append(res)
        for s in sites:
            site = Site.objects.get(name=s)
            site_name = site.name
            site_isk = (site.p_gas.last_price * site.p_qty) + (site.s_gas.last_price * site.s_qty)
            #ninja scanning
            #determine best gas
            p_isk_min = ((Decimal(y) / Decimal(site.p_gas.volume)) * 2) * (60 / Decimal(c)) * Decimal(site.p_gas.last_price)
            s_isk_min = ((Decimal(y) / Decimal(site.s_gas.volume)) * 2) * (60 / Decimal(c)) * Decimal(site.s_gas.last_price)
            # NOTE(review): on an exact tie both branches run and the
            # second (secondary gas) wins -- confirm that is intended.
            if p_isk_min >= s_isk_min:
                first_cloud = site.p_gas
                first_qty = site.p_qty
                sec_cloud = site.s_gas
                sec_qty = site.s_qty
            if p_isk_min <= s_isk_min:
                first_cloud = site.s_gas
                first_qty = site.s_qty
                sec_cloud = site.p_gas
                sec_qty = site.p_qty
            #calculate how much you can get in 15 minutes
            units_15 = ((Decimal(y) / Decimal(first_cloud.volume)) * 2) * (60 / Decimal(c)) * 15
            if units_15 <= first_qty:
                ninja_isk = units_15 * first_cloud.last_price
                if ninja_isk > site_isk:
                    ninja_isk = site_isk
                m_per_s = (units_15 / num) * first_cloud.volume
            #if it is more than the qty in the best cloud, calculate the remaining time
            if units_15 > first_qty:
                min_left = 15 - (first_qty / (units_15 / 15))
                sec_units_min = ((Decimal(y) / Decimal(sec_cloud.volume)) * 2) * (60 / Decimal(c))
                rem_units = sec_units_min * min_left
                ninja_isk = (rem_units * sec_cloud.last_price) + (first_qty * first_cloud.last_price)
                if ninja_isk > site_isk:
                    ninja_isk = site_isk
                m_per_s = ((units_15 / num) * first_cloud.volume) + ((rem_units / num) * sec_cloud.volume)
                if m_per_s * num > (site.p_qty * site.p_gas.volume) + (site.s_qty * site.s_gas.volume):
                    m_per_s = ((site.p_qty * site.p_gas.volume) + (site.s_qty * site.s_gas.volume)) / num
            sipm = ninja_isk / 15 / num
            nips = ninja_isk / num
            # The Ordinary site is deliberately zeroed out (not worth ninja
            # harvesting).
            if site_name == 'Ordinary Perimeter Reservoir':
                sipm = 0
                m_per_s = 0
                nips = 0
                ninja_isk = 0
            ninja_si = (site_name, site_isk, sipm, first_cloud.name, m_per_s, nips, ninja_isk)
            proc_sites.append(ninja_si)
    # Totals across all scanned sites.
    t_site_isk = 0
    t_sipm = 0
    t_sipm_c = 0
    t_m_per_s = 0
    t_nips = 0
    t_ninja_isk = 0
    for s in proc_sites:
        t_site_isk = t_site_isk + s[1]
        t_sipm = t_sipm + s[2]
        if s[0] != "Ordinary Perimeter Reservoir":
            t_sipm_c = t_sipm_c + 1
        t_m_per_s = t_m_per_s + s[4]
        t_nips = t_nips + s[5]
        t_ninja_isk = t_ninja_isk + s[6]
    ships = t_m_per_s / ship.cargo
    # Avoid dividing by zero when nothing was scanned.
    if t_sipm_c == 0:
        t_sipm_c = 1
    if t_site_isk == 0:
        t_site_isk = 1
    percent = (t_ninja_isk / t_site_isk) * 100
    totals = (t_site_isk, t_sipm / t_sipm_c, t_m_per_s, t_nips, t_ninja_isk, ships, percent)
    t_min = t_sipm_c * 15
    u = APICheck.objects.get(id=1)
    context = {'show_data': show_data, 'form': form, 'sites': sites, 'proc_sites': proc_sites, 'totals': totals, 't_min': t_min, 'updated': str(u.updated)}
    return render(request, "home/site_an.html", context)
def pull_prices(request):
    """Refresh cached gas buy prices from the eve-central market API.

    Fetches max-buy prices for every Gas, caches the XML response at
    data/prices.xml, then parses the cached file (possibly stale when the
    HTTP request failed) and stores the rounded price on each Gas row.
    """
    # Extracts the text between an XML tag pair.
    tag_re = re.compile(r'<.*>(.*)</.*>')
    gs = Gas.objects.all()
    id_str = ''
    for g in gs:
        id_str = id_str + '&typeid=' + g.item_id
    r = '10000002'  # The Forge region id.
    # BUG FIX: the query string contained the mojibake '®ionlimit' (the
    # '&reg' HTML entity); restored to the intended '&regionlimit'.
    url = 'http://api.eve-central.com/api/marketstat?'+id_str+'&regionlimit='+r
    # BUG FIX: define the cache path up front -- the read below runs even
    # when the request fails, which previously raised UnboundLocalError.
    path = 'data/prices.xml'
    xml_raw = requests.get(url)
    if xml_raw.status_code == requests.codes.ok:
        # Use context managers so file handles are always closed.
        with open(path, 'w') as xml_out:
            xml_out.write(xml_raw.text)
        status = 'OK'
    else:
        status = 'Error'
    with open(path, 'r') as xml_file:
        xml = xml_file.read()
    soup = bs4.BeautifulSoup(xml, 'xml')
    types = soup.find_all('type')
    for t in types:
        t_dict = dict(t.attrs)
        type_id = t_dict['id']
        buy = t.buy
        avg = buy.find_all('max')
        avg_in = tag_re.search(str(avg))
        avg_in = avg_in.group(1)
        avg_price = round(Decimal(avg_in), 2)
        g = Gas.objects.get(item_id=type_id)
        g.last_price = avg_price
        g.save()
    gases = Gas.objects.all()
    # Touch the APICheck row so its auto-updated timestamp refreshes.
    a, c = APICheck.objects.get_or_create(id=1)
    a.save()
    context = {'status': status, 'gases': gases}
    return render(request, "home/pull_prices.html", context)
@staff_member_required
def wipe_db(request):
    """Delete every row from all calculator models, then redirect home."""
    # Same models, same deletion order as before.
    for model in (Site, Gas, Region, Station, Ship, Harvester, Setup):
        model.objects.all().delete()
    return HttpResponseRedirect(reverse('home:home'))
@staff_member_required
def setup_site(request):
    """Seed the database with static EVE gas, site, ship and harvester data.

    Intended to run once: when fetching the Setup row succeeds the seeding
    block (inside the except) is skipped and the user is redirected home.
    """
    try:
        s = Setup.objects.get(id=1)
        # NOTE(review): this compares a Setup *instance* to the int 1, so it
        # is always False -- presumably ``s.setup == 1`` was intended.
        # Seeding is still skipped when the row exists, only because the
        # except branch below is not entered.
        if s==1:
            return HttpResponseRedirect(reverse('home:home'))
    # NOTE(review): bare except -- any failure (not just Setup.DoesNotExist)
    # triggers a full re-seed; confirm before narrowing.
    except:
        # Gas types: name, market item id, unit volume (m3).
        g = Gas(name='Fullerite-C28',item_id='30375', volume='2')
        g.save()
        g = Gas(name='Fullerite-C32',item_id='30376', volume='5')
        g.save()
        g = Gas(name='Fullerite-C320',item_id='30377', volume='5')
        g.save()
        g = Gas(name='Fullerite-C50',item_id='30370', volume='1')
        g.save()
        g = Gas(name='Fullerite-C540',item_id='30378', volume='10')
        g.save()
        g = Gas(name='Fullerite-C60',item_id='30371', volume='1')
        g.save()
        g = Gas(name='Fullerite-C70',item_id='30372', volume='1')
        g.save()
        g = Gas(name='Fullerite-C72',item_id='30373', volume='2')
        g.save()
        g = Gas(name='Fullerite-C84',item_id='30374', volume='2')
        g.save()
        # Default market region and station.
        r = Region(name='The Forge', region_id='10000002')
        r.save()
        s = Station(name='Jita IV - Moon 4 - Caldari Navy Assembly Plant ( Caldari Administrative Station )',station_id='60003760')
        s.save()
        # Harvesting ships: cargo capacity and yield bonus.
        s = Ship(name='Venture',cargo=5000,yld_bonus=1.00)
        s.save()
        s = Ship(name='Prospect',cargo=10000,yld_bonus=1.00)
        s.save()
        # Gas cloud harvester modules: cycle time (s) and yield per cycle.
        h = Harvester(name='Gas Cloud Harvester I',harv_id='25266',cycle=30,yld=10)
        h.save()
        h = Harvester(name='\'Crop\' Gas Cloud Harvester',harv_id='25540',cycle=30,yld=10)
        h.save()
        h = Harvester(name='\'Plow\' Gas Cloud Harvester',harv_id='25542',cycle=30,yld=10)
        h.save()
        h = Harvester(name='Gas Cloud Harvester II',harv_id='25812',cycle=40,yld=20)
        h.save()
        h = Harvester(name='Syndicate Gas Cloud Harvester',harv_id='28788',cycle=30,yld=10)
        h.save()
        # Look the gases back up to wire the site definitions.
        c50 = Gas.objects.get(name='Fullerite-C50')
        c60 = Gas.objects.get(name='Fullerite-C60')
        c70 = Gas.objects.get(name='Fullerite-C70')
        c72 = Gas.objects.get(name='Fullerite-C72')
        c84 = Gas.objects.get(name='Fullerite-C84')
        c28 = Gas.objects.get(name='Fullerite-C28')
        c32 = Gas.objects.get(name='Fullerite-C32')
        c320 = Gas.objects.get(name='Fullerite-C320')
        c540 = Gas.objects.get(name='Fullerite-C540')
        # Gas sites: primary/secondary gas with their cloud quantities.
        s = Site(name='Barren Perimeter Reservoir',p_gas=c50,s_gas=c60,p_qty=3000,s_qty=1500)
        s.save()
        s = Site(name='Token Perimeter Reservoir',p_gas=c60,s_gas=c70,p_qty=3000,s_qty=1500)
        s.save()
        s = Site(name='Ordinary Perimeter Reservoir',p_gas=c72,s_gas=c84,p_qty=3000,s_qty=1500)
        s.save()
        s = Site(name='Sizable Perimeter Reservoir',p_gas=c84,s_gas=c50,p_qty=3000,s_qty=1500)
        s.save()
        s = Site(name='Minor Perimeter Reservoir',p_gas=c70,s_gas=c72,p_qty=3000,s_qty=1500)
        s.save()
        s = Site(name='Bountiful Frontier Reservoir',p_gas=c28,s_gas=c32,p_qty=5000,s_qty=1000)
        s.save()
        s = Site(name='Vast Frontier Reservoir',p_gas=c32,s_gas=c28,p_qty=5000,s_qty=1000)
        s.save()
        s = Site(name='Instrumental Core Reservoir',p_gas=c320,s_gas=c540,p_qty=6000,s_qty=500)
        s.save()
        s = Site(name='Vital Core Reservoir',p_gas=c540,s_gas=c320,p_qty=6000,s_qty=500)
        s.save()
        # Ensure the price-cache directory exists (ignored when it does).
        try:
            os.mkdir('data/')
        except:
            pass
        # Mark setup as done.
        s = Setup(setup=1)
        s.save()
    return HttpResponseRedirect(reverse('home:home'))
|
""" 2018 AOC Day 09 """
import argparse
import typing
import unittest
class Node(object):
    ''' Doubly linked node in a circular list. '''
    def __init__(self, prev: 'Node', next: 'Node', value: int):
        ''' Build a node with the given neighbours and stored value '''
        self._prev = prev
        self._next = next
        self._value = value
    @staticmethod
    def default() -> 'Node':
        ''' Return a one-node circle holding the value 0 '''
        root = Node(None, None, 0)  # type: ignore
        root._prev = root._next = root
        return root
    def forward(self, n: int = 1) -> 'Node':
        ''' Return the node n steps clockwise '''
        node = self
        while n > 0:
            node = node._next
            n -= 1
        return node
    def back(self, n: int = 1) -> 'Node':
        ''' Return the node n steps counter-clockwise '''
        node = self
        while n > 0:
            node = node._prev
            n -= 1
        return node
    def insert(self, value: int) -> 'Node':
        ''' Splice a new node carrying value just after self; return it '''
        added = Node(self, self._next, value)
        added._next._prev = added
        self._next = added
        return added
    def remove(self) -> 'Node':
        ''' Unlink self from the circle and return its successor '''
        successor = self._next
        self._prev._next = successor
        successor._prev = self._prev
        return successor
    def value(self) -> int:
        ''' Return the stored value '''
        return self._value
    def chain_values(self):
        ''' List all values clockwise, starting at self '''
        values = [self._value]
        node = self._next
        while node is not self:
            values.append(node._value)
            node = node._next
        return values
def part1(nplayers: int, highest_marble: int) -> int:
    """ Solve part 1: play the marble game, return the winning score """
    scores = dict.fromkeys(range(nplayers), 0)
    current = Node.default()
    for marble in range(1, highest_marble + 1):
        # Players take turns in order starting with player 0.
        player = (marble - 1) % nplayers
        if marble % 23:
            # Normal move: place the marble two positions clockwise.
            current = current.forward().insert(marble)
        else:
            # Scoring move: keep this marble plus the one 7 back.
            current = current.back(7)
            scores[player] += marble + current.value()
            current = current.remove()
    return max(scores.values())
def part2(nplayers: int, highest_node: int) -> int:
    """ Solve part 2 """
    # Same rules as part 1; callers pass a 100x larger last marble.
    return part1(nplayers, highest_node)
def main():
    """ Run 2018 Day 09 """
    parser = argparse.ArgumentParser(description='Advent of Code 2018 Day 09')
    parser.add_argument('nplayers', type=int, help='# of players')
    parser.add_argument(
        'highest_marble',
        type=int,
        help='highest-valued marble',
    )
    opts = parser.parse_args()
    print('Part 1:', part1(opts.nplayers, opts.highest_marble))
    # Part 2 is the same game with a 100x higher final marble.
    print('Part 2:', part2(opts.nplayers, opts.highest_marble * 100))
# CLI entry point (note: placed before ExampleTest, so running as a script
# never reaches the test class below).
if __name__ == '__main__':
    main()
class ExampleTest(unittest.TestCase):
    """ Regression tests using the worked examples from the puzzle text. """
    def test_part1(self):
        # (nplayers, highest_marble) -> expected winning score
        examples = {
            (9, 25): 32,
            (10, 1618): 8317,
            (13, 7999): 146373,
            (17, 1104): 2764,
            (21, 6111): 54718,
            (30, 5807): 37305,
        }
        for example, expected in examples.items():
            self.assertEqual(part1(*example), expected)
|
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
# Discover admin modules across installed apps.
admin.autodiscover()
# NOTE(review): patterns() with dotted-string view paths is Django <= 1.9
# syntax (removed in 1.10); this file presumably targets an old release --
# confirm before upgrading.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'app.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    # Shopping-cart endpoints.
    url(r'^add/$', 'cart.views.add'),
    url(r'^clear/$', 'cart.views.clear'),
    url(r'^show/$', 'cart.views.show'),
    url(r'^remove/(?P<pk>\d+)/$', 'cart.views.remove'),
    url(r'^checkout/$', 'cart.views.checkout'),
)
|
from __future__ import absolute_import
from __future__ import unicode_literals
import json
from django.test import TestCase
from django.test.client import Client
from webhook.base import WebhookBase
class TestIntegration(TestCase):
    """End-to-end test of the webhook receiver endpoint."""
    def setUp(self):
        """initialize the Django test client"""
        self.c = Client()
    def test_success(self):
        # A minimal well-formed webhook payload.
        python_dict = {
            "eventId": "5c0007",
            "portalId": 999,
            "userEmail": "fake@email.com"
        }
        response = self.c.post('/webhook-receiver/',
                               json.dumps(python_dict),
                               content_type="application/json")
        # The receiver should accept the JSON body with 200 OK.
        self.assertEqual(response.status_code, 200)
class TestWebhookBase(TestCase):
    """Contract tests for the WebhookBase base class."""
    def test_unimplemented_process_webhook(self):
        # Subclasses are required to override process_webhook.
        with self.assertRaises(NotImplementedError):
            WebhookBase().process_webhook(data={})
|
from math import log2 #For converting numbers to log base 2
'''PIPE TO EXTERNAL FILE WITH > filename.txt'''
# Alphabet of hidden/observed symbols; the "_" word separator is appended
# where needed.
letters = 'abcdefghijklmnopqrstuvwxyz'
'''File to read in data from, change this name to read from other files'''
# Training data file and held-out test file (two columns per line:
# intended letter, observed/typed letter).
file_name = "typos20.data"
test_file = "typos20Test.data"
'''
NOTE: Spaces are uncorrupted. Words always have the same number of letters and transition to spaces at the end of the word
'''
def data_parser(name):
    """Read a two-column typo data file and estimate HMM parameters.

    Each line of the file holds the intended (hidden) character in column 0
    and the observed (typed) character in column 2; "_" lines separate words.

    Returns a 5-tuple:
      first_col  - list of intended words,
      second_col - list of observed words,
      emis_prob  - emission probabilities emis_prob[hidden][observed],
      tran_prob  - transition probabilities tran_prob[prev][next],
      init_prob  - initial-state probabilities.
    Probabilities use add-one (Laplace) smoothing.
    """
    #Store columns
    first_col = []
    second_col = []
    #Temporarily store words as they are built
    word1 = ""
    word2 = ""
    #Emission dict
    #Dictionary that stores the intended letter as key, and observed letters with frequencies as value
    emis_freq = {}
    #Fill dictionary with dictionaries, and those with letter entries (init to 0)
    for i in letters:
        emis_freq[i] = {}
        for j in letters:
            emis_freq[i][j] = 0
    #Transition dictionary
    #Dictionary that stores the first letter (t) as the key, and second letter (t+1) as the second key with frequencies as value
    tran_freq = {}
    #Fill dictionary with dictionaries, and those with letter entries (init to 0)
    for i in (letters+"_"):
        tran_freq[i] = {}
        for j in (letters+"_"):
            tran_freq[i][j] = 0
    #Initial dictionary
    #Dictionary to store frequency that a letter occurs in the first col (hidden, actual)
    init_freq = {}
    #Fill dictionary with letter entries (init to 0)
    for i in (letters+"_"):
        init_freq[i] = 0
    #Open the file
    with open(name,"r") as data_in:
        #Store the last char
        last_char = ""
        #Bool to see if this is the first char
        first_char = True
        #Iterate through the file line by line
        for i in data_in.readlines():
            #Initial
            #Increment the first col characters frequency in the initial dict
            init_freq[i[0]] += 1
            #Transition
            #Make sure this isn't the first
            if first_char:
                first_char = False
            #Otherwise add to the transition frequency dict
            else:
                tran_freq[last_char][i[0]] += 1
            #Set the last char to be the current first col char that we have added to the dict
            last_char = i[0]
            #Check if this line is a separation between words ("_")
            if i[0] == "_":
                #Append word to list of words
                first_col.append(word1)
                second_col.append(word2)
                #Reset temporary word storage
                word1 = ""
                word2 = ""
            #Otherwise line is letter
            else:
                #Append letters to their temporary storage containers
                word1 += i[0]
                word2 += i[2]
                # The else branch only fires for observed characters outside
                # the pre-seeded a-z alphabet (creates the entry on demand).
                if i[2] in emis_freq[i[0]]:
                    emis_freq[i[0]][i[2]] += 1
                else:
                    emis_freq[i[0]][i[2]] = 1
    #Cleanup since data file doesn't end in a "_ _" line
    first_col.append(word1)
    second_col.append(word2)
    '''Emission Calulations'''
    #Add entry to dict 'tot' that holds the total number of times the letter appears
    #Iterate through keys (actual letters)
    for i in emis_freq:
        #Reset total
        tot = 0
        #Iterate through evidence keys for letter i
        for j in emis_freq[i]:
            tot += emis_freq[i][j]
        #Add 'tot' entry to dict
        emis_freq[i]["tot"] = tot
    #Now take this data (total) and create a probability dictionary
    emis_prob = {}
    #Iterate through keys (actual letters)
    for i in emis_freq:
        #Create dictionary for this actual letter in new dict
        emis_prob[i] = {}
        #Iterate through evidence keys for letter i
        for j in emis_freq[i]:
            #Add one to the numerator and 26 (num of letters) to the denominator (add-one smoothing)
            emis_prob[i][j] = (emis_freq[i][j]+1)/(emis_freq[i]["tot"]+26)
        #Add the very small, basically 0 chance of a "_" getting in the mix (chance is 0 in reality)
        emis_prob[i]["_"] = 1/(emis_freq[i]["tot"]+26)
        #Remove 'tot' key from probability dict
        del emis_prob[i]["tot"]
    '''Spaces are immutable, uncorruptable beasts, and have an emission probability of 1. They are not counted'''
    emis_prob['_'] = {}
    emis_prob['_']['_'] = 0.9999999999999999
    for i in letters:
        emis_prob['_'][i] = 0.0000000000000001
    '''Transition Calulations'''
    #Add entry to dict 'tot' that holds the total number of times the letter appears
    #Iterate through keys (actual letters)
    for i in tran_freq:
        #Reset total
        tot = 0
        #Iterate through evidence keys for letter i
        for j in tran_freq[i]:
            tot += tran_freq[i][j]
        #Add 'tot' entry to dict
        tran_freq[i]["tot"] = tot
    #Now take this data (total) and create a probability dictionary
    tran_prob = {}
    #Iterate through keys (actual letters)
    for i in tran_freq:
        #Create dictionary for this actual letter in new dict
        tran_prob[i] = {}
        #Iterate through evidence keys for letter i
        for j in tran_freq[i]:
            #Add one to the numerator and 27 (num of letters + '_') to the denominator
            tran_prob[i][j] = (tran_freq[i][j]+1)/(tran_freq[i]["tot"]+27)
        #Remove 'tot' key from probability dict
        del tran_prob[i]["tot"]
    '''Initial Calculations'''
    #Count the total number of characters in the first col (hidden)
    tot = 0
    for i in init_freq:
        tot += init_freq[i]
    #Dict that stores the probabilities of each letter
    init_prob = {}
    for i in init_freq:
        init_prob[i] = (init_freq[i]/tot)#(init_freq[i]/len("_".join(first_col)))
    #Return both word lists and the three probability dictionaries
    return first_col,second_col,emis_prob,tran_prob,init_prob
def furby(evid, hidd, star, tran, emis):
    """Viterbi decoding over log2 probabilities.

    Spaces are uncorrupted, so their emission probability is ~1.  Working
    in log base 2 keeps the products numerically stable; ``max`` still
    orders log values correctly (log2(0.8) > log2(0.2)).

    :param evid: observed symbol sequence (string)
    :param hidd: iterable of hidden-state symbols
    :param star: initial state probabilities, star[state]
    :param tran: transition probabilities, tran[prev][next]
    :param emis: emission probabilities, emis[state][observed]
    :returns: (log2 probability of the best final state,
               best path as a list of single-character strings)
    """
    # Trellis column 0: seed with initial probability * first emission
    # (sums instead of products in log space).
    trellis = [{state: log2(star[state]) + log2(emis[state][evid[0]])
                for state in star}]
    best_paths = {state: [state] for state in star}
    # Extend the trellis one observation at a time (t >= 1).
    for t in range(1, len(evid)):
        column = {}
        new_paths = {}
        for cur in hidd:
            # Score every predecessor and keep the best one.
            scored = ((trellis[t - 1][prev]
                       + log2(tran[prev][cur])
                       + log2(emis[cur][evid[t]]), prev)
                      for prev in hidd)
            score, winner = max(scored)
            column[cur] = score
            new_paths[cur] = best_paths[winner] + [cur]
        trellis.append(column)
        # Older paths are no longer needed.
        best_paths = new_paths
    # Pick the best-scoring state in the final column.
    final = len(evid) - 1
    prob, state = max((trellis[final][s], s) for s in hidd)
    return prob, best_paths[state]
def error_rate(correct, check):
    """Return the fraction of positions where *check* differs from *correct*.

    Indexes *check* by position, so a *check* shorter than *correct*
    raises IndexError (matching the original behaviour).
    """
    mismatches = sum(1 for pos in range(len(correct))
                     if correct[pos] != check[pos])
    return mismatches / len(correct)
if __name__ == "__main__":
#Set correct and actual as lists to hold words in each column
correct,actual,conditional,transitional,initial = data_parser(file_name)
#Get the data from another file to run the algorithm on. Get the 1st and 3rd column as strings
#String that had the hidden state sequence (1st column)
test_hidden = ""
#String that stores the observed column (3rd column)
test_observed = ""
#Open file to get data from
with open(test_file,"r") as test_in:
#Iterate through lines of file
for i in test_in.readlines():
#Store first column letter
test_hidden += i[0]
#Store third column letter
test_observed += i[2]
#Run Viterbi
prob, path = furby(test_observed, letters+"_", initial, transitional, conditional)
#Calculate error rates
print("Error rate of", test_file, "before Viterbi:",error_rate(test_hidden,test_observed)*100,"%")
print("Error rate of", test_file, "after Viterbi:",error_rate(test_hidden,path)*100,"%")
print("--------State Sequence--------")
#Print final sequence in more readable format by joining list
print("".join(path))
#Print the probability of the final state for fun
print("--------Final State Probability--------")
print("In Log2:", prob)
print("In Decimal:", pow(2,prob))
''' Part 1
#Print conditional
print("----------------Condition----------------")
#Iterate through keys of a sorted dictionary
for i in sorted(conditional):
print("--------Hidden:",i,"--------")
#Iterate through keys of dict in dict (value dict to the key "i")
for j in sorted(conditional[i]):
#Print the number of occurances
print(j, conditional[i][j])
#Print transitional
print("----------------Transition----------------")
#Iterate through keys of a sorted dictionary
for i in sorted(transitional):
print("--------Previous:",i,"--------")
#Iterate through keys of dict in dict (value dict to the key "i")
for j in sorted(transitional[i]):
#Print the number of occurances
print(j, transitional[i][j])
#Print Initial
print("----------------Initial (Using Hidden)----------------")
#Iterate through key of sorted dict
for i in sorted(initial):
print(i, initial[i])
'''
|
import os
from djangomaster.core import autodiscover
from djangomaster.sites import mastersite
def get_version():
    """Return the package version read from version.txt next to this module.

    :returns: the stripped contents of version.txt
    :raises IOError/OSError: if version.txt is missing or unreadable
    """
    path = os.path.dirname(os.path.abspath(__file__))
    path = os.path.join(path, 'version.txt')
    # Use a context manager so the file handle is closed deterministically;
    # the original ``open(path).read()`` left closing to the GC.
    with open(path) as version_file:
        return version_file.read().strip()
__version__ = get_version()
def get_urls():
    """Run master-site autodiscovery, then return the triple
    (urlpatterns, app_namespace, instance_namespace) suitable for
    Django's ``include()``."""
    autodiscover()
    return mastersite.urlpatterns, 'djangomaster', 'djangomaster'
# NOTE(review): evaluating this at import time triggers autodiscovery as a
# module-import side effect.
urls = get_urls()
|
import urllib2
import json
import time
import threading
import Queue
from utils import make_random_id, LOGINFO, LOGDEBUG, LOGERROR
class CommBase(object):
    """Common base for comm channels: tracks the registered message agents."""

    def __init__(self):
        # Agents receive dispatch_message() calls for incoming messages.
        self.agents = []

    def add_agent(self, agent):
        """Register *agent* to receive dispatched messages."""
        self.agents.append(agent)
class HTTPComm(CommBase):
    """Comm channel that exchanges JSON messages with an HTTP message board.

    Python 2 code (urllib2).  Each posted message is tagged with a random
    msgid so our own messages can be filtered out when polling.
    """
    def __init__(self, config, url = 'http://localhost:8080/messages'):
        super(HTTPComm, self).__init__()
        self.config = config
        # -1 means "never polled"; afterwards holds the highest serial seen.
        self.lastpoll = -1
        self.url = url
        # msgids generated by this instance, skipped on poll.
        self.own_msgids = set()
    def post_message(self, content):
        """POST *content* (a dict) as JSON; return True on 'Success'."""
        msgid = make_random_id()
        content['msgid'] = msgid
        self.own_msgids.add(msgid)
        LOGDEBUG( "----- POSTING MESSAGE ----")
        data = json.dumps(content)
        LOGDEBUG(data)
        u = urllib2.urlopen(self.url, data)
        return u.read() == 'Success'
    def poll_and_dispatch(self):
        """Fetch new messages from the board and hand them to all agents."""
        url = self.url
        if self.lastpoll == -1:
            # First poll: request recent history relative to the offer
            # expiry interval instead of an absolute serial number.
            url = url + "?from_timestamp_rel=%s" % self.config['offer_expiry_interval']
        else:
            url = url + '?from_serial=%s' % (self.lastpoll+1)
        print (url)
        u = urllib2.urlopen(url)
        resp = json.loads(u.read())
        for x in resp:
            # Track the highest serial seen so the next poll resumes there.
            if int(x.get('serial',0)) > self.lastpoll: self.lastpoll = int(x.get('serial',0))
            content = x.get('content',None)
            # Skip messages this instance posted itself.
            if content and not content.get('msgid', '') in self.own_msgids:
                for a in self.agents:
                    a.dispatch_message(content)
class ThreadedComm(CommBase):
    """Wraps an upstream comm object and pumps it from a background thread.

    Messages posted here are queued and forwarded by the worker thread;
    messages arriving upstream are funnelled into the receive queue and
    handed to agents when poll_and_dispatch() runs on the caller's thread.
    """

    class AgentProxy(object):
        """Agent registered with the upstream comm; forwards incoming
        messages into the owning ThreadedComm's receive queue."""

        def __init__(self, tc):
            self.tc = tc

        def dispatch_message(self, content):
            self.tc.receive_queue.put(content)

    def __init__(self, upstream_comm):
        super(ThreadedComm, self).__init__()
        self.upstream_comm = upstream_comm
        self.send_queue = Queue.Queue()
        self.receive_queue = Queue.Queue()
        self.comm_thread = CommThread(self, upstream_comm)
        upstream_comm.add_agent(self.AgentProxy(self))

    def post_message(self, content):
        """Queue *content* for the worker thread to post upstream."""
        self.send_queue.put(content)

    def poll_and_dispatch(self):
        """Drain the receive queue, handing each message to every agent."""
        while not self.receive_queue.empty():
            message = self.receive_queue.get()
            for agent in self.agents:
                agent.dispatch_message(message)

    def start(self):
        """Start the background comm thread."""
        self.comm_thread.start()

    def stop(self):
        """Signal the background thread to stop and wait for it to exit."""
        self.comm_thread.stop()
        self.comm_thread.join()
class CommThread(threading.Thread):
    """Worker thread pumping messages between a ThreadedComm and its
    upstream comm: drains the send queue, polls upstream, sleeps 1s,
    until stop() is called.
    """
    def __init__(self, threaded_comm, upstream_comm):
        threading.Thread.__init__(self)
        # Named _stop_event rather than _stop: assigning self._stop would
        # shadow threading.Thread's internal _stop() method on Python 3
        # and break Thread.join().
        self._stop_event = threading.Event()
        self.threaded_comm = threaded_comm
        self.upstream_comm = upstream_comm
    def run(self):
        send_queue = self.threaded_comm.send_queue
        while not self._stop_event.is_set():
            # Forward everything queued for sending, then poll upstream
            # (which dispatches replies into the receive queue via the
            # agent proxy).
            while not send_queue.empty():
                self.upstream_comm.post_message(send_queue.get())
            self.upstream_comm.poll_and_dispatch()
            time.sleep(1)
    def stop(self):
        """Ask the polling loop to exit after the current iteration."""
        self._stop_event.set()
|
from kivy.core.window import Window
from kivy.uix.textinput import TextInput
__author__ = 'woolly_sammoth'
from kivy.config import Config
# Window must be configured before the rest of kivy is imported:
# borderless, non-resizable, fullscreen kiosk-style display.
Config.set('graphics', 'borderless', '1')
Config.set('graphics', 'resizable', '0')
Config.set('graphics', 'fullscreen', '1')
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.uix.button import Button
from kivy.uix.screenmanager import ScreenManager
from kivy.uix.actionbar import ActionBar
from kivy.uix.screenmanager import SlideTransition
from kivy.uix.popup import Popup
from kivy.lang import Builder
from kivy.clock import Clock
import logging
import time
import utils
import os
import json
import sys
import screens.HomeScreen as HomeScreen
import overrides
class TopActionBar(ActionBar):
    """Action bar shown at the top of the Plunge window.

    Wires its buttons to the app's settings panel and toggles the home
    screen between its minimised and maximised layouts.
    """
    def __init__(self, PlungeApp, **kwargs):
        super(TopActionBar, self).__init__(**kwargs)
        self.PlungeApp = PlungeApp
        # Resolve the widgets declared in the kv file once, up front.
        self.top_action_view = self.ids.top_action_view.__self__
        self.top_action_previous = self.ids.top_action_previous.__self__
        self.top_settings_button = self.ids.top_settings_button.__self__
        self.top_size_button = self.ids.top_size_button.__self__
        self.standard_height = self.height
        self.top_action_previous.bind(on_release=self.PlungeApp.open_settings)
        self.top_settings_button.bind(on_release=self.PlungeApp.open_settings)

    def minimise(self, override=None):
        """Toggle between the minimised and maximised home screen layouts.

        :param override: optional localized label ("Minimise"/"Maximise")
            forcing the direction; when None the size button's current
            text decides.
        """
        # Renamed from ``min`` in the original, which shadowed the builtin.
        label = self.top_size_button.text if override is None else override
        if label == self.PlungeApp.get_string("Minimise"):
            self.top_size_button.text = self.PlungeApp.get_string("Maximise")
            self.top_action_previous.bind(on_release=self.minimise)
            self.PlungeApp.homeScreen.clear_widgets()
            self.PlungeApp.homeScreen.add_widget(self.PlungeApp.homeScreen.min_layout)
            self.PlungeApp.is_min = True
        else:
            self.top_size_button.text = self.PlungeApp.get_string("Minimise")
            self.top_action_previous.color = (1, 1, 1, 1)
            self.PlungeApp.homeScreen.clear_widgets()
            self.PlungeApp.homeScreen.add_widget(self.PlungeApp.homeScreen.max_layout)
            self.PlungeApp.is_min = False
class PlungeApp(App):
    """Main kivy application for the Plunge front end.

    Sets up logging and first-run data files, builds the root widget tree
    (top action bar + home screen) and manages settings and popups.
    """
    def __init__(self, **kwargs):
        super(PlungeApp, self).__init__(**kwargs)
        self.isPopup = False
        self.use_kivy_settings = False
        self.settings_cls = overrides.SettingsWithCloseButton
        self.utils = utils.utils(self)
        # All supported exchanges/currencies; the 'active' lists hold the
        # subset currently enabled via configuration.
        self.exchanges = ['ccedk', 'poloniex', 'bitcoincoid', 'bter', 'bittrex']
        self.active_exchanges = []
        self.currencies = ['btc', 'ltc', 'eur', 'usd', 'ppc']
        self.active_currencies = []
        self.client_running = False
        self.is_min = False
        # First run: make sure the log directory and the JSON data files
        # exist so later reads don't fail.
        if not os.path.isdir('logs'):
            os.makedirs('logs')
        if not os.path.isfile('api_keys.json'):
            api_keys = []
            with open('api_keys.json', 'a+') as api_file:
                api_file.write(json.dumps(api_keys))
                api_file.close()
        if not os.path.isfile('user_data.json'):
            user_data = {exchange: [] for exchange in self.exchanges}
            with open('user_data.json', 'a+') as user_file:
                user_file.write(json.dumps(user_data))
                user_file.close()
        self.first_run = True
        # DEBUG and above to a timestamped log file, INFO and above to the
        # console.
        self.logger = logging.getLogger('Plunge')
        self.logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler('logs/%s_%d.log' % ('Plunge', time.time()))
        fh.setLevel(logging.DEBUG)
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        formatter = logging.Formatter(fmt='%(asctime)s %(levelname)s: %(message)s', datefmt="%Y/%m/%d-%H:%M:%S")
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)
        return
    def log_uncaught_exceptions(self, exctype, value, tb):
        """Route otherwise-uncaught exceptions into the application log.

        Signature matches sys.excepthook; presumably installed as such
        elsewhere — TODO confirm.
        """
        self.logger.exception('\n===================\nException Caught\n\n%s\n===================\n' % value)
        return
    def build(self):
        """Build and return the root widget tree (kivy entry point)."""
        self.logger.info("Fetching language from config")
        self.language = self.config.get('standard', 'language')
        try:
            self.lang = json.load(open('res/json/languages/' + self.language.lower() + '.json', 'r'))
        except (ValueError, IOError) as e:
            self.logger.error('')
            self.logger.error('##################################################################')
            self.logger.error('')
            self.logger.error('There was an Error loading the ' + self.language + ' language file.')
            self.logger.error('')
            self.logger.error(str(e))
            self.logger.error('')
            self.logger.error('##################################################################')
            raise SystemExit
        self.root = BoxLayout(orientation='vertical')
        self.mainScreenManager = ScreenManager(transition=SlideTransition(direction='left'))
        Builder.load_file('screens/HomeScreen.kv')
        self.homeScreen = HomeScreen.HomeScreen(self)
        self.mainScreenManager.add_widget(self.homeScreen)
        self.topActionBar = TopActionBar(self)
        self.root.add_widget(self.topActionBar)
        self.root.add_widget(self.mainScreenManager)
        self.homeScreen.clear_widgets()
        # Start minimised or maximised according to the saved preference.
        if self.config.getint('standard', 'start_min') == 1:
            self.topActionBar.minimise(self.get_string("Minimise"))
            self.is_min = True
        else:
            self.topActionBar.minimise(self.get_string("Maximise"))
            self.is_min = False
        self.set_monitor()
        Window.fullscreen = 1
        if self.config.getint('standard', 'show_disclaimer') == 1:
            Clock.schedule_once(self.show_disclaimer, 1)
        return self.root
    def show_disclaimer(self, dt):
        """Show the disclaimer popup; Cancel exits the application."""
        content = BoxLayout(orientation='vertical')
        content.add_widget(TextInput(text=self.get_string('Disclaimer_Text'), size_hint=(1, 0.8), font_size=26,
                                     read_only=True, multiline=True, background_color=(0.13725, 0.12157, 0.12549, 1),
                                     foreground_color=(1, 1, 1, 1)))
        content.add_widget(BoxLayout(size_hint=(1, 0.1)))
        button_layout = BoxLayout(size_hint=(1, 0.1), spacing='20dp')
        ok_button = Button(text=self.get_string('OK'), size_hint=(None, None), size=(200, 50))
        cancel_button = Button(text=self.get_string('Cancel'), size_hint=(None, None), size=(200, 50))
        ok_button.bind(on_press=self.close_popup)
        cancel_button.bind(on_press=self.exit)
        button_layout.add_widget(ok_button)
        button_layout.add_widget(cancel_button)
        content.add_widget(button_layout)
        self.popup = Popup(title=self.get_string('Disclaimer'), content=content, auto_dismiss=False,
                           size_hint=(0.9, 0.9))
        self.popup.open()
        # Centre the buttons by padding the layout symmetrically.
        padding = ((self.popup.width - (ok_button.width + cancel_button.width)) / 2)
        button_layout.padding = (padding, 0, padding, 0)
        return
    def exit(self):
        """Terminate the process."""
        sys.exit()
    def set_monitor(self):
        """Show or hide the client run controls based on the monitor setting."""
        if self.is_min is False:
            self.homeScreen.max_layout.remove_widget(self.homeScreen.run_layout)
            if self.config.getint('standard', 'monitor') == 0:
                self.homeScreen.max_layout.add_widget(self.homeScreen.run_layout)
    def get_string(self, text):
        """Look up *text* in the loaded language file.

        Returns the placeholder 'Language Error' when the key is missing.
        """
        try:
            self.logger.debug("Getting string for %s" % text)
            return_string = self.lang[text]
        except (ValueError, KeyError):
            self.logger.error("No string found for %s in %s language file" % (text, self.language))
            return_string = 'Language Error'
        return return_string
    def build_config(self, config):
        """Provide defaults for every config section (kivy hook)."""
        config.setdefaults('server', {'host': "", 'port': 80})
        config.setdefaults('exchanges', {'ccedk': 0, 'poloniex': 0, 'bitcoincoid': 0, 'bter': 0, 'bittrex': 0})
        config.setdefaults('standard', {'language': 'English', 'period': 30, 'monitor': 1, 'start_min': 0, 'data': 0,
                                        'show_disclaimer': 0, 'smooth_line': 1})
        config.setdefaults('api_keys', {'bitcoincoid': '', 'bittrex': '', 'bter': '', 'ccedk': '', 'poloniex': ''})
    def build_settings(self, settings):
        """Register custom setting widgets and build the settings panel (kivy hook)."""
        settings.register_type('string', overrides.SettingStringFocus)
        settings.register_type('numeric', overrides.SettingNumericFocus)
        settings.register_type('string_exchange', overrides.SettingStringExchange)
        # Reflect the number of saved accounts per exchange into config.
        with open('user_data.json', 'a+') as user_data:
            try:
                saved_data = json.load(user_data)
            except ValueError:
                saved_data = []
            user_data.close()
        for exchange in self.exchanges:
            if exchange not in saved_data:
                self.config.set('exchanges', exchange, 0)
                continue
            self.config.set('exchanges', exchange, len(saved_data[exchange]))
        settings.add_json_panel(self.get_string('Plunge_Configuration'), self.config, 'settings/plunge.json')
    def on_config_change(self, config, section, key, value):
        """React to settings changes (kivy hook)."""
        if section == "standard":
            if key == "period":
                # Re-schedule the stats refresh with the new period.
                Clock.unschedule(self.homeScreen.get_stats)
                self.logger.info("Setting refresh Period to %s" % self.config.get('standard', 'period'))
                Clock.schedule_interval(self.homeScreen.get_stats, self.config.getint('standard', 'period'))
            if key == "monitor":
                self.set_monitor()
        # Refresh the exchange list and stats regardless of which key changed.
        self.active_exchanges = self.utils.get_active_exchanges()
        self.homeScreen.exchange_spinner.values = [self.get_string(exchange) for exchange in self.active_exchanges]
        self.homeScreen.set_exchange_spinners()
        self.homeScreen.get_stats(0)
    def show_popup(self, title, text):
        """Display a modal popup with *text* and a single OK button."""
        content = BoxLayout(orientation='vertical')
        content.add_widget(Label(text=text, size_hint=(1, 0.8), font_size=26))
        content.add_widget(BoxLayout(size_hint=(1, 0.1)))
        button_layout = BoxLayout(size_hint=(1, 0.1))
        button = Button(text=self.get_string('OK'), size_hint=(None, None), size=(250, 50))
        button.bind(on_press=self.close_popup)
        button_layout.add_widget(button)
        content.add_widget(button_layout)
        self.popup = Popup(title=title, content=content, auto_dismiss=False, size_hint=(0.9, 0.9))
        self.popup.open()
        padding = ((self.popup.width - button.width) / 2)
        button_layout.padding = (padding, 0, padding, 0)
        self.isPopup = True
        return
    def close_popup(self, instance, value=False):
        """Dismiss the currently-open popup (button callback)."""
        self.popup.dismiss()
        self.isPopup = False
        return
# Application entry point: construct and run the kivy app.
if __name__ == '__main__':
    Plunge = PlungeApp()
    Plunge.run()
|
class _Webhooks:
def __init__(self, client=None):
self.client = client
def create_webhook(self, params=None, **options):
"""Establish a webhook
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/webhooks"
return self.client.post(path, params, **options)
def delete_webhook(self, webhook_gid, params=None, **options):
"""Delete a webhook
:param str webhook_gid: (required) Globally unique identifier for the webhook.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/webhooks/{webhook_gid}".replace("{webhook_gid}", webhook_gid)
return self.client.delete(path, params, **options)
def get_webhook(self, webhook_gid, params=None, **options):
"""Get a webhook
:param str webhook_gid: (required) Globally unique identifier for the webhook.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/webhooks/{webhook_gid}".replace("{webhook_gid}", webhook_gid)
return self.client.get(path, params, **options)
def get_webhooks(self, params=None, **options):
"""Get multiple webhooks
:param Object params: Parameters for the request
- workspace {str}: (required) The workspace to query for webhooks in.
- resource {str}: Only return webhooks for the given resource.
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/webhooks"
return self.client.get_collection(path, params, **options)
def update_webhook(self, webhook_gid, params=None, **options):
"""Update a webhook
:param str webhook_gid: (required) Globally unique identifier for the webhook.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/webhooks/{webhook_gid}".replace("{webhook_gid}", webhook_gid)
return self.client.put(path, params, **options)
|
import sqlite3
import os
def init():
    """
    Create and seed the settings database.

    A no-op when settings.sqlite already exists; delete the local file to
    have the schema rebuilt from scratch.
    """
    if os.path.isfile("settings.sqlite"):
        return
    connection = sqlite3.connect('settings.sqlite')
    cursor = connection.cursor()
    cursor.execute("CREATE TABLE oauth (site, rate_remaining, rate_reset)")
    cursor.execute("INSERT INTO oauth VALUES ('reddit', 30, 60)")
    connection.commit()
    connection.close()
if __name__ == "__main__":
init()
|
from .proxy_only_resource import ProxyOnlyResource
class ReissueCertificateOrderRequest(ProxyOnlyResource):
    """Class representing certificate reissue request.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param key_size: Certificate Key Size.
    :type key_size: int
    :param delay_existing_revoke_in_hours: Delay in hours to revoke existing
     certificate after the new certificate is issued.
    :type delay_existing_revoke_in_hours: int
    :param csr: Csr to be used for re-key operation.
    :type csr: str
    :param is_private_key_external: Should we change the ASC type (from
     managed private key to external private key and vice versa).
    :type is_private_key_external: bool
    """

    # Server-populated fields that must never be sent in a request body.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # Maps Python attribute names to wire-format (JSON) paths and types
    # for msrest serialization/deserialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'key_size': {'key': 'properties.keySize', 'type': 'int'},
        'delay_existing_revoke_in_hours': {'key': 'properties.delayExistingRevokeInHours', 'type': 'int'},
        'csr': {'key': 'properties.csr', 'type': 'str'},
        'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
    }

    def __init__(self, kind=None, key_size=None, delay_existing_revoke_in_hours=None, csr=None, is_private_key_external=None):
        """Initialize the reissue request; ``kind`` is forwarded to the base resource."""
        super(ReissueCertificateOrderRequest, self).__init__(kind=kind)
        self.key_size = key_size
        self.delay_existing_revoke_in_hours = delay_existing_revoke_in_hours
        self.csr = csr
        self.is_private_key_external = is_private_key_external
|
"""
LendingClub2 Filter Module
"""
import collections
from abc import abstractmethod
from abc import ABC
from lendingclub2.error import LCError
class BorrowerTrait(ABC):
    """
    Abstract base class describing a trait a borrower may exhibit
    """
    @abstractmethod
    def matches(self, borrower):
        """
        Check whether *borrower* exhibits this trait.

        :param borrower: instance of :py:class:`~lendingclub2.loan.Borrower`.
        :returns: boolean (the base implementation matches everyone)
        """
        return True
class BorrowerEmployedTrait(BorrowerTrait):
    """
    Trait matching borrowers who report being employed
    """
    def matches(self, borrower):
        """
        Check whether *borrower* is employed.

        :param borrower: instance of :py:class:`~lendingclub2.loan.Borrower`.
        :returns: boolean
        """
        return borrower.employed
class Filter(ABC):
    """
    Abstract base class for loan filters
    """
    @abstractmethod
    def meet_requirement(self, loan):
        """
        Decide whether *loan* passes this filter.

        :param loan: instance of :py:class:`~lendingclub2.loan.Loan`.
        :returns: boolean (the base implementation accepts every loan)
        """
        return True
class FilterByApproved(Filter):
    """
    Keep only loans that have already been approved
    """
    def meet_requirement(self, loan):
        """
        Decide whether *loan* passes this filter.

        :param loan: instance of :py:class:`~lendingclub2.loan.Loan`.
        :returns: boolean
        """
        return loan.approved
class FilterByBorrowerTraits(Filter):
    """
    Keep loans whose borrower exhibits every requested trait
    """
    # pylint: disable=super-init-not-called
    def __init__(self, traits):
        """
        Constructor

        :param traits: a single
            :py:class:`~lendingclub2.filter.BorrowerTrait` or an iterable
            of them.
        :raises LCError: when *traits* is neither of the above.
        """
        if isinstance(traits, collections.abc.Iterable):
            self._specs = traits
        elif isinstance(traits, BorrowerTrait):
            self._specs = (traits, )
        else:
            fstr = "invalid traits type for {}".format(self.__class__.__name__)
            raise LCError(fstr)
    # pylint: enable=super-init-not-called

    def meet_requirement(self, loan):
        """
        Decide whether *loan* passes this filter.

        :param loan: instance of :py:class:`~lendingclub2.loan.Loan`.
        :returns: True when every trait matches the loan's borrower.
        """
        return all(spec.matches(loan.borrower) for spec in self._specs)
class FilterByFunded(Filter):
    """
    Keep loans funded to at least a given percentage
    """
    # pylint: disable=super-init-not-called
    def __init__(self, percentage):
        """
        Constructor.

        :param percentage: float (between 0 and 100 inclusive)
        :raises LCError: when *percentage* is out of range.
        """
        if percentage < 0.0 or 100.0 < percentage:
            fstr = "percentage needs to be between 0 and 100 (inclusive)"
            raise LCError(fstr)
        self._percentage = percentage
    # pylint: enable=super-init-not-called

    def meet_requirement(self, loan):
        """
        The loan must be funded to at least the configured percentage.

        :param loan: instance of :py:class:`~lendingclub2.loan.Loan`.
        :returns: boolean
        """
        return loan.percent_funded >= self._percentage
class FilterByGrade(Filter):
    """
    Keep only loans whose grade is in a configured whitelist.
    """
    # pylint: disable=super-init-not-called
    def __init__(self, grades=None):
        """
        Constructor

        :param grades: iterable of string (default: None, example: ('A', 'B'))
        """
        self._grades = grades
    # pylint: enable=super-init-not-called

    def meet_requirement(self, loan):
        """
        True only when a grade whitelist is configured and the loan's grade
        appears in it; with no whitelist every loan is rejected.

        :param loan: instance of :py:class:`~lendingclub2.loan.Loan`.
        :returns: boolean
        """
        return bool(self._grades) and loan.grade in self._grades
class FilterByTerm(Filter):
    """
    Filter by term
    """
    # pylint: disable=super-init-not-called
    def __init__(self, value=36, min_val=None, max_val=None):
        """
        Constructor. To filter by a specific value, set value to a number.
        To filter by a range, set value to None, and set min_val and max_val
        to integers.

        :param value: int - exact term value (default: 36)
        :param min_val: int - minimum term value (inclusive) (default: None)
        :param max_val: int - maximum term value (inclusive) (default: None)
        :raises LCError: when the argument combination is inconsistent
        """
        if value is not None and (min_val is not None or max_val is not None):
            fstr = "value and min_val, max_val are mutually exclusive"
            details = "value: {}".format(value)
            if min_val is not None:
                details += ", min_val: {}".format(min_val)
            if max_val is not None:
                details += ", max_val: {}".format(max_val)
            raise LCError(fstr, details=details)
        if min_val is not None and max_val is not None:
            # BUG FIX: a range is valid when min_val <= max_val.  The old
            # check raised when max_val > min_val, i.e. it rejected every
            # sensible range and accepted inverted ones.
            if max_val < min_val:
                fstr = "max_val cannot be smaller than min_val"
                raise LCError(fstr)
        elif value is None and (min_val is None or max_val is None):
            fstr = "invalid specification on the values"
            hint = "either value or min_val + max_val combo should be specified"
            raise LCError(fstr, hint=hint)
        self._value = value
        self._min_value = min_val
        self._max_value = max_val
    # pylint: enable=super-init-not-called

    def meet_requirement(self, loan):
        """
        Check if the loan is meeting the filter requirement

        :param loan: instance of :py:class:`~lendingclub2.loan.Loan`.
        :returns: boolean
        """
        if self._value is not None:
            return loan.term == self._value
        return self._min_value <= loan.term <= self._max_value
|
import _plotly_utils.basevalidators
class XValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``x`` attribute of ``splom.marker.colorbar``."""

    def __init__(self, plotly_name="x", parent_name="splom.marker.colorbar", **kwargs):
        # fill in defaults only when the caller has not overridden them;
        # the merged kwargs passed to the base class are identical to the
        # explicit pop-and-pass form
        kwargs.setdefault("edit_type", "colorbars")
        kwargs.setdefault("max", 3)
        kwargs.setdefault("min", -2)
        super(XValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
|
import re

# Cross-reference an OSM stops dump against a list of known-bad stops and
# write out the bad stop ids that do NOT appear in the OSM data.
# Fixed: files are now closed via context managers even on error, and the
# output file gets an explicit encoding instead of the platform default.
with open("stops.txt", 'r', encoding="utf-8") as osm, \
        open("BAD-STOPS.txt", 'r', encoding="utf-8") as bugs, \
        open("BUGS-NOT-IN-OSM.txt", 'w', encoding="utf-8") as still:
    # first space-separated token of each BAD-STOPS line is the stop id
    bugi = [line.split(' ')[0] for line in bugs]
    print(len(bugi))
    for line in osm:
        fields = line.split(',')
        # only CSV rows whose first column is a stop number are relevant
        if fields[0].isnumeric():
            stop_nr = fields[0]
            if stop_nr in bugi:
                bugi.remove(stop_nr)
    # whatever is left was never seen in the OSM dump
    for item in bugi:
        still.write(item)
        still.write("\n")
print(len(bugi))
|
import tornado.web
from datetime import date
from sqlalchemy.orm.exc import NoResultFound
from pyprint.handler import BaseHandler
from pyprint.models import User, Link, Post
class SignInHandler(BaseHandler):
    """Login page: renders the form and authenticates POSTed credentials."""

    def get(self):
        """Render the login form."""
        return self.background_render('login.html')

    def post(self):
        """Validate credentials; set the session cookie on success.

        Unknown users and wrong passwords both land back on /login.
        """
        username = self.get_argument('username', None)
        password = self.get_argument('password', None)
        if username and password:
            try:
                user = self.orm.query(User).filter(User.username == username).one()
            except NoResultFound:
                return self.redirect('/login')
            if user.check(password):
                self.set_secure_cookie('username', user.username)
                # BUG FIX: return here — previously control fell through to
                # the final redirect, issuing a second redirect on a request
                # that was already finished.
                return self.redirect('/kamisama/posts')
        return self.redirect('/login')
class ManagePostHandler(BaseHandler):
    """Post management: list all posts and delete individual ones."""

    @tornado.web.authenticated
    def get(self):
        """List (title, id) of every post, newest id first."""
        posts = self.orm.query(Post.title, Post.id).order_by(Post.id.desc()).all()
        self.background_render('posts.html', posts=posts)

    @tornado.web.authenticated
    def post(self):
        """Handle management actions; only 'del' is supported."""
        action = self.get_argument('action', None)
        if action != 'del':
            return
        post_id = self.get_argument('id', 0)
        if not post_id:
            return
        doomed = self.orm.query(Post).filter(Post.id == post_id).one()
        self.orm.delete(doomed)
        self.orm.commit()
class AddPostHandler(BaseHandler):
    """Create new blog posts."""

    @tornado.web.authenticated
    def get(self):
        """Render an empty post-creation form."""
        self.background_render('add_post.html', post=None)

    @tornado.web.authenticated
    def post(self):
        """Create a post unless the title is missing or already taken."""
        title = self.get_argument('title', None)
        content = self.get_argument('content', None)
        # NOTE(review): tags are parsed here but never persisted — confirm
        # whether tag support is unfinished or intentionally dropped.
        tags = self.get_argument('tags', '').strip().split(',')
        if not title or not content:
            return self.redirect('/kamisama/posts/add')
        duplicates = self.orm.query(Post.title).filter(Post.title == title).all()
        if duplicates:
            return self.write('<script>alert("Title has already existed");window.history.go(-1);</script>')
        self.orm.add(Post(title=title, content=content, created_time=date.today()))
        self.orm.commit()
        return self.redirect('/kamisama/posts')
class AddLinkHandler(BaseHandler):
    """Blogroll management: list, add and delete links."""

    @tornado.web.authenticated
    def get(self):
        """Show every stored link."""
        self.background_render('links.html', links=self.orm.query(Link).all())

    @tornado.web.authenticated
    def post(self):
        """Add ('add') or delete ('del') a link."""
        action = self.get_argument('action', None)
        if action == 'add':
            name = self.get_argument('name', '')
            url = self.get_argument('url', '')
            # only persist when both fields were supplied
            if name and url:
                self.orm.add(Link(name=name, url=url))
                self.orm.commit()
            return self.redirect('/kamisama/links')
        if action == 'del':
            link_id = self.get_argument('id', 0)
            if link_id:
                doomed = self.orm.query(Link).filter(Link.id == link_id).one()
                self.orm.delete(doomed)
                self.orm.commit()
|
"""convert the output file in a batch"""
import os
import os.path as op
import sys
import argparse
# Locate the PyFR installation through the PyFR environment variable and put
# it on sys.path so the converter modules can be imported below.
if os.getenv("PyFR") is None:
    raise EnvironmentError("Environmental variable PyFR is not set")
else:
    PyFRPath = os.getenv("PyFR")
    if PyFRPath not in sys.path:
        sys.path.append(PyFRPath)
try:
    import pyfr
    import pyfr.writers
except ImportError as err:
    # append a hint to the original error so the user knows where to look
    err.msg += "! Please check the path set in the environmental variable PyFR."
    raise
def parseArgs(args=None):
    """parse arguments

    Args:
        args: list of argument strings. Defaults to sys.argv[1:], resolved
            at call time (the old ``args=sys.argv[1:]`` default was captured
            once at definition time and went stale if sys.argv changed).

    Returns:
        parser.parse_args(args)
    """
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="2D Cavity Flow Post-Precessor")
    parser.add_argument(
        "casePath", metavar="path",
        help="The path to a PyFR case folder", type=str)
    parser.add_argument(
        "-s", "--soln-dir", metavar="soln-dir", dest="solnDir",
        help="The directory (under casePath) containing *.pyfrs files. " +
             "(Default = solutions)",
        type=str, default="solutions")
    parser.add_argument(
        "-v", "--vtu-dir", metavar="vtu-dir", dest="vtuDir",
        help="The directory (under casePath) in where *.vtu files will be. " +
             "If the folder does not exist, the script will create it. "
             "(Default = vtu)",
        type=str, default="vtu")
    parser.add_argument(
        "-m", "--mesh", metavar="mesh", dest="mesh",
        help="The mesh file required. " +
             "The default is to use the first-found .pyfrm file in the case " +
             "directory. If multiple .pyfrm files exist in the case directory, "
             "it is suggested to set the argument.",
        type=str, default=None)
    parser.add_argument(
        "-o", "--overwrite", dest="overwrite",
        help="Whether to overwrite the output files if they already exist.",
        action="store_true")
    parser.add_argument(
        "-d", "--degree", dest="degree",
        help="The level of mesh. If the solver use higher-order " +
             "polynomials, than it may be necessary to set larger degree.",
        type=int, default=0)
    return parser.parse_args(args)
def setup_dirs(args):
    """set up path to directories necessary

    Args:
        args: parsed arguments generated by parser.parse_args()

    Returns:
        argparse.Namespace object with full paths

    Raises:
        RuntimeError: when the solution directory, the given mesh file, or
            any .pyfrm file in the case directory cannot be found.
    """
    # set up the path to case directory
    args.casePath = os.path.abspath(args.casePath)
    # set up and check the path to the solution directory
    args.solnDir = args.casePath + "/" + args.solnDir
    if not op.isdir(args.solnDir):
        raise RuntimeError(
            "The path " + args.solnDir + " does not exist.")
    # set up the path for .pyfrm file
    if args.mesh is not None:
        args.mesh = args.casePath + "/" + args.mesh
        if not op.isfile(args.mesh):
            raise RuntimeError(
                "The input mesh file " + args.mesh + " does not exist.")
    else:
        for f in os.listdir(args.casePath):
            if f.endswith(".pyfrm"):
                args.mesh = args.casePath + "/" + f
                # BUG FIX: stop at the first match, as the help text
                # promises; previously the loop kept going and silently
                # picked the last .pyfrm found.
                break
        if args.mesh is None:
            raise RuntimeError(
                "Could not find any .pyfrm file in the case folder " +
                args.casePath)
    # set up and create the directory for .vtu files, if it does not exist
    args.vtuDir = args.casePath + "/" + args.vtuDir
    if not op.isdir(args.vtuDir):
        os.mkdir(args.vtuDir)
    return args
def get_pyfrs_list(pyfrsDirPath):
    """get list of file names that end with .pyfrs in pyfrsDirPath

    Args:
        pyfrsDirPath: path to the folder of .pyfrs files

    Returns:
        a list of file names

    Raises:
        RuntimeError: when the folder contains no .pyfrs file at all.
    """
    solution_files = [name for name in os.listdir(pyfrsDirPath)
                      if op.splitext(name)[1] == ".pyfrs"]
    if not solution_files:
        raise RuntimeError(
            "No .pyfrs file was found in the path " + pyfrsDirPath)
    return solution_files
def generate_vtu(vtuPath, pyfrsPath, pyfrsList, mesh, overwrite, degree):
    """generate .vtu files, if they do not exist

    Args:
        vtuPath: the path to folder of .vtu files
        pyfrsPath: the path to .pyfrs files
        pyfrsList: the list of .pyfrs which to be converted
        mesh: the .pyfrm file
        overwrite: whether to overwrite the .vtu file if it already exist
        degree: divisor/degree forwarded to the converter (``d`` parameter
            of output_vtu)
    """
    vtuList = [op.splitext(f)[0]+".vtu" for f in pyfrsList]
    for i, o in zip(pyfrsList, vtuList):
        ifile = op.join(pyfrsPath, i)
        ofile = op.join(vtuPath, o)
        if op.isfile(ofile) and not overwrite:
            print("Warning: " +
                  "the vtu file " + o + " exists " +
                  "and won't be overwrited because overwrite=False")
        else:
            # BUG FIX: 'degree' used to be passed positionally, which bound
            # it to output_vtu's 'g' (export gradients) parameter; pass it
            # explicitly as the degree/divisor keyword instead.
            output_vtu(mesh, ifile, ofile, d=degree)
def output_vtu(mesh, iFile, oFile, g=True, p="double", d=0):
    """convert a single .pyfrs file to .vtu file using PyFR's converter

    Args:
        mesh: mesh file (must end with .pyfrm)
        iFile: input file name (must end with .pyfrs)
        oFile: output file name (must end with .vtu)
        g: whether to export gradients
        p: precision, either "single" or "double"
        d: degree/divisor of the element (set this according to the order
           of the polynomial used by the solver)
    """
    # mimic the argparse namespace PyFR's writer factory expects
    writerArgs = argparse.Namespace(
        meshf=mesh, solnf=iFile, outf=oFile, precision=p,
        gradients=g, divisor=d)
    writer = pyfr.writers.get_writer_by_extn(".vtu", writerArgs)
    print("Converting " + iFile + " to " + oFile)
    writer.write_out()
def get_pyfrs_files(pyfrsDirPath):
    """Unimplemented placeholder; get_pyfrs_list provides the working logic."""
    pass
if __name__ == "__main__":
    # batch-convert every .pyfrs solution found in the case folder to .vtu
    args = parseArgs()
    args = setup_dirs(args)
    pyfrsList = get_pyfrs_list(args.solnDir)
    generate_vtu(
        args.vtuDir, args.solnDir, pyfrsList,
        args.mesh, args.overwrite, args.degree)
|
from flask import render_template, request, redirect, session, flash, url_for
from functools import wraps
from user import app
import services2db
import log2db
import users
import json
import time
import sys
import asset
# Python 2 hack: reload(sys) restores setdefaultencoding (deleted during
# interpreter start-up) so the process-wide default encoding can be switched
# to GB18030 for the Chinese strings used throughout this app.
reload(sys)
sys.setdefaultencoding('gb18030')
def login_required(func):
    """Decorator: bounce to the landing page unless a user is in the session."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        if session.get('user') is None:
            return redirect('/')
        return func(*args, **kwargs)
    return wrapper
def time_wrapper(func):
@wraps(func)
def wrapper():
print '计时开始:%s' % func.__name__
start = time.time()
rt = func()
print '计时结束:%s:%s' % (func.__name__,time.time() - start)
return rt
return wrapper
@app.route('/')
def index():
    """Landing page: send logged-in users to the user list, others to login."""
    if not session:
        return render_template('login.html')
    return redirect('/users/')
@app.route('/login/', methods=['POST', 'GET'])
def login():
    """Authenticate the submitted credentials and start a session."""
    # read credentials from the query string (GET) or the form body (POST)
    params = request.args if request.method == 'GET' else request.form
    username = params.get('username', '')
    password = params.get('password', '')
    if users.validate_login(username, password):
        print '登录成功'
        session['user'] = {'username': username}
        return redirect('/users/')
    else:
        # re-render the form with the typed username and an error message
        return render_template('login.html', username=username, error=u'用户名或密码错误')
@app.route('/user/logout/')
def logout():
    """Clear the whole session and return to the landing page."""
    session.clear()
    return redirect('/')
@app.route('/users/')
@login_required
def user_list():
    """Show the list of all registered users."""
    all_users = users.get_users()
    return render_template('users.html', user_list=all_users)
@app.route('/user/adder/', methods=['POST', 'GET'])
@login_required
def user_create():
    """Render the user-creation form."""
    return render_template('user_create.html')
@app.route('/user/add/', methods=['POST'])
def user_add():
    """Create a user from the posted form; returns a JSON status blob."""
    # NOTE(review): this endpoint is not wrapped with @login_required —
    # confirm unauthenticated user creation is intended.
    params = request.args if request.method == 'GET' else request.form
    username = params.get('username', '')
    password = params.get('password', '')
    age = params.get('age', '')
    # validate the submitted user fields first
    _is_ok, _error = users.validate_add_user(username, password, age)
    _status = None
    if _is_ok:
        if users.add_users(username=username, age=age, password=password):
            _status = '添加用户成功!'
        else:
            _status = '添加用户失败!'
    return json.dumps({'is_ok': _is_ok, 'status': _status, 'error': _error})
@app.route('/user/update/', methods=['POST', 'GET'])
@login_required
def user_update():
    """Render the user-edit form for the ``id``/``name`` query parameters."""
    uid = request.args.get('id', '')
    name = request.args.get('name', '')
    return render_template('user_update.html', uid=uid, username=name)
@app.route('/user/upd/', methods=['POST', 'GET'])
def user_upd():
    """Apply an update to an existing user; returns a JSON status blob."""
    _id = request.form.get('id', '')
    _mpassword = request.form.get('mpassword', '')
    _upassword = request.form.get('upassword', '')
    _age = request.form.get('age', '')
    # NOTE(review): this route lacks @login_required yet reads
    # session['user'] — a logged-out request raises KeyError; confirm.
    _is_ok, _error = users.validate_update_user(_id, session['user']['username'], _mpassword, _upassword, _age)
    _status = None
    if _is_ok:
        if users.update_users(_id, _upassword, _age):
            _status = '用户更新成功!'
        else:
            _status = '用户更新失败!'
    return json.dumps({'is_ok': _is_ok, 'status': _status, 'error': _error})
@app.route('/user/delete/')
@login_required
def delete_user():
    """Delete the user given by ``uid``; back to the list on success."""
    uid = request.args.get('uid', '')
    if not users.del_users(uid):
        return '用户删除失败'
    return redirect('/users/')
@app.route('/logs/', methods=['POST', 'GET'])
@time_wrapper
@login_required
def logs():
    """Either ingest an uploaded access log into the DB, or show top-N stats."""
    files = request.files.get('files')
    if files:
        # persist the upload locally, then bulk-load it into the database
        files.save('./access.txt')
        log_files = 'access.txt'
        if log2db.log2db(log_files=log_files, fetch=False):
            return redirect('/logs/')
        else:
            return '日志写入数据库失败!'
    else:
        # no upload: render the report; topn falls back to 10 on bad input
        topn = request.form.get('topn', 10)
        topn = int(topn) if str(topn).isdigit() else 10
        rt_list = log2db.log2db(topn=topn)  # fetch aggregated rows
        return render_template('logs.html', rt_list=rt_list)
@app.route('/services/', methods=['POST', 'GET'])
@login_required
def service_manage():
    """Add a service record when a url is posted, otherwise list them all."""
    params = request.args if request.method == 'GET' else request.form
    # NOTE(review): the literal string 'Null' is used as the "not provided"
    # sentinel — a real field value of 'Null' would be misinterpreted.
    _url = params.get('url', 'Null')
    _username = params.get('username', 'Null')
    _password = params.get('password', 'Null')
    _func = params.get('func', 'Null')
    # add a new service record
    if _url != 'Null':
        if services2db.add_service(_url, _username, _password, _func):
            return redirect('/services/')
        else:
            return '添加信息失败!'
    # otherwise list the existing records
    else:
        service_list = services2db.get_service()
        return render_template('services.html', service_list=service_list)
@app.route('/services/update/', methods=['POST'])
def update_service():
    """Update a stored service record; returns JSON {'is_ok': bool}."""
    params = request.args if request.method == 'GET' else request.form
    fields = [params.get(key, '') for key in ('id', 'url', 'username', 'password', 'func')]
    _id, _url, _username, _password, _func = fields
    ok = services2db.update_service(_url, _username, _password, _func, _id)
    return json.dumps({'is_ok': ok})
@app.route('/services/del/')
@login_required
def serviceDel():
    """Delete the service record given by ``id``; back to the list on success."""
    uid = request.args.get('id', '')
    if not services2db.servicedel(uid):
        return '域名管理信息删除失败!'
    return redirect('/services/')
@app.route('/assets/')
@login_required
def asset_list():
    """List all assets, swapping each raw idc_id for its display value."""
    rows = []
    for row in asset.get_list():
        idc_rows = asset.get_by_id(row.get('idc_id'))
        row['idc_id'] = idc_rows[0][1]
        rows.append(row)
    return render_template('assets.html', asset_list=rows)
@app.route('/asset/create/', methods=['POST', 'GET'])
@login_required
def asset_create():
    """Render the asset-creation form with the available IDC choices."""
    return render_template('asset_create.html', idcs=asset.get_idc())
@app.route('/asset/add/', methods=['POST', 'GET'])
@login_required
def asset_add():
    """Create an asset from the posted form fields; returns a JSON status."""
    fields = ['sn', 'ip', 'hostname', 'idc_id', 'purchase_date', 'warranty',
              'vendor', 'model', 'admin', 'business', 'os', 'cpu', 'ram', 'disk']
    # keys are underscore-prefixed, matching what asset.validate_create expects
    asset_dict = {'_' + name: request.form.get(name, '') for name in fields}
    _is_ok, _error = asset.validate_create(asset_dict)
    status = None
    if _is_ok:
        status = '添加资产成功!' if asset.create(asset_dict) else '添加资产失败!'
    return json.dumps({'is_ok': _is_ok, 'status': status, 'error': _error})
@app.route('/asset/delete/')
@login_required
def asset_del():
    """Delete the asset given by ``id``; back to the asset list on success."""
    uid = request.args.get('id', '')
    if not asset.delete(uid):
        return '资产删除失败!'
    return redirect('/assets/')
@app.route('/asset/update/', methods=['POST', 'GET'])
@login_required
def asset_update():
    """Render the asset-edit form for the asset whose id matches ``id``."""
    _id = request.args.get('id', '')
    _asset_list = []
    # NOTE(review): int(_id) raises ValueError when 'id' is missing or
    # non-numeric — confirm callers always pass a numeric id.
    for i in asset.get_list():
        if i.get('id') == int(_id):
            _asset_list.append(i)
    return render_template('asset_update.html', asset_list=_asset_list, idcs=asset.get_idc())
@app.route('/asset/upd/', methods=['POST', 'GET'])
@login_required
def asset_upd():
    """Update an existing asset from the posted form; returns a JSON status."""
    _id = request.form.get('id', '')
    fields = ['sn', 'ip', 'hostname', 'idc_id', 'purchase_date', 'warranty',
              'vendor', 'model', 'admin', 'business', 'os', 'cpu', 'ram', 'disk']
    # keys are underscore-prefixed, matching what asset.validate_update expects
    asset_dict = {'_' + name: request.form.get(name, '') for name in fields}
    _is_ok, _error = asset.validate_update(asset_dict)
    _status = None
    if _is_ok:
        _status = '更新资产成功!' if asset.update(asset_dict,_id) else '更新资产失败!'
    return json.dumps({'is_ok': _is_ok, 'status': _status, 'error': _error})
|
# Scrapy project identity and spider discovery.
BOT_NAME = 'saymedia'
SPIDER_MODULES = ['saymedia.spiders']
NEWSPIDER_MODULE = 'saymedia.spiders'
ROBOTSTXT_OBEY = True
# Downloader middlewares, keyed by priority (lower value runs closer to the engine).
DOWNLOADER_MIDDLEWARES = {
    'saymedia.middleware.ErrorConverterMiddleware': 1,
    # 'saymedia.middleware.MysqlDownloaderMiddleware': 1,
    'saymedia.middleware.OriginHostMiddleware': 2,
    'saymedia.middleware.TimerDownloaderMiddleware': 998,
}
# Project-specific report backends (not a Scrapy built-in setting).
SPIDER_REPORTS = {
    'xml': 'saymedia.reports.XmlReport',
    'firebase': 'saymedia.reports.FirebaseReport',
}
# Disable default HTTP-error filtering so error responses reach the spiders.
# NOTE(review): 'scrapy.contrib.*' is the legacy (pre-1.0) module path —
# confirm the installed Scrapy version still resolves it.
SPIDER_MIDDLEWARES = {
    'scrapy.contrib.spidermiddleware.httperror.HttpErrorMiddleware': None,
}
ITEM_PIPELINES = {
    'saymedia.pipelines.DatabaseWriterPipeline': 0,
}
USER_AGENT = 'SEO Spider (+http://www.saymedia.com)'
# Placeholder credentials; expected to be overridden by local_settings below.
DATABASE = {
    'USER': 'YOUR_DATABASE_USER',
    'PASS': 'YOUR_DATABASE_PASS',
}
FIREBASE_URL = "YOUR_FIREBASE_URL"
try:
    # Only used in development environments
    from .local_settings import *
except ImportError:
    pass
|
""" Contains functions to fetch info from different simple online APIs."""
import util.web
def urbandictionary_search(search):
    """
    Searches urbandictionary's API for a given search term.

    :param search: The search term str to search for.
    :return: definition str or None on no match or error.
    """
    if str(search).strip():
        # NOTE(review): `search` is interpolated without URL-encoding —
        # spaces/special characters produce a malformed URL; confirm
        # util.web.http_get performs quoting.
        urban_api_url = 'http://api.urbandictionary.com/v0/define?term=%s' % search
        response = util.web.http_get(url=urban_api_url, json=True)
        if response['json'] is not None:
            try:
                # first entry in the result list is taken as the definition
                definition = response['json']['list'][0]['definition']
                return definition.encode('ascii', 'ignore')
            except (KeyError, IndexError):
                return None
    else:
        return None
def weather_search(city):
    """
    Searches worldweatheronline's API for weather data for a given city.

    You must have a working API key to be able to use this function.

    :param city: The city str to search for.
    :return: weather data str or None on no match or error.
    """
    if str(city).strip():
        api_key = ''
        if not api_key:
            # NOTE(review): unlike the other helpers, a missing key returns
            # this message string rather than None — callers must handle both.
            return 'Missing api key.'
        else:
            weather_api_url = 'http://api.worldweatheronline.com/premium/v1/weather.ashx?key=%s&q=%s&format=json' % \
                              (api_key, city)
            response = util.web.http_get(url=weather_api_url, json=True)
            if response['json'] is not None:
                try:
                    pressure = response['json']['data']['current_condition'][0]['pressure']
                    temp_c = response['json']['data']['current_condition'][0]['temp_C']
                    temp_f = response['json']['data']['current_condition'][0]['temp_F']
                    query = response['json']['data']['request'][0]['query'].encode('ascii', 'ignore')
                    result = '%s. Temperature: %sC (%sF) Pressure: %s millibars' % (query, temp_c, temp_f, pressure)
                    return result
                except (IndexError, KeyError):
                    return None
    else:
        return None
def whois(ip):
    """
    Searches ip-api for information about a given IP.

    :param ip: The ip str to search for.
    :return: information str or None on error.
    """
    if str(ip).strip():
        url = 'http://ip-api.com/json/%s' % ip
        response = util.web.http_get(url=url, json=True)
        if response['json'] is not None:
            try:
                city = response['json']['city']
                country = response['json']['country']
                isp = response['json']['isp']
                org = response['json']['org']
                region = response['json']['regionName']
                zipcode = response['json']['zip']
                # NOTE(review): only KeyError is caught — if any field is
                # present but None, the concatenation below raises TypeError.
                info = country + ', ' + city + ', ' + region + ', Zipcode: ' + zipcode + ' Isp: ' + isp + '/' + org
                return info
            except KeyError:
                return None
    else:
        return None
def chuck_norris():
    """
    Fetch a random Chuck Norris joke/quote from the ICNDb API.

    :return: joke str or None on failure.
    """
    url = 'http://api.icndb.com/jokes/random/?escape=javascript'
    response = util.web.http_get(url=url, json=True)
    payload = response['json']
    if payload is not None and payload['type'] == 'success':
        return payload['value']['joke']
    return None
|
def main() -> None:
    # NOTE(review): this function does NOT compile as written — the bare
    # `while rev_A[i-1]` below has no condition/colon, and `AA` is never
    # defined (probably meant to be a saved copy of the original A).
    # It appears to be an unfinished contest-problem draft; annotating
    # only, since the intended algorithm cannot be reconstructed safely.
    N = int(input())
    A = [int(x) for x in input().split()]
    rev_A = A[:]
    left = [-1] * N
    left_cnt = [0] * N
    A_left = [A[0]]
    for i in range(1, N):
        if rev_A[i-1] < rev_A[i]:
            cnt = 0
            while rev_A[i-1]
            pass
        elif rev_A[i-1] < rev_A[i] * 4:
            now = i-1
            while left[now] != -1:
                now = left[now]
            left[i] = now
            A_left.append(A[i])
            left[i] = i-1
        else:
            pass
    ans = 10 ** 9
    for i in range(N + 1):
        A = AA[:]
        cnt = 0
        if i > 0:
            A[i-1] *= -2
            cnt += 1
            for j in reversed(range(i-1)):
                A[j] *= -2
                cnt += 1
                while A[j] > A[j+1]:
                    A[j] *= 4
                    cnt += 2
        for j in range(i+1, N):
            while A[j-1] > A[j]:
                A[j] *= 4
                cnt += 2
        print(i, cnt, A)
        ans = min(ans, cnt)
    print(ans)
# script entry point
if __name__ == '__main__':
    main()
|
import sys
from itertools import izip
from tacit import tac
# Regression check for tacit.tac (Python 2 script): read a file of ordered
# lines and verify tac() yields them in reverse order for several block sizes.
ordered_list_path = 'data/ordered.list'
expected_lines = open(ordered_list_path).read().splitlines(True)
expected_lines.reverse()
expected_count = len(expected_lines)
for bsize in range(10):
    count = 0
    # compare tac's output line-by-line against the reversed file
    for expected_line, line in izip(
        expected_lines,
        tac(ordered_list_path, bsize)
    ):
        if line != expected_line:
            print >> sys.stderr, 'error: bsize=%d, expected_line=%r, line=%r' % (bsize, expected_line, line)
        count += 1
    if bsize > 0:
        # every line of the file should have been produced
        if count != expected_count:
            print >> sys.stderr, 'error: bsize=%d, expected_count=%r, count=%r' % (bsize, expected_count, count)
    else:
        # bsize == 0 is expected to yield nothing at all
        if count != 0:
            print >> sys.stderr, 'error: bsize=%d, expected_count=0, count=%r' % (bsize, count)
|
# Expected parse result for the ETSI CAM-PDU-Descriptions ASN.1 module,
# used as a test fixture: imports from ITS-Container plus the CAM type tree.
EXPECTED = {
    'CAM-PDU-Descriptions': {
        'extensibility-implied': False,
        'imports': {
            'ITS-Container': ['AccelerationControl',
                              'CauseCode',
                              'CenDsrcTollingZone',
                              'ClosedLanes',
                              'Curvature',
                              'CurvatureCalculationMode',
                              'DangerousGoodsBasic',
                              'DriveDirection',
                              'EmbarkationStatus',
                              'EmergencyPriority',
                              'ExteriorLights',
                              'Heading',
                              'ItsPduHeader',
                              'LanePosition',
                              'LateralAcceleration',
                              'Latitude',
                              'LightBarSirenInUse',
                              'Longitude',
                              'LongitudinalAcceleration',
                              'PathHistory',
                              'PerformanceClass',
                              'ProtectedCommunicationZone',
                              'ProtectedCommunicationZonesRSU',
                              'PtActivation',
                              'ReferencePosition',
                              'RoadworksSubCauseCode',
                              'SpecialTransportType',
                              'Speed',
                              'SpeedLimit',
                              'StationType',
                              'SteeringWheelAngle',
                              'TrafficRule',
                              'VehicleLength',
                              'VehicleRole',
                              'VehicleWidth',
                              'VerticalAcceleration',
                              'YawRate']},
        'object-classes': {},
        'object-sets': {},
        'tags': 'AUTOMATIC',
        'types': {
            'BasicContainer': {
                'members': [{'name': 'stationType', 'type': 'StationType'},
                            {'name': 'referencePosition', 'type': 'ReferencePosition'},
                            None],
                'type': 'SEQUENCE'},
            'BasicVehicleContainerHighFrequency': {
                'members': [{'name': 'heading', 'type': 'Heading'},
                            {'name': 'speed', 'type': 'Speed'},
                            {'name': 'driveDirection', 'type': 'DriveDirection'},
                            {'name': 'vehicleLength', 'type': 'VehicleLength'},
                            {'name': 'vehicleWidth', 'type': 'VehicleWidth'},
                            {'name': 'longitudinalAcceleration', 'type': 'LongitudinalAcceleration'},
                            {'name': 'curvature', 'type': 'Curvature'},
                            {'name': 'curvatureCalculationMode', 'type': 'CurvatureCalculationMode'},
                            {'name': 'yawRate', 'type': 'YawRate'},
                            {'name': 'accelerationControl', 'optional': True, 'type': 'AccelerationControl'},
                            {'name': 'lanePosition', 'optional': True, 'type': 'LanePosition'},
                            {'name': 'steeringWheelAngle', 'optional': True, 'type': 'SteeringWheelAngle'},
                            {'name': 'lateralAcceleration', 'optional': True, 'type': 'LateralAcceleration'},
                            {'name': 'verticalAcceleration', 'optional': True, 'type': 'VerticalAcceleration'},
                            {'name': 'performanceClass', 'optional': True, 'type': 'PerformanceClass'},
                            {'name': 'cenDsrcTollingZone', 'optional': True, 'type': 'CenDsrcTollingZone'}],
                'type': 'SEQUENCE'},
            'BasicVehicleContainerLowFrequency': {
                'members': [{'name': 'vehicleRole', 'type': 'VehicleRole'},
                            {'name': 'exteriorLights', 'type': 'ExteriorLights'},
                            {'name': 'pathHistory', 'type': 'PathHistory'}],
                'type': 'SEQUENCE'},
            'CAM': {
                'members': [{'name': 'header', 'type': 'ItsPduHeader'},
                            {'name': 'cam', 'type': 'CoopAwareness'}],
                'type': 'SEQUENCE'},
            'CamParameters': {
                'members': [{'name': 'basicContainer', 'type': 'BasicContainer'},
                            {'name': 'highFrequencyContainer', 'type': 'HighFrequencyContainer'},
                            {'name': 'lowFrequencyContainer', 'optional': True, 'type': 'LowFrequencyContainer'},
                            {'name': 'specialVehicleContainer', 'optional': True, 'type': 'SpecialVehicleContainer'},
                            None],
                'type': 'SEQUENCE'},
            'CoopAwareness': {
                'members': [{'name': 'generationDeltaTime', 'type': 'GenerationDeltaTime'},
                            {'name': 'camParameters', 'type': 'CamParameters'}],
                'type': 'SEQUENCE'},
            'DangerousGoodsContainer': {
                'members': [{'name': 'dangerousGoodsBasic', 'type': 'DangerousGoodsBasic'}],
                'type': 'SEQUENCE'},
            'EmergencyContainer': {
                'members': [{'name': 'lightBarSirenInUse', 'type': 'LightBarSirenInUse'},
                            {'name': 'incidentIndication', 'optional': True, 'type': 'CauseCode'},
                            {'name': 'emergencyPriority', 'optional': True, 'type': 'EmergencyPriority'}],
                'type': 'SEQUENCE'},
            'GenerationDeltaTime': {
                'named-numbers': {'oneMilliSec': 1},
                'restricted-to': [(0, 65535)],
                'type': 'INTEGER'},
            'HighFrequencyContainer': {
                'members': [{'name': 'basicVehicleContainerHighFrequency', 'type': 'BasicVehicleContainerHighFrequency'},
                            {'name': 'rsuContainerHighFrequency', 'type': 'RSUContainerHighFrequency'},
                            None],
                'type': 'CHOICE'},
            'LowFrequencyContainer': {
                'members': [{'name': 'basicVehicleContainerLowFrequency', 'type': 'BasicVehicleContainerLowFrequency'},
                            None],
                'type': 'CHOICE'},
            'PublicTransportContainer': {
                'members': [{'name': 'embarkationStatus', 'type': 'EmbarkationStatus'},
                            {'name': 'ptActivation', 'optional': True, 'type': 'PtActivation'}],
                'type': 'SEQUENCE'},
            'RSUContainerHighFrequency': {
                'members': [{'name': 'protectedCommunicationZonesRSU', 'optional': True, 'type': 'ProtectedCommunicationZonesRSU'},
                            None],
                'type': 'SEQUENCE'},
            'RescueContainer': {
                'members': [{'name': 'lightBarSirenInUse', 'type': 'LightBarSirenInUse'}],
                'type': 'SEQUENCE'},
            'RoadWorksContainerBasic': {
                'members': [{'name': 'roadworksSubCauseCode', 'optional': True, 'type': 'RoadworksSubCauseCode'},
                            {'name': 'lightBarSirenInUse', 'type': 'LightBarSirenInUse'},
                            {'name': 'closedLanes', 'optional': True, 'type': 'ClosedLanes'}],
                'type': 'SEQUENCE'},
            'SafetyCarContainer': {
                'members': [{'name': 'lightBarSirenInUse', 'type': 'LightBarSirenInUse'},
                            {'name': 'incidentIndication', 'optional': True, 'type': 'CauseCode'},
                            {'name': 'trafficRule', 'optional': True, 'type': 'TrafficRule'},
                            {'name': 'speedLimit', 'optional': True, 'type': 'SpeedLimit'}],
                'type': 'SEQUENCE'},
            'SpecialTransportContainer': {
                'members': [{'name': 'specialTransportType', 'type': 'SpecialTransportType'},
                            {'name': 'lightBarSirenInUse', 'type': 'LightBarSirenInUse'}],
                'type': 'SEQUENCE'},
            'SpecialVehicleContainer': {
                'members': [{'name': 'publicTransportContainer', 'type': 'PublicTransportContainer'},
                            {'name': 'specialTransportContainer', 'type': 'SpecialTransportContainer'},
                            {'name': 'dangerousGoodsContainer', 'type': 'DangerousGoodsContainer'},
                            {'name': 'roadWorksContainerBasic', 'type': 'RoadWorksContainerBasic'},
                            {'name': 'rescueContainer', 'type': 'RescueContainer'},
                            {'name': 'emergencyContainer', 'type': 'EmergencyContainer'},
                            {'name': 'safetyCarContainer', 'type': 'SafetyCarContainer'},
                            None],
                'type': 'CHOICE'}},
        'values': {}}}
|
import random
# Python 2 one-liner: print a uniformly-random float between 10 and 30.
print random.uniform(10, 30)
|
from daisychain.steps.outputs.file import OutputFile
from daisychain.steps.input import InMemoryInput
import tempfile
import os
TEST_STRING = 'THIS OUTPUT STRING IS COMPLETELY UNIQUE AND WILL NOT EXIST EVER AGAIN'
def test_output_file():
    """OutputFile should write its input step's output into the target path."""
    tmp = tempfile.NamedTemporaryFile(dir=os.path.dirname(__file__), delete=False)
    tmp.close()
    try:
        step = OutputFile(path=tmp.name, input_step=InMemoryInput(output=TEST_STRING))
        assert step.pending
        step.run()
        assert step.finished
        with open(tmp.name) as handle:
            assert TEST_STRING in handle.read()
    finally:
        # always remove the scratch file, even when an assertion fails
        if os.path.exists(tmp.name):
            os.unlink(tmp.name)
def test_output_failure():
    """Writing into a nonexistent directory must raise, not silently pass."""
    step = OutputFile(path='/thisdirectoryreallydoesnotexist', input_step=InMemoryInput(output=TEST_STRING))
    assert step.pending
    try:
        step.run()
    except Exception:
        pass
    else:
        assert False, "Trying to output to a directory that doesn't exist should fail"
|
from twistedbot.plugins.base import PluginChatBase
from twistedbot.behavior_tree import InventorySelectActive
class Debug(PluginChatBase):
    """Chat plugin exposing ad-hoc debug subcommands to the bot operator."""

    @property
    def command_verb(self):
        return "debug"

    @property
    def help(self):
        return "help for debug"

    def command(self, sender, command, args):
        """Dispatch a debug subcommand.

        :param sender: originator of the chat message
        :param command: the verb that triggered this plugin ("debug")
        :param args: words following the verb; args[0] selects the subcommand
        """
        # BUG FIX: this method previously referenced the undefined name
        # 'subject', raising NameError on every invocation; it now uses the
        # 'args' parameter it actually receives.
        if args:
            what = args[0]
            if what == "inventoryselect":
                item_name = " ".join(args[1:])
                if not item_name:
                    self.send_chat_message("specify item")
                    return
                itemstack = InventorySelectActive.parse_parameters(item_name)
                if itemstack is not None:
                    self.world.bot.behavior_tree.new_command(InventorySelectActive, itemstack=itemstack)
                else:
                    self.send_chat_message("unknown item %s" % item_name)
            else:
                self.send_chat_message("unknown subject")
        else:
            self.send_chat_message("debug what?")


# module-level hook the plugin loader looks up
plugin = Debug
|
import math
import numpy
class NEB(object):
""" A Nudged Elastic Band implementation
This NEB implementation is based on http://dx.doi.org/10.1063/1.1323224
by Henkelman et al.
"""
    def __init__(self, path, k):
        """ Initialize the NEB with a predefined path and force
            constants between images.

            Typical use-case might look like:

            >>> m1 = molecule_from_xyz('m1.xyz')
            >>> m2 = molecule_from_xyz('m2.xyz')
            >>> apath = neb.interpolate.Linear(m1, m2, 10)
            >>> neb = neb.Neb(apath, 5.0)
            >>> eandg = somefunction
            >>> minimizer = neb.minimizers.SteepestDescent
            >>> neb.minimize(100, 0.01, eandg, minimizer)

            Arguments:
            path -- Path between two endpoints to be optimized
            k -- force constant in units of eV / A^2 between each bead in the path
        """
        self._path = path
        self._k = k
        # set bead energies, tangents, forces and spring forces to zero initially
        self._tangents = []
        self._beadgradients = []
        self._springforces = []
        self._forces = []
        self._energies = []
        # accounting variables (per-bead RMS force; -1.0 marks "not computed")
        self._grms = []
        for bead in path:
            # NOTE: this unpacking rebinds the local name 'k' (bead column
            # count), shadowing the force-constant argument; harmless since
            # self._k was stored above, but worth knowing when reading on.
            (n, k) = numpy.shape(bead.getCoordinates())
            self._tangents.append(numpy.zeros((n,k)))
            self._springforces.append(numpy.zeros((n,k)))
            self._beadgradients.append(numpy.zeros((n,k)))
            self._forces.append(numpy.zeros((n,k)))
            self._energies.append(0.0)
            self._grms.append(-1.0)
        # now we calculate the tangents and springforces
        # for the initial beads
        self._beadTangents()
        self._springForces()
def innerBeads(self):
""" an iterator over the inner beads """
n = self._path.getNumBeads()
for i, bead in enumerate(self._path):
if i > 0 and i < n-1:
yield bead
def innerBeadForces(self):
""" iterator over the forces of the inner beads """
for i, bead in enumerate(self.innerBeads(), start=1):
yield self._forces[i]
def _beadTangents(self):
""" Evaluates all tangents for all the inner beads """
for ibead, bead in enumerate(self.innerBeads(), start=1):
self._tangents[ibead] = self._beadTangent(bead, self._path[ibead-1], self._path[ibead+1])
def _beadTangent(self, ibead, mbead, pbead):
""" Calculates the tangent for ibead given the bead
indexed by i-1 (mbead) and i+1 (pbead).
Calculated according to eq 2 in http://dx.doi.org/10.1063/1.1323224
Arguments:
ibead -- the current (i'th) bead
mbead -- the (i-1)'th bead to use in the calculation of the tanget
pbead -- the (i+1)'th bead to use in the calculation of the tanget
Returns:
tanget of the bead
"""
Ri = ibead.getCoordinates()
Rm = mbead.getCoordinates()
Rp = pbead.getCoordinates()
vm = Ri - Rm
vp = Rp - Ri
ti = vm / numpy.linalg.norm(numpy.ravel(vm)) + vp / numpy.linalg.norm(numpy.ravel(vp));
return ti / numpy.linalg.norm(ti)
def _springForces(self):
""" Evaluates all spring forces between the beads """
for ibead, bead in enumerate(self.innerBeads(), start=1):
self._springforces[ibead] = self._springForce(bead, self._path[ibead-1], self._path[ibead+1], self._tangents[ibead])
def _springForce(self, ibead, mbead, pbead, tangent):
""" Calculates the spring force for ibead given the bead
indexed by i-1 (mbead) and i+1 (pbead).
"""
Ri = numpy.ravel(ibead.getCoordinates())
Rm = numpy.ravel(mbead.getCoordinates())
Rp = numpy.ravel(pbead.getCoordinates())
# old spring force calculated according
# to eq 5 in http://dx.doi.org/10.1063/1.1323224
r = numpy.dot(numpy.ravel(Rp + Rm - 2*Ri), numpy.ravel(tangent))
return self._k * r * tangent
def _beadGradients(self, func):
""" Calculates the forces on each bead using the func supplied
Calculated according to eq 4 in http://dx.doi.org/10.1063/1.1323224
Arguments:
bead -- the bead whose internal force is to be evaluated
func -- function that returns energy and forces for a bead
Returns:
e, g -- internal energy and force with component projected out
"""
if func is None:
return
for ibead, bead in enumerate(self.innerBeads(), start=1):
energy, gradient = func(bead)
tangent = self._tangents[ibead]
grad_perp = numpy.dot(numpy.ravel(gradient), numpy.ravel(tangent))
# calculate regular NEB bead gradient
self._beadgradients[ibead] = gradient - grad_perp * tangent
self._energies[ibead] = energy
def beadForces(self, func):
""" Calculates the forces of all 'inner' beads
Arguments:
func -- function that returns energy and forces for a bead
"""
self._beadTangents()
self._springForces()
self._beadGradients(func)
for ibead, bead in enumerate(self.innerBeads(), start=1):
bead_force = - self._beadgradients[ibead]
bead_force += self._springforces[ibead]
self._forces[ibead] = bead_force[:]
# Accounting and statistics
f = numpy.ravel(bead_force)
self._grms[ibead] = math.sqrt(f.dot(f)/len(f))
    def minimize(self, nsteps, opttol, func, minimizer):
        """ Minimizes the NEB path
        The minimization is carried out for nsteps to a tolerance
        of opttol with the energy and gradients calculated
        for each bead by func. The minimizer used is supplied
        via the minimizer argument.
        When the method ends, one can iterate over all the beads
        in this class to get the states and continue from there.
        NOTE: The opttol argument is not active
        Arguments:
        nsteps -- perform a maximum of nsteps steps
        opttol -- the maximum rms gradient shall be below this value
        func -- energy and gradient function
        minimizer -- a minimizer
        """
        # NOTE(review): range(1, nsteps) performs nsteps-1 iterations, not
        # nsteps as the docstring implies -- confirm intent.
        for i in range(1, nsteps):
            self.beadForces(func)
            # progress-report format strings (Python 2 prints below)
            s = "-"*89 + "\nI={0:3d} ENERGY={1:12.6f} G RMS={2:13.9f}"
            s2 = " E ="
            s3 = " F RMS ="
            s4 = " F SPR ="
            # highest energy among the inner beads this iteration
            maxerg = max(self._energies[1:-1])
            grms = 0.0
            grmsnrm = 0
            for ibead, bead in enumerate(self.innerBeads(), start=1):
                c = bead.getCoordinates()
                (n, k) = numpy.shape(c)
                # take one minimizer step for this bead and move it
                bead.setCoordinates(c + minimizer.step(self._energies[ibead], self._forces[ibead]))
                f = numpy.ravel(self._forces[ibead])
                # NOTE(review): this accumulates the per-bead norm, not the
                # squared norm, so sqrt(grms/grmsnrm) below is not a true
                # RMS over all coordinates -- confirm whether intended.
                grms += numpy.linalg.norm(f)
                grmsnrm += len(f)
                s2 += "{0:9.4f}".format(self._energies[ibead])
                s3 += "{0:9.4f}".format(self._grms[ibead])
                s4 += "{0:9.4f}".format(numpy.max(self._springforces[ibead]))
            print s.format(i, maxerg, math.sqrt(grms/grmsnrm))
            print s2
            print s3
            print s4
|
from django.db import models
from django.utils import timezone
from django.contrib import admin
from packages.generic import gmodels
from packages.generic.gmodels import content_file_name,content_file_name_same
from datetime import datetime
from django.core.validators import MaxValueValidator, MinValueValidator
from django.conf import settings as stg
import os
import Image as PImage
from embed_video.fields import EmbedVideoField
class Conference(models.Model):
    """Conference/venue a Publication can be associated with."""
    title = models.CharField(max_length=160)  # display name of the conference
    def __str__(self):
        return self.title
class Category(models.Model):
    """Grouping for projects/articles, manually ordered via ``position``."""
    title = models.CharField(max_length=160)
    # FIX: use an integer default; the original passed the string '0' to an
    # integer field and relied on implicit coercion.
    position = models.PositiveIntegerField(default=0)
    class Meta:
        verbose_name_plural = 'categories'
    def __str__(self):
        return self.title
    def save(self, *args, **kwargs):
        """Append to the end of the ordering when no position was given."""
        model = self.__class__
        if self.position is None:
            # Append after the current maximum position
            try:
                last = model.objects.order_by('-position')[0]
                self.position = last.position + 1
            except IndexError:
                # First row
                self.position = 0
        return super(Category, self).save(*args, **kwargs)
class Code(models.Model):
    """A code release: an uploaded file and/or a repository link."""
    title = models.CharField(max_length=250)
    file = models.FileField(upload_to=content_file_name_same,blank=True)  # optional uploaded archive
    git_link = models.URLField(blank=True)  # optional repository URL
    programming_language = models.CharField(max_length=40)
    details = models.TextField(max_length=600,blank=True)
    def __str__(self):
        return str(self.title)
def _current_year():
    """Return the current year.

    Used as a *callable* default so it is evaluated each time a row is
    created rather than once when the module is imported.
    """
    return datetime.now().year
class Publication(models.Model):
    """A published paper with optional file/link and a BibTeX entry."""
    title = models.CharField(max_length=160)
    authors = models.CharField(max_length=220,null=True)
    link = models.URLField(null=True, blank=True)
    file = models.FileField(upload_to=content_file_name_same, null=True, blank=True)
    short = models.CharField(max_length=50,null=True)  # short citation key
    bibtex = models.TextField(max_length=1000)
    conference_id = models.ForeignKey(Conference)
    # BUG FIX: the original used default=datetime.now().year, which is
    # evaluated once at import time, so a long-running server kept a stale
    # default. A callable default is evaluated per instance.
    # NOTE(review): the MaxValueValidator bound is still frozen at import
    # time; making it dynamic would need a custom validator function.
    year = models.PositiveIntegerField(default=_current_year,
                                       validators=[
                                           MaxValueValidator(datetime.now().year + 2),
                                           MinValueValidator(1800)
                                       ])
    def __str__(self):
        return self.title
    def fullStr(self):
        """Full citation-style string: authors, title, venue, year."""
        return "%s, \"%s\", %s, %s " % (self.authors, self.title, self.conference_id.title, self.year)
class Project(models.Model):
    """A portfolio project with images, videos and linked publications."""
    title = models.CharField(max_length=200)
    text = models.TextField(max_length=500)  # short summary
    mtext = models.TextField(max_length=1000,blank=True)  # extended text
    date_created = models.DateTimeField(default=timezone.now)
    category_id = models.ForeignKey(Category)
    # FIX: integer default (the original passed the string '0').
    position = models.PositiveIntegerField(default=0)
    # FIX: null=True has no effect on ManyToManyField (Django ignores it
    # and warns); blank=True keeps the relation optional in forms.
    publications = models.ManyToManyField(Publication, blank=True)
    class Meta:
        ordering = ('position',)
    def __str__(self):
        return self.title
    def save(self, *args, **kwargs):
        """Append to the end of the ordering when no position was given."""
        model = self.__class__
        if self.position is None:
            try:
                last = model.objects.order_by('-position')[0]
                self.position = last.position + 1
            except IndexError:
                # First row
                self.position = 0
        return super(Project, self).save(*args, **kwargs)
    def get_images(self):
        """All images attached to this project."""
        return list(ProjectImage.objects.filter(entity_id_id__exact=self.id))
    def getFirstImage(self):
        """First attached image, or the placeholder file name."""
        try:
            return ProjectImage.objects.filter(entity_id_id__exact=self.id)[0]
        except IndexError:
            return "default.png"
    def get_videos(self):
        """String form of every attached video link."""
        return [str(v) for v in ProjectVideo.objects.filter(entity_id_id__exact=self.id)]
    def get_publications(self):
        """All publications linked to this project."""
        return list(self.publications.all())
class ProjectImage(gmodels.GImage):
    """Image attached to a Project (image handling inherited from GImage)."""
    entity_id = models.ForeignKey(Project)  # owning project
class ProjectVideo(models.Model):
    """Embedded video attached to a Project."""
    entity_id = models.ForeignKey(Project)  # owning project
    link = EmbedVideoField(null=True) # same like models.URLField()
    def __str__(self):
        return str(self.link)
class ProjectImageInline(admin.TabularInline):
    """Inline admin editor for a project's images."""
    model = ProjectImage
    extra = 1
    readonly_fields = ('image_tag',)  # rendered thumbnail, not editable
class ProjectVideoInline(admin.TabularInline):
    """Inline admin editor for a project's embedded videos."""
    model = ProjectVideo
    extra = 1
class ProjectAdmin(admin.ModelAdmin):
    """Admin for Project with inline images/videos and drag reordering."""
    inlines = [ProjectImageInline, ProjectVideoInline, ]
    class Media:
        # JS enabling drag & drop reordering of the position column
        js = ('admin/js/listreorder.js',)
    # BUG FIX: list_display was assigned twice; only the last assignment
    # ever took effect, so the dead ('position',) assignment is removed.
    list_display = ('title', 'position',)
    list_display_links = ('title',)
    list_editable = ('position',)
class Article(models.Model):
    """An article with attached images, manually ordered via ``position``."""
    title = models.CharField(max_length=200)
    text = models.TextField(max_length=500)
    date_created = models.DateTimeField(default=timezone.now)
    category_id = models.ForeignKey(Category)
    # FIX: integer default (the original passed the string '0').
    position = models.PositiveIntegerField(default=0)
    class Meta:
        ordering = ('position',)
    def __str__(self):
        return self.title
    def save(self, *args, **kwargs):
        """Append to the end of the ordering when no position was given."""
        model = self.__class__
        if self.position is None:
            try:
                last = model.objects.order_by('-position')[0]
                self.position = last.position + 1
            except IndexError:
                # First row
                self.position = 0
        return super(Article, self).save(*args, **kwargs)
    def get_images(self):
        """All images attached to this article."""
        return list(ArticleImage.objects.filter(entity_id_id__exact=self.id))
    def getFirstImage(self):
        """First attached image, or the placeholder file name."""
        try:
            return ArticleImage.objects.filter(entity_id_id__exact=self.id)[0]
        except IndexError:
            return "default.png"
class ArticleImage(gmodels.GImage):
    """Image attached to an Article (image handling inherited from GImage)."""
    entity_id = models.ForeignKey(Article)  # owning article
class ArticleImageInline(admin.TabularInline):
    """Inline admin editor for an article's images."""
    model = ArticleImage
    extra = 1
    readonly_fields = ('image_tag',)  # rendered thumbnail, not editable
class ArticleAdmin(admin.ModelAdmin):
    """Admin for Article with inline images and drag reordering."""
    inlines = [ArticleImageInline, ]
    class Media:
        # JS enabling drag & drop reordering of the position column
        js = ('admin/js/listreorder.js',)
    # BUG FIX: list_display was assigned twice; only the last assignment
    # ever took effect, so the dead ('position',) assignment is removed.
    list_display = ('title', 'position',)
    list_display_links = ('title',)
    list_editable = ('position',)
class CodeSnippet(models.Model):
    """Short code snippet shown with a description and its language."""
    title = models.CharField(max_length=160)
    programming_language = models.CharField(max_length=120)
    text = models.TextField(max_length=500)  # description shown with the snippet
    code = models.TextField(max_length=1000)  # the snippet body itself
    date_created = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return self.title
|
import networkx as nx
import numpy as np
import scipy as sp
import csv
folder = 'data/'
file_names = ['yelp_data.csv', 'trip_advisor_data.csv']
yelp = False
yelp_dataset = list()
file_name = file_names[1]
if yelp == True:
file_name = file_names[0]
with open(folder+file_name, 'r') as f:
reader = csv.reader(f)
for line in reader:
yelp_dataset.append(line)
yelp_dataset.remove(yelp_dataset[0])
print len(yelp_dataset)
G = nx.Graph()
for y in yelp_dataset:
# add the nodes if they don't already exist
G.add_node(y[4], type='restaurant')
G.add_node(y[13], type='reviewer')
# add the edge between the reviewer and restaurant, weight is in different position in each file.
if yelp == True:
G.add_edge(y[13], y[4], weight=float(y[2]))
else:
G.add_edge(y[13], y[4], weight=float(y[1]))
print nx.number_of_nodes(G)
print nx.number_of_edges(G)
nx.write_gml(G, 'ta_graph.gml')
|
"""
@file HybridVAControl.py
@author Craig Rafter
@date 19/08/2016
class for hybrid vehicle-actuated (CAM + induction loop) signal control
"""
import signalControl, readJunctionData, traci
from math import atan2, degrees
import numpy as np
from collections import defaultdict
class HybridVAControl(signalControl.signalControl):
    """Hybrid vehicle-actuated junction controller.

    Combines CAM-style V2I packets from 'typeITSCV' vehicles (position,
    heading, velocity) with induction-loop detections to size each green
    stage between minGreenTime and maxGreenTime. SUMO times are handled
    in milliseconds throughout.
    """
    def __init__(self, junctionData, minGreenTime=10, maxGreenTime=60, scanRange=250, packetRate=0.2):
        # junctionData -- junction description (stages, control strings, offset)
        # minGreenTime/maxGreenTime -- bounds on a stage's green time [s]
        # scanRange -- radius around the junction scanned for vehicles [m]
        # packetRate -- CAM packet period [s]; stored below in ms
        super(HybridVAControl, self).__init__()
        self.junctionData = junctionData
        self.firstCalled = self.getCurrentSUMOtime()
        self.lastCalled = self.getCurrentSUMOtime()
        self.lastStageIndex = 0
        traci.trafficlights.setRedYellowGreenState(self.junctionData.id,
            self.junctionData.stages[self.lastStageIndex].controlString)
        self.packetRate = int(1000*packetRate)  # CAM period in ms
        self.transition = False
        self.CAMactive = False
        # dict[vehID] = [position, heading, velocity, Tdetect]
        self.newVehicleInfo = {}
        self.oldVehicleInfo = {}
        self.scanRange = scanRange
        self.jcnCtrlRegion = self._getJncCtrlRegion()
        # print(self.junctionData.id)
        # print(self.jcnCtrlRegion)
        self.controlledLanes = traci.trafficlights.getControlledLanes(self.junctionData.id)
        # dict[laneID] = [heading, shape]
        self.laneDetectionInfo = self._getIncomingLaneInfo()
        self.stageTime = 0.0
        self.minGreenTime = minGreenTime
        self.maxGreenTime = maxGreenTime
        self.secondsPerMeterTraffic = 0.45  # green seconds metered per metre of queue
        self.nearVehicleCatchDistance = 25  # [m] counts as "close to the stop line"
        self.extendTime = 1.0 # 5 m in 10 m/s (acceptable journey 1.333)
        self.laneInductors = self._getLaneInductors()
    def process(self):
        """Advance the controller one step: ingest CAM packets, size the
        current stage's green time, and trigger stage transitions when
        the stage time has elapsed."""
        # Packets sent on this step
        # packet delay + only get packets towards the end of the second
        if (not self.getCurrentSUMOtime() % self.packetRate) and (self.getCurrentSUMOtime() % 1000 > 500):
            self.CAMactive = True
            self._getCAMinfo()
        else:
            self.CAMactive = False
        # Update stage decisions
        # If there's no ITS enabled vehicles present use VA ctrl
        if len(self.oldVehicleInfo) < 1 and not self.getCurrentSUMOtime() % 1000:
            detectTimePerLane = self._getLaneDetectTime()
            #print(detectTimePerLane)
            # Set adaptive time limit
            #print(detectTimePerLane < 3)
            # extend the stage if any active lane saw a detection < 2 s ago
            if np.any(detectTimePerLane < 2):
                extend = self.extendTime
            else:
                extend = 0.0
            self.stageTime = max(self.stageTime + extend, self.minGreenTime)
            self.stageTime = min(self.stageTime, self.maxGreenTime)
        # If active and on the second, or transition then make stage descision
        elif (self.CAMactive and not self.getCurrentSUMOtime() % 1000) or self.transition:
            oncomingVeh = self._getOncomingVehicles()
            # If new stage get furthest from stop line whose velocity < 5% speed
            # limit and determine queue length
            if self.transition:
                furthestVeh = self._getFurthestStationaryVehicle(oncomingVeh)
                if furthestVeh[0] != '':
                    # size the green to discharge the standing queue
                    meteredTime = self.secondsPerMeterTraffic*furthestVeh[1]
                    self.stageTime = max(self.minGreenTime, meteredTime)
                    self.stageTime = min(self.stageTime, self.maxGreenTime)
                # If we're in this state this should never happen but just in case
                else:
                    self.stageTime = self.minGreenTime
            # If currently staging then extend time if there are vehicles close
            # to the stop line
            else:
                nearestVeh = self._getNearestVehicle(oncomingVeh)
                # If a vehicle detected
                # NOTE(review): nearestVeh is a [id, distance] list, so the
                # comparison nearestVeh != '' is always True; the branch
                # effectively tests only the distance.
                if nearestVeh != '' and nearestVeh[1] <= self.nearVehicleCatchDistance:
                    # use the measured speed when it is known (not the 1e6
                    # sentinel) and above the metering rate's implied speed
                    if (self.oldVehicleInfo[nearestVeh[0]][2] != 1e6
                        and self.oldVehicleInfo[nearestVeh[0]][2] > 1.0/self.secondsPerMeterTraffic):
                        meteredTime = nearestVeh[1]/self.oldVehicleInfo[nearestVeh[0]][2]
                    else:
                        meteredTime = self.secondsPerMeterTraffic*nearestVeh[1]
                    elapsedTime = 0.001*(self.getCurrentSUMOtime() - self.lastCalled)
                    Tremaining = self.stageTime - elapsedTime
                    self.stageTime = elapsedTime + max(meteredTime, Tremaining)
                    self.stageTime = min(self.stageTime, self.maxGreenTime)
                # no detectable near vehicle try inductive loop info
                # NOTE(review): this elif can never fire: distances <= the
                # catch distance are consumed by the if above and the list is
                # never == ''. The intent was probably
                # `nearestVeh[0] == '' or nearestVeh[1] > catchDistance`.
                elif nearestVeh == '' or nearestVeh[1] <= self.nearVehicleCatchDistance:
                    detectTimePerLane = self._getLaneDetectTime()
                    print('Loops2')
                    # Set adaptive time limit
                    if np.any(detectTimePerLane < 2):
                        extend = self.extendTime
                    else:
                        extend = 0.0
                    self.stageTime = max(self.stageTime + extend, self.minGreenTime)
                    self.stageTime = min(self.stageTime, self.maxGreenTime)
                else:
                    pass
        # process stage as normal
        else:
            pass
        # print(self.stageTime)
        self.transition = False
        if self.transitionObject.active:
            # If the transition object is active i.e. processing a transition
            pass
        elif (self.getCurrentSUMOtime() - self.firstCalled) < (self.junctionData.offset*1000):
            # Process offset first
            pass
        elif (self.getCurrentSUMOtime() - self.lastCalled) < self.stageTime*1000:
            # Before the period of the next stage
            pass
        else:
            # Not active, not in offset, stage not finished
            if len(self.junctionData.stages) != (self.lastStageIndex)+1:
                # Loop from final stage to first stage
                self.transitionObject.newTransition(
                    self.junctionData.id,
                    self.junctionData.stages[self.lastStageIndex].controlString,
                    self.junctionData.stages[self.lastStageIndex+1].controlString)
                self.lastStageIndex += 1
            else:
                # Proceed to next stage
                #print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
                self.transitionObject.newTransition(
                    self.junctionData.id,
                    self.junctionData.stages[self.lastStageIndex].controlString,
                    self.junctionData.stages[0].controlString)
                self.lastStageIndex = 0
            #print(0.001*(self.getCurrentSUMOtime() - self.lastCalled))
            self.lastCalled = self.getCurrentSUMOtime()
            self.transition = True
            self.stageTime = 0.0
        super(HybridVAControl, self).process()
    def _getHeading(self, currentLoc, prevLoc):
        """Heading [deg] of the movement prevLoc -> currentLoc, mapped to
        SUMO's convention (0 = north, clockwise); -1 when stationary."""
        dy = currentLoc[1] - prevLoc[1]
        dx = currentLoc[0] - prevLoc[0]
        if currentLoc[1] == prevLoc[1] and currentLoc[0] == prevLoc[0]:
            heading = -1
        else:
            if dy >= 0:
                heading = degrees(atan2(dy, dx))
            else:
                heading = 360 + degrees(atan2(dy, dx))
        # Map angle to make compatible with SUMO heading
        if 0 <= heading <= 90:
            heading = 90 - heading
        elif 90 < heading < 360:
            heading = 450 - heading
        return heading
    def _getJncCtrlRegion(self):
        """Axis-aligned bounding box around this junction, clipped so it
        does not overlap neighbouring signalled junctions (with a TOL
        margin)."""
        jncPosition = traci.junction.getPosition(self.junctionData.id)
        otherJuncPos = [traci.junction.getPosition(x) for x in traci.trafficlights.getIDList() if x != self.junctionData.id]
        ctrlRegion = {'N':jncPosition[1]+self.scanRange, 'S':jncPosition[1]-self.scanRange,
            'E':jncPosition[0]+self.scanRange, 'W':jncPosition[0]-self.scanRange}
        TOL = 10 # Exclusion region around junction boundary
        if otherJuncPos != []:
            for pos in otherJuncPos:
                dx = jncPosition[0] - pos[0]
                dy = jncPosition[1] - pos[1]
                # North/South Boundary
                if abs(dy) < self.scanRange:
                    if dy < -TOL:
                        ctrlRegion['N'] = min(pos[1] - TOL, ctrlRegion['N'])
                    elif dy > TOL:
                        ctrlRegion['S'] = max(pos[1] + TOL, ctrlRegion['S'])
                    else:
                        pass
                else:
                    pass
                # East/West Boundary
                if abs(dx) < self.scanRange:
                    if dx < -TOL:
                        ctrlRegion['E'] = min(pos[0] - TOL, ctrlRegion['E'])
                    elif dx > TOL:
                        ctrlRegion['W'] = max(pos[0] + TOL, ctrlRegion['W'])
                    else:
                        pass
                else:
                    pass
        return ctrlRegion
    def _isInRange(self, vehID):
        """True when the vehicle is within scanRange AND inside this
        junction's control region."""
        vehPosition = np.array(traci.vehicle.getPosition(vehID))
        jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
        distance = np.linalg.norm(vehPosition - jcnPosition)
        if (distance < self.scanRange
            and self.jcnCtrlRegion['W'] <= vehPosition[0] <= self.jcnCtrlRegion['E']
            and self.jcnCtrlRegion['S'] <= vehPosition[1] <= self.jcnCtrlRegion['N']):
            return True
        else:
            return False
    def _getVelocity(self, vehID, vehPosition, Tdetect):
        """Speed estimated from successive CAM positions; 1e6 is the
        'unknown' sentinel used on first detection."""
        if vehID in self.oldVehicleInfo.keys():
            oldX = np.array(self.oldVehicleInfo[vehID][0])
            newX = np.array(vehPosition)
            dx = np.linalg.norm(newX - oldX)
            dt = Tdetect - self.oldVehicleInfo[vehID][3]
            velocity = dx/dt
            return velocity
        else:
            return 1e6
    def _getCAMinfo(self):
        """Collect CAM data from all in-range ITS-enabled vehicles; the
        previous snapshot is kept in oldVehicleInfo for speed estimation."""
        self.oldVehicleInfo = self.newVehicleInfo.copy()
        self.newVehicleInfo = {}
        Tdetect = 0.001*self.getCurrentSUMOtime()  # detection time [s]
        for vehID in traci.vehicle.getIDList():
            if traci.vehicle.getTypeID(vehID) == 'typeITSCV' and self._isInRange(vehID):
                vehPosition = traci.vehicle.getPosition(vehID)
                vehHeading = traci.vehicle.getAngle(vehID)
                vehVelocity = self._getVelocity(vehID, vehPosition, Tdetect)
                self.newVehicleInfo[vehID] = [vehPosition, vehHeading, vehVelocity, Tdetect]
    def _getIncomingLaneInfo(self):
        """Per controlled lane: [approach heading, lane bounding corners]
        widened by the lane width on the dominant axis."""
        laneInfo = defaultdict(list)
        for lane in list(np.unique(np.array(self.controlledLanes))):
            shape = traci.lane.getShape(lane)
            width = traci.lane.getWidth(lane)
            heading = self._getHeading(shape[1], shape[0])
            dx = shape[0][0] - shape[1][0]
            dy = shape[0][1] - shape[1][1]
            if abs(dx) > abs(dy):
                roadBounds = ((shape[0][0], shape[0][1] + width), (shape[1][0], shape[1][1] - width))
            else:
                roadBounds = ((shape[0][0] + width, shape[0][1]), (shape[1][0] - width, shape[1][1]))
            laneInfo[lane] = [heading, roadBounds]
        return laneInfo
    def _getOncomingVehicles(self):
        """IDs of detected vehicles approaching on the currently green
        lanes (heading within 10 deg and inside the lane bounds)."""
        # Oncoming if (in active lane & heading matches oncoming heading &
        # is in lane bounds)
        activeLanes = self._getActiveLanes()
        vehicles = []
        for lane in activeLanes:
            for vehID in self.oldVehicleInfo.keys():
                # If on correct heading pm 10deg
                if (np.isclose(self.oldVehicleInfo[vehID][1], self.laneDetectionInfo[lane][0], atol=10)
                    # If in lane x bounds
                    and min(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0]) <
                    self.oldVehicleInfo[vehID][0][0] <
                    max(self.laneDetectionInfo[lane][1][0][0], self.laneDetectionInfo[lane][1][1][0])
                    # If in lane y bounds
                    and min(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1]) <
                    self.oldVehicleInfo[vehID][0][1] <
                    max(self.laneDetectionInfo[lane][1][0][1], self.laneDetectionInfo[lane][1][1][1])):
                    # Then append vehicle
                    vehicles.append(vehID)
        vehicles = list(np.unique(np.array(vehicles)))
        return vehicles
    def _getActiveLanes(self):
        """Unique lanes that currently have a green ('G') signal."""
        # Get the current control string to find the green lights
        stageCtrlString = self.junctionData.stages[self.lastStageIndex].controlString
        activeLanes = []
        for i, letter in enumerate(stageCtrlString):
            if letter == 'G':
                activeLanes.append(self.controlledLanes[i])
        # Get a list of the unique active lanes
        activeLanes = list(np.unique(np.array(activeLanes)))
        return activeLanes
    def _getLaneInductors(self):
        """Map each controlled lane to its (non-upstream) induction loops."""
        laneInductors = defaultdict(list)
        for loop in traci.inductionloop.getIDList():
            loopLane = traci.inductionloop.getLaneID(loop)
            if loopLane in self.controlledLanes and 'upstream' not in loop:
                laneInductors[loopLane].append(loop)
        return laneInductors
    def _getFurthestStationaryVehicle(self, vehIDs):
        """[vehID, distance] of the furthest detected vehicle moving at
        less than 5% of the speed limit; ['', -1] when none qualifies."""
        furthestID = ''
        maxDistance = -1
        jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
        speedLimit = traci.lane.getMaxSpeed(self._getActiveLanes()[0])
        for ID in vehIDs:
            vehPosition = np.array(self.oldVehicleInfo[ID][0])
            distance = np.linalg.norm(vehPosition - jcnPosition)
            if distance > maxDistance and self.oldVehicleInfo[ID][2] < 0.05*speedLimit:
                furthestID = ID
                maxDistance = distance
        return [furthestID, maxDistance]
    def _getNearestVehicle(self, vehIDs):
        """[vehID, distance] of the detected vehicle closest to the
        junction; ['', catchDistance+1] when none is within range."""
        nearestID = ''
        minDistance = self.nearVehicleCatchDistance + 1
        jcnPosition = np.array(traci.junction.getPosition(self.junctionData.id))
        for ID in vehIDs:
            vehPosition = np.array(self.oldVehicleInfo[ID][0])
            distance = np.linalg.norm(vehPosition - jcnPosition)
            if distance < minDistance:
                nearestID = ID
                minDistance = distance
        return [nearestID, minDistance]
    def _getLaneDetectTime(self):
        """Mean time since the last induction-loop detection for each
        active lane (NaN for a lane without loops, via np.mean([]))."""
        activeLanes = self._getActiveLanes()
        meanDetectTimePerLane = np.zeros(len(activeLanes))
        for i, lane in enumerate(activeLanes):
            detectTimes = []
            for loop in self.laneInductors[lane]:
                detectTimes.append(traci.inductionloop.getTimeSinceDetection(loop))
            meanDetectTimePerLane[i] = np.mean(detectTimes)
        return meanDetectTimePerLane
|
"""Module containing character feature extractors."""
import string
from unstyle.features.featregister import register_feat
@register_feat
def characterSpace(text):
    """Return the total number of characters in *text*, including
    whitespace and punctuation."""
    return len(text)
@register_feat
def letterSpace(text):
    """Return the total number of ASCII letters in *text* (excludes
    spaces, digits and punctuation).

    Membership against string.ascii_letters is identical to the original
    lowercase+uppercase concatenation; the manual counter loop is replaced
    by a single-pass generator expression.
    """
    return sum(1 for char in text if char in string.ascii_letters)
|
from django.db import models
from django.contrib.auth.models import User
from police.models import Stationdata
class general_diary(models.Model):
    """A general-diary (GD) entry filed by a citizen at a police station."""
    ref_id = models.CharField(max_length=40,unique=True,default="00000")  # public reference number
    firstname = models.CharField(max_length=20)
    lastname = models.CharField(max_length=20)
    mobile = models.CharField(max_length=10)  # 10-digit mobile number kept as text
    email = models.CharField(max_length=80)
    address = models.TextField()
    DOB = models.DateField('date of birth')
    # Two identity documents supplied by the filer: type and its number.
    idType_1 = models.CharField(max_length=10)
    idType_1_value = models.CharField(max_length=15)
    idType_2 = models.CharField(max_length=20)
    idType_2_value = models.CharField(max_length=15)
    StationCode = models.ForeignKey(Stationdata)  # station where the entry is filed
    Subject = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')
    detail = models.TextField()
    Time = models.DateTimeField('Occurence')  # when the incident occurred
    Place = models.CharField(max_length=200)
    Loss = models.CharField(max_length=200)  # description of any loss incurred
    OTP = models.BooleanField(default=False)  # whether the filer was OTP-verified
    def __str__(self):              # __unicode__ on Python 2
        return self.Subject
class Fir(models.Model):
    """A First Information Report (FIR); superset of general_diary with
    suspect and witness details."""
    ref_id = models.CharField(max_length=40,unique=True,default="00000")  # public reference number
    firstname = models.CharField(max_length=20)
    lastname = models.CharField(max_length=20)
    mobile = models.CharField(max_length=10)  # 10-digit mobile number kept as text
    email = models.CharField(max_length=80)
    address = models.TextField()
    DOB = models.DateField('date of birth')
    # Two identity documents supplied by the filer: type and its number.
    idType_1 = models.CharField(max_length=10)
    idType_1_value = models.CharField(max_length=15)
    idType_2 = models.CharField(max_length=20)
    idType_2_value = models.CharField(max_length=15)
    StationCode = models.ForeignKey(Stationdata)  # station where the FIR is filed
    Subject = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')
    detail = models.TextField()
    Suspect = models.CharField(max_length=500)  # free-text suspect description
    Time = models.DateTimeField('Occurence')  # when the incident occurred
    Place = models.CharField(max_length=200)
    Witness = models.CharField(max_length=500)  # free-text witness description
    Loss = models.CharField(max_length=200)  # description of any loss incurred
    OTP = models.BooleanField(default=False)  # whether the filer was OTP-verified
    def __str__(self):              # __unicode__ on Python 2
        return self.Subject
class lookup_table(models.Model):
    """Maps a hashed reference to its record kind (general diary or FIR)."""
    ref_id = models.CharField(max_length=40,unique=True,default="00000")  # public reference number
    hashmap = models.CharField(max_length=70,unique=True,default="00000")  # hash used for lookups
    # NOTE: 'type' shadows the builtin; harmless as a Django class attribute,
    # and renaming it would require a schema migration.
    type = models.CharField(max_length=5,default="GD")  # record kind, e.g. "GD"
    def __str__(self):              # __unicode__ on Python 2
        return self.hashmap
|
"""
pyexcel_xls
~~~~~~~~~~~~~~~~~~~
The lower level xls/xlsm file format handler using xlrd/xlwt
:copyright: (c) 2016-2017 by Onni Software Ltd
:license: New BSD License
"""
import sys
import math
import datetime
import xlrd
from xlwt import Workbook, XFStyle
from pyexcel_io.book import BookReader, BookWriter
from pyexcel_io.sheet import SheetReader, SheetWriter
PY2 = sys.version_info[0] == 2
# OrderedDict only entered the stdlib in Python 2.7; on 2.6 fall back to
# the backported 'ordereddict' package.
if PY2 and sys.version_info[1] < 7:
    from ordereddict import OrderedDict
else:
    from collections import OrderedDict
# Default xlwt number-format strings applied to date/time/datetime cells.
DEFAULT_DATE_FORMAT = "DD/MM/YY"
DEFAULT_TIME_FORMAT = "HH:MM:SS"
DEFAULT_DATETIME_FORMAT = "%s %s" % (DEFAULT_DATE_FORMAT, DEFAULT_TIME_FORMAT)
class XLSheet(SheetReader):
"""
xls, xlsx, xlsm sheet reader
Currently only support first sheet in the file
"""
def __init__(self, sheet, auto_detect_int=True, **keywords):
SheetReader.__init__(self, sheet, **keywords)
self.__auto_detect_int = auto_detect_int
@property
def name(self):
return self._native_sheet.name
def number_of_rows(self):
"""
Number of rows in the xls sheet
"""
return self._native_sheet.nrows
def number_of_columns(self):
"""
Number of columns in the xls sheet
"""
return self._native_sheet.ncols
def cell_value(self, row, column):
"""
Random access to the xls cells
"""
cell_type = self._native_sheet.cell_type(row, column)
value = self._native_sheet.cell_value(row, column)
if cell_type == xlrd.XL_CELL_DATE:
value = xldate_to_python_date(value)
elif cell_type == xlrd.XL_CELL_NUMBER and self.__auto_detect_int:
if is_integer_ok_for_xl_float(value):
value = int(value)
return value
class XLSBook(BookReader):
    """
    XLSBook reader

    It reads xls, xlsm, xlsx work book
    """
    def __init__(self):
        BookReader.__init__(self)
        self._file_content = None  # raw bytes when opened via open_content
    def open(self, file_name, **keywords):
        """Open a workbook from a file path."""
        BookReader.open(self, file_name, **keywords)
        self._get_params()
    def open_stream(self, file_stream, **keywords):
        """Open a workbook from a seekable binary stream."""
        BookReader.open_stream(self, file_stream, **keywords)
        self._get_params()
    def open_content(self, file_content, **keywords):
        """Open a workbook directly from in-memory bytes."""
        self._keywords = keywords
        self._file_content = file_content
        self._get_params()
    def close(self):
        # release xlrd's resources (file handles, mmapped data)
        if self._native_book:
            self._native_book.release_resources()
    def read_sheet_by_index(self, sheet_index):
        """Read a single sheet selected by zero-based index."""
        self._native_book = self._get_book(on_demand=True)
        sheet = self._native_book.sheet_by_index(sheet_index)
        return self.read_sheet(sheet)
    def read_sheet_by_name(self, sheet_name):
        """Read a single sheet selected by name.

        Raises:
            ValueError -- when no sheet of that name exists.
        """
        self._native_book = self._get_book(on_demand=True)
        try:
            sheet = self._native_book.sheet_by_name(sheet_name)
        except xlrd.XLRDError:
            raise ValueError("%s cannot be found" % sheet_name)
        return self.read_sheet(sheet)
    def read_all(self):
        """Read every sheet (hidden ones skipped unless configured
        otherwise); returns OrderedDict of sheet name -> rows."""
        result = OrderedDict()
        self._native_book = self._get_book()
        for sheet in self._native_book.sheets():
            # xlrd visibility != 0 means hidden or "very hidden"
            if self.skip_hidden_sheets and sheet.visibility != 0:
                continue
            data_dict = self.read_sheet(sheet)
            result.update(data_dict)
        return result
    def read_sheet(self, native_sheet):
        """Wrap an xlrd sheet and return {name: 2D array} for it."""
        sheet = XLSheet(native_sheet, **self._keywords)
        return {sheet.name: sheet.to_array()}
    def _get_book(self, on_demand=False):
        """Open the xlrd workbook from whichever source was provided
        (file name, stream, or raw content)."""
        if self._file_name:
            xls_book = xlrd.open_workbook(self._file_name, on_demand=on_demand)
        elif self._file_stream:
            # rewind in case the stream has been read before
            self._file_stream.seek(0)
            file_content = self._file_stream.read()
            xls_book = xlrd.open_workbook(
                None,
                file_contents=file_content,
                on_demand=on_demand
            )
        elif self._file_content is not None:
            xls_book = xlrd.open_workbook(
                None,
                file_contents=self._file_content,
                on_demand=on_demand
            )
        else:
            raise IOError("No valid file name or file content found.")
        return xls_book
    def _get_params(self):
        # user keyword overrides the default of skipping hidden sheets
        self.skip_hidden_sheets = self._keywords.get(
            'skip_hidden_sheets', True)
class XLSheetWriter(SheetWriter):
    """
    xls sheet writer
    """
    def set_sheet_name(self, name):
        """Create a sheet
        """
        self._native_sheet = self._native_book.add_sheet(name)
        self.current_row = 0
    def write_row(self, array):
        """
        write a row into the file

        datetime/date/time values are converted to xl serial numbers and
        written with a matching number-format style; all other values are
        written unchanged.
        """
        for i, value in enumerate(array):
            style = None
            tmp_array = []
            # NOTE: the datetime check must precede the date check because
            # datetime.datetime is a subclass of datetime.date.
            if isinstance(value, datetime.datetime):
                tmp_array = [
                    value.year, value.month, value.day,
                    value.hour, value.minute, value.second
                ]
                value = xlrd.xldate.xldate_from_datetime_tuple(tmp_array, 0)
                style = XFStyle()
                style.num_format_str = DEFAULT_DATETIME_FORMAT
            elif isinstance(value, datetime.date):
                tmp_array = [value.year, value.month, value.day]
                value = xlrd.xldate.xldate_from_date_tuple(tmp_array, 0)
                style = XFStyle()
                style.num_format_str = DEFAULT_DATE_FORMAT
            elif isinstance(value, datetime.time):
                tmp_array = [value.hour, value.minute, value.second]
                value = xlrd.xldate.xldate_from_time_tuple(tmp_array)
                style = XFStyle()
                style.num_format_str = DEFAULT_TIME_FORMAT
            if style:
                self._native_sheet.write(self.current_row, i, value, style)
            else:
                self._native_sheet.write(self.current_row, i, value)
        self.current_row += 1
class XLSWriter(BookWriter):
    """
    Book writer producing xls files via xlwt.
    """
    def __init__(self):
        BookWriter.__init__(self)
        self.work_book = None
    def open(self, file_name,
             encoding='ascii', style_compression=2, **keywords):
        """Prepare an xlwt Workbook that will be saved to file_name."""
        BookWriter.open(self, file_name, **keywords)
        self.work_book = Workbook(
            style_compression=style_compression, encoding=encoding)
    def create_sheet(self, name):
        """Return a sheet writer bound to a new sheet called *name*."""
        return XLSheetWriter(self.work_book, None, name)
    def close(self):
        """
        This call actually saves the file; nothing is written before it.
        """
        self.work_book.save(self._file_alike_object)
def is_integer_ok_for_xl_float(value):
    """Return True when the float carries no fractional part and can
    therefore safely be presented as an int."""
    return math.floor(value) == value
def xldate_to_python_date(value):
    """
    Convert an xl serial date into a datetime/date/time object.

    xlrd yields a (year, month, day, hour, minute, second) tuple; an
    all-zero tuple maps to the 1900-01-01 placeholder, a zero date part
    to a time, a zero time part to a date, and anything else to a full
    datetime.
    """
    year, month, day, hour, minute, second = xlrd.xldate_as_tuple(value, 0)
    if (year, month, day, hour, minute, second) == (0, 0, 0, 0, 0, 0):
        return datetime.datetime(1900, 1, 1, 0, 0, 0)
    if (year, month, day) == (0, 0, 0):
        return datetime.time(hour, minute, second)
    if (hour, minute, second) == (0, 0, 0):
        return datetime.date(year, month, day)
    return datetime.datetime(year, month, day, hour, minute, second)
# Registry entries consumed by pyexcel-io's plugin system.
_xls_reader_registry = {
    "file_type": "xls",
    "reader": XLSBook,
    "writer": XLSWriter,
    "stream_type": "binary",
    "mime_type": "application/vnd.ms-excel",
    "library": "pyexcel-xls"
}
# BUG FIX: the xlsm and xlsx MIME types were swapped in the original:
#   xlsm -> application/vnd.ms-excel.sheet.macroenabled.12
#   xlsx -> application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
_XLSX_MIME = (
    "application/" +
    "vnd.openxmlformats-officedocument.spreadsheetml.sheet")
_xlsm_registry = {
    "file_type": "xlsm",
    "reader": XLSBook,
    "stream_type": "binary",
    "mime_type": "application/vnd.ms-excel.sheet.macroenabled.12",
    "library": "pyexcel-xls"
}
_xlsx_registry = {
    "file_type": "xlsx",
    "reader": XLSBook,
    "stream_type": "binary",
    "mime_type": _XLSX_MIME,
    "library": "pyexcel-xls"
}
exports = (_xls_reader_registry,
           _xlsm_registry,
           _xlsx_registry)
|
import numpy as np
xdatcar = open('XDATCAR', 'r')
xyz = open('XDATCAR.xyz', 'w')
xyz_fract = open('XDATCAR_fract.xyz', 'w')
system = xdatcar.readline()
scale = float(xdatcar.readline().rstrip('\n'))
print scale
a1 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
a2 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
a3 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
print a1
print a2
print a3
lat_rec = open('lattice.vectors', 'w')
lat_rec.write(str(a1[0])+' '+str(a1[1])+' '+str(a1[2])+'\n')
lat_rec.write(str(a2[0])+' '+str(a2[1])+' '+str(a2[2])+'\n')
lat_rec.write(str(a3[0])+' '+str(a3[1])+' '+str(a3[2]))
lat_rec.close()
element_names = xdatcar.readline().rstrip('\n').split()
element_dict = {}
element_numbers = xdatcar.readline().rstrip('\n').split()
i = 0
N = 0
for t in range(len(element_names)):
element_dict[element_names[t]] = int(element_numbers[i])
N += int(element_numbers[i])
i += 1
print element_dict
while True:
line = xdatcar.readline()
if len(line) == 0:
break
xyz.write(str(N) + "\ncomment\n")
xyz_fract.write(str(N)+"\ncomment\n")
for el in element_names:
for i in range(element_dict[el]):
p = xdatcar.readline().rstrip('\n').split()
coords = np.array([ float(s) for s in p ])
cartesian_coords = coords[0]*a1+coords[1]*a2+coords[2]*a3
xyz.write(el+ " " + str(cartesian_coords[0])+ " " + str(cartesian_coords[1]) + " " + str(cartesian_coords[2]) +"\n")
xyz_fract.write(el+ " " + str(coords[0])+ " " + str(coords[1]) + " " + str(coords[2]) +"\n")
xdatcar.close()
xyz.close()
xyz_fract.close()
|
from setuptools import setup, find_packages
# Trove classifiers advertised on PyPI: MIT-licensed, OS-independent,
# declaring support for Python 2.6-2.7 and 3.2-3.4.
classifiers = [
    "License :: OSI Approved :: MIT License",
    "Natural Language :: English",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.2",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
]
# Package metadata for nanpy; it drives an Arduino over a serial link,
# hence the pyserial runtime dependency.
setup(
    name="nanpy",
    version="0.9.4",
    description="Use your Arduino board with Python",
    license="MIT",
    author="Andrea Stagi",
    author_email="stagi.andrea@gmail.com",
    url="http://github.com/nanpy/nanpy",
    packages=find_packages(),
    keywords="arduino library prototype",
    install_requires=[
        "pyserial",
    ],
    classifiers=classifiers,
    zip_safe=True,
)
|
from sitemaps import SiteMapRoot, SiteMap
from datetime import datetime
def generate_sitemap():
    """Build a two-entry sitemap and write it to sitemap.xml."""
    entries = (
        ("http://www.xxx.com", "weekly", 0.9),
        ("http://www.xxx.com/a1", "monthly", 0.7),
    )
    sitemap = SiteMap()
    for url, frequency, priority in entries:
        sitemap.append(url, datetime.now(), frequency, priority)
    sitemap.save_xml("sitemap.xml")
def generate_sitemap_gz():
    """Build a sitemap, serialize it, and register the gzipped copy
    under a root sitemap index."""
    sitemap = SiteMap()
    sitemap.append("http://www.xxx.com", datetime.now(), "weekly", 0.9)
    sitemap.append("http://www.xxx.com/a1", datetime.now(), "monthly", 0.7)
    # BUG FIX: `sitemap.to_string` was referenced without calling it, so
    # xml_string was bound to the method object rather than the serialized
    # XML (sibling API calls like save_xml() are methods — presumably
    # to_string is too; confirm against the sitemaps package).
    xml_string = sitemap.to_string()
    sitemap_root = SiteMapRoot("http://www.new.com", "root_sitemap.xml", False)
    sitemap_root.append("sitemap1.xml.gz", xml_string)
    sitemap_root.save_xml()
# Script entry point: regenerate both the plain and the gzipped sitemaps.
if __name__ == "__main__":
    generate_sitemap()
    generate_sitemap_gz()
|
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x04\x31\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x00\xdd\x00\x00\x00\xdd\x01\
\x70\x53\xa2\x07\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\xdd\x50\x4c\x54\
\x45\xff\xff\xff\xff\xff\x00\xff\xff\x80\xff\xff\xff\xff\xcc\x66\
\xff\xdb\x49\xff\xbf\x60\xff\xb3\x4d\xff\xd1\x5d\xff\xc4\x4e\xed\
\xed\xed\xff\xb6\x49\xff\xc8\x5b\xdf\xef\xef\xff\xcf\x50\xff\xd2\
\x5a\xf2\xbf\x40\xf4\xbf\x40\xe2\xeb\xeb\xff\xd0\x55\xe4\xed\xed\
\xe5\xe5\xed\xff\xca\x58\xff\xcc\x55\xf8\xb8\x40\xff\xcd\x55\xff\
\xcc\x53\xe7\xe7\xed\xff\xcc\x55\xe3\xe9\xee\xf4\xb8\x41\xff\xce\
\x51\xff\xcc\x53\xf6\xbc\x43\xf6\xba\x41\xff\xce\x55\xf7\xbb\x44\
\xf7\xbc\x43\xf8\xbc\x43\xff\xcd\x55\xe7\xea\xee\xf5\xbd\x42\xe7\
\xea\xee\xf5\xb9\x42\xf6\xbb\x41\xf6\xbb\x41\xf6\xbb\x41\xe5\xea\
\xed\xe6\xe8\xed\xf5\xbc\x41\xf5\xba\x42\xf6\xbb\x42\xff\xce\x54\
\xe7\xe9\xed\xf5\xbb\x42\xff\xce\x54\xf6\xbb\x42\xf6\xbc\x42\xe8\
\xe9\xed\xf6\xbc\x42\xff\xcd\x53\xe5\xe9\xec\xf5\xba\x41\xe6\xe9\
\xec\xff\xce\x54\xe7\xea\xed\xff\xce\x53\xe7\xea\xef\xf6\xbc\x42\
\xff\xce\x54\xf7\xbc\x43\xf7\xbb\x43\xe7\xe9\xed\xe6\xe8\xec\xff\
\xcd\x55\xf7\xbd\x42\xff\xcf\x54\xe7\xe9\xee\xf6\xbb\x43\xff\xce\
\x55\xff\xcd\x55\xe6\xe9\xed\xf6\xbc\x42\xe7\xe9\xee\xe6\xe9\xed\
\xe7\xea\xed\xff\xce\x54\xe7\xe9\xed\xf6\xbc\x42\xe6\xe9\xed\xf6\
\xbb\x42\xf6\xbb\x42\xff\xce\x54\xf7\xbb\x43\xe7\xe9\xed\xe6\xe9\
\xed\xf6\xbb\x42\xf6\xbb\x42\xe8\xeb\xf0\xe8\xea\xee\xe8\xeb\xef\
\xe7\xea\xee\xeb\xed\xf1\xf8\xbe\x45\xf7\xbd\x44\xe7\xea\xee\xeb\
\xee\xf1\xf6\xbb\x43\xe6\xe9\xed\xea\xed\xf0\xf6\xbb\x42\xf7\xbe\
\x44\xf8\xc0\x46\xc6\xca\xce\xd3\xd6\xdb\xda\x44\x53\xdb\x46\x55\
\xdb\x4b\x5a\xdb\x4e\x5c\xdc\x48\x57\xdf\x65\x72\xe6\x93\x9d\xe6\
\x95\x9e\xe6\xe9\xed\xe7\x4f\x5e\xeb\xed\xf1\xeb\xee\xf1\xec\xb9\
\xc0\xec\xbd\xc4\xec\xef\xf2\xed\xc4\xcb\xed\xf0\xf3\xee\xf0\xf3\
\xee\xf1\xf4\xef\xce\xd4\xef\xf1\xf5\xf0\xd6\xdb\xf0\xf2\xf5\xf0\
\xf2\xf6\xf1\xf3\xf7\xf1\xf4\xf7\xf2\xe3\xe7\xf4\xef\xf2\xf4\xf6\
\xf9\xf5\xf5\xf8\xf5\xf6\xf9\xf5\xf7\xfa\xf6\xbb\x42\xf9\xc0\x47\
\xf9\xc1\x48\xf9\xc2\x49\xfa\xc3\x49\xfb\xc5\x4b\xfb\xc6\x4d\xfc\
\xc8\x4e\xfd\xc9\x4f\xfd\xca\x50\xfd\xca\x51\xff\xce\x54\x04\x23\
\x9d\x11\x00\x00\x00\x71\x74\x52\x4e\x53\x00\x01\x02\x02\x05\x07\
\x08\x0a\x0b\x0d\x0e\x0e\x0e\x10\x10\x11\x14\x18\x1a\x1b\x1c\x1d\
\x1d\x1e\x24\x24\x28\x2b\x2d\x2e\x2f\x2f\x37\x39\x3b\x3f\x40\x41\
\x45\x48\x49\x49\x4a\x4d\x52\x53\x56\x62\x64\x66\x68\x6d\x7d\x7e\
\x80\x83\x8b\x8c\x8e\x90\x90\x95\xa0\xa5\xa6\xa8\xa8\xaa\xae\xb1\
\xb6\xb8\xbd\xbe\xbe\xc0\xc3\xc8\xcb\xcd\xd3\xd8\xd8\xdb\xde\xe6\
\xe7\xe8\xe8\xe9\xea\xed\xf0\xf1\xf2\xf5\xf5\xf7\xf8\xfa\xfa\xfb\
\xfb\xfb\xfc\xfd\xfd\xfd\xfe\xfe\xfe\xfe\xfe\x22\xeb\xe2\xf5\x00\
\x00\x01\x49\x49\x44\x41\x54\x38\xcb\x63\x60\x00\x03\x59\x8f\xf8\
\x40\x03\x06\xdc\x40\x24\x2e\xa9\x35\x2d\x13\x8f\x0a\xd3\xd4\xfe\
\x49\x93\xd2\x02\x71\x2b\xb0\xcf\x9a\x34\x69\x52\x67\x0e\x6e\x05\
\x8e\x75\x40\x05\x5d\xd5\x94\x29\xe8\x25\xa4\xa0\xac\x89\x80\x82\
\xe2\x7a\x84\x02\x01\x42\x0a\xa2\xd5\x70\x2b\xe0\xe7\x03\x12\x09\
\xda\x0c\x0c\x2c\xc2\xd8\x15\x98\x87\x49\x32\x30\x48\x30\x30\x30\
\xba\x06\x60\x57\xc0\xe3\xa4\xae\xe8\x16\xe1\x67\xcc\xe6\xa5\x80\
\xa2\xa0\xa8\xa5\xb8\xbe\x10\xe2\x06\xbd\xbc\xfc\x19\x53\x26\xbb\
\xa0\xb9\x01\xa1\x80\x3d\x76\xea\xbc\x79\xf3\x66\x4d\xd6\xc4\xea\
\x48\x39\x3b\x43\xa5\xc9\x73\x80\x0a\xe6\x65\x58\x00\xd9\x98\x0a\
\x54\xdd\xcd\x54\x26\xcf\x05\x29\x48\xb7\x06\xb2\xb1\x86\x03\x77\
\xe2\x74\xa0\xfc\xec\xc9\xba\x38\x03\xca\x68\x72\xc1\xcc\x69\xd9\
\xde\x8c\x38\x14\xb0\xda\x28\xeb\x04\x65\x47\x59\x72\x3a\x48\x61\
\x57\x60\x12\x23\xc3\xc0\x20\xc8\xc0\xc0\xe4\xe3\x8f\x5d\x81\x98\
\x38\x34\x2e\x38\xe4\xf1\x44\x16\x28\x2e\xf0\xc6\xa6\x04\xba\x3c\
\x6f\x64\x23\x50\x41\x77\xb5\x16\xae\xf4\x62\x9b\xd3\xdf\x51\x5c\
\x39\x21\x37\x9c\x19\x87\x82\x90\xda\xbe\xd2\x92\xe2\x86\xae\x6a\
\x69\x1c\x0a\xe2\xdb\xdb\x8a\x6b\xca\xab\xfa\xab\x35\x70\x28\xf0\
\x6d\x9c\x58\x51\x5c\xda\xd1\x5d\x2d\x84\x43\x81\x7e\x66\xcf\xc4\
\xb6\xbe\xfe\x14\x4f\x9c\xa9\xda\x39\xb3\xb1\xbd\x39\x39\x54\x14\
\x77\xba\xd7\xf7\x8d\x0f\xb6\xe2\xc2\x26\x03\x00\x8f\xb4\x8c\xb5\
\x70\xac\xb2\xb2\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
\x00\x00\x02\x24\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x07\xa3\x00\x00\x07\xa3\x01\
\x30\x2f\xb2\xc5\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\xa2\x50\x4c\x54\
\x45\xff\xff\xff\x28\x38\x4d\x28\x37\x4d\x87\x39\x4f\x6b\x39\x4e\
\xfc\x38\x52\xef\xd5\xc9\x78\x38\x4e\x8a\x39\x4f\x94\x39\x4e\xfc\
\x39\x52\xef\xd8\xcd\xcd\x39\x51\xd6\x39\x51\xed\xea\xda\xfc\x39\
\x52\x26\xb9\x9a\x28\x38\x4c\x4f\xc2\xa6\x8c\xca\xb0\x91\xca\xb1\
\x9c\xcb\xb1\x9f\xcb\xb1\xc5\xca\xb0\xca\xca\xae\xcb\xca\xaf\xce\
\xc9\xae\xce\xc9\xaf\xcf\xca\xaf\xcf\xca\xb0\xd0\xcb\xb1\xd1\xcc\
\xb2\xd2\xcd\xb3\xd7\xd2\xba\xd7\xd3\xbb\xd8\xd3\xbc\xd8\xd4\xbc\
\xd9\xd4\xbd\xd9\xd5\xbd\xd9\xd5\xbe\xda\xd5\xbe\xda\xd6\xbf\xdb\
\xd6\xc0\xdb\xd7\xc1\xdc\xd8\xc3\xde\xda\xc5\xe0\xdc\xc8\xe5\xe2\
\xcf\xe6\xe3\xd0\xe7\xe4\xd2\xe9\xe6\xd4\xeb\xe8\xd7\xed\xea\xda\
\xfc\x39\x52\x19\x34\xb7\x9d\x00\x00\x00\x10\x74\x52\x4e\x53\x00\
\x60\x78\xae\xb0\xb5\xbd\xc0\xc2\xc4\xc8\xcc\xd8\xdf\xe8\xe8\x79\
\xe2\xaf\xf9\x00\x00\x00\xd8\x49\x44\x41\x54\x38\x4f\xad\x90\xc9\
\x0e\x82\x40\x10\x05\x01\x51\x16\x95\xcd\x05\x97\x01\x14\x11\x50\
\x76\x95\xff\xff\x35\x9f\xc6\x43\x93\x00\x26\x84\x3a\xd4\xcc\x74\
\x2a\xa1\x03\xc7\x7d\x10\x4c\xd3\xe4\x39\x02\x8f\x81\x40\xde\xbd\
\xc1\x54\x55\x55\x11\xef\x89\x4a\x98\x60\x20\xe2\x9c\x22\xd0\xeb\
\xba\x96\xf0\x56\x6a\x82\x82\x81\x84\x53\xff\x05\x0b\x59\x96\x97\
\x34\x58\x62\xb0\x20\x41\x27\xe3\x04\xb3\x79\x0f\x33\x04\xda\xab\
\x07\x6d\xb4\x20\x3f\xb5\x90\x93\x20\x3b\x17\x45\x11\xfa\x50\x10\
\x40\x7e\x08\x9d\x33\x1a\xc4\x50\x7a\x87\x92\x04\xba\xa7\x50\x3c\
\x72\xc0\x3c\xcf\x73\x9c\x86\x58\x23\xf0\xcb\xb2\x8c\x2e\xd0\x75\
\x63\x59\xd6\x36\xc2\xcd\xef\xf8\xc4\xca\x30\x8c\x75\xdf\x0e\x43\
\x03\xe6\xba\x2e\xfb\x6a\x67\xdb\xf6\xfe\x7b\x6b\x2e\x59\x55\xd5\
\x2d\x80\xa2\x08\x0a\x6e\x50\xd7\x92\x43\x7f\xd4\xdf\xe0\xc0\x18\
\x3b\x1e\x1b\x3a\xd0\xe0\xf9\x68\xe1\x49\x82\x4e\xc6\x08\xde\xa7\
\x27\x93\xce\xcf\x54\x3a\x2a\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x02\xb4\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x00\xdd\x00\x00\x00\xdd\x01\
\x70\x53\xa2\x07\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\xd8\x50\x4c\x54\
\x45\xff\xff\xff\xff\xff\x00\xff\xbf\x80\xff\xdb\x6d\xff\xdf\x80\
\xff\xd1\x74\xff\xd8\x76\xff\xd9\x73\xff\xd5\x75\xff\xd5\x77\xff\
\xd7\x78\xff\xd7\x79\xff\xd5\x79\xfa\xd5\x75\xfa\xd5\x78\xfa\xd6\
\x76\xfb\xd7\x79\xfb\xd3\x78\xfb\xd5\x78\xed\xc4\x6f\xed\xc5\x6d\
\xfc\xd6\x79\xeb\xc2\x6d\xec\xc6\x70\xfc\xd5\x76\xfc\xd6\x78\xfd\
\xd6\x77\xfd\xd4\x77\xfd\xd6\x77\xfd\xd5\x76\xfb\xd5\x78\xfb\xd4\
\x77\xfc\xd5\x78\xfc\xd5\x77\xfc\xd5\x77\xfc\xd5\x77\xfc\xd5\x78\
\xfc\xd5\x77\xfc\xd5\x77\xfb\xd5\x77\xfc\xd5\x77\xed\xc5\x6f\xfc\
\xd5\x77\xed\xc6\x6f\xfc\xd5\x77\xfc\xd5\x77\xfc\xd5\x77\xfc\xd5\
\x77\xfc\xd5\x77\xfc\xd5\x77\xf2\xcc\x72\xfc\xd5\x77\xf3\xcc\x72\
\xf3\xcc\x73\xfc\xd5\x77\xf3\xcd\x72\xfc\xd5\x77\xf3\xcb\x72\xfc\
\xd5\x77\xfc\xd5\x77\xfc\xd5\x77\xfc\xd5\x77\xfc\xd5\x77\xea\xc3\
\x6e\xf2\xcb\x72\xf3\xcc\x72\xf4\xcd\x73\xf9\xd2\x76\xfa\xd3\x76\
\xfb\xd4\x76\xfb\xd4\x77\xfc\xd5\x77\xec\x0a\x60\x8f\x00\x00\x00\
\x3f\x74\x52\x4e\x53\x00\x01\x04\x07\x08\x0b\x0d\x14\x18\x1e\x20\
\x26\x2a\x30\x31\x38\x39\x40\x42\x45\x46\x4a\x4b\x50\x5b\x64\x69\
\x6b\x7c\x7f\x80\x89\x93\x9f\xaa\xb0\xb1\xbd\xc7\xd6\xdb\xdd\xdd\
\xdf\xe4\xe5\xe7\xe8\xec\xee\xf0\xf0\xf1\xf2\xf2\xf3\xf4\xf5\xf5\
\xf6\xf9\xfa\xfc\x92\x18\x52\x21\x00\x00\x01\x03\x49\x44\x41\x54\
\x38\x4f\x8d\xce\xd7\x5a\xc2\x40\x14\x45\xe1\x03\x58\x00\x05\x44\
\x90\x26\x52\x34\x88\x88\x28\x22\xc5\x12\x08\x84\xc9\xac\xf7\x7f\
\x23\x2f\xb0\xf0\x4d\x12\xc7\x7d\xbb\xfe\x8b\x2d\xb2\xb7\x7c\xd3\
\x19\x8d\x9c\x66\x5e\xa2\x97\x6a\xaf\x00\x60\xd5\x4e\x45\xf5\x4c\
\x9f\x9f\xf5\x33\xe1\x9e\xe8\x01\xa8\xc9\x44\x01\xf4\x12\x21\xd0\
\x00\x08\x06\xe5\xf2\x20\x00\x68\x98\x3d\x39\x07\x98\x96\x44\x4a\
\x53\x80\x79\xd2\x00\x39\x00\x16\x15\x91\xca\x02\x80\x9c\x01\xea\
\x00\x04\xc3\x6a\x75\x18\x00\x50\x37\x40\x67\x77\x3f\x98\xcd\x76\
\x9d\x8e\x01\x5a\x18\x6b\x19\xa0\x06\xa0\x95\x52\x4a\x29\x0d\x50\
\x33\x40\xda\x05\xd6\x9e\xe7\x79\x9e\xb7\x06\xdc\xb4\x01\xa4\x0b\
\xa0\xb5\xd6\x5a\x03\x74\xcd\x2e\xd9\xf1\xfe\x83\x71\x36\x04\xa4\
\xb8\xfc\xed\xcb\x62\xb8\x8b\x9c\xdd\x7f\xf7\xbb\x42\x54\x17\x39\
\x7a\x65\xbb\x05\x9e\x0f\xa3\xbb\xc8\x0b\x1b\x1f\x78\x8a\xeb\xff\
\x06\xb7\x16\xf0\x71\x6a\x01\x97\xb1\xfd\x0b\x5c\xd8\xc0\xe3\xb1\
\x05\x70\x6d\x03\x57\x16\xf0\x70\x60\x01\xee\x89\x05\xe0\x58\xc0\
\xfb\x79\x2c\xb8\x61\xe3\xff\xd5\x45\x6a\x6f\xbe\xd9\x3f\x01\xf5\
\xde\x54\x7e\xca\xf7\x18\x1d\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x03\x2c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x00\xe7\x00\x00\x00\xe7\x01\
\xf0\x1b\x58\xb5\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\xf0\x50\x4c\x54\
\x45\xff\xff\xff\x6d\x6d\x92\x60\x80\x80\x60\x75\x8a\x66\x7a\x8f\
\x66\x77\x88\x65\x7a\x8c\x64\x7a\x89\x64\x7a\x8a\x65\x79\x8a\x64\
\x7a\x89\x63\x79\x8a\x63\x78\x8a\x64\x79\x8a\x64\x79\x8a\x64\x79\
\x8b\x64\x79\x8a\x64\x79\x8a\x65\x7a\x8a\x66\x7b\x8c\x68\x7d\x8d\
\x74\x87\x96\x79\x8b\x9a\x7f\x91\x9e\x85\x95\xa3\x85\x96\xa3\x87\
\x97\xa5\x90\x9f\xab\x91\x9f\xab\x92\xa0\xac\x93\xa0\xab\x98\xa5\
\xb1\x98\xa6\xb2\xa0\xab\xb5\xa1\xae\xb8\xa3\xaf\xb9\xa5\xb1\xbb\
\xb4\xb5\xbc\xb4\xbe\xc6\xb5\xbf\xc7\xb9\xc2\xca\xbb\xc4\xcc\xbd\
\xc6\xcd\xbe\xc7\xce\xca\xd2\xd7\xcb\xd3\xd8\xcd\xd4\xd9\xd8\xdd\
\xe2\xd9\xde\xe2\xdb\xe0\xe4\xdc\xe1\xe5\xdd\xe2\xe5\xdf\xe4\xe7\
\xe0\xe5\xe8\xe6\xea\xed\xe7\xeb\xed\xe9\xec\xee\xea\xdd\xdd\xeb\
\xe9\xea\xeb\xee\xf0\xec\xef\xf1\xee\x9b\x91\xee\xf0\xf2\xef\xf2\
\xf4\xf0\xf2\xf4\xf2\xb1\xa9\xf3\xf4\xf6\xf4\xf6\xf7\xf5\xf7\xf8\
\xf8\xf9\xfa\xf9\xfa\xfb\xfb\xfc\xfc\xfc\xeb\xe9\xfd\xfe\xfe\xfe\
\xf6\xf6\xfe\xf8\xf7\xfe\xfe\xfe\xfe\xff\xff\xff\xfe\xfe\xff\xff\
\xff\x7d\x02\xb4\x15\x00\x00\x00\x11\x74\x52\x4e\x53\x00\x07\x08\
\x18\x19\x2d\x49\x84\x97\x98\xc1\xc8\xda\xe3\xf2\xf3\xf5\xd5\xa8\
\x31\x5b\x00\x00\x01\x91\x49\x44\x41\x54\x38\xcb\x85\x53\xe9\x5a\
\x82\x50\x10\xbd\xee\x82\x0a\x8e\x9a\x6b\x8b\x95\xa9\xa4\x52\x02\
\x2e\xe5\x46\x29\x9a\x85\xdd\xf7\x7f\x9b\xe6\x82\x20\x20\x7e\xcd\
\x0f\xc4\x39\x87\x59\xcf\x10\xe2\x5a\x24\xc9\xf1\x59\x51\xcc\xf2\
\x5c\x32\x42\xce\x2d\x9e\x16\xc0\x35\x21\x1d\x0f\xc0\xd1\x54\xde\
\x86\x4a\x25\xfb\x37\x9f\x8a\x7a\xf1\x58\x06\x7d\x85\xe6\x60\x34\
\x9e\xcd\x27\x5a\xbf\x59\xc0\xbf\x99\xd8\x09\x4f\xe4\xd0\x51\xd7\
\x96\x06\xa5\xb2\x4c\xe9\x7e\xa3\xd6\xd1\x91\x4b\xb8\xdf\x23\x5e\
\xe8\x8d\xb7\x14\x4d\x51\xd8\x93\xea\x12\x06\xc9\x1d\x63\x44\x31\
\x7e\x45\x5b\x99\xd4\x6b\x3b\xa5\x82\x59\xec\x3a\x52\xf8\xbd\x66\
\x1c\x81\xe9\xd4\xa1\x0c\x31\x46\xca\xea\x0f\xeb\xef\xad\x1c\xb7\
\x24\x39\x6f\x66\x07\x7b\x61\xdd\xa6\xb1\xbe\xb1\x79\x4e\xa0\xeb\
\x1a\x40\x1a\xe7\x27\x60\x82\x2d\x0d\x21\xb0\x24\x42\x84\x24\x01\
\x9a\x4b\x1a\x4a\x38\x34\x00\x92\x84\x03\x18\x18\xe1\x04\xda\x05\
\xe0\x08\x0f\x30\x72\x3d\xbf\xdf\x3e\x82\x0a\xc0\x93\x2c\xc0\xf8\
\x44\x58\x3c\x75\x3c\x04\x1d\x20\x4b\x44\x28\xcd\x64\x36\xbe\xe7\
\x47\xb4\xfb\xdb\x1b\x77\x10\x8a\x6c\x16\x41\x64\x84\xf9\x89\xf0\
\x70\x77\xfd\x16\x20\x60\x8a\x89\x27\xea\xe7\xfb\xe2\xcb\x9f\x02\
\x8b\xd4\x7c\x5b\xf8\x39\x31\xac\x22\xb1\xcd\xfe\xfe\x02\xa3\xcd\
\xda\x64\x83\xda\xd0\x70\x46\x95\x0d\x8a\x8d\x5a\xa5\xa1\x8c\x57\
\x6b\xd4\xd6\xb2\xf4\x20\xe3\x03\x1f\x46\xd9\x5a\x96\xb5\x6e\x69\
\x47\xcf\xad\x75\x5c\xb7\x25\x18\xe5\x1c\x7f\x71\x04\x63\x4b\x6e\
\x68\x06\xf1\x2b\x57\x72\xb6\x68\x3b\x6b\xea\x11\xad\xd1\xf2\x88\
\xf6\x28\xfb\xda\xf0\x40\x6d\xd9\x63\xfd\x65\x9f\xec\x9d\xc3\x69\
\x74\x55\xdd\x34\x75\xb5\x5d\x0d\x1e\x8e\xe7\xf4\x8a\xc5\xd0\xd3\
\xfb\xff\x78\x2f\x9f\xff\x1f\x2f\x83\xa9\x23\xd5\xf0\x7d\x09\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\x0a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x08\x5b\x00\x00\x08\x5b\x01\
\xe8\x9f\x75\xd0\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x01\x83\x50\x4c\x54\
\x45\xff\xff\xff\x55\x55\xaa\x40\x80\x80\x66\x66\x99\xff\xff\xff\
\x49\x6d\x92\xdb\xdb\xff\x55\x55\x8e\xe6\xe6\xe6\xea\xea\xea\x51\
\x5d\x80\x59\x64\x85\x55\x60\x80\x52\x5c\x85\x55\x5e\x84\x58\x61\
\x84\x9e\xa7\xb9\x80\x88\x99\x52\x63\x84\x58\x60\x80\x57\x63\x80\
\x55\x60\x81\x54\x5f\x80\x54\x5e\x81\xe7\xee\xee\xe7\xea\xee\xe8\
\xeb\xee\xe8\xeb\xeb\x56\x5f\x80\xe6\xed\xed\x56\x61\x80\xe8\xec\
\xee\x56\x60\x81\x54\x60\x80\x55\x60\x80\xe6\xec\xed\x55\x5f\x80\
\xe7\xec\xee\x56\x60\x80\x56\x60\x80\x55\x5f\x80\xe4\xea\xeb\xe8\
\xec\xed\x55\x60\x80\x78\x82\x9a\x78\x82\x9b\x84\x8e\xa3\x86\x8f\
\xa4\x74\x7e\x97\x9a\xa3\xb3\x77\x80\x99\x78\x81\x99\x55\x60\x80\
\x9a\xa2\xb3\x55\x60\x80\x74\x7d\x97\xa3\xaa\xba\xa4\xac\xbb\x6b\
\x76\x91\x76\x80\x99\xa3\xac\xba\xa7\xaf\xbe\x6b\x75\x90\xe7\xec\
\xed\x67\x71\x8e\x55\x60\x80\x55\x5f\x80\x63\x6f\x8b\x62\x6c\x89\
\xb6\xbd\xc9\x55\x60\x80\x63\x6c\x8a\xb9\xc0\xcb\xba\xc0\xca\xba\
\xc2\xcc\x5e\x68\x86\x5f\x6a\x87\x5d\x67\x86\x5e\x69\x87\x55\x60\
\x80\x5d\x67\x86\x55\x61\x80\x5b\x65\x84\x5d\x68\x87\xc5\xcc\xd4\
\xc6\xcd\xd4\x55\x60\x80\xe7\xec\xed\xc8\xcf\xd6\x55\x5f\x80\xc9\
\xcf\xd6\xcb\xd0\xd7\xe7\xec\xec\x55\x60\x80\x55\x60\x80\xe7\xeb\
\xed\xce\xd4\xda\xcf\xd6\xdc\x55\x60\x80\x55\x60\x80\xd2\xd8\xdd\
\x55\x60\x80\xd6\xda\xe0\x55\x60\x80\x55\x60\x80\xd9\xde\xe3\x55\
\x60\x80\x55\x60\x80\xe8\xec\xed\x55\x60\x80\x56\x60\x80\x55\x5f\
\x80\xe7\xec\xed\x55\x60\x80\x56\x60\x80\xe1\xe6\xe9\xe7\xec\xed\
\x55\x60\x80\x55\x60\x80\x55\x60\x80\x55\x61\x80\x55\x60\x80\xe6\
\xeb\xec\xe6\xeb\xed\x55\x60\x80\x55\x60\x81\xe6\xeb\xed\x55\x60\
\x80\xe7\xec\xed\x3f\x91\xdf\xa4\x00\x00\x00\x7f\x74\x52\x4e\x53\
\x00\x03\x04\x05\x05\x07\x07\x09\x0a\x0c\x16\x17\x18\x19\x1b\x1d\
\x1d\x1e\x1f\x20\x2c\x45\x46\x49\x49\x4a\x4d\x4e\x6e\x71\x74\x78\
\x7d\x82\x90\x91\x93\x93\x95\x98\xb3\xb5\xb9\xbd\xbd\xbd\xbf\xbf\
\xc0\xc0\xc1\xc1\xc2\xc3\xc4\xc4\xc4\xc4\xc5\xc5\xc5\xc5\xc6\xc7\
\xc8\xca\xcb\xcb\xce\xce\xcf\xcf\xcf\xd0\xd0\xd1\xd1\xd4\xd4\xd5\
\xd5\xd6\xd6\xd6\xd7\xd7\xd8\xd8\xda\xdb\xdb\xdb\xdc\xdd\xde\xde\
\xdf\xdf\xe1\xe2\xe4\xe6\xe6\xe8\xe9\xea\xed\xee\xef\xf1\xf1\xf3\
\xf3\xf4\xf4\xf4\xf4\xf5\xf6\xf7\xfb\xfd\xfd\xfd\xfe\xfe\xfe\xe0\
\xf4\x89\xca\x00\x00\x01\x6e\x49\x44\x41\x54\x18\x19\x65\xc1\x09\
\x43\x4c\x61\x14\x06\xe0\x77\x4c\x89\x5c\x5b\x1a\xd9\xab\x4b\x0d\
\x06\xa1\xc5\x12\x5a\xc8\x92\x29\xfb\x90\xad\xe4\x96\x65\xd2\x14\
\x91\xed\xf6\x9e\x9f\x5e\xf3\xdd\xf3\x9d\xee\x34\xcf\x83\x4d\x8d\
\xb9\xb0\xaf\x54\x5e\x9d\x2a\xe4\xdb\xb2\xa8\xd7\x1c\x56\x68\xca\
\xdd\x01\x6a\x65\x3b\x97\x59\xa3\xd2\xd1\x84\x94\x60\x80\x75\x46\
\x02\x98\xd6\x39\x7a\x8b\x8b\xf4\x66\x5b\xa1\x82\x59\x3a\xff\x2f\
\x5e\x9b\x59\x5b\x9b\xb9\x71\x85\x89\xb9\x00\x4e\xd3\x08\x9d\xf8\
\x92\xa8\xfe\x98\xce\x40\x16\x55\x1d\x4c\xf4\x8a\xe9\x65\xa2\x13\
\x1b\x82\x0a\x13\xe3\x62\xc6\x99\x58\x6e\x06\xd0\x4d\x15\x89\x89\
\xa8\x42\x20\x5b\xa6\x8a\xc4\x44\x54\x95\x46\xb4\xd1\x8b\xc4\x44\
\xf4\x72\xc8\xd3\x9b\x17\x33\x4f\x2f\x44\x81\xea\xa1\xa4\x3c\xa3\
\xea\xc3\x14\xd5\x79\x49\x19\xa4\x2a\x61\x95\xea\x9c\xa4\x0c\x52\
\x95\xf1\x95\xea\xe9\x3f\xd9\x74\x8f\xea\x17\x1e\xd1\xbb\x29\xe6\
\x42\x4c\xf5\x04\x05\x7a\x45\xf1\x5e\xc7\xf4\x4e\x23\x4f\xf3\x4a\
\xd4\x2d\x9a\x10\x39\x9a\xeb\x92\x78\xf3\x87\xe6\x20\x32\x9f\x69\
\x9e\x8b\x73\x9b\xe6\x53\x06\x38\x45\xf3\x40\x9c\x49\x9a\x10\xc0\
\xee\x6f\xf4\x5e\x88\x73\x87\xde\xcf\x16\x6c\x38\x46\x35\xf1\x57\
\x9c\xab\x31\xd5\x09\x54\x65\x46\x59\xf5\xbd\xe7\x87\xa8\xfb\x45\
\x3a\x77\xb7\xc1\xd9\x55\x22\x7f\x5f\xfe\x22\x29\x63\x45\x92\xef\
\xf7\x42\xed\x79\x37\xfc\x41\xb6\x18\x7b\xfc\xf1\x00\xcc\xbe\xb3\
\x52\xe7\xcc\x7e\xa4\x1d\x7d\x2b\x35\x5e\x1e\xc6\x16\xdb\xdb\x97\
\xc4\x2c\x1c\x6f\x40\xbd\x9d\x47\xba\x86\xa6\x57\x56\xa6\x87\x4e\
\x1e\xda\x01\xb3\x0e\x29\x11\x78\xcc\x11\x55\x71\x85\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x02\xb6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x0e\x00\x00\x0b\x0e\
\x01\x40\xbe\xe1\x41\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x02\x33\x49\x44\
\x41\x54\x58\x85\xed\xd7\xdb\x8b\x4d\x51\x1c\x07\xf0\xcf\x3a\x33\
\x19\x1a\x73\x89\xc4\x0c\x43\x9a\x1a\x0f\x83\x44\xae\x29\x4a\x22\
\x92\xc4\xbc\xa9\x89\xe4\x92\x9a\x27\xa5\x44\xfe\x01\x79\xf3\xe0\
\xc1\xa5\x5c\x1e\x14\x4f\x3c\x08\xa1\x30\x72\x09\x45\x29\x93\x48\
\x9e\x64\xc4\x18\x4f\xb4\x3c\xec\x3d\x9a\xc6\x39\xce\x3e\x33\x73\
\x3a\x1e\x66\xd5\xaa\xbd\xd6\xef\xfb\x5b\xdf\xef\x5e\xbf\xb5\xbe\
\xbb\x1d\x62\x8c\x2a\xd9\x72\x15\x65\x1f\x13\xf0\x3f\x08\xa8\x1e\
\x49\x72\x08\x61\x0d\xd6\xa2\x1d\x4d\xf8\x88\x57\x38\x1d\x63\xec\
\xc9\xb4\x48\x8c\xb1\xe4\x8e\x85\xb8\x89\x53\x58\x87\x19\xa8\xc2\
\x6c\x74\xe0\x16\x2e\xa2\xbe\xe8\x5a\xc3\x20\x5f\x89\x07\x68\x2f\
\x82\xdb\x8a\xa7\x98\x39\x6a\x02\x30\x0f\xcf\x31\x2d\x23\x7e\x05\
\xee\xa0\x6a\xb4\x04\x5c\xc1\x82\x12\xf0\x0d\x38\x8f\x1e\x3c\xc4\
\x71\xcc\x1d\x8c\xc9\x7c\x0b\x42\x08\x73\xd0\x88\xaa\x10\x42\x5b\
\x08\xa1\xa6\x08\x7e\x03\x6e\xe0\x0d\x8e\xe1\x02\x5a\xd1\x1d\x42\
\xb8\x18\x42\xc8\x65\x3a\x84\xa8\xc5\x11\xbc\xc3\x13\x9c\xc0\x39\
\x74\xe3\x1a\xd6\xe7\xc9\xd9\x84\x4b\x68\xc8\x13\xab\xc7\x33\xdc\
\x2b\x5a\x02\xac\xc2\x0b\xec\xcb\x57\x47\xb4\xe1\x34\xce\x62\x7c\
\x09\xa5\xc9\xe1\x33\x3a\x8b\x1d\xa0\x47\x98\x9c\x61\xc1\xdd\xb8\
\x8e\x5c\x09\x22\xb6\xa3\xa7\x50\xb0\x3e\x7d\xf3\x96\x12\x16\x3c\
\x8a\xc3\xa8\xcd\x88\x1f\x87\xfe\x42\xc1\x83\xd8\x5d\x02\x79\x3b\
\xce\xa4\xdb\xfa\x1d\xfd\x78\x8b\x35\x45\xf2\x7e\x0c\x9d\x68\xc2\
\x16\x89\x9d\x4e\xc8\x48\xbe\x0b\xf7\xb1\x64\xe0\x9c\x48\x2c\x7e\
\x0f\xfa\x24\xb6\x9c\x2f\xaf\x05\x5f\x06\x06\x75\x12\x5b\xbd\x81\
\x43\x58\x94\x91\x3c\xe0\x00\x6a\x0a\xc4\x27\xe2\x13\x76\xe6\x89\
\x9d\x4c\x85\xab\xc2\x5d\x74\x64\xdd\xf2\x52\x3a\x96\xa2\x6f\xc8\
\xdc\xfc\xb4\x4c\xd3\xa1\x0b\x87\xcb\x41\x3e\x88\xf0\x03\x56\xa4\
\xcf\x5d\x29\xf9\xde\x74\xec\x76\x3e\xc3\x18\x65\x01\xdd\xe8\xc5\
\x37\xbc\x37\xc8\x8e\x73\x68\x8c\x31\x7e\x55\xde\x16\x71\x15\x93\
\x62\x8c\xb3\x62\x8c\x2f\x07\x02\xd5\xf8\x15\x42\xa8\x8e\x31\xfe\
\x2c\xa3\x80\x56\xc9\x41\xfc\x8b\x23\x27\xf9\xbc\xae\x2e\x17\x73\
\x08\xa1\x53\xe2\x90\xaf\x0b\x61\x5a\x24\x96\x5b\x57\x86\xda\x2f\
\x97\x18\xd3\xb2\x82\x98\x14\xb8\x11\x8f\xb1\x0d\x53\x47\x48\x3a\
\x0e\x9b\x71\x39\x25\xdf\xf1\x2f\x7c\x18\xb8\x0a\x21\x84\x29\xd8\
\x8f\xc5\x68\x96\x98\xcc\x70\x5a\xb3\xc4\x01\x9f\x60\x5f\x8c\xb1\
\xf7\x5f\xe0\x3f\x02\x2a\xd5\x2a\xfe\x5f\x30\x26\xa0\xe2\x02\x7e\
\x03\xb7\x39\xbc\xed\x20\x33\xf3\x9f\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x05\
\x00\x4f\xa6\x53\
\x00\x49\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x0f\
\x03\xec\xfb\x67\
\x00\x74\
\x00\x68\x00\x65\x00\x72\x00\x6d\x00\x6f\x00\x6d\x00\x65\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x07\xb5\x0f\xc7\
\x00\x63\
\x00\x61\x00\x6c\x00\x65\x00\x6e\x00\x64\x00\x61\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x0c\xa8\x9d\xc7\
\x00\x64\
\x00\x6f\x00\x6f\x00\x72\x00\x2d\x00\x6b\x00\x65\x00\x79\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x05\x9e\x83\x27\
\x00\x63\
\x00\x6c\x00\x6f\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x09\xc5\x58\xc7\
\x00\x75\
\x00\x73\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x0b\xb9\x11\x87\
\x00\x63\
\x00\x6c\x00\x6f\x00\x75\x00\x64\x00\x79\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x10\x00\x02\x00\x00\x00\x06\x00\x00\x00\x03\
\x00\x00\x00\x20\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x80\x00\x00\x00\x00\x00\x01\x00\x00\x09\x15\
\x00\x00\x00\x44\x00\x00\x00\x00\x00\x01\x00\x00\x04\x35\
\x00\x00\x00\x98\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x45\
\x00\x00\x00\xae\x00\x00\x00\x00\x00\x01\x00\x00\x10\x53\
\x00\x00\x00\x62\x00\x00\x00\x00\x00\x01\x00\x00\x06\x5d\
"
def qInitResources():
    """Register the embedded icon data with Qt's resource system."""
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded icon data from Qt's resource system."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
# Auto-generated resource modules register themselves on import.
qInitResources()
|
import os
import sys
# Minimal manage.py-style entry point: default to the project's local
# settings module, then hand argv to Django's command dispatcher.
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sim.settings.local")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
class NotSupportedDayError(Exception):
    """Raised when a day name is outside the supported set."""

    def __init__(self, value):
        # The offending day string, echoed back in the error message.
        self.value = value

    def __str__(self):
        # BUG FIX: the original referenced the bare name `value`, raising
        # NameError whenever the exception was stringified; it must read
        # the instance attribute self.value.
        return repr(" ".join([" Day ", self.value, " is not supported "]))
|
import numpy as np
import pandas as pd
class CountsTableSparse(pd.SparseDataFrame):
    '''Sparse table of gene expression counts
    - Rows are features, e.g. genes.
    - Columns are samples.
    NOTE(review): pd.SparseDataFrame was deprecated in pandas 0.25 and
    removed in 1.0, so this class presumably requires an older pandas --
    confirm the project's pinned version.
    '''
    # Attribute names that pandas propagates to derived objects (slices,
    # copies) instead of dropping them.
    _metadata = [
        'name',
        '_spikeins',
        '_otherfeatures',
        '_normalized',
        'pseudocount',
        'dataset',
    ]
    # Class-level defaults; the from_* constructors overwrite these per
    # table from the config file.
    _spikeins = ()
    _otherfeatures = ()
    _normalized = False
    pseudocount = 0.1
    dataset = None
    @property
    def _constructor(self):
        # Make pandas operations return CountsTableSparse instead of a
        # plain SparseDataFrame.
        return CountsTableSparse
    @classmethod
    def from_tablename(cls, tablename):
        '''Instantiate a CountsTable from its name in the config file.
        Args:
            tablename (string): name of the counts table in the config file.
        Returns:
            CountsTable: the counts table.
        '''
        from ..config import config
        from ..io import parse_counts_table_sparse
        self = cls(parse_counts_table_sparse({'countsname': tablename}))
        self.name = tablename
        # Feature annotations for this table come from the config file.
        config_table = config['io']['count_tables'][tablename]
        self._spikeins = config_table.get('spikeins', [])
        self._otherfeatures = config_table.get('other', [])
        self._normalized = config_table['normalized']
        return self
    @classmethod
    def from_datasetname(cls, datasetname):
        '''Instantiate a CountsTable from its name in the config file.
        Args:
            datasetname (string): name of the dataset in the config file.
        Returns:
            CountsTableSparse: the counts table.
        '''
        from ..config import config
        from ..io import parse_counts_table_sparse
        self = cls(parse_counts_table_sparse({'datasetname': datasetname}))
        self.name = datasetname
        # Same as from_tablename, but the table is nested under a dataset.
        config_table = config['io']['datasets'][datasetname]['counts_table']
        self._spikeins = config_table.get('spikeins', [])
        self._otherfeatures = config_table.get('other', [])
        self._normalized = config_table['normalized']
        return self
    def to_npz(self, filename):
        '''Save to numpy compressed file format'''
        # NOTE(review): the sibling constructors import from ..io, but this
        # imports from .io.npz -- verify the relative path is intentional.
        from .io.npz import to_counts_table_sparse
        to_counts_table_sparse(self, filename)
    def exclude_features(self, spikeins=True, other=True, inplace=False,
                         errors='raise'):
        '''Get a slice that excludes secondary features.
        Args:
            spikeins (bool): Whether to exclude spike-ins
            other (bool): Whether to exclude other features, e.g. unmapped reads
            inplace (bool): Whether to drop those features in place.
            errors (string): Whether to raise an exception if the features
            to be excluded are already not present. Must be 'ignore'
            or 'raise'.
        Returns:
            CountsTable: a slice of self without those features.
        '''
        drop = []
        if spikeins:
            drop.extend(self._spikeins)
        if other:
            drop.extend(self._otherfeatures)
        out = self.drop(drop, axis=0, inplace=inplace, errors=errors)
        # Keep the linked dataset's feature sheet in sync when dropping
        # in place.
        if inplace and (self.dataset is not None):
            self.dataset._featuresheet.drop(drop, inplace=True, errors=errors)
        return out
    def get_spikeins(self):
        '''Get spike-in features
        Returns:
            CountsTable: a slice of self with only spike-ins.
        '''
        return self.loc[self._spikeins]
    def get_other_features(self):
        '''Get other features
        Returns:
            CountsTable: a slice of self with only other features (e.g.
            unmapped).
        '''
        return self.loc[self._otherfeatures]
    def log(self, base=10):
        '''Take the pseudocounted log of the counts.
        Args:
            base (float): Base of the log transform
        Returns:
            A transformed CountsTableSparse with zeros at the zero-count items.
        '''
        from scipy.sparse import coo_matrix
        coo = self.to_coo()
        # Sparsity-preserving trick: log(x + p) = log1p(x / p) + log(p).
        # The log(p) term is added only at the stored (nonzero) entries via
        # coobase, so zero cells stay zero in the sparse result.
        coobase = np.log(self.pseudocount) * coo_matrix((np.ones(coo.nnz), (coo.row, coo.col)), shape=coo.shape)
        coolog = ((coo / self.pseudocount).log1p() + coobase) / np.log(base)
        # NOTE: the entries that should be log(pseudocount) are zeros now
        clog = CountsTableSparse(
            coolog,
            index=self.index,
            columns=self.columns,
            dtype=float,
            default_fill_value=0)
        return clog
    def unlog(self, base=10):
        '''Reverse the pseudocounted log of the counts.
        Args:
            base (float): Base of the log transform
        Returns:
            A transformed CountsTableSparse.
        '''
        from scipy.sparse import coo_matrix
        coo = self.to_coo()
        # Inverse of log(): x = expm1(y*log(base) - log(p)) * p at the
        # stored entries.
        coobase = np.log(self.pseudocount) * coo_matrix((np.ones(coo.nnz), (coo.row, coo.col)), shape=coo.shape)
        cooexp = (coo * np.log(base) - coobase).expm1() * self.pseudocount
        cexp = CountsTableSparse(
            cooexp,
            index=self.index,
            columns=self.columns,
            dtype=float,
            default_fill_value=0)
        return cexp
    def normalize(
            self,
            method='counts_per_million',
            include_spikeins=False,
            **kwargs):
        '''Normalize counts and return new CountsTable.
        Args:
            method (string or function): The method to use for normalization.
            One of 'counts_per_million', 'counts_per_thousand_spikeins',
            'counts_per_thousand_features', 'counts_per_million_column'.
            If this argument is a function, it must take the CountsTable as
            input and return the normalized one as output.
            include_spikeins (bool): Whether to include spike-ins in the
            normalization and result.
        Returns:
            A new, normalized CountsTableSparse.
        NOTE: if method == 'counts_per_million_column', you have to use an
        additional keyword argument called 'column' that specifies the column
        of the samplesheet containing the normalization baseline. For instance,
        if your samplesheet has a column called 'total_counts' that you want to
        use for normalization, call:
        CountsTableSparse.normalize(
            method='counts_per_million_column',
            column='total_counts')
        This requires the count table to be linked to a Dataset.
        '''
        import copy
        if method == 'counts_per_million':
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = counts.sum(axis=0)
            counts_norm = 1e6 * counts / norm
        elif method == 'counts_per_thousand_spikeins':
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = self.get_spikeins().sum(axis=0)
            counts_norm = 1e3 * counts / norm
        elif method == 'counts_per_thousand_features':
            if 'features' not in kwargs:
                raise ValueError('Set features=<list of normalization features>')
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = self.loc[kwargs['features']].sum(axis=0)
            counts_norm = 1e3 * counts / norm
        elif method == 'counts_per_million_column':
            if 'column' not in kwargs:
                raise ValueError('Specify a samplesheet column with column=<mycolumn>')
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = self.dataset[kwargs['column']].values
            counts_norm = 1e6 * counts / norm
        elif callable(method):
            counts_norm = method(self)
            method = 'custom'
        else:
            raise ValueError('Method not understood')
        # Shallow copy of metadata
        for prop in self._metadata:
            # dataset is special, to avoid infinite loops
            if prop == 'dataset':
                counts_norm.dataset = None
            else:
                setattr(counts_norm, prop, copy.copy(getattr(self, prop)))
        counts_norm._normalized = method
        return counts_norm
    def get_statistics(self, axis='features', metrics=('mean', 'cv')):
        '''Get statistics of the counts.
        Args:
            axis (str): 'features' or 'samples'
            metrics (sequence of strings): any of 'mean', 'var', 'std', 'cv',
            'fano', 'min', 'max'.
        Returns:
            pandas.DataFrame with features as rows and metrics as columns.
        '''
        if axis == 'features':
            axn = 1
        elif axis == 'samples':
            axn = 0
        else:
            raise ValueError('axis must be features or samples')
        st = {}
        # Base statistics are computed only when a requested metric needs
        # them (cv and fano both derive from mean and std).
        if 'mean' in metrics or 'cv' in metrics or 'fano' in metrics:
            st['mean'] = self.mean(axis=axn)
        if ('std' in metrics or 'cv' in metrics or 'fano' in metrics or
                'var' in metrics):
            st['std'] = self.std(axis=axn)
        if 'var' in metrics:
            st['var'] = st['std'] ** 2
        # The 1e-10 floor guards against division by (near-)zero means.
        if 'cv' in metrics:
            st['cv'] = st['std'] / np.maximum(st['mean'], 1e-10)
        if 'fano' in metrics:
            st['fano'] = st['std'] ** 2 / np.maximum(st['mean'], 1e-10)
        if 'min' in metrics:
            st['min'] = self.min(axis=axn)
        if 'max' in metrics:
            st['max'] = self.max(axis=axn)
        df = pd.concat([st[m] for m in metrics], axis=1)
        df.columns = pd.Index(list(metrics), name='metrics')
        return df
|
"""
Dropbox Authentication for web2py
Developed by Massimo Di Pierro (2011)
Same License as Web2py License
"""
import os
import re
import urllib
from dropbox import client, rest, session
from gluon import *
from gluon.tools import fetch
from gluon.storage import Storage
import gluon.contrib.simplejson as json
class DropboxAccount(object):
    """web2py login form that authenticates users through Dropbox OAuth.

    Usage::

        from gluon.contrib.login_methods.dropbox_account import DropboxAccount
        auth.settings.actions_disabled=['register','change_password','request_reset_password']
        auth.settings.login_form = DropboxAccount(request,
            key="...",
            secret="...",
            access_type="...",
            url = "http://localhost:8000/%s/default/user/login" % request.application)

    when logged in::

        client = auth.settings.login_form.client
    """

    def __init__(self,
                 request,
                 key = "",
                 secret = "",
                 access_type="app_folder",
                 login_url = "",
                 on_login_failure=None,
                 ):
        # key/secret identify the Dropbox application; access_type is the
        # Dropbox permission scope (e.g. "app_folder").
        self.request=request
        self.key=key
        self.secret=secret
        self.access_type=access_type
        self.login_url = login_url
        # Where to redirect when Dropbox returns no usable account id.
        self.on_login_failure = on_login_failure
        self.sess = session.DropboxSession(
            self.key,self.secret,self.access_type)

    def get_user(self):
        """Exchange the session's request token for an access token and
        return a user dict, or None when not authenticated."""
        request = self.request
        token = current.session.dropbox_token
        try:
            access_token = self.sess.obtain_access_token(token)
        except:
            # missing/expired/rejected token: treat as "not logged in"
            access_token = None
        if access_token:
            user = Storage()
            self.client = client.DropboxClient(self.sess)
            data = self.client.account_info()
            # split "First Last" into at most two parts
            display_name = data.get('display_name','').split(' ',1)
            user = dict(email = data.get('email',None),
                        first_name = display_name[0],
                        last_name = display_name[-1],
                        registration_id = data.get('uid',None))
            if not user['registration_id'] and self.on_login_failure:
                redirect(self.on_login_failure)
            return user
        return None

    def login_form(self):
        """Start the OAuth dance by redirecting to Dropbox's authorize URL."""
        token = self.sess.obtain_request_token()
        current.session.dropbox_token = token
        dropbox_url = self.sess.build_authorize_url(token,self.login_url)
        redirect(dropbox_url)
        # NOTE(review): web2py's redirect() raises HTTP, so the IFRAME
        # below appears to be unreachable dead code — confirm intent.
        form = IFRAME(_src=dropbox_url,
                      _scrolling="no",
                      _frameborder="no",
                      _style="width:400px;height:240px;")
        return form

    def logout_url(self, next = "/"):
        """Clear session state and send the user to Dropbox's logout page."""
        current.session.dropbox_token=None
        current.session.auth=None
        redirect('https://www.dropbox.com/logout')
        return next

    def put(self,filename,file):
        # Upload *file* under *filename*; returns the stored size in bytes.
        return json.loads(self.client.put_file(filename,file))['bytes']

    def get(self,filename,file):
        # NOTE(review): the *file* argument is never used; the content is
        # returned from the client instead — confirm against callers.
        return self.client.get_file(filename)

    def dir(self,path):
        # Directory metadata for *path*, decoded from JSON into a dict.
        return json.loads(self.client.metadata(path))
def use_dropbox(auth, filename='private/dropbox.key', **kwargs):
    """Configure *auth* to log in through Dropbox.

    Reads "key:secret:access_type" from *filename* (relative to the
    application folder) and installs a DropboxAccount login form.
    If the key file does not exist, *auth* is left untouched.

    Args:
        auth: the web2py Auth instance to configure.
        filename (str): app-relative path of the key file.
        **kwargs: extra arguments forwarded to DropboxAccount.
    """
    path = os.path.join(current.request.folder, filename)
    if not os.path.exists(path):
        return
    request = current.request
    # 'with' closes the key file promptly; the original leaked the
    # handle via open(...).read().
    with open(path, 'r') as keyfile:
        key, secret, access_type = keyfile.read().strip().split(':')
    host = current.request.env.http_host
    login_url = "http://%s/%s/default/user/login" % \
        (host, request.application)
    auth.settings.actions_disabled = \
        ['register', 'change_password', 'request_reset_password']
    auth.settings.login_form = DropboxAccount(
        request, key=key, secret=secret, access_type=access_type,
        login_url=login_url, **kwargs)
|
import sys
from collections import defaultdict
import itertools
import operator
from operator import itemgetter
counters = defaultdict(int)
trueCounters = defaultdict(int)
fr = open('allworks','r')
wc = 0
for line in fr:
line = line.strip()
words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
for word in words:
wc += 1
thresold = 0.01 * wc
fr.seek(0)
for line in fr:
line = line.strip()
words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
for word in words:
if word in counters:
counters[word] += 1
elif len(counters) < 99:
counters[word] = 1
else:
delCounters = []
for key in counters:
counters[key] -= 1
if counters[key] == 0:
delCounters.append(key)
for word in delCounters:
del counters[word]
fr.seek(0)
for line in fr:
line = line.strip()
words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
for word in words:
if word in counters:
if word in trueCounters:
trueCounters[word] += 1
else:
trueCounters[word] = 1
delCounters = []
for word in trueCounters:
if trueCounters[word] < thresold:
delCounters.append(word)
for word in delCounters:
del trueCounters[word]
for key, value in sorted(trueCounters.iteritems(), key=operator.itemgetter(1), reverse=True):
print key, value
|
from email.mime.text import MIMEText
from smtplib import SMTP
class Gmail(object):
    """Send an email with Google Mail.

    Can easily be used with other providers by editing the server and
    port in send().

    Args:
        credentials (tuple): (username, password)
    """

    def __init__(self, credentials):
        self.user, self.password = credentials

    def send(self, receiver, subject, body):
        """Send email.

        Args:
            receiver (str): Address of receiver
            subject (str): Subject of email
            body (str): Body of email
        """
        message = MIMEText(body)
        message['Subject'] = subject
        message['From'] = self.user
        message['To'] = receiver
        server = SMTP('smtp.gmail.com', 587)
        try:
            server.starttls()
            server.login(self.user, self.password)
            server.sendmail(self.user, receiver, message.as_string())
        finally:
            # Always tear down the connection; the original leaked the
            # socket whenever starttls/login/sendmail raised.
            try:
                server.quit()
            except Exception:
                # connection already gone — don't mask the original error
                pass
|
"""
Module which groups all the aggregated precomputed information in order to
save computational power.
"""
import pandas as pd
from FirmsLocations.Preprocess.preprocess_cols import cp2str
def read_agg(filepath):
    "Read file of aggregated info."
    # Semicolon-separated dump; postal-code columns are normalized to
    # strings by cp2str before returning.
    return cp2str(pd.read_csv(filepath, sep=';'))
def read_aggregation(filepath, typevars):
    ## TODO
    # Read the aggregated table and split it into (locations, features)
    # using the column groups declared in *typevars*.
    table = read_agg(filepath)
    return table[typevars['loc_vars']], table[typevars['feat_vars']]
|
# Importing this module activates the "km3pipe-notebook" style via the
# package's style helper (NOTE(review): presumably a matplotlib style —
# confirm in ..style).
from ..style import use

use("km3pipe-notebook")
|
from django.utils.translation import ugettext_lazy as _ugl

# Tell Django which AppConfig class to load for this application.
default_app_config = 'django_sendgrid_parse.apps.DjangoSendgridParseAppConfig'
|
"""User-friendly exception handler for swood."""
import http.client
import traceback
import sys
import os
# Resolve to an absolute path so the os.path.samefile()/relpath()
# comparisons against __file__ later in this module behave consistently
# regardless of how the module was imported.
__file__ = os.path.abspath(__file__)


class ComplainToUser(Exception):
    """When used with ComplaintFormatter, tells the user what error (of theirs) caused the failure and exits."""
    pass
def can_submit():
    """Return 1 if bug reports may be submitted, 0 otherwise.

    The user's choice is cached in ~/.swood/submit-bugs ("1" or "0").
    When no valid cached answer exists, ask interactively and persist
    the answer best-effort (cache read/write failures are ignored).
    """
    if not os.path.isdir(os.path.expanduser("~/.swood")):
        os.mkdir(os.path.expanduser("~/.swood"))
    sbpath = os.path.expanduser("~/.swood/submit-bugs")
    if os.path.isfile(sbpath):
        try:
            with open(sbpath) as sb:
                resp = sb.read(1)
            if resp == "1":
                return 1
            elif resp == "0":
                return 0
        except OSError:
            # unreadable cache (the original used a bare except, which
            # also swallowed KeyboardInterrupt): fall through and ask
            pass
    while True:
        resp = input(
            "Something went wrong. Do you want to send an anonymous bug report? (Type Y or N): ").lower()
        if resp in ("yes", "y", "true"):
            answer = 1
        elif resp in ("no", "n", "false"):
            answer = 0
        else:
            # unrecognized input: ask again
            continue
        try:
            with open(sbpath, "w") as sb:
                sb.write(str(answer))
        except OSError:
            pass  # best-effort cache write
        return answer
class ComplaintFormatter:
    """Notifies the user when the program fails predictably and uploads bug reports.

    When used in a with statement, ComplaintFormatter catches all exceptions. If the
    exception is a ComplainToUser exception, it will simply print the error message
    and exit (with an exit code of 1). If the exception is something else (i.e. an
    actual, unexpected exception), it will upload the traceback to the swood debug
    server (unless the user has opted out of sending bug reports.)
    """

    def __init__(self, version=None):
        # Optional version string, prepended as "# <version>" to reports.
        self.version = version

    def __enter__(self):
        pass

    def _scrub_stack(self, tb):
        """Extract *tb* and scrub it for privacy.

        Drops leading frames that do not belong to this package and
        rewrites filenames relative to the package root, hiding the
        username on Windows and Linux and normalizing paths to help
        detect duplicate reports.
        """
        stack = traceback.extract_tb(tb)
        here = os.path.dirname(__file__)

        def is_ours(fs):
            try:
                return os.path.samefile(os.path.dirname(fs.filename), here)
            except OSError:
                # synthetic frames ("<string>") or deleted files
                return False

        # The original used bare next(...), which raised StopIteration —
        # masking the real error — whenever no frame was ours; default
        # to keeping the whole stack instead.
        cutoff = next((idx for idx, fs in enumerate(stack) if is_ours(fs)), 0)
        stack = stack[cutoff:]
        # rewrite paths so they contain only relative directories
        dirstart = os.path.abspath(os.path.join(here, ".."))
        for fs in stack:
            fs.filename = os.path.relpath(
                fs.filename, start=dirstart).replace("\\", "/")
        return stack

    def _upload(self, str_tb):
        """Best-effort POST of the report to the swood debug server."""
        try:
            conn = http.client.HTTPSConnection("meme.institute")
            conn.request("POST", "/swood/bugs/submit", str_tb)
            resp = conn.getresponse().read().decode("utf-8")
            if resp == "done":
                print("New bug submitted!", file=sys.stderr)
            elif resp == "dupe":
                print(
                    "This bug is already in the queue to be fixed.", file=sys.stderr)
            else:
                raise Exception
        except Exception:
            print("Submission of bug report failed.", file=sys.stderr)
            traceback.print_exc()

    def __exit__(self, exc_type, exc, tb):
        if isinstance(exc, ComplainToUser):
            print("Error: {}".format(exc), file=sys.stderr)
            sys.exit(1)
        elif isinstance(exc, Exception):
            scrubbed_stack = self._scrub_stack(tb)
            str_tb = "Traceback (most recent call last):\n" + \
                "".join(traceback.format_list(scrubbed_stack)) + \
                "".join(traceback.format_exception_only(exc_type, exc))
            if self.version is not None:
                str_tb = "# " + self.version + "\n" + str_tb
            if "--optout" in sys.argv or "-o" in sys.argv:
                print(
                    "Something went wrong. A bug report will not be sent because of your command-line flag.", file=sys.stderr)
                return False
            elif os.environ.get("SWOOD_OPTOUT") == "1":
                print(
                    "Something went wrong. A bug report will not be sent because of your environment variable.", file=sys.stderr)
                return False
            elif not can_submit():
                print(
                    "Something went wrong. A bug report will not be sent because of your config setting.", file=sys.stderr)
                return False
            else:
                print(
                    "Something went wrong. A bug report will be sent to help figure it out. (see --optout)", file=sys.stderr)
                self._upload(str_tb)
                # suppress the original exception after reporting it
                return True
|
__author__ = "Andrew Hankinson (andrew.hankinson@mail.mcgill.ca)"
__version__ = "1.5"
__date__ = "2011"
__copyright__ = "Creative Commons Attribution"
__license__ = """The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE."""
import multiprocessing
from optparse import OptionParser
import os
import sys
import hashlib
import codecs
import re
from pybagit.exceptions import *
HASHALG = 'sha1'
ENCODING = "utf-8"
def write_manifest(datadir, encoding, update=False):
    """Write manifest-<HASHALG>.txt listing a checksum for every file
    under *datadir*.

    Checksums are computed in parallel (one worker per CPU).  With
    update=True, checksums already present in an existing manifest are
    reused for files still on disk, so only new files are hashed;
    entries for files that no longer exist are dropped.
    """
    bag_root = os.path.split(os.path.abspath(datadir))[0]
    manifest_file = os.path.join(bag_root, "manifest-{0}.txt".format(HASHALG))
    checksums = dict()
    files_to_checksum = set(dirwalk(datadir))
    if update and os.path.isfile(manifest_file):
        # 'with' closes the old manifest (the original leaked the handle).
        with codecs.open(manifest_file, 'rb', encoding) as oldmanifest:
            for line in oldmanifest:
                checksum, file_ = line.strip().split(' ', 1)
                full_file = os.path.join(bag_root, file_)
                # Keep the recorded checksum only for files still on
                # disk; stale entries fall out of the new manifest.
                if full_file in files_to_checksum:
                    files_to_checksum.remove(full_file)
                    checksums[full_file] = checksum
    p = multiprocessing.Pool(processes=multiprocessing.cpu_count())
    try:
        result = p.map_async(csumfile, files_to_checksum)
        # csumfile returns (hexdigest, filename); swap when storing.
        checksums.update((k, v) for v, k in result.get())
    finally:
        # shut the pool down even if hashing raised
        p.close()
        p.join()
    with codecs.open(manifest_file, 'wb', encoding) as mfile:
        for file_, checksum in sorted(checksums.iteritems()):
            rp = os.path.relpath(file_, bag_root)
            fl = ensure_unix_pathname(rp)
            mfile.write(u"{0} {1}\n".format(checksum, fl))
def dirwalk(datadir):
    """Return a list of the paths of all files under *datadir* (recursive)."""
    return [os.path.join(dirpath, name)
            for dirpath, _dirnames, filenames in os.walk(u"{0}".format(datadir))
            for name in filenames]
def csumfile(filename):
    """Return (hexdigest, filename) for *filename* using the HASHALG digest.

    Based on
    http://abstracthack.wordpress.com/2007/10/19/calculating-md5-checksum/
    """
    hashalg = getattr(hashlib, HASHALG)()  # e.g. hashlib.sha1()
    blocksize = 0x10000
    # Read in fixed-size chunks so arbitrarily large files use bounded
    # memory.  b"" is the EOF sentinel: the original used "", which only
    # terminates on Python 2 (where 'rb' reads return str) and loops
    # forever on Python 3.  'with' also guarantees the file is closed,
    # and the explicit loop drops the Python-2-only builtin reduce().
    with open(filename, 'rb') as fd:
        for chunk in iter(lambda: fd.read(blocksize), b""):
            hashalg.update(chunk)
    return (hashalg.hexdigest(), filename)
def ensure_unix_pathname(pathname):
    """Return *pathname* with backslashes converted to forward slashes.

    Only Windows paths need converting; on every other platform the
    input is returned unchanged.
    """
    if sys.platform != "win32":
        return pathname
    # literal backslash replacement — equivalent to the re.sub original
    return pathname.replace("\\", "/")
if __name__ == "__main__":
parser = OptionParser()
usage = "%prog [options] arg1 arg2"
parser.add_option("-a", "--algorithm", action="store", help="checksum algorithm to use (sha1|md5)")
parser.add_option("-c", "--encoding", action="store", help="File encoding to write manifest")
parser.add_option("-u", "--update", action="store_true", help="Only update new/removed files")
(options, args) = parser.parse_args()
if options.algorithm:
if not options.algorithm in ('md5', 'sha1'):
raise BagCheckSumNotValid('You must specify either "md5" or "sha1" as the checksum algorithm')
HASHALG = options.algorithm
if options.encoding:
ENCODING = options.encoding
if len(args) < 1:
parser.error("You must specify a data directory")
write_manifest(args[0], ENCODING, update=options.update)
|
"""Basic TCP Server that will listen to port 6633."""
import logging
from socket import error as SocketError
from socketserver import BaseRequestHandler, TCPServer, ThreadingMixIn
from threading import current_thread
from kytos.core.connection import CONNECTION_STATE, Connection
from kytos.core.events import KytosEvent
__all__ = ('KytosServer', 'KytosRequestHandler')

# Module-level logger shared by the server and the request handler.
log = logging.getLogger(__name__)
class KytosServer(ThreadingMixIn, TCPServer):
    """Abstraction of a TCPServer to listen to packages from the network.

    Listens on the configured address for new TCP requests and
    instantiates the given RequestHandler for each one, running every
    handler in its own thread.
    """

    allow_reuse_address = True
    main_threads = {}

    def __init__(self, server_address, RequestHandlerClass, controller):
        """Store the controller and defer binding to serve_forever().

        Args:
            server_address (tuple): Address which the server is listening.
                example: ('127.0.0.1', 80)
            RequestHandlerClass(socketserver.BaseRequestHandler):
                Class that will be instantiated to handle each request.
            controller (:class:`~kytos.core.controller.Controller`):
                An instance of Kytos Controller class.
        """
        # bind_and_activate=False: the socket is bound and activated in
        # serve_forever(), so startup errors surface there.
        super().__init__(server_address, RequestHandlerClass,
                         bind_and_activate=False)
        self.controller = controller

    def serve_forever(self, poll_interval=0.5):
        """Bind, activate, then handle requests until shutdown() is called."""
        try:
            self.server_bind()
            self.server_activate()
            host, port = self.server_address[0], self.server_address[1]
            log.info("Kytos listening at %s:%s", host, port)
            super().serve_forever(poll_interval)
        except Exception:
            # release the socket before re-raising the startup failure
            log.error('Failed to start Kytos TCP Server.')
            self.server_close()
            raise
class KytosRequestHandler(BaseRequestHandler):
    """The socket/request handler class for our controller.

    It is instantiated once per connection between each switch and the
    controller.
    The setup method will dispatch a KytosEvent (``kytos/core.connection.new``)
    on the controller, that will be processed by a Core App.
    The finish method will close the connection and dispatch a KytonEvents
    (``kytos/core.connection.closed``) on the controller.
    """

    # Maps a listening port to the protocol spoken on it; unknown ports
    # fall back to a zero-padded port number as the protocol name.
    known_ports = {
        6633: 'openflow'
    }

    def __init__(self, request, client_address, server):
        """Contructor takes the parameters below.

        Args:
            request (socket.socket):
                Request sent by client.
            client_address (tuple):
                Client address, tuple with host and port.
            server (socketserver.BaseServer):
                Server used to send messages to client.
        """
        # NOTE(review): BaseRequestHandler.__init__ runs setup()/handle()/
        # finish() before returning, so this assignment executes after
        # the connection has already been processed — confirm intent.
        super().__init__(request, client_address, server)
        self.connection = None

    def setup(self):
        """Method used to setup the new connection.

        This method builds a new controller Connection, and places a
        ``kytos/core.connection.new`` KytosEvent in the app buffer.
        """
        self.ip = self.client_address[0]
        self.port = self.client_address[1]
        log.info("New connection from %s:%s", self.ip, self.port)
        self.connection = Connection(self.ip, self.port, self.request)  # noqa
        # Derive the protocol name from the port this server listens on.
        server_port = self.server.server_address[1]
        if server_port in self.known_ports:
            protocol_name = self.known_ports[server_port]
        else:
            protocol_name = f'{server_port:04d}'
        self.connection.protocol.name = protocol_name
        # 30s read timeout so a silent peer cannot block this thread forever.
        self.request.settimeout(30)
        # Set by handle() when the read loop stops; logged by finish().
        self.exception = None
        event_name = \
            f'kytos/core.{self.connection.protocol.name}.connection.new'
        event = KytosEvent(name=event_name,
                           content={'source': self.connection})
        self.server.controller.buffers.app.put(event)

    def handle(self):
        """Handle each request and places its data in the raw event buffer.

        This method loops reading the binary data from the connection socket,
        and placing a ``kytos/core.messages.new`` KytosEvent in the raw event
        buffer.
        """
        curr_thread = current_thread()
        MAX_SIZE = 2**16  # read at most 64 KiB per recv()
        while True:
            try:
                new_data = self.request.recv(MAX_SIZE)
            except (SocketError, OSError, InterruptedError,
                    ConnectionResetError) as exception:
                # remember why the loop stopped; finish() reports it
                self.exception = exception
                log.debug('Socket handler exception while reading: %s',
                          exception)
                break
            if new_data == b'':
                # empty read: the peer closed the connection
                self.exception = 'Request closed by client.'
                break
            if not self.connection.is_alive():
                # connection was marked dead elsewhere: drop the data
                # but keep draining the socket
                continue
            log.debug("New data from %s:%s at thread %s", self.ip,
                      self.port, curr_thread.name)
            content = {'source': self.connection,
                       'new_data': new_data}
            event_name = \
                f'kytos/core.{self.connection.protocol.name}.raw.in'
            event = KytosEvent(name=event_name,
                               content=content)
            self.server.controller.buffers.raw.put(event)

    def finish(self):
        """Method is called when the client connection is finished.

        This method closes the connection socket and generates a
        ``kytos/core.connection.lost`` KytosEvent in the App buffer.
        """
        log.info("Connection lost with Client %s:%s. Reason: %s",
                 self.ip, self.port, self.exception)
        self.connection.state = CONNECTION_STATE.FINISHED
        self.connection.close()
        content = {'source': self.connection}
        if self.exception:
            content['exception'] = self.exception
        event_name = \
            f'kytos/core.{self.connection.protocol.name}.connection.lost'
        event = KytosEvent(name=event_name,
                           content=content)
        self.server.controller.buffers.app.put(event)
|
import smtplib
from django.contrib.auth.models import Permission
from django.test import TestCase
from principal.forms import *
from principal.models import *
from principal.services import DepartmentService, CertificationService, UserService, ImpartSubjectService, \
AdministratorService
from gestionalumnos.settings import *
from django.core import mail
from django.test.utils import override_settings
class CertificationTestCase(TestCase):
    """Tests for CertificationService over a fixture of three
    departments, four subjects and three certifications (Titulacion)."""

    def setUp(self):
        # Departments
        self.department_lsi = Departamento.objects.create(
            codigo='1',
            nombre='Departamento de Lenguajes y Sistemas Informaticos',
            web='http://www.lsi.us.es'
        )
        self.department_dte = Departamento.objects.create(
            codigo='2',
            nombre='Departamento de Tecnologia Electronica',
            web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
        )
        self.department_atc = Departamento.objects.create(
            codigo='3',
            nombre='Departamento de Arquitectura y Tecnologia de Computadores',
            web='http://www.atc.us.es/'
        )
        # Subjects
        self.subject_egc = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Evolucion y gestion de la configuracion',
            curso='4',
            codigo='2050032',
            creditos='6',
            duracion='C',
            web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
            tipo_asignatura='OB',
            departamento=self.department_lsi,
        )
        self.subject_rc = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Redes de computadores',
            curso='2',
            codigo='2050013',
            creditos='6',
            duracion='C',
            web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
            tipo_asignatura='OB',
            departamento=self.department_dte,
        )
        self.subject_cm = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Computacion Movil',
            curso='4',
            codigo='2060045',
            creditos='6',
            duracion='C',
            web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
            tipo_asignatura='OP',
            departamento=self.department_atc,
        )
        self.subject_ispp = Asignatura.objects.create(
            cuatrimestre='2',
            nombre='Ingenieria del Software y Practica Profesional',
            curso='4',
            codigo='2050039',
            creditos='6',
            duracion='C',
            web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
            tipo_asignatura='OB',
            departamento=self.department_lsi,
        )
        # Certifications (each linked to a subset of the subjects above)
        self.certification_isw = Titulacion.objects.create(
            codigo='1',
            nombre='Grado en Informatica - Ingenieria del Software',
        )
        self.certification_isw.asignaturas.add(self.subject_rc, self.subject_ispp, self.subject_egc)
        self.certification_isc = Titulacion.objects.create(
            codigo='2',
            nombre='Grado en Informatica - Ingenieria de Computadores',
        )
        self.certification_isc.asignaturas.add(self.subject_rc)
        self.certification_iti = Titulacion.objects.create(
            codigo='3',
            nombre='Grado en Informatica - Tecnologias Informaticas',
        )
        self.certification_iti.asignaturas.add(self.subject_cm, self.subject_rc)

    def test_create_and_save_ok_1(self):
        # A fresh (unused) code passes validation and is persisted.
        data_form = {
            'code': '123456',
            'name': 'Grado en Informatica - Tecnologias Informaticas'
        }
        form = CertificationEditForm(data=data_form)
        self.assertEqual(form.is_valid(), True)
        certification = CertificationService.create_and_save(form)
        certification_bd = Titulacion.objects.get(codigo=123456)
        self.assertEqual(certification_bd, certification)

    def test_create_and_save_error_1(self):
        # Code '1' already exists in the fixture, so the form is invalid.
        data_form = {
            'code': '1',
            'name': 'Grado en Informatica - Ingenieria del Software'
        }
        form = CertificationEditForm(data=data_form)
        self.assertEqual(form.is_valid(), False)

    def test_find_all_ok_1(self):
        certifications = list(CertificationService.find_all())
        list_certifications = [self.certification_isc, self.certification_isw, self.certification_iti]
        self.assertListEqual(certifications, list_certifications)

    def test_find_by_code_ok_1(self):
        certification = CertificationService.find_by_code('2')
        self.assertEqual(certification, self.certification_isc)

    def test_find_by_code_error_1(self):
        # Unknown code yields None rather than raising.
        certification = CertificationService.find_by_code('99')
        self.assertEqual(certification, None)

    def test_find_by_subject_ok_1(self):
        # subject_rc belongs to all three certifications.
        certifications = list(CertificationService.find_by_subject(self.subject_rc.id))
        list_certifications = [self.certification_isw, self.certification_isc, self.certification_iti]
        self.assertListEqual(certifications, list_certifications)

    def test_find_by_subject_ok_2(self):
        certifications = list(CertificationService.find_by_subject(self.subject_ispp.id))
        list_certifications = [self.certification_isw]
        self.assertListEqual(certifications, list_certifications)

    def test_find_by_subject_ok_3(self):
        # Nonexistent subject id yields an empty result.
        certifications = list(CertificationService.find_by_subject('4874'))
        self.assertListEqual(certifications, [])

    def test_search_ok_1(self):
        certifications = list(CertificationService.search('Grado'))
        list_certifications = [self.certification_isc, self.certification_isw, self.certification_iti]
        self.assertListEqual(certifications, list_certifications)

    def test_search_ok_2(self):
        certifications = list(CertificationService.search('i'))
        list_certifications = [self.certification_isc, self.certification_isw, self.certification_iti]
        self.assertListEqual(certifications, list_certifications)

    def test_search_ok_3(self):
        certifications = list(CertificationService.search('Tecnologias'))
        list_certifications = [self.certification_iti]
        self.assertListEqual(certifications, list_certifications)

    def test_search_ok_4(self):
        # No name contains 'z'.
        certifications = list(CertificationService.search('z'))
        self.assertListEqual(certifications, [])

    def test_find_one_ok_1(self):
        certification = CertificationService.find_one(self.certification_isw.id)
        self.assertEqual(certification, self.certification_isw)
class AdministratorTestCase(TestCase):
    """Tests for AdministratorService using a single admin fixture."""

    def setUp(self):
        # Administrators
        self.administrator1 = Administrador.objects.create(
            username='admin',
            is_staff=True,
            is_superuser=False
        )
        self.administrator1.set_password('admin')
        # Grant the two permissions the admin role needs.
        for perm_codename in ('administrator', 'view_subject_details'):
            self.administrator1.user_permissions.add(
                Permission.objects.get(codename=perm_codename))

    def test_find_one_ok_1(self):
        found = AdministratorService.find_one(self.administrator1.id)
        self.assertEqual(found, self.administrator1)
class DepartmentTestCase(TestCase):
    """Tests for DepartmentService over a fixture of three departments."""

    def setUp(self):
        # Departments
        self.department_lsi = Departamento.objects.create(
            codigo='1',
            nombre='Departamento de Lenguajes y Sistemas Informaticos',
            web='http://www.lsi.us.es'
        )
        self.department_dte = Departamento.objects.create(
            codigo='2',
            nombre='Departamento de Tecnologia Electronica',
            web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
        )
        self.department_atc = Departamento.objects.create(
            codigo='3',
            nombre='Departamento de Arquitectura y Tecnologia de Computadores',
            web='http://www.atc.us.es/'
        )

    def test_reconstruct_and_save_ok_1(self):
        # New (unused) code: form validates and a new row is created.
        data_form = {
            'code': '4',
            'name': 'Departamento de Fisica',
            'web': 'http://www.fisica.us.es/'
        }
        form = DepartmentEditForm(data=data_form)
        self.assertEqual(form.is_valid(), True)
        department = DepartmentService.reconstruct_and_save(form)
        department_bd = Departamento.objects.get(codigo=4)
        self.assertEqual(department_bd, department)

    def test_reconstruct_and_save_ok_2(self):
        # Editing an existing department updates it in place.
        data_form = DepartmentService.get_form_data(self.department_lsi)
        data_form['name'] = 'Test'
        form = DepartmentEditForm(data=data_form)
        self.assertEqual(form.is_valid(), True)
        department = DepartmentService.reconstruct_and_save(form)
        department_bd = Departamento.objects.get(id=self.department_lsi.id)
        self.assertEqual(department_bd, department)
        self.assertEqual(department_bd.nombre, 'Test')

    def test_reconstruct_and_save_error_1(self):
        # Code '3' already belongs to another department: invalid.
        data_form = DepartmentService.get_form_data(self.department_lsi)
        data_form['code'] = '3'
        form = DepartmentEditForm(data=data_form)
        self.assertEqual(form.is_valid(), False)

    def test_reconstruct_and_save_error_2(self):
        # Nonexistent id: invalid.
        data_form = DepartmentService.get_form_data(self.department_lsi)
        data_form['id'] = '4944'
        form = DepartmentEditForm(data=data_form)
        self.assertEqual(form.is_valid(), False)

    def test_reconstruct_and_save_error_3(self):
        # Missing id: invalid.
        data_form = DepartmentService.get_form_data(self.department_lsi)
        data_form['id'] = None
        form = DepartmentEditForm(data=data_form)
        self.assertEqual(form.is_valid(), False)

    def test_find_all_ok_1(self):
        departments = list(DepartmentService.find_all())
        list_departments = [self.department_atc, self.department_lsi, self.department_dte]
        self.assertListEqual(departments, list_departments)

    def test_find_by_code_ok_1(self):
        department = DepartmentService.find_by_code('3')
        self.assertEqual(department, self.department_atc)

    def test_find_by_code_error_1(self):
        # Unknown code yields None rather than raising.
        department = DepartmentService.find_by_code('99')
        self.assertEqual(department, None)

    def test_get_form_data_ok_1(self):
        data_form = DepartmentService.get_form_data(self.department_atc)
        data_form1 = {
            'id': self.department_atc.id,
            'code': self.department_atc.codigo,
            'name': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        self.assertDictEqual(data_form, data_form1)

    def test_get_form_data_error_1(self):
        data_form = DepartmentService.get_form_data(self.department_atc)
        data_form1 = {
            'id': self.department_atc.id,
            'code': '324245',
            'name': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        self.assertNotEqual(data_form, data_form1)

    def test_search_ok_1(self):
        departments = list(DepartmentService.search('Departamento'))
        list_departments = [self.department_atc, self.department_lsi, self.department_dte]
        self.assertListEqual(departments, list_departments)

    def test_search_ok_2(self):
        departments = list(DepartmentService.search('i'))
        list_departments = [self.department_atc, self.department_lsi, self.department_dte]
        self.assertListEqual(departments, list_departments)

    def test_search_ok_3(self):
        departments = list(DepartmentService.search('Lenguajes'))
        list_departments = [self.department_lsi]
        self.assertListEqual(departments, list_departments)

    def test_search_ok_4(self):
        # No name contains 'zz'.
        departments = list(DepartmentService.search('zz'))
        self.assertListEqual(departments, [])

    def test_get_form_data_xml_ok_1(self):
        # XML import path: dict with Spanish keys maps to form field names.
        department = {
            'codigo': self.department_atc.codigo,
            'nombre': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        data_form = DepartmentService.get_form_data_xml(department)
        data_form1 = {
            'code': self.department_atc.codigo,
            'name': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        self.assertDictEqual(data_form, data_form1)

    def test_get_form_data_xml_error_1(self):
        department = {
            'codigo': '946514',
            'nombre': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        data_form = DepartmentService.get_form_data_xml(department)
        data_form1 = {
            'code': self.department_atc.codigo,
            'name': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        self.assertNotEqual(data_form, data_form1)

    def test_get_form_data_csv_ok_1(self):
        # CSV import path: positional row [code, name, web].
        department = [
            self.department_atc.codigo,
            self.department_atc.nombre,
            self.department_atc.web
        ]
        data_form = DepartmentService.get_form_data_csv(department)
        data_form1 = {
            'code': self.department_atc.codigo,
            'name': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        self.assertDictEqual(data_form, data_form1)

    def test_get_form_data_csv_error_1(self):
        department = [
            '49498',
            self.department_atc.nombre,
            self.department_atc.web
        ]
        data_form = DepartmentService.get_form_data_csv(department)
        data_form1 = {
            'code': self.department_atc.codigo,
            'name': self.department_atc.nombre,
            'web': self.department_atc.web
        }
        self.assertNotEqual(data_form, data_form1)

    def test_rollback_ok_1(self):
        # rollback() removes the given departments from the database.
        departments = list(DepartmentService.find_all())
        list_departments = [self.department_atc, self.department_lsi, self.department_dte]
        self.assertListEqual(departments, list_departments)
        DepartmentService.rollback(list_departments)
        departments = list(DepartmentService.find_all())
        self.assertListEqual([], departments)

    def test_find_one_ok_1(self):
        department = DepartmentService.find_one(self.department_atc.id)
        self.assertEqual(department, self.department_atc)
class ImpartSubjectTestCase(TestCase):
def setUp(self):
# Departments
self.department_lsi = Departamento.objects.create(
codigo='1',
nombre='Departamento de Lenguajes y Sistemas Informaticos',
web='http://www.lsi.us.es'
)
self.department_dte = Departamento.objects.create(
codigo='2',
nombre='Departamento de Tecnologia Electronica',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
)
self.department_atc = Departamento.objects.create(
codigo='3',
nombre='Departamento de Arquitectura y Tecnologia de Computadores',
web='http://www.atc.us.es/'
)
# Subjects
self.subject_egc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Evolucion y gestion de la configuracion',
curso='4',
codigo='2050032',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
self.subject_rc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Redes de computadores',
curso='2',
codigo='2050013',
creditos='6',
duracion='C',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
tipo_asignatura='OB',
departamento=self.department_dte,
)
self.subject_cm = Asignatura.objects.create(
cuatrimestre='1',
nombre='Computacion Movil',
curso='4',
codigo='2060045',
creditos='6',
duracion='C',
web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
tipo_asignatura='OP',
departamento=self.department_atc,
)
self.subject_ispp = Asignatura.objects.create(
cuatrimestre='2',
nombre='Ingenieria del Software y Practica Profesional',
curso='4',
codigo='2050039',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
# Lecturers
self.lecturer_benavides = Profesor.objects.create(
username='benavides',
email='benavides@us.es',
categoria='Profesor Titular de Universidad',
telefono='954559897',
despacho='F 0.48',
web='http://www.lsi.us.es/~dbc/',
first_name='David',
last_name='Benavides Cuevas',
tutoriaactivada=True,
dni='55555555X'
)
self.lecturer_benavides.set_password('practica')
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_corchuelo = Profesor.objects.create(
username='corchu',
email='corchu@us.es',
categoria='Profesor Titular de Universidad',
telefono='954552770',
despacho='F 1.63',
first_name='Rafael',
last_name='Corchuelo Gil',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=12',
tutoriaactivada=True,
dni='66666666X'
)
self.lecturer_corchuelo.set_password('practica')
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_muller = Profesor.objects.create(
username='cmuller',
email='cmuller@lsi.us.es',
categoria='Becario FPI',
telefono='954553868',
despacho='F 0.43',
first_name='Carlos',
last_name='Muller Cejas',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=108',
tutoriaactivada=True,
dni='77777777X'
)
self.lecturer_muller.set_password('practica')
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_veronica = Profesor.objects.create(
username='averonica',
email='cmuller@lsi.us.es',
categoria='Profesor Titular de Universidad ',
telefono='954557095 ',
despacho='G 1.69',
first_name='Ana Veronica',
last_name='Medina Rodriguez',
web='http://www.dte.us.es/personal/vmedina/',
tutoriaactivada=True,
dni='88888888X'
)
self.lecturer_veronica.set_password('practica')
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
def test_reconstruct_and_save_ok_1(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': 'Coordinador'
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), True)
impart_subject = ImpartSubjectService.reconstruct_and_save(form)
impart_subject_bd = Imparteasignatura.objects.get(profesor=self.lecturer_corchuelo,
asignatura=self.subject_ispp)
self.assertEqual(impart_subject, impart_subject_bd)
def test_reconstruct_and_save_error_1(self):
data_form = {
'subject_id': '',
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': 'Coordinador'
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_2(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': '',
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': 'Coordinador'
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_3(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': ''
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_4(self):
data_form = {
'subject_id': '99854',
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': ''
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_5(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': '74985',
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': ''
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_get_form_data_xml_ok_1(self):
lecturer = {
'uvus': self.lecturer_muller.username,
'cargo': 'Profesor'
}
data_form = ImpartSubjectService.get_form_data_xml(lecturer, self.subject_ispp)
data = {
'subject_id': self.subject_ispp.id,
'lecturer_id': self.lecturer_muller.id,
'lecturer': self.lecturer_muller.first_name + self.lecturer_muller.last_name,
'position': 'Profesor'
}
self.assertDictEqual(data_form, data)
def test_get_form_data_xml_error_1(self):
lecturer = {
'uvus': self.lecturer_muller.username,
'cargo': 'Profesor'
}
data_form = ImpartSubjectService.get_form_data_xml(lecturer, self.subject_ispp)
data = {
'subject_id': self.subject_ispp.id,
'lecturer_id': '-1',
'lecturer': self.lecturer_muller.first_name + self.lecturer_muller.last_name,
'position': 'Profesor'
}
self.assertNotEqual(data_form, data)
def test_get_form_data_xml_error_2(self):
lecturer = {
'uvus': self.lecturer_muller.username,
'cargo': 'Profesor'
}
data_form = ImpartSubjectService.get_form_data_xml(lecturer, self.subject_ispp)
data = {
'subject_id': '-1',
'lecturer_id': self.lecturer_muller.id,
'lecturer': self.lecturer_muller.first_name + self.lecturer_muller.last_name,
'position': 'Profesor'
}
self.assertNotEqual(data_form, data)
# def test_get_form_data_csv_ok_1(self):
#
# lecturer = [
# 'Profesor',
# self.lecturer_muller.dni,
# self.lecturer_muller.last_name + "," + self.lecturer_muller.first_name,
# self.lecturer_muller.username,
# 'null',
# 'Coordinador'
# ]
# data_form = ImpartSubjectService.get_form_data_csv(lecturer, self.subject_ispp)
# data = {
# 'subject_id': self.subject_ispp.id,
# 'lecturer_id': self.lecturer_muller.id,
# 'lecturer': "" + self.lecturer_muller.first_name + " " + self.lecturer_muller.last_name,
# 'position': 'Profesor'
# }
# self.assertEqual(data_form, data)
class UserTestCase(TestCase):
    """Tests for UserService lookup, deletion and rollback operations.

    The setUp fixtures were heavily duplicated (same password / permission
    boilerplate per user); they are now built through private helpers with
    identical field values and creation order.
    """

    # Shared fixture password.  As in the original fixtures, set_password()
    # is not followed by save(), so the hash only lives on the in-memory
    # instance; none of these tests authenticate, so that is harmless.
    PASSWORD = 'practica'

    # Permission codenames granted to every student / lecturer fixture.
    STUDENT_PERMS = ('alumno', 'view_certification_list',
                     'view_tutorial_request_list', 'view_subject_details')
    LECTURER_PERMS = ('profesor', 'view_certification_list',
                      'view_tutorial_request_list')

    def _grant(self, user, codenames):
        """Attach each permission named in *codenames* to *user*."""
        for codename in codenames:
            user.user_permissions.add(Permission.objects.get(codename=codename))

    def _create_student(self, **fields):
        """Create an Alumno fixture with the standard password/permissions."""
        student = Alumno.objects.create(**fields)
        student.set_password(self.PASSWORD)
        self._grant(student, self.STUDENT_PERMS)
        return student

    def _create_lecturer(self, **fields):
        """Create a Profesor fixture with the standard password/permissions."""
        lecturer = Profesor.objects.create(**fields)
        lecturer.set_password(self.PASSWORD)
        self._grant(lecturer, self.LECTURER_PERMS)
        return lecturer

    def setUp(self):
        # Departments
        self.department_lsi = Departamento.objects.create(
            codigo='1',
            nombre='Departamento de Lenguajes y Sistemas Informaticos',
            web='http://www.lsi.us.es'
        )
        self.department_dte = Departamento.objects.create(
            codigo='2',
            nombre='Departamento de Tecnologia Electronica',
            web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
        )
        self.department_atc = Departamento.objects.create(
            codigo='3',
            nombre='Departamento de Arquitectura y Tecnologia de Computadores',
            web='http://www.atc.us.es/'
        )
        # Subjects
        self.subject_egc = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Evolucion y gestion de la configuracion',
            curso='4',
            codigo='2050032',
            creditos='6',
            duracion='C',
            web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
            tipo_asignatura='OB',
            departamento=self.department_lsi,
        )
        self.subject_rc = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Redes de computadores',
            curso='2',
            codigo='2050013',
            creditos='6',
            duracion='C',
            web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
            tipo_asignatura='OB',
            departamento=self.department_dte,
        )
        self.subject_cm = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Computacion Movil',
            curso='4',
            codigo='2060045',
            creditos='6',
            duracion='C',
            web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
            tipo_asignatura='OP',
            departamento=self.department_atc,
        )
        self.subject_ispp = Asignatura.objects.create(
            cuatrimestre='2',
            nombre='Ingenieria del Software y Practica Profesional',
            curso='4',
            codigo='2050039',
            creditos='6',
            duracion='C',
            web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
            tipo_asignatura='OB',
            departamento=self.department_lsi,
        )
        # Students
        self.student_carborgar = self._create_student(
            username='carborgar',
            first_name='Carlos',
            last_name='Borja Garcia - Baquero',
            email='carborgar@alum.us.es',
            dni='47537495X'
        )
        self.student_juamaiosu = self._create_student(
            username='juamaiosu',
            first_name='Juan Elias',
            last_name='Maireles Osuna',
            email='juamaiosu@alum.us.es',
            dni='47537560X'
        )
        self.student_rubgombar = self._create_student(
            username='rubgombar',
            first_name='Ruben',
            last_name='Gomez Barrera',
            email='ruben@alum.us.es',
            dni='11111111X'
        )
        self.student_davjimvar = self._create_student(
            username='davjimvar',
            first_name='David',
            last_name='Jimenez Vargas',
            email='david@alum.us.es',
            dni='22222222X'
        )
        self.student_javrodleo = self._create_student(
            username='javrodleo',
            first_name='Javier',
            last_name='Rodriguez Leon',
            email='javier@alum.us.es',
            dni='33333333X'
        )
        # Lecturers
        self.lecturer_benavides = self._create_lecturer(
            username='benavides',
            email='benavides@us.es',
            categoria='Profesor Titular de Universidad',
            telefono='954559897',
            despacho='F 0.48',
            web='http://www.lsi.us.es/~dbc/',
            first_name='David',
            last_name='Benavides Cuevas',
            tutoriaactivada=True,
            dni='55555555X'
        )
        self.lecturer_corchuelo = self._create_lecturer(
            username='corchu',
            email='corchu@us.es',
            categoria='Profesor Titular de Universidad',
            telefono='954552770',
            despacho='F 1.63',
            first_name='Rafael',
            last_name='Corchuelo Gil',
            web='https://www.lsi.us.es/personal/pagina_personal.php?id=12',
            tutoriaactivada=True,
            dni='66666666X'
        )
        self.lecturer_muller = self._create_lecturer(
            username='cmuller',
            email='cmuller@lsi.us.es',
            categoria='Becario FPI',
            telefono='954553868',
            despacho='F 0.43',
            first_name='Carlos',
            last_name='Muller Cejas',
            web='https://www.lsi.us.es/personal/pagina_personal.php?id=108',
            tutoriaactivada=True,
            dni='77777777X'
        )
        # NOTE(review): the trailing spaces and the e-mail duplicated from
        # lecturer_muller below reproduce the original fixture values verbatim.
        self.lecturer_veronica = self._create_lecturer(
            username='averonica',
            email='cmuller@lsi.us.es',
            categoria='Profesor Titular de Universidad ',
            telefono='954557095 ',
            despacho='G 1.69',
            first_name='Ana Veronica',
            last_name='Medina Rodriguez',
            web='http://www.dte.us.es/personal/vmedina/',
            tutoriaactivada=True,
            dni='88888888X'
        )
        # Teaching assignments.  Bug fix: the second record used to rebind
        # self.impart_ispp, silently discarding the reference to the first
        # one; it now gets its own attribute.
        self.impart_ispp = Imparteasignatura.objects.create(
            cargo='Coordinador',
            profesor=self.lecturer_corchuelo,
            asignatura=self.subject_ispp
        )
        self.impart_ispp_muller = Imparteasignatura.objects.create(
            cargo='Profesor',
            profesor=self.lecturer_muller,
            asignatura=self.subject_ispp
        )
        self.impart_egc = Imparteasignatura.objects.create(
            cargo='Coordinador',
            profesor=self.lecturer_benavides,
            asignatura=self.subject_egc
        )
        # Subject enrolments (old-style direct M2M assignment).
        self.student_carborgar.asignaturas = [self.subject_egc, self.subject_ispp]
        self.student_juamaiosu.asignaturas = [self.subject_egc]

    def test_find_by_username_ok_1(self):
        """find_by_username returns the same row the ORM fetches directly."""
        user = UserService.find_by_username(self.student_carborgar.username)
        user_db = User.objects.get(username=self.student_carborgar.username)
        self.assertEqual(user, user_db)

    def test_find_by_username_error_1(self):
        """find_by_username returns None for an unknown username."""
        user = UserService.find_by_username('ghslih')
        self.assertEqual(user, None)

    def test_delete_ok_1(self):
        """delete removes the user row from the database."""
        username = self.student_carborgar.username
        UserService.delete(self.student_carborgar)
        error = False
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            error = True
        self.assertTrue(error)

    def test_rollback_users_ok_1(self):
        """rollback_users removes every user in the given mapping."""
        user_create = {self.lecturer_muller: 'password', self.lecturer_corchuelo: 'password'}
        len_list1 = len(list(UserService.find_all()))
        UserService.rollback_users(user_create)
        len_list2 = len(list(UserService.find_all()))
        self.assertIs(len_list1 - 2, len_list2)

    def test_rollback_ok_1(self):
        """rollback unlinks students/lecturers from the subject and deletes
        the newly created users."""
        number_link_student_carborgar1 = len(list(self.student_carborgar.asignaturas.all()))
        number_link_student_juamaiosu1 = len(list(self.student_juamaiosu.asignaturas.all()))
        number_link_lecturer_benavides1 = len(list(self.lecturer_benavides.imparteasignatura_set.all()))
        student_link = [self.student_juamaiosu, self.student_carborgar]
        lecturer_link = [self.lecturer_benavides]
        user_create = [self.lecturer_veronica]
        username = self.lecturer_veronica.username
        UserService.rollback(user_create, student_link, lecturer_link, self.subject_egc.id)
        number_link_student_carborgar2 = len(list(self.student_carborgar.asignaturas.all()))
        number_link_student_juamaiosu2 = len(list(self.student_juamaiosu.asignaturas.all()))
        number_link_lecturer_benavides2 = len(list(self.lecturer_benavides.imparteasignatura_set.all()))
        self.assertEqual(number_link_student_carborgar1 - 1, number_link_student_carborgar2)
        self.assertEqual(number_link_student_juamaiosu1 - 1, number_link_student_juamaiosu2)
        self.assertEqual(number_link_lecturer_benavides1 - 1, number_link_lecturer_benavides2)
        error = False
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            error = True
        self.assertTrue(error)
class SubjectTestCase(TestCase):
    """Tests for SubjectService: queries, creation and lookups.

    The setUp fixtures were heavily duplicated (same password / permission
    boilerplate per user); they are now built through private helpers with
    identical field values and creation order.
    """

    # Shared fixture password.  As in the original fixtures, set_password()
    # is not followed by save(), so the hash only lives on the in-memory
    # instance; none of these tests authenticate, so that is harmless.
    PASSWORD = 'practica'

    # Permission codenames granted to every student / lecturer fixture.
    STUDENT_PERMS = ('alumno', 'view_certification_list',
                     'view_tutorial_request_list', 'view_subject_details')
    LECTURER_PERMS = ('profesor', 'view_certification_list',
                      'view_tutorial_request_list')

    def _grant(self, user, codenames):
        """Attach each permission named in *codenames* to *user*."""
        for codename in codenames:
            user.user_permissions.add(Permission.objects.get(codename=codename))

    def _create_student(self, **fields):
        """Create an Alumno fixture with the standard password/permissions."""
        student = Alumno.objects.create(**fields)
        student.set_password(self.PASSWORD)
        self._grant(student, self.STUDENT_PERMS)
        return student

    def _create_lecturer(self, **fields):
        """Create a Profesor fixture with the standard password/permissions."""
        lecturer = Profesor.objects.create(**fields)
        lecturer.set_password(self.PASSWORD)
        self._grant(lecturer, self.LECTURER_PERMS)
        return lecturer

    def setUp(self):
        # Departments
        self.department_lsi = Departamento.objects.create(
            codigo='1',
            nombre='Departamento de Lenguajes y Sistemas Informaticos',
            web='http://www.lsi.us.es'
        )
        self.department_dte = Departamento.objects.create(
            codigo='2',
            nombre='Departamento de Tecnologia Electronica',
            web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
        )
        self.department_atc = Departamento.objects.create(
            codigo='3',
            nombre='Departamento de Arquitectura y Tecnologia de Computadores',
            web='http://www.atc.us.es/'
        )
        # Subjects (short codes '1'..'4'; several tests depend on '4' being
        # taken and '5' being free).
        self.subject_egc = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Evolucion y gestion de la configuracion',
            curso='4',
            codigo='1',
            creditos='6',
            duracion='C',
            web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
            tipo_asignatura='OB',
            departamento=self.department_lsi,
        )
        self.subject_rc = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Redes de computadores',
            curso='2',
            codigo='2',
            creditos='6',
            duracion='C',
            web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
            tipo_asignatura='OB',
            departamento=self.department_dte,
        )
        self.subject_cm = Asignatura.objects.create(
            cuatrimestre='1',
            nombre='Computacion Movil',
            curso='4',
            codigo='3',
            creditos='6',
            duracion='C',
            web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
            tipo_asignatura='OP',
            departamento=self.department_atc,
        )
        self.subject_ispp = Asignatura.objects.create(
            cuatrimestre='2',
            nombre='Ingenieria del Software y Practica Profesional',
            curso='4',
            codigo='4',
            creditos='6',
            duracion='C',
            web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
            tipo_asignatura='OB',
            departamento=self.department_lsi,
        )
        # Students
        self.student_carborgar = self._create_student(
            username='carborgar',
            first_name='Carlos',
            last_name='Borja Garcia - Baquero',
            email='carborgar@alum.us.es',
            dni='47537495X'
        )
        self.student_juamaiosu = self._create_student(
            username='juamaiosu',
            first_name='Juan Elias',
            last_name='Maireles Osuna',
            email='juamaiosu@alum.us.es',
            dni='47537560X'
        )
        self.student_rubgombar = self._create_student(
            username='rubgombar',
            first_name='Ruben',
            last_name='Gomez Barrera',
            email='ruben@alum.us.es',
            dni='11111111X'
        )
        self.student_davjimvar = self._create_student(
            username='davjimvar',
            first_name='David',
            last_name='Jimenez Vargas',
            email='david@alum.us.es',
            dni='22222222X'
        )
        self.student_javrodleo = self._create_student(
            username='javrodleo',
            first_name='Javier',
            last_name='Rodriguez Leon',
            email='javier@alum.us.es',
            dni='33333333X'
        )
        # Lecturers
        self.lecturer_benavides = self._create_lecturer(
            username='benavides',
            email='benavides@us.es',
            categoria='Profesor Titular de Universidad',
            telefono='954559897',
            despacho='F 0.48',
            web='http://www.lsi.us.es/~dbc/',
            first_name='David',
            last_name='Benavides Cuevas',
            tutoriaactivada=True,
            dni='55555555X'
        )
        self.lecturer_corchuelo = self._create_lecturer(
            username='corchu',
            email='corchu@us.es',
            categoria='Profesor Titular de Universidad',
            telefono='954552770',
            despacho='F 1.63',
            first_name='Rafael',
            last_name='Corchuelo Gil',
            web='https://www.lsi.us.es/personal/pagina_personal.php?id=12',
            tutoriaactivada=True,
            dni='66666666X'
        )
        self.lecturer_muller = self._create_lecturer(
            username='cmuller',
            email='cmuller@lsi.us.es',
            categoria='Becario FPI',
            telefono='954553868',
            despacho='F 0.43',
            first_name='Carlos',
            last_name='Muller Cejas',
            web='https://www.lsi.us.es/personal/pagina_personal.php?id=108',
            tutoriaactivada=True,
            dni='77777777X'
        )
        # NOTE(review): the trailing spaces and the e-mail duplicated from
        # lecturer_muller below reproduce the original fixture values verbatim.
        self.lecturer_veronica = self._create_lecturer(
            username='averonica',
            email='cmuller@lsi.us.es',
            categoria='Profesor Titular de Universidad ',
            telefono='954557095 ',
            despacho='G 1.69',
            first_name='Ana Veronica',
            last_name='Medina Rodriguez',
            web='http://www.dte.us.es/personal/vmedina/',
            tutoriaactivada=True,
            dni='88888888X'
        )
        # Teaching assignments
        self.impart_ispp_corchu = Imparteasignatura.objects.create(
            cargo='Coordinador',
            profesor=self.lecturer_corchuelo,
            asignatura=self.subject_ispp
        )
        self.impart_ispp_muller = Imparteasignatura.objects.create(
            cargo='Profesor',
            profesor=self.lecturer_muller,
            asignatura=self.subject_ispp
        )
        self.impart_ispp_benavides = Imparteasignatura.objects.create(
            cargo='Coordinador',
            profesor=self.lecturer_benavides,
            asignatura=self.subject_ispp
        )
        self.impart_egc_benavides = Imparteasignatura.objects.create(
            cargo='Coordinador',
            profesor=self.lecturer_benavides,
            asignatura=self.subject_egc
        )
        # Subject enrolments (old-style direct M2M assignment).
        self.student_carborgar.asignaturas = [self.subject_egc, self.subject_ispp]
        self.student_juamaiosu.asignaturas = [self.subject_egc]

    def test_get_student_subjects_ok_1(self):
        """get_student_subjects returns the subjects the student is enrolled in."""
        subjects = list(SubjectService.get_student_subjects(self.student_carborgar.id))
        subjects1 = [self.subject_egc, self.subject_ispp]
        self.assertListEqual(subjects, subjects1)

    def test_get_lecturer_subjects_ok_1(self):
        """get_lecturer_subjects returns the subjects the lecturer teaches."""
        subjects = list(SubjectService.get_lecturer_subjects(self.lecturer_benavides.id))
        subjects1 = [self.subject_egc, self.subject_ispp]
        self.assertListEqual(subjects, subjects1)

    def test_create_and_save_ok_1(self):
        """A valid edit form creates a subject that can be fetched back."""
        data_form = {
            'name': 'Prueba',
            'course': '1',
            'code': '5',
            'quarter': '1',
            'credits': '6',
            'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
            'duration': 'C',
            'type': 'OB',
            'departament': self.department_lsi.id,
        }
        form = SubjectEditForm(data=data_form)
        self.assertEqual(form.is_valid(), True)
        subject = SubjectService.create(form)
        SubjectService.save(subject)
        subject_bd = Asignatura.objects.get(codigo=subject.codigo)
        self.assertEqual(subject, subject_bd)

    def test_create_and_save_error_1(self):
        """The edit form is rejected when the subject code is already taken."""
        data_form = {
            'name': 'Prueba',
            'course': '1',
            'code': '4',  # already used by subject_ispp
            'quarter': '1',
            'credits': '6',
            'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
            'duration': 'C',
            'type': 'OB',
            'departament': self.department_lsi.id,
        }
        form = SubjectEditForm(data=data_form)
        self.assertEqual(form.is_valid(), False)

    def test_create_and_save_error_2(self):
        """The edit form is rejected for an out-of-range course value."""
        data_form = {
            'name': 'Prueba',
            'course': '10',
            'code': '5',
            'quarter': '1',
            'credits': '6',
            'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
            'duration': 'C',
            'type': 'OB',
            'departament': self.department_lsi.id,
        }
        form = SubjectEditForm(data=data_form)
        self.assertEqual(form.is_valid(), False)

    def test_create_and_save_error_3(self):
        """The edit form is rejected for an out-of-range quarter value."""
        data_form = {
            'name': 'Prueba',
            'course': '1',
            'code': '5',
            'quarter': '8',
            'credits': '6',
            'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
            'duration': 'C',
            'type': 'OB',
            'departament': self.department_lsi.id,
        }
        form = SubjectEditForm(data=data_form)
        self.assertEqual(form.is_valid(), False)

    def test_find_by_code_ok_1(self):
        """find_by_code returns the subject with the given code."""
        subject = SubjectService.find_by_code(self.subject_ispp.codigo)
        self.assertEqual(subject, self.subject_ispp)

    def test_find_by_code_error_1(self):
        """find_by_code returns None for an unused code."""
        subject = SubjectService.find_by_code('5')
        self.assertEqual(subject, None)

    def test_find_one_ok_1(self):
        """find_one returns the subject with the given primary key."""
        subject = SubjectService.find_one(self.subject_ispp.id)
        self.assertEqual(subject, self.subject_ispp)

    def test_find_one_error_1(self):
        """find_one returns None for a nonexistent primary key."""
        subject = SubjectService.find_one('-1')
        self.assertEqual(subject, None)
@override_settings(EMAIL_BACKEND='django.core.mail.backends.smtp.EmailBackend')
class EmailTestCase(TestCase):
    """Smoke test for outgoing mail through the real SMTP backend."""

    def test_send_email(self):
        """Sending a message to ourselves reports exactly one delivery."""
        try:
            mail_sent_success = mail.send_mail('Test',
                                               'Test',
                                               EMAIL_HOST_USER, [EMAIL_HOST_USER],
                                               fail_silently=True)
            self.assertEqual(mail_sent_success, 1)
        except Exception:
            # Bug fix: the original assertEqual(False, True, msg) was an
            # obfuscated way of forcing a failure; self.fail is the idiom.
            self.fail('No se ha podido enviar el correo')
|
"""
Base class for exporters
"""
import os
from pyhmsa.util.monitorable import _Monitorable, _MonitorableThread
class _ExporterThread(_MonitorableThread):
    """Monitorable worker thread that exports *datafile* into *dirpath*."""

    def __init__(self, datafile, dirpath, *args, **kwargs):
        # Prepend the data file and output directory to the positional
        # arguments forwarded to the thread target.
        super().__init__(args=(datafile, dirpath) + args, kwargs=kwargs)

    def _run(self, datafile, dirpath, *args, **kwargs):
        # Subclasses implement the actual export work here.
        raise NotImplementedError
class _Exporter(_Monitorable):
    """Base class for exporters that write a data file into a directory."""

    def _create_thread(self, datafile, dirpath, *args, **kwargs):
        # Forward the data file and destination directory to the worker.
        args = (datafile, dirpath,) + args
        super()._create_thread(*args, **kwargs)

    def validate(self, datafile):
        """Raise if *datafile* cannot be exported; the default accepts all."""
        pass

    def can_export(self, datafile):
        """Return True if validate() accepts *datafile*, False otherwise."""
        try:
            self.validate(datafile)
        except Exception:
            # Bug fix: narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed here.
            return False
        else:
            return True

    def export(self, datafile, dirpath):
        """Validate *datafile* and start exporting it into *dirpath*.

        Raises ValueError when *dirpath* does not exist or is not a
        directory; whatever validate() raises propagates unchanged.
        """
        self.validate(datafile)
        if not os.path.exists(dirpath):
            raise ValueError('Path does not exist: %s' % dirpath)
        if not os.path.isdir(dirpath):
            raise ValueError('Path is not a directory: %s' % dirpath)
        self._start(datafile, dirpath)
|
# Deployment settings: database and cache credentials are read from the
# platform-supplied ``bundle_config`` mapping.
# NOTE(review): ``bundle_config`` looks like an ep.io-style PaaS config
# module -- confirm before reusing these settings elsewhere.
from __future__ import absolute_import
from .base import *
from bundle_config import config
# PostgreSQL connection built from the platform's service credentials.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': config['postgres']['database'],
        'USER': config['postgres']['username'],
        'PASSWORD': config['postgres']['password'],
        'HOST': config['postgres']['host'],
    }
}
# Redis-backed cache.  VERSION is keyed to the deployed release, so each
# deploy's cache entries are namespaced away from the previous release's.
CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': '{host}:{port}'.format(
            host=config['redis']['host'],
            port=config['redis']['port']),
        'OPTIONS': {
            'PASSWORD': config['redis']['password'],
        },
        'VERSION': config['core']['version'],
    },
}
# Production: debug pages must stay off.
DEBUG = False
|
from django.apps import AppConfig
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class AppConfig(AppConfig):
    """Django app configuration for the icekit IIIF plugin.

    NOTE(review): the class name shadows the imported django.apps.AppConfig
    base class; renaming would be clearer, but project settings may refer to
    the config by this dotted name -- confirm before changing.
    """
    # Dotted path of the parent package (this module's path minus its own name).
    name = '.'.join(__name__.split('.')[:-1])
    label = 'icekit_plugins_iiif'
    verbose_name = "IIIF Basics"

    def ready(self):
        """Ensure the custom IIIF permission exists once apps are loaded."""
        # Create custom permission pointing to User, because we have no other
        # model to hang it off for now...
        # TODO This is a hack, find a better way
        User = get_user_model()
        try:
            # this doesn't work if migrations haven't been updated, resulting
            # in "RuntimeError: Error creating new content types. Please make
            # sure contenttypes is migrated before trying to migrate apps
            # individually."
            content_type = ContentType.objects.get_for_model(User)
            Permission.objects.get_or_create(
                codename='can_use_iiif_image_api',
                name='Can Use IIIF Image API',
                content_type=content_type,
            )
        except RuntimeError:
            pass
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Set localized (Danish) verbose names on TournamentTeam fields.

    Only ``verbose_name`` changes here ('holdnavn' / 'medlemmer'), so the
    alterations affect admin/form labels rather than column definitions.
    """

    dependencies = [
        ('main', '0021_auto_20161208_1214'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tournamentteam',
            name='name',
            field=models.CharField(max_length=255, verbose_name='holdnavn'),
        ),
        migrations.AlterField(
            model_name='tournamentteam',
            name='profiles',
            field=models.ManyToManyField(to='main.Profile', verbose_name='medlemmer'),
        ),
    ]
|
from functools import wraps
def retry_task(f):
    """Decorator that re-invokes *f* while it reports an HTTP 500 status.

    The wrapped callable inspects the keyword argument ``retry`` (default
    ``False``, which compares equal to 0; the keyword is also forwarded to
    *f* unchanged):

    * ``retry == 0``  -- call once, no retrying.
    * ``retry > 0``   -- up to ``retry`` checked attempts, returning the
      first result whose ``'status'`` is not 500, then one final
      *unchecked* attempt whose result is returned regardless.
    * ``retry == -1`` -- keep calling until a non-500 result appears.

    Any other value falls through and yields ``None``.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        attempts = kwargs.get('retry', False)
        if attempts == 0:
            return f(*args, **kwargs)
        if attempts > 0:
            for _ in range(attempts):
                outcome = f(*args, **kwargs)
                if outcome['status'] != 500:
                    return outcome
            # Every checked attempt hit 500: one last, unchecked call.
            return f(*args, **kwargs)
        if attempts == -1:
            while True:
                outcome = f(*args, **kwargs)
                if outcome['status'] != 500:
                    return outcome
        return None
    return decorated_function
|
""" Defines an action for moving the workspace to the parent directory.
"""
from os.path import dirname
from enthought.traits.api import Bool, Instance
from enthought.pyface.api import ImageResource
from enthought.pyface.action.api import Action
from enthought.envisage.ui.workbench.api import WorkbenchWindow
from puddle.resource.resource_view import RESOURCE_VIEW
from common import IMAGE_LOCATION
class UpAction(Action):
    """ Defines an action for moving the workspace to the parent directory.
    """

    #--------------------------------------------------------------------------
    #  "Action" interface:
    #--------------------------------------------------------------------------

    # A longer description of the action:
    description = "Move workspace to the parent directory"

    # The action's name (displayed on menus/tool bar tools etc):
    name = "&Up"

    # A short description of the action used for tooltip text etc:
    tooltip = "Open parent directory"

    # Keyboard accelerator:
    accelerator = "Alt+Up"

    # The action's image (displayed on tool bar tools etc):
    image = ImageResource("up", search_path=[IMAGE_LOCATION])

    #--------------------------------------------------------------------------
    #  "UpAction" interface:
    #--------------------------------------------------------------------------

    # The workbench window the action lives in:
    window = Instance(WorkbenchWindow)

    #--------------------------------------------------------------------------
    #  "Action" interface:
    #--------------------------------------------------------------------------

    def perform(self, event):
        """ Perform the action: switch the workspace to its parent directory
            and refresh the resource view if one is open.
        """
        # Note that we always offer the service via its name, but look it up
        # via the actual protocol.
        from puddle.resource.i_workspace import IWorkspace

        workspace = self.window.application.get_service(IWorkspace)
        workspace.path = dirname(workspace.absolute_path)

        view = self.window.get_view_by_id(RESOURCE_VIEW)
        if view is not None:
            # Reuse the workspace looked up above instead of fetching the
            # service a second time (the original performed a redundant
            # duplicate get_service call here).
            view.tree_viewer.refresh(workspace)
|
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pkg_resources import parse_version
def check_dependencies():
    '''
    setuptools causes problems for installing packages (especially
    statsmodels). Use this function to abort installation instead.

    Raises ImportError with an actionable message when a required package
    is missing or too old.  The version checks previously used bare
    ``except:`` clauses, which converted *any* error (including
    KeyboardInterrupt) into a misleading "install or upgrade" message;
    they are narrowed to ImportError here.
    '''
    try:
        import cython
    except ImportError:
        raise ImportError("Install cython before installing TurbuStat.")

    try:
        import matplotlib
        mpl_version = matplotlib.__version__
        if parse_version(mpl_version) < parse_version('1.2'):
            print("***Before installing, upgrade matplotlib to 1.2***")
            raise ImportError
    except ImportError:
        raise ImportError(
            "Install or upgrade matplotlib before installing TurbuStat.")

    try:
        from numpy.version import version as np_version
        if parse_version(np_version) < parse_version('1.6'):
            print("***Before installing, upgrade numpy to 1.6***")
            raise ImportError
    except ImportError:
        raise ImportError(
            "Install or upgrade numpy before installing TurbuStat.")

    try:
        from scipy.version import version as sc_version
        if parse_version(sc_version) < parse_version('0.12'):
            print("***Before installing, upgrade scipy to 0.12***")
            raise ImportError
    except ImportError:
        raise ImportError(
            "Install or upgrade scipy before installing TurbuStat.")

    try:
        from pandas.version import version as pa_version
        if parse_version(pa_version) < parse_version('0.13'):
            print("***Before installing, upgrade pandas to 0.13***")
            raise ImportError
    except ImportError:
        raise ImportError(
            "Install or upgrade pandas before installing TurbuStat.")

    try:
        from statsmodels.version import version as sm_version
        if parse_version(sm_version) < parse_version('0.4.0'):
            print("***Before installing, upgrade statsmodels to 0.4.0***")
            raise ImportError
    except ImportError:
        raise ImportError(
            "Install or upgrade statsmodels before installing TurbuStat.")

    try:
        import sklearn
        skl_version = sklearn.__version__
        if parse_version(skl_version) < parse_version('0.13.0'):
            print("***Before installing, upgrade sklearn to 0.13.0***")
            raise ImportError
    except ImportError:
        raise ImportError(
            "Install or upgrade sklearn before installing TurbuStat.")

    try:
        from astropy.version import version as ast_version
        if parse_version(ast_version[:3]) < parse_version('0.4'):
            print(("""***Before installing, upgrade astropy to 0.4.
                NOTE: This is the dev version as of 17/06/14.***"""))
            raise ImportError("")
    except ImportError:
        raise ImportError(
            "Install or upgrade astropy before installing TurbuStat.")

    try:
        import astrodendro
    except ImportError:
        raise ImportError(("""Install or upgrade astrodendro before installing
            TurbuStat. ***NOTE: Need dev version as
            of 17/06/14.***"""))
if __name__ == "__main__":
    # Verify runtime dependencies up front; raises ImportError (aborting
    # the install) if anything is missing or too old.
    check_dependencies()
    setup(name='turbustat',
          version='0.0',
          description='Distance metrics for comparing spectral line data cubes.',
          author='Eric Koch, Caleb Ward, Jason Loeppky and Erik Rosolowsky',
          author_email='koch.eric.w@gmail.com',
          url='http://github.com/Astroua/TurbuStat',
          scripts=[],
          # Ship every package except the test suites.
          packages=find_packages(
              exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
          )
|
from __future__ import print_function
import os
import shutil
import time
import subprocess
import numpy as np
from .phonopy_conf_creator import PhonopyConfCreator
from vasp.poscar import Poscar
from autotools import symlink_force
class PhononCalculator(object):
    """Drive phonopy runs (band structure, DOS, thermal properties).

    Input files produced elsewhere (POSCAR, FORCE_CONSTANTS, ...) are
    symlinked from ``directory_data`` into the working directory,
    ``.conf`` files are generated via ``PhonopyConfCreator``, and phonopy
    is invoked once per configuration file, each in its own ``*_calc``
    subdirectory.
    """
    def __init__(self,
                 directory_data="./",
                 poscar_filename="POSCAR",
                 poscar_average_filename=None,
                 is_average_mass=False,
                 dim_sqs=None,
                 is_primitive=False,
                 is_band=True,
                 is_partial_dos=False,
                 is_tetrahedron=False,
                 is_tprop=False,
                 mesh=None,
                 nac=None):
        # Avoid mutable default arguments: fall back to 1x1x1 grids.
        if dim_sqs is None:
            dim_sqs = np.array([1, 1, 1])
        if mesh is None:
            mesh = np.array([1, 1, 1])
        self._variables = None
        self._home = os.path.expanduser("~")
        # Locate the phonopy executable on PATH.
        # NOTE(review): check_output returns bytes on Python 3; it works as
        # an argv element on POSIX but prints as b'...' -- confirm the
        # intended interpreter version.
        self._phonopy = subprocess.check_output(["which", "phonopy"]).strip()
        print("phonopy_path:", self._phonopy)
        self._directory_data = directory_data
        self._poscar_filename = poscar_filename
        self._poscar_average_filename = poscar_average_filename
        self._is_average_mass = is_average_mass
        self.set_dim_sqs(dim_sqs)
        self._is_band = is_band
        self.set_is_tetrahedron(is_tetrahedron)
        self.set_is_partial_dos(is_partial_dos)
        self.set_is_tprop(is_tprop)
        self._is_primitive = is_primitive
        self._mesh = np.array(mesh)
        self._nac = nac
    def set_dim_sqs(self, dim_sqs):
        """Set the supercell dimensions used for the SQS."""
        self._dim_sqs = dim_sqs
    def set_is_tetrahedron(self, is_tetrahedron):
        """Enable/disable DOS calculation with the tetrahedron method."""
        self._is_tetrahedron = is_tetrahedron
    def set_is_partial_dos(self, is_partial_dos):
        """Enable/disable partial-DOS calculation."""
        self._is_partial_dos = is_partial_dos
    def set_is_tprop(self, is_tprop):
        """Enable/disable thermal-property calculation."""
        self._is_tprop = is_tprop
    def set_mesh(self, mesh):
        """Set the q-point sampling mesh."""
        self._mesh = mesh
    def set_variables(self, variables):
        """Set extra variables forwarded to PhonopyConfCreator."""
        self._variables = variables
    def run(self):
        """Link inputs, generate .conf files, and run phonopy on each."""
        self.copy_files()
        self.create_phonopy_conf()
        conf_files = self.gather_conf_files()
        for conf_file in conf_files:
            self.run_phonopy(conf_file)
    def copy_files(self):
        """Symlink the phonopy input files from the data directory."""
        dir_data = self._directory_data
        symlink_force(os.path.join(dir_data, 'writefc.conf'), 'writefc.conf')
        symlink_force(os.path.join(dir_data, 'POSCAR'), 'POSCAR')
        symlink_force(os.path.join(dir_data, 'POSCAR_ideal'), 'POSCAR_ideal')
        symlink_force(os.path.join(dir_data, 'FORCE_CONSTANTS'), 'FORCE_CONSTANTS')
    def create_phonopy_conf(self):
        """Generate the phonopy .conf files for this data set."""
        directory_data = self._directory_data
        dim_sqs = self._dim_sqs
        variables = self._variables
        mesh = self._mesh.copy()
        print("directory_data:", directory_data)
        print("mesh:", mesh)
        spg_number = self.create_spg_number()
        # Get band path for the specific space group
        phonopy_conf_creator = PhonopyConfCreator(
            spg_number,
            mesh=mesh,
            tmax=3000,
            dim_sqs=dim_sqs,
            is_average_mass=self._is_average_mass,
            is_primitive=self._is_primitive,
            band_points=101,
            poscar_name="POSCAR", # For getting the chemical symbols
            magmom_line=None,
            variables=variables,
            nac=self._nac,
        )
        phonopy_conf_creator.run()
    def create_spg_number(self):
        """
        Return the space group number of the structure.

        spg_number is used to determine the primitive axis and band paths.
        Prefers the averaged POSCAR when one was supplied.
        """
        if self._poscar_average_filename is not None:
            poscar_filename = self._poscar_average_filename
        else:
            poscar_filename = self._poscar_filename
        print('SPG number is searched from {}'.format(poscar_filename))
        spg_number = Poscar(poscar_filename).get_symmetry_dataset()["number"]
        print("spg_number:", spg_number)
        return spg_number
    def gather_conf_files(self):
        """Return the .conf files to run, based on the configured flags."""
        conf_files = [
            "dos_smearing.conf",
        ]
        if self._is_band:
            conf_files.append("band.conf")
        if self._is_tetrahedron:
            conf_files.append("dos_tetrahedron.conf")
        if self._is_partial_dos:
            conf_files.append("partial_dos_smearing.conf")
        if self._is_tetrahedron and self._is_partial_dos:
            conf_files.append("partial_dos_tetrahedron.conf")
        if self._is_tprop:
            conf_files.append("tprop.conf")
        return conf_files
    def run_phonopy(self, conf_file):
        """Run phonopy for one .conf file inside a fresh ``*_calc`` dir.

        Logs to ``<name>.log`` and restores the working directory when done.
        """
        root = os.getcwd()
        home = self._home
        phonopy = self._phonopy
        print("=" * 80)
        print(conf_file)
        print("=" * 80)
        dir_name = conf_file.replace(".conf", "_calc")
        log_file = conf_file.replace(".conf", ".log")
        # Always start from a clean calculation directory.
        if os.path.exists(dir_name):
            shutil.rmtree(dir_name)
        os.mkdir(dir_name)
        os.chdir(dir_name)
        # Link the inputs that exist one level up (BORN is optional).
        for fn in [conf_file, "POSCAR", "FORCE_CONSTANTS", "BORN"]:
            if os.path.exists(os.path.join("..", fn)):
                os.symlink("../" + fn, fn)
        if os.path.exists(log_file):
            os.remove(log_file)
        time1 = time.time()
        with open(log_file, "w") as f:
            subprocess.call(
                [phonopy, conf_file, "-v"],
                stdout=f,
            )
        time2 = time.time()
        dtime = time2 - time1
        print("Time for calc.: {:12.6f} s".format(dtime))
        # Post-process thermal properties with a user helper script.
        if conf_file == "tprop.conf":
            subprocess.call(
                ["python", home + "/script/python/phonopy_tprop_arranger.py"]
            )
        os.chdir(root)
def main():
    """Command-line entry point: configure and run a PhononCalculator."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--datadir",
                        default="..",
                        type=str,
                        help="Data directory")
    # The three calculation toggles share the same store_true shape.
    for flag, text in (
            ("--tetrahedron", "Calculate using tetrahedron method."),
            ("--partial_dos", "Calculate partial DOS."),
            ("--tprop", "Calculate thermal properties.")):
        parser.add_argument(flag, action="store_true", help=text)
    options = parser.parse_args()
    PhononCalculator(
        directory_data=options.datadir,
        is_tetrahedron=options.tetrahedron,
        is_partial_dos=options.partial_dos,
        is_tprop=options.tprop,
    ).run()


if __name__ == "__main__":
    main()
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
# Register ModelAdmin classes from all installed apps.
admin.autodiscover()
# String view references + patterns() imply Django < 1.10.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'CryptoKnocker.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # NOTE(review): include() on r'^$' prefixes every URL in mainpage.urls
    # with the empty pattern -- confirm this is intended rather than a
    # direct view reference.
    url(r'^$', include("mainpage.urls")),
    url(r"^login/$", "mainpage.views.login_form"),
    url(r'^management/$', "management.views.index"),
    url(r'^management/login$', "management.views.user_login"),
    url(r'^logout/$', "management.views.user_logout"),
    url(r'^management/registration/$', "management.views.registration"),
    url(r'^management/keys/$', "management.views.manageKeys"),
    url(r'^management/keys/changeKey$', "management.views.changeKey"),
    url(r'^management/getPorts/$', "management.views.getPorts"),
    # Catch-all: serve files from MEDIA_ROOT for any other path.
    url(r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT})
)
|
"""
annotations_line2d module
Created on Thu Sep 10 21:51:23 2015
@author: James Sorenson
"""
import matplotlib
import matplotlib.pyplot as plt
import threading
# Attribute name used to attach AnnotationPicker instances (and canvas
# callback ids) to matplotlib objects.
attr_name = 'annotations_line2d'
_event= None # Holds the last pick event; used only for interactive debugging.
class DraggableAnnotationLine2D(matplotlib.offsetbox.DraggableBase):
    """This class is like Matplotlib's DraggableAnnotation, but this one actually works.
    Apparently, the original class can't handle annotations that are created
    using 'offset points' from a data point. This class ONLY works with those.
    Left-click to move the annotation without changing the data point.
    Middle-click to slide the annotation to a different data point.
    Right-click to delete the annotation.
    The original annotation artist is in self.ref_artist.
    We save additional info in self.line, self.index, and self.formatter.
    """
    # Class-level lock to make sure only ONE annotation is moved at a time.
    # Due to QT's multi-threaded nature, it's best to use a real thread lock.
    _drag_lock=threading.Lock()
    _counter=0 # Just a counter to give each annotation a unique ID.
    def __init__(self, ref_artist, line=None, index=None, formatter=None, use_blit=True):
        """
        ref_artist : the Annotation artist to make draggable.
        line : Line2D the annotation is attached to (needed for sliding).
        index : index into the line's data that the annotation points at.
        formatter : fcn(line, index) -> str used to (re)generate the text.
        use_blit : use blitting for smoother dragging.
        """
        # Use the base init (This isn't C++ where the parent is called automatically.)
        super().__init__(ref_artist, use_blit=use_blit)
        # Store the other parameters
        self.line=line
        self.index=index
        self.formatter=formatter
        # Create a unique ID for this annotation (for debugging)
        DraggableAnnotationLine2D._counter += 1
        DraggableAnnotationLine2D._counter %= 2**31 # Not too big
        self.id = DraggableAnnotationLine2D._counter
        #print('Init',self.id)
        if formatter is not None:
            # Get and set the text
            self.ref_artist.set_text(self.formatter(line, index))
        #Update the canvas to make sure the annotation is visible
        self.canvas.draw()
    def artist_picker(self, artist, event):
        """
        Determines if the artist should enable move for this mouse button event
        """
        # Make sure this only happens with a click. Ignore scroll.
        # Left or Right click works on all of these annotations
        # Middle click (slide) requires that line and index are assigned
        if (event.button in (1,3)) or \
            (event.button ==2 and self.line is not None and self.index is not None):
            # Good action. We only want to drag if the cursor is inside the
            # box, not the arrow and the area around it.
            # contains(event) returns (bool,attr)
            #print('Picked',self.id)
            drag = self.ref_artist.get_bbox_patch().contains(event)
            if drag[0]:
                #Make sure no other annotation are dragging.
                # wait=False means no block. True if a successful lock.
                if DraggableAnnotationLine2D._drag_lock.acquire(False):
                    # Record the mouse button
                    self.button=event.button
                    #print('Claim',self.id)
                    return drag
        # If we made it here, then we're not moving
        return (False, None)
    def save_offset(self):
        """
        On button-down, this saves the current location of the annotation.
        Annotation object is in self.ref_artist.
        """
        #print('Save',self.id)
        if self.button == 1:
            # Left-click. Move the annotation while pointing at the same data.
            # Get the starting position of the artist in points (relative to data point)
            self.drag_start_text_points = self.ref_artist.get_position()
            # Get the inverted transform so we can convert pixels to points.
            self.drag_trans_mat = self.ref_artist.get_transform().inverted().get_matrix()
        elif self.button == 2:
            # Middle-click. We need some additional information to slide the data.
            self.xydata=self.line.get_xydata() #just makes it easier (this does NOT copy)
            # we need the pixels of the starting data point (not the cursor)
            # NOTE(review): Annotation.get_axes() was removed in matplotlib
            # 3.0; this module targets older matplotlib -- confirm before
            # upgrading.
            self.drag_start_pixels = self.ref_artist.get_axes().transData.transform(self.ref_artist.xy)
            # Get the translation from pixels to data for annotation.xy
            self.drag_trans_pix2dat = self.ref_artist.get_axes().transData.inverted()
    def update_offset(self, dx, dy):
        """
        dx and dy is the total pixel offset from the point where the mouse
        drag started.
        """
        if self.button == 1: # Left-click
            # Scale delta pixels to delta points using parts of annotation transform.
            # The full transform includes the data offset, but set position already does that.
            new_position=(self.drag_start_text_points[0] + dx * self.drag_trans_mat[0,0],
                          self.drag_start_text_points[1] + dy * self.drag_trans_mat[1,1])
            # Apply as delta points from data point
            self.ref_artist.set_position(new_position)
        elif self.button == 2: # Middle-click
            # We may have a logarithmic scale, but update offset only gives us delta pixels.
            # Add the delta to the starting pixels, then convert to data coordinates
            # NOTE(review): relies on matplotlib's __init__ re-exporting
            # numpy as ``matplotlib.numpy`` -- fragile and undocumented;
            # prefer a direct ``import numpy`` -- confirm.
            pixels_dxy = matplotlib.numpy.array((dx,dy))
            new_data_xy = self.drag_trans_pix2dat.transform(self.drag_start_pixels+pixels_dxy)
            # Determine if the new data coordinates reach or exceed the next line data point.
            index=self.index
            while (index > 0) and (self.xydata[index-1][0] > new_data_xy[0]):
                #Move left
                index -= 1
            while (index < self.xydata.shape[0] - 1) and (self.xydata[index+1][0] < new_data_xy[0]):
                # Move right
                index += 1
            if index != self.index:
                # we moved an index! Update the annotation
                self.ref_artist.xy=self.xydata[index,:]
                self.index=index
                if self.formatter is not None:
                    # Update the text in the annotation
                    self.ref_artist.set_text(self.formatter(self.line, index))
    def finalize_offset(self):
        """Called when the mouse button is released, if this was picked in the first place."""
        #print('Finalize',self.id)
        if self.button == 2 and self.formatter is not None:
            # Print out annotation text for the user to copy/paste
            self.print_annotation()
        elif self.button == 3:
            # Delete annotation
            self.remove()
    def on_release(self,event):
        """
        Called when the mouse button is released, whether or not this was picked.
        We extend this function so that we are guaranteed to release the thread lock.
        """
        # Call the original
        super().on_release(event)
        #Everyone tries to remove the block, just in case the controlling annotation was removed.
        try:
            DraggableAnnotationLine2D._drag_lock.release()
        except RuntimeError:
            pass # Already released. Not a concern.
    def print_annotation(self):
        """Does exactly what you think it does"""
        print('Annotation: {0}, ind={1}\n{2}'.format(self.line.get_label(), self.index, self.ref_artist.get_text()))
    def remove(self):
        """Disconnect and delete the annotation."""
        #print('Remove',self.id)
        self.disconnect() # Disconnect the callbacks
        self.ref_artist.remove() # Delete the annotation artist
        self.got_artist=False # Tell this class it no longer has an artist
        self.canvas.draw() # Update the whole canvas so the annotation disappears
class AnnotationPicker(object):
    """
    A class to enable convenient annotations to any plot.
    This is meant only for 2D lines.
    Left-click to move the annotation without changing the data point.
    Middle-click to slide the annotation to a different data point.
    Right-click to delete the annotation.
    Optional arguments:
    artists: (default None) A single or list of artists to attach this to as 'artist annotations'
    tolerance : (default 5) Picker tolerance to a line's data point to create an annotation.
    formatter : function to generate the string in the annotation. fcn(Line2D artist, index)
    All other keyword arguments will be passed to the annotation.
    """
    def __init__(self, artists=None, tolerance=5, formatter=None, button=1, key = 'control', use_blit=True, **kwargs):
        # Parse the arguments
        self.tolerance = tolerance
        self.use_blit = use_blit
        self.button = button # mouse button that creates an annotation
        self.key=key # keyboard modifier that must be held with the click
        if formatter is None: # Use default
            self.formatter=self._annotate_line_str
        else:
            self.formatter = formatter
        # Save the annotation parameters
        self.annotation_kwargs = dict(xycoords='data', textcoords='offset points',
                                      fontsize=11, picker=True, xytext=(20, 20),
                                      bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
                                      arrowprops=dict(shrink=0.05, headwidth=5, width=1))
        # Add in additional/modified user parameters
        self.annotation_kwargs.update(kwargs)
        # Apply this annotation instance to the given artists and children
        if artists is not None:
            self.apply(artists)
    def apply(self, artists):
        """
        Enable picker on lines so that annotations are activated.
        This particular Annotation instance will be applied to this artist and
        its children (unless the children already have their own instance).
        Use '_clear_annotations' if you wish to override children settings.
        """
        # This is overly complex, but it allows the user to throw anything at it (figure, axes, line, etc)
        # Make it iterable for convenience
        artists = _make_iterable(artists)
        for artist in artists:
            if artist is None:
                continue
            # Attach this instance to the given artists
            setattr(artist, attr_name, self)
            # Enable picker to any line contained in this artist that is not already enabled.
            if isinstance(artist, matplotlib.lines.Line2D) and not artist.pickable():
                lines = [artist]
            elif isinstance(artist, matplotlib.axes.Axes):
                lines = [line for line in artist.get_lines() if not line.pickable()]
            elif isinstance(artist, matplotlib.figure.Figure):
                lines = [line for ax in artist.get_axes() for line in ax.get_lines() if not line.pickable()]
            else:
                lines=[]
            for line in lines:
                line.set_picker(self.tolerance)
            # Make sure the callbacks are enabled for the parent canvas
            enable_callbacks(artist)
    def annotate(self, line, index, text=None):
        """
        Makes a draggable, interactive annotation on the given line,
        at the given index, with the given text.
        line : Line2D object to annotate
        index : The index of the line to put the annotation
        text : The text to fill the annotation with. If None, then use default.
        Returns a DraggableAnnotationLine2D instance where the annotation artist is in self.ref_artist.
        """
        if text is None:
            # Get the text from the formatter
            formatter=self.formatter
        else:
            # Manual text is given. Don't use the formatter
            formatter = None
        # Create the annotation at the designated point
        # NOTE(review): Line2D.get_axes() was removed in matplotlib 3.0;
        # this module targets older matplotlib -- confirm before upgrading.
        ax=line.get_axes()
        annot=ax.annotate(text, line.get_xydata()[index,:], **self.annotation_kwargs)
        # Make it draggable using our class, then return the object
        return DraggableAnnotationLine2D(annot, line, index, formatter, use_blit=self.use_blit)
    def _annotate_line_str(self, line, index):
        """
        The default function to take a Line2D artist and index and generate a
        string for the annotation box.
        """
        xy=line.get_xydata()[index]
        # NOTE(review): the 'y:' label (vs 'x=') looks like a typo, but this
        # is user-visible output, so it is deliberately left unchanged here.
        return '{0}[{1}]:\nx={2:.9}\ny:{3:.9}'.format(line.get_label(),index,xy[0],xy[1])
    def _onpick(self,event):
        """Called by canvas pick event."""
        # Require the configured button + modifier key on a Line2D pick.
        if event.mouseevent.button == self.button and \
            event.mouseevent.key == self.key and \
            isinstance(event.artist, matplotlib.lines.Line2D):
            # More than one index may be in range. Determine the middle index.
            ind = event.ind[len(event.ind)//2]
            global _event
            _event=event
            # Generate the annotation
            self.annotate(event.artist, ind)
def enable_callbacks(artist):
    """Hook the annotation pick/enter callbacks up to the artist's canvas.

    The connection ids are stored on the canvas under ``attr_name`` so the
    callbacks are registered at most once per canvas.
    """
    # Resolve whatever was passed (figure, artist, or canvas) to a canvas.
    if isinstance(artist, matplotlib.figure.Figure):
        canvas = artist.canvas
    elif hasattr(artist, 'get_figure'):
        canvas = artist.get_figure().canvas
    else:
        canvas = artist
    if hasattr(canvas, attr_name):
        return  # Callbacks already registered for this canvas.
    connection_ids = [
        canvas.mpl_connect('pick_event', _on_pick_event),
        canvas.mpl_connect('figure_enter_event', _on_figure_enter_event),
    ]
    setattr(canvas, attr_name, connection_ids)
def disable_callbacks(canvas):
    """Remove the annotation callbacks attached to a canvas.

    Pickers and AnnotationPicker instances on the artists are left in
    place; only the canvas-level callback connections are dropped.
    """
    if isinstance(canvas, matplotlib.figure.Figure):
        # A figure was passed instead of a canvas; unwrap it.
        canvas = canvas.canvas
    for connection_id in getattr(canvas, attr_name, []):
        canvas.mpl_disconnect(connection_id)
    delattr(canvas, attr_name)
    print('AnnotationPicker callback removed from canvas.')
def annotate(line, index, text=None):
    """
    Wrapper function around AnnotationPicker.annotate()
    This will find the controlling instance of Annotations for the given line
    and create an interactive annotation at the given index with the given text.
    Input:
    line: The matplotlib line object to annotate (plt.figure(1).axes[0].lines[0])
    index: The index of the line to annotate.
    text: The annotation text. If None, then the AnnotationPicker.formatter()
    is used to generate text at the given line and index.
    Returns:
    DraggableAnnotationLine2D object
    """
    annotations_instance = _find_annotations_instance(line)
    if annotations_instance is None:
        # Create a default annotation for this line
        annotations_instance = AnnotationPicker(line)
        setattr(line, attr_name, annotations_instance)
    # Bug fix: the result was previously dropped, so callers never received
    # the DraggableAnnotationLine2D object the docstring promises.
    return annotations_instance.annotate(line, index, text)
def subplots(*args, anno=None, **kwargs):
    """Wrapper around plt.subplots() that wires up an AnnotationPicker.

    Pass ``anno=<AnnotationPicker instance>`` to attach a specific picker;
    otherwise a default one is created and attached to the new figure.
    """
    # plt.subplots() shows immediately in interactive mode and updates
    # gcf/gca, exactly like the plain call would.
    fig, axes = plt.subplots(*args, **kwargs)
    if anno is None:
        # The default picker attaches itself to the figure on construction.
        AnnotationPicker(fig)
    else:
        anno.apply(fig)
    return (fig, axes)
def _make_iterable(obj):
"""Return obj as a list if it is not already an iterable object"""
if hasattr(obj,'__iter__'):
return obj
else:
# Make it iterable for consistency
return [obj]
def _find_annotations_instance(artist):
    """Locate the AnnotationPicker instance controlling *artist*.

    The instance may be attached to the artist itself, to its axes, or to
    its figure; the closest owner wins. Returns None when nothing is found.
    """
    owners = [artist]
    if hasattr(artist, 'get_axes'):
        owners.append(artist.get_axes())
    if hasattr(artist, 'get_figure'):
        owners.append(artist.get_figure())
    for owner in owners:
        if hasattr(owner, attr_name):
            return getattr(owner, attr_name)
    return None
def _clear_annotations(artist):
    """
    Call this on any artist to clear the annotation instances for that artist
    and all of its children. Mostly useful for debugging.
    """
    artists = _make_iterable(artist)
    for artist in artists:
        if hasattr(artist, attr_name):
            delattr(artist, attr_name)
        # Bug fix: the attribute name was misspelled ('get chlldren'), so
        # the recursion into child artists never actually ran.
        if hasattr(artist, 'get_children'):
            _clear_annotations(artist.get_children())
    print('All annotations in artist and children deleted.')
def _on_pick_event(event):
    """Canvas-level pick handler.

    Starting with the picked artist itself, this function determines the
    closest AnnotationPicker instance and delegates to it. This permits
    different settings per line or per axes.
    """
    instance = _find_annotations_instance(event.artist)
    if instance is not None:
        # Let the controlling AnnotationPicker handle the pick.
        instance._onpick(event)
def _on_figure_enter_event(event):
    """When the mouse enters a figure, make every line pickable.

    Lines plotted after the AnnotationPicker was attached have no picker
    yet; give each one the tolerance of its controlling instance.
    """
    figure = event.canvas.figure
    for axes in figure.axes:
        for line in axes.lines:
            if line.pickable():
                continue  # Already enabled; leave it alone.
            instance = _find_annotations_instance(line)
            if instance is not None:
                line.set_picker(instance.tolerance)
    # We may need to update legends if the user manually plotted or deleted a line.
    #legend_update(fig, draw=True) #Draw if a change was detected
if __name__ == '__main__':
    # Interactive smoke test / demo for the annotation machinery.
    import numpy as np
    plt.ion()
    # Use our subplots wrapper to make sure annotations are enabled
    fig,ax=subplots(2,1)
    ax[0].set_title('click on points')
    x=np.r_[-5:5:.1]
    y=x**2-5*x+3
    lines=[]
    lines += ax[0].plot(x,x**2-5*x+3, '-.',label='My Line')
    lines += ax[1].plot(x,5*x+4,label='Line2')
    # Enable Annotations
    anno=AnnotationPicker(fig)
    an=anno.annotate(ax[0].lines[0],30, 'A manual annotation')
    # Add a legend
    #leg=legend(ax)
    # Add another line and see if moving the mouse in catches it
    ax[1].plot(x,2*x+7, label='New line')
    # Create custom string for 2nd axes
    def custom_text(line,ind):
        # Custom formatter: same data as the default but a different prefix.
        xy=line.get_xydata()[ind]
        custom='Custom text\nData[{0}]: {1:.9}, {2:.9}'.format(ind,xy[0],xy[1])
        return custom
    anno2=AnnotationPicker(ax[1],formatter=custom_text, key=None)
    ax[1].plot(x,y, '.-',label='No picker yet') # See if the picker gets enabled
    ax[1].legend()
    plt.draw()
|
import argparse
import base64
import datetime
from decimal import Decimal
import math
import os.path
import sys
# Command-line interface; all dimensions are interpreted in --units.
parser = argparse.ArgumentParser(description="Combine images of Italic calligraphy practice sheets into a single OpenDocument file. Note that this program does not verify that the specified images will fit and retain their aspect ratios within the specified page dimensions: You must verify that yourself. The generated flat OpenDocument file is printed on standard output.")
parser.add_argument("-d", "--description", default="", help="""description of the file (added before the public domain dedication [see -p], if any; default is blank)""")
parser.add_argument("-p", "--public-domain-dedication", metavar="AUTHOR", default=None, help="""add a Creative Commons CC0 Public Domain Dedication to the generated image using the specified AUTHOR""")
parser.add_argument("-t", "--title", default="Italic Calligraphy Practice Sheets", help="""the document's title in its metadata (default: "Italic Calligraphy Practice Sheets")""")
parser.add_argument("-u", "--units", default="mm", help="""units used for page and margin dimensions (can be any unit suffix recognized by the OpenDocument standard; default: mm)""")
parser.add_argument("width", type=Decimal, help="""the width of the page""")
parser.add_argument("height", type=Decimal, help="""the height of the page""")
parser.add_argument("margin", type=Decimal, help="""the width of page margins""")
parser.add_argument("sheetimage", nargs="+", help="""a list of SVG images of Italic calligraphy practice sheets""")
# Global error flag: error() records failures so validation can continue
# and the script can exit non-zero at the end.
errors = False
def error(message):
    """Report *message* on stderr, prefixed with the program name, and
    remember that an error occurred (sets the module-level ``errors``)."""
    global errors
    sys.stderr.write(os.path.basename(sys.argv[0]) + ": error: " + message + "\n")
    errors = True
if __name__ == "__main__":
    try:
        args = parser.parse_args()
    except Exception:
        # NOTE(review): argparse signals bad arguments with SystemExit,
        # which is NOT an Exception subclass, so this handler can never
        # fire as intended -- confirm whether custom handling is wanted.
        error("invalid command line arguments (invalid syntax?)")
        sys.exit(1)
    # Validate the page geometry before generating any output.
    if args.width <= 0:
        error("width must be positive")
    if args.height <= 0:
        error("height must be positive")
    if args.margin < 0:
        error("margin must be positive or zero")
    if args.margin > args.width * Decimal(0.5):
        error("margin exceeds horizontal page dimensions (i.e., it's too large!)")
    if args.margin > args.height * Decimal(0.5):
        error("margin exceeds vertical page dimensions (i.e., it's too large!)")
    if args.units not in {"mm", "cm", "m", "km", "pt", "pc", "inch", "ft", "mi"}:
        error("unrecognized units: must be one of mm, cm, m, km, pt, pc, inch, ft, or mi")
    if errors:
        sys.exit(1)
    if not args.sheetimage:
        sys.exit(0)
    # The image area is the page minus the margin on each side.
    imgwidth = args.width - 2 * args.margin
    imgheight = args.height - 2 * args.margin
    now = datetime.datetime.today()
    sys.stdout.write("""<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<office:document xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0" xmlns:style="urn:oasis:names:tc:opendocument:xmlns:style:1.0" xmlns:text="urn:oasis:names:tc:opendocument:xmlns:text:1.0" xmlns:draw="urn:oasis:names:tc:opendocument:xmlns:drawing:1.0" xmlns:fo="urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:meta="urn:oasis:names:tc:opendocument:xmlns:meta:1.0" xmlns:svg="urn:oasis:names:tc:opendocument:xmlns:svg-compatible:1.0" office:version="1.2" office:mimetype="application/vnd.oasis.opendocument.text">
 <office:meta>
  <meta:creation-date>{0}</meta:creation-date>
  <dc:description>{1}Pages are {2}{5}x{3}{5} with {4}{5} margins.""".format(now.strftime("%FT%TZ"), "{0}\n\n".format(args.description) if args.description else "", args.width, args.height, args.margin, args.units))
    if args.public_domain_dedication:
        sys.stdout.write("""
Created on {0} by {1}.
To the extent possible under law, {1} has waived all copyright and related or neighboring rights to this image. You can copy, modify, distribute and perform this image, even for commercial purposes, all without asking permission. Please see &lt;http://creativecommons.org/publicdomain/zero/1.0/&gt; for more information.""".format(now.strftime("%F"), args.public_domain_dedication.strip()))
    sys.stdout.write("""</dc:description>
  <dc:title>{0}</dc:title>
  <dc:date>{1}</dc:date>
 </office:meta>
 <office:styles>
  <style:style style:name="Standard" style:family="paragraph" style:class="text"/>
  <style:style style:name="Graphics" style:family="graphic">
   <style:graphic-properties text:anchor-type="paragraph" svg:x="0mm" svg:y="0mm" style:wrap="dynamic" style:number-wrapped-paragraphs="no-limit" style:wrap-contour="false" style:vertical-pos="top" style:vertical-rel="paragraph" style:horizontal-pos="center" style:horizontal-rel="paragraph"/>
  </style:style>
 </office:styles>
 <office:automatic-styles>
  <style:style style:name="P1" style:family="paragraph" style:parent-style-name="Standard">
   <style:paragraph-properties fo:break-before="page"/>
  </style:style>
  <style:style style:name="fr1" style:family="graphic" style:parent-style-name="Graphics">
   <style:graphic-properties style:mirror="none"/>
  </style:style>
  <style:page-layout style:name="pm1">
   <style:page-layout-properties fo:page-width="{2}{5}" fo:page-height="{3}{5}" fo:margin-top="{4}{5}" fo:margin-bottom="{4}{5}" fo:margin-left="{4}{5}" fo:margin-right="{4}{5}"/>
  </style:page-layout>
 </office:automatic-styles>
 <office:master-styles>
  <style:master-page style:name="Standard" style:page-layout-name="pm1"/>
 </office:master-styles>
 <office:body>
  <office:text>\n""".format(args.title, now.strftime("%FT%TZ"), args.width, args.height, args.margin, args.units))
    def add_image(path, imgno, paragraph_style):
        """Emit one practice-sheet image as a base64 <draw:image> paragraph."""
        sys.stdout.write("""   <text:p text:style-name="{0}"><draw:frame draw:style-name="fr1" draw:name="n{1}" text:anchor-type="paragraph" svg:width="{2}{4}" svg:height="{3}{4}" draw:z-index="0"><draw:image><office:binary-data>""".format(paragraph_style, imgno, imgwidth, imgheight, args.units))
        data = None
        try:
            with open(path, "rb") as imgfile:
                data = imgfile.read()
        except OSError as e:
            error("unable to read " + path + ": " + e.strerror)
        if data:
            sys.stdout.write(str(base64.b64encode(data), encoding="UTF-8"))
        sys.stdout.write("""</office:binary-data></draw:image></draw:frame></text:p>\n""")
    for index, path in enumerate(args.sheetimage):
        # Bug fix: ``index is 0`` compared identity rather than equality; it
        # only worked because CPython caches small integers.
        add_image(path, index, "Standard" if index == 0 else "P1")
    sys.stdout.write("""  </office:text>
 </office:body>
</office:document>\n""")
    if errors:
        sys.exit(2)
|
import OOMP
# Auto-generated catalogue entry (item 9452).
# NOTE(review): 'RESE' with size '0805' presumably marks an 0805 chip
# resistor -- confirm against the OOMP type-code table.
newPart = OOMP.oompItem(9452)
newPart.addTag("oompType", "RESE")
newPart.addTag("oompSize", "0805")
newPart.addTag("oompColor", "X")
newPart.addTag("oompDesc", "O271")
newPart.addTag("oompIndex", "67")
# Register the part in the global OOMP parts list.
OOMP.parts.append(newPart)
|
"""Cascade UserAffiliation deletes
Revision ID: 5de499ab5b62
Revises: 14f51f27a106
Create Date: 2016-12-13 00:21:39.842218
"""
# Alembic revision identifiers, used by Alembic's migration graph.
revision = '5de499ab5b62'
down_revision = '14f51f27a106'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function in this module."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function in this module."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_data_broker():
    """Recreate the user_affiliation foreign keys with ON DELETE CASCADE."""
    ### commands auto generated by Alembic - please adjust! ###
    # Constraints must be dropped before they can be recreated with
    # the cascade behaviour.
    op.drop_constraint('user_affiliation_user_fk', 'user_affiliation', type_='foreignkey')
    op.drop_constraint('user_affiliation_cgac_fk', 'user_affiliation', type_='foreignkey')
    op.create_foreign_key('user_affiliation_user_fk', 'user_affiliation', 'users', ['user_id'], ['user_id'], ondelete='CASCADE')
    op.create_foreign_key('user_affiliation_cgac_fk', 'user_affiliation', 'cgac', ['cgac_id'], ['cgac_id'], ondelete='CASCADE')
    ### end Alembic commands ###
def downgrade_data_broker():
    """Restore the original foreign keys without cascade deletes."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('user_affiliation_cgac_fk', 'user_affiliation', type_='foreignkey')
    op.drop_constraint('user_affiliation_user_fk', 'user_affiliation', type_='foreignkey')
    op.create_foreign_key('user_affiliation_cgac_fk', 'user_affiliation', 'cgac', ['cgac_id'], ['cgac_id'])
    op.create_foreign_key('user_affiliation_user_fk', 'user_affiliation', 'users', ['user_id'], ['user_id'])
    ### end Alembic commands ###
|
from __future__ import unicode_literals
from gazetteer.models import GazSource,GazSourceConfig,LocationTypeField,CodeFieldConfig,NameFieldConfig
from skosxl.models import Concept, Scheme, MapRelation
from gazetteer.settings import TARGET_NAMESPACE_FT
def load_base_ft():
    """Ensure the base feature-type scheme and its ADMIN concept exist.

    Best-effort: failure to create the concept is ignored so repeated runs
    (or partially loaded databases) do not abort the load.
    """
    (sch, created) = Scheme.objects.get_or_create(uri=TARGET_NAMESPACE_FT[:-1], defaults={'pref_label': "Gaz Feature types"})
    try:
        (ft, created) = Concept.objects.get_or_create(term="ADMIN", defaults={'pref_label': "Populated Place", 'definition': "Populated place"}, scheme=sch)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the create itself remains best-effort.
        pass
def load_ft_mappings():
    """Placeholder: feature-type mappings are not loaded yet."""
    pass
def load_config():
    """(Re)create the TM_WorldBoundaries gazetteer source configuration.

    Deletes any previous configuration of the same name, then creates the
    config plus its name, location-type, and country-code field mappings,
    and finally registers the mapstory source that uses it.
    """
    try:
        GazSourceConfig.objects.filter(name="TM_WorldBoundaries").delete()
    except Exception:
        # Narrowed from a bare `except:`; deletion is best-effort (the
        # config may simply not exist yet on a first run).
        pass
    config = GazSourceConfig.objects.create(lat_field="lat", name="TM_WorldBoundaries", long_field="lon")
    NameFieldConfig.objects.create(config=config, language="en", as_default=True, languageNamespace="", field="name", languageField="")
    LocationTypeField.objects.create(field='"ADMIN"', namespace=TARGET_NAMESPACE_FT, config=config)
    # One code-field mapping per country-code column in the source data.
    for code_field, namespace in (
        ("iso3", "http://mapstory.org/id/countries/iso3"),
        ("iso2", "http://mapstory.org/id/countries/iso2"),
        ("un", "http://mapstory.org/id/countries/un"),
        ("fips", "http://mapstory.org/id/countries/fips"),
    ):
        CodeFieldConfig.objects.create(config=config, field=code_field, namespace=namespace)
    (s, created) = GazSource.objects.get_or_create(source="tm_world_borders", config=config, source_type="mapstory")
    print(s, created)
"""
class Migration(migrations.Migration):
initial = True
dependencies = [
#('yourappname', '0001_initial'),
]
operations = [
migrations.RunPython(load_ft_mappings),
migrations.RunPython(load_config),
]
"""
|
from flask import *
from playhouse.flask_utils import *
import string
from app import app
from model import Major, Minor, Store, Transaction, Item
@app.route('/major', methods=['GET', 'POST'])
def major_list():
    """Render every major with its minors grouped beneath it."""
    query = (Major
             .select(Major, Minor)
             .join(Minor, on=(Major.id == Minor.major).alias('minor'))
             .order_by(Major.id))
    majors = []
    minors = []
    prev = None
    # Rows arrive ordered by major id; flush the accumulated minors each
    # time the major changes.
    for major in query:
        entry = {'id': major.minor.id, 'name': major.minor.name}
        if prev is not None and major.id != prev.id:
            majors.append({'id': prev.id, 'income': prev.income,
                           'name': prev.name, 'minors': minors})
            minors = [entry]
        else:
            minors.append(entry)
        prev = major
    # Flush the final group, if any rows were seen at all.
    if prev is not None:
        majors.append({'id': prev.id, 'income': prev.income,
                       'name': prev.name, 'minors': minors})
    return render_template('major.html', majors=majors)
@app.route('/major/add', methods=['GET', 'POST'])
def major_add():
    """Create a new major (with comma-separated minors) or update an existing one.

    POST with ``major_id`` updates that major's name/income flag; POST
    without it creates the major plus one Minor per non-blank name in the
    comma-separated ``minors`` field.
    """
    if request.method == 'POST':
        if request.form.get('major_id'):
            # Update path: edit an existing major.
            major = get_object_or_404(Major, Major.id == request.form['major_id'])
            minors = Minor.listWithStats(request.form['major_id'])
            major.name = request.form['name']
            major.income = bool(request.form.get('income'))
            major.save()
            flash('Category #%d updated successfully.' % major.id, 'success')
        else:
            # Create path: new major plus its minors.
            major = Major.create(name=request.form['name'],
                                 income=bool(request.form.get('income')))
            minors = []
            # str methods replace the string.split/string.strip module
            # functions, which were removed in Python 3; whitespace-only
            # names are now skipped instead of creating empty minors.
            for minor_name in request.form['minors'].split(','):
                minor_name = minor_name.strip()
                if minor_name:
                    minors.append(Minor.create(name=minor_name, major=major))
            flash('A category created successfully.', 'success')
        return render_template('major.html', major=major, minors=minors)
    return render_template('major.html')
@app.route('/major/<int:id>', methods=['GET', 'POST'])
def major_detail(id):
    """Show one major, its minors, and the total item count across them."""
    major = get_object_or_404(Major, Major.id == id)
    minors = Minor.listWithStats(id)
    num_items = sum(minor.count for minor in minors)
    return render_template('major.html',
                           major=major, minors=minors, num_items=num_items)
@app.route('/major/delete/<int:id>', methods=['GET', 'POST'])
def major_delete(id):
    """Delete a major and all of its minors; respond with a JSON flag."""
    major = get_object_or_404(Major, Major.id == id)
    major.delete_instance()
    # Remove the dependent minors in a single bulk delete.
    Minor.delete().where(Minor.major == id).execute()
    flash('Category #%d is deleted.' % id, 'success')
    return jsonify(success=True)
@app.route('/_minor/add', methods=['POST'])
def minor_add():
    """Add a subcategory (minor) to an existing major; respond with JSON."""
    # Read the id outside the try so the error path can always report it
    # (previously a missing field left `major_id` unbound in the handler).
    major_id = request.form.get('major_id')
    try:
        major = get_object_or_404(Major, Major.id == major_id)
        Minor.create(name=request.form['name'], major=major)
    except Exception:
        # %s, not %d: form values are strings, so the original '%d' raised
        # TypeError inside this handler. `except Exception` also narrows
        # the original bare `except:`.
        flash('Category #%s not found.' % major_id, 'danger')
        return jsonify(success=False)
    flash('A new subcategory is added.', 'success')
    return jsonify(success=True)
@app.route('/_minor/delete/<int:id>', methods=['GET'])
def minor_delete(id):
    """Delete one minor; respond with a JSON success flag."""
    try:
        minor = get_object_or_404(Minor, Minor.id == id)
        minor.delete_instance()
    except Exception:
        # Narrowed from a bare `except:` so system-exiting exceptions
        # (KeyboardInterrupt, SystemExit) still propagate.
        return jsonify(success=False)
    return jsonify(success=True)
@app.route('/minor/<int:id>', methods=['GET'])
def minor_detail(id):
    """Render one minor together with every major (for reassignment)."""
    minor = get_object_or_404(Minor, Minor.id == id)
    all_majors = Major.select().order_by(Major.id)
    return render_template('minor.html', minor=minor, majors=all_majors)
@app.route('/_minor/edit/<int:id>', methods=['POST'])
def minor_edit(id):
    """Rename a minor and/or move it to another major; respond with JSON."""
    try:
        minor = Minor.get(Minor.id == id)
        minor.name = request.form['name']
        minor.major = request.form['major_id']
        minor.save()
    except Exception:
        # Narrowed from a bare `except:` so system-exiting exceptions
        # still propagate.
        return jsonify(success=False)
    return jsonify(success=True)
|
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
# Demo: a text view with a button floated over it using Gtk.Overlay.
window = Gtk.Window()
window.set_default_size(200, 200)
window.connect("destroy", Gtk.main_quit)
# Gtk.Overlay stacks widgets: add() sets the base child, add_overlay()
# draws later children on top of it.
overlay = Gtk.Overlay()
window.add(overlay)
textview = Gtk.TextView()
textview.set_wrap_mode(Gtk.WrapMode.WORD_CHAR)
textbuffer = textview.get_buffer()
textbuffer.set_text("Welcome to the PyGObject Tutorial\n\nThis guide aims to provide an introduction to using Python and GTK+.\n\nIt includes many sample code files and exercises for building your knowledge of the language.", -1)
overlay.add(textview)
button = Gtk.Button(label="Overlayed Button")
# Center the button within the overlay rather than stretching it.
button.set_valign(Gtk.Align.CENTER)
button.set_halign(Gtk.Align.CENTER)
overlay.add_overlay(button)
overlay.show_all()
window.show_all()
# Enter the GTK main loop; returns when the window is destroyed.
Gtk.main()
|
"""Tests to ensure that the html5lib tree builder generates good trees."""
import warnings
# Detect whether the optional html5lib builder is importable; the flag
# gates the whole test class below.
try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError:
    # Python 3-compatible form of the old `except ImportError, e:`;
    # the bound exception object was never used.
    HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
HTML5TreeBuilderSmokeTest,
SoupTest,
skipIf,
)
@skipIf(
    not HTML5LIB_PRESENT,
    "html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    """See ``HTML5TreeBuilderSmokeTest``."""
    # Regression tests specific to the html5lib tree builder; the generic
    # smoke tests are inherited from HTML5TreeBuilderSmokeTest.
    @property
    def default_builder(self):
        # Builder class under test; the smoke-test mixin instantiates it.
        return HTML5TreeBuilder
    def test_soupstrainer(self):
        # The html5lib tree builder does not support SoupStrainers.
        strainer = SoupStrainer("b")
        markup = "<p>A <b>bold</b> statement.</p>"
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(markup, parse_only=strainer)
        # The strainer must be ignored: the full document is parsed anyway.
        self.assertEqual(
            soup.decode(), self.document_for(markup))
        # And a warning must explain that parse_only was ignored.
        self.assertTrue(
            "the html5lib tree builder doesn't support parse_only" in
            str(w[0].message))
    def test_correctly_nested_tables(self):
        """html5lib inserts <tbody> tags where other parsers don't."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')
        self.assertSoupEquals(
            markup,
            '<table id="1"><tbody><tr><td>Here\'s another table:'
            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
            '</td></tr></tbody></table>')
        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")
    def test_xml_declaration_followed_by_doctype(self):
        markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<p>foo</p>
</body>
</html>'''
        soup = self.soup(markup)
        # Verify that we can reach the <p> tag; this means the tree is connected.
        self.assertEqual(b"<p>foo</p>", soup.p.encode())
    def test_reparented_markup(self):
        # Mis-nested <em> forces html5lib to reparent nodes across <p> tags.
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))
    def test_reparented_markup_ends_with_whitespace(self):
        # Same as above but with trailing whitespace that must be preserved.
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))
    def test_reparented_markup_containing_identical_whitespace_nodes(self):
        """Verify that we keep the two whitespace nodes in this
        document distinct when reparenting the adjacent <tbody> tags.
        """
        markup = '<table> <tbody><tbody><ims></tbody> </table>'
        soup = self.soup(markup)
        space1, space2 = soup.find_all(string=' ')
        tbody1, tbody2 = soup.find_all('tbody')
        assert space1.next_element is tbody1
        assert tbody2.next_element is space2
    def test_reparented_markup_containing_children(self):
        markup = '<div><a>aftermath<p><noscript>target</noscript>aftermath</a></p></div>'
        soup = self.soup(markup)
        noscript = soup.noscript
        self.assertEqual("target", noscript.next_element)
        target = soup.find(string='target')
        # The 'aftermath' string was duplicated; we want the second one.
        final_aftermath = soup.find_all(string='aftermath')[-1]
        # The <noscript> tag was moved beneath a copy of the <a> tag,
        # but the 'target' string within is still connected to the
        # (second) 'aftermath' string.
        self.assertEqual(final_aftermath, target.next_element)
        self.assertEqual(target, final_aftermath.previous_element)
    def test_processing_instruction(self):
        """Processing instructions become comments."""
        markup = b"""<?PITarget PIContent?>"""
        soup = self.soup(markup)
        assert str(soup).startswith("<!--?PITarget PIContent?-->")
    def test_cloned_multivalue_node(self):
        # Mis-nesting forces the <a> tag (with a multi-valued class
        # attribute) to be cloned; the clones compare equal but are
        # distinct objects.
        markup = b"""<a class="my_class"><p></a>"""
        soup = self.soup(markup)
        a1, a2 = soup.find_all('a')
        self.assertEqual(a1, a2)
        assert a1 is not a2
    def test_foster_parenting(self):
        # Text that cannot live inside <table> must be foster-parented
        # before it.
        markup = b"""<table><td></tbody>A"""
        soup = self.soup(markup)
        self.assertEqual(u"<body>A<table><tbody><tr><td></td></tr></tbody></table></body>", soup.body.decode())
    def test_extraction(self):
        """
        Test that extraction does not destroy the tree.
        https://bugs.launchpad.net/beautifulsoup/+bug/1782928
        """
        markup = """
<html><head></head>
<style>
</style><script></script><body><p>hello</p></body></html>
"""
        soup = self.soup(markup)
        [s.extract() for s in soup('script')]
        [s.extract() for s in soup('style')]
        self.assertEqual(len(soup.find_all("p")), 1)
    def test_empty_comment(self):
        """
        Test that empty comment does not break structure.
        https://bugs.launchpad.net/beautifulsoup/+bug/1806598
        """
        markup = """
<html>
<body>
<form>
<!----><input type="text">
</form>
</body>
</html>
"""
        soup = self.soup(markup)
        inputs = []
        for form in soup.find_all('form'):
            inputs.extend(form.find_all('input'))
        self.assertEqual(len(inputs), 1)
    def test_tracking_line_numbers(self):
        # The html.parser TreeBuilder keeps track of line number and
        # position of each element.
        markup = "\n   <p>\n\n<sourceline>\n<b>text</b></sourceline><sourcepos></p>"
        soup = self.soup(markup)
        self.assertEqual(2, soup.p.sourceline)
        self.assertEqual(5, soup.p.sourcepos)
        self.assertEqual("sourceline", soup.p.find('sourceline').name)
        # You can deactivate this behavior.
        soup = self.soup(markup, store_line_numbers=False)
        self.assertEqual("sourceline", soup.p.sourceline.name)
        self.assertEqual("sourcepos", soup.p.sourcepos.name)
|
"""
Python environments and packages
================================
This module provides tools for using Python `virtual environments`_
and installing Python packages using the `pip`_ installer.
.. _virtual environments: http://www.virtualenv.org/
.. _pip: http://www.pip-installer.org/
"""
from __future__ import with_statement
from contextlib import contextmanager
from distutils.version import StrictVersion as V
from pipes import quote
import os
import posixpath
import re
from fabric.api import cd, hide, prefix, run, settings, sudo
from fabric.utils import puts
from fabtools.files import is_file
from fabtools.utils import abspath, download, run_as_root
GET_PIP_URL = 'https://raw.githubusercontent.com/pypa/pip/master/contrib/get-pip.py'
def is_pip_installed(version=None, pip_cmd='pip'):
    """
    Tell whether `pip`_ is installed, optionally requiring a minimum version.

    .. _pip: http://www.pip-installer.org/
    """
    with settings(hide('running', 'warnings', 'stderr', 'stdout'), warn_only=True):
        res = run('%(pip_cmd)s --version 2>/dev/null' % locals())
        if res.failed:
            return False
        if version is None:
            return res.succeeded
        # Pull the version string out of "pip X.Y.Z from ..." output.
        match = re.search(r'pip (?P<version>.*) from', res)
        if match is None:
            return False
        installed = match.group('version')
        if V(installed) >= V(version):
            return True
        puts("pip %s found (version >= %s required)" % (installed, version))
        return False
def install_pip(python_cmd='python', use_sudo=True):
    """
    Install the latest version of `pip`_ using the given Python interpreter.
    ::
        import fabtools
        if not fabtools.python.is_pip_installed():
            fabtools.python.install_pip()
    .. note::
        pip is automatically installed inside a virtualenv, so there
        is no need to install it yourself in this case.
    .. _pip: http://www.pip-installer.org/
    """
    with cd('/tmp'):
        download(GET_PIP_URL)
        command = '%(python_cmd)s get-pip.py' % locals()
        # Run the installer as root when requested, then clean up.
        runner = run_as_root if use_sudo else run
        runner(command, pty=False)
        run('rm -f get-pip.py')
def is_installed(package, pip_cmd='pip'):
    """
    Tell whether a Python package is installed, according to ``pip freeze``.
    Package names are compared case-insensitively.
    Example::
        from fabtools.python import virtualenv
        import fabtools
        with virtualenv('/path/to/venv'):
            fabtools.python.install('Flask')
            assert fabtools.python.is_installed('flask')
    .. _pip: http://www.pip-installer.org/
    """
    with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
        res = run('%(pip_cmd)s freeze' % locals())
    installed = set(line.split('==')[0].lower() for line in res.splitlines())
    return package.lower() in installed
def install(packages, upgrade=False, download_cache=None, allow_external=None,
            allow_unverified=None, quiet=False, pip_cmd='pip', use_sudo=False,
            user=None, exists_action=None):
    """
    Install one or more Python packages with `pip`_.

    *packages* may be a single name or a list; names are case insensitive.
    Since pip 1.5, insecure external or unverified files are refused by
    default; pass ``allow_external``/``allow_unverified`` as ``True``
    (apply to every requested package) or as a list of package names to
    opt back in for specific packages.
    Examples::
        import fabtools
        # Install a single package
        fabtools.python.install('package', use_sudo=True)
        # Install a list of packages
        fabtools.python.install(['pkg1', 'pkg2'], use_sudo=True)
    .. _pip: http://www.pip-installer.org/
    """
    if isinstance(packages, basestring):
        packages = [packages]
    # Normalise the allow_* arguments into lists of package names.
    if allow_external in (None, False):
        allow_external = []
    elif allow_external == True:
        allow_external = packages
    if allow_unverified in (None, False):
        allow_unverified = []
    elif allow_unverified == True:
        allow_unverified = packages
    opts = []
    if upgrade:
        opts.append('--upgrade')
    if download_cache:
        opts.append('--download-cache="%s"' % download_cache)
    if quiet:
        opts.append('--quiet')
    opts.extend('--allow-external="%s"' % name for name in allow_external)
    opts.extend('--allow-unverified="%s"' % name for name in allow_unverified)
    if exists_action:
        opts.append('--exists-action=%s' % exists_action)
    options = ' '.join(opts)
    packages = ' '.join(packages)
    command = '%(pip_cmd)s install %(options)s %(packages)s' % locals()
    if use_sudo:
        sudo(command, user=user, pty=False)
    else:
        run(command, pty=False)
def install_requirements(filename, upgrade=False, download_cache=None,
                         allow_external=None, allow_unverified=None,
                         quiet=False, pip_cmd='pip', use_sudo=False,
                         user=None, exists_action=None):
    """
    Install the packages listed in a pip `requirements file`_.
    ::
        import fabtools
        fabtools.python.install_requirements('project/requirements.txt')
    .. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
    """
    if allow_external is None:
        allow_external = []
    if allow_unverified is None:
        allow_unverified = []
    opts = []
    if upgrade:
        opts.append('--upgrade')
    if download_cache:
        opts.append('--download-cache="%s"' % download_cache)
    opts.extend('--allow-external="%s"' % name for name in allow_external)
    opts.extend('--allow-unverified="%s"' % name for name in allow_unverified)
    if quiet:
        opts.append('--quiet')
    if exists_action:
        opts.append('--exists-action=%s' % exists_action)
    options = ' '.join(opts)
    command = '%(pip_cmd)s install %(options)s -r %(filename)s' % locals()
    if use_sudo:
        sudo(command, user=user, pty=False)
    else:
        run(command, pty=False)
def create_virtualenv(directory, system_site_packages=False, venv_python=None,
                      use_sudo=False, user=None, clear=False, prompt=None,
                      virtualenv_cmd='virtualenv'):
    """
    Create a Python `virtual environment`_ at *directory*.
    ::
        import fabtools
        fabtools.python.create_virtualenv('/path/to/venv')
    .. _virtual environment: http://www.virtualenv.org/
    """
    # Assemble the virtualenv command-line flags from the options given.
    opts = ['--quiet']
    if system_site_packages:
        opts.append('--system-site-packages')
    if venv_python:
        opts.append('--python=%s' % quote(venv_python))
    if clear:
        opts.append('--clear')
    if prompt:
        opts.append('--prompt=%s' % quote(prompt))
    options = ' '.join(opts)
    directory = quote(directory)
    command = '%(virtualenv_cmd)s %(options)s %(directory)s' % locals()
    if use_sudo:
        sudo(command, user=user)
    else:
        run(command)
def virtualenv_exists(directory):
    """
    Tell whether *directory* holds a Python `virtual environment`_.
    .. _virtual environment: http://www.virtualenv.org/
    """
    # A virtualenv is recognised by the interpreter it installs under bin/.
    python_path = posixpath.join(directory, 'bin', 'python')
    return is_file(python_path)
@contextmanager
def virtualenv(directory, local=False):
    """
    Context manager activating an existing Python `virtual environment`_.
    ::
        from fabric.api import run
        from fabtools.python import virtualenv
        with virtualenv('/path/to/virtualenv'):
            run('python -V')
    .. _virtual environment: http://www.virtualenv.org/
    """
    # posixpath joins paths for the remote host; os.path when running locally.
    join = (os.path if local else posixpath).join
    activate_path = join(abspath(directory), 'bin', 'activate')
    # Prefix every command in the block with the activation script.
    with prefix('. %s' % quote(activate_path)):
        yield
|
from setuptools import setup
def readme():
    """Return the example README's contents for use as the long description."""
    with open('README.rst.example') as fh:
        return fh.read()
# Package metadata for the manifold GUI.
setup(name='manifold_gui',
      version='0.1',
      description='GUI for a manifold technique',
      long_description=readme(),
      classifiers=[
          'Development Status :: 1 - Alpha',
          'Environment :: Console',
          'Environment :: X11 Applications',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          'Programming Language :: Python :: 2.7 :: chimera',
          'Intended Audience :: End Users/Desktop',
      ],
      keywords='manifold chimera',
      author='Hstau Y Liao',
      # `platforms` is the supported distutils/setuptools keyword; the
      # previous `platform` kwarg was unknown to setup() and was ignored
      # with a warning.
      platforms=['linux chimera'],
      author_email='hstau.y.liao@gmail.com',
      packages=['gui'],
      include_package_data=True,
      zip_safe=False)
|
import csv
import datetime
import os
import copy
class Trip:
    """Accumulates per-fill records for one trip plus summary statistics.

    All mutable state now lives on the instance (set in ``__init__``);
    the original class-level lists were shared by every instance, so data
    from one trip leaked into the next ``Trip()`` unless ``clear()`` was
    called at exactly the right moment.
    """
    def __init__(self):
        # Per-fill series appended to while a trip is in progress.
        self.dols = []
        self.dists = []
        self.gals = []
        self.octs = []
        self.eths = []
        self.drivers = []
        self.tires = []
        # Summary fields, mostly filled in by checkTrip()/main().
        self.miles = 0
        self.gallons = 0
        self.actualGals = 0
        # `dollars` was previously never initialized; write() before
        # checkTrip() raised AttributeError.
        self.dollars = 0
        self.days = 0
        self.octane = 0
        self.snowtires = 0
        self.make = 0
        self.model = 0
        self.year = 0
        self.engineIV = 0
        self.enginecyl = 0
        self.engineL = 0
        self.ethanol = 0
        self.driver = 0
        self.avgMileage = 0
        self.beginDate = 0
        self.hybrid = 0
    def write(self):
        """Return the trip's summary fields as a flat list for CSV output."""
        return [self.miles, self.gallons, self.actualGals, self.dollars,
                self.days, self.octane, self.snowtires, self.make,
                self.model, self.year, self.engineIV, self.enginecyl,
                self.engineL, self.ethanol, self.driver, self.avgMileage,
                self.beginDate, self.hybrid]
    def clear(self):
        """Reset every series and summary field for the next trip."""
        self.__init__()
        # Preserve the original quirk: clear() leaves driver as "" (not 0).
        self.driver = ""
def wavg(series, weight):
    """Weighted average of *series* by *weight*.

    A leading non-positive weight is dropped before averaging; after that
    the two sequences must have equal length.
    """
    if weight[0] <= 0:
        weight = weight[1:]
    assert(len(series) == len(weight))
    total = float(sum(weight))
    return sum(float(v) * float(w) / total for v, w in zip(series, weight))
def octaneCode(inOct):
    """Map a fuel-grade code (1/2/3) to its octane rating (87/89/93).

    Raises ValueError for unknown codes, replacing the original
    print-then-``assert(1 == 0)`` which disappears under ``python -O``.
    """
    grades = {1: 87, 2: 89, 3: 93}
    try:
        return grades[inOct]
    except KeyError:
        raise ValueError("Unknown octane code: %r" % (inOct,))
def driverCode(driver):
    """Map a driver's name to its small integer code.

    Raises ValueError for unknown names, replacing the original
    print-then-``assert(1 == 0)`` which disappears under ``python -O``.
    """
    codes = {"Mark": 0, "Mary": 1, "Andy": 2, "Jeff": 3}
    try:
        return codes[driver]
    except KeyError:
        raise ValueError("Unknown driver: %r" % (driver,))
def makeCode(make):
    """Map a car make to its small integer code.

    Raises ValueError for unknown makes, replacing the original
    print-then-``assert(1 == 0)`` which disappears under ``python -O``.
    """
    codes = {
        "Chevrolet": 0,
        "Buick": 1,
        "Oldsmobile": 2,
        "Mercury": 3,
        "Plymouth": 4,
        "Volkswagen": 5,
        "Toyota": 6,
        "Honda": 7,
    }
    try:
        return codes[make]
    except KeyError:
        raise ValueError("Unknown make: %r" % (make,))
def modelCode(model):
    """Map a car model to its small integer code.

    Raises ValueError for unknown models, replacing the original
    print-then-``assert(1 == 0)`` which disappears under ``python -O``.
    """
    codes = {
        "Concourse": 0,
        "Vega": 1,
        "Century": 2,
        "Cierra": 3,
        "Sable": 4,
        "Voyager": 5,
        "Highlander": 6,
        "CRV": 7,
        "Jetta": 8,
    }
    try:
        return codes[model]
    except KeyError:
        raise ValueError("Unknown model: %r" % (model,))
def gasTank(model, year):
    """Return the fuel-tank capacity in gallons for *model*.

    *year* only matters for the Highlander, whose tank size changed
    between model years. Raises ValueError for unknown models and for
    unknown Highlander years (the original silently returned None in
    that case, which crashed later arithmetic).
    """
    if model == "Highlander":
        by_year = {2003: 19.8, 2008: 17.2}
        try:
            return by_year[year]
        except KeyError:
            raise ValueError("Unknown Highlander year: %r" % (year,))
    tanks = {
        "Concourse": 21.0,
        "Vega": 16.0,
        "Century": 15.0,
        "Cierra": 15.7,
        "Sable": 16.0,
        "Voyager": 20.0,
        "CRV": 15.3,
        "Jetta": 14.5,
    }
    try:
        return tanks[model]
    except KeyError:
        raise ValueError("Unknown model: %r" % (model,))
def dateMaker(date):
    """Normalize the final year field of a slash-separated date to 4 digits.

    Two-digit years above 50 become 19xx, the rest 20xx; anything else
    (including 4-digit years) passes through unchanged.
    """
    cut = date.rfind("/") + 1
    year = date[cut:]
    if len(year) == 2:
        century = 1900 if int(year) > 50 else 2000
        year = century + int(year)
    return date[:cut] + str(year)
def check(fill, gastype, driver, snowtires, ethanol, hybrid):
    """Sanity-check one row's coded fields; raises AssertionError if invalid."""
    assert fill in (0, 1)
    assert gastype in (1, 2, 3)
    assert driver in ("Andy", "Mark", "Mary", "Jeff")
    assert snowtires in (0, 1)
    assert ethanol in (0, 1)
    assert hybrid in (0, 1)
#ethanol
def checkTrip(a):
    """Finalize trip *a*: derive summary statistics and sanity-check them."""
    a.miles = sum(a.dists)
    a.dollars = sum(a.dols)
    # Positive entries only -- presumably excludes the non-purchase
    # trip-start placeholder; confirm against main()'s fill == -1 branch.
    a.actualGals = sum(i for i in a.gals if i > 0)
    a.gallons = sum(a.gals)
    # Gallon-weighted averages of fuel properties over the trip.
    a.octane = wavg(a.octs,a.gals)
    print "octane",a.octane
    a.ethanol = wavg(a.eths,a.gals)
    print "ethanol",a.ethanol
    # Distance-weighted average of snow-tire use.
    a.snowtires = wavg(a.tires,a.dists)
    # Median driver code (Python 2 integer division on the index).
    a.driver = sorted(a.drivers)[len(a.drivers)/2]
    print a.beginDate
    assert(min(a.dists) > 0)
    assert(min(a.dols) > 0)
    assert(a.days > 0)
    assert(a.miles > 0)
    assert(a.dollars > 0)
    assert(a.gallons > 0)
def checkInterTrip(a,b):
    """Compare trip *a* with the previous trip *b*; pause on suspicious jumps.

    Prompts the operator when fuel economy changes by more than 50% or
    price per gallon by more than 20% between consecutive trips.
    """
    print a.beginDate
    print "mpg: ", a.miles/a.gallons, b.miles/b.gallons
    print "price: ", a.dollars/a.actualGals, b.dollars/b.actualGals
    if(abs((a.miles/a.gallons)/(b.miles/b.gallons) - 1) > 0.5):
        status = raw_input("Press Enter to continue... (mpg)")
    if(abs((a.dollars/a.actualGals)/(b.dollars/b.actualGals) - 1) > 0.2):
        status = raw_input("Press Enter to continue... (price)")
    print ""
def main(dir,outfile):
    """Parse every fuel-log CSV in *dir* into Trip records; write summaries to *outfile*.

    Row protocol: fill == -1 starts a new trip, fill == 1 ends it, other
    rows accumulate fills into the current trip.
    """
    trips = []
    for file in os.listdir(dir):
        if not file.endswith('.csv'):
            continue
        print file
        f = open(dir+file,'rU')
        datareader = csv.reader(f, dialect = csv.excel_tab)
        lineNum = 0
        beginMiles = 0
        beginDate = 0
        for row in datareader:
            lineNum += 1
            # NOTE(review): re-parsing str(row) and splitting on ',' breaks
            # if any field contains a quoted comma -- confirm the input
            # data is free of them.
            line = str(row)
            line = line[2:-2].split(',')
            if (line[0] == "Date"):
                continue
            date = dateMaker(str(line[0]))
            odometer = int(line[1])
            fill = int(line[2])
            gastype = int(line[3])
            gallons = float(line[4])
            dollars = float(line[5])
            driver = str(line[6])
            snowtires = int(line[7])
            ethanol = int(line[8])
            make = str(line[9])
            model = str(line[10])
            year = int(line[11])
            engineL = float(line[12])
            enginecyl = int(line[13])
            engineIV = int(line[14])
            hybrid = int(line[15])
            if (fill == -1):
                #begin trip
                #make trip opject
                a = Trip()
                beginMiles = odometer
                beginDate = date
                # Assume a baseline tank of 87 octane; ethanol assumed
                # present for model years >= 1994.
                beginOctane = 87
                beginEthanol = 0
                if (year >= 1994):
                    beginEthanol = 1
                a.gals.append(gallons)
            else:
                # NOTE(review): assumes each file's first data row is a
                # trip start (fill == -1); otherwise `a`, `beginOctane`
                # and `beginEthanol` are unbound here.
                #check and add to trip
                a.dols.append(dollars)
                a.gals.append(gallons)
                a.dists.append(odometer - beginMiles)
                a.octs.append(beginOctane)
                a.eths.append(beginEthanol)
                a.drivers.append(driverCode(driver))
                a.tires.append(snowtires)
                check(fill,gastype,driver,snowtires,ethanol,hybrid)
                beginMiles = odometer
                #update gas contents
                # Mix the new fuel into the tank's running octane/ethanol
                # estimate, weighted by the fraction of the tank replaced.
                tank = gasTank(model, year)
                beginOctane = (gallons * octaneCode(gastype) + (tank - gallons) * beginOctane) / tank
                beginEthanol = (gallons * ethanol + (tank - gallons) * beginEthanol) / tank
                if (fill == 1):
                    #end trip
                    tripMiles = sum(a.dists)
                    dateobj1 = datetime.datetime.strptime(beginDate,'%m/%d/%Y').date()
                    dateobj2 = datetime.datetime.strptime(date,'%m/%d/%Y').date()
                    tripDate = dateobj2 - dateobj1
                    tripDays = tripDate.days
                    # Same-day trips still count as one day.
                    if (tripDays == 0):
                        tripDays += 1
                    a.days = tripDays
                    a.make = makeCode(make)
                    a.model = modelCode(model)
                    a.year = year
                    a.engineIV = engineIV
                    a.enginecyl = enginecyl
                    a.engineL = engineL
                    a.beginDate = beginDate
                    a.hybrid = hybrid
                    # Odometer at the trip's midpoint.
                    a.avgMileage = odometer - 0.5*tripMiles
                    #check and save trip
                    checkTrip(a)
                    if (len(trips) > 0):
                        checkInterTrip(a,trips[-1])
                    # deepcopy because `a` is cleared and reused below.
                    trips.append(copy.deepcopy(a))
                    #reset dollars and gallons
                    #make trip opject
                    a.clear()
                    beginDate = date
                    beginMiles = odometer
    fo = open(outfile,'wb')
    datareader = csv.writer(fo, delimiter=',')
    #print trips
    for thisTrip in trips:
        out = thisTrip.write()
        datareader.writerow(out)
# Guarded entry point: importing this module for its helpers no longer
# runs the whole conversion as an import side effect.
if __name__ == '__main__':
    dir = './raw/'
    outfile = './car_data.csv'
    main(dir, outfile)
|
import itertools
import subprocess
import sys
# AppleScript template: fills the SecurityAgent password prompt with
# $(PASS) and clicks its first button.
# NOTE(security): this script automates password guessing against a login
# dialog. Do not run it against systems you are not authorized to test.
sys_script = '''
tell application "System Events" to tell process "SecurityAgent"
set value of text field 1 of window 1 to $(PASS)
click button 1 of group 1 of window 1
end tell
'''
# Candidate characters whose permutations form the guesses.
keys = ['s','t','a','r','t']
def automate_login():
    """Try every permutation of `keys` (all lengths) against the dialog.

    NOTE(security): password-guessing automation; see the warning above.
    """
    # xrange: this file is Python 2.
    for l in xrange(0, len(keys)+1):
        for subset in itertools.permutations(keys, l):
            guess = ''.join(subset)
            tmp = sys_script.replace('$(PASS)', '"%s"' % guess)
            try:
                subprocess.check_output('osascript -e \'%s\'' % tmp, shell=True)
                sys.stdout.write('\rtrying %s ' % guess)
                sys.stdout.flush()
            except subprocess.CalledProcessError:
                # osascript exited non-zero; the whole search stops here.
                print('\nfailed')
                return
    return
automate_login()
|
'''
Created on 17/2/2015
@author: PC06
First change in the project
'''
from include import app

# Start the Flask development server on localhost:9000 when run directly.
if __name__ == '__main__':
    app.run("127.0.0.1", 9000, debug=True)
|
import socket
import time
import traceback
from oyoyo.parse import *
from oyoyo import helpers
from oyoyo.cmdhandler import CommandError
class IRCClientError(Exception):
    """Raised for errors specific to the IRC client."""
class IRCClient:
    """ IRC Client class. This handles one connection to a server.
    This can be used either with or without IRCApp ( see connect() docs )
    """
    def __init__(self, cmd_handler, **kwargs):
        """ the first argument should be an object with attributes/methods named
        as the irc commands. You may subclass from one of the classes in
        oyoyo.cmdhandler for convenience but it is not required. The
        methods should have arguments (prefix, args). prefix is
        normally the sender of the command. args is a list of arguments.
        Its recommened you subclass oyoyo.cmdhandler.DefaultCommandHandler,
        this class provides defaults for callbacks that are required for
        normal IRC operation.
        all other arguments should be keyword arguments. The most commonly
        used will be nick, host and port. You can also specify an "on connect"
        callback. ( check the source for others )
        Warning: By default this class will not block on socket operations, this
        means if you use a plain while loop your app will consume 100% cpu.
        To enable blocking pass blocking=True.
        >>> from oyoyo import helpers
        >>> class My_Handler(DefaultCommandHandler):
        ...     def privmsg(self, prefix, command, args):
        ...         print "%s said %s" % (prefix, args[1])
        ...
        >>> def connect_callback(c):
        ...     helpers.join(c, '#myroom')
        ...
        >>> cli = IRCClient(My_Handler,
        ...     host="irc.freenode.net",
        ...     port=6667,
        ...     nick="myname",
        ...     connect_cb=connect_callback)
        ...
        >>> cli_con = cli.connect()
        >>> while 1:
        ...     cli_con.next()
        ...
        """
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.nick = None
        self.real_name = None
        self.host = None
        self.port = None
        self.connect_cb = None
        self.blocking = False
        # Arbitrary keyword arguments become attributes (nick, host, port, ...).
        self.__dict__.update(kwargs)
        self.command_handler = cmd_handler(self)
        # Set non-zero to make the connect() generator shut down.
        self._end = 0
    def send(self, *args, **kwargs):
        """ send a message to the connected server. all arguments are joined
        with a space for convenience, for example the following are identical
        >>> cli.send("JOIN %s" % some_room)
        >>> cli.send("JOIN", some_room)
        In python 2, all args must be of type str or unicode, *BUT* if they are
        unicode they will be converted to str with the encoding specified by
        the 'encoding' keyword argument (default 'utf8').
        In python 3, all args must be of type str or bytes, *BUT* if they are
        str they will be converted to bytes with the encoding specified by the
        'encoding' keyword argument (default 'utf8').
        """
        # Convert all args to bytes if not already
        encoding = kwargs.get('encoding') or 'utf8'
        bargs = []
        for arg in args:
            if isinstance(arg, str):
                bargs.append(bytes(arg, encoding))
            elif isinstance(arg, bytes):
                bargs.append(arg)
            elif type(arg).__name__ == 'unicode':
                # Python 2 unicode objects, matched by name so this file
                # still parses under Python 3.
                bargs.append(arg.encode(encoding))
            else:
                raise IRCClientError('Refusing to send one of the args from provided: %s'
                                     % repr([(type(arg), arg) for arg in args]))
        msg = bytes(" ", "ascii").join(bargs)
        # NOTE(review): `logging` is not imported in this module's visible
        # imports -- presumably re-exported by `from oyoyo.parse import *`;
        # confirm.
        logging.info('---> send "%s"' % msg)
        self.socket.send(msg + bytes("\r\n", "ascii"))
    def connect(self):
        """ initiates the connection to the server set in self.host:self.port
        and returns a generator object.
        >>> cli = IRCClient(my_handler, host="irc.freenode.net", port=6667)
        >>> g = cli.connect()
        >>> while 1:
        ...     g.next()
        """
        try:
            logging.info('connecting to %s:%s' % (self.host, self.port))
            self.socket.connect(("%s" % self.host, self.port))
            if self.blocking:
                # this also overrides default timeout
                self.socket.setblocking(1)
            else:
                self.socket.setblocking(0)
            # Standard IRC handshake: NICK then USER, then the user callback.
            helpers.nick(self, self.nick)
            helpers.user(self, self.nick, self.real_name)
            if self.connect_cb:
                self.connect_cb(self)
            buffer = bytes()
            while not self._end:
                try:
                    buffer += self.socket.recv(1024)
                except socket.error as e:
                    try: # a little dance of compatibility to get the errno
                        errno = e.errno
                    except AttributeError:
                        errno = e[0]
                    # errno 11 (EAGAIN): no data yet on the non-blocking socket.
                    if not self.blocking and errno == 11:
                        pass
                    else:
                        raise e
                else:
                    # Split complete lines off the buffer; keep the partial
                    # trailing line for the next recv.
                    data = buffer.split(bytes("\n", "ascii"))
                    buffer = data.pop()
                    for el in data:
                        prefix, command, args = parse_raw_irc_command(el)
                        try:
                            self.command_handler.run(command, prefix, *args)
                        except CommandError:
                            # error will of already been loggingged by the handler
                            pass
                # Yield control back to the caller after each poll cycle.
                yield True
        finally:
            if self.socket:
                logging.info('closing socket')
                self.socket.close()
class IRCApp:
    """ This class manages several IRCClient instances without the use of threads.
    (Non-threaded) Timer functionality is also included.
    """
    class _ClientDesc:
        # Small record describing one managed client: its live connection
        # generator (`con`) and its autoreconnect policy.
        def __init__(self, **kwargs):
            self.con = None
            self.autoreconnect = False
            self.__dict__.update(kwargs)
    def __init__(self):
        self._clients = {}
        self._timers = []
        self.running = False
        # Seconds to sleep between polling cycles in run().
        self.sleep_time = 0.5
    def addClient(self, client, autoreconnect=False):
        """ add a client object to the application. setting autoreconnect
        to true will mean the application will attempt to reconnect the client
        after every disconnect. you can also set autoreconnect to a number
        to specify how many reconnects should happen.
        warning: if you add a client that has blocking set to true,
        timers will no longer function properly """
        logging.info('added client %s (ar=%s)' % (client, autoreconnect))
        self._clients[client] = self._ClientDesc(autoreconnect=autoreconnect)
    def addTimer(self, seconds, cb):
        """ add a timed callback. accuracy is not specified, you can only
        guarantee the callback will be called after seconds has passed.
        ( the only advantage to these timers is they dont use threads )
        """
        assert callable(cb)
        logging.info('added timer to call %s in %ss' % (cb, seconds))
        self._timers.append((time.time() + seconds, cb))
    def run(self):
        """ run the application. this will block until stop() is called """
        # TODO: convert this to use generators too?
        self.running = True
        while self.running:
            found_one_alive = False
            for client, clientdesc in self._clients.items():
                if clientdesc.con is None:
                    clientdesc.con = client.connect()
                try:
                    # NOTE(review): `.next()` is the Python 2 generator
                    # protocol; under Python 3 this would need next(...).
                    clientdesc.con.next()
                except Exception as e:
                    logging.error('client error %s' % e)
                    logging.error(traceback.format_exc())
                    if clientdesc.autoreconnect:
                        # A numeric policy counts down one reconnect per failure.
                        clientdesc.con = None
                        if isinstance(clientdesc.autoreconnect, (int, float)):
                            clientdesc.autoreconnect -= 1
                        found_one_alive = True
                    else:
                        clientdesc.con = False
                else:
                    found_one_alive = True
            if not found_one_alive:
                logging.info('nothing left alive... quiting')
                self.stop()
            # Fire timers that have come due; keep the rest for later.
            now = time.time()
            timers = self._timers[:]
            self._timers = []
            for target_time, cb in timers:
                if now > target_time:
                    logging.info('calling timer cb %s' % cb)
                    cb()
                else:
                    self._timers.append((target_time, cb))
            time.sleep(self.sleep_time)
    def stop(self):
        """ stop the application """
        self.running = False
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import *
from crispy_forms.bootstrap import *
from crispy_forms.layout import Layout, Submit, Reset, Div
from django import forms
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from silo.models import TolaUser
from django.contrib.auth.models import User
class RegistrationForm(UserChangeForm):
    """
    Form for editing an existing user's Tola profile.
    """
    def __init__(self, *args, **kwargs):
        # The view passes the request user in via ``initial``; pop it so the
        # parent form does not treat it as ordinary initial form data.  It is
        # kept (though currently unused) for the country-permission check below.
        user = kwargs.pop('initial')
        super(RegistrationForm, self).__init__(*args, **kwargs)
        # password is managed by the auth views, not this profile form
        del self.fields['password']
        # NOTE: removed leftover Python 2 debug print of
        # user['username'].is_superuser (syntax error under Python 3).
        # allow country access change for now until we know how we will use this GWL 012617
        # if they aren't a super user or User Admin don't let them change countries form field
        # if 'User Admin' not in user['username'].groups.values_list('name', flat=True) and not user['username'].is_superuser:
        #     self.fields['country'].widget.attrs['disabled'] = "disabled"
        self.fields['created'].widget.attrs['disabled'] = "disabled"

    class Meta:
        model = TolaUser
        fields = '__all__'

    # crispy-forms rendering configuration (class-level, shared by instances)
    helper = FormHelper()
    helper.form_method = 'post'
    helper.form_class = 'form-horizontal'
    helper.label_class = 'col-sm-2'
    helper.field_class = 'col-sm-6'
    helper.form_error_title = 'Form Errors'
    helper.error_text_inline = True
    helper.help_text_inline = True
    helper.html5_required = True
    helper.layout = Layout(Fieldset('', 'title', 'name',
                                    'country'),
                           Submit('submit', 'Submit', css_class='btn-default'),
                           Reset('reset', 'Reset', css_class='btn-warning'))
class NewUserRegistrationForm(UserCreationForm):
    """
    Form for registering a new account.
    """
    class Meta:
        model = User
        fields = ['first_name', 'last_name', 'email', 'username']

    def __init__(self, *args, **kwargs):
        super(NewUserRegistrationForm, self).__init__(*args, **kwargs)
        # Attach the crispy-forms helper to the instance: the original code
        # assigned it to a bare local variable, which was discarded at the
        # end of __init__ so none of this configuration ever took effect.
        self.helper = FormHelper()
        self.helper.form_method = 'post'
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-sm-2'
        self.helper.field_class = 'col-sm-6'
        self.helper.form_error_title = 'Form Errors'
        self.helper.error_text_inline = True
        self.helper.help_text_inline = True
        self.helper.html5_required = True
        # form tag is rendered by the surrounding template, not the helper
        self.helper.form_tag = False
class NewTolaUserRegistrationForm(forms.ModelForm):
    """
    Form for the Tola-specific part of a new account registration.
    """
    class Meta:
        model = TolaUser
        fields = ['title', 'country', 'privacy_disclaimer_accepted']

    def __init__(self, *args, **kwargs):
        super(NewTolaUserRegistrationForm, self).__init__(*args, **kwargs)
        # Attach the crispy-forms helper to the instance: the original code
        # assigned it to a bare local variable, which was discarded at the
        # end of __init__ so none of this configuration (including the
        # layout) ever took effect.
        self.helper = FormHelper()
        self.helper.form_method = 'post'
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-sm-2'
        self.helper.field_class = 'col-sm-6'
        self.helper.form_error_title = 'Form Errors'
        self.helper.error_text_inline = True
        self.helper.help_text_inline = True
        self.helper.html5_required = True
        # form tag is rendered by the surrounding template, not the helper
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Fieldset('Information', 'title', 'country'),
            Fieldset('Privacy Statement', 'privacy_disclaimer_accepted',),
        )
|
import search_duplicated_task
|
import Globals
from Products.ZenModel.ZenPack import ZenPack as ZenPackBase
from Products.ZenUtils.Utils import unused, zenPath

import os

unused(Globals)

# Plugin files bundled in this ZenPack's libexec directory.
_plugins = [
    'rig_host_app_transform1.py',
    'copy_server_config_file.sh',
    ]


class ZenPack(ZenPackBase):
    """ZenPack that symlinks its bundled monitoring plugins into
    $ZENHOME/libexec on install and removes the links on removal."""

    def install(self, app):
        super(ZenPack, self).install(app)
        self.symlink_plugins()

    def symlink_plugins(self):
        """Create (or refresh) $ZENHOME/libexec symlinks for each plugin."""
        # zenPath resolves relative to $ZENHOME, matching the per-plugin
        # paths built below; the previous os.environ.get('ZENHOME') join
        # raised a TypeError when ZENHOME was unset.
        libexec = zenPath('libexec')
        if not os.path.isdir(libexec):
            # Stack installs might not have a $ZENHOME/libexec directory.
            os.mkdir(libexec)

        # Path to the plugin files inside this ZenPack.
        (zpdir, tail) = os.path.split(__file__)
        zp_libexec_dir = os.path.join(zpdir, 'libexec')

        for plugin in _plugins:
            plugin_path = zenPath('libexec', plugin)
            zp_plugin_path = os.path.join(zp_libexec_dir, plugin)
            os.system('ln -sf "%s" "%s"' % (zp_plugin_path, plugin_path))
            # quote the path, consistent with the ln/rm commands
            os.system('chmod 0755 "%s"' % plugin_path)

    def remove_plugin_symlinks(self):
        """Delete the $ZENHOME/libexec symlinks created at install time."""
        for plugin in _plugins:
            os.system('rm -f "%s"' % zenPath('libexec', plugin))

    def remove(self, app, leaveObjects=False):
        if not leaveObjects:
            self.remove_plugin_symlinks()
        super(ZenPack, self).remove(app, leaveObjects=leaveObjects)
|
import piksemel
import os
def updateGConf (filepath, remove=False):
    """Register (or unregister) the GConf schemas shipped in a package.

    filepath -- path to the package's files XML metadata (piksemel format)
    remove   -- if True, run gconftool-2's uninstall rule instead of the
                install rule
    """
    parse = piksemel.parse (filepath)

    schemaList = list()
    for xmlfile in parse.tags ("File"):
        path = xmlfile.getTagData ("Path")
        # Only interested in /etc/gconf/schemas
        if "etc/gconf/schemas" in path:
            schemaList.append ("/%s" % path)

    if schemaList:
        os.environ['GCONF_CONFIG_SOURCE'] = 'xml:merged:/etc/gconf/gconf.xml.defaults'
        operation = "--makefile-uninstall-rule" if remove else "--makefile-install-rule"
        # Quote each schema path so paths containing spaces or shell
        # metacharacters cannot break (or inject into) the command line.
        args = " ".join('"%s"' % schema for schema in schemaList)
        cmd = "/usr/bin/gconftool-2 %s %s" % (operation, args)
        os.system (cmd)
def setupPackage (metapath, filepath):
    """Post-install hook: register the package's GConf schemas."""
    updateGConf (filepath, remove=False)
def postCleanupPackage (metapath, filepath):
    """Post-removal hook: unregister the package's GConf schemas.

    Passes remove=True so gconftool-2 runs its uninstall rule; without the
    flag this hook re-installed the schemas of the package being removed.
    """
    updateGConf (filepath, remove=True)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.