index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
997,500 | 7d243749035c1f076379e55e3f0754d48bd213ca | # Generated by Django 3.1.3 on 2020-11-23 02:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the account app (auto-generated by Django 3.1.3)."""

    # First migration of this app, so it has no dependencies.
    initial = True

    dependencies = [
    ]

    operations = [
        # Minimal Google-OAuth-style user record; `email` is the natural key.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uid', models.CharField(max_length=20)),
                ('email', models.CharField(max_length=20, unique=True)),
                ('name', models.CharField(max_length=100)),
                ('given_name', models.CharField(max_length=100)),
                ('family_name', models.CharField(max_length=100)),
                ('picture', models.CharField(max_length=100)),
                ('token', models.CharField(max_length=30)),
                ('verified_email', models.BooleanField(default=False)),
                ('locale', models.CharField(max_length=4)),
            ],
        ),
    ]
|
997,501 | 2a35e0f6c9b4a47e1be246871dd05a452200bf90 | from datetime import date, datetime
import hashlib, inspect

from django.db.models import Q
from django.contrib.auth import authenticate, login, logout, models as auth_models
from django.contrib.auth.hashers import make_password
from django.conf.urls import url
from django.utils import timezone

from tastypie import resources, fields
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.exceptions import BadRequest
from tastypie.serializers import Serializer
from tastypie.utils import trailing_slash
from tastypie.http import HttpUnauthorized, HttpForbidden

from youtune.account import models, forms
from youtune.api.helpers import FieldsValidation
from youtune.api.authorization import UserObjectsOnlyAuthorization
from youtune.fileupload import models as file_models
class CommentDateSerializer(Serializer):
    """Serializer that emits datetimes as ISO-8601 unless RFC-2822 is configured."""

    def format_datetime(self, data):
        # Default to ISO format; only fall back to tastypie's RFC-2822
        # rendering when the serializer was configured for it.
        wants_rfc2822 = self.datetime_formatting == 'rfc-2822'
        if not wants_rfc2822:
            return data.isoformat()
        return super(CommentDateSerializer, self).format_datetime(data)
class UserProfileResource(resources.ModelResource):
    """REST resource for UserProfile accounts: login/logout/session endpoints
    plus profile reads and updates (Python 2 / tastypie)."""

    # Expose the pk explicitly so clients can PATCH with an id in the payload.
    id = fields.IntegerField(attribute="id", null=True)

    class Meta:
        queryset = models.UserProfile.objects.all()
        resource_name = 'userprofile'
        # TODO:
        # Add custom Authorization (important)
        authentication = Authentication()
        authorization = Authorization()
        # excludes = ['email', 'is_staff', 'is_superuser']
        filtering = {
            'username': ALL
        }

    def dehydrate_password(self, bundle):
        # Never leak password hashes through the API.
        return ''

    def dehydrate(self, bundle):
        """Add private fields for the profile owner and inline the channel."""
        # Only the authenticated owner sees their own sensitive fields.
        if bundle.request.user.pk == bundle.obj.pk:
            bundle.data['email'] = bundle.obj.email
            bundle.data['is_staff'] = bundle.obj.is_staff
            bundle.data['is_superuser'] = bundle.obj.is_superuser
        # Serialize the related channel as a flat dict of its field values.
        model = bundle.obj.channel
        ret = {}
        for f in sorted(model._meta.fields + model._meta.many_to_many):
            ret[f.name] = getattr(model, f.name)
        bundle.data['channel'] = ret
        return bundle

    def override_urls(self):
        # Custom non-REST endpoints under /userprofile/<action>/.
        return [
            url(r"^(?P<resource_name>%s)/login%s$" %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('login'), name="api_login"),
            url(r'^(?P<resource_name>%s)/logout%s$' %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('logout'), name='api_logout'),
            url(r'^(?P<resource_name>%s)/loggedin%s$' %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('loggedin'), name='api_loggedin'),
            url(r'^(?P<resource_name>%s)/checkfordupe%s$' %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('checkfordupe'), name='api_checkfordupe'),
            url(r'^(?P<resource_name>%s)/update%s$' %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('update'), name='api_update'),
            url(r'^(?P<resource_name>%s)/count%s$' %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('count'), name='api_count'),
        ]

    def login(self, request, **kwargs):
        """POST {username, password}: authenticate and open a session."""
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.raw_post_data,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        username = data.get('username', '')
        password = data.get('password', '')
        user = authenticate(username=username, password=password)
        if user:
            if user.is_active:
                login(request, user)
                return self.create_response(request, {
                    'success': True
                })
            else:
                # Valid credentials but deactivated account.
                return self.create_response(request, {
                    'success': False,
                    'reason': 'disabled',
                }, HttpForbidden)
        else:
            return self.create_response(request, {
                'success': False,
                'reason': 'incorrect',
            }, HttpUnauthorized)

    def logout(self, request, **kwargs):
        """GET: close the current session, if any."""
        self.method_check(request, allowed=['get'])
        if request.user and request.user.is_authenticated():
            logout(request)
            return self.create_response(request, {'success': True})
        else:
            return self.create_response(request, {'success': False}, HttpUnauthorized)

    def hydrate(self, bundle):
        """Prepare inbound payloads: preserve/hash password, parse birthdate,
        derive the gravatar URL."""
        # About to do some ninja skills
        if bundle.request.method == 'PATCH':
            # Partial updates never carry the password; keep the stored hash.
            bundle.data['password'] = models.UserProfile.objects.get(pk=int(bundle.data['id'])).password
        else:
            # Full submissions carry a plain-text password: hash it.
            bundle.data['password'] = make_password(bundle.data['password'])
        if bundle.data['birthdate']:
            # Incoming format is "YYYY-MM-DD".
            birthdate = bundle.data['birthdate'].split("-")
            birthdate = date(year=int(birthdate[0]), month=int(
                birthdate[1]), day=int(birthdate[2]))
            bundle.data['birthdate'] = birthdate
        # Gravatar URL from the md5 of the lower-cased email (Python 2 str).
        bundle.data['avatar'] = "http://www.gravatar.com/avatar/" + hashlib.md5(bundle.data['email'].lower()).hexdigest();
        return bundle

    def loggedin(self, request, **kwargs):
        """GET: report whether the session is authenticated, and for whom."""
        self.method_check(request, allowed=['get'])
        if request.user.is_authenticated():
            return self.create_response(request, {
                'success': True,
                'id': request.user.id,
            })
        else:
            return self.create_response(request, {
                'success': False
            })

    def checkfordupe(self, request, **kwargs):
        """POST {username}: success=True when the username is still free."""
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.raw_post_data,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        username = data.get('username', '')
        user = None;
        try:
            # Case-insensitive match mirrors the uniqueness users expect.
            user = models.UserProfile.objects.get(username__iexact=username)
        except models.UserProfile.DoesNotExist:
            return self.create_response(request, {
                'success': True,
            })
        else:
            return self.create_response(request, {
                'success': False,
                'id': user.id,
            })

    def update(self, request, **kwargs):
        """POST: persist the caller's player preferences (volume/autoplay/...)."""
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.raw_post_data,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        player_volume = data.get('player_volume', '')
        player_autoplay = data.get('player_autoplay', '')
        player_repeat = data.get('player_repeat', '')
        player_format = data.get('player_format', '')
        if request.user:
            if request.user.is_authenticated():
                user = request.user
                user.player_volume = player_volume
                user.player_autoplay = player_autoplay
                user.player_repeat = player_repeat
                user.player_format = player_format
                # update_fields keeps the write narrow (no full-row save).
                user.save(update_fields=['player_volume',
                                         'player_autoplay',
                                         'player_repeat',
                                         'player_format'])
                return self.create_response(request, {
                    'success': True
                })
            else:
                return self.create_response(request, {
                    'success': False,
                }, HttpForbidden)
        else:
            return self.create_response(request, {
                'success': False,
                'reason': 'incorrect',
            }, HttpUnauthorized)

    def count(self, request, **kwargs):
        """GET: total number of registered profiles."""
        self.method_check(request, allowed=['get'])
        count = models.UserProfile.objects.count()
        return self.create_response(request, {
            'count': count,
        })

    def save(self, bundle, skip_errors=False):
        """After saving the user, auto-create their personal channel."""
        bundle = super(UserProfileResource, self).save(bundle, skip_errors)
        desc = bundle.obj.username + "'s channel description."
        channel = models.Channel(description=desc, owner=bundle.obj)
        channel.save()
        return bundle
class FileResource(resources.ModelResource):
    """Read-only resource for uploaded music files, with voting, free-text
    search, and view counting (Python 2 / tastypie)."""

    # Size of the last list result; dehydrate uses it to detect a
    # single-track detail view and attach owner info.
    objects_returned = 0
    owner = fields.ForeignKey(UserProfileResource, 'owner')

    class Meta:
        allowed_methods = ['get']
        queryset = file_models.File.objects.all()
        resource_name = 'music'
        filtering = {
            'base64id': ALL,
            'upload_date': ALL,
            'owner': ALL_WITH_RELATIONS,
            'views': ALL,
            'lastview_date': ALL,
            'query': ['icontains',],
        }

    def override_urls(self):
        return [
            url(r"^(?P<resource_name>%s)/vote%s$" %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('vote'), name="api_vote"),
        ]

    # to sort by descending insert '-' (i.e. '-title')
    def apply_sorting(self, objects, options=None):
        if options:
            if 'sortby' in options:
                return objects.order_by(options['sortby'])
        return super(FileResource, self).apply_sorting(objects, options)

    def vote(self, request, **kwargs):
        """POST {vote, base64id, userid}: register a like/dislike on a track."""
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.raw_post_data,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        vote = data.get('vote', '')
        base64id = data.get('base64id', '')
        userid = data.get('userid', '')
        track = None
        try:
            track = file_models.File.objects.get(base64id__exact=base64id)
            user = models.UserProfile.objects.get(pk=userid)
            # Has this user voted on the track before?
            exists = False
            if user in track.votes.all():
                exists = True
            if vote == "like":
                track.likes.add(user)
                if exists:
                    # A prior vote can only have been the opposite one.
                    track.dislikes.remove(user)
            else:
                track.dislikes.add(user)
                if exists:
                    track.likes.remove(user)
            if not exists:
                track.votes.add(user)
        # BUG FIX: the original used the Python 2 form
        # `except file_models.File.DoesNotExist, models.UserProfile.DoesNotExist:`
        # which catches ONLY File.DoesNotExist and *assigns* the exception to
        # the attribute `models.UserProfile.DoesNotExist`. A tuple catches both.
        except (file_models.File.DoesNotExist, models.UserProfile.DoesNotExist):
            return self.create_response(request, {
                'success': False,
            })
        else:
            return self.create_response(request, {
                'success': True,
                # NOTE(review): dislikes are reported as votes - likes rather
                # than track.dislikes.count() -- presumably intentional; verify.
                'dislikes': track.votes.count() - track.likes.count(),
                'likes': track.likes.count(),
            })

    def build_filters(self, filters=None):
        """Translate the ?query= free-text parameter into a combined Q filter."""
        if filters is None:
            filters = {}
        orm_filters = super(FileResource, self).build_filters(filters)
        if('query' in filters):
            query = filters['query']
            query = query.split(' ')
            qset = Q()
            for q in query:
                # Skip one-letter noise words.
                if len(q.strip()) > 1:
                    qset &= (
                        Q(title__icontains=q) |
                        Q(tags__icontains=q) |
                        Q(artist__icontains=q)
                    )
            # Stashed under a non-field key; apply_filters pops it back out.
            orm_filters.update({'custom': qset})
        return orm_filters

    def apply_filters(self, request, applicable_filters):
        # Pull out the synthetic 'custom' Q before the ORM sees it.
        if 'custom' in applicable_filters:
            custom = applicable_filters.pop('custom')
        else:
            custom = None
        semi_filtered = super(FileResource, self).apply_filters(request, applicable_filters)
        return semi_filtered.filter(custom) if custom else semi_filtered

    def dehydrate(self, bundle):
        """Attach like/dislike counts, the caller's own vote, and (on detail
        views) the owner's name and avatar."""
        track = file_models.File.objects.get(pk=bundle.data['id'])
        bundle.data['likes'] = track.likes.count()
        bundle.data['dislikes'] = track.dislikes.count()
        # Exactly one result means a detail view: include owner info.
        if self.objects_returned == 1:
            bundle.data['owner'] = bundle.obj.owner.username
            bundle.data['avatar'] = bundle.obj.owner.avatar + "?s=64"
        if bundle.request.user and bundle.request.user.is_authenticated():
            if bundle.request.user in track.likes.all():
                bundle.data['voted'] = "like"
            elif bundle.request.user in track.dislikes.all():
                bundle.data['voted'] = "dislike"
            else:
                bundle.data['voted'] = "none"
        else:
            # Anonymous users may not vote at all.
            bundle.data['voted'] = "disallowed"
        return bundle

    def obj_get_list(self, bundle, **kwargs):
        """
        A ORM-specific implementation of ``obj_get_list``.
        Takes an optional ``request`` object, whose ``GET`` dictionary can be
        used to narrow the query.
        """
        filters = {}
        if hasattr(bundle.request, 'GET'):
            # Grab a mutable copy.
            filters = bundle.request.GET.copy()
        # Update with the provided kwargs.
        filters.update(kwargs)
        # Owner-filtered lists are channel pages; those must not bump views.
        channel = False
        if 'owner' in filters:
            channel = True
        applicable_filters = self.build_filters(filters=filters)
        try:
            objects = self.apply_filters(bundle.request, applicable_filters)
            self.objects_returned = len(objects)
            # A single filtered hit counts as a play: bump the view counter.
            if len(objects) == 1 and applicable_filters and not channel:
                obj = objects[0]
                obj.views = obj.views + 1
                obj.lastview_date = timezone.now()
                obj.save(update_fields=['views', 'lastview_date'])
            return self.authorized_read_list(objects, bundle)
        except ValueError:
            # BadRequest is imported from tastypie.exceptions at module level
            # (the original referenced it without any import -- NameError).
            raise BadRequest("Invalid resource lookup data provided (mismatched type).")
class ChannelResource(resources.ModelResource):
    """Channels are never exposed via plain REST; only the custom /update/
    endpoint below is reachable."""

    class Meta:
        allowed_methods = []
        queryset = models.Channel.objects.all()
        resource_name = 'channel'

    def override_urls(self):
        return [
            url(r"^(?P<resource_name>%s)/update%s$" %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('update'), name="api_update"),
        ]

    def update(self, request, **kwargs):
        """POST {description}: update the caller's own channel description."""
        self.method_check(request, allowed=['post'])
        payload = self.deserialize(request, request.raw_post_data,
                                   format=request.META.get('CONTENT_TYPE', 'application/json'))
        new_description = payload.get('description', '')
        # Guard clauses: anonymous requests are unauthorized, known-but-
        # unauthenticated ones are forbidden.
        if not request.user:
            return self.create_response(request, {
                'success': False,
                'reason': 'incorrect',
            }, HttpUnauthorized)
        if not request.user.is_authenticated():
            return self.create_response(request, {
                'success': False,
            }, HttpForbidden)
        channel = request.user.channel
        channel.description = new_description
        channel.save(update_fields=['description'])
        return self.create_response(request, {
            'success': True
        })
class CommentResource(resources.ModelResource):
    """Read/list comments on files, with a custom endpoint for posting them."""

    class Meta:
        allowed_methods = ['get']
        queryset = file_models.Comment.objects.all()
        resource_name = 'comment'
        serializer = CommentDateSerializer()
        filtering = {
            'base64id': ALL,
        }

    def override_urls(self):
        return [
            url(r"^(?P<resource_name>%s)/post%s$" %
                (self._meta.resource_name, trailing_slash()),
                self.wrap_view('post'), name="api_post"),
        ]

    def post(self, request, **kwargs):
        """POST {commenttext, fileid}: attach a comment to a file as the
        authenticated user."""
        self.method_check(request, allowed=['post'])
        payload = self.deserialize(request, request.raw_post_data,
                                   format=request.META.get('CONTENT_TYPE', 'application/json'))
        body = payload.get('commenttext', '')
        fileid = payload.get('fileid', '')
        # Guard clauses mirror the other endpoints' auth responses.
        if not request.user:
            return self.create_response(request, {
                'success': False,
                'reason': 'incorrect',
            }, HttpUnauthorized)
        if not request.user.is_authenticated():
            return self.create_response(request, {
                'success': False,
            }, HttpForbidden)
        try:
            target_file = file_models.File.objects.get(pk=fileid)
        except file_models.File.DoesNotExist:
            return self.create_response(request, {
                'success': False,
            }, HttpForbidden)
        comment = file_models.Comment(owner=request.user, body=body, file=target_file)
        comment.save()
        target_file.comments.add(comment)
        return self.create_response(request, {
            'success': True,
            'date': comment.post_date,
        })

    def apply_sorting(self, objects, options=None):
        # A leading '-' in sortby gives descending order.
        if options and 'sortby' in options:
            return objects.order_by(options['sortby'])
        return super(CommentResource, self).apply_sorting(objects, options)

    def dehydrate(self, bundle):
        # Inline the author's username and a 64px gravatar for display.
        bundle.data['owner'] = bundle.obj.owner.username
        bundle.data['avatar'] = bundle.obj.owner.avatar + "?s=64"
        return bundle
class UserValidation(FieldsValidation):
    """Field validation rules for user create/update requests."""

    def __init__(self):
        super(UserValidation, self).__init__(
            required=['username', 'first_name', 'last_name'],
            validated=['username'],
            required_post=['email', 'password'],
            validated_post=['password'],
        )

    @staticmethod
    def password_is_valid(password, bundle):
        """Passwords must be at least 6 characters long."""
        if len(password) < 6:
            return False, 'Password is too short.'
        return True, ""

    @staticmethod
    def username_is_valid(username, bundle):
        """A username is valid when unused, or used only by the profile
        currently being edited (matched via the bundle's id)."""
        try:
            # BUG FIX: the original referenced an undefined name ``User``;
            # the account model used throughout this module (see
            # checkfordupe) is models.UserProfile.
            user = models.UserProfile.objects.get(username=username)
            if user is not None and str(user.id) != str(bundle.data.get('id', 0)):
                return False, "The username is already taken."
        except models.UserProfile.DoesNotExist:
            return True, ""
        return True, ""
|
997,502 | a230c9fe5b03df50cf7c26f74f8af3a979877dfa | # -*- coding: utf-8 -*-
# Odoo addon manifest for the NCSS Appraisal module.
{
    'name': 'NCSS Appraisal',
    'version': '13.0.1',  # Odoo 13 series
    'summary': 'NCSS Appraisal',
    'category': 'hr',
    'author': 'Magdy,TeleNoc',
    'description': """
NCSS Survey
""",
    # Core framework deps plus the HR appraisal app this module extends.
    'depends': ['base', 'mail', 'hr_appraisal'],
    # Demo data is loaded only in demo databases.
    'demo': [
        'demo/demo.xml'
    ],
    # Always-loaded data files: access control first, then views.
    'data': [
        'security/security.xml',
        'security/ir.model.access.csv',
        # 'views/sequence.xml',
        'views/hr_employee_appraisal.xml',
        'views/hr_appraisal.xml',
        # 'report/print_barcode_report.xml',
        # 'report/administrative_communication_report.xml',
    ]
}
|
997,503 | df5378c239f6bbc4ffd076f27c878b88e9693b29 | from station import Station
from BeautifulSoup import BeautifulStoneSoup
import urllib, urllib2
import re
# Endpoint templates for the DomoBlue/Onroll backend. The token scraped from
# the map page authorises the XML feed request.
HOST = "http://clientes.domoblue.es/onroll/"
SERVICE_URL = HOST+"generaXml.php?token={token}&cliente={client_id}"
TOKEN_URL = HOST+"generaMapa.php?cliente={client_id}"
# Extracts the token from the feed link embedded in the generated map page.
TOKEN_RE = "generaXml\.php\?token\=(.*?)\&cliente"
# Known services keyed by slug; 'id' is the backend's "cliente" code.
SERVICES = {
    'albacete':{'id':2,'name':'albacete'},
    'alhamademurcia':{'id':37,'name':'alhamademurcia'},
    'almunecar':{'id':70,'name':'almunecar'},
    'antequera':{'id':29,'name':'antequera'},
    'arandadeduero':{'id':68,'name':'arandadeduero'},
    'arua':{'id':64,'name':'arua'},
    'badajoz':{'id':49,'name':'badajoz'},
    'baeza':{'id':25,'name':'baeza'},
    'biciambiental':{'id':69,'name':'biciambiental'},
    'bicielx':{'id':57,'name':'bicielx'},
    'blanca':{'id':59,'name':'blanca'},
    'cieza':{'id':61,'name':'cieza'},
    'ciudadreal':{'id':3,'name':'ciudadreal'},
    'elcampello':{'id':41,'name':'elcampello'},
    'guadalajara':{'id':43,'name':'guadalajara'},
    'jaen':{'id':26,'name':'jaen'},
    'lalin':{'id':62,'name':'lalin'},
    'montilla':{'id':28,'name':'montilla'},
    'mula':{'id':56,'name':'mula'},
    'novelda':{'id':50,'name':'novelda'},
    'obarco':{'id':67,'name':'obarco'},
    'paiporta':{'id':47,'name':'paiporta'},
    'palencia':{'id':7,'name':'palencia'},
    'priegodecordoba':{'id':27,'name':'priegodecordoba'},
    'puertollano':{'id':1,'name':'puertollano'},
    'puertolumbreras':{'id':60,'name':'puertolumbreras'},
    'redondela':{'id':44,'name':'redondela'},
    'salamanca':{'id':65,'name':'salamanca'},
    'sanjavier':{'id':38,'name':'sanjavier'},
    'sanpedrodelpinatar':{'id':20,'name':'sanpedrodelpinatar'},
    'santjoan':{'id':45,'name':'santjoan'},
    'segovia':{'id':5,'name':'segovia'},
    'soria':{'id':46,'name':'soria'},
    'talavera':{'id':4,'name':'talavera'},
    'ubeda':{'id':24,'name':'ubeda'},
    'ugr':{'id':30,'name':'ugr'},
    'viaverde':{'id':63,'name':'viaverde'},
    'vigo':{'id':71,'name':'vigo'},
    'vilarreal':{'id':19,'name':'vilarreal'},
    'villanuevadonbenito':{'id':53,'name':'villanuevadonbenito'},
    'villaquilambre':{'id':6,'name':'villaquilambre'},
    'vinaros':{'id':51,'name':'vinaros'}
}
"""
This headers are persistent between calls, use them in
every call, in the state they are.
"""
# request() stores the server-issued session cookie back into this dict.
headers = {
    'User-Agent': 'CityBikes',
    'Accept': '*/*'
}
# Shared opener so the session cookie captured above is reused across calls.
opener = urllib2.build_opener()
def request(url):
    """Fetch *url* with the shared opener, persisting any session cookie."""
    http_request = urllib2.Request(url, headers=headers)
    reply = opener.open(http_request)
    meta = reply.info()
    # Carry the server-issued cookie into every subsequent call.
    if 'set-cookie' in meta:
        headers['Cookie'] = meta['set-cookie']
    return reply.read()
def get_xml(client_id):
    """Download the station XML feed for one client id (token fetched first)."""
    auth_token = get_token(client_id)
    feed_url = SERVICE_URL.format(token=auth_token, client_id=client_id)
    return request(feed_url)
def get_all():
    """Collect the stations of every configured service into one flat list."""
    stations = []
    # Idiomatic flattening: extend with each service's stations instead of
    # the original per-station append loop.
    for service in SERVICES.values():
        stations.extend(get_all_service(service))
    return stations
def get_token(client_id):
    """Scrape the feed access token out of the client's map page."""
    page_html = request(TOKEN_URL.format(client_id=client_id))
    tokens = re.findall(TOKEN_RE, page_html)
    # First match is the live token (IndexError if the page layout changed).
    return tokens[0]
def get_services():
    """Discover every service from the global 'todos' feed, keyed by slug."""
    dom = BeautifulStoneSoup(get_xml('todos'))
    services = {}
    for marker in dom.findAll('marker'):
        entry = {
            'id': marker['codigocliente'],
            'name': marker['nombre'],
        }
        # Underscore-separated slug doubles as the lookup key.
        entry['service'] = slugfy(entry['name'], '_')
        services[entry['service']] = entry
    return services
def get_all_service(service):
    """Build DomoBlueStation objects for every marker of one service.

    Accepts either an entry from get_services() (which has a 'service' slug)
    or one from the static SERVICES table (which only has 'name').  The
    original read only 'service', so SERVICES entries produced the literal
    prefix "None" -- fall back to 'name' to fix that.
    """
    service_id = service.get('id')
    service_name = service.get('service', service.get('name'))
    dom = BeautifulStoneSoup(get_xml(service_id))
    stations = []
    for index, marker in enumerate(dom.findAll('marker')):
        station = DomoBlueStation(index, str(service_name))
        station.from_xml(marker)
        stations.append(station)
    return stations
def slugfy(text, separator):
    """Lower-case *text*, strip accents via HTML entity names, and join the
    words with *separator* (e.g. "Lalín" -> "lalin")."""
    # BUG FIX: the original used ``htmlentitydefs`` without importing it, so
    # the bare ``except`` swallowed a NameError on every character and the
    # accent-stripping never ran.  Import locally (Py2 name, Py3 fallback)
    # and catch only the expected KeyError for unmapped characters.
    try:
        import htmlentitydefs  # Python 2
    except ImportError:
        from html import entities as htmlentitydefs  # Python 3
    ret = ""
    for c in text.lower():
        try:
            # 'í' -> 'iacute', 'ñ' -> 'ntilde', ...
            ret += htmlentitydefs.codepoint2name[ord(c)]
        except KeyError:
            ret += c
    # Drop the entity modifier suffix: "iacute" -> "i".
    ret = re.sub(r"([a-zA-Z])(uml|acute|grave|circ|tilde|cedil)", r"\1", ret)
    ret = re.sub(r"\W", " ", ret)
    ret = re.sub(r" +", separator, ret)
    return ret.strip()
class DomoBlueStation(Station):
    """A single DomoBlue bike station, populated from a <marker> XML node."""

    def __init__(self, idx, prefix):
        Station.__init__(self, idx)
        self.prefix = prefix

    def update(self):
        # Data arrives in bulk via from_xml; nothing to refresh per-station.
        return self

    def from_xml(self, xml_data):
        """Fill the station from a marker's attributes; returns self."""
        def to_microdeg(value):
            # Coordinates are stored as integer micro-degrees.
            return int(float(value) * 1E6)

        self.name = xml_data['nombre']
        self.lat = to_microdeg(xml_data['lat'])
        self.lng = to_microdeg(xml_data['lng'])
        self.bikes = int(xml_data['bicicletas'])
        self.free = int(xml_data['candadoslibres'])
        self.status = int(xml_data['estado'])
        return self
|
997,504 | 13f7029aaa68686784c2a0c04274cb304c614156 | #!/usr/bin/python
import subprocess
import os
import sys
# BUG FIX: the executable and its argument must be separate argv items; the
# original passed the single string "/usr/bin/git diff", which tries to exec
# a file literally named "git diff" (space included) and always fails.
subprocess.call(["/usr/bin/git", "diff"])
|
997,505 | 4930b37a3c1e3ad86eee48f449f2f5032c3208a0 | from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from dao import Connections
from utilities import *
class ViewItemById(QWidget):
    """Widget that looks up a single inventory item by id and shows it in a table."""

    def __init__(self):
        super().__init__()
        self.PrepareScreen()

    def PrepareScreen(self):
        """Build the id input, the View button, and the results table."""
        self.setWindowTitle("View Item BY ID Screen")
        self.setGeometry(300, 100, 500, 400)
        grid = QGridLayout()
        grid.setSpacing(10)
        lblitemid = QLabel("Enter Item Id")
        self.itemidEdit = QLineEdit()
        self.btn = QPushButton("View")
        self.tablewidget = QTableWidget()
        newfont = QFont("Bell MT", 18, QFont.Bold)
        lblitemid.setFont(newfont)
        self.itemidEdit.setFont(newfont)
        self.btn.setFont(newfont)
        grid.addWidget(lblitemid, 1, 0, 1, 2)
        grid.addWidget(self.itemidEdit, 2, 0, 1, 2)
        grid.addWidget(self.btn, 3, 0, 1, 2)
        grid.addWidget(self.tablewidget, 4, 0)
        self.btn.clicked.connect(self.ViewDetails)
        self.itemidEdit.setToolTip("Enter Id of item to view its details")
        self.btn.setToolTip("Click to view record of item")
        self.tablewidget.setStyleSheet("background-color:#CCDADA;")
        self.setLayout(grid)
        # self.show()  # callers decide when to show the widget

    def ViewDetails(self):
        """Fetch the record for the typed item id and render it, or a
        "No Record" row when the id does not exist."""
        try:
            message = ""
            # Renamed from `id` (shadowed the builtin); int() also rejects
            # non-numeric input before it reaches the query below.
            item_id = int(self.itemidEdit.text())
            if item_id > 0:
                column_headers = ("ItemId", "ItemName", "SubCategoryId", "AvailableQty", "Price")
                self.tablewidget.setColumnCount(5)
                self.tablewidget.setHorizontalHeaderLabels(column_headers)
                con = Connections.Connection()
                # String-built SQL is safe here only because item_id passed
                # int() above; prefer a parameterised query if
                # Connections.ExecuteQuery supports placeholders.
                query = "select * from iteminfo where ItemId=" + str(item_id)
                row = 0
                records = con.ExecuteQuery(query)
                # (Leftover debug prints from the original were removed.)
                if records:
                    self.tablewidget.setRowCount(1)
                    for record in records:
                        self.tablewidget.setItem(row, 0, QTableWidgetItem(str(record[0])))
                        self.tablewidget.setItem(row, 1, QTableWidgetItem(record[1]))
                        self.tablewidget.setItem(row, 2, QTableWidgetItem(str(record[2])))
                        self.tablewidget.setItem(row, 3, QTableWidgetItem(str(record[3])))
                        self.tablewidget.setItem(row, 4, QTableWidgetItem(str(record[4])))
                    message = "Record is Displayed"
                else:
                    # Fill one placeholder row so the table is never blank.
                    self.tablewidget.setRowCount(1)
                    for col in range(5):
                        self.tablewidget.setItem(row, col, QTableWidgetItem("No Record"))
                    message = "Record does not exist"
            else:
                message = "Enter a Valid Item Id"
            ShowMessageDialog(self, message)
        except BaseException as ex:
            # Broad catch keeps the GUI alive on bad input or DB failure;
            # at minimum report the problem to the console.
            print(ex)
|
997,506 | 29876abf8be229a59032affbbca511425e90846b | from abc import ABC, abstractmethod
from enum import Enum
from errors import TooManyRetriesError
class FailAction(Enum):
    """What Manager.send should do after a failed attempt."""
    Cancel = 0  # give up and re-raise the error
    Retry = 1   # try the query again (up to the retry limit)
class Manager(ABC):
    """Base class that runs a named query with validation, retries, and parsing."""

    def __init__(self, validator, queries):
        self.queries = queries
        self.validator = validator

    def send(self, query: str, data: dict):
        """Run *query* with *data*, retrying up to three times, then parse.

        Raises KeyError for an unknown query name and TooManyRetriesError
        once the retry budget is exhausted.
        """
        try:
            runner = self.queries[query]
        except KeyError:
            raise KeyError(f"{query} not found in queries, available options are {list(self.queries.keys())}")
        failures = []
        attempt = 1
        while attempt <= 3:
            try:
                raw_response = runner(**data)
                self.validator.validate(raw_response)
            except Exception as err:
                failures.append(err)
                # Subclasses decide whether a failure is fatal or retryable.
                if self._handle_error(err) == FailAction.Cancel:
                    raise err
                attempt += 1
            else:
                # Success path: parse outside the try so _parse errors
                # propagate instead of being retried.
                return self._parse(raw_response)
        # Retry budget exhausted: surface what went wrong, then bail out.
        for err in failures:
            print(str(err))
        raise TooManyRetriesError()

    def _handle_error(self, e):
        # Default policy: never retry.
        return FailAction.Cancel

    def _get_fetch_args(self, request):
        # Hook for subclasses; no extra fetch arguments by default.
        return []

    @abstractmethod
    def _parse(self, raw_response):
        """Convert a validated raw response into the caller-facing result."""
        pass
|
997,507 | 0a6095392b826041c5023c8bf223ff432b7d5323 | from askmath.entities import TextMessage
from askmath.models.classe import Classe as ClasseModel
from askmath.models.users import Student as StudentModel
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.decorators import method_decorator
from .iranking import IRanking
from .ranking import Ranking
class ProxyRanking(IRanking):
    """Permission-checking proxy in front of the concrete Ranking view
    (Python 2 / Django)."""

    def __init__(self):
        self.__ranking = Ranking()

    @method_decorator(login_required)
    def view_ranking(self, request, id_classe = None):
        """Render the ranking page, scoped to a class when id_classe resolves."""
        # Both permissions are required to see any ranking at all.
        if request.user.has_perm("askmath.read_ranking") and request.user.has_perm("askmath.access_content"):
            try:
                classe = ClasseModel.objects.filter(id=id_classe, exists=True, visible=True)[0]
            except:
                # Best effort: any failure (bad id, no visible match) falls
                # back to the global ranking.
                classe = None
            try:
                student = request.user.get_person_class(request.user, StudentModel)
                return self.__ranking.view_ranking(request, student, classe)
            except Exception, e:
                # Python 2 except syntax; rendering failures become a generic
                # error message for the user.
                print e
                messages.error(request, TextMessage.ERROR)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        # Reached on permission failure or rendering error.
        return HttpResponseRedirect(reverse('askmath:home'))
|
997,508 | 4250a6e00141e048b8afa7b78adc14fd51433a5d | #!/bin/python3
import math
import os
import random
import re
import sys
#begining of my code
########################
#looking for nodes
def look_for_nodes(edges):
    """Return the sorted list of distinct node ids appearing in *edges*.

    Each edge is a sequence whose first two items are the endpoint node ids
    (a trailing weight, if present, is ignored).  The original rebuilt the
    unique list with a nested scan over range(max+1) -- O(n * max_id) and a
    crash (max of empty sequence) on no edges; a set plus sorted() yields the
    same ascending-unique result directly and handles the empty case.
    """
    nodes = set()
    for edge in edges:
        nodes.add(edge[0])
        nodes.add(edge[1])
    return sorted(nodes)
def ditance_xy(egdes, s, destiny_nodes, y):
    # Unimplemented helper (sic: "ditance"/"egdes").  The original def had no
    # body at all, which is a SyntaxError; `pass` makes the module importable
    # until the distance computation is written.
    pass
# Complete the shortestReach function below.
# Complete the shortestReach function below.
def shortestReach(n, edges, s):
    """Stub for the shortest-reach problem; distances are not computed yet.

    Fixes over the original: the debug print is gone, and the destiny list
    is built as a new list instead of aliasing (and mutating) the node list
    returned by look_for_nodes.
    """
    nodes = look_for_nodes(edges)
    destiny_nodes = [node for node in nodes if node != s]
    # TODO: compute the shortest distance from s to each destiny node and
    # return them -- the __main__ harness expects an iterable of ints.
    pass
#ending of my code
########################
if __name__ == '__main__':
    # HackerRank harness: results are written to the path in OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')

    t = int(input())  # number of test cases

    for t_itr in range(t):
        nm = input().split()
        n = int(nm[0])  # number of nodes
        m = int(nm[1])  # number of edges

        edges = []
        for _ in range(m):
            edges.append(list(map(int, input().rstrip().split())))

        s = int(input())  # start node

        # NOTE(review): shortestReach currently returns None, so the join
        # below raises TypeError -- confirm once the solver is implemented.
        result = shortestReach(n, edges, s)
        fptr.write(' '.join(map(str, result)))
        fptr.write('\n')

    fptr.close()
|
997,509 | 2de8d3548d6f4d321d94210cfacaadaf8223edf9 | from aiogram import types
from aiogram.types import CallbackQuery
from keyboards.inline import feedback
from loader import dp
@dp.message_handler(text="Для жалоб и предложений")
async def price_list(message: types.Message):
    # Reply to the "complaints and suggestions" menu button with the
    # feedback contact keyboard.
    await message.answer("Вот кому ты можешь обратится", reply_markup=feedback)
@dp.callback_query_handler(text="garat")
async def call_electronic(call: CallbackQuery):
    # Send the requested contact link, then strip the inline keyboard so the
    # button cannot be pressed again.
    await call.message.answer("вот твой запрос")
    await call.message.answer("<a href='t.me/Good_opt_Egor'>@goodopt_egor</a>")
    await call.message.edit_reply_markup(reply_markup=None)
|
997,510 | 86234666011cd777b7c98030dd436376e20b1ca9 | import yaml
import argparse
import os
from os import path as osp
import argparse
import joblib
from time import sleep
from collections import defaultdict
import numpy as np
import torch
import rlkit.torch.pytorch_util as ptu
from rlkit.envs.few_shot_fetch_env import _BaseParamsSampler
from rlkit.envs.few_shot_fetch_env import StatsFor50Tasks25EachScaled0p9LinearBasicFewShotFetchEnv as EvalEnv
from rlkit.launchers.launcher_util import setup_logger, set_seed
from rlkit.torch.sac.policies import PostCondMLPPolicyWrapper
from rlkit.data_management.path_builder import PathBuilder
'''
Things I need:
- (done) being able to set seeds for the replay buffers
- an expert dataset generated using the task identities I am using for evaluation
'''
# Episode horizon for evaluation rollouts.
MAX_PATH_LENGTH = 65
# Expert demonstrations generated with the evaluation task identities.
EXPERT_BUFFER_PATH = '/scratch/hdd001/home/kamyar/expert_demos/few_shot_fetch_eval_expert_trajs/extra_data.pkl'
EVAL_SEED = 89205
# Commented alternates below are previous evaluation settings.
# NUM_EVAL_TASKS = 16
NUM_EVAL_TASKS = 4
NUM_CONTEXT_SAMPLES = 1
# NUM_POST_SAMPLES = 1
NUM_POST_SAMPLES = 1
NUM_ROLLOUTS_PER_POST_SAMPLE = 10
# NUM_ROLLOUTS_PER_POST_SAMPLE = 5
def rollout_path(env, task_params, obs_task_params, post_cond_policy):
    """Roll out one episode of *post_cond_policy* in *env* and return the
    stacked path (dict of stacked arrays from PathBuilder)."""
    cur_eval_path_builder = PathBuilder()
    # reset the env using the params
    observation = env.reset(task_params=task_params, obs_task_params=obs_task_params)
    terminal = False
    task_identifier = env.task_identifier
    while (not terminal) and len(cur_eval_path_builder) < MAX_PATH_LENGTH:
        agent_obs = observation['obs']
        action, agent_info = post_cond_policy.get_action(agent_obs)
        next_ob, raw_reward, terminal, env_info = (env.step(action))
        # NOTE(review): the env's terminal flag is discarded here, so every
        # episode runs the full MAX_PATH_LENGTH -- presumably a deliberate
        # fixed-horizon evaluation; confirm.
        terminal = False
        reward = raw_reward
        terminal = np.array([terminal])
        reward = np.array([reward])
        cur_eval_path_builder.add_all(
            observations=observation,
            actions=action,
            rewards=reward,
            next_observations=next_ob,
            terminals=terminal,
            agent_infos=agent_info,
            env_infos=env_info,
            task_identifiers=task_identifier
        )
        observation = next_ob
    return cur_eval_path_builder.get_all_stacked()
def gather_eval_data(policy, np_encoder, expert_buffer_for_eval_tasks, max_context_size=6, sample_from_prior=False):
    """Evaluate *policy* conditioned on posteriors from *np_encoder* across the
    evaluation tasks, sweeping context sizes 1..max_context_size.

    Returns a dict of {metric_name: {context_size: [per-task percentages]}}.
    When sample_from_prior is True, z is drawn from a standard normal instead
    of the encoder posterior (and only context size 1 is run).
    """
    # return all the metrics we would need for evaluating the models
    # for each trajectory we need to know 1) was it successful 2) was it a good reach
    # policy.cuda()
    # np_encoder.cuda()
    policy.eval()
    np_encoder.eval()

    # Fixed sampler seed so every checkpoint sees the same task identities.
    params_sampler = _BaseParamsSampler(random=52269, num_colors=NUM_EVAL_TASKS)
    env = EvalEnv()
    all_statistics = {}
    task_num = 0

    if sample_from_prior: max_context_size = 1

    # metric -> {context_size: [per-task values]}
    all_good_reach = defaultdict(list)
    all_solved = defaultdict(list)
    all_no_op_fail = defaultdict(list)
    for task_params, obs_task_params in params_sampler:
        print('\tEvaluating task %d...' % task_num)
        task_num += 1
        env.reset(task_params=task_params, obs_task_params=obs_task_params)
        task_id = env.task_identifier

        for context_size in range(1, max_context_size+1):
            print('\t\tEvaluating context size %d...' % context_size)
            paths_for_context_size = []
            for _ in range(NUM_CONTEXT_SAMPLES):
                # get a context
                list_of_trajs = expert_buffer_for_eval_tasks.sample_trajs_from_task(
                    task_id,
                    context_size
                )
                post_dist = np_encoder([list_of_trajs])
                for _ in range(NUM_POST_SAMPLES):
                    # sample from the posterior and get the PostCondPolicy
                    # z = post_dist.sample()
                    # Posterior mean is used instead of a sample.
                    z = post_dist.mean
                    z = z.cpu().data.numpy()[0]
                    if sample_from_prior:
                        z = np.random.normal(size=z.shape)
                    post_cond_policy = PostCondMLPPolicyWrapper(policy, z)
                    for _ in range(NUM_ROLLOUTS_PER_POST_SAMPLE):
                        stacked_path = rollout_path(
                            env,
                            task_params,
                            obs_task_params,
                            post_cond_policy
                        )
                        paths_for_context_size.append(stacked_path)

            # Aggregate this task's rollouts at the current context size.
            stats_for_context_size = env.log_statistics(paths_for_context_size)
            all_good_reach[context_size].append(stats_for_context_size['Percent_Good_Reach'])
            all_solved[context_size].append(stats_for_context_size['Percent_Solved'])
            all_no_op_fail[context_size].append(stats_for_context_size['Percent_NoOp_Fail'])

    return {'algorithm_good_reach': all_good_reach, 'algorithm_solved': all_solved, 'algorithm_no_op_fail': all_no_op_fail}
if __name__ == '__main__':
    # Arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('-e', '--experiment', help='experiment specification file')
    args = parser.parse_args()
    with open(args.experiment, 'r') as spec_file:
        spec_string = spec_file.read()
        # NOTE(review): yaml.load without an explicit Loader will execute
        # arbitrary YAML tags; prefer yaml.safe_load if the spec files allow.
        exp_specs = yaml.load(spec_string)

    # Which trained sub-experiment to evaluate, and the z-sampling mode.
    exp_path = exp_specs['exp_path']
    sub_exp = exp_specs['sub_exp']
    sample_from_prior = exp_specs['sample_from_prior']

    print('\n\nUSING GPU\n\n')
    ptu.set_gpu_mode(True)

    # seed
    set_seed(EVAL_SEED)

    # load the expert replay buffer
    expert_buffer = joblib.load(EXPERT_BUFFER_PATH)['meta_train']['context']

    # for each subdir experiment evaluate it
    try:
        # Best meta-test checkpoint; the commented line loads the latest instead.
        alg = joblib.load(osp.join(exp_path, sub_exp, 'best_meta_test.pkl'))['algorithm']
        # alg = joblib.load(osp.join(exp_path, sub_exp, 'extra_data.pkl'))['algorithm']
        print('\nLOADED ALGORITHM\n')
        if exp_specs['evaluating_np_airl']:
            alg.cuda()
            alg.main_policy.preprocess_model.cuda()
        else:
            alg.cuda()
    except Exception as e:
        print('Failed on {}/{}'.format(exp_path, sub_exp))
        raise e

    sub_exp_stats = gather_eval_data(
        alg.main_policy if exp_specs['evaluating_np_airl'] else alg.policy,
        alg.encoder,
        expert_buffer,
        sample_from_prior=sample_from_prior
    )
    print(sub_exp_stats)

    # save all of the results
    save_name = 'all_few_shot_stats.pkl'
    if sample_from_prior: save_name = 'prior_sampled_' + save_name
    joblib.dump(
        sub_exp_stats,
        osp.join(exp_path, sub_exp, save_name),
        compress=3
    )
|
997,511 | 0e7a3c47516cf45b2982ea0cb35829d79dadc729 | #!venv/bin/python3
from include.dto.UserDTO import UserDTO
from include.dto.VacancyDTO import VacancyDTO
from include.ArgsParse import ArgsParser
from include.api.ZarplataApi import ZpApi
from include.helpers.PhoneFormat import PhoneFormat
from include.helpers.PasswordGen import PasswordGen
import requests
from include.db import *
import json
if __name__ == '__main__':
    # Scrape vacancies + employer companies for one geo/rubric pair and
    # persist them; indentation below is reconstructed -- verify placement
    # of `vacancy_insert` and `db_handle.commit()` against the original.
    args = ArgsParser.parse()
    api = ZpApi()
    # geo_count = api.do_geo_request(limit=0, offset=0)['metadata']['resultset']['count']
    # for counter in range(0, geo_count, 100):
    #     geo_res = api.do_geo_request(limit=100, offset=counter)
    #     for geo_item in geo_res['geo']:
    #         rubric_res = api.do_rubric_request()['rubrics']
    #         for rubric in rubric_res:
    #             vacancy_count = api.do_vacancy_request(geo_id=geo_item['id'], rubric_id=rubric['id'], limit=0, offset=0)['metadata']['resultset']['count']
    #             company_list = []
    #             for counter in range(0, vacancy_count, 100):
    #                 vacancy_res = api.do_vacancy_request(geo_id=geo_item['id'], rubric_id=rubric['id'], limit=100, offset=counter)
    #                 for vacancy in vacancy_res['vacancies']:
    #                     vacancy_dto = VacancyDTO(vacancy)
    #                     if vacancy_dto.owner not in company_list:
    #                         company_res = api.do_company_request(company_id=vacancy_dto.owner)
    #                         user_dto = UserDTO(company_res['companies'][0])
    #                         company_list.append(vacancy_dto.owner)
    #                         company_list.append(vacancy_dto.owner)
    #                         user_insert = UserModel.crate_user(user_dto)
    #                         employer_insert = EmployerModel.create_employer(user_insert, user_dto)
    #                         company_insert = CompanyModel.create_company(user_insert, user_dto)
    #                         phone_insert = PhoneModel.create_phone(user_dto.phone, user_insert, company_insert, employer_insert)
    #                     vacancy_insert = VacancyModel.create_vacancy(vacancy_dto)
    #TODO: add proxies, args
    # First request with limit=0 only fetches the total result count.
    vacancy_count = api.do_vacancy_request(geo_id=61, rubric_id=138, limit=0, offset=0)['metadata']['resultset']['count']
    company_list = []  # owner ids already inserted, to avoid duplicate companies
    for counter in range(0, vacancy_count, 100):
        vacancy_res = api.do_vacancy_request(geo_id=61, rubric_id=138, limit=100, offset=counter)
        # One transaction per 100-vacancy page.
        with db_handle.atomic():
            for vacancy in vacancy_res['vacancies']:
                vacancy_dto = VacancyDTO(vacancy)
                if vacancy_dto.owner not in company_list:
                    company_res = api.do_company_request(company_id=vacancy_dto.owner)
                    user_dto = UserDTO(company_res['companies'][0])
                    company_list.append(vacancy_dto.owner)
                    user_insert = UserModel.crate_user(user_dto)
                    employer_insert = EmployerModel.create_employer(user_insert, user_dto)
                    company_insert = CompanyModel.create_company(user_insert, user_dto)
                    phone_insert = PhoneModel.create_phone(user_dto.phone, user_insert, company_insert, employer_insert)
                vacancy_insert = VacancyModel.create_vacancy(vacancy_dto)
        # NOTE(review): atomic() already commits on exit; this explicit commit
        # looks redundant -- confirm against the peewee autocommit settings.
        db_handle.commit()
    point = 'test'
    # vacancy_dto = VacancyDTO(res['vacancies'][0])
    # user_dto = UserDTO(res['companies'][0])
    # user = UserModel.crate_user(user_dto)
    # employer = EmployerModel.create_employer(user, user_dto)
    # company = CompanyModel.create_company(user, user_dto)
    # phone = PhoneModel.create_phone(user_dto.phone, user, company)
    # print(f'{args.base_region_url}')
|
997,512 | 782f0b76591777abc5bab93d4affb3689df4d284 | import numpy as np
from PIL import Image
def preprocess(img):
    """Convert a raw RGB frame array to a normalised 64x64 grayscale float16 array."""
    frame = Image.fromarray(img)
    frame = frame.resize((84, 110))       # PIL size is (width, height)
    frame = frame.crop((0, 26, 84, 110))  # drop the top 26 rows -> 84x84
    frame = frame.convert('L').resize((64, 64))
    scaled = np.array(frame) / 255
    return np.float16(scaled)
def init_state(img):
    """Build the initial 4-frame history by repeating the first preprocessed frame."""
    frame = preprocess(img)[np.newaxis, :, :]
    return np.stack([frame] * 4, axis=1)
|
997,513 | 74d0ce75a8086d5c3a32163199cb1c0b92facc4d | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-07-06 08:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the CharField `status_x_check`
    # (default 'no') to the prodsys `job` model.

    dependencies = [
        ('prodsys', '0028_auto_20170626_1131'),
    ]

    operations = [
        migrations.AddField(
            model_name='job',
            name='status_x_check',
            field=models.CharField(default='no', max_length=300),
        ),
    ]
|
997,514 | 57f589b7778d873e09adaa65da31d36296ab97e6 | from django.db import models
from django.db.models import Q
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
class Tag(models.Model):
    """A free-form label that can be attached to events."""
    name = models.CharField(max_length=20)

    def __str__(self):
        return self.name
class Skill(models.Model):
    """A skill that people can have and events can require."""
    short_name = models.CharField(max_length=10)
    long_name = models.CharField(max_length=100)
    description = models.TextField()

    def __str__(self):
        return self.short_name
class Organization(models.Model):
    """An organization that hosts events and employs organizers."""
    name = models.CharField(max_length=100)
    karma = models.IntegerField(default=0)
    email = models.EmailField()
    phone = models.CharField(max_length=20)
    address = models.CharField(max_length=200)
    home_page = models.URLField()
    icon = models.ImageField(default=None, blank=True, null=True)

    def __str__(self):
        return self.name
class Person(models.Model):
    """Profile attached 1:1 to a Django auth User.

    A person with no organization is a volunteer; with one, an organizer.
    """
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    karma = models.IntegerField(default=0)
    skills = models.ManyToManyField(Skill, blank=True)
    phone = models.CharField(max_length=20)
    organization = models.ForeignKey(
        Organization, models.CASCADE, null=True, blank=True
    )
    icon = models.ImageField(default=None, blank=True, null=True)

    def get_name(self):
        """Return the user's full display name."""
        return self.user.first_name + " " + self.user.last_name

    def __unicode__(self):
        # Python 2 legacy hook; harmless on Python 3, kept for compatibility.
        return self.user.username

    def __str__(self):
        return str(self.user.username)

    def is_volunteer(self):
        return self.organization is None

    def is_organizer(self):
        return self.organization is not None

    def contains_skill(self, tag):
        """True if this person has the skill with the given short name."""
        # exists() issues an EXISTS query instead of COUNT(*) -- cheaper when
        # only membership matters.
        return self.skills.filter(short_name=tag).exists()
class Event(models.Model):
    """A volunteering event hosted by an organization."""
    organization = models.ForeignKey(Organization, models.CASCADE)
    organizers_volunteers = models.ManyToManyField(Person, blank=True)
    tags = models.ManyToManyField(Tag, blank=True)
    skills = models.ManyToManyField(Skill, blank=True)
    address = models.CharField(max_length=200)
    minimum_karma = models.IntegerField(default=0)
    name = models.CharField(max_length=100)
    date = models.DateField()
    duration = models.DurationField()
    blurb = models.TextField(blank=True)
    description = models.CharField(max_length=280, blank=True)
    icon = models.ImageField(default=None, blank=True, null=True)

    def __str__(self):
        return self.name

    def contains_tag(self, tag):
        """True if this event carries the tag with the given name."""
        # exists() issues an EXISTS query instead of COUNT(*).
        return self.tags.filter(name=tag).exists()

    def contains_person(self, pk):
        """True if the person with primary key *pk* is attached to this event."""
        return self.organizers_volunteers.filter(pk=pk).exists()
|
997,515 | 1549139c082cff86e0d6b2bde5d914f4a60dcb5e | N, K = map(int, input().split())
# Read the N values and sort ascending so A[j] is the (j+1)-th smallest.
A = list(map(int, input().split()))
A = sorted(A)
def ncr(n, r, p):
    """Compute C(n, r) modulo prime p via Fermat's little theorem.

    BUGFIX: the original returned 1 for negative r (the empty loop left
    num == den == 1); out-of-range r now correctly yields 0.
    """
    if r < 0 or r > n:
        return 0
    num = den = 1
    for i in range(r):
        num = (num * (n - i)) % p
        den = (den * (i + 1)) % p
    return (num * pow(den, p - 2, p)) % p
Mod = 10**9+7  # prime modulus
# Factorial / inverse-factorial / modular-inverse tables seeded for 0 and 1;
# COMinit() extends them up to index N+9.
fac = [1, 1]
finv = [1, 1]
inv = [0, 1]
def COMinit():
    """Extend the global fac/finv/inv tables up to index N+9 (mod Mod)."""
    # N of N_C_k (table size bound)
    for i in range(2, N+10):
        fac.append(fac[-1]*i%Mod)
        # Modular inverse via the recurrence inv[i] = -(Mod // i) * inv[Mod % i].
        inv.append((-inv[Mod%i] * (Mod//i)) % Mod)
        finv.append(finv[-1] * inv[-1] % Mod)
def COM(n, k):
    """Binomial coefficient C(n, k) mod Mod using the precomputed tables."""
    if not 0 <= k <= n:
        return 0
    inverse_part = finv[k] * finv[n-k] % Mod
    return fac[n] * inverse_part % Mod
COMinit()
p=10**9+7
ans = 0
# Sum of maxima over all K-subsets: A[j] is the maximum of C(j, K-1) subsets.
for i in range(N-K+1):
    j = N-1-i
    #ans += A[j] * ncr(j, K-1, p)
    ans += A[j] * COM(j, K-1)
    ans %= p
# Subtract the sum of minima: A[i] is the minimum of C(N-i-1, K-1) subsets.
for i in range(N-K+1):
    #ans -= A[i] * ncr(N-i-1, K-1, p)
    ans -= A[i] * COM(N-i-1, K-1)
    ans %= p
print(ans)
def reverse(s):
    """Return *s* reversed as a string.

    BUGFIX: the original built the reversed character list but never
    returned anything (always None). Presumably the intent was a reversed
    string -- confirm with callers if the character list was wanted instead.
    """
    chars = []
    for letter in reversed(s):
        chars.append(letter)
    return "".join(chars)
|
997,517 | 610177912de2a3ec3c37d40a0f739d91419d5c73 | from __future__ import unicode_literals
import youtube_dl
import glob, os
def vid2aud(url='https://www.youtube.com/watch?v=jlmyJLnIOYw', out_name='audio.wav'):
    """Download the audio track of *url* as WAV and rename it to *out_name*.

    Generalized: the URL and output filename were hard-coded; the defaults
    preserve the original behavior for existing callers.
    """
    ydl_opts = {
        'format': 'bestaudio/best',
        'postprocessors': [{
            'key': 'FFmpegExtractAudio',
            'preferredcodec': 'wav',
            'preferredquality': '192',
        }],
    }
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        ydl.download([url])
    # NOTE(review): renames every .wav in the cwd; with multiple files only
    # the last survives under out_name (pre-existing behavior).
    for file in glob.glob("./*.wav"):
        os.rename(file, out_name)
#https://www.youtube.com/watch?v=JvOT4strzrA |
997,518 | 7f446dfe41ab08345dedfd9920e82578cf02cccc | # program with operation on 2 sets
# Demonstrate set algebra on the distinct characters of two words.
x = set("runoob")
y = set("google")
print("x =", x)
print("y =", y)
print("x & y =", x & y)  # intersection
print("x | y =", x | y)  # union
print("x - y =", x - y)  # difference
997,519 | 892299e773795313480429831cd624e708049edb | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/battle_control/arena_info/team_overrides.py
import VOIP
from gui.battle_control import avatar_getter
from gui.battle_control.arena_info.arena_vos import VehicleActions
from gui.battle_control.arena_info import settings
_DELIVERY_STATUS = settings.INVITATION_DELIVERY_STATUS
_P_STATUS = settings.PLAYER_STATUS
class DefaultTeamOverrides(object):
    """Per-team helpers for battle roster display (enemy-team defaults).

    Decompiled from bytecode; some oddities below are decompiler artifacts.
    """
    __slots__ = ('team', 'personal', 'isReplayPlaying')

    def __init__(self, team, personal, isReplayPlaying=False):
        super(DefaultTeamOverrides, self).__init__()
        self.team = team
        self.personal = personal
        self.isReplayPlaying = isReplayPlaying

    def isPlayerSelected(self, vo):
        # The local player is never on the enemy team.
        return False

    def isPersonalSquad(self, vo):
        return False

    def isTeamKiller(self, vo):
        # For the player's own entry, also honor the local suspicion flag.
        return self.personal.teamKillSuspected or vo.isTeamKiller(playerTeam=self.team) if self.isPlayerSelected(vo) else vo.isTeamKiller(playerTeam=self.team)

    def getAction(self, vo):
        return VehicleActions.getBitMask(vo.events)

    def getPlayerStatus(self, vo, isTeamKiller=False):
        # Accumulate PLAYER_STATUS bit flags for the roster entry.
        playerStatus = _P_STATUS.DEFAULT
        if vo.isActionsDisabled() or self.isReplayPlaying:
            playerStatus |= _P_STATUS.IS_ACTION_DISABLED
        if vo.isSquadMan():
            playerStatus |= _P_STATUS.IS_SQUAD_MAN
        if self.isPersonalSquad(vo):
            playerStatus |= _P_STATUS.IS_SQUAD_PERSONAL
        if self.isTeamKiller(vo) or isTeamKiller:
            playerStatus |= _P_STATUS.IS_TEAM_KILLER
        if self.isPlayerSelected(vo) and not self.personal.isOtherSelected() or self.isPostmortemView(vo):
            playerStatus |= _P_STATUS.IS_PLAYER_SELECTED
        return playerStatus

    def isPostmortemView(self, vo):
        return vo.vehicleID == self.personal.selectedID

    def getInvitationDeliveryStatus(self, vo):
        return _DELIVERY_STATUS.FORBIDDEN_BY_RECEIVER

    def getColorScheme(self):
        # NOTE(review): decompiler left `pass` -- effectively returns None.
        pass

    def clear(self):
        self.personal = None
        return
class PlayerTeamOverrides(DefaultTeamOverrides):
    """Overrides for the local player's own team (squad/VOIP aware)."""
    __slots__ = ('__isVoipSupported',)

    def __init__(self, team, personal, isVoipSupported=False, isReplayPlaying=False):
        super(PlayerTeamOverrides, self).__init__(team, personal, isReplayPlaying)
        self.__isVoipSupported = isVoipSupported

    def isPlayerSelected(self, vo):
        return vo.vehicleID == self.personal.vehicleID

    def isPersonalSquad(self, vo):
        return vo.isSquadMan(prebattleID=self.personal.prebattleID)

    def getAction(self, vo):
        # NOTE(review): decompiler left `pass` -- returns None, unlike the
        # base class which returns a bit mask; confirm against callers.
        pass

    def getPlayerStatus(self, vo, isTeamKiller=False):
        status = super(PlayerTeamOverrides, self).getPlayerStatus(vo)
        voipMgr = VOIP.getVOIPManager()
        # Flag the player's own squad entry when VOIP is supported but the
        # VOIP channel is not currently active (and not watching a replay).
        if self.personal.vehicleID == vo.vehicleID and vo.isSquadMan() and self.__isVoipSupported and not (voipMgr.isEnabled() and voipMgr.isCurrentChannelEnabled()) and not self.isReplayPlaying:
            status |= _P_STATUS.IS_VOIP_DISABLED
        return status

    def getInvitationDeliveryStatus(self, vo):
        return vo.invitationDeliveryStatus

    def getColorScheme(self):
        # NOTE(review): decompiler left `pass` -- effectively returns None.
        pass
class PersonalInfo(object):
    """Tracks the local player's own vehicle ID and the camera-selected ID."""
    __slots__ = ('realName', 'vehicleID', 'selectedID', 'prebattleID', 'teamKillSuspected')

    def __init__(self):
        super(PersonalInfo, self).__init__()
        self.prebattleID = 0
        self.vehicleID = avatar_getter.getPlayerVehicleID()
        self.selectedID = self.vehicleID
        self.teamKillSuspected = avatar_getter.isPlayerTeamKillSuspected()

    def changeSelected(self, selectedID):
        """Switch the selected vehicle; return the previously selected ID."""
        previousID = self.selectedID
        self.selectedID = selectedID
        return previousID

    def isOtherSelected(self):
        """True when the camera follows a vehicle other than the player's own."""
        return not (self.vehicleID == self.selectedID)
def makeOverrides(isEnemy, team, personal, arenaVisitor, isReplayPlaying=False):
    """Build team overrides: plain for the enemy team, player-aware otherwise."""
    if isEnemy:
        return DefaultTeamOverrides(team, personal, isReplayPlaying=isReplayPlaying)
    voipSupported = arenaVisitor.gui.isRandomBattle() or arenaVisitor.gui.isInEpicRange()
    return PlayerTeamOverrides(team, personal, isVoipSupported=voipSupported, isReplayPlaying=isReplayPlaying)
|
997,520 | 687c541cebb563651fae80a21ac80a62f6deb1b0 | import asyncio
import mimetypes
import os
import pathlib
from . import hdrs
from .helpers import create_future
from .http_writer import PayloadWriter
from .log import server_logger
from .web_exceptions import HTTPNotModified, HTTPOk, HTTPPartialContent, HTTPRequestRangeNotSatisfiable
from .web_response import StreamResponse
var4127 = ('FileResponse',)
var1131 = bool(os.environ.get('AIOHTTP_NOSENDFILE'))
class Class219(PayloadWriter):
    """sendfile-capable payload writer (obfuscated/decompiled aiohttp code).

    Buffers header bytes, then streams the file body with os.sendfile once
    the transport becomes available. Obfuscated names kept as-is; only
    clearly-broken references are repaired (each marked BUGFIX below).
    """

    def function851(self, arg462):
        # Transport became available: store it and resolve the drain waiter.
        self.attribute250 = arg462
        if (self.attribute205 is not None):
            # BUGFIX: the original unpacked into the never-assigned name
            # `self._drain_maiter`; the waiter future checked above is
            # attribute205, so swap that out instead.
            (var33, self.attribute205) = (self.attribute205, None)
            if (not var33.done()):
                var33.set_result(None)

    def function508(self, arg2367):
        # Queue header bytes to be flushed before the sendfile body.
        self.output_size += len(arg2367)
        self._buffer.append(arg2367)

    def function310(self, arg2195, arg1324, arg400, arg982, arg2241, arg502, arg1215):
        # One os.sendfile step; re-registers itself on the loop until the
        # remaining `arg2241` bytes have been sent, then resolves `arg2195`.
        if arg1215:
            arg502.remove_writer(arg1324)
        if arg2195.cancelled():
            return
        try:
            var2551 = os.sendfile(arg1324, arg400, arg982, arg2241)
            if (var2551 == 0):
                var2551 = arg2241
        except (BlockingIOError, InterruptedError):
            var2551 = 0
        except Exception as var2481:
            arg2195.set_exception(var2481)
            return
        if (var2551 < arg2241):
            arg502.add_writer(arg1324, self.function310, arg2195, arg1324, arg400, (arg982 + var2551), (arg2241 - var2551), arg502, True)
        else:
            arg2195.set_result(None)

    @asyncio.coroutine
    def function2338(self, arg1291, arg2241):
        # Wait for the transport, flush buffered headers, then sendfile
        # `arg2241` bytes of the open file object `arg1291`.
        if (self.attribute250 is None):
            if (self.attribute205 is None):
                # BUGFIX: `self.arg502` does not exist; the event loop lives
                # on PayloadWriter as `self.loop` (assumption from the
                # aiohttp API -- confirm against the installed version).
                self.attribute205 = create_future(self.loop)
            yield from self.attribute205
        var1030 = self.attribute250.get_extra_info('socket').dup()
        var1030.setblocking(False)
        arg1324 = var1030.fileno()
        arg400 = arg1291.fileno()
        arg982 = arg1291.tell()
        arg502 = self.loop  # BUGFIX: was `self.arg502` (undefined attribute)
        try:
            yield from arg502.sock_sendall(var1030, b''.join(self._buffer))
            arg2195 = create_future(arg502)
            self.function310(arg2195, arg1324, arg400, arg982, arg2241, arg502, False)
            yield from arg2195  # BUGFIX: was `yield from fut` (undefined name)
        except:  # NOTE(review): overly broad; kept from the decompiled source
            server_logger.debug('Socket error')
            self.attribute250.close()
        finally:
            var1030.close()
        self.output_size += arg2241
        self.attribute250 = None
        self._stream.release()

    @asyncio.coroutine
    def function807(self, arg2194=b''):
        # Intentionally a no-op: the body is written via sendfile, not here.
        pass
class Class136(StreamResponse):
    'A response object can be used to send files.'
    # Obfuscated/decompiled aiohttp FileResponse. The original contained
    # several outright SyntaxErrors and undefined names left by the
    # decompiler; each repair is marked BUGFIX below.

    def __init__(self, arg10, arg1059=(256 * 1024), *args, **kwargs):
        # BUGFIX: the original read `super().__init__(*args, None=kwargs)`,
        # which is a SyntaxError; forward keyword arguments normally.
        super().__init__(*args, **kwargs)
        if isinstance(arg10, str):
            arg10 = pathlib.Path(arg10)
        self.attribute1162 = arg10   # path of the file to serve
        self.attribute345 = arg1059  # chunk size for the fallback writer

    @asyncio.coroutine
    def function1413(self, arg628, arg1063, arg2241):
        # sendfile path: usable only on a plain (non-TLS) socket transport.
        # BUGFIX: was `arg628.var1538` (decompiler-mangled attribute); the
        # request's transport is the only object exposing get_extra_info --
        # confirm the attribute name against aiohttp.
        var1538 = arg628.transport
        if (var1538.get_extra_info('sslcontext') or (var1538.get_extra_info('socket') is None)):
            var1034 = yield from self.function512(arg628, arg1063, arg2241)
        else:
            # NOTE(review): `.writer` reconstructed from the mangled
            # `._protocol.var1034` -- verify against aiohttp internals.
            var1034 = arg628._protocol.writer.replace(arg628._writer, Class219)
            arg628._writer = var1034
            yield from super().function2721(arg628)
            yield from var1034.function2338(arg1063, arg2241)
        return var1034

    @asyncio.coroutine
    def function512(self, arg32, arg1356, arg2241):
        # Chunked fallback: read/write attribute345-sized chunks with
        # TCP_CORK set for the duration of the transfer.
        var3276 = yield from super().function2721(arg32)
        self.set_tcp_cork(True)
        try:
            var1258 = self.attribute345
            var76 = arg1356.read(var1258)
            while True:
                yield from var3276.write(var76)
                arg2241 = (arg2241 - var1258)
                if (arg2241 <= 0):
                    break
                var76 = arg1356.read(min(var1258, arg2241))
        finally:
            self.set_tcp_nodelay(True)
        yield from var3276.drain()
        return var3276

    # Pick the sendfile implementation at class-creation time.
    if (hasattr(os, 'sendfile') and (not var1131)):
        var620 = function1413
    else:
        var620 = function512

    @asyncio.coroutine
    def function2721(self, arg296):
        var151 = self.attribute1162
        var4300 = False
        # Serve a pre-gzipped sibling (<name>.gz) when the client accepts gzip.
        if ('gzip' in arg296.headers.get(hdrs.ACCEPT_ENCODING, '')):
            var530 = var151.with_name((var151.name + '.gz'))
            if var530.is_file():
                var151 = var530
                var4300 = True
        var2460 = var151.stat()
        var1186 = arg296.if_modified_since
        if ((var1186 is not None) and (var2460.st_mtime <= var1186.timestamp())):
            self.set_status(HTTPNotModified.status_code)
            # BUGFIX: `return yield from ...` is a SyntaxError; the yield
            # expression must be parenthesized (also fixed at 3 sites below).
            return (yield from super().function2721(arg296))
        (var4369, var4702) = mimetypes.guess_type(str(var151))
        if (not var4369):
            var4369 = 'application/octet-stream'
        var1746 = HTTPOk.status_code
        var2185 = var2460.st_size  # total file size
        arg2241 = var2185          # bytes to send (may shrink for a Range)
        try:
            var751 = arg296.http_range
            # BUGFIX: was `var751.var2553` (self-referential mangled name);
            # the HTTP Range object exposes `.start` / `.stop`.
            var2553 = var751.start
            var3528 = var751.stop
        except ValueError:
            self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
            return (yield from super().function2721(arg296))
        if ((var2553 is not None) or (var3528 is not None)):
            if ((var2553 is None) and (var3528 < 0)):
                # Suffix range: last -var3528 bytes.
                var2553 = (var2185 + var3528)
                arg2241 = (- var3528)
            else:
                # BUGFIX: `end` and `file_size` were undefined names left by
                # the decompiler; they correspond to var3528 / var2185.
                arg2241 = ((var3528 or var2185) - var2553)
            if ((var2553 + arg2241) > var2185):
                arg2241 = (var2185 - var2553)
            if (var2553 >= var2185):
                arg2241 = 0
        if (arg2241 != var2185):
            var1746 = HTTPPartialContent.status_code
        self.set_status(var1746)
        self.attribute2112 = var4369
        if var4702:
            self.headers[hdrs.CONTENT_ENCODING] = var4702
        if var4300:
            self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
        self.attribute1592 = var2460.st_mtime
        self.attribute1894 = arg2241
        if arg2241:
            with var151.open('rb') as var919:
                if var2553:
                    var919.seek(var2553)
                return (yield from self.var620(arg296, var919, arg2241))
        return (yield from super().function2721(arg296))
997,521 | b05fda2e75058756fbd8f19bb79d1646ed38c4e3 | # -*- coding:utf-8 -*-
'''
This module mainly handles the user's settings, which include
basic information and contact information.
Third-party (OAuth) authorization and user confirmation are also handled here.
'''
from __init__ import BaseHandler
from __init__ import USER_STATUS, AUTHORIZE_OPTIONS, set_image_size
import tornado.web
from hashlib import sha224
import os
import sys
JOB_NUM = 3
JOB_ORDER = ["第一职业", "第二职业", "第三职业"]
JOB_ID_ORDER = ["user-job-first", "user-job-second", "user-job-third"]
class SettingHandler(BaseHandler):
    """Route the user either to basic settings or to the init flow."""

    @tornado.web.authenticated
    def get(self):
        current = self.get_current_user()
        # Users that have not finished initialization go through /user/action/init.
        target = "/user/set-basic" if current.status >= USER_STATUS["normal"] else "/user/action/init"
        self.redirect(target)
class SetAvatarHandler(BaseHandler):
    """Show the avatar page and handle avatar updates."""

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        return self.render("user1.0beta/user-1.html", cuser=cuser)

    def post(self):
        # NOTE(review): SQL is built with %-interpolation from request input
        # -- SQL injection risk; switch to parameterized queries.
        cuid = int(self.get_secure_cookie("_yoez_uid"))
        img = self.get_argument("avatar")
        setrst = set_image_size((200, 200), img)
        upd_sql = "update user set img='%s' where uid=%d" % (img, cuid)
        img_sql = "select img from user where uid=%d" % cuid
        if setrst:
            oldimg = self.db.get(img_sql).img
            self.db.execute(upd_sql)
            # Remove the previous avatar file from disk.
            path = os.path.dirname(sys.argv[0])+oldimg
            #print path
            os.remove(path)
            result = dict(url="/"+str(cuid), status=1, code='')
        else:
            result = dict(url="/", status=0, code='set image error!')
        self.write(result)
class SetBasicHandler(BaseHandler):
    """Show and persist the user's basic profile information.

    Multi-valued columns are stored joined with '+' (job) or '&' (the rest).
    NOTE(review): indentation of the split block below is reconstructed
    from stripped source -- confirm against the original file.
    """

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        prosql = "select * from property where proper_id=%d" % cuser.uid
        infosql = "select * from basicinfo where bsc_id=%d" % cuser.uid
        pro = self.db.get(prosql)
        info = self.db.get(infosql)
        is_woman = int(pro.sex)
        joblen = 0
        if(not info.job or info.job == "--"):
            info.job = []
        else:
            info.job = info.job.split("+")
        if info.organ:
            info.organ = info.organ.split("&")
        if info.birth:
            info.birth = info.birth.split("&")
        if info.weight:
            info.weight = info.weight.split("&")
        if info.height:
            info.height = info.height.split("&")
        joblen = len(info.job)
        self.render("user1.0beta/user-2.html", cuser=cuser, is_woman=is_woman,
                    info=info, options=AUTHORIZE_OPTIONS, jobnum=JOB_NUM,
                    joborder=JOB_ORDER, jobid=JOB_ID_ORDER, joblen=joblen)

    def post(self):
        # NOTE(review): SQL is %-interpolated from request input -- SQL
        # injection risk; use parameterized queries.
        name = self.get_argument("name", "--")
        area = self.get_argument("area", "--")
        organ = self.get_argument("organ", "--")
        job = self.get_argument("job", "--")
        height = self.get_argument("height", "--")
        weight = self.get_argument("weight", "--")
        birth = self.get_argument("birth", "--")
        extend = self.get_argument("extend", "--")
        cuid = int(self.get_secure_cookie("_yoez_uid"))
        is_set = self.db.get(("select bsc_id,status from basicinfo join user "
                              "on basicinfo.bsc_id=user.uid where bsc_id=%d")
                             % cuid)
        result = dict()
        if is_set:
            updsql = ("update basicinfo set uname='%s',area='%s',organ='%s',"
                      "job='%s',height='%s',weight='%s',birth='%s',extend='%s'"
                      " where bsc_id=%d") % (name, area, organ, job, height,
                                             weight, birth, extend, cuid)
            self.db.execute(updsql)
            # Promote the account status the first time the info is filled in.
            if is_set.status < USER_STATUS["infoset"]:
                updusr = ("update user set status=%d where "
                          "uid=%d") % (USER_STATUS["infoset"], cuid)
                self.db.execute(updusr)
            result = dict(status=1, msg='')
        else:
            addsql = ("insert into basicinfo(bsc_id,uname,area,organ,job,"
                      "height,weight,birth,extend) values(%d,'%s','%s','%s',"
                      "'%s','%s','%s','%s','%s')") % (cuid, name, area, organ,
                                                      job, height, weight,
                                                      birth, extend)
            updusr = ("update user set status=%d where "
                      "uid=%d") % (USER_STATUS["infoset"], cuid)
            self.db.execute(addsql)
            self.db.execute(updusr)
            result = dict(status=1, msg='')
        self.write(result)
class SetContactHandler(BaseHandler):
    """Show and persist the user's contact info (phones, mails, social IDs)."""

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        is_authenticate = int(cuser.status) == USER_STATUS["authenticate"]
        info_sql = "select * from contactinfo where con_id=%d" % cuser.uid
        info = self.db.get(info_sql)
        # Multi-valued columns are stored '&'-joined; split for the template.
        if info.telphone:
            info.telphone = info.telphone.split("&")
        if info.conmail:
            info.conmail = info.conmail.split("&")
        if info.conaddress:
            info.conaddress = info.conaddress.split("&")
        if info.sinawb:
            info.sinawb = info.sinawb.split("&")
        if info.qqwb:
            info.qqwb = info.qqwb.split("&")
        if info.qq:
            info.qq = info.qq.split("&")
        if info.qzone:
            info.qzone = info.qzone.split("&")
        if info.renren:
            info.renren = info.renren.split("&")
        if info.douban:
            info.douban = info.douban.split("&")
        self.render("user1.0beta/user-3.html", cuser=cuser,
                    is_auth=is_authenticate, info=info,
                    options=AUTHORIZE_OPTIONS)

    def post(self):
        # NOTE(review): SQL below is %-interpolated from request input --
        # SQL injection risk; use parameterized queries.
        cuid = int(self.get_secure_cookie("_yoez_uid"))
        agent = int(self.get_argument("agent", 0))
        phone = self.get_argument("phone", "")
        mail = self.get_argument("mail", "")
        address = self.get_argument("address", "")
        sina = self.get_argument("sina", "")
        tqq = self.get_argument("tqq", "")
        qq = self.get_argument("qq", "")
        qzone = self.get_argument("qzone", "")
        renren = self.get_argument("renren", "")
        douban = self.get_argument("douban", "")
        # New rows get the numeric uid as the default personal domain.
        domain = str(cuid)
        is_contact_set = self.db.get(("select con_id from contactinfo where "
                                      "con_id=%d") % cuid)
        if is_contact_set:
            upd_sql = ("update contactinfo set agent_id=%d,telphone='%s',"
                       "conmail='%s',conaddress='%s',sinawb='%s',qqwb='%s',"
                       "qq='%s',qzone='%s',renren='%s',douban='%s' where "
                       "con_id=%d") % (agent, phone, mail, address, sina,
                                       tqq, qq, qzone, renren, douban, cuid)
            self.db.execute(upd_sql)
            result = dict(status=1, code='')
        else:
            add_sql = ("insert into contactinfo(con_id,agent_id,telphone,"
                       "conmail,conaddress,sinawb,qqwb,qq,qzone,renren,douban,"
                       "psldomain) values(%d,%d,'%s','%s','%s','%s','%s','%s',"
                       "'%s','%s','%s','%s')") % (cuid, agent, phone, mail,
                                                  address, sina, tqq, qq,
                                                  qzone, renren, douban,
                                                  domain)
            self.db.execute(add_sql)
            result = dict(status=1, code='')
        self.write(result)
class SetPasswordHandler(BaseHandler):
    """Change the current user's password after verifying the old one."""

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        return self.render("user1.0beta/user-4.html", cuser=cuser)

    def post(self):
        cuser = self.get_current_user()
        # NOTE(review): unsalted sha224 is weak password hashing -- prefer a
        # salted KDF (bcrypt/PBKDF2); SQL is also %-interpolated (injection risk).
        oldpsw = sha224(self.get_argument("oldpsw")).hexdigest()
        newpsw = sha224(self.get_argument("newpsw")).hexdigest()
        chksql = ("select account from user where uid=%d and password"
                  "='%s'") % (cuser.uid, oldpsw)
        chkresult = self.db.get(chksql)
        result = {}
        if chkresult:
            updatesql = ("update user set password='%s' where "
                         "uid=%d") % (newpsw, cuser.uid)
            self.db.execute(updatesql)
            result = dict(status=1, code='')
        else:
            result = dict(status=0, code='你无权修改他人密码')
        self.write(result)
class SetDomainHandler(BaseHandler):
    """View and update the user's personal domain (psldomain)."""

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        dmsql = "select * from contactinfo where con_id=%d" % cuser.uid
        domain = self.db.get(dmsql)
        return self.render("user1.0beta/user-5.html",
                           cuser=cuser, domain=domain)

    def post(self):
        # NOTE(review): SQL is %-interpolated from user input -- SQL
        # injection risk; switch to parameterized queries.
        domain = self.get_argument("domain", None)
        result = dict(status=0, msg="缺少参数")
        if domain:
            cuid = int(self.get_secure_cookie("_yoez_uid"))
            is_domain_set = self.db.get(("select con_id from contactinfo "
                                         "where con_id=%d") % cuid)
            if not is_domain_set:
                addsql = ("insert into contactinfo(con_id,psldomain) values"
                          "(%d,'%s')") % (cuid, domain)
                # BUGFIX: this branch executed the undefined name `updsql`
                # (guaranteed NameError); the freshly built INSERT is addsql.
                self.db.execute(addsql)
                result = dict(status=1, msg="设置成功")
            else:
                chksql = ("select psldomain from contactinfo where psldomain"
                          "='%s'") % domain
                chkrst = self.db.get(chksql)
                if chkrst:
                    result = dict(status=0, msg="该域名已存在,请再选一个")
                else:
                    updsql = ("update contactinfo set psldomain='%s' where "
                              "con_id=%d") % (domain, cuid)
                    self.db.execute(updsql)
                    result = dict(status=1, msg="设置成功")
        self.write(result)
class SetAuthHandler(BaseHandler):
    """Render the third-party authorization settings page."""

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        self.render("user1.0beta/user-6.html", cuser=cuser)
class SetConfirmHandler(BaseHandler):
    """Render the account confirmation overview page."""

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        self.render("user1.0beta/user-7.html", cuser=cuser)
class SetRealnameConfirmHandler(BaseHandler):
    """Render the real-name verification page."""

    @tornado.web.authenticated
    def get(self):
        cuser = self.get_current_user()
        self.render("user1.0beta/user-7-1-1.html", cuser=cuser)
# URL routing table for the settings handlers, consumed by the application
# factory when assembling the tornado Application.
HandlerList = [
    (r"/user/setting", SettingHandler),
    (r"/user/set-avatar", SetAvatarHandler),
    (r"/user/set-basic", SetBasicHandler),
    (r"/user/set-contact", SetContactHandler),
    (r"/user/set-password", SetPasswordHandler),
    (r"/user/set-domain", SetDomainHandler),
    (r"/user/set-auth", SetAuthHandler),
    (r"/user/set-confirm", SetConfirmHandler),
    (r"/user/set/confirm-realname", SetRealnameConfirmHandler),
]
|
997,522 | ff1f329f1c821b05418ee9482ac0504c3ecd1dc1 | from django.db import models
class Fruitmodel(models.Model):
    """Catalog entry for a fruit shown in the store front."""
    fruitname=models.CharField(max_length=40)
    price=models.IntegerField()
    desc=models.CharField(max_length=300)
    favourite=models.BooleanField()
    imgpath=models.CharField(max_length=40)
    rating=models.IntegerField()
    color=models.CharField(max_length=20)
class usermodel(models.Model):
    """Account record (name kept lowercase to match existing references)."""
    username=models.CharField(max_length=40)
    # NOTE(review): password stored in plain text -- hash it (e.g. use the
    # Django auth framework) before accepting real credentials.
    password=models.CharField(max_length=40)
    userid=models.CharField(primary_key=True,max_length=40)
class cartmodel(models.Model):
    """One product line in a user's shopping cart."""
    productid=models.CharField(max_length=40)
    userid=models.CharField(max_length=40)
    count=models.IntegerField()
class favmodel(models.Model):
    """Marks a product as a favourite for a user."""
    productid=models.CharField(max_length=40)
    userid=models.CharField(max_length=40)
    # Stored as a short string flag rather than a BooleanField.
    isfav=models.CharField(max_length=5)
|
997,523 | 16287e31dbf2e632a97f1cc29e947458a143e12f | import torch
import numpy as np
import cv2
from os import listdir
import pandas as pd
try:
    from itertools import ifilterfalse
except ImportError:  # py3k
    # BUGFIX: the Python 3 fallback must bind the name `ifilterfalse`,
    # otherwise mean(..., ignore_nan=True) raises NameError on Python 3.
    from itertools import filterfalse as ifilterfalse
def mean(l, ignore_nan=False, empty=0):
    """Lazily average an iterable; nanmean-compatible with generators.

    With ``empty='raise'`` an empty input raises ValueError; otherwise the
    ``empty`` value is returned.
    """
    it = iter(l)
    if ignore_nan:
        it = ifilterfalse(np.isnan, it)
    count = 1
    try:
        total = next(it)
    except StopIteration:
        if empty == 'raise':
            raise ValueError('Empty mean')
        return empty
    for count, item in enumerate(it, 2):
        total += item
    # A single element is returned untouched (no division).
    return total if count == 1 else total / count
def iou_binary(preds, labels, EMPTY=1., ignore=None, per_image=True):
    """
    IoU for foreground class
    binary: 1 foreground, 0 background
    """
    if not per_image:
        preds, labels = (preds,), (labels,)
    scores = []
    for pred, label in zip(preds, labels):
        fg_pred = (pred == 1)
        fg_label = (label == 1)
        intersection = (fg_label & fg_pred).sum()
        union = (fg_label | (fg_pred & (label != ignore))).sum()
        scores.append(EMPTY if not union else float(intersection) / union)
    # mean across images when per_image, then scale to a percentage
    return 100 * mean(scores)
def jaccard(y_true, y_pred):
    """Soft Jaccard index with eps smoothing; no averaging is done here."""
    overlap = (y_true * y_pred).sum()
    total = y_true.sum() + y_pred.sum() - overlap
    return (overlap + 1e-15) / (total + 1e-15)
def dice(y_true, y_pred):
    """Soft Dice coefficient with eps smoothing; no averaging is done here."""
    overlap = (y_true * y_pred).sum()
    return (2 * overlap + 1e-15) / (y_true.sum() + y_pred.sum() + 1e-15)
def save_model(cust_model, name="fcn.pt"):
    """Persist the wrapped (e.g. DataParallel) model's state dict to *name*."""
    state = cust_model.module.state_dict()
    torch.save(state, name)
def load_model(cust_model, model_dir="./fcn.pt", map_location_device="cpu"):
    """Load weights from *model_dir* into *cust_model* and set eval mode.

    map_location_device: "cpu" remaps tensors to CPU; "gpu" loads with the
    checkpoint's original device placement.

    BUGFIX/robustness: an unrecognized device string previously skipped the
    load silently and returned an un-initialized model; now raises ValueError.
    """
    if map_location_device == "cpu":
        cust_model.load_state_dict(torch.load(model_dir, map_location=map_location_device))
    elif map_location_device == "gpu":
        cust_model.load_state_dict(torch.load(model_dir))
    else:
        raise ValueError("map_location_device must be 'cpu' or 'gpu', got %r"
                         % (map_location_device,))
    cust_model.eval()
    return cust_model
def output_trchtensor_to_numpy(torch_tensor):
    """Drop the batch dim, move channels last, squeeze, return a numpy array."""
    arr = torch_tensor.squeeze(0).permute(1, 2, 0).squeeze()
    return arr.detach().numpy()
def mask_overlay(image, mask, color=(0, 1, 0)):
    """
    Helper function to visualize mask on the top of the image
    """
    # Broadcast the single-channel mask to 3 channels, tinted by `color`.
    mask = np.dstack((mask, mask, mask)) * np.array(color)
    # NOTE(review): cv2.addWeighted requires matching dtypes -- assumes
    # `image` and the tinted mask share dtype/range; confirm at call sites.
    weighted_sum = cv2.addWeighted(mask, 0.5, image, 0.5, 0.)
    img = image.copy()
    # Replace only pixels where the mask's green channel is set.
    ind = mask[:, :, 1] > 0
    img[ind] = weighted_sum[ind]
    return img
def make_mask_overlay(mask_img, orig_img, mask_alpha):
    """Blend the mask image over the original and write the result to disk."""
    original_img = cv2.imread(orig_img)
    mask = cv2.imread(mask_img)
    overlayed_img = cv2.addWeighted(original_img, 1, mask, mask_alpha, 0, 0)
    # NOTE(review): appends ".png" after the full original path (e.g.
    # "overlayed_photo.jpg.png") -- presumably intentional; verify.
    cv2.imwrite("overlayed_" + orig_img + ".png", overlayed_img)
def get_ids_from_file_in_list(path):
    """Return the directory entries of *path*, sorted alphabetically."""
    return sorted(listdir(path))
|
997,524 | 93fce8c8f28a642242cbec866830b52e2e0fa5a6 | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test 'stratisd'.
"""
import time
import unittest
from dbus_python_client_gen import DPClientInvalidArgError
from stratisd_client_dbus import Manager
from stratisd_client_dbus import ObjectManager
from stratisd_client_dbus import get_object
from stratisd_client_dbus._constants import TOP_OBJECT
from .._misc import Service
class StratisTestCase(unittest.TestCase):
    """
    Test meta information about stratisd.
    """

    def setUp(self):
        """
        Start the stratisd daemon with the simulator.
        """
        self._service = Service()
        self._service.setUp()
        # Give the daemon a moment to come up before connecting over D-Bus.
        time.sleep(1)
        self._proxy = get_object(TOP_OBJECT)
        Manager.Methods.ConfigureSimulator(self._proxy, {'denominator': 8})

    def tearDown(self):
        """
        Stop the stratisd simulator and daemon.
        """
        self._service.tearDown()

    def testStratisVersion(self):
        """
        Getting version should succeed.
        Major version number should be 0.
        """
        version = Manager.Properties.Version.Get(get_object(TOP_OBJECT))
        (major, _, _) = version.split(".")
        self.assertEqual(major, "0")
class StratisTestCase2(unittest.TestCase):
    """
    Test exceptions raised by various errors.
    """

    def setUp(self):
        """
        Start the stratisd daemon with the simulator.
        """
        self._service = Service()
        self._service.setUp()
        # Give the daemon a moment to come up before connecting over D-Bus.
        time.sleep(1)
        self._proxy = get_object(TOP_OBJECT)
        Manager.Methods.ConfigureSimulator(self._proxy, {'denominator': 8})

    def tearDown(self):
        """
        Stop the stratisd simulator and daemon.
        """
        self._service.tearDown()

    def testArguments(self):
        """
        Incorrect arguments should cause a type error.
        """
        with self.assertRaises(TypeError):
            Manager.Properties.Version.Get(get_object(TOP_OBJECT), {})

    def testFunctionName(self):
        """
        We know that it is impossible to set the Stratis version, so Set
        method should not exist, and this should result in an Attribute error.
        """
        with self.assertRaises(AttributeError):
            Manager.Properties.Version.Set(get_object(TOP_OBJECT), {})

    def testFunctionArgs(self):
        """
        If the arguments to the D-Bus method are incorrect, the exception is
        a DPClientInvalidArgError.
        Incorrectness can be caused by incorrect keyword args, but also
        by incorrect type of argument.
        """
        with self.assertRaises(DPClientInvalidArgError):
            ObjectManager.Methods.GetManagedObjects(self._proxy, {'bogus': 2})
        with self.assertRaises(DPClientInvalidArgError):
            Manager.Methods.DestroyPool(self._proxy, {'pool': 2})
|
997,525 | 501767568dfdb71160b2a5d00eebdb2a7eab080a |
# -*- coding: utf-8 -*-
import jinja2
from mail.mail import sendpost
from contextlib import closing
import ConfigParser
import sqlite3
import sys, getopt
def main(argv):
    """Mail the next unposted row of the configured sqlite database.

    argv: command-line args; -s <section> selects a mysettings.ini
    section (default CATS). Picks one row with posted=0, renders the
    section's meta/body jinja templates, mails the result via sendpost,
    then marks the row as posted.
    """
    # Python 2 hack so the non-ASCII subject separator below encodes cleanly.
    reload(sys)
    sys.setdefaultencoding('utf8')

    section = 'CATS'
    opts, args = getopt.getopt(argv, "s:")
    for opt, arg in opts:
        # Only honour the flag we declared; the original overwrote
        # `section` for *any* option.
        if opt == '-s':
            section = arg.upper()

    tplLoader = jinja2.FileSystemLoader(searchpath='./templates/' + section.lower())
    tplEnv = jinja2.Environment(loader=tplLoader)

    config = ConfigParser.ConfigParser()
    config.read('./mysettings.ini')
    server = config.get(section, 'server')
    user = config.get(section, 'username')
    password = config.get(section, 'password')
    recipient = config.get(section, 'recipient')
    debug = config.get(section, 'debug')
    dbpath = config.get(section, 'dbpath')
    kw = config.get(section, 'keywords').split(",")

    try:
        with closing(sqlite3.connect(dbpath, timeout=1)) as conn:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT * FROM content WHERE posted=0 LIMIT 1
            """)
            metadata = cursor.fetchone()
            if metadata is None:
                # Nothing left to post; the original crashed with a
                # TypeError on metadata[2] in this case.
                return
            tplVars = {
                'parts': range(1, 11),
                'lines': range(5, 8),
                'title': metadata[2],
                'keywords': kw
            }
            template = tplEnv.get_template("meta.jinja")
            meta = template.render(tplVars)
            subject = metadata[2] + '¤' + metadata[1] + '¤' + kw[0] + '¤' + meta + '¤' + str(metadata[5])
            template = tplEnv.get_template("body.jinja")
            body = template.render(tplVars)
            sendpost(recipient, user, subject, body, metadata[3], server, password, debug)
            cursor.execute("""
                UPDATE content SET posted = 1 WHERE id = ?
            """, (metadata[0],))
            conn.commit()
            cursor.close()
    except ValueError:
        raise Exception("Failed to send post")
    except sqlite3.Error as e:
        # 'as e' works on Python 2.6+ too, unlike the old comma syntax.
        raise Exception("Error in silk: %s" % e)
# Script entry point: forward the CLI flags (e.g. -s <section>) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
|
997,526 | 61f8c7e3f123ba1f710b8ce64a843962c7b4fe22 | # coding: utf-8
import logging
import os
import tweepy
# https://github.com/tweepy/tweepy
# https://dev.twitter.com/docs
# https://dev.twitter.com/apps/ID/show
def tweet(message, consumer_key=None, consumer_secret=None, access_token=None, access_token_secret=None, debug=False):
    """Post *message* (truncated to 140 chars) to Twitter via OAuth.

    Credentials fall back to the TWITTER_* environment variables when the
    corresponding arguments are None. With debug=True nothing is sent.
    Errors are logged and swallowed so a failed tweet never interrupts
    the application.
    """
    env = os.environ.get
    consumer_key = env('TWITTER_CONSUMER_KEY', consumer_key)
    consumer_secret = env('TWITTER_CONSUMER_SECRET', consumer_secret)
    access_token = env('TWITTER_ACCESS_TOKEN', access_token)
    access_token_secret = env('TWITTER_ACCESS_TOKEN_SECRET', access_token_secret)
    if debug:
        return
    try:
        authenticator = tweepy.auth.OAuthHandler(consumer_key, consumer_secret)
        authenticator.set_access_token(access_token, access_token_secret)
        tweepy.API(authenticator).update_status(message[0:140])
    except Exception as e:
        logging.error('%s' % str(e))
        logging.error('Fail to tweet: %s' % message)
        logging.exception(e)
        # We do not want to interrupt the application because a tweet error.
|
997,527 | 0b686e358ccfef3aa8e4ef429e811f1ebfbf4ab5 | # coding=utf-8
import json
import re
import requests
from bs4 import BeautifulSoup
import sys
dir_path = sys.argv[1]
#url = 'https://buy.yungching.com.tw/region/%E5%8F%B0%E5%8C%97%E5%B8%82-_c/'
header_s={
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding':'gzip, deflate, sdch, br',
'Accept-Language':'en-US,en;q=0.8',
'Cache-Control':'no-cache',
'Connection':'keep-alive',
'Cookie':'_last_search_data=%7B%22searchFor%22%3A%22all%22%2C%22mainType%22%3A%22region%22%2C%22addr%22%3A%5B%5B%22%E5%8F%B0%E5%8C%97%E5%B8%82%22%5D%2C%5B%22%22%5D%5D%2C%22mrt%22%3A%5B%5D%2C%22isMap%22%3Afalse%2C%22price%22%3A%5B%22%22%2C%22%22%5D%2C%22pyeong%22%3A%5B%22%22%2C%22%22%5D%2C%22keywords%22%3A%5B%22%22%5D%2C%22filterBy%22%3A%5B%22%22%5D%2C%22sortBy%22%3A%5B%22undefined%22%5D%2C%22advConditions%22%3A%7B%22car%22%3A%5B%5D%2C%22houseType%22%3A%5B%5D%2C%22accessible%22%3A%5B%5D%2C%22houseAge%22%3A%5B%5D%2C%22directions%22%3A%5B%5D%2C%22floors%22%3A%7B%22sp%22%3A%5B%22false%22%5D%2C%22val%22%3A%5B%5D%7D%2C%22rooms%22%3A%7B%22sp%22%3A%5B%22false%22%5D%2C%22val%22%3A%5B%5D%7D%2C%22sp%22%3A%5B%5D%7D%2C%22od%22%3A%22%22%2C%22pyeongType%22%3A0%2C%22coords%22%3A%22%22%2C%22searchBland%22%3A%22%E5%85%A8%E9%83%A8%E4%BB%B2%E4%BB%8B%22%2C%22originalDomain%22%3A%22%22%7D; userid=4ab02f33-f90f-4578-bde1-e0799e65d646; TRID_G=e0bcf196-3909-493e-acd2-795fb5d4b925; ez2o_UNID=1522215564489490; __ltmwga=utmcsr=(direct)|utmcmd=(none); WMX_Channel=,1,; _last_search_data=%7B%22searchFor%22%3A%22all%22%2C%22mainType%22%3A%22region%22%2C%22addr%22%3A%5B%5B%22%E5%8F%B0%E5%8C%97%E5%B8%82%22%5D%2C%5B%22%22%5D%5D%2C%22mrt%22%3A%5B%5D%2C%22isMap%22%3Afalse%2C%22price%22%3A%5B%22%22%2C%22%22%5D%2C%22pyeong%22%3A%5B%22%22%2C%22%22%5D%2C%22keywords%22%3A%5B%22%22%5D%2C%22filterBy%22%3A%5B%22%22%5D%2C%22sortBy%22%3A%5B%22undefined%22%5D%2C%22advConditions%22%3A%7B%22car%22%3A%5B%5D%2C%22houseType%22%3A%5B%5D%2C%22accessible%22%3A%5B%5D%2C%22houseAge%22%3A%5B%5D%2C%22directions%22%3A%5B%5D%2C%22floors%22%3A%7B%22sp%22%3A%5B%22false%22%5D%2C%22val%22%3A%5B%5D%7D%2C%22rooms%22%3A%7B%22sp%22%3A%5B%22false%22%5D%2C%22val%22%3A%5B%5D%7D%2C%22sp%22%3A%5B%5D%7D%2C%22od%22%3A%22%22%2C%22pyeongType%22%3A0%2C%22coords%22%3A%22%22%2C%22searchBland%22%3A%22%E5%85%A8%E9%83%A8%E4%BB%B2%E4%BB%8B%22%2C%22originalDomain%22%3A%22%22%7D; userid=4ab02f33-f90f-4578-bde1-e0799e65d646; _gat_UA-35108030-1=1; 
_dc_gtm_UA-35108030-1=1; yawbewkcehc=0; __asc=582560d51626b1e3bd135ad7d44; __auc=5983cf801626561cf77f6c77d0b; __ltm_https_flag=true; _pk_id.5.f7c6=0225278645f347ab.1522119332.2.1522216079.1522215567.; _pk_ses.5.f7c6=*; _uetsid=_uetafb7b681; _ga=GA1.4.1967887631.1522119332; _gid=GA1.4.1424818617.1522215566',
'Host':'buy.yungching.com.tw',
'Pragma':'no-cache',
'Referer':'https://buy.yungching.com.tw/region/%E5%8F%B0%E5%8C%97%E5%B8%82-_c/',
'Upgrade-Insecure-Requests':'1',
'User-Agent':'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36'
}
###
def get_items_from_page(url, header_s, pg_num):
    """Scrape one listing page and dump the parsed items as JSON.

    Fetches <url>?pg=<pg_num>, extracts every '.m-list-item' entry
    (description, details, tags, price, title, address, detail link) and
    writes them to <dir_path>/yungching_items_pg<pg_num>.json.
    """
    real_url = url + '?pg=' + str(pg_num)
    yungching_items = {}
    items_info = []
    try:
        res = requests.get(real_url, headers=header_s)
        res.encoding = 'utf-8'
        soup = BeautifulSoup(res.text, "html5lib")
        items_list = soup.select('.l-item-list .m-list-item')
        total_items_num = len(items_list)
        print("total items num = %s" % total_items_num)
        for num in range(total_items_num):
            one_item_info = {}
            # Collapse whitespace runs; strip non-breaking / ideographic spaces.
            one_item_info['item-description'] = re.sub(' +', ' ', items_list[num].select('.item-info .item-description')[0].text.replace('\xa0', '').replace('\n', ' ').replace('\u3000', ''))
            one_item_info['item-info-detail'] = re.sub(' +', ' ', items_list[num].select('.item-info .item-info-detail')[0].text.replace('\n', ' ').strip())
            item_tags_num = len(items_list[num].select('.item-info .item-tags span'))
            print('tags num = %s' % item_tags_num)
            one_item_info['item-tags'] = []
            for tag_num in range(item_tags_num):
                one_item_info['item-tags'].append(items_list[num].select('.item-info .item-tags span')[tag_num].text)
            one_item_info['item-price'] = items_list[num].select('.item-price .price .price-num')[0].text
            # The anchor's title attribute is "<name> <address>".
            one_item_info['title'] = items_list[num].select('.item-info a')[0]['title'].split(' ', 1)[0]
            one_item_info['address'] = items_list[num].select('.item-info a')[0]['title'].split(' ', 1)[1]
            one_item_info['detail-href'] = items_list[num].select('.item-info a')[0]['href']
            items_info.append(one_item_info)
            print('%s append succeed' % str(num))
        yungching_items['items_info'] = items_info
        with open(dir_path + '/yungching_items_pg' + str(pg_num) + '.json', 'w', encoding='utf-8') as outfile:
            json.dump(yungching_items, outfile, ensure_ascii=False)
        print('[INFO]Done crawl page %s' % pg_num)
    except requests.RequestException:
        # Only network-level failures actually mean the URL is bad.
        print("wrong url")
    except Exception as e:
        # The original bare `except` reported every failure (markup changes,
        # missing selectors, file errors) as "wrong url", hiding real bugs.
        print("failed to crawl page %s: %s" % (pg_num, e))
def get_total_pg_num(url, header_s):
    """Return the number of the last result page.

    Reads the href of the pagination link labelled buy_page_last and
    extracts its pg= query parameter.
    """
    res = requests.get(url, headers=header_s)
    res.encoding = 'utf-8'
    soup = BeautifulSoup(res.text, "html5lib")
    last_href = soup.select('.m-pagination-bd li a[ga_label="buy_page_last"]')[0]['href']
    # Raw string: '\d' in a plain literal is an invalid escape sequence.
    total_pg_num = int(re.findall(r'pg=(\d{,5})', last_href)[0])
    return total_pg_num
def yungching_crawler(url, header_s):
    """Crawl every result page of *url*, writing one JSON dump per page."""
    last_page = get_total_pg_num(url, header_s)
    print('total pages num = %s' % last_page)
    for page in range(1, last_page + 1):
        get_items_from_page(url, header_s, page)
url = 'https://buy.yungching.com.tw/region/%E5%8F%B0%E5%8C%97%E5%B8%82-_c/'
yungching_crawler(url, header_s) |
997,528 | 6f860612a06d2c4ce10172c7183e90e892ce76a3 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import visualize.models
from django.conf import settings
class Migration(migrations.Migration):
    # Initial schema for the visualize app: Album (per-user, unique name),
    # Photo (image file belonging to an album) and per-photo
    # VisualizationMetadata.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Album',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=32)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Photo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('image_file', models.ImageField(height_field=b'height', width_field=b'width', upload_to=visualize.models.create_file_path)),
                ('width', models.IntegerField(blank=True)),
                # NOTE(review): 'heigth' is misspelled but must stay in sync with
                # the model definition (and ImageField's height_field=b'height'
                # names a different attribute) -- confirm against the model
                # before renaming in a follow-up migration.
                ('heigth', models.IntegerField(blank=True)),
                ('album', models.ForeignKey(to='visualize.Album')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='VisualizationMetadata',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # NOTE(review): max_length is ignored on IntegerField; presumably
                # this was meant as a max *value* (0xFFFFFF, 24-bit RGB) -- verify.
                ('ac_hex_color_avg', models.IntegerField(max_length=16777215)),
                ('ac_color_sort_order', models.IntegerField()),
                ('image_file', models.ForeignKey(to='visualize.Photo')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # One album name per user.
        migrations.AlterUniqueTogether(
            name='album',
            unique_together=set([('user', 'name')]),
        ),
    ]
|
997,529 | f673e10486e7039981a185af597ac7aa24dbbf40 | import wikipedia
import os
# Fetch the plain-text content of every Wikipedia page listed (one title
# per line) in pageTitleList.txt and save each to output/<page title>.txt.
with open("pageTitleList.txt", "r") as inpt:
    lines = inpt.readlines()

os.makedirs("output", exist_ok=True)

for line in lines:
    line = line.strip()
    print("Looking for: \""+line+"\"")
    try:
        page = wikipedia.page(line)
        print("found: \""+line+"\"")
        # Page content is Unicode; write UTF-8 explicitly so this does not
        # crash on platforms whose default encoding cannot represent it.
        with open("output/"+page.title+".txt", "w", encoding="utf-8") as f:
            f.write(page.content)
        # Report the file actually written: wikipedia may resolve *line* to a
        # differently-titled page, so use page.title, not line.
        print("Written to file: output/"+page.title+".txt \n--------------------------------------------------\n")
    except Exception as e:
        print(e)
        print("Skipping fetch for \""+line+"\" \n--------------------------------------------------\n")
|
997,530 | 5dc150eb93d74597d283b3dc0fed204ec90cee52 | # -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Name: web_ui.py
# Purpose: The image svg file handler module
#
# Author: Richard Liao <richard.liao.i@gmail.com>
#
#----------------------------------------------------------------------------
from trac.core import *
from trac.web.chrome import *
from trac.util.html import html
from trac.web import IRequestHandler
from trac.web.api import RequestDone, HTTPException
from pkg_resources import resource_filename
import sys, os
import time
__all__ = ['ImageSvg']
class ImageSvg(Component):
    """Serve SVG files stored under the Trac environment directory at /svg/*."""

    implements(
        IRequestHandler,
    )

    # IRequestHandler methods
    def match_request(self, req):
        """Claim every request whose path starts with /svg."""
        return req.path_info.startswith("/svg")

    def process_request(self, req):
        """Stream the requested SVG file, or 404 if missing/forbidden."""
        if req.path_info.startswith("/svg"):
            pathSegs = req.path_info.split("/")
            image_path = "/".join(pathSegs[2:])
            f = os.path.join(self.env.path, image_path)
            # Security: image_path comes straight from the request URL, so a
            # crafted "../" path could previously read any file on the host.
            # Resolve both paths and refuse anything outside env.path.
            base = os.path.realpath(self.env.path)
            target = os.path.realpath(f)
            if not target.startswith(base + os.sep):
                raise HTTPException(404)
            try:
                message = open(target).read()
            except (IOError, OSError):
                # Missing/unreadable file only; other bugs should surface,
                # unlike the original bare `except`.
                raise HTTPException(404)
            req.send_response(200)
            req.send_header('Cache-control', 'no-cache')
            req.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT')
            req.send_header('Content-Type', 'image/svg+xml')
            req.send_header('Content-Length', len(message))
            req.end_headers()
            if req.method != 'HEAD':
                req.write(message)
            raise RequestDone
|
997,531 | 21930a951dac4e7df14565149a70c28c075cd6d3 | '''<><><><><><><><><><><><><><><><><><><><><><><><>
TouchPlus
This defines an augmented touch object that includes
its own simple tracking algorithms, a sprite representation,
and gesture area calculations
BEGIN
<><><><><><><><><><><><><><><><><><><><><><><><>'''
import scene
import console
from colorsys import hsv_to_rgb
from random import random
class TouchPlus():
    '''
    A touch object with functionality beyond scene.Touch: simple tracking,
    an optional on-screen sprite representation, and gesture-area
    calculations.

    Positional Args:
        parent : a scene.Scene in which to draw the touch as a sprite

    kwargs:
        'position'   : scene.Point where the touch starts
        'start_time' : timestamp (from scene.t) when the touch started
        'visible'    : True/False, whether to show the touch as a sprite
        'touch_id'   : unique id of the touch (scene.touch.touch_id)

    Attributes:
        start_position : starting position (scene.Point)
        last_position  : previous position (scene.Point)
        location       : alias for the current position
        sprite         : graphical representation shown on the Scene
        area           : scene.Rect the touch has covered start-to-now
    '''

    def __init__(self, parent=None, **kwargs):
        if not isinstance(parent, scene.Scene):
            # No usable parent scene: the touch can never be drawn.
            self.visible = False
            self.parent_scene = None
        else:
            self.parent_scene = parent
        # Optional keyword configuration (defaults match the original
        # if/else ladder; the 'visible' kwarg wins over the parent check).
        self.start_position = kwargs.get('position', scene.Point(0, 0))
        self.start_time = kwargs.get('start_time', 0)
        self.visible = kwargs.get('visible', False)
        self.touch_id = kwargs.get('touch_id', None)
        # Tracking state.
        self.A = scene.Action
        self.last_position = self.position = self.start_position
        self.location = self.position
        self.last_time = self.time = self.start_time
        self.gesture_area = None
        self.sprite = None
        self.area = scene.Rect(self.start_position.x, self.start_position.y, 1, 1)
        if self.visible:
            self.show()
        else:
            self.hide()

    def duration(self, time=0):
        '''
        Update / report the touch duration.

        Given a non-zero time (best obtained from scene.t) this stores the
        new time and returns the delta since the touch started. With no
        argument it returns the delta up to the last recorded time.
        '''
        if time > 0:
            self.last_time = self.time
            self.time = time
        return self.time - self.start_time

    def hide(self):
        '''
        Remove any on-screen representation of the touch and disable
        visibility.
        '''
        self.remove_highlight()
        if self.sprite is not None:
            self.sprite.remove_from_parent()
            del self.sprite
            self.sprite = None
        # BUG FIX: the original assigned 'self.visble' here, so the real
        # 'visible' flag was never cleared.
        self.visible = False

    def highlight(self):
        '''
        Draw/refresh a translucent rectangle from the start position to the
        current position. Creates the gesture_area node on first call, then
        only updates its geometry.
        '''
        start = self.start_position
        stop = self.position
        new = False
        if self.gesture_area is None:
            self.gesture_area = scene.SpriteNode(color='#4db9ff', alpha=0.2)
            new = True
        self.gesture_area.position = ((start + stop) / 2)
        delta = stop - start
        self.gesture_area.size = (abs(delta.x), abs(delta.y))
        if self.visible and new:
            self.parent_scene.add_child(self.gesture_area)

    def remove_highlight(self):
        '''
        Remove the highlighted-area representation from the screen.
        '''
        if self.gesture_area is not None:
            self.gesture_area.remove_from_parent()
            del self.gesture_area
            self.gesture_area = None

    def show(self, parent=None):
        '''
        Turn on the on-screen representation, optionally re-parenting the
        touch first. If no parent scene is available, nothing is drawn and
        visibility stays off.
        '''
        if parent is not None:
            if self.parent_scene is not None and self.sprite is not None:
                self.sprite.remove_from_parent()  # Remove from the current parent
            self.parent_scene = parent  # Assign a new parent for this touch
        if self.parent_scene is not None:
            self.visible = True  # The touch has a parent, therefore it can be shown
            if self.sprite is None:
                self.sprite = scene.SpriteNode(
                    texture='shp:wavering',
                    position=self.position,
                    color=hsv_to_rgb(random(), 1, 1))  # Visible representation
                self.parent_scene.add_child(self.sprite)  # Add it to the parent scene
        else:
            # BUG FIX: original wrote 'self.visble' here too.
            self.visible = False  # If no parent scene, then there cannot be a sprite shown
            self.sprite = None

    def update(self, pos, time=0):
        '''
        Record a new position (and optionally time) for the touch.

        Refreshes last_position, position, location and area, and glides
        the sprite (if shown) to the new location.
        '''
        self.duration(time)
        self.last_position = self.position
        self.position = pos
        self.location = self.position
        wh = self.position - self.start_position
        self.area = scene.Rect(self.start_position.x,
                               self.start_position.y,
                               wh.x, wh.y)
        if self.sprite is not None:
            self.sprite.run_action(self.A.move_to(pos.x, pos.y, 0.01))
'''<><><><><><><><><><><><><><><><><><><><><><><><>
touch_plus.py file END
<><><><><><><><><><><><><><><><><><><><><><><><>'''
|
997,532 | 30258c9d63c6a694b54da8d653dc8f6d43142125 | # Copyright (c) OpenMMLab. All rights reserved.
import torch
from mmagic.models.editors.nafnet.naf_layerNorm2d import LayerNorm2d
def test_layer_norm():
    """LayerNorm2d of a constant (all-ones) feature map must be all zeros."""
    x = torch.ones((1, 3, 64, 64))
    expected = torch.zeros((1, 3, 64, 64))
    norm = LayerNorm2d(x.shape[1])
    y = norm(x)
    assert y.shape == expected.shape
    assert torch.all(torch.eq(y, expected))
|
997,533 | b9ad24f008dbe6a0cb1b6b23e732d7ae26d1f774 | import subprocess
import sys
import pytest
import inspect
from test_utils import *
import os.path
import time
import random
# try:
# import memory_profiler
# except ImportError:
# subprocess.check_call([sys.executable, "-m", "pip", "install", 'memory-profiler'])
# finally:
# import memory_profiler
# from memory_profiler import memory_usage
import session5 as session
def test_readme_file_for_formatting():
f = open("README.md", "r", encoding="utf-8")
content = f.read()
f.close()
assert content.count("#") >= 5
def test_fourspace_equal():
    """session5 indentation must use multiples of four spaces only."""
    result = fourspace(session)
    assert result == False, 'Not all spaces before lines are a multiple of 4!'

def test_function_names():
    """session5 function names must not contain capital letters."""
    result = function_name_had_cap_letter(session)
    assert result == False, "One of your function has a capitalized alphabet!"
def test_readme_exists():
assert os.path.isfile("README.md"), "README.md file missing!"
def test_readme_proper_description():
    """Every function defined in session5 must be mentioned by name in README.md.

    The original reported the *last* name checked even when a different one
    was missing (and left the file handle plus a dead `pass` behind); this
    version collects all missing names and reports a genuinely missing one.
    """
    README_CONTENT_CHECK_FOR = [name for name, _ in inspect.getmembers(session, inspect.isfunction)]
    with open("README.md", "r") as f:
        content = f.read()
    missing = [c for c in README_CONTENT_CHECK_FOR if c not in content]
    c = missing[0] if missing else None
    assert not missing, f"You have not described {c} function well in your README.md file"
def test_readme_contents():
readme_words=[word for line in open('README.md', 'r', encoding="utf-8") for word in line.split()]
assert len(readme_words) >= 100, "Make your README.md file interesting! Add atleast 500 words"
def test_function_name_had_cap_letter():
functions = inspect.getmembers(session, inspect.isfunction)
for function in functions:
assert len(re.findall('([A-Z])', function[0])) == 0, "You have used Capital letter(s) in your function names"
def test_create_deck_using_lambda_zip_map():
    """The deck must enumerate every suit in order, each with vals 2..ace."""
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    # Same 52-entry list the original spelled out literally.
    expected = ['{}-{}'.format(s, v) for s in suits for v in vals]
    assert session.create_deck_using_lambda_zip_map(vals, suits) == expected, 'Not Expected output. Please validate logic'

def test_create_deck_using_list_comprehension():
    """Same expectation as above for the list-comprehension variant."""
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    expected = ['{}-{}'.format(s, v) for s in suits for v in vals]
    assert session.create_deck_using_list_comprehension(vals, suits) == expected, 'Not Expected output. Please validate logic'
def test_create_deck_using_lambda_zip_map_performance():
    """Building a 52-card deck via lambda/zip/map must take < 10 ms.

    The huge, unused `output` literal of the original has been dropped.
    """
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    start = time.perf_counter()
    session.create_deck_using_lambda_zip_map(vals, suits)
    elapsed = time.perf_counter() - start
    assert elapsed < 0.01, 'It is taking too much time to create_deck_using_lambda_zip_map'

def test_create_deck_using_list_comprehension_performance():
    """Building a 52-card deck via list comprehension must take < 10 ms."""
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    start = time.perf_counter()
    session.create_deck_using_list_comprehension(vals, suits)
    elapsed = time.perf_counter() - start
    assert elapsed < 0.01, 'It is taking too much time to create_deck_using_list_comprehension'

def test_deal_performance():
    """deal(1, 2, 5) must complete in < 10 ms."""
    start = time.perf_counter()
    session.deal(1, 2, 5)
    elapsed = time.perf_counter() - start
    # Message fixed: it previously blamed create_deck_using_list_comprehension.
    assert elapsed < 0.01, 'It is taking too much time to deal'

def test_deal_for_more_than_2_sets_performance():
    """deal(3, 2, 5) (more than two sets) must complete in < 10 ms."""
    start = time.perf_counter()
    session.deal(3, 2, 5)
    elapsed = time.perf_counter() - start
    assert elapsed < 0.01, 'It is taking too much time to deal'
def test_create_deck_using_list_comprehension_exception():
    """Empty vals or suits must raise ValueError.

    The original put three `assert session...` calls inside one
    `with pytest.raises` block, so only the first ever executed (which also
    hid that `suits` was undefined); each case now gets its own context.
    """
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    with pytest.raises(ValueError):
        session.create_deck_using_list_comprehension(vals, [])
    with pytest.raises(ValueError):
        session.create_deck_using_list_comprehension([], suits)
    with pytest.raises(ValueError):
        session.create_deck_using_list_comprehension([], [])

def test_create_deck_using_lambda_zip_map_exception():
    """Empty vals or suits must raise ValueError (one case per context)."""
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    with pytest.raises(ValueError):
        session.create_deck_using_lambda_zip_map(vals, [])
    with pytest.raises(ValueError):
        session.create_deck_using_lambda_zip_map([], suits)
    with pytest.raises(ValueError):
        session.create_deck_using_lambda_zip_map([], [])

def test_deal_exception():
    """Invalid set/player/card counts must each raise ValueError."""
    with pytest.raises(ValueError):
        session.deal(2, 0, 5)
    with pytest.raises(ValueError):
        session.deal(2, 2, 1)
    with pytest.raises(ValueError):
        session.deal(-1, 2, 5)

def test_decider_exception():
    """Empty hands on either (or both) sides must raise ValueError."""
    hand = ['hearts-7', 'hearts-10', 'hearts-8', 'hearts-9', 'hearts-6']
    with pytest.raises(ValueError):
        session.decider(hand, [])
    with pytest.raises(ValueError):
        session.decider([], hand)
    with pytest.raises(ValueError):
        session.decider([], [])

def test_get_rank_exception():
    """get_rank of an empty hand must raise ValueError."""
    with pytest.raises(ValueError):
        session.get_rank([])
def test_performance_decider():
    """200 random two-hand showdowns must be decided in under 0.1 s total.

    Each entry of `combinations` below randomly constructs a 5-card hand of
    one poker category (trailing #N comment = rank, 1 = Royal Flush ...
    10 = High Card); two hands are drawn with rank-weighted sampling and
    fed to session.decider.
    """
    start1 = time.perf_counter()
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    n=5
    combination={1:'Royal Flush', 2:'Straight Flush', 3:'Four of a Kind', 4:'Full House', 5:'Flush', 6:'Straight', 7:'Three of a Kind', 8:'Two Pair', 9:'One Pair', 10:'High Card'}
    for times in range(200):
        combinations=[
            [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, vals[-n:]))], #1
            [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, vals[random.choice(range(len(vals)-(n+1))):][:n]))], #2
            [item for sublist in [[m+'-'+j for m in random.sample(suits,n-1)] if i==0 else [m+'-'+j for m in random.sample(suits,1)] for i,j in enumerate(random.sample(vals,2))] for item in sublist], #3
            [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n+1)/2))] if i==0 else [m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] for i,j in enumerate(random.sample(vals,2))] for item in sublist], #4
            # [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, random.sample(vals,n)))], #5
            [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, random.sample([vals[k] for k in range(len(vals)) if int(k)%2==1],n)))], #5
            [i+'-'+j for i,j in list(zip(random.sample(suits,n) if n<=len(suits) else suits+random.choices(suits,k=n-len(suits)),vals[random.choice(range(len(vals)-(n+1))):][:n]))], #6
            [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n+1)/2))] if i==0 else [m+'-'+j for m in random.sample(suits,1)] for i,j in enumerate(random.sample(vals,3))] for item in sublist], #7
            [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] if i==0 else ([m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] if i==1 else [m+'-'+j for m in random.sample(suits,1)]) for i,j in enumerate(random.sample(vals,3))] for item in sublist], #8
            [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] if i==0 else [m+'-'+j for m in random.sample(suits,1)] for i,j in enumerate(random.sample(vals,min(n,4)))] for item in sublist], #9
            # [i+'-'+j for i,j in list(zip(random.sample(suits,n) if n<=len(suits) else suits+random.choices(suits,k=n-len(suits)),random.sample(vals,k=n)))] #10
            [i+'-'+j for i,j in list(zip(random.sample(suits,n) if n<=len(suits) else suits+random.choices(suits,k=n-len(suits)),random.sample([vals[k] for k in range(len(vals)) if int(k)%2==0],k=n)))] #10
        ]
        # Weighted pick of two distinct ranks: rank r appears int(50/(r+1)) times.
        hands={'Player 1' if i==0 else 'Player 2': [combinations[j],j+1] for i,j in enumerate(random.sample([item for sublist in [[j]*int(50/(i+1)) for i,j in enumerate(range(len(combinations)))] for item in sublist],2))}
        # Lower rank number = stronger hand.
        if hands['Player 1'][1]==hands['Player 2'][1]:
            result='no clear winner'
        elif hands['Player 1'][1]>hands['Player 2'][1]:
            result='Player2 is winner'
        else:
            result='Player1 is winner'
        hands['Player 1'][1]=combination[hands['Player 1'][1]]
        hands['Player 2'][1]=combination[hands['Player 2'][1]]
        # print((hands, result))
        session.decider(hands['Player 1'][0],hands['Player 2'][0])
    end1 = time.perf_counter()
    delta1 = end1 - start1
    assert delta1 < 0.1, 'It is taking too much time to decide winner'
def test_decider():
    """Exercise session.decider() on 200 randomly generated pairs of hands.

    For each iteration, one synthetic 5-card hand is built per poker
    category (index i in ``combinations`` corresponds to rank i+1 in the
    ``combination`` dict, where a LOWER rank number is a STRONGER hand).
    Two hands are drawn with rank-weighted probabilities, the expected
    winner is derived from the rank numbers, and decider()'s output is
    asserted to equal ``(hands, result)``.
    """
    vals = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'jack', 'queen', 'king', 'ace']
    suits = ['spades', 'clubs', 'hearts', 'diamonds']
    n=5
    combination={1:'Royal Flush', 2:'Straight Flush', 3:'Four of a Kind', 4:'Full House', 5:'Flush', 6:'Straight', 7:'Three of a Kind', 8:'Two Pair', 9:'One Pair', 10:'High Card'}
    for times in range(200):
        # One constructed hand per category; the trailing #k comments map
        # each entry to rank k in the `combination` dict above.
        combinations=[
        [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, vals[-n:]))], #1
        [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, vals[random.choice(range(len(vals)-(n+1))):][:n]))], #2
        [item for sublist in [[m+'-'+j for m in random.sample(suits,n-1)] if i==0 else [m+'-'+j for m in random.sample(suits,1)] for i,j in enumerate(random.sample(vals,2))] for item in sublist], #3
        [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n+1)/2))] if i==0 else [m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] for i,j in enumerate(random.sample(vals,2))] for item in sublist], #4
        # [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, random.sample(vals,n)))], #5
        [i+'-'+j for i,j in list(zip([random.choice(suits)] * n, random.sample([vals[k] for k in range(len(vals)) if int(k)%2==1],n)))], #5
        [i+'-'+j for i,j in list(zip(random.sample(suits,n) if n<=len(suits) else suits+random.choices(suits,k=n-len(suits)),vals[random.choice(range(len(vals)-(n+1))):][:n]))], #6
        [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n+1)/2))] if i==0 else [m+'-'+j for m in random.sample(suits,1)] for i,j in enumerate(random.sample(vals,3))] for item in sublist], #7
        [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] if i==0 else ([m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] if i==1 else [m+'-'+j for m in random.sample(suits,1)]) for i,j in enumerate(random.sample(vals,3))] for item in sublist], #8
        [item for sublist in [[m+'-'+j for m in random.sample(suits,int(n/2 if n%2==0 else (n-1)/2))] if i==0 else [m+'-'+j for m in random.sample(suits,1)] for i,j in enumerate(random.sample(vals,min(n,4)))] for item in sublist], #9
        # [i+'-'+j for i,j in list(zip(random.sample(suits,n) if n<=len(suits) else suits+random.choices(suits,k=n-len(suits)),random.sample(vals,k=n)))] #10
        [i+'-'+j for i,j in list(zip(random.sample(suits,n) if n<=len(suits) else suits+random.choices(suits,k=n-len(suits)),random.sample([vals[k] for k in range(len(vals)) if int(k)%2==0],k=n)))] #10
        ]
        # Weighted draw of two distinct category indices: index j is repeated
        # int(50/(j+1)) times, so lower-index (stronger) hands are drawn more often.
        hands={'Player 1' if i==0 else 'Player 2': [combinations[j],j+1] for i,j in enumerate(random.sample([item for sublist in [[j]*int(50/(i+1)) for i,j in enumerate(range(len(combinations)))] for item in sublist],2))}
        # Lower rank number wins; a larger value for Player 1 means Player 2 wins.
        if hands['Player 1'][1]==hands['Player 2'][1]:
            result='no clear winner'
        elif hands['Player 1'][1]>hands['Player 2'][1]:
            result='Player2 is winner'
        else:
            result='Player1 is winner'
        # Replace the numeric rank with its human-readable name before comparing.
        hands['Player 1'][1]=combination[hands['Player 1'][1]]
        hands['Player 2'][1]=combination[hands['Player 2'][1]]
        assert session.decider(hands['Player 1'][0],hands['Player 2'][0])==(hands,result), (hands, result)
# def test_invalid_function():
# with pytest.raises(AttributeError):
# assert session4.time_it(session4.squared_power_list1, 3,start=0,end=5)
# def test_exec_without_args():
# with pytest.raises(ValueError):
# assert session4.time_it(print, sep='-', end= ' ***\n', repetitons=5)
# assert session4.time_it(session4.squared_power_list, start=0,end=5)
# assert session4.time_it(session4.polygon_area, sides = 3, repetitons=10)
# assert session4.time_it(session4.temp_converter, temp_given_in = 'c', repetitons=100)
# assert session4.time_it(session4.speed_converter, dist='km', time='m', repetitons=200)
# def test_exec_without_kwargs():
# with pytest.raises(ValueError):
# assert session4.time_it(print, 1, 2, 3, repetitons=5)
# assert session4.time_it(session4.squared_power_list, 3)
# assert session4.time_it(session4.polygon_area, 15, repetitons=10)
# assert session4.time_it(session4.temp_converter, 37.78, repetitons=100)
# assert session4.time_it(session4.speed_converter, 100, repetitons=200)
# def test_exec_with_str_args():
# with pytest.raises(ValueError):
# assert session4.time_it(print, 'a', sep='-', end= ' ***\n', repetitons=5)
# assert session4.time_it(session4.squared_power_list,'a', start=0,end=5)
# assert session4.time_it(session4.polygon_area,'a', sides = 3, repetitons=10)
# assert session4.time_it(session4.temp_converter,'a', temp_given_in = 'c', repetitons=100)
# assert session4.time_it(session4.speed_converter, 'a', dist='km', time='m', repetitons=200)
# def test_check_kwargs_squared_power_list():
# with pytest.raises(ValueError):
# assert session4.time_it(session4.squared_power_list, 3,start1=0,end1=5)
# def test_check_kwargs_polygon_area():
# with pytest.raises(ValueError):
# assert session4.time_it(session4.polygon_area, 15, sides2 = 3, repetitons=10)
# def test_check_kwargs_temp_converter():
# with pytest.raises(ValueError):
# assert session4.time_it(session4.temp_converter, 37.78, repetitons=100)
# def test_check_kwargs_speed_converter():
# with pytest.raises(ValueError):
# assert session4.time_it(session4.speed_converter, 100, dist='km', repetitons=200)
# def test_performance_print():
# start1 = time.perf_counter()
# session4.time_it(print, 1, 2, 3, sep='-', end= ' ***\n', repetitons=5)
# end1 = time.perf_counter()
# delta1 = end1 - start1
# assert delta1 < 0.1
# def test_performance_squared_power_list():
# start1 = time.perf_counter()
# session4.time_it(session4.squared_power_list, 3,start=0,end=5)
# end1 = time.perf_counter()
# delta1 = end1 - start1
# assert delta1 < 0.1
# def test_performance_polygon_area():
# start1 = time.perf_counter()
# session4.time_it(session4.polygon_area, 15, sides = 3, repetitons=10)
# end1 = time.perf_counter()
# delta1 = end1 - start1
# assert delta1 < 0.1
# def test_performance_temp_converter():
# start1 = time.perf_counter()
# session4.time_it(session4.temp_converter, 37.78, temp_given_in = 'c', repetitons=100)
# end1 = time.perf_counter()
# delta1 = end1 - start1
# assert delta1 < 0.1
# def test_performance_speed_converter():
# start1 = time.perf_counter()
# session4.time_it(session4.speed_converter, 100, dist='km', time='m', repetitons=200)
# end1 = time.perf_counter()
# delta1 = end1 - start1
# assert delta1 < 0.1
# def test_result_squared_power_list():
# assert session4.time_it(session4.squared_power_list, 3,start=0,end=5) == [1, 3, 9, 27, 81, 243], 'squared_power_list is not yielding desired result'
# def test_result_squared_power_list_negative():
# assert session4.time_it(session4.squared_power_list, -3,start=0,end=5) == [1, -3, 9, -27, 81, -243], 'squared_power_list is not yielding desired result'
# def test_result_polygon_area():
# assert(session4.time_it(session4.polygon_area, 15, sides = 3, repetitons=10)) == 225, 'polygon_area is not yielding desired list'
# def test_result_polygon_area_negative():
# with pytest.raises(ValueError):
# assert(session4.time_it(session4.polygon_area, -15, sides = 3, repetitons=10)), 'length / sides cannot be negative'
# def test_result_temp_converter():
# assert(session4.time_it(session4.temp_converter, 37.78, temp_given_in = 'c', repetitons=100))==100.004, 'temp_converter is not yielding desired list'
# def test_result_speed_converter():
# assert(round(session4.time_it(session4.speed_converter, 100, dist='km', time='m', repetitons=200),2))==1.67, 'speed_converter is not yielding desired list'
# def test_result_speed_converter_negative():
# with pytest.raises(ValueError):
# assert(round(session4.time_it(session4.speed_converter, -100, dist='km', time='m', repetitons=200),2))==1.67, 'distance cannot be negative'
|
997,534 | 1375b2c2415f467fd44650b0497061fad7b00ce8 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
#
from __future__ import print_function
import argparse
import os
import textwrap
from common.config import load_config
from common.logger import init_logging
from resolver.resolver import download_artifact
def main():
    """Entry point: parse command line arguments and download a Maven artifact.

    Expects a positional Maven coordinate (at least groupId:artifactId:version),
    optionally the repository name (-m/--maven) and a flag (-t/--transitive)
    to also resolve transitive dependencies. Exits with status 1 when no
    coordinate is supplied and status 2 when the coordinate has fewer than
    three ':'-separated fields.
    """
    load_config()
    # All runs are logged under ~/.pymvn/pymvn.log.
    init_logging(os.path.expanduser(os.path.join("~", ".pymvn", "pymvn.log")))
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter, allow_abbrev=False
    )
    parser.add_argument(
        "-m",
        "--maven",
        action="store",
        type=str,
        dest="MAVEN_CENTRAL",
        default="google",
        help=textwrap.dedent(
            """\
            The name of maven central to download artifacts from
            """
        ),
    )
    parser.add_argument(
        "-t",
        "--transitive",
        action="store_true",
        dest="DOWNLOAD_DEPENDENCIES",
        default=False,
        help=textwrap.dedent(
            """\
            Whether or not to download all the transitive dependencies
            """
        ),
    )
    parser.add_argument(
        "ARTIFACT_COORDINATE",
        nargs="?",
        type=str,
        default=None,
        help=textwrap.dedent(
            """\
            The possible coordinates are:
                - groupId:artifactId:version
                - groupId:artifactId:packaging:version
                - groupId:artifactId:packaging:classifier:version
            For more details on how to maven coordinate is defined visit
            http://maven.apache.org/pom.html#Maven_Coordinates
            """
        ),
    )
    args = parser.parse_args()
    if not args.ARTIFACT_COORDINATE:
        print("Artifact maven coordinate must be specified")
        # raise SystemExit instead of the site-injected exit(): the latter is
        # not guaranteed to exist (e.g. under `python -S` or frozen builds).
        raise SystemExit(1)
    values = args.ARTIFACT_COORDINATE.split(":")
    # str.split() always returns at least one element, so the length check
    # alone fully validates the coordinate shape.
    if len(values) < 3:
        print("Illegal artifact maven coordinate: %s" % args.ARTIFACT_COORDINATE)
        raise SystemExit(2)
    download_artifact(
        args.ARTIFACT_COORDINATE, args.MAVEN_CENTRAL, args.DOWNLOAD_DEPENDENCIES
    )


if __name__ == "__main__":
    main()
|
997,535 | b3fd4ac85214a5781ab3f7aaba0a976ec8e98d89 | #!/usr/bin/python
"""
@author Ryan Summers
@date 2-27-2018
@brief Returns labeled or validated datasets to the robosub server.
"""
from __future__ import print_function
import datetime
import json
import os
import pysftp
import shutil
import sys
import tempfile
import glob
import tarfile
import progressbar
# Python 2 compatibility: make `input` behave like raw_input. Catch only
# NameError (raw_input is absent on Python 3); the original bare `except:`
# would have hidden any unrelated failure as well.
try:
    input = raw_input
except NameError:
    pass
# Upload progress bar
bar = progressbar.ProgressBar()
# update progress bar
def progress(done, total):
    """SFTP transfer callback: advance the module-level progress bar.

    ``done`` and ``total`` are byte counts supplied by pysftp's put();
    ``total`` is unused because the bar's max_value is configured before
    the transfer starts.
    """
    bar.update(done)
def app(args):
    """ Main entry point for returning data to the server.
    Arguments:
        args: Passed in from argparse. Must have a named member `annotations`
            that specifies the path to the annotation JSON, and a boolean
            member `auto_delete` controlling local folder cleanup.

    Side effects: uploads the JSON to the SFTP server, moves/deletes the
    matching tar between server folders, may delete the local working
    directory, and appends a per-user stats log entry. Reads the
    ROBOSUB_WHO_AM_I and ROBOSUB_SFTP_PASSWORD environment variables,
    prompting interactively when they are unset.
    """
    # Grab the username for data ownership and the SFTP password.
    data_owner = os.environ.get('ROBOSUB_WHO_AM_I')
    if data_owner is None:
        print('To suppress this prompt, please set the environment variable ',
              end='')
        print('ROBOSUB_WHO_AM_I to your name.')
        data_owner = input('Please enter your name (First Last): ')
    password = os.environ.get('ROBOSUB_SFTP_PASSWORD')
    if password is None:
        print('To suppress this prompt, please set the ROBOSUB_SFTP_PASSWORD ',
              end='')
        print('environment variable.')
        password = input('Please enter the Robosub SFTP password: ')
    # Read the annotations to check if validation or labeling have been
    # completed.
    with open(args.annotations, 'r') as f:
        json_contents = json.load(f)
    # The tar on the server shares its basename with the local JSON file.
    tar_name = os.path.splitext(os.path.basename(args.annotations))[0] + '.tar'
    # Open an SFTP connection with the robosub server.
    with pysftp.Connection('robosub.eecs.wsu.edu',
                           username='sftp_user',
                           password=password,
                           default_path='/data/vision/labeling') as sftp:
        # Check to see if there is an in-progress labeling or validation
        # session.
        with sftp.cd('in_progress/labeling/'):
            labeling_tars = [x for x in sftp.listdir() if x.endswith('.tar')]
        with sftp.cd('in_progress/validation/'):
            validation_tars = [x for x in sftp.listdir() if x.endswith('.tar')]
        with sftp.cd('in_progress/clarification/'):
            clarification_tars = [x for x in sftp.listdir() if x.endswith('.tar')]
        # State flags: `delete` here means bad images were flagged during
        # clarification and the tar must be pruned server-side. NOTE(review):
        # `delete` is later reused for the local-folder prompt answer.
        in_validation = False
        in_clarification = False
        delete = False
        bad_count = 0
        # If the dataset is currently being labeled, set up the proper source
        # and destination paths on the server. If labeling, validation or
        # clarification is not fully completed, move from in_progress back
        # into the intermediate step.
        if tar_name in labeling_tars:
            complete = True
            for annotation in json_contents:
                try:
                    complete = not annotation['unlabeled']
                    if not complete:
                        break
                # NOTE(review): bare except treats entries without an
                # 'unlabeled' key as labeled — confirm that is intended.
                except:
                    pass
            src_dir = 'in_progress/labeling/'
            dest_dir = 'unvalidated/' if complete else 'new/'
        elif tar_name in validation_tars:
            in_validation = True
            complete = True
            # Validation is complete only if every entry carries a 'status'.
            for annotation in json_contents:
                try:
                    status = annotation['status']
                except:
                    complete = False
                    break
            src_dir = 'in_progress/validation/'
            dest_dir = 'done/' if complete else 'unvalidated/'
        elif tar_name in clarification_tars:
            in_clarification = True
            complete = True
            dir_name = os.path.dirname(args.annotations)
            # Looping over to see if there are bad images, and see if the tar is complete
            for annotation in json_contents:
                try:
                    ann = annotation['status']
                    if ann == 'Bad':
                        delete = True
                        bad_count += 1
                except:
                    complete = False
                    pass
            if not delete:
                src_dir = 'in_progress/clarification/'
                dest_dir = 'new/' if complete else 'clarification/'
            else:
                # Bad images present: stage into temp/ so delete.py can prune.
                src_dir = 'in_progress/clarification/'
                dest_dir = 'in_progress/clarification/temp/' if complete else 'clarification/'
        else:
            print('The supplied JSON name does not match any in-progress ',
                  end='')
            print('validation, labeling sessions or clarification.')
            print('Current labeling sessions: {}'.format(labeling_tars))
            print('Current validation sessions: {}'.format(validation_tars))
            print('Current clarification sessions: {}'.format(clarification_tars))
            sys.exit(-1)
        if not complete:
            if in_validation:
                print('Validation was not completed. Returning progress to ',
                      end='')
                print('unvalidated datasets.')
            elif in_clarification:
                print('clarification was not completed. Returning progress to new ',
                      end='')
                print('datasets.')
            else:
                print('Labeling was not completed. Returning progress to new ',
                      end='')
                print('datasets.')
        # Sanity check: the tar must actually exist in the source folder.
        with sftp.cd(src_dir):
            tars = [x for x in sftp.listdir() if x.endswith('.tar')]
            if tar_name not in tars:
                print('The provided annotations dataset was not found in ',
                      end='')
                print('robosub.eecs.wsu.edu:/data/vision/labeling/' + src_dir)
                sys.exit(-1)
        # If the annotations already exist on the server, pull them down to
        # figure out what new labels were added for stat tracking.
        if tar_name not in clarification_tars:
            previous_annotations = []
            with sftp.cd(src_dir):
                if os.path.basename(args.annotations) in sftp.listdir():
                    with tempfile.NamedTemporaryFile() as tempf:
                        sftp.get(os.path.basename(args.annotations), tempf.name)
                        with open(tempf.name, 'r') as f:
                            previous_annotations = json.load(f)
            with open(args.annotations, 'r') as f:
                new_annotations = json.load(f)
            if len(previous_annotations) and len(new_annotations) != len(previous_annotations):
                print('Provided annotation file and server annotation file differ.')
                sys.exit(-1)
            # Per-session counters uploaded later to history/ for stats.
            log = {'labels_added': 0,
                   'images_labeled': 0,
                   'labels_validated': 0,
                   'images_validated': 0}
            if len(previous_annotations) == 0:
                # No server copy to diff against: count everything as new.
                for annotation in new_annotations:
                    if in_validation:
                        log['images_validated'] += 1
                        log['labels_validated'] += len(annotation['annotations'])
                    else:
                        labels_added = len(annotation['annotations'])
                        log['labels_added'] += labels_added
                        if labels_added > 0:
                            log['images_labeled'] += 1
            else:
                # Diff old vs new entry-by-entry (lists are same length here).
                for old, new in zip(previous_annotations, new_annotations):
                    if in_validation:
                        log['images_validated'] += 1
                        try:
                            status = new['status']
                            try:
                                old_status = old['status']
                                if old_status != status:
                                    log['labels_validated'] += len(new['annotations'])
                                    log['images_validated'] += 1
                            except:
                                # Old entry had no status: newly validated.
                                log['labels_validated'] += len(new['annotations'])
                                log['images_validated'] += 1
                        except:
                            pass
                    else:
                        labels_added = len(new['annotations']) - len(old['annotations'])
                        if labels_added > 0:
                            log['labels_added'] += labels_added
                            log['images_labeled'] += 1
        # Upload the JSON to the server. Or tar if the images were bad
        # in clarification proccess
        if delete or (tar_name not in clarification_tars):
            with sftp.cd(dest_dir):
                # Return tar file
                global bar
                total_size = os.stat(args.annotations)
                bar = progressbar.ProgressBar(max_value=total_size.st_size)
                sftp.put(args.annotations, callback=progress)
                bar.finish()
        # Move the tar from in_progress to the proper destination.
        # or delete tar if images were bad in clarification
        sftp.rename('{}/{}'.format(src_dir, tar_name),
                    '{}/{}'.format(dest_dir, tar_name))
        if delete and complete:
            print('Found {} bad images. Deleting'.format(bad_count))
            with sftp.cd(dest_dir):
                sftp.execute("python /data/vision/labeling/in_progress/clarification/temp/delete.py {} --remote".format(tar_name))
            print('Deleted, taring up remaining images and puting into labeling folder.')
        # Remove the ownership and annotation files.
        with sftp.cd(src_dir):
            if os.path.basename(args.annotations) in sftp.listdir():
                sftp.remove(os.path.basename(args.annotations))
            sftp.remove('{}.owner'.format(tar_name))
        # Delete the folder containing the JSON if the user would like. Ensure
        # the dirname is a valid path.
        directory = os.path.dirname(args.annotations)
        if directory != '':
            if args.auto_delete:
                delete = True
            else:
                user_input = input('Delete folder `{}/`? (y/n): '.format(
                    directory))
                delete = user_input == 'y' or user_input == 'Y'
            if delete:
                print('Deleting {}/'.format(directory))
                shutil.rmtree(directory)
        # Finally, upload the stats to the history folder on the
        # server for stats tracking.
        if tar_name not in clarification_tars:
            stats = [{'owner': data_owner,
                      'stats': log,
                      'date-time': datetime.datetime.now().isoformat()}]
            # Only upload stats if someone has modified the annotations.
            if log['images_labeled'] != 0 or log['images_validated'] != 0:
                with tempfile.NamedTemporaryFile(prefix=data_owner) as tempf:
                    with open(tempf.name, 'w') as f:
                        json.dump(stats, f)
                    with sftp.cd('history'):
                        # Log files are named <First_Last>-<n>.log; pick next n.
                        log_files = [x for x in sftp.listdir() if x.startswith(data_owner.replace(' ', '_'))]
                        file_numbers = [int(os.path.splitext(x)[0].split('-')[-1]) for x in log_files]
                        if len(file_numbers) == 0:
                            f_number = 0
                        else:
                            f_number = max(file_numbers) + 1
                        dst = os.path.basename(tempf.name)
                        sftp.put(tempf.name)
                        sftp.rename(dst, '{}-{}.log'.format(data_owner.replace(' ', '_'), f_number))
        sftp.execute("python /data/vision/labeling/done/count/count.py") # Update Stats on the server
    print('Data has been successfully returned.')
|
997,536 | ef9ce8c5e07e7c7b161de0608ef4ae21b213ca9f | import torch
import pandas as pd
from torch.utils.data import DataLoader
from torchvision.transforms import Compose
from utils import extract_cnrpark_extra_dataset, fix_random_seed, device, extract_annotation_file, remove_parentheses
from constants import TEST_CNRPARK_EXTRA_ANNOTATION, RANDOM_SEED, PKLOT_DATA_DIR, TEST_PKLOT_ANNOTATION, TRAIN_PKLOT_ANNOTATION
from data.transforms import ToTensor
from data.parking_lots_dataset import ParkingLotsDataset
def test_model(model, X_test, y_test, log_path='./test_data_info.csv'):
    """Evaluate ``model`` on the given samples and log per-image predictions.

    Args:
        model: Trained torch model, already moved to ``device``.
        X_test: Sequence of image paths.
        y_test: Sequence of integer labels aligned with ``X_test``.
        log_path: Destination CSV for (image_path, ground-truth, prediction)
            rows.

    Prints the overall accuracy percentage.
    """
    correct = 0
    total = 0
    # Accumulate rows in a plain list and build the DataFrame once at the
    # end: DataFrame.append copied the whole frame on every call (O(n^2))
    # and was removed in pandas 2.0.
    rows = []
    composed = Compose([ToTensor()])
    test_dataloader = DataLoader(
        ParkingLotsDataset(X_test, y_test, composed),
        pin_memory=True,
        batch_size=32,
        shuffle=True,
        num_workers=4
    )
    with torch.no_grad():
        for data in test_dataloader:
            images = data['image'].to(device, dtype=torch.float)
            labels = data['label'].to(device, dtype=torch.float)
            image_paths = data['image_path']
            outputs = model(images)
            # Predicted class = argmax over the class dimension.
            _, predicted = torch.max(outputs.data, 1)
            total += labels.size(0)
            correct += torch.sum(predicted.float() == labels.data)
            for image_path, correct_label, predicted_label in zip(
                    image_paths, labels.cpu().numpy(), predicted.cpu().numpy()):
                rows.append({
                    'image_path': image_path,
                    'ground_true_label': int(correct_label),
                    'predicted_label': int(predicted_label)
                })
    log_df = pd.DataFrame(rows, columns=['image_path', 'ground_true_label', 'predicted_label'])
    log_df.to_csv(log_path)
    accuracy = 100 * correct.double() / total
    print(f'Accuracy {accuracy}')
def main():
    """Prepare the PKLot annotation split, load the saved model, evaluate it."""
    # x_test, y_test = extract_cnrpark_extra_dataset(TEST_CNRPARK_EXTRA_ANNOTATION)
    # NOTE(review): hard-coded absolute user path — consider moving to constants.
    remove_parentheses('/home/m_ulyanov/data/splits/PKLot', ['all.txt', 'train.txt', 'test.txt', 'val.txt'])
    # NOTE(review): despite the x_test/y_test names, this loads the TRAIN
    # annotation file (TRAIN_PKLOT_ANNOTATION) — confirm that is intended.
    x_test, y_test = extract_annotation_file(TRAIN_PKLOT_ANNOTATION, PKLOT_DATA_DIR)
    print(f'Count of test examples = {len(x_test)}')
    model = torch.load('./models/last.pth')
    print('Model is loaded!')
    model.to(device)
    test_model(model, x_test, y_test)
if __name__ == '__main__':
    # Seed all RNGs before any shuffling so evaluation runs are reproducible.
    fix_random_seed(RANDOM_SEED)
    main()
|
997,537 | eb95cd27a422b1f262e1412a9d5638c2b47a7f83 | #!/usr/bin/python3
from program.recognition_app import RecognitionApp
if __name__ == "__main__":
    # Instantiate the recognition GUI and hand control to its event loop.
    gui = RecognitionApp()
    gui.mainloop()
|
997,538 | c8f5457e1514d50f6ee27ae658810b2e36770e06 | from pylab import *
from scipy import signal
from numpy import *
import netCDF4 as nc
import pyroms as p
from scipy.special import erf
from scipy.integrate import cumtrapz
#this code reads grid data from an existing grid file, uses it to
#define a boundary forcing file for roms. If you need the boundary
#forcing file to create the roms file, first run the model for a
#timestep with closed boundaries... Or re-write this code to be more
#sensible. The structure of the forcing file is taken from
#roms/Data/ROMS/CDL/ini_hydro.cdl in the roms source distribution.
#get value of slope burger number to make
S=float(sys.argv[1])
Nbv=float(sys.argv[2])
Vinput=float(sys.argv[3])
print('running make_grid.py with S of ',S,'and an N of',Nbv,'and a V of ',Vinput)
#how many time levels in the boundary forcing files? and what are
#those times? And what is amplitude of forcing at those times
timeVec=array([0.0])*8.64e4
ampVec= [0.0]
nTime=len(timeVec)
assert len(timeVec)==nTime,'the number of time levels must match number of times given'
assert len(timeVec)==len(ampVec),'the length of timeVec and ampVec must match'
#define constants WHICH MUST MATCH THOSE IN .in FILE
Vstretching=4
Vtransform=2
#name of a history or average file that is of the same size as the
#model run, and the name of the output forcing file
inFileName='jmpbump_grid.nc'
outFileName='jmpbump_ini.nc'
vbarFile='../makePlotNew/vbar_slopeForce08_from-1150km_to_-1050km.npz'
print('USING vbar FORCING FROM',vbarFile)
#open the files
inNC=nc.Dataset(inFileName,'r')
outNC=nc.Dataset(outFileName,'w',clobber=True)
#define global attributes
outNC.type='INITIALIZATION file'
outNC.title='made by make_initial_cond.py'
outNC.out_file='no idea'
outNC.grd_file=inFileName
#now create the boundary file, following
#https://salishsea-meopar-tools.readthedocs.io/en/latest/netcdf4/index.html
#create time dimensions
for dim in ['ocean_time']:
outNC.createDimension(dim,nTime)
#create dimensions that are taken from existing grid file
for dim in ['xi_rho','xi_u','xi_v','eta_rho','eta_u','eta_v','s_rho','s_w']:
outNC.createDimension(dim,inNC.dimensions[dim].size)
#create tracer dimension
outNC.createDimension('tracer',2)
#create variables that are integers. These are all scalers
for var in ['spherical']:
data=inNC[var][0]
outNC.createVariable(var,int32,())
if var=='spherical': #oh for gods sake, come up with one format
if data==b'F':
data=0
else:
data=1
outNC[var][0]=data
#create variables specified in this code
for varTup in [('Vstretching',Vstretching),('Vtransform',Vtransform)]:
print('Creating',varTup[0],'=',varTup[1])
outNC.createVariable(varTup[0],int32,())
outNC[varTup[0]][0]=varTup[1]
#create variables that are doubles AND take data from grid file.
#tuples of (name,(dimensions))
for vartup in [('theta_s',()),
('theta_b',()),
('Tcline',()),
('hc',()),
('s_rho',('s_rho',)),
('s_w',('s_w',)),
('Cs_r',('s_rho')),
('Cs_w',('s_w')),
('h',('eta_rho','xi_rho')),
('x_rho',('eta_rho','xi_rho')),
('y_rho',('eta_rho','xi_rho')),
('x_u',('eta_u','xi_u')),
('y_u',('eta_u','xi_u')),
('x_v',('eta_v','xi_v')),
('y_v',('eta_v','xi_v'))]:
var=vartup[0]
dims=vartup[1]
data=inNC[var][:]
outNC.createVariable(var,float,dims)
outNC[var][:]=data
#create variables that are doubles, and whose data is the time vector defined above.
for vartup in [('ocean_time',('ocean_time',))]:
var=vartup[0]
dims=vartup[1]
outNC.createVariable(var,float,dims)
outNC[var].long_name='boundary time'
outNC[var].units='seconds since 0001-01-01 00:00:00'
outNC[var].calendar='proleptic_gregorian'
outNC[var].field='time, scaler, series'
outNC[var][:]=timeVec
#assert False,'asdf'
#############################################################################
# WARNING, ALL THE SCIENCE IS BELOW HERE... THIS IS WHERE THE VARIABLES
# THAT ARE USED FOR FORCING AT THE BOUNDARY ARE DEFINED...
#############################################################################
#get grid parameters
h=inNC['h'][:]
theta_b=inNC['theta_b'][:]
theta_s=inNC['theta_s'][:]
Tcline=inNC['Tcline'][:]
N=len(inNC['s_rho'][:])
#IF YOU WANT TO PLAY WITH VERTICAL GRID, SET THIS TO TRUE
adjustVertical=False
if adjustVertical:
theta_b=0.1
theta_s=7.0
Tcline=400.0
N=N
#for a number of reasons, it is useful to have distance and depth on the grids.
#ASSUMING Vtransform=2 and Vstretching=4!!!
assert Vtransform==2, 'wrong Vtransform for calculation of s'
assert Vstretching==4,'wrong Vstetching for calculation of s'
s=p.vgrid.s_coordinate_4(h,theta_b,theta_s,Tcline,N)
#and get vertical positions of w and rho points everywhere.
z_w=s.z_w[0,:,:]
z_r=s.z_r[0,:,:]
x_rho=inNC['x_rho'][:]
y_rho=inNC['y_rho'][:]
if adjustVertical:
clf()
print('showing vertical grid')
plot(z_r[:,0,0]*0.0,z_r[:,0,0],'r-*')
draw(); show()
outNC.close()
grid()
assert False,'Now it is time for you to think about what to set theta_b and theta_s to...'
#====================================================
#now make T and S
#now calculate initial T and S everywhere. since I use ana_initial.h
#instead of creating an initial condition, I must make sure that use
#the same parameters as defined in jmpbump.h
T0=14.0
S0=35.0
g=9.81
if False:
Tcoef=inNC.variables['Tcoef'][0]
R0=inNC.variables['R0'][0]
else:
Tcoef=1.7e-4
R0=1027.0
print('Tcoef=%f, R0=%f, THESE MUST MATCH THE VALUES DEFINED IN THE *.IN FILES!!!'%(Tcoef,R0))
def Gamma(z):
    """Vertical structure function of the density stratification.

    Intended contract: 0 at the surface, initial slope of 1, monotonically
    decreasing with depth. The current implementation is simply the
    identity (linear stratification); the tanh taper mentioned in the
    original notes was never added.
    """
    return z
JMP_rhoStrat=-Nbv**2*R0/g #/*vertical stratification in density*/
print('\nS0=%4.2f, T0=%4.2f, JMP_rhoStrat=%4.2f, g=%4.2f'%(S0,T0,JMP_rhoStrat,g))
print('THESE MUST MATCH THE VALUES DEFINED IN THE *.IN AND jmpbump.h FILES!\n')
tempInit=T0-(JMP_rhoStrat/R0/Tcoef)*Gamma(z_r)
saltInit=0.0*z_r+S0
#=======================================================
# NOW CALCULATE INITIAL ZETA DISTRIBUTION AND APPROPRIATE MATCHING VELOCITIES.
# THIS CODE WAS WRITTEN TO ALLOW BAROCLINIC INITIAL CONDITIONS, SO PInit HAS
# A FULL 3D SHAPE
#
# Because the code
# below is written in terms of a pressure gradient times rho0 (so that
# the geostrophic velocity is P_x), we need to integrate the velocity
# field in the cross-shelf direction to get this "P", so that when we
# take its derivative, we are back to v...
#
bsize=10.0e4 #originall 10.0e3
vbar=-Vinput*0.5*(((1+tanh((x_rho[0,:]-30e3)/bsize))+(1-tanh((x_rho[0,:]-170.0e3)/bsize)))-2.0)
vbar=-Vinput+0*vbar #uniform everywhere
#ok make integral here, so that it is a function that Pboundary can use
vbarInt=cumtrapz(vbar,x_rho[1,:],initial=0)
def Pboundary(x):
    # Interpolate the cross-shelf integral of vbar onto the requested x
    # locations; differentiating this "pressure" proxy recovers vbar.
    return interp(x, x_rho[1, :], vbarInt)
#now we need to calculate the appropriate geostrophic velocities given
#the pressure field, again assuming grid spacing is uniform.
f=inNC['f'][:]
x_u=inNC['x_u'][:]
y_u=inNC['y_u'][:]
x_v=inNC['x_v'][:]
y_v=inNC['y_v'][:]
dx=x_u[1,2]-x_u[1,1]
dy=y_u[2,1]-y_u[1,1]
x_rhoFull=zeros(z_r.shape)
for nz in range(z_r.shape[0]):
x_rhoFull[nz,:,:]=x_rho+0.0
PInit=Pboundary(x_rhoFull)*R0*f
zetaInit=PInit[-1,:,:]/g/R0
#average Pressure and f onto Psi grid
Ppsi=0.25*(PInit[:,:-1,:-1]+PInit[:,1:,:-1]+PInit[:,:-1,1:]+PInit[:,1:,1:])
fpsi=0.25*(f[:-1,:-1]+f[1:,:-1]+f[:-1,1:]+f[1:,1:])
uInit=zeros((z_r.shape[0],x_u.shape[0],x_u.shape[1]))
vInit=zeros((z_r.shape[0],x_v.shape[0],x_v.shape[1]))
for nz in range(z_r.shape[0]):
uInit[nz,1:-1,1:-1]=-1/dy/R0/(0.5*(fpsi[1:,1:-1]+fpsi[:-1,1:-1]))*(Ppsi[nz,1:,1:-1]-Ppsi[nz,:-1,1:-1])
vInit[nz,1:-1,1:-1]= 1/dx/R0/(0.5*(fpsi[1:-1,1:]+fpsi[1:-1,:-1]))*(Ppsi[nz,1:-1,1:]-Ppsi[nz,1:-1,:-1])
if False:
clf()
subplot(1,6,1)
plot(tempInit[:,-1,-1],z_r[:,-1,-1],'k-*')
grid()
title('T with depth')
subplot(1,6,2)
nx=250; plot(tempInit[:,-1,nx],z_r[:,-1,nx],'k-*')
grid()
title('T with depth')
subplot(1,6,3)
nx=150; plot(tempInit[:,-1,nx],z_r[:,-1,nx],'k-*')
grid()
title('T with depth')
subplot(2,2,2)
plot(x_rho[1,:]/1e3,-h[1,:])
grid()
title('h with x')
subplot(2,2,4)
plot(x_rho[1,:]/1e3,vInit[-1,1,:])
grid()
title('Vinflow with x')
show()
draw()
#outNC.close()
#assert False,'asdf'
#uInit and vInit do not extend to northern or southern edges, because
#of the centering above. Fix by assuming no gradient there. Ignore similar issue on east/west edges
uInit[:,-1,:]=uInit[:,-2,:]
uInit[:,0,:]=uInit[:,1,:]
vInit[:,-1,:]=vInit[:,-2,:]
vInit[:,0,:]=vInit[:,1,:]
#now make ubar and vbar by depth averaging u and v
z_w_u=0.5*(z_w[:,:,:-1]+z_w[:,:,1:])
z_w_v=0.5*(z_w[:,:-1,:]+z_w[:,1:,:])
ubarInit=0.0*x_u
vbarInit=0.0*x_v
h_u=0.0*x_u
h_v=0.0*x_v
#make integral
for nz in range(z_r.shape[0]):
#at the end of this loop, h_u and h_v should be the depth...
dz=z_w_u[nz+1,:,:]-z_w_u[nz,:,:]
h_u=h_u+dz
ubarInit=ubarInit+uInit[nz,:,:]*dz
dz=z_w_v[nz+1,:,:]-z_w_v[nz,:,:]
h_v=h_v+dz
vbarInit=vbarInit+vInit[nz,:,:]*dz
#NOW DEAL WITH FACT THAT INFLOW MUST MATCH OUTFLOW... SO
#MAKE SURE IT DOES BY CHANGING OUTFLOW ON SOUTHER BOUNDARY
#ASSUME CROSS-SHELF SPACING NEARLY IDENTICAL...
vbarInit_integral_north=sum(vbarInit[-1,:])
vbarInit_integral_south=sum(vbarInit[0,:])
inflowRatio=vbarInit_integral_north/vbarInit_integral_south
print('Ratio of northern to southern inflow transport starts as',inflowRatio)
print('adjusting so exactly 1')
#so adjust already
vInit[:,0,:]=vInit[:,0,:]*inflowRatio
vbarInit[0,:]=vbarInit[0,:]*inflowRatio
#compute average
ubarInit=ubarInit/h_u
vbarInit=vbarInit/h_v
#now write data
for vartup in [('zeta',('ocean_time','eta_rho','xi_rho'),zetaInit),
('ubar',('ocean_time','eta_u','xi_u'),ubarInit),
('vbar',('ocean_time','eta_v','xi_v'),vbarInit),
('u',('ocean_time','s_rho','eta_u','xi_u'),uInit),
('v',('ocean_time','s_rho','eta_v','xi_v'),vInit),
('temp',('ocean_time','s_rho','eta_rho','xi_rho'),tempInit),
('salt',('ocean_time','s_rho','eta_rho','xi_rho'),saltInit)]:
var=vartup[0]
dims=vartup[1]
data=vartup[2]
outNC.createVariable(var,float32,dims)
outNC[var][:]=data
#close netcdf files
#inNC.close()
outNC.close()
|
997,539 | bfdd4255301196194a2bd39991e72f1664f8ae94 | import uuid
from datetime import datetime, timedelta
from bson.tz_util import utc
from flask.sessions import SessionInterface, SessionMixin
from werkzeug.datastructures import CallbackDict
__all__ = ("MongoEngineSession", "MongoEngineSessionInterface")
class MongoEngineSession(CallbackDict, SessionMixin):
    """Dict-like session object that records whether it has been mutated."""

    def __init__(self, initial=None, sid=None):
        # CallbackDict invokes this hook on every write; flipping the flag
        # tells the session interface that the session must be persisted.
        def _mark_modified(session_dict):
            session_dict.modified = True

        CallbackDict.__init__(self, initial, _mark_modified)
        self.sid = sid
        self.modified = False
class MongoEngineSessionInterface(SessionInterface):
    """SessionInterface for mongoengine: persists Flask sessions in MongoDB."""
    def __init__(self, db, collection="session"):
        """
        The MongoSessionInterface
        :param db: The app's db eg: MongoEngine()
        :param collection: The session collection name defaults to "session"
        """
        if not isinstance(collection, str):
            raise ValueError("Collection argument should be string")
        class DBSession(db.Document):
            # Session document: the session id from the cookie is the primary key.
            sid = db.StringField(primary_key=True)
            data = db.DictField()
            expiration = db.DateTimeField()
            meta = {
                "allow_inheritance": False,
                "collection": collection,
                "indexes": [
                    {
                        # MongoDB TTL index: documents become eligible for
                        # removal 31 weeks after their `expiration` timestamp.
                        "fields": ["expiration"],
                        "expireAfterSeconds": 60 * 60 * 24 * 7 * 31,
                    }
                ],
            }
        self.cls = DBSession
    def get_expiration_time(self, app, session) -> timedelta:
        # Permanent sessions use Flask's configured permanent lifetime.
        if session.permanent:
            return app.permanent_session_lifetime
        # Fallback to 1 day session ttl, if SESSION_TTL not set.
        return timedelta(**app.config.get("SESSION_TTL", {"days": 1}))
    def open_session(self, app, request):
        # Restore the session identified by the cookie; fall back to a fresh
        # session with a new UUID4 sid when missing, unknown, or expired.
        sid = request.cookies.get(app.session_cookie_name)
        if sid:
            stored_session = self.cls.objects(sid=sid).first()
            if stored_session:
                expiration = stored_session.expiration
                # Stored datetimes may come back naive; normalize to UTC first.
                if not expiration.tzinfo:
                    expiration = expiration.replace(tzinfo=utc)
                # NOTE(review): datetime.utcnow() is naive (and deprecated in
                # Python 3.12); datetime.now(utc) would be the modern form.
                if expiration > datetime.utcnow().replace(tzinfo=utc):
                    return MongoEngineSession(
                        initial=stored_session.data, sid=stored_session.sid
                    )
        return MongoEngineSession(sid=str(uuid.uuid4()))
    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        httponly = self.get_cookie_httponly(app)
        # If the session is modified to be empty, remove the cookie.
        # If the session is empty, return without setting the cookie.
        if not session:
            if session.modified:
                response.delete_cookie(app.session_cookie_name, domain=domain)
            return
        expiration = datetime.utcnow().replace(tzinfo=utc) + self.get_expiration_time(
            app, session
        )
        # Persist only when mutated; the cookie is (re)set on every response.
        if session.modified:
            self.cls(sid=session.sid, data=session, expiration=expiration).save()
        response.set_cookie(
            app.session_cookie_name,
            session.sid,
            expires=expiration,
            httponly=httponly,
            domain=domain,
        )
|
997,540 | 3d923623ca8cbbaa87c84e5fd69ed00a90743849 | import json
# Load and pretty-print the JSON document; the context manager guarantees
# the file handle is closed even if parsing fails (the original leaked it).
with open('.json', 'r') as f:
    print(json.load(f))
997,541 | 928601da4090decfbb74b9499c0c7e47e445c529 | from flask import current_app as app
from pytz import timezone, UTC
from datetime import timedelta
import time, datetime
import random
import uuid
import requests
import sys
import pandas as pd
import json
def handle_error(req):
    """Inspect a requests.Response for HTTP and Binance API errors.

    Prints Binance error payloads (responses carrying a "code" field even on
    HTTP 200) and translates common requests exceptions into console
    messages.  A catastrophic RequestException terminates the process.
    """
    try:
        req.raise_for_status()
        # Parse the body once instead of three times.
        payload = json.loads(req.content)
        # Binance signals API-level errors with a "code" field.
        if 'code' in payload:
            print(payload)
    except requests.exceptions.HTTPError:
        # raise_for_status() raised; the response body usually carries the
        # error details.  (The original's `if err:` branch was always taken —
        # a caught exception object is always truthy.)
        print(req.json())
    except requests.exceptions.Timeout:
        # Maybe set up for a retry, or continue in a retry loop
        print('handle_error: Timeout')
    except requests.exceptions.TooManyRedirects:
        # Tell the user their URL was bad and try a different one
        print('handle_error: Too many Redirects')
    except requests.exceptions.RequestException as e:
        # catastrophic error. bail.
        print('handle_error', e)
        sys.exit(1)
|
997,542 | 9c92d29262041d4d363d4511602396537c218a7e | from django.urls import path,re_path
from todo_app.viewsets import TodoViewSet
from django.conf.urls import url
# Routing table for the Todo API.  re_path replaces django.conf.urls.url,
# which was deprecated in Django 2.0 and removed in 4.0; it is already
# imported at the top of this module and the regexes are unchanged.
urlpatterns = [
    re_path('^getall/$', TodoViewSet.as_view({'get':'list'}), name='todo_list' ),
    re_path('^create/$', TodoViewSet.as_view({'post':'create'}), name='todo_create' ),
    re_path('^get/(?P<id>[0-9]+)/$', TodoViewSet.as_view({'get':'retrieve'}), name='todo_detail' ),
    re_path('^put/(?P<id>[0-9]+)/$', TodoViewSet.as_view({'put':'update'}), name='todo_update' ),
    re_path('^delete/(?P<id>[0-9]+)/$', TodoViewSet.as_view({'delete':'destroy'}), name='todo_delete' ),
]
|
997,543 | e59fc02b136633fda967b472dcc842352b114ad4 | from rest_framework import viewsets, mixins
from rest_framework.parsers import MultiPartParser, FileUploadParser
from rest_framework.viewsets import GenericViewSet
from .models import Post, PostFile
from .serializers import PostSerializer, PostFileSerializer
class PostViewSet(viewsets.ModelViewSet):
    """
    A Post represents a Facebook post.
    """
    # Full CRUD over every Post; no per-user filtering is applied here.
    queryset = Post.objects.all()
    serializer_class = PostSerializer
class PostFileView(mixins.CreateModelMixin, mixins.DestroyModelMixin, GenericViewSet):
    """
    An API endpoint to upload files for a post.
    Additional Information:
        Content-Type: multipart/form-data; boundary=<calculated when request is sent>
    """
    # DRF reads the plural attribute "parser_classes"; the original
    # "parser_class" was silently ignored, so the default parsers stayed
    # active.  MultiPartParser goes first because FileUploadParser matches
    # every media type ('*/*') and would otherwise shadow it.
    parser_classes = [MultiPartParser, FileUploadParser]
    permission_classes = []
    serializer_class = PostFileSerializer

    def get_queryset(self):
        # No filtering: lookups (e.g. for deletes) run over all files.
        return PostFile.objects.all()

    def get_serializer_context(self, **kwargs):
        # Expose the post pk from the URL so the serializer can attach
        # the uploaded file to the right Post.
        context = super().get_serializer_context()
        context['post_id'] = self.kwargs.get('pk')
        return context
|
997,544 | fe517fe10152ff12af1f54cd4444576bec21a3f6 | import torch
import torch.nn as nn
import torchvision.transforms as transforms
import torchvision.datasets
from bokeh.plotting import figure
from bokeh.io import show
from bokeh.models import LinearAxis, Range1d
import torch.nn.functional as F
import torch.utils.data as torchUtils
import numpy as np
from matplotlib import pyplot as plt
from sklearn.datasets import make_classification
from sklearn.model_selection import GridSearchCV
from skorch import NeuralNetClassifier
from skorch.dataset import Dataset
from skorch.dataset import CVSplit
import time
from functools import partial
from bayes_opt import BayesianOptimization
from utils import *
def fit_with(dropout1_rate, dropout2_rate, learning_rate, train_data_loader, val_data_loader):
    """Train a ConvNet with the given hyperparameters; return val accuracy.

    Used as the objective function for BayesianOptimization, which maximizes
    the returned validation accuracy.
    NOTE(review): ConvNet comes from the local utils module — presumably its
    first two constructor args are the dropout probabilities; confirm there.
    """
    # Create the model using a specified hyperparameters.
    model = ConvNet(dropout1_rate, dropout2_rate)

    if torch.cuda.is_available():
        model = model.cuda()

    num_epochs = 15

    # Train the model
    optimizer = torch.optim.Adam(model.parameters(), lr = learning_rate)
    criterion = nn.CrossEntropyLoss()
    for epoch in range(num_epochs):
        for i, (images, labels) in enumerate(train_data_loader):
            outputs = model(images)
            loss = criterion(outputs, labels)

            # Backpropagation and Adam optimisation
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

    # Evaluation with validation data.
    # NOTE(review): the model is left in eval mode after this returns.
    model.eval()
    with torch.no_grad():
        correct = 0
        total = 0
        for images, labels in val_data_loader:
            outputs = model(images)
            # predicted class = argmax over the logits
            _, predicted = torch.max(outputs.data, 1)
            total += labels.size(0)
            correct += (predicted == labels).sum().item()
        score = correct/total
        print('Validation accuracy:', score)
    return score
if __name__== "__main__":
    # SVHN pipeline: load -> grayscale -> normalize -> NCHW tensors, then
    # Bayesian-optimize dropout rates and learning rate via fit_with.
    mini_batch_size = 100
    train_data_path = 'data/train_32x32.mat'
    test_data_path = 'data/test_32x32.mat'
    # load_dataset / rgb2gray come from the local utils module.
    trainX, trainY, valX, valY, testX, testY = load_dataset(train_data_path, test_data_path)

    #converting to grayscale images
    trainX = rgb2gray(trainX).astype(np.float32)
    valX = rgb2gray(valX).astype(np.float32)
    testX = rgb2gray(testX).astype(np.float32)

    # Calculating the mean on the training data
    train_mean = np.mean(trainX, axis=0)
    # Calculating the std on the training data
    train_std = np.std(trainX, axis=0)

    # normalization of the data (val/test reuse the training statistics)
    trainX = (trainX - train_mean) / train_std
    valX = (valX - train_mean) / train_std
    testX = (testX - train_mean) / train_std

    # reshaping channel number: NHWC -> NCHW as torch conv layers expect
    trainX = trainX.transpose(0, 3, 1, 2)
    valX = valX.transpose(0, 3, 1, 2)
    testX = testX.transpose(0, 3, 1, 2)

    # labels arrive as column vectors; flatten to 1-D
    trainY = trainY.transpose()[0]
    valY = valY.transpose()[0]
    testY = testY.transpose()[0]

    # changing the labels of 10 to 0 (SVHN encodes digit 0 as class 10)
    trainY[trainY == 10] = 0
    valY[valY == 10] = 0
    testY[testY == 10] = 0

    #converting numpy to tensor
    tensor_trainX = torch.from_numpy(trainX)
    tensor_trainY = torch.from_numpy(trainY).long()
    tensor_valX = torch.from_numpy(valX)
    tensor_valY = torch.from_numpy(valY).long()
    tensor_testX = torch.from_numpy(testX)
    tensor_testY = torch.from_numpy(testY).long()

    #running with GPU
    if torch.cuda.is_available():
        tensor_trainX = tensor_trainX.cuda()
        tensor_trainY = tensor_trainY.cuda()
        tensor_valX = tensor_valX.cuda()
        tensor_valY = tensor_valY.cuda()
        tensor_testX = tensor_testX.cuda()
        tensor_testY = tensor_testY.cuda()

    # creating datset for dataloader
    train_dataset = torchUtils.TensorDataset(tensor_trainX, tensor_trainY)
    val_dataset = torchUtils.TensorDataset(tensor_valX, tensor_valY)
    test_dataset = torchUtils.TensorDataset(tensor_testX, tensor_testY)

    # Data loader
    train_loader = torchUtils.DataLoader(dataset=train_dataset, batch_size=mini_batch_size, shuffle=True)
    val_loader = torchUtils.DataLoader(dataset=val_dataset, batch_size=mini_batch_size, shuffle=False)

    # Bayesian Optimization: fix the data loaders, tune only hyperparameters
    start_time = time.time()
    fit_with_partial = partial(fit_with, train_data_loader = train_loader, val_data_loader = val_loader)

    # Bounded region of parameter space
    pbounds = {'dropout1_rate': (0.0, 0.5), 'dropout2_rate': (0.0, 0.5), 'learning_rate': (0.001, 0.01)}

    optimizer = BayesianOptimization(
        f=fit_with_partial,
        pbounds=pbounds,
        random_state=1,
    )
    # 10 random probes followed by 10 guided iterations
    optimizer.maximize(init_points=10, n_iter=10,)

    for i, res in enumerate(optimizer.res):
        print("Iteration {}: \n\t{}".format(i, res))
    print(optimizer.max)

    end_time = time.time()
    print("Required Time: " + str(end_time - start_time) + " s")
|
997,545 | 936e4539bc8a525df93bd7be5706e82e93fbfa93 | """
Contains classes to be (re)used in various places
"""
import asyncio
class Timer:  # pylint: disable=too-few-public-methods
    """
    Schedules a one-off callback to be awaited once a delay elapses
    """
    def __init__(self, timeout, callback):
        self._timeout = timeout
        self._callback = callback
        # Kick off the countdown immediately on the running event loop.
        self._task = asyncio.ensure_future(self._wait_then_fire())

    async def _wait_then_fire(self):
        """
        Sleeps for the configured delay, then awaits the callback
        """
        await asyncio.sleep(self._timeout)
        await self._callback()

    def cancel(self):
        """
        Aborts the pending callback if it has not fired yet
        """
        self._task.cancel()
|
997,546 | 8de8eec9837144c7065f261edf19993862aa5db4 | from django.shortcuts import render
from django.views import View
from django.views.generic import ListView, CreateView, UpdateView, DetailView
from app_news.forms import NewsForm
from app_news.models import News
class MainPage(View):
    """Renders the site's landing page."""

    def get(self, request):
        # main.html takes no context; an empty dict is passed through.
        return render(request, 'main.html', {})
class NewsView(ListView):
    """Lists all News objects; the template iterates over `news_data`."""
    model = News
    template_name = 'news_list.html'
    context_object_name = 'news_data'
class CreateNewsView(CreateView):
    """Creates a News item via NewsForm, then redirects to the list page."""
    template_name = 'create_news.html'
    form_class = NewsForm
    success_url = '/news/'
class EditNewsView(UpdateView):
    """Edits an existing News item, then redirects to the list page."""
    model = News
    template_name = 'edit_news.html'
    form_class = NewsForm
    success_url = '/news/'
class NewsAndCommentsView(DetailView):
    """Shows a single News item; the template accesses it as `news_data`."""
    model = News
    template_name = 'news_and_comments.html'
    context_object_name = 'news_data'
|
997,547 | 733c28cba3358ba727be09ce8fb18cadcfc197b9 | #Uses python3
import sys
import queue
def extract_min(H):
    """Pop and return one vertex with the smallest key from bucket-map H.

    H maps a distance value to the FIFO list of vertices queued at that
    distance; empty buckets are deleted so min(H) stays meaningful.
    """
    # min() is a single O(k) scan — the original sorted *all* keys
    # (O(k log k)) just to take the first one.
    min_key = min(H)
    rval = H[min_key].pop(0)
    if not H[min_key]:
        del H[min_key]
    return rval
def change_priority(H, v, d):
    """Queue vertex v under distance bucket d, creating the bucket if new."""
    H.setdefault(d, []).append(v)
def distance(adj, cost, s, t):
    """Return the shortest-path distance from s to t, or -1 if unreachable.

    adj[u] lists u's neighbours; cost[u][i] is the weight of edge
    u -> adj[u][i].  Dijkstra with a binary heap and lazy deletion.

    The original looked weights up with cost[u][adj[u].index(v)], which
    returns the FIRST matching index — with parallel edges u->v every
    duplicate silently reused the first edge's weight.  Iterating
    zip(adj[u], cost[u]) keeps each edge paired with its own weight.
    """
    import heapq

    INF = 10**9  # same "unreachable" sentinel the original used
    dist = [INF] * len(adj)
    dist[s] = 0
    pq = [(0, s)]
    while pq:
        d, u = heapq.heappop(pq)
        if d > dist[u]:
            continue  # stale heap entry: u was already settled cheaper
        for v, w in zip(adj[u], cost[u]):
            nd = d + w
            if nd < dist[v]:
                dist[v] = nd
                heapq.heappush(pq, (nd, v))
    return dist[t] if dist[t] != INF else -1
if __name__ == '__main__':
    # Read the whole problem from stdin: n m, then m edges (a b w), then s t.
    # (The original bound the text to a name called `input`, shadowing the
    # builtin; renamed to avoid that.)
    raw = sys.stdin.read()
    data = list(map(int, raw.split()))
    n, m = data[0:2]
    data = data[2:]
    # Each edge is ((a, b), w), taken column-wise from the flat token list.
    edges = list(zip(zip(data[0:(3 * m):3], data[1:(3 * m):3]), data[2:(3 * m):3]))
    data = data[3 * m:]
    adj = [[] for _ in range(n)]
    cost = [[] for _ in range(n)]
    for ((a, b), w) in edges:
        # Input is 1-indexed; internal representation is 0-indexed.
        adj[a - 1].append(b - 1)
        cost[a - 1].append(w)
    s, t = data[0] - 1, data[1] - 1
    print(distance(adj, cost, s, t))
|
997,548 | d2d3aa164eca70ca693ef7489f4cc2ece1f9cad8 | from hyperopt import Trials, fmin, tpe
from models.model import model
import models.MultivariateRegression as mvr
import numpy as np
import math
class MultiVariateRegression(model):
    """Wraps models.MultivariateRegression: tunes its hyperparameters with
    hyperopt, then stores the fitted model as an evaluatable expression
    string in the variable x."""

    def __init__(self, data, labels):
        # self.model is a Python expression string, not a fitted estimator.
        self.model = self.create_model(data, labels)

    def extract_model_parameters(self, model, best):
        """Map hyperopt's index-valued result back to actual parameter values.

        fmin returns indexes into the candidate lists stored as attributes
        on `model`; this dereferences each index.
        """
        new_dict = {}
        for row in best:
            fields = model.__dict__
            new_dict[row] = fields[row][best[row]]
        return new_dict

    def optimize_model(self, model, evals):
        """Run TPE search over model.space for `evals` trials; return the
        best parameter dict (with indexes resolved to values)."""
        trial = Trials()
        best = fmin(model.create_model, model.space, algo=tpe.suggest, max_evals=evals, trials=trial)
        param = self.extract_model_parameters(model, best)
        return param

    def model_to_string(self, b, c, params):
        """Render intercept b and coefficient matrix c as a polynomial
        expression string in x (only the first params['poly'] terms)."""
        function = str(b)
        c = c.T
        for index in range(c.size):
            if index < params['poly']:
                function += "+( " + str(c[index]) + "*pow(x," + str(index + 1) + "))"
            # elif index < params['poly'] + params['log'] - 1:
            #     function += "+(log(" + str(index - params['poly'] + 2) + ",x)*" + str(c[index]) + ")"
            # else:
            #     function += "+(" + str(c[index]) + "*math.cos(x))"
        return function

    def create_model(self, data, labels):
        """Fit MultivariateRegression on (data, labels) with tuned
        hyperparameters and return it serialized as an expression string."""
        data = data.reshape(-1, 1)
        reg = mvr.MultivariateRegression(data, labels)
        optimal_params = self.optimize_model(reg, 12)
        reg.create_model(optimal_params, True)
        return self.model_to_string(reg.b, reg.coefs, optimal_params)

    def predict(self, data):
        """Evaluate the stored expression for every x in data.

        NOTE(review): eval() on a generated string — acceptable only because
        the string is built internally; never feed it external input.
        """
        predictions = []
        for x in data:
            predictions.append(eval(self.model))
        return predictions
|
997,549 | e5f51b9448337485ac55b946e931e75f0874a061 | # def factors(n):
# return list(set(x for tup in ([i, n//i] for i in range(1, int(n**0.5)+1) if n % i == 0) for x in tup))
# string = input()
# que = int(input())
# cnt = {}
# # print(string)
# for i in set(string):
# cnt[ord(i)-ord('a')+1] = 0
# for j in string:
# cnt[ord(j)-ord('a')+1] += 1
# # print(cnt)
# while que:
# # print('-------------------------------------------------')
# no = int(input())
# fact =factors(no)
# # print(no,fact)
# if no == 0 :
# print('Yes')
# que -= 1
# continue
# # print(fact)
# for i in fact:
# if i in cnt:
# # print('in')
# if cnt[i] >= no/i:
# # print(cnt[i],no,i)
# print('Yes')
# break
# else:
# print('No')
# que -= 1
#!/bin/python3
import sys
from itertools import groupby
# 1-based alphabet positions: 'a' -> 1 ... 'z' -> 26.
alpha = {chr(96 + i): i for i in range(1, 27)}

# d collects every "score" reachable from a maximal run of equal letters:
# a run of k copies of letter c contributes value(c) * j for j = 1..k.
d = set()
s = input().strip()
n = int(input().strip())
for letter, run in groupby(s):
    length = len(list(run))
    if length > 1:
        for multiple in range(length, 0, -1):
            d.add(alpha[letter] * multiple)
    else:
        d.add(alpha[letter])

# Answer each query by simple set membership.
for _ in range(n):
    x = int(input().strip())
    print("Yes" if x in d else "No")
997,550 | 15d819ed435e4fc82bb8e60a08b2f1fec58ccd1c | import logging
import webapp2
import re
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler
from google.appengine.api import mail
from model import Person
import email_helper
class EmailHandler(InboundMailHandler):
    """Processes inbound mail whose subject line is a command.

    Supported commands: ``subscribe <kw...>``, ``unsubscribe <kw...>``,
    ``list``, ``remove``, ``help``.  A reply is sent to the sender with the
    outcome; unknown commands are silently ignored.
    """

    @staticmethod
    def _command_args(subject):
        """Split a subject into word tokens and drop the leading command word.

        Returns the argument list, or None when the subject carried no
        arguments (callers then answer with the help text).  The original
        duplicated this parsing in the subscribe and unsubscribe branches.
        """
        tokens = re.sub(r'\W+', ' ', subject).split(' ')
        if len(tokens) <= 1:
            return None
        return tokens[1:]

    def receive(self, mail_message):
        logging.info("RECV: " + mail_message.sender +
                     " : " + mail_message.subject)

        message = mail.EmailMessage(subject='Info',
                                    sender=email_helper.SENDER)
        message.to = mail_message.sender

        subject = mail_message.subject.lower()
        sender = mail_message.sender
        person = Person.get_by_id(sender)

        if subject.startswith('subscribe'):
            keywords = self._command_args(subject)
            if keywords is None:
                message.body = email_helper.HELP_MESSAGE
            else:
                if person is None:  # First contact: create the record.
                    person = Person(id=sender, email=sender,
                                    keywords=keywords)
                else:
                    keywords.extend(person.keywords)
                    keywords = list(set(keywords))  # To remove duplicates.
                    person.keywords = keywords
                person.put()
                message.body = email_helper.LIST_MESSAGE.format(
                    ' '.join(keywords))
        elif subject.startswith('unsubscribe'):
            keywords = self._command_args(subject)
            if keywords is None or person is None:
                message.body = email_helper.HELP_MESSAGE
            else:
                # Set difference drops the requested keywords.
                keywords = list(set(person.keywords) - set(keywords))
                person.keywords = keywords
                person.put()
                message.body = email_helper.LIST_MESSAGE.format(
                    ' '.join(keywords))
        elif subject.startswith('list'):
            if person is None:  # Unknown sender.
                message.body = email_helper.HELP_MESSAGE
            else:
                message.body = email_helper.LIST_MESSAGE.format(
                    ' '.join(person.keywords))
        elif subject.startswith('remove'):
            if person is None:  # Unknown sender.
                message.body = email_helper.HELP_MESSAGE
            else:
                person.key.delete()
                message.body = email_helper.REMOVE_MESSAGE
        elif subject.startswith('help'):
            message.body = email_helper.HELP_MESSAGE
        else:
            # Unknown command: do not reply at all.
            return

        message.send()
# WSGI entry point: route App Engine inbound-mail requests to EmailHandler.
app = webapp2.WSGIApplication([
    EmailHandler.mapping()
], debug=True)
|
997,551 | 296fabd355d0aed1bb0e31fefe465f37b292b71f | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def hasCycle(self, head: ListNode) -> bool:
        """Detect a cycle in a singly linked list (Floyd's tortoise & hare).

        The slow cursor advances one node per step, the fast cursor two.
        Without a cycle the fast cursor falls off the end; with one, the
        two cursors must eventually land on the same node.
        """
        # An empty or single-node nil-terminated list cannot contain a cycle.
        if head is None or head.next is None:
            return False
        tortoise = head
        hare = head.next
        while tortoise is not hare:
            # Fast cursor reaching the end proves the list is acyclic.
            if hare is None or hare.next is None:
                return False
            tortoise = tortoise.next
            hare = hare.next.next
        return True
997,552 | 018c1e712d40e4235604f2cd525eb4e5bd5c1987 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: "Zing-p"
# Date: 2017/11/14
import numpy as np
import os
from kNN import k_class
def img2vector(filename):
    """Flatten a 32x32 text image into a 1x1024 numpy row vector.

    Reads the first 32 lines of the file and the first 32 characters of
    each line (digit characters), filling the vector row-major.
    """
    vector = np.zeros((1, 1024))
    # "with" guarantees the handle is closed even if a line is malformed
    # (the original leaked the handle on any exception before f.close()).
    with open(filename, "r") as f:
        for i in range(32):
            line_str = f.readline()
            for j in range(32):
                vector[0, 32*i + j] = int(line_str[j])
    return vector
def hand_writing_class():
    """Classify the text images under testDigits/ with kNN (k=3) against
    reference vectors built from trainingDigits/, then print the error
    count and error rate."""
    # Read the files under trainingDigits, parse each filename to get the
    # class label, and append it to hw_labels.
    hw_labels = []
    training_file_list = os.listdir("trainingDigits")
    m = len(training_file_list)
    training_mat = np.zeros((m, 1024))
    for i in range(m):
        file_name = training_file_list[i]
        # Filenames look like "<digit>_<sample>.txt": the class is the digit.
        file_prefix = file_name.split(".")[0]
        class_num_str = file_prefix.split("_")[0]
        hw_labels.append(class_num_str)
        # Build the raw reference (training) matrix, one 1x1024 row per image.
        training_mat[i, :] = img2vector("trainingDigits/{}".format(file_name))

    test_file_list = os.listdir("testDigits")
    error_count = 0.0
    m_test = len(test_file_list)
    for i in range(m_test):
        file_name = test_file_list[i]
        file_prefix = file_name.split(".")[0]
        class_num_str = file_prefix.split("_")[0]
        vector_test = img2vector("testDigits/{}".format(file_name))
        # k_class (from the kNN module) presumably returns the majority
        # label among the 3 nearest training vectors — confirm in kNN.
        result = k_class(vector_test, training_mat, hw_labels, 3)
        # print("result is {}, in fact is {}".format(result, class_num_str))
        if result != class_num_str:
            error_count += 1.0

    print("error count:", error_count)
    print("error rate:", error_count / float(m_test))
# Run the digit classifier end-to-end when executed as a script.
if __name__ == '__main__':
    hand_writing_class()
|
997,553 | 70f332b82df545d37200da3f1b82646c5b8bd9a2 | import pyperclip
def _encode(word):
    """Return the 1-based alphabet positions of word's letters, concatenated.

    Only meaningful for ASCII letters: each character is lowercased and
    mapped via ord(c) - 96 ('a' -> 1 ... 'z' -> 26).
    """
    return "".join(str(ord(char) - 96) for char in word.lower())


def main():
    """Prompt for words forever; print each word's letter-position code
    and mirror it to the clipboard."""
    while True:
        word = input("Enter word: \n")
        output = _encode(word)
        pyperclip.copy(output)
        print(output)
# Start the interactive encode loop when run as a script.
if __name__ == "__main__":
    main()
|
997,554 | f2aa09f3fe8fbc4b877da0f6e1a9921b44702063 | # coding: utf-8
"""
OANDA v20 REST API
The full OANDA v20 REST API Specification. This specification defines how to interact with v20 Accounts, Trades, Orders, Pricing and more. To authenticate use the string 'Bearer ' followed by the token which can be obtained at https://www.oanda.com/demo-account/tpa/personal_token # noqa: E501
OpenAPI spec version: 3.0.23
Contact: api@oanda.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from oanda.models.client_extensions import ClientExtensions # noqa: F401,E501
from oanda.models.market_order_delayed_trade_close import MarketOrderDelayedTradeClose # noqa: F401,E501
from oanda.models.market_order_margin_closeout import MarketOrderMarginCloseout # noqa: F401,E501
from oanda.models.market_order_position_closeout import MarketOrderPositionCloseout # noqa: F401,E501
from oanda.models.market_order_trade_close import MarketOrderTradeClose # noqa: F401,E501
from oanda.models.stop_loss_details import StopLossDetails # noqa: F401,E501
from oanda.models.take_profit_details import TakeProfitDetails # noqa: F401,E501
from oanda.models.trailing_stop_loss_details import TrailingStopLossDetails # noqa: F401,E501
class MarketOrderRejectTransaction(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'str',
'time': 'str',
'user_id': 'int',
'account_id': 'str',
'batch_id': 'str',
'request_id': 'str',
'type': 'str',
'instrument': 'str',
'units': 'str',
'time_in_force': 'str',
'price_bound': 'str',
'position_fill': 'str',
'trade_close': 'MarketOrderTradeClose',
'long_position_closeout': 'MarketOrderPositionCloseout',
'short_position_closeout': 'MarketOrderPositionCloseout',
'margin_closeout': 'MarketOrderMarginCloseout',
'delayed_trade_close': 'MarketOrderDelayedTradeClose',
'reason': 'str',
'client_extensions': 'ClientExtensions',
'take_profit_on_fill': 'TakeProfitDetails',
'stop_loss_on_fill': 'StopLossDetails',
'trailing_stop_loss_on_fill': 'TrailingStopLossDetails',
'trade_client_extensions': 'ClientExtensions',
'reject_reason': 'str'
}
attribute_map = {
'id': 'id',
'time': 'time',
'user_id': 'userID',
'account_id': 'AccountID',
'batch_id': 'batchID',
'request_id': 'requestID',
'type': 'type',
'instrument': 'instrument',
'units': 'units',
'time_in_force': 'timeInForce',
'price_bound': 'priceBound',
'position_fill': 'positionFill',
'trade_close': 'tradeClose',
'long_position_closeout': 'longPositionCloseout',
'short_position_closeout': 'shortPositionCloseout',
'margin_closeout': 'marginCloseout',
'delayed_trade_close': 'delayedTradeClose',
'reason': 'reason',
'client_extensions': 'clientExtensions',
'take_profit_on_fill': 'takeProfitOnFill',
'stop_loss_on_fill': 'stopLossOnFill',
'trailing_stop_loss_on_fill': 'trailingStopLossOnFill',
'trade_client_extensions': 'tradeClientExtensions',
'reject_reason': 'rejectReason'
}
def __init__(self, id=None, time=None, user_id=None, account_id=None, batch_id=None, request_id=None, type=None, instrument=None, units=None, time_in_force=None, price_bound=None, position_fill=None, trade_close=None, long_position_closeout=None, short_position_closeout=None, margin_closeout=None, delayed_trade_close=None, reason=None, client_extensions=None, take_profit_on_fill=None, stop_loss_on_fill=None, trailing_stop_loss_on_fill=None, trade_client_extensions=None, reject_reason=None): # noqa: E501
"""MarketOrderRejectTransaction - a model defined in Swagger""" # noqa: E501
self._id = None
self._time = None
self._user_id = None
self._account_id = None
self._batch_id = None
self._request_id = None
self._type = None
self._instrument = None
self._units = None
self._time_in_force = None
self._price_bound = None
self._position_fill = None
self._trade_close = None
self._long_position_closeout = None
self._short_position_closeout = None
self._margin_closeout = None
self._delayed_trade_close = None
self._reason = None
self._client_extensions = None
self._take_profit_on_fill = None
self._stop_loss_on_fill = None
self._trailing_stop_loss_on_fill = None
self._trade_client_extensions = None
self._reject_reason = None
self.discriminator = None
if id is not None:
self.id = id
if time is not None:
self.time = time
if user_id is not None:
self.user_id = user_id
if account_id is not None:
self.account_id = account_id
if batch_id is not None:
self.batch_id = batch_id
if request_id is not None:
self.request_id = request_id
if type is not None:
self.type = type
if instrument is not None:
self.instrument = instrument
if units is not None:
self.units = units
if time_in_force is not None:
self.time_in_force = time_in_force
if price_bound is not None:
self.price_bound = price_bound
if position_fill is not None:
self.position_fill = position_fill
if trade_close is not None:
self.trade_close = trade_close
if long_position_closeout is not None:
self.long_position_closeout = long_position_closeout
if short_position_closeout is not None:
self.short_position_closeout = short_position_closeout
if margin_closeout is not None:
self.margin_closeout = margin_closeout
if delayed_trade_close is not None:
self.delayed_trade_close = delayed_trade_close
if reason is not None:
self.reason = reason
if client_extensions is not None:
self.client_extensions = client_extensions
if take_profit_on_fill is not None:
self.take_profit_on_fill = take_profit_on_fill
if stop_loss_on_fill is not None:
self.stop_loss_on_fill = stop_loss_on_fill
if trailing_stop_loss_on_fill is not None:
self.trailing_stop_loss_on_fill = trailing_stop_loss_on_fill
if trade_client_extensions is not None:
self.trade_client_extensions = trade_client_extensions
if reject_reason is not None:
self.reject_reason = reject_reason
@property
def id(self):
"""Gets the id of this MarketOrderRejectTransaction. # noqa: E501
The Transaction's Identifier. # noqa: E501
:return: The id of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this MarketOrderRejectTransaction.
The Transaction's Identifier. # noqa: E501
:param id: The id of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._id = id
@property
def time(self):
"""Gets the time of this MarketOrderRejectTransaction. # noqa: E501
The date/time when the Transaction was created. # noqa: E501
:return: The time of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._time
@time.setter
def time(self, time):
"""Sets the time of this MarketOrderRejectTransaction.
The date/time when the Transaction was created. # noqa: E501
:param time: The time of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._time = time
@property
def user_id(self):
"""Gets the user_id of this MarketOrderRejectTransaction. # noqa: E501
The ID of the user that initiated the creation of the Transaction. # noqa: E501
:return: The user_id of this MarketOrderRejectTransaction. # noqa: E501
:rtype: int
"""
return self._user_id
@user_id.setter
def user_id(self, user_id):
"""Sets the user_id of this MarketOrderRejectTransaction.
The ID of the user that initiated the creation of the Transaction. # noqa: E501
:param user_id: The user_id of this MarketOrderRejectTransaction. # noqa: E501
:type: int
"""
self._user_id = user_id
@property
def account_id(self):
"""Gets the account_id of this MarketOrderRejectTransaction. # noqa: E501
The ID of the Account the Transaction was created for. # noqa: E501
:return: The account_id of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._account_id
@account_id.setter
def account_id(self, account_id):
"""Sets the account_id of this MarketOrderRejectTransaction.
The ID of the Account the Transaction was created for. # noqa: E501
:param account_id: The account_id of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._account_id = account_id
@property
def batch_id(self):
"""Gets the batch_id of this MarketOrderRejectTransaction. # noqa: E501
The ID of the \"batch\" that the Transaction belongs to. Transactions in the same batch are applied to the Account simultaneously. # noqa: E501
:return: The batch_id of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._batch_id
@batch_id.setter
def batch_id(self, batch_id):
"""Sets the batch_id of this MarketOrderRejectTransaction.
The ID of the \"batch\" that the Transaction belongs to. Transactions in the same batch are applied to the Account simultaneously. # noqa: E501
:param batch_id: The batch_id of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._batch_id = batch_id
@property
def request_id(self):
"""Gets the request_id of this MarketOrderRejectTransaction. # noqa: E501
The Request ID of the request which generated the transaction. # noqa: E501
:return: The request_id of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._request_id
@request_id.setter
def request_id(self, request_id):
"""Sets the request_id of this MarketOrderRejectTransaction.
The Request ID of the request which generated the transaction. # noqa: E501
:param request_id: The request_id of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._request_id = request_id
@property
def type(self):
"""Gets the type of this MarketOrderRejectTransaction. # noqa: E501
The Type of the Transaction. Always set to \"MARKET_ORDER_REJECT\" in a MarketOrderRejectTransaction. # noqa: E501
:return: The type of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this MarketOrderRejectTransaction.
The Type of the Transaction. Always set to \"MARKET_ORDER_REJECT\" in a MarketOrderRejectTransaction. # noqa: E501
:param type: The type of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
allowed_values = ["CREATE", "CLOSE", "REOPEN", "CLIENT_CONFIGURE", "CLIENT_CONFIGURE_REJECT", "TRANSFER_FUNDS", "TRANSFER_FUNDS_REJECT", "MARKET_ORDER", "MARKET_ORDER_REJECT", "FIXED_PRICE_ORDER", "LIMIT_ORDER", "LIMIT_ORDER_REJECT", "STOP_ORDER", "STOP_ORDER_REJECT", "MARKET_IF_TOUCHED_ORDER", "MARKET_IF_TOUCHED_ORDER_REJECT", "TAKE_PROFIT_ORDER", "TAKE_PROFIT_ORDER_REJECT", "STOP_LOSS_ORDER", "STOP_LOSS_ORDER_REJECT", "TRAILING_STOP_LOSS_ORDER", "TRAILING_STOP_LOSS_ORDER_REJECT", "ORDER_FILL", "ORDER_CANCEL", "ORDER_CANCEL_REJECT", "ORDER_CLIENT_EXTENSIONS_MODIFY", "ORDER_CLIENT_EXTENSIONS_MODIFY_REJECT", "TRADE_CLIENT_EXTENSIONS_MODIFY", "TRADE_CLIENT_EXTENSIONS_MODIFY_REJECT", "MARGIN_CALL_ENTER", "MARGIN_CALL_EXTEND", "MARGIN_CALL_EXIT", "DELAYED_TRADE_CLOSURE", "DAILY_FINANCING", "RESET_RESETTABLE_PL"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
)
self._type = type
@property
def instrument(self):
"""Gets the instrument of this MarketOrderRejectTransaction. # noqa: E501
The Market Order's Instrument. # noqa: E501
:return: The instrument of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._instrument
@instrument.setter
def instrument(self, instrument):
"""Sets the instrument of this MarketOrderRejectTransaction.
The Market Order's Instrument. # noqa: E501
:param instrument: The instrument of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._instrument = instrument
@property
def units(self):
"""Gets the units of this MarketOrderRejectTransaction. # noqa: E501
The quantity requested to be filled by the Market Order. A posititive number of units results in a long Order, and a negative number of units results in a short Order. # noqa: E501
:return: The units of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._units
@units.setter
def units(self, units):
"""Sets the units of this MarketOrderRejectTransaction.
The quantity requested to be filled by the Market Order. A posititive number of units results in a long Order, and a negative number of units results in a short Order. # noqa: E501
:param units: The units of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._units = units
@property
def time_in_force(self):
"""Gets the time_in_force of this MarketOrderRejectTransaction. # noqa: E501
The time-in-force requested for the Market Order. Restricted to FOK or IOC for a MarketOrder. # noqa: E501
:return: The time_in_force of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._time_in_force
@time_in_force.setter
def time_in_force(self, time_in_force):
"""Sets the time_in_force of this MarketOrderRejectTransaction.
The time-in-force requested for the Market Order. Restricted to FOK or IOC for a MarketOrder. # noqa: E501
:param time_in_force: The time_in_force of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
allowed_values = ["GTC", "GTD", "GFD", "FOK", "IOC"] # noqa: E501
if time_in_force not in allowed_values:
raise ValueError(
"Invalid value for `time_in_force` ({0}), must be one of {1}" # noqa: E501
.format(time_in_force, allowed_values)
)
self._time_in_force = time_in_force
@property
def price_bound(self):
"""Gets the price_bound of this MarketOrderRejectTransaction. # noqa: E501
The worst price that the client is willing to have the Market Order filled at. # noqa: E501
:return: The price_bound of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._price_bound
@price_bound.setter
def price_bound(self, price_bound):
"""Sets the price_bound of this MarketOrderRejectTransaction.
The worst price that the client is willing to have the Market Order filled at. # noqa: E501
:param price_bound: The price_bound of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
self._price_bound = price_bound
@property
def position_fill(self):
"""Gets the position_fill of this MarketOrderRejectTransaction. # noqa: E501
Specification of how Positions in the Account are modified when the Order is filled. # noqa: E501
:return: The position_fill of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._position_fill
@position_fill.setter
def position_fill(self, position_fill):
    """Sets the position_fill of this MarketOrderRejectTransaction.

    Specification of how Positions in the Account are modified when the
    Order is filled.

    :param position_fill: The position_fill of this MarketOrderRejectTransaction.  # noqa: E501
    :type: str
    :raises ValueError: when the value is not one of the allowed codes
    """
    allowed_values = ["OPEN_ONLY", "REDUCE_FIRST", "REDUCE_ONLY", "DEFAULT"]  # noqa: E501
    # Accept only members of the closed value set; anything else is rejected.
    if position_fill in allowed_values:
        self._position_fill = position_fill
    else:
        raise ValueError(
            "Invalid value for `position_fill` ({0}), must be one of {1}"  # noqa: E501
            .format(position_fill, allowed_values)
        )
@property
def trade_close(self):
"""Gets the trade_close of this MarketOrderRejectTransaction. # noqa: E501
:return: The trade_close of this MarketOrderRejectTransaction. # noqa: E501
:rtype: MarketOrderTradeClose
"""
return self._trade_close
@trade_close.setter
def trade_close(self, trade_close):
"""Sets the trade_close of this MarketOrderRejectTransaction.
:param trade_close: The trade_close of this MarketOrderRejectTransaction. # noqa: E501
:type: MarketOrderTradeClose
"""
self._trade_close = trade_close
@property
def long_position_closeout(self):
"""Gets the long_position_closeout of this MarketOrderRejectTransaction. # noqa: E501
:return: The long_position_closeout of this MarketOrderRejectTransaction. # noqa: E501
:rtype: MarketOrderPositionCloseout
"""
return self._long_position_closeout
@long_position_closeout.setter
def long_position_closeout(self, long_position_closeout):
"""Sets the long_position_closeout of this MarketOrderRejectTransaction.
:param long_position_closeout: The long_position_closeout of this MarketOrderRejectTransaction. # noqa: E501
:type: MarketOrderPositionCloseout
"""
self._long_position_closeout = long_position_closeout
@property
def short_position_closeout(self):
"""Gets the short_position_closeout of this MarketOrderRejectTransaction. # noqa: E501
:return: The short_position_closeout of this MarketOrderRejectTransaction. # noqa: E501
:rtype: MarketOrderPositionCloseout
"""
return self._short_position_closeout
@short_position_closeout.setter
def short_position_closeout(self, short_position_closeout):
"""Sets the short_position_closeout of this MarketOrderRejectTransaction.
:param short_position_closeout: The short_position_closeout of this MarketOrderRejectTransaction. # noqa: E501
:type: MarketOrderPositionCloseout
"""
self._short_position_closeout = short_position_closeout
@property
def margin_closeout(self):
"""Gets the margin_closeout of this MarketOrderRejectTransaction. # noqa: E501
:return: The margin_closeout of this MarketOrderRejectTransaction. # noqa: E501
:rtype: MarketOrderMarginCloseout
"""
return self._margin_closeout
@margin_closeout.setter
def margin_closeout(self, margin_closeout):
"""Sets the margin_closeout of this MarketOrderRejectTransaction.
:param margin_closeout: The margin_closeout of this MarketOrderRejectTransaction. # noqa: E501
:type: MarketOrderMarginCloseout
"""
self._margin_closeout = margin_closeout
@property
def delayed_trade_close(self):
"""Gets the delayed_trade_close of this MarketOrderRejectTransaction. # noqa: E501
:return: The delayed_trade_close of this MarketOrderRejectTransaction. # noqa: E501
:rtype: MarketOrderDelayedTradeClose
"""
return self._delayed_trade_close
@delayed_trade_close.setter
def delayed_trade_close(self, delayed_trade_close):
"""Sets the delayed_trade_close of this MarketOrderRejectTransaction.
:param delayed_trade_close: The delayed_trade_close of this MarketOrderRejectTransaction. # noqa: E501
:type: MarketOrderDelayedTradeClose
"""
self._delayed_trade_close = delayed_trade_close
@property
def reason(self):
"""Gets the reason of this MarketOrderRejectTransaction. # noqa: E501
The reason that the Market Order was created # noqa: E501
:return: The reason of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._reason
@reason.setter
def reason(self, reason):
    """Sets the reason of this MarketOrderRejectTransaction.

    The reason that the Market Order was created.

    :param reason: The reason of this MarketOrderRejectTransaction.  # noqa: E501
    :type: str
    :raises ValueError: when the value is not one of the allowed codes
    """
    # Closed set of creation reasons; anything else raises ValueError below.
    allowed_values = ["CLIENT_ORDER", "TRADE_CLOSE", "POSITION_CLOSEOUT", "MARGIN_CLOSEOUT", "DELAYED_TRADE_CLOSE"]  # noqa: E501
    if reason not in allowed_values:
        raise ValueError(
            "Invalid value for `reason` ({0}), must be one of {1}"  # noqa: E501
            .format(reason, allowed_values)
        )
    self._reason = reason
@property
def client_extensions(self):
"""Gets the client_extensions of this MarketOrderRejectTransaction. # noqa: E501
:return: The client_extensions of this MarketOrderRejectTransaction. # noqa: E501
:rtype: ClientExtensions
"""
return self._client_extensions
@client_extensions.setter
def client_extensions(self, client_extensions):
"""Sets the client_extensions of this MarketOrderRejectTransaction.
:param client_extensions: The client_extensions of this MarketOrderRejectTransaction. # noqa: E501
:type: ClientExtensions
"""
self._client_extensions = client_extensions
@property
def take_profit_on_fill(self):
"""Gets the take_profit_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:return: The take_profit_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:rtype: TakeProfitDetails
"""
return self._take_profit_on_fill
@take_profit_on_fill.setter
def take_profit_on_fill(self, take_profit_on_fill):
"""Sets the take_profit_on_fill of this MarketOrderRejectTransaction.
:param take_profit_on_fill: The take_profit_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:type: TakeProfitDetails
"""
self._take_profit_on_fill = take_profit_on_fill
@property
def stop_loss_on_fill(self):
"""Gets the stop_loss_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:return: The stop_loss_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:rtype: StopLossDetails
"""
return self._stop_loss_on_fill
@stop_loss_on_fill.setter
def stop_loss_on_fill(self, stop_loss_on_fill):
"""Sets the stop_loss_on_fill of this MarketOrderRejectTransaction.
:param stop_loss_on_fill: The stop_loss_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:type: StopLossDetails
"""
self._stop_loss_on_fill = stop_loss_on_fill
@property
def trailing_stop_loss_on_fill(self):
"""Gets the trailing_stop_loss_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:return: The trailing_stop_loss_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:rtype: TrailingStopLossDetails
"""
return self._trailing_stop_loss_on_fill
@trailing_stop_loss_on_fill.setter
def trailing_stop_loss_on_fill(self, trailing_stop_loss_on_fill):
"""Sets the trailing_stop_loss_on_fill of this MarketOrderRejectTransaction.
:param trailing_stop_loss_on_fill: The trailing_stop_loss_on_fill of this MarketOrderRejectTransaction. # noqa: E501
:type: TrailingStopLossDetails
"""
self._trailing_stop_loss_on_fill = trailing_stop_loss_on_fill
@property
def trade_client_extensions(self):
"""Gets the trade_client_extensions of this MarketOrderRejectTransaction. # noqa: E501
:return: The trade_client_extensions of this MarketOrderRejectTransaction. # noqa: E501
:rtype: ClientExtensions
"""
return self._trade_client_extensions
@trade_client_extensions.setter
def trade_client_extensions(self, trade_client_extensions):
"""Sets the trade_client_extensions of this MarketOrderRejectTransaction.
:param trade_client_extensions: The trade_client_extensions of this MarketOrderRejectTransaction. # noqa: E501
:type: ClientExtensions
"""
self._trade_client_extensions = trade_client_extensions
@property
def reject_reason(self):
"""Gets the reject_reason of this MarketOrderRejectTransaction. # noqa: E501
The reason that the Reject Transaction was created # noqa: E501
:return: The reject_reason of this MarketOrderRejectTransaction. # noqa: E501
:rtype: str
"""
return self._reject_reason
@reject_reason.setter
def reject_reason(self, reject_reason):
"""Sets the reject_reason of this MarketOrderRejectTransaction.
The reason that the Reject Transaction was created # noqa: E501
:param reject_reason: The reject_reason of this MarketOrderRejectTransaction. # noqa: E501
:type: str
"""
allowed_values = ["INTERNAL_SERVER_ERROR", "INSTRUMENT_PRICE_UNKNOWN", "ACCOUNT_NOT_ACTIVE", "ACCOUNT_LOCKED", "ACCOUNT_ORDER_CREATION_LOCKED", "ACCOUNT_CONFIGURATION_LOCKED", "ACCOUNT_DEPOSIT_LOCKED", "ACCOUNT_WITHDRAWAL_LOCKED", "ACCOUNT_ORDER_CANCEL_LOCKED", "INSTRUMENT_NOT_TRADEABLE", "PENDING_ORDERS_ALLOWED_EXCEEDED", "ORDER_ID_UNSPECIFIED", "ORDER_DOESNT_EXIST", "ORDER_IDENTIFIER_INCONSISTENCY", "TRADE_ID_UNSPECIFIED", "TRADE_DOESNT_EXIST", "TRADE_IDENTIFIER_INCONSISTENCY", "INSUFFICIENT_MARGIN", "INSTRUMENT_MISSING", "INSTRUMENT_UNKNOWN", "UNITS_MISSING", "UNITS_INVALID", "UNITS_PRECISION_EXCEEDED", "UNITS_LIMIT_EXCEEDED", "UNITS_MIMIMUM_NOT_MET", "PRICE_MISSING", "PRICE_INVALID", "PRICE_PRECISION_EXCEEDED", "PRICE_DISTANCE_MISSING", "PRICE_DISTANCE_INVALID", "PRICE_DISTANCE_PRECISION_EXCEEDED", "PRICE_DISTANCE_MAXIMUM_EXCEEDED", "PRICE_DISTANCE_MINIMUM_NOT_MET", "TIME_IN_FORCE_MISSING", "TIME_IN_FORCE_INVALID", "TIME_IN_FORCE_GTD_TIMESTAMP_MISSING", "TIME_IN_FORCE_GTD_TIMESTAMP_IN_PAST", "PRICE_BOUND_INVALID", "PRICE_BOUND_PRECISION_EXCEEDED", "ORDERS_ON_FILL_DUPLICATE_CLIENT_ORDER_IDS", "TRADE_ON_FILL_CLIENT_EXTENSIONS_NOT_SUPPORTED", "CLIENT_ORDER_ID_INVALID", "CLIENT_ORDER_ID_ALREADY_EXISTS", "CLIENT_ORDER_TAG_INVALID", "CLIENT_ORDER_COMMENT_INVALID", "CLIENT_TRADE_ID_INVALID", "CLIENT_TRADE_ID_ALREADY_EXISTS", "CLIENT_TRADE_TAG_INVALID", "CLIENT_TRADE_COMMENT_INVALID", "ORDER_FILL_POSITION_ACTION_MISSING", "ORDER_FILL_POSITION_ACTION_INVALID", "TRIGGER_CONDITION_MISSING", "TRIGGER_CONDITION_INVALID", "ORDER_PARTIAL_FILL_OPTION_MISSING", "ORDER_PARTIAL_FILL_OPTION_INVALID", "INVALID_REISSUE_IMMEDIATE_PARTIAL_FILL", "TAKE_PROFIT_ORDER_ALREADY_EXISTS", "TAKE_PROFIT_ON_FILL_PRICE_MISSING", "TAKE_PROFIT_ON_FILL_PRICE_INVALID", "TAKE_PROFIT_ON_FILL_PRICE_PRECISION_EXCEEDED", "TAKE_PROFIT_ON_FILL_TIME_IN_FORCE_MISSING", "TAKE_PROFIT_ON_FILL_TIME_IN_FORCE_INVALID", "TAKE_PROFIT_ON_FILL_GTD_TIMESTAMP_MISSING", "TAKE_PROFIT_ON_FILL_GTD_TIMESTAMP_IN_PAST", 
"TAKE_PROFIT_ON_FILL_CLIENT_ORDER_ID_INVALID", "TAKE_PROFIT_ON_FILL_CLIENT_ORDER_TAG_INVALID", "TAKE_PROFIT_ON_FILL_CLIENT_ORDER_COMMENT_INVALID", "TAKE_PROFIT_ON_FILL_TRIGGER_CONDITION_MISSING", "TAKE_PROFIT_ON_FILL_TRIGGER_CONDITION_INVALID", "STOP_LOSS_ORDER_ALREADY_EXISTS", "STOP_LOSS_ORDER_GUARANTEED_REQUIRED", "STOP_LOSS_ORDER_GUARANTEED_PRICE_WITHIN_SPREAD", "STOP_LOSS_ORDER_GUARANTEED_NOT_ALLOWED", "STOP_LOSS_ORDER_GUARANTEED_HALTED_CREATE_VIOLATION", "STOP_LOSS_ORDER_GUARANTEED_HALTED_TIGHTEN_VIOLATION", "STOP_LOSS_ORDER_GUARANTEED_HEDGING_NOT_ALLOWED", "STOP_LOSS_ORDER_GUARANTEED_MINIMUM_DISTANCE_NOT_MET", "STOP_LOSS_ORDER_NOT_CANCELABLE", "STOP_LOSS_ORDER_NOT_REPLACEABLE", "STOP_LOSS_ORDER_GUARANTEED_LEVEL_RESTRICTION_EXCEEDED", "STOP_LOSS_ORDER_PRICE_AND_DISTANCE_BOTH_SPECIFIED", "STOP_LOSS_ORDER_PRICE_AND_DISTANCE_BOTH_MISSING", "STOP_LOSS_ON_FILL_REQUIRED_FOR_PENDING_ORDER", "STOP_LOSS_ON_FILL_GUARANTEED_NOT_ALLOWED", "STOP_LOSS_ON_FILL_GUARANTEED_REQUIRED", "STOP_LOSS_ON_FILL_PRICE_MISSING", "STOP_LOSS_ON_FILL_PRICE_INVALID", "STOP_LOSS_ON_FILL_PRICE_PRECISION_EXCEEDED", "STOP_LOSS_ON_FILL_GUARANTEED_MINIMUM_DISTANCE_NOT_MET", "STOP_LOSS_ON_FILL_GUARANTEED_LEVEL_RESTRICTION_EXCEEDED", "STOP_LOSS_ON_FILL_DISTANCE_INVALID", "STOP_LOSS_ON_FILL_PRICE_DISTANCE_MAXIMUM_EXCEEDED", "STOP_LOSS_ON_FILL_DISTANCE_PRECISION_EXCEEDED", "STOP_LOSS_ON_FILL_PRICE_AND_DISTANCE_BOTH_SPECIFIED", "STOP_LOSS_ON_FILL_PRICE_AND_DISTANCE_BOTH_MISSING", "STOP_LOSS_ON_FILL_TIME_IN_FORCE_MISSING", "STOP_LOSS_ON_FILL_TIME_IN_FORCE_INVALID", "STOP_LOSS_ON_FILL_GTD_TIMESTAMP_MISSING", "STOP_LOSS_ON_FILL_GTD_TIMESTAMP_IN_PAST", "STOP_LOSS_ON_FILL_CLIENT_ORDER_ID_INVALID", "STOP_LOSS_ON_FILL_CLIENT_ORDER_TAG_INVALID", "STOP_LOSS_ON_FILL_CLIENT_ORDER_COMMENT_INVALID", "STOP_LOSS_ON_FILL_TRIGGER_CONDITION_MISSING", "STOP_LOSS_ON_FILL_TRIGGER_CONDITION_INVALID", "TRAILING_STOP_LOSS_ORDER_ALREADY_EXISTS", "TRAILING_STOP_LOSS_ON_FILL_PRICE_DISTANCE_MISSING", 
"TRAILING_STOP_LOSS_ON_FILL_PRICE_DISTANCE_INVALID", "TRAILING_STOP_LOSS_ON_FILL_PRICE_DISTANCE_PRECISION_EXCEEDED", "TRAILING_STOP_LOSS_ON_FILL_PRICE_DISTANCE_MAXIMUM_EXCEEDED", "TRAILING_STOP_LOSS_ON_FILL_PRICE_DISTANCE_MINIMUM_NOT_MET", "TRAILING_STOP_LOSS_ON_FILL_TIME_IN_FORCE_MISSING", "TRAILING_STOP_LOSS_ON_FILL_TIME_IN_FORCE_INVALID", "TRAILING_STOP_LOSS_ON_FILL_GTD_TIMESTAMP_MISSING", "TRAILING_STOP_LOSS_ON_FILL_GTD_TIMESTAMP_IN_PAST", "TRAILING_STOP_LOSS_ON_FILL_CLIENT_ORDER_ID_INVALID", "TRAILING_STOP_LOSS_ON_FILL_CLIENT_ORDER_TAG_INVALID", "TRAILING_STOP_LOSS_ON_FILL_CLIENT_ORDER_COMMENT_INVALID", "TRAILING_STOP_LOSS_ORDERS_NOT_SUPPORTED", "TRAILING_STOP_LOSS_ON_FILL_TRIGGER_CONDITION_MISSING", "TRAILING_STOP_LOSS_ON_FILL_TRIGGER_CONDITION_INVALID", "CLOSE_TRADE_TYPE_MISSING", "CLOSE_TRADE_PARTIAL_UNITS_MISSING", "CLOSE_TRADE_UNITS_EXCEED_TRADE_SIZE", "CLOSEOUT_POSITION_DOESNT_EXIST", "CLOSEOUT_POSITION_INCOMPLETE_SPECIFICATION", "CLOSEOUT_POSITION_UNITS_EXCEED_POSITION_SIZE", "CLOSEOUT_POSITION_REJECT", "CLOSEOUT_POSITION_PARTIAL_UNITS_MISSING", "MARKUP_GROUP_ID_INVALID", "POSITION_AGGREGATION_MODE_INVALID", "ADMIN_CONFIGURE_DATA_MISSING", "MARGIN_RATE_INVALID", "MARGIN_RATE_WOULD_TRIGGER_CLOSEOUT", "ALIAS_INVALID", "CLIENT_CONFIGURE_DATA_MISSING", "MARGIN_RATE_WOULD_TRIGGER_MARGIN_CALL", "AMOUNT_INVALID", "INSUFFICIENT_FUNDS", "AMOUNT_MISSING", "FUNDING_REASON_MISSING", "CLIENT_EXTENSIONS_DATA_MISSING", "REPLACING_ORDER_INVALID", "REPLACING_TRADE_ID_INVALID"] # noqa: E501
if reject_reason not in allowed_values:
raise ValueError(
"Invalid value for `reject_reason` ({0}), must be one of {1}" # noqa: E501
.format(reject_reason, allowed_values)
)
self._reject_reason = reject_reason
def to_dict(self):
    """Return the model's properties as a plain dict.

    Iterates over ``self.swagger_types`` (the attribute-name -> type map
    declared on the class) and recursively converts nested model objects
    (anything exposing ``to_dict``) found directly, inside lists, or
    inside dict values.

    :return: dict mapping attribute names to plain Python values
    """
    result = {}
    # Python 3 dict iteration replaces the former six.iteritems() call,
    # dropping the dependency on the six compatibility shim.
    for attr in self.swagger_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            # Convert model elements; non-model elements pass through.
            result[attr] = [
                x.to_dict() if hasattr(x, "to_dict") else x for x in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            # Convert model values inside dicts; keys are preserved as-is.
            result[attr] = {
                k: v.to_dict() if hasattr(v, "to_dict") else v
                for k, v in value.items()
            }
        else:
            result[attr] = value
    return result
def to_str(self):
    """Return the string representation of the model.

    pprint.pformat gives a readable, deterministic rendering of to_dict().
    """
    return pprint.pformat(self.to_dict())
def __repr__(self):
    """For `print` and `pprint`."""
    # Delegates to to_str() so repr, str and pprint share one rendering.
    return self.to_str()
def __eq__(self, other):
    """Return True if both objects are equal.

    Only instances of MarketOrderRejectTransaction (or subclasses, via
    isinstance) can compare equal; equality is attribute-wise through the
    instance __dict__.
    """
    if not isinstance(other, MarketOrderRejectTransaction):
        return False
    return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Return True if both objects are not equal.

    Explicit negation of __eq__ (Python 3 would derive this automatically;
    kept for generated-code uniformity).
    """
    return not self == other
|
997,555 | a59d807d188c0cab03b8543efb7fb28f8ea233f1 | import pygame
from sys import exit
import numpy as np
import math
def draw_dashed_line(surf, color, start_pos, end_pos, width=1, dash_length=10):
    """Draw a dashed straight line on *surf* from start_pos to end_pos.

    Points are sampled every dash_length pixels along the line; alternating
    pairs of samples become the drawn dash segments, leaving equal gaps.
    """
    sx, sy = start_pos
    ex, ey = end_pos
    step = dash_length

    if sx == ex:
        # Vertical line: walk the y axis only.
        ys = list(range(sy, ey, step if sy < ey else -step))
        xs = [sx] * len(ys)
    elif sy == ey:
        # Horizontal line: walk the x axis only.
        xs = list(range(sx, ex, step if sx < ex else -step))
        ys = [sy] * len(xs)
    else:
        # General case: advance dash_length along the line's direction.
        run = abs(ex - sx)
        rise = abs(ey - sy)
        length = round(math.sqrt(run**2 + rise**2))
        step_x = step * run / length
        step_y = step * rise / length
        xs = list(np.arange(sx, ex, step_x if sx < ex else -step_x))
        ys = list(np.arange(sy, ey, step_y if sy < ey else -step_y))

    # Pair alternating samples: each dash runs between consecutive points.
    odd_points = list(zip(xs[1::2], ys[1::2]))
    even_points = list(zip(xs[0::2], ys[0::2]))
    for (ax, ay), (bx, by) in zip(odd_points, even_points):
        pygame.draw.line(surf, color, (round(ax), round(ay)), (round(bx), round(by)), width)
width = 800
height = 600
pygame.init()
screen = pygame.display.set_mode((width, height), 0, 32)
pygame.display.set_caption("CompGraphics-HW1-Indra-20195118")
# Define the colors we will use in RGB format
WHITE = (255, 255, 255)
BLUE = (0, 0, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
old_pt = np.array([0, 0])
cur_pt = np.array([0, 0])
old_rect_pt = np.array([0, 0])
cur_rect_pt = np.array([0, 0])
screen.fill(WHITE)
clock = pygame.time.Clock()
# Loop until the user clicks the close button.
done = False
pressed = -1
margin = 6
while not done:
time_passed = clock.tick(30)
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
pressed = 1
elif event.type == pygame.MOUSEMOTION:
pressed = 0
elif event.type == pygame.MOUSEBUTTONUP:
pressed = 2
elif event.type == pygame.QUIT:
done = True
else:
pressed = -1
button1, button2, button3 = pygame.mouse.get_pressed()
x, y = pygame.mouse.get_pos()
cur_pt = np.array([x, y])
if old_pt[0] != 0 and old_pt[1] != 0:
draw_dashed_line(screen, RED, old_pt, cur_pt, 1)
if pressed == 1:
if button1 == 1:
pygame.draw.rect(screen, BLUE, (cur_pt[0] - margin, cur_pt[1] - margin, 2 * margin, 2 * margin), 5)
cur_rect_pt = np.array([x,y])
if old_rect_pt[0] == 0 and old_rect_pt[1] == 0:
old_rect_pt = cur_rect_pt
pygame.draw.line(screen, GREEN, (old_rect_pt[0], old_rect_pt[1]),
(cur_rect_pt[0], cur_rect_pt[1]),5)
old_rect_pt = cur_rect_pt
print("mouse x:" + repr(x) + " y:" + repr(y) + " button:" + repr(button1) + " " + repr(button2) + " " + repr(
button3) + " pressed:" + repr(pressed))
old_pt = cur_pt
pygame.display.update()
pygame.quit() |
997,556 | d95b92a1e1042f91f81f08a5122442b12d101abc | from matplotlib.pyplot import figure
from numpy import arange
# Plot a quadratic series with letter tick labels and save it to disk.
x = arange(1, 11, 1)
y = x ** 2
x_labels = list('abcdefghij')
x_ticks = range(len(x_labels))
# 15 x 11.25 inches at 90 dpi -> a 1350 x 1012 px image.
figura = figure(figsize=(15, 11.25), dpi=90)
subplot = figura.add_subplot(1, 1, 1)
# NOTE(review): only y is plotted, so the x axis is implicit positions
# 0..9 — presumably subplot.plot(x, y) was intended; confirm.
subplot.plot(y)
subplot.set_xticks(x_ticks)
subplot.set_xticklabels(x_labels)
figura.savefig('grafico.png')
|
997,557 | 0e390315d0eefad756d23d839e921f710c8f97c6 | from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.test import TestCase
from django.urls import reverse
from .models import CustomField, CustomFieldValue
class CustomFieldTest(TestCase):
    """Exercises CustomField / CustomFieldValue validation and the admin UI."""

    def setUp(self):
        """Create two integer custom fields (one with a default value) and a
        logged-in superuser for the admin tests."""
        custom_field_ct = ContentType.objects.get(
            app_label="custom_field", model="customfield"
        )
        self.custom_field = CustomField.objects.create(
            name="test_field", content_type=custom_field_ct, field_type="i",
        )
        self.user_custom_field = CustomField.objects.create(
            name="test_user_field",
            content_type=custom_field_ct,
            field_type="i",
            default_value=42,
        )
        user = User.objects.create_user("temporary", "temporary@gmail.com", "temporary")
        user.is_staff = True
        user.is_superuser = True
        user.save()
        self.client.login(username="temporary", password="temporary")

    def test_validation(self):
        """An integer custom field accepts numeric strings and rejects text."""
        custom_value = CustomFieldValue.objects.create(
            field=self.custom_field, value="5", object_id=self.custom_field.id,
        )
        custom_value.clean()
        custom_value.save()
        # assertEqual replaces the deprecated assertEquals alias (removed
        # in Python 3.12).
        self.assertEqual(custom_value.value, "5")
        custom_value.value = "fdsf"
        # assertRaises replaces the manual try / fail / except pattern.
        with self.assertRaises(ValidationError):
            custom_value.clean()

    def test_admin(self):
        """The admin change page renders the default value on every GET."""
        change_url = reverse("admin:custom_field_customfield_change", args=[1])
        response = self.client.get(change_url)
        self.assertContains(response, "42")
        response = self.client.get(change_url)
        # Make sure we aren't adding it on each get
        self.assertContains(response, "42")
|
997,558 | 0463cad9fed5c548c694cca5620a009d232e1718 | from keras.callbacks import TensorBoard
# Instantiate a TensorBoard callback writing event files to ./logs.
# NOTE(review): the instance is neither assigned nor passed to
# model.fit(callbacks=[...]), so by itself this statement has no effect.
TensorBoard(
    log_dir='./logs', histogram_freq=0, write_graph=True, write_images=False)
# tensorboard --logdir=path_to_logs
|
997,559 | cc97ce72d35f46880c54f096932a59e3521bbe79 | #a sorted_x1
# Parameter-name legend for wikipediacategory() below —
# each single-letter parameter maps to this caller-side variable:
#   a = sorted_x1
#   b = onewordcategorylist
#   c = categorydict
#   d = sorted_x
#   s = splitted
#   o = order
#   e = twowordcategorylist
#   f = sorted_xc
#   h = capitalcategorylist
import wikipediaapi
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.decomposition import NMF, LatentDirichletAllocation
import operator
import time
def wikipediacategory(a,b,c,d,e,f,h,s,o):
    """Fetch, clean and index Wikipedia categories for three groups of terms.

    For every entry in a (one-word terms), d (two-word terms, whose page
    title joins two tokens) and f (capitalised terms), the matching
    Wikipedia page's categories are fetched, stripped of the "Category:"
    prefix, lower-cased and filtered of maintenance/meta categories, then
    stored in the parallel containers b, e and h respectively. Every new
    category name gets a running integer id in the dict c. Parameter
    letters map to caller variables per the legend comment at the top of
    this file.
    """
    count=0
    wiki_wiki = wikipediaapi.Wikipedia('en')
    # --- group 1: one-word terms (a -> b), with disambiguation handling ---
    for i in range(len(a)):
        page_py = wiki_wiki.page(s[o[a[i][0]]])
        if page_py.exists():
            time.sleep(0.1)  # crude rate limit between API requests
            categories=page_py.categories
            dummylist=list()
            dummiestlist=list()
            for j in categories.keys():
                dummylist.append(j)
            for k in range(len(dummylist)):
                # NOTE(review): lstrip strips a *character set*, not a
                # prefix; this works for "Category:Name" keys only because
                # the ':' stops the strip before the page name — fragile.
                dummylist[k]=dummylist[k].lstrip("Category")
                dummylist[k]= dummylist[k].lstrip(":")
                dummylist[k]=dummylist[k].lower()
                if 'disambiguation' in dummylist[k]:
                    # Resolve the disambiguation through the page links here
                    links=page_py.links
                    dummy=list()
                    for w in links.keys():
                        dummy.append(w)
                    for u in range(len(dummy)):
                        dummy[u]=dummy[u].lower()
                        if 'talk' not in dummy[u] and 'help' not in dummy[u]:
                            # Keep only the first plausible link target.
                            dummiestlist.append(dummy[u])
                            break
                # Drop Wikipedia maintenance / meta categories.
                if 'republics' not in dummylist[k] and 'nations' not in dummylist[k] and 'dispute' not in dummylist[k] and 'subscription' not in dummylist[k] and 'use' not in dummylist[k] and 'vague or ambiguous' not in dummylist[k] and 'article' not in dummylist[k] and 'page' not in dummylist[k] and 'wiki' not in dummylist[k] and 'link' not in dummylist[k] and 'source' not in dummylist[k] and 'cs1' not in dummylist[k]:
                    dummiestlist.append(dummylist[k])
            # Assign a fresh integer id to every first-seen category name.
            for j in range(len(dummiestlist)):
                if dummiestlist[j] not in c.keys():
                    c[dummiestlist[j]]=count
                    count+=1
            b[i]=dummiestlist
    # --- group 2: two-word terms (d -> e); page title joins two tokens ---
    for i in range(len(d)):
        dum = s[o[d[i][0][0]]]+ ' ' +s[o[d[i][0][1]]]
        page_py = wiki_wiki.page(dum)
        if page_py.exists():
            time.sleep(0.1)  # crude rate limit between API requests
            categories=page_py.categories
            dummylist=list()
            dummiestlist=list()
            for j in categories.keys():
                dummylist.append(j)
            for k in range(len(dummylist)):
                dummylist[k]=dummylist[k].lstrip("Category")
                dummylist[k]= dummylist[k].lstrip(":")
                dummylist[k]=dummylist[k].lower()
                if 'republics' not in dummylist[k] and 'nations' not in dummylist[k] and 'dispute' not in dummylist[k] and 'subscription' not in dummylist[k] and 'use' not in dummylist[k] and 'vague or ambiguous' not in dummylist[k] and 'article' not in dummylist[k] and 'page' not in dummylist[k] and 'wiki' not in dummylist[k] and 'link' not in dummylist[k] and 'source' not in dummylist[k] and 'cs1' not in dummylist[k]:
                    dummiestlist.append(dummylist[k])
            for j in range(len(dummiestlist)):
                if dummiestlist[j] not in c.keys():
                    c[dummiestlist[j]]=count
                    count+=1
            e[i]=dummiestlist
    # --- group 3: capitalised terms (f -> h) ---
    for i in range(len(f)):
        page_py = wiki_wiki.page(s[o[f[i][0]]])
        if page_py.exists():
            time.sleep(0.1)  # crude rate limit between API requests
            categories=page_py.categories
            dummylist=list()
            dummiestlist=list()
            for j in categories.keys():
                dummylist.append(j)
            for k in range(len(dummylist)):
                dummylist[k]=dummylist[k].lstrip("Category")
                dummylist[k]= dummylist[k].lstrip(":")
                dummylist[k]=dummylist[k].lower()
                if 'republics' not in dummylist[k] and 'nations' not in dummylist[k] and 'dispute' not in dummylist[k] and 'subscription' not in dummylist[k] and 'use' not in dummylist[k] and 'vague or ambiguous' not in dummylist[k] and 'article' not in dummylist[k] and 'page' not in dummylist[k] and 'wiki' not in dummylist[k] and 'link' not in dummylist[k] and 'source' not in dummylist[k] and 'cs1' not in dummylist[k]:
                    dummiestlist.append(dummylist[k])
            for j in range(len(dummiestlist)):
                if dummiestlist[j] not in c.keys():
                    c[dummiestlist[j]]=count
                    count+=1
            h[i]=dummiestlist
    return
|
997,560 | 80f2f36bbc06e5b4c52b3abb79a7ae39fc5c66bf | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# __author__: Shane
# _datetime_: 2018/6/18
import os
import sys
from config.config import Config
from common.logger import Logger
__app = None  # API client, singleton
__rpc = None  # RPC client, singleton
__block = None  # block client, singleton
__limiter = None  # rate limiter, singleton
app_name = None
def __get_app_name(arguments):
    """Derive the global ``app_name`` from the process argument vector.

    Under gunicorn (``gunicorn ... module:app``) the name is the module part
    of the first ``module:app`` argument; otherwise it is the basename of
    the executed script without its extension.

    Exits the process when no name can be determined, since later setup
    (the log directory layout in particular) depends on it.
    """
    global app_name
    if len(arguments) >= 2 and "gunicorn" in arguments[0]:
        for arg in arguments[1:]:
            if arg.endswith(":app"):
                app_name = arg[:-4]  # strip the trailing ":app"
                break  # first match wins; no need to scan further
    elif len(arguments) >= 1:
        app_name = os.path.splitext(os.path.basename(arguments[0]))[0]
    if app_name is None:
        print("No argument???, can not identify what the program is!")
        print("EXIT NOW!!!")
        # sys.exit raises SystemExit with a non-zero status (a failure,
        # unlike the bare exit() helper which exits with 0 and depends on
        # the site module being loaded).
        sys.exit(1)
def __get_app_logger():
    """Initialize the Logger: resolve the log directory, configure file
    outputs with rotation, and emit probe lines to verify the setup."""
    global app_name
    # Absolute Config.log_dir is used as-is; a relative one is anchored at
    # this file's directory. app_name becomes the per-app subdirectory.
    if Config.log_dir.startswith("/"):
        log_dir = os.path.join(Config.log_dir, app_name)
    else:
        fdir = os.path.dirname(__file__)
        log_dir = os.path.abspath(os.path.join(fdir, Config.log_dir, app_name))
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    # Logging goes to stderr by default; re-setup routes it to files and
    # rotates them according to the configured rules.
    Config.log_level = getattr(Logger, Config.log_level, Logger.INFO)
    Logger.setup(access_log_file=os.path.join(log_dir, "access.log"),
                 access_log_level=Config.log_level,
                 error_log_file=os.path.join(log_dir, "errors.log"),
                 ledger_log_file=os.path.join(log_dir, "ledger.log"),
                 transaction_log_file=os.path.join(log_dir, "transaction.log"),
                 when=Config.log_when,
                 backup_count=Config.log_backup_count,
                 enable_console=Config.enable_console)
    # Adjust the application-level log handler.
    Logger.application_handler()
    # Emit one info and one error line so the configuration can be verified.
    logger = Logger.get_logger(__name__)
    logger.info(message="INFO FROM GET APP LOGGER")
    logger.error(message="ERRS FROM GET APP LOGGER")
# Initialize the log name || initialize the log path || initialize the RPC connection to the full node
__get_app_name(sys.argv)
__get_app_logger()
|
997,561 | b7f61b1c1211a21495c16c1e8c984cb9d800221b | class Parent():
def __init__(self,eyecolor,money):
print "Parent constructor is called !"
self.eye = eyecolor
self.mony = money
class Child(Parent):
    """Subclass showing explicit base-constructor invocation (Python 2)."""
    def __init__(self,eyecolor,money,toys):
        """Initialise the Parent part, then record the toy count."""
        print "Child constructor is called ! "
        # Explicit unbound base call; Parent is an old-style class here, so
        # super() would not be usable without deriving from object.
        Parent.__init__(self,eyecolor,money)
        self.num_of_toys = toys
papa = Parent("blue",50)
print papa.mony
baccha = Child("red",100,12)
print baccha.num_of_toys
print baccha.eye
|
997,562 | cec742347922aa7f05f78006614bbe6156ba9e72 | # Copyright 2012 Midokura Japan KK
from resource_base import ResourceBase
import vendor_media_type
class HostInterfacePort(ResourceBase):
    """REST resource wrapper for a host / interface / port binding.

    Thin accessor layer over the JSON payload held in ``self.dto``:
    getters read fields, fluent setters write them and return ``self``.
    """
    media_type = vendor_media_type.APPLICATION_HOST_INTERFACE_PORT_JSON

    def __init__(self, http, uri, dto):
        super(HostInterfacePort, self).__init__(http, uri, dto)

    def get_host_id(self):
        """Return the id of the host side of the binding."""
        return self.dto['hostId']

    def get_interface_name(self):
        """Return the bound network interface name."""
        return self.dto['interfaceName']

    def get_port_id(self):
        """Return the id of the bound port."""
        return self.dto['portId']

    def port_id(self, port_id):
        """Set the bound port id; returns self for chaining."""
        self.dto['portId'] = port_id
        return self

    def interface_name(self, name):
        """Set the bound interface name; returns self for chaining."""
        self.dto['interfaceName'] = name
        return self
|
997,563 | 97fff9e6c9ced005f764af1d37ac21f1b9094bb2 | #~~~~ Standard calculator ~~~~#
#~~~~~ c. Indya Dodson ~~~~~~#
def add(num1, num2):
    """Return the sum of num1 and num2."""
    total = num1 + num2
    return total
def subtract(num1, num2):
    """Return num1 minus num2."""
    difference = num1 - num2
    return difference
def multiply(num1, num2):
    """Return the product of num1 and num2."""
    product = num1 * num2
    return product
def divide(num1, num2):
    """Return num1 / num2 (true division; ZeroDivisionError when num2 is 0)."""
    quotient = num1 / num2
    return quotient
# Read two integers and print all four basic operations on them.
number1 = int(input("Please enter a number: "))
number2 = int(input("Please enter another number: "))
print("Here's the calculations")
print(f"{number1} + {number2} = {add(number1, number2)}")
print(f"{number1} - {number2} = {subtract(number1, number2)}")
print(f"{number1} * {number2} = {multiply(number1, number2)}")
# int() truncates the true-division result toward zero for display.
print(f"{number1} / {number2} = {int(divide(number1, number2))}")
|
997,564 | e1420cabc9a20908e1cac84220ab20aebbdf01c6 |
"""Makes test cases pass."""
import functools
import inspect
import types
def passit(func):
    """Decorator that turns any exception raised by *func* into a True result,
    so a wrapped test case always "passes"."""
    @functools.wraps(func)
    def _swallowing(*args, **kwargs):
        try:
            result = func(*args, **kwargs)
        # We're not here for the good code, are we?
        except Exception:  # pylint: disable=broad-except
            return True
        return result
    return _swallowing
|
997,565 | 6654c0ab08c3c9e1923453496bcef787a4a690b7 | #!/usr/bin/python3
# Create a numbered batch of empty files named "<i>-<type><ext>".
nFiles = int(input("Enter the number of files: \n"))
fileType = str(input("Enter the file type (eg: main): \n"))
fileExt = str(input("Enter file extension: \n"))
# range(nFiles) creates exactly the requested count (the previous
# range(nFiles + 1) produced one file too many), and the with-block closes
# each handle instead of leaking it.
for i in range(nFiles):
    with open(str(i) + "-" + fileType + fileExt, "w+"):
        pass
print("Files created successfully")
|
997,566 | a064f23093422b2eec9a46f53b10e195ffd9a835 | """
Write a program that prints a message if a variable is less than or equal to 10,
another message if the variable is greater than 10 but less than or equal to 25,
and another message if the variable is greater than 25.
"""
num1 = 130
# Branches are checked in order, so the elif only ever sees values > 10.
if num1 <= 10:
    print("num1 is <= 10")
elif num1 <= 25:
    print("num1 is > 10, but <= 25")
else:
    print("num1 is > 25")
|
997,567 | 230075df276d36e46e6e7ac7e6478744b73cd958 | #!/usr/bin/python
class logger:
    """Writes a line-oriented log file for a list of registered elements.

    Each element is expected to expose a ``header`` attribute (written once
    by open()) and a useful repr (written per sample by go()).
    """

    def __init__(self, fni):
        """Remember the output file name; elements are registered via add()."""
        # Per-instance state. These used to be class attributes, which made
        # ``elems`` a single list shared by every logger instance.
        self.f = None
        self.fn = fni
        self.elems = []

    def add(self, elem):
        """Register an element to be logged."""
        self.elems.append(elem)

    def open(self):
        """Open the output file and write one header token per element."""
        self.f = open(self.fn, "w")
        # self.f.write("ts, ")
        for elem in self.elems:
            self.f.write(" " + elem.header)
        self.f.write("\n")

    def go(self):
        """Append one sample line: the repr of every element, space-separated."""
        # self.f.write(repr(ts) + ", ")
        for elem in self.elems:
            self.f.write(" " + repr(elem))
        self.f.write("\n")

    def close(self):
        """Close the output file."""
        self.f.close()
|
997,568 | 06ad6ee0bc3b96e2d55ca64918a6482fd4f68abd | import os
from src.microservices import FlaskChassis
from src.user.modules import user_blueprint
# Build the service via the shared FlaskChassis factory (name + config file).
microservice = FlaskChassis(service_name="users", config_file="flask-dev.cfg")
app, db = microservice.app, microservice.db
# All user endpoints are served under /api/v1/users.
app.register_blueprint(user_blueprint, url_prefix='/api/v1/users')
|
997,569 | be170881bd27343a606757cb8dea621673d12240 | import argparse
import cProfile, pstats, sys
import logging
from typing import List
logging.basicConfig()
logging.root.setLevel(logging.DEBUG)
"""
leetcode 435. Non-overlapping Intervals
https://leetcode.com/problems/non-overlapping-intervals/
"""
class Solution(object):
    """LeetCode 435: minimum number of intervals to remove so the rest are
    pairwise non-overlapping (intervals that merely touch do not overlap)."""

    def eraseOverlapIntervals(self, intervals: List[List[int]]) -> int:
        """Greedy sweep: sort by right endpoint and keep every interval that
        starts at or after the end of the last kept one; count the rest.

        Sorts ``intervals`` in place.

        :param intervals: list of [start, end] pairs
        :return: number of intervals that must be removed
        """
        intervals.sort(key=lambda pair: pair[1])
        current_end = None
        removed = 0
        for start, end in intervals:
            # Touching endpoints (start == current_end) are NOT an overlap,
            # so the previous ``<=`` overlap test over-counted removals
            # (e.g. [[1,2],[2,3],[3,4],[1,3]] returned 2 instead of 1).
            if current_end is None or start >= current_end:
                current_end = end
            else:
                removed += 1
        return removed
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # arguments ref: https://stackoverflow.com/questions/15301147/python-argparse-default-value-or-specified-value
    parser.add_argument("--intervals", help="intervals", nargs='?', type=str, const="1-2,2-3,3-4,1-3", default="1-2,2-3,3-4,1-3")
    args = parser.parse_args()
    # Parse "a-b,c-d" into [[a, b], [c, d]] (values stay strings here).
    intervals = [x.split("-") for x in args.intervals.split(",")]
    #test
    # NOTE(review): this hard-coded fixture overrides the parsed CLI value
    # above — remove it to make --intervals effective again.
    intervals = [[1,100],[11,22],[1,11],[2,12]]
    print(intervals)
    solution = Solution()
    result = solution.eraseOverlapIntervals(intervals)
    print("result is {}".format(result))
997,570 | bdf57964326f0fb15dd4722e00783a6463a3de0b | from unittest import TestCase
from .bst import BSTNode, BST
class BSTNodeTestCase(TestCase):
    """Checks that BSTNode stores its data and child links verbatim."""

    def setUp(self):
        # A three-node tree: 10 at the root with 5 / 15 as children.
        self.left = BSTNode(data=5)
        self.right = BSTNode(data=15)
        self.root = BSTNode(data=10, left=self.left, right=self.right)

    def test_assigned_variables(self):
        """Leaf nodes have no children; the root links to both leaves."""
        self.assertEqual(self.left.data, 5)
        self.assertEqual(self.left.left, None)
        self.assertEqual(self.left.right, None)
        self.assertEqual(self.right.data, 15)
        self.assertEqual(self.right.left, None)
        self.assertEqual(self.right.right, None)
        self.assertEqual(self.root.data, 10)
        self.assertEqual(self.root.left, self.left)
        self.assertEqual(self.root.right, self.right)
class BSTTestCase(TestCase):
    """End-to-end checks of BST insert, delete and find."""

    def setUp(self):
        self.tree = BST()

    def test_insert_data(self):
        """insert() returns True and places smaller keys left, larger right."""
        success = self.tree.insert(10)
        self.assertEqual(success, True)
        self.assertEqual(self.tree.root.data, 10)
        success = self.tree.insert(5)
        self.assertEqual(success, True)
        self.assertEqual(self.tree.root.left.data, 5)
        success = self.tree.insert(15)
        self.assertEqual(success, True)
        self.assertEqual(self.tree.root.right.data, 15)

    def test_delete_data(self):
        """Deleting an inner node re-links its subtree and removes the key."""
        self.tree.insert(10)
        self.tree.insert(8)
        self.tree.insert(4)
        self.tree.insert(9)
        self.tree.insert(3)
        self.tree.insert(5)
        # NOTE: Expected tree structure below
        #           10
        #        8
        #      4   9
        #     3 5
        self.assertEqual(self.tree.root.data, 10)
        self.assertEqual(self.tree.root.left.data, 8)
        self.assertEqual(self.tree.root.left.left.data, 4)
        self.assertEqual(self.tree.root.left.right.data, 9)
        self.assertEqual(self.tree.root.left.left.left.data, 3)
        self.assertEqual(self.tree.root.left.left.right.data, 5)
        # NOTE: Expected tree structure after deletion
        #           10
        #        5
        #      4   9
        #     3
        success = self.tree.delete(8)
        self.assertEqual(success, True)
        self.assertEqual(self.tree.root.left.data, 5)
        self.assertEqual(self.tree.root.left.left.data, 4)
        self.assertEqual(self.tree.root.left.left.left.data, 3)
        self.assertEqual(self.tree.root.left.right.data, 9)
        # The deleted key must no longer be findable.
        node = self.tree.find(8)
        self.assertIsNone(node)
|
997,571 | d66b4cb957d21996215c61f13f1c00209695f6cf | #!/usr/bin/python
#
# Read or Write data from/to the Adafruit 32KB I2C FRAM Breakout Board
# from Raspberry Pi
#
# Can be called from command line or public functions can be imported:
#
# string = FRAMread(int address, int length)
# (to retrieve binary data, use ord() on characters in string)
#
# FRAMwrite(int address, string data)
# (to write binary data, concatenate into a string using chr())
#
# (c) David Pease, April, 2016
#
# import needed functions
from sys import argv, exit
from ctypes import *
from fcntl import ioctl
# Public function to write to FRAM
def FRAMwrite(address, string):
    """Write *string* to FRAM starting at *address*.

    For binary data, build the string with chr() (Python 2 byte strings).
    """
    __doFramIO(address, data=string)
# Public function to read from FRAM
def FRAMread(address, length):
    """Read *length* bytes from FRAM starting at *address*; returns a str.

    Use ord() on the returned characters to recover binary values.
    """
    return __doFramIO(address, length=length)
# Private function to perform FRAM I/O using ioctl
def __doFramIO(addr, data="", length=0):
    """Perform a combined I2C read or write on the FRAM via the I2C_RDWR ioctl.

    Write mode when *data* is non-empty (one I2C message: 2 address bytes +
    payload); read mode otherwise (two messages: address write, then a read
    of *length* bytes).  Returns the read bytes as a str, or None for writes.
    Raises ValueError for out-of-range addresses and IOError when the ioctl
    does not process all messages.  Python 2 only (chr()-built byte strings).
    """
    FRAM_ADDR = 0x50  # default FRAM I2C address
    # constant values from /usr/include/linux/i2c-dev.h
    I2C_RDWR = 0x0707  # ioctl value for combined R/W transfer
    I2C_M_RD = 0x01  # ioctl command flag for read

    # i2c command description structure  # from <linux/i2c-dev.h>
    class i2c_msg(Structure):
        _fields_ = [("addr", c_ushort),
                    ("flags", c_ushort),
                    ("len", c_ushort),
                    ("buf", c_char_p)]

    # array of i2c command descriptions
    class msgarray(Structure):
        _fields_ = [("msg0", i2c_msg),
                    ("msg1", i2c_msg)]

    # i2c command list descriptor (head of structures)  # from <linux/i2c-dev.h>
    class i2c_rdwr_ioctl_data(Structure):
        _fields_ = [("msgs", c_void_p),
                    ("nmsgs", c_ulong)]

    # determine read/write mode, set number of ioctl commands needed
    if len(data) > 0:
        write = True
        cmdcnt = 1
    else:
        write = False
        cmdcnt = 2
    # validate FRAM address range (32KB device)
    if addr + len(data) + length > 32768:
        raise ValueError('I/O address out of range.')
    # create write string (2-byte big-endian address + payload), compute lengths
    writestr = chr(addr >> 8) + chr(addr & 0x00ff) + data
    writelen = len(writestr)
    readlen = length
    # create buffers and buffer pointers for ioctl commands
    rwaddr_data = create_string_buffer(writestr, writelen)
    p_rw_data = cast(pointer(rwaddr_data), c_char_p)
    if write:
        p_buffer = cast(None, c_char_p)
    else:
        buffer = create_string_buffer(readlen)
        p_buffer = cast(pointer(buffer), c_char_p)
    # initialize array of command descriptors (second is unused for write)
    msgs = msgarray(
        (FRAM_ADDR, 0, writelen, p_rw_data),
        (FRAM_ADDR, I2C_M_RD, readlen, p_buffer))
    # set command array pointer to descriptor list, specify command count
    cmds = i2c_rdwr_ioctl_data(cast(pointer(msgs), c_void_p), cmdcnt)
    # open the Raspberry Pi I2C bus for reading and writing
    # NOTE(review): "rw" is not a standard mode string (fopen treats it as
    # plain "r"); the ioctl only needs the fd, but "r+b" would be the
    # conventional choice -- verify on target hardware.
    fd = open("/dev/i2c-1", "rw")
    # issue ioctl to read or write FRAM
    rc = ioctl(fd, I2C_RDWR, cmds)
    if rc != cmdcnt:
        raise IOError('Not all ioctl commands processed successfully.')
    # close the bus (file descriptor)
    fd.close()
    # return read buffer for read, otherwise nothing
    if write:
        return
    else:
        return buffer.raw
# if invoked from command line, perform function and print result
# Command-line interface:  script -r ADDR LENGTH  |  script -w ADDR STRING
if __name__ == "__main__":
    # validate inputs (minimally)
    if len(argv) != 4:
        print "%s: Invalid number of arguments; should be 3." % argv[0]
        exit(12)
    if argv[1] not in ("-r", "-w"):
        print "%s: Invalid read/write option (-r/-w)." % argv[0]
        exit(12)
    # call read or write function as requested
    address = int(argv[2]);
    if argv[1] == "-w":
        FRAMwrite(address, argv[3])
    else:
        data = FRAMread(address, int(argv[3]))
        print data
    # we got this far without an exception so return success
    exit(0)
|
997,572 | c31b5f752dd2333e31120f7f6fd7a96e0809281c | '''
<단순 변수 사용>
1. 순차형
1-3. 현금교환기 (10000원, 1000원, 100원, 10원)
1626035 이주호
'''
# Prompt for the amount to exchange (integer won)
money = int(input("교환할 금액을 정수로 입력 >> "))
## Breakdown: peel off each denomination (10000 / 1000 / 100 / 10 won)
## with integer arithmetic.  divmod fixes the original's use of true
## division "/", which produced floats for what are banknote/coin counts
## (the printed output is unchanged because %d truncates).
#10000 won
_10000won, _rest = divmod(money, 10000)
#1000 won
_1000won, _rest = divmod(_rest, 1000)
#100 won
_100won, _rest = divmod(_rest, 100)
#10 won
_10won = _rest // 10
## Breakdown done
print("%d원은 %d만 %d천 %d백 %d십원 입니다." % (money, _10000won, _1000won, _100won, _10won))
|
997,573 | 114e4212e5266b5fc6b54ffa0dc79ed6a658458c | #! /usr/bin/env python
#############
#Written for python 2.7
#############
import nltk, re, sys, os, getopt, pandas, time
from collections import Counter
import numpy as np
from datetime import datetime
from nltk import word_tokenize as token_d
from nltk.tokenize import RegexpTokenizer as token_re
from collections import Counter
from nltk.corpus import stopwords
#Adapted from data_transformation_pt2_1914 (Will Issac and James Murray)
# clarified outputs,
# an option for custom stopwords and custom regex for additional cleaning;
# an option for custom tokens as regex (e.g. s\/res\/\d{4} for security council);
# an generalized workflow, given the header as stated below.
# ability to topic model multiple csv files. MUST HAVE SAME HEADERS!!!
# One-time fetch of the NLTK stopword corpus (no-op if already present).
nltk.download('stopwords')
###########################
#NOTES
###########################
#REQUIREMENT FOR INPUT CSV FILE:
# Header includes = ['id','day','month','year','speaker', 'speech']
# (for example, "id" could be a session of the UNSC during which a speaker spoke)
# (if you're not working with speeches, the text to be topic modeled goes under "speech")
#REQUIRES AS INPUT
# CSV (one or more) with the above header
#OPTIONAL INPUT
# text file with list of custom stopwords (one stopword per line)
# (for example, in the UNSC project I removed references to countries)
#OUTPUT FILES:
# meta_output.csv = metadata without DAT info
# full_reflist.txt = reference wordlist
# full_mult.dat = DAT info
# final_meta.csv = metadata with DAT info
# [user defined name].csv = full dataframe, including speeches, metadata, and DAT info
###################
# User Options
###################
# If True, words in document will be stemmed using NLTK Snowball stemmer.
stem_mc = False # Default: True
# If True, stopwords will be excluded.
exclude_stopwords = True # Default: True
#Should I write out the FULL dataframe, including all speeches, as a single csv?
write_teh_big_one = True #Default: True
# Give me any regex for custom tokens (order matters). Separate by "|". Lower case letters.
# If none, simply put ''.
custom_tokens=""
# List out any regex you want to clean in speeches (e.g. \n; \r; \uffd).
# REMEMBER TO ESCAPE ANY BACKSLASHES!!! e.g. if you want to remove the
# "\\n" in a piece of text, you'll need to add "\\\\n" to the
# custom_clean list below.
# If none, simply put [].
custom_clean=[]
#Do you have a list of custom stopwords?
# If True, you will need to define:
# (1) a text file with custom stopwords.
# (2) the directory of said file
# (3) whether to treat these as "contains", too (e.g. remove "Israeli" if "Israel" in stopwords)
# Warning: very powerful. Be careful. Also, could add a lot of time. Default = False.
# (4) a text file with any of your custom stopwords you DO NOT want to use as "contains". Place in "stoppath".
# Note: stopwords will NOT be case sensitive.
custom_stopwords = True
stoplist_file = "namelist.txt"
stoppath = "/Volumes/Seagate Backup Plus Drive/Z/MSUgrad/Dissertation/15_chapters/0_ungavoting/OUTPUT/0_topicmodel/3_dtmprep/0_namelist/"
custom_stems = False
stem_exclude = ""
#########################
#Input and output directories
# NOTE(review): absolute paths to an external drive -- this script only runs
# on the author's machine as-is.
#########################
inpath = "/Volumes/Seagate Backup Plus Drive/Z/MSUgrad/Dissertation/15_chapters/0_ungavoting/OUTPUT/0_topicmodel/2_combined/"
refpath = "/Volumes/Seagate Backup Plus Drive/Z/MSUgrad/Dissertation/15_chapters/0_ungavoting/OUTPUT/0_topicmodel/3_dtmprep/1_multgen/"
outpath = "/Volumes/Seagate Backup Plus Drive/Z/MSUgrad/Dissertation/15_chapters/0_ungavoting/OUTPUT/0_topicmodel/3_dtmprep/1_multgen/"
# Fail fast if the shared vocabulary file is missing from refpath.
assert "full-reflist.txt" in os.listdir(refpath)
###############################
#CSV file names
# (csvfilelist) a list of csv files (SAME HEADERS!!!) as inputs. If only one file, pass as ['filename']
# (final_df_name) the name of your FULL dataframe output csv file
#
###############################
csvfilelist = ["UNGANounsAdj.csv"]
final_df_name = "UNGANounsAdj_Full.csv"
#################
#Args
# Usage: script.py YEAR  (e.g. "script.py 1982"); the year selects which
# rows of the combined CSVs are processed and prefixes all output files.
#################
year=sys.argv[1] #1982
assert str(year).isdigit()
assert len(str(year))==4,"System argument must be a year"
year=int(year)
pref=str(year)+"_"
###############################
######################
#CODE: NO MORE INPUTS PAST THIS POINT
######################
###############################
# Import multiple csv files into a single pandas dataframe and sort
# them by day/month/year timing.
def import_csvs(csvfile_list, indir):
    """Load one or more CSVs (same headers) into a single DataFrame.

    Keeps only the expected columns and returns the rows sorted
    chronologically by year, month, day.

    :param csvfile_list: list of CSV file names (all with the required header)
    :param indir: directory containing the files (NOTE: chdir side effect)
    :return: concatenated, date-sorted pandas DataFrame
    """
    os.chdir(indir)
    frames = []
    for file_ in csvfile_list:
        my_temp_df = pandas.read_csv(file_, index_col=None)
        frames.append(my_temp_df[['id', 'year', 'month', 'day', 'speaker', 'speech']])
    frame = pandas.concat(frames)
    # FIX: DataFrame.sort() was deprecated in pandas 0.17 and removed in 0.20;
    # sort_values is the supported equivalent with identical behavior here.
    return frame.sort_values(['year', 'month', 'day'], ascending=[True, True, True])
##############
#CODE
##############
#Read File
working_df = import_csvs(csvfilelist,inpath)
#Prep Working DF: flatten the concatenated index and keep a stable row id,
#then restrict to the year given on the command line.
working_df = working_df.reset_index()
working_df['index1'] = working_df.index
working_df=working_df[working_df["year"]==year]
#Prepare your toys for wordplay
# NOTE(review): the stemmer is built here but never used in this script
# (stem_mc is False above) -- presumably kept for parity with the original
# pipeline; confirm before removing.
snowball = nltk.stem.snowball.EnglishStemmer(ignore_stopwords=False)
# Base token pattern: runs of 2+ letters; custom token regexes are OR'd in front-to-back.
final_token_set="[A-Za-z]{2,}"
if custom_tokens:
    final_token_set=final_token_set+"|"+custom_tokens
tokenizer = token_re(final_token_set) # group cites + words as tokens: ORDER MATTERS!!!!
print "CHECKPOINT 1: prepwork done"
####################################
# 1. Read in the reference list of ALL words
# 2. Assign numeric values to each word in vocabulary
# 3. Convert each speech to a vector of words WITHIN THE VOCABULARY
# 4. For each speech, get word count GIVEN PRESENCE IN VOCABULARY
# 5. For each speech, convert vector of words to sparse matrix
# 6. Write it all as a string in the dataframe
####################################
# 1. Read in the reference list of ALL words (fixed vocabulary shared
# across runs) and build word <-> index lookup tables.
os.chdir(refpath)
ref_file=open("full-reflist.txt","r")
all_words_fin=ref_file.read().split("\n") #read the reflist text to a list of words
all_words_fin_set=set(all_words_fin) #SET is significantly faster!
# The vocabulary must be duplicate-free.
assert len(all_words_fin)==len(all_words_fin_set)
# BUG FIX: the original read all([i in all_words_fin_set] for i in ...),
# i.e. all() over one-element LISTS, which is vacuously true for any input.
# The membership test belongs inside all().
assert all(i in all_words_fin_set for i in all_words_fin)
############################
# 2. Assign numeric values to each word in vocabulary
############################
all_sp_index=[i for i in range(0,len(all_words_fin))]
assert len(all_sp_index)==len(all_words_fin)
ind_word_dict=dict(zip(all_sp_index,all_words_fin))
word_ind_dict=dict(zip(all_words_fin,all_sp_index))
def makemult(x):
    """Convert one raw speech string into an LDA-C style sparse count line.

    Output format: "<token_count> <word_idx>:<count> <word_idx>:<count> ...".
    Relies on module globals: tokenizer, word_ind_dict, all_words_fin_set.
    NOTE(review): Python 2 only -- x is assumed to be a utf-8 byte string
    (str) and is decoded below; under Python 3 str.decode does not exist.
    """
    #Pre-process speech
    sp=x.lower()
    sp=sp.replace("-","")
    sp=sp.decode("utf8")
    #Tokenize words and keep only those in our vocab
    #SET IS SIGNIFICANTLY FASTER!!!
    vec_raw=tokenizer.tokenize(sp)
    vec=[word_ind_dict[i] for i in vec_raw if i in all_words_fin_set]
    #Prepare sparse matrix string: render the Counter dict and strip
    #braces/commas so "{1: 2, 3: 4}" becomes "1:2 3:4".
    wc=len(vec)
    multdict=dict(Counter(vec))
    multstr_raw=str(multdict)
    multstr=multstr_raw.replace("{","").replace("}","").replace(": ",":").replace(",","")
    outstr=str(wc)+" "+multstr
    return outstr
#LOOP here! Mapping/Applying is too memory intensive and won't work.
# Row-by-row conversion of each speech into its sparse "mult" string,
# with a progress message every 500 rows.
os.chdir(outpath)
for index1,row in working_df.iterrows():
    working_row=row.copy()
    sp=working_row["speech"]
    full_str=makemult(sp)
    working_df.loc[index1,"Full String"]=full_str
    if int(index1)%500==0:
        print "Processed through "+str(index1)
# working_df["Full String"]=working_df["speech"].map(makemult)
print "CHECKPOINT 5: Each speech converted to sparse matrix"
########################
#Write out:
# 1. final_meta.csv ["id","Full String","day","month","year","speaker"], no index, encoding="utf-8"
# 2. full-mult.dat
# 3. {final_df_name} [working_df], no index, encoding="utf-8"
# All outputs are prefixed with the year (pref), e.g. "1982_final_meta.csv".
########################
os.chdir(outpath)
#Write out metafile
final_meta=working_df[["id","Full String","day","month","year","speaker"]]
final_meta.to_csv(str(str(pref)+"final_meta.csv"),encoding="utf-8",index=False)
print "CHECKPOINT 6: meta file written"
#Write out your datfile (one sparse-count line per document, no header)
datfile=working_df[['Full String']]
datfile.to_csv(str(str(pref)+"full-mult.dat"),header=False,index=False)
print "CHECKPOINT 7: MULT file written"
#Write out full file, if applicable
if write_teh_big_one==True:
    working_df.to_csv(str(str(pref)+final_df_name),encoding="utf-8",index=False)
print "COMPLETE"
997,574 | 6353d7bfaaac1bf25de5f7e08ba57a9c76f115c8 | #coding:utf-8
from django.conf.urls import url
from django.conf.urls.static import static
from django.conf import settings
import uuid
from django.conf.urls import include
from rest_framework import routers
from rest_framework_jwt.views import obtain_jwt_token,jwt_response_payload_handler
from wechat import views
from mall import views
# from .login import login,authorized
# NOTE(review): 'views' is imported from both wechat and mall above; the later
# "from mall import views" shadows the first, so every handler below resolves
# against mall.views -- confirm that is intended.
# NOTE(review): DefaultRouter takes no URL-prefix argument; 'genesis/' lands
# positionally on its trailing_slash parameter -- verify against DRF docs.
router = routers.DefaultRouter('genesis/')
urlpatterns=[
    url('', include(router.urls)),
    # NOTE(review): patterns are unanchored (no ^/$), so e.g. r'get_banners/?'
    # matches anywhere in the requested path -- verify in the including URLconf.
    url(r'get_banners/?', views.get_banners),
    url(r'get_onlineshowtype/?', views.get_onlineshowtype),
    url(r'get_goodslist_byshowtype/?', views.get_goodslist_byshowtype),
    # url(r'get_goodslist_byshowtype/?', views.get_onlineitemlist_byshowtype),
]
|
997,575 | f7fbc66e2e8484d798bebdce45b5c5e3dde2974d | class Solution(object):
def lengthOfLongestSubstring(self, s: str) -> int:
# 字符串为空则返回零
if not s:
return 0
window = [] # 滑动窗口数组
max_length = 0 # 最长串长度
# 遍历字符串
for c in s:
# 如果字符不在滑动窗口中,则直接扩展窗口
if c not in window:
# 使用当前字符扩展窗口
window.append(c)
# 如果字符在滑动窗口中,则
# 1. 从窗口中移除重复字符及之前的字符串部分
# 2. 再扩展窗口
else:
# 从窗口中移除重复字符及之前的字符串部分,新字符串即为无重复字符的字符串
window = window[window.index(c) + 1:]
# 扩展窗口
window.append(c)
# 更新最大长度
max_length = max(len(window), max_length)
return max_length if max_length != 0 else len(s)
class Solution(object):
    def multiply(self, num1, num2):
        """Multiply two decimal digit strings and return the product as a string.

        :type num1: str
        :type num2: str
        :rtype: str ("error" when either input is empty)
        """
        if not num1 or not num2:
            return "error"
        base = int(num1)
        total = 0
        # Walk num2 from least- to most-significant digit, scaling each
        # partial product by its decimal place.
        for place, digit in enumerate(reversed(num2)):
            total += base * int(digit) * 10 ** place
        return str(total)
if __name__ == "__main__":
    # Ad-hoc driver: exercises Solution.multiply (the earlier
    # lengthOfLongestSubstring check is left commented out below).
    solution = Solution()
    # str_ = "abcabcbb"
    # i = solution.lengthOfLongestSubstring(str_)
    # print(i)
    res = solution.multiply("123", "111")
    print(res)
    # 28 ms 11.8 MB Python
|
997,576 | 8af87a9b0a4d5235d7c302408dc18f2ecfc5bfc6 | #!/usr/bin/env python
__author__ = "Stephen P. Henrie, Michael Meisinger"
import ast
import inspect
import os
import string
import sys
import time
import traceback
from flask import Blueprint, request, abort
import flask
# Create special logging category for service gateway access
# (separate logger so web access logs can be routed/filtered independently).
import logging
webapi_log = logging.getLogger('webapi')
from putil.exception import ApplicationException
from pyon.core.bootstrap import get_service_registry
from pyon.core.object import IonObjectBase
from pyon.core.exception import Unauthorized
from pyon.core.registry import getextends, is_ion_object_dict, issubtype
from pyon.core.governance import DEFAULT_ACTOR_ID, get_role_message_headers, find_roles_by_actor
from pyon.ion.resource import get_object_schema
from pyon.public import IonObject, OT, NotFound, Inconsistent, BadRequest, EventSubscriber, log, CFG
from pyon.public import MSG_HEADER_ACTOR, MSG_HEADER_VALID, MSG_HEADER_ROLES
from pyon.util.lru_cache import LRUCache
from pyon.util.containers import current_time_millis
from ion.service.utility.swagger_gen import SwaggerSpecGenerator
from ion.util.parse_utils import get_typed_value
from ion.util.ui_utils import CONT_TYPE_JSON, json_dumps, json_loads, encode_ion_object, get_auth, clear_auth, OAuthTokenObj
from interface.services.core.idirectory_service import DirectoryServiceProcessClient
from interface.services.core.iresource_registry_service import ResourceRegistryServiceProcessClient
from interface.services.core.iidentity_management_service import IdentityManagementServiceProcessClient
from interface.services.core.iorg_management_service import OrgManagementServiceProcessClient
from interface.objects import Attachment, ProcessDefinition, MediaResponse, UserRoleModifiedEvent, UserRoleCacheResetEvent
from interface import objects
# Config key prefix for all service gateway settings
CFG_PREFIX = "service.service_gateway"
DEFAULT_USER_CACHE_SIZE = 2000
DEFAULT_EXPIRY = "0"
# Identification string returned by the gateway index route
SG_IDENTIFICATION = "service_gateway/ScionCC/1.0"
# Names of the request/response envelope fields used over HTTP
GATEWAY_ARG_PARAMS = "params"
GATEWAY_ARG_JSON = "data"
GATEWAY_RESPONSE = "result"
GATEWAY_STATUS = "status"
GATEWAY_ERROR = "error"
GATEWAY_ERROR_EXCEPTION = "exception"
GATEWAY_ERROR_MESSAGE = "message"
GATEWAY_ERROR_EXCID = "error_id"
GATEWAY_ERROR_TRACE = "trace"
# Stuff for specifying other return types
RETURN_MIMETYPE_PARAM = "return_mimetype"
# Flask blueprint for service gateway routes
sg_blueprint = Blueprint("service_gateway", __name__, static_folder=None)
# Singleton instance of service gateway
sg_instance = None
# Sequence number to identify requests
req_seqnum = 0
class ServiceGateway(object):
"""
The Service Gateway exports service routes for a web server via a Flask blueprint.
The gateway bridges HTTP requests to ION AMQP RPC calls.
"""
    def __init__(self, process, config, response_class):
        """Set up the gateway from config and register it as the module singleton.

        :param process: hosting ION process (supplies messaging clients and base URL)
        :param config: container configuration (read via get_safe under CFG_PREFIX)
        :param response_class: Flask response class used for raw/binary responses
        """
        global sg_instance
        sg_instance = self
        self.name = "service_gateway"
        self.process = process
        self.config = config
        self.response_class = response_class
        self.gateway_base_url = process.gateway_base_url
        self.develop_mode = self.config.get_safe(CFG_PREFIX + ".develop_mode") is True
        self.require_login = self.config.get_safe(CFG_PREFIX + ".require_login") is True
        self.token_from_session = self.config.get_safe(CFG_PREFIX + ".token_from_session") is True
        # Optional list of trusted originators can be specified in config.
        # An empty/absent list is normalized to None, meaning "trust everyone".
        self.trusted_originators = self.config.get_safe(CFG_PREFIX + ".trusted_originators")
        if not self.trusted_originators:
            self.trusted_originators = None
            log.info("Service Gateway will not check requests against trusted originators since none are configured.")
        # Service screening
        self.service_blacklist = self.config.get_safe(CFG_PREFIX + ".service_blacklist") or []
        self.service_whitelist = self.config.get_safe(CFG_PREFIX + ".service_whitelist") or []
        self.no_login_whitelist = set(self.config.get_safe(CFG_PREFIX + ".no_login_whitelist") or [])
        self.set_cors_headers = self.config.get_safe(CFG_PREFIX + ".set_cors") is True
        self.strict_types = self.config.get_safe(CFG_PREFIX + ".strict_types") is True
        # Swagger spec generation support (only built when explicitly enabled)
        self.swagger_cfg = self.config.get_safe(CFG_PREFIX + ".swagger_spec") or {}
        self._swagger_gen = None
        if self.swagger_cfg.get("enable", None) is True:
            self._swagger_gen = SwaggerSpecGenerator(config=self.swagger_cfg)
        # Get the user_cache_size
        self.user_cache_size = self.config.get_safe(CFG_PREFIX + ".user_cache_size", DEFAULT_USER_CACHE_SIZE)
        self.max_content_length = self.config.get_safe(CFG_PREFIX + ".max_content_length")
        # Initialize an LRU Cache to keep user roles cached for performance reasons
        #maxSize = maximum number of elements to keep in cache
        #maxAgeMs = oldest entry to keep
        self.user_role_cache = LRUCache(self.user_cache_size, 0, 0)
        self.request_callback = None
        self.log_errors = self.config.get_safe(CFG_PREFIX + ".log_errors", True)
        # Process clients for the core services the gateway talks to
        self.rr_client = ResourceRegistryServiceProcessClient(process=self.process)
        self.idm_client = IdentityManagementServiceProcessClient(process=self.process)
        self.org_client = OrgManagementServiceProcessClient(process=self.process)
# -------------------------------------------------------------------------
# Lifecycle management
    def start(self):
        """Begin listening for user-role events so the role cache stays fresh."""
        # Configure subscriptions for user_cache events
        self.event_subscriber = EventSubscriber(event_type=OT.UserRoleModifiedEvent, origin_type="Org",
                                                callback=self._event_callback)
        self.event_subscriber.add_event_subscription(event_type=OT.UserRoleCacheResetEvent)
        self.process.add_endpoint(self.event_subscriber)
    def stop(self):
        """Shut down the gateway (currently a no-op; see note below)."""
        pass
        # Stop event subscribers - TODO: This hangs
        #self.process.remove_endpoint(self.event_subscriber)
# -------------------------------------------------------------------------
# Event subscriber callbacks
    def _event_callback(self, event, *args, **kwargs):
        """Callback function for receiving Events.

        Keeps the user-role LRU cache coherent: evicts a single actor on a
        UserRoleModifiedEvent, clears the whole cache on UserRoleCacheResetEvent.
        """
        if isinstance(event, UserRoleModifiedEvent):
            # Event when User Roles are modified
            user_role_event = event
            org_id = user_role_event.origin
            actor_id = user_role_event.actor_id
            role_name = user_role_event.role_name
            log.debug("User Role modified: %s %s %s" % (org_id, actor_id, role_name))
            # Evict the user and their roles from the cache so that it gets updated with the next call.
            if self.user_role_cache and self.user_role_cache.has_key(actor_id):
                log.debug("Evicting user from the user_role_cache: %s" % actor_id)
                self.user_role_cache.evict(actor_id)
        elif isinstance(event, UserRoleCacheResetEvent):
            # An event is received to clear the user data cache
            self.user_role_cache.clear()
# -------------------------------------------------------------------------
# Routes
    def sg_index(self):
        """Index route: return the gateway identification string as JSON."""
        return self.gateway_json_response(SG_IDENTIFICATION)
    def get_service_spec(self, service_name=None, spec_name=None):
        """Serve the generated Swagger spec for a service as JSON.

        Only the "swagger.json" spec format is supported; 404s (as a gateway
        error response) when spec generation is disabled or the name is unknown.
        """
        try:
            if not self._swagger_gen:
                raise NotFound("Spec not available")
            if spec_name != "swagger.json":
                raise NotFound("Unknown spec format")
            swagger_json = self._swagger_gen.get_spec(service_name)
            resp = flask.make_response(flask.jsonify(swagger_json))
            self._add_cors_headers(resp)
            return resp
        except Exception as ex:
            return self.gateway_error_response(ex)
    def process_gateway_request(self, service_name=None, operation=None, id_param=None):
        """
        Makes a secure call to a SciON service operation via messaging.

        Exceptions from the service call are converted into a gateway error
        response rather than propagated to Flask.
        """
        # TODO make this service smarter to respond to the mime type in the request data (ie. json vs text)
        self._log_request_start("SVC RPC")
        try:
            result = self._make_service_request(service_name, operation, id_param)
            return self.gateway_json_response(result)
        except Exception as ex:
            return self.gateway_error_response(ex)
        finally:
            self._log_request_end()
    def rest_gateway_request(self, service_name, res_type, id_param=None):
        """
        Makes a REST style call to a SciON service operation via messaging.
        Get with ID returns the resource, POST without ID creates, PUT with ID updates
        and GET without ID returns the collection.

        The target operation name is derived as <verb>_<res_type>
        (e.g. read_/create_/update_) on the named service.
        """
        self._log_request_start("SVC REST")
        try:
            if not service_name:
                raise BadRequest("Service name missing")
            service_name = str(service_name)
            if not res_type:
                raise BadRequest("Resource type missing")
            res_type = str(res_type)
            if request.method == "GET" and id_param:
                operation = "read_" + res_type
                return self.process_gateway_request(service_name, operation, id_param)
            elif request.method == "GET":
                # Collection GET: snake_case res_type -> CamelCase ION type name
                ion_res_type = "".join(x.title() for x in res_type.split('_'))
                res = self._make_service_request("resource_registry", "find_resources", ion_res_type)
                if len(res) == 2:
                    return self.gateway_json_response(res[0])
                raise BadRequest("Unexpected find_resources result")
            elif request.method == "PUT":
                operation = "update_" + res_type
                obj = self._extract_payload_data()
                if not obj:
                    raise BadRequest("Argument object not found")
                if id_param:
                    obj._id = id_param
                return self.process_gateway_request(service_name, operation, obj)
            elif request.method == "POST":
                operation = "create_" + res_type
                obj = self._extract_payload_data()
                if not obj:
                    raise BadRequest("Argument object not found")
                return self.process_gateway_request(service_name, operation, obj)
            else:
                raise BadRequest("Bad REST request")
        except Exception as ex:
            return self.gateway_error_response(ex)
        finally:
            self._log_request_end()
    def _extract_payload_data(self):
        """Pull the request payload from a JSON body or form field.

        Returns an instantiated ION object when the payload looks like a
        serialized ION object dict, the raw parsed dict otherwise, or None
        when no payload is present.
        """
        request_obj = None
        if request.headers.get("content-type", "").startswith(CONT_TYPE_JSON):
            if request.data:
                request_obj = json_loads(request.data)
        elif request.form:
            # Form encoded
            if GATEWAY_ARG_JSON in request.form:
                payload = request.form[GATEWAY_ARG_JSON]
                request_obj = json_loads(payload)
        if request_obj and is_ion_object_dict(request_obj):
            request_obj = self.create_ion_object(request_obj)
        return request_obj
    def _make_service_request(self, service_name=None, operation=None, id_param=None):
        """
        Executes a secure call to a SciON service operation via messaging.

        Applies the service white/black lists, builds the operation argument
        list from the HTTP request, validates the requesting actor, attaches
        governance headers, and performs the RPC.  In develop mode, a missing
        service or operation name returns a discovery listing instead of an
        error.
        """
        if not service_name:
            if self.develop_mode:
                # Return a list of available services
                result = dict(available_services=get_service_registry().services.keys())
                return result
            else:
                raise BadRequest("Service name missing")
        service_name = str(service_name)
        if not operation:
            if self.develop_mode:
                # Return a list of available operations
                result = dict(available_operations=[])
                return result
            else:
                raise BadRequest("Service operation missing")
        operation = str(operation)
        # Apply service white list and black list for initial protection and get service client
        service_def = self.get_secure_service_def(service_name)
        target_client = service_def.client
        # Get service request arguments and operation parameter values request
        req_args = self._get_request_args()
        param_list = self.create_parameter_list(service_def, operation, req_args, id_param)
        # Validate requesting user and expiry and add governance headers
        ion_actor_id, expiry = self.get_governance_info_from_request(req_args)
        in_login_whitelist = self.in_login_whitelist("request", service_name, operation)
        ion_actor_id, expiry = self.validate_request(ion_actor_id, expiry, in_whitelist=in_login_whitelist)
        param_list["headers"] = self.build_message_headers(ion_actor_id, expiry)
        # Make service operation call
        client = target_client(process=self.process)
        method_call = getattr(client, operation)
        result = method_call(**param_list)
        return result
    def get_resource_schema(self, resource_type):
        """Return the schema for an ION resource type as a JSON response."""
        try:
            # Validate requesting user and expiry and add governance headers
            ion_actor_id, expiry = self.get_governance_info_from_request()
            ion_actor_id, expiry = self.validate_request(ion_actor_id, expiry)
            return self.gateway_json_response(get_object_schema(resource_type))
        except Exception as ex:
            return self.gateway_error_response(ex)
    def get_attachment(self, attachment_id):
        """Stream an attachment's content with its stored MIME type."""
        try:
            # Create client to interface
            attachment = self.rr_client.read_attachment(attachment_id, include_content=True)
            return self.response_class(attachment.content, mimetype=attachment.content_type)
        except Exception as ex:
            return self.gateway_error_response(ex)
    def create_attachment(self):
        """Create a resource attachment from a multipart form upload.

        Expects the JSON metadata in the form's GATEWAY_ARG_JSON field and the
        file content in the "file" upload; returns the new attachment id as a
        JSON response.
        """
        try:
            payload = request.form[GATEWAY_ARG_JSON]
            json_params = json_loads(payload)
            # Authenticate the caller before touching the registry
            actor_id, expiry = self.get_governance_info_from_request(json_params)
            actor_id, expiry = self.validate_request(actor_id, expiry)
            headers = self.build_message_headers(actor_id, expiry)
            data_params = json_params[GATEWAY_ARG_PARAMS]
            resource_id = str(data_params.get("resource_id", ""))
            fil = request.files["file"]
            content = fil.read()
            # Optional comma-separated keyword list
            keywords = []
            keywords_str = data_params.get("keywords", "")
            if keywords_str.strip():
                keywords = [str(x.strip()) for x in keywords_str.split(",")]
            created_by = data_params.get("attachment_created_by", "unknown user")
            modified_by = data_params.get("attachment_modified_by", "unknown user")
            # build attachment
            attachment = Attachment(name=str(data_params["attachment_name"]),
                                    description=str(data_params["attachment_description"]),
                                    attachment_type=int(data_params["attachment_type"]),
                                    content_type=str(data_params["attachment_content_type"]),
                                    keywords=keywords,
                                    created_by=created_by,
                                    modified_by=modified_by,
                                    content=content)
            ret = self.rr_client.create_attachment(resource_id=resource_id, attachment=attachment, headers=headers)
            return self.gateway_json_response(ret)
        except Exception as ex:
            log.exception("Error creating attachment")
            return self.gateway_error_response(ex)
    def delete_attachment(self, attachment_id):
        """Delete an attachment by id and return the service result as JSON.

        NOTE(review): unlike create_attachment, no governance headers are
        passed here, so the delete runs without caller validation -- confirm
        this is intended.
        """
        try:
            ret = self.rr_client.delete_attachment(attachment_id)
            return self.gateway_json_response(ret)
        except Exception as ex:
            log.exception("Error deleting attachment")
            return self.gateway_error_response(ex)
    def get_version_info(self, pack=None):
        """Report installed package versions plus system directory version attrs.

        :param pack: None for the configured package list, "all" to include
            transitive dependencies, or a package name to filter to just it.
        NOTE(review): uses dict.iteritems, i.e. Python 2 only.
        """
        import pkg_resources
        pkg_list = ["scioncc"]
        # Extra packages to report can be configured as a comma-separated list
        packs = self.config.get_safe(CFG_PREFIX + ".version_packages")
        if packs:
            pkg_list.extend(packs.split(","))
        version = {}
        for package in pkg_list:
            try:
                if pack == "all":
                    pack_deps = pkg_resources.require(package)
                    version.update({p.project_name: p.version for p in pack_deps})
                else:
                    version[package] = pkg_resources.require(package)[0].version
                # @TODO git versions for current?
            except pkg_resources.DistributionNotFound:
                # Package not installed -- simply omit it from the report
                pass
        try:
            # Merge in any "version"-named attributes from the /System directory
            dir_client = DirectoryServiceProcessClient(process=self.process)
            sys_attrs = dir_client.lookup("/System")
            if sys_attrs and isinstance(sys_attrs, dict):
                version.update({k: v for (k, v) in sys_attrs.iteritems() if "version" in k.lower()})
        except Exception as ex:
            log.exception("Could not determine system directory attributes")
        if pack and pack != "all":
            version = {k: v for (k, v) in version.iteritems() if k == pack}
        return self.gateway_json_response(version)
# =========================================================================
# Security and governance helpers
def is_trusted_address(self, requesting_address):
if self.trusted_originators is None:
return True
return requesting_address in self.trusted_originators
    def get_governance_info_from_request(self, json_params=None):
        """Determine the requesting actor id and auth expiry for this request.

        Sources, in increasing precedence: anonymous defaults, the Flask
        server session, a develop-mode api_key request arg, an OAuth2 bearer
        header, and finally an explicit "authtoken" (in *json_params* or the
        query args) resolved via the object store or identity management.
        Returns (actor_id, expiry) as strings.
        """
        # Default values for governance headers.
        actor_id = DEFAULT_ACTOR_ID
        expiry = DEFAULT_EXPIRY
        authtoken = ""
        user_session = get_auth()
        #if user_session.get("actor_id", None) and user_session.get("valid_until", 0):
        if user_session.get("actor_id", None):
            # Get info from current server session
            # NOTE: Actor id may be inside server session
            expiry = int(user_session.get("valid_until", 0)) * 1000
            if expiry:
                # This was a proper non-token server session authentication
                expiry = str(expiry)
                actor_id = user_session["actor_id"]
                log.info("Request associated with session actor_id=%s, expiry=%s", actor_id, expiry)
            else:
                # We are just taking the user_id out of the session
                # TODO: Need to check access token here
                expiry = str(expiry)
                if self.token_from_session:
                    actor_id = user_session["actor_id"]
                    log.info("Request associated with actor's token from session; actor_id=%s, expiry=%s", actor_id, expiry)
        # Developer access using api_key
        if self.develop_mode and "api_key" in request.args and request.args["api_key"]:
            actor_id = str(request.args["api_key"])
            expiry = str(int(user_session.get("valid_until", 0)) * 1000)
            # Extend an already-expired session window for developer convenience
            if 0 < int(expiry) < current_time_millis():
                expiry = str(current_time_millis() + 10000)
                # flask.session["valid_until"] = int(expiry / 1000)
            log.info("Request associated with actor_id=%s, expiry=%s from developer api_key", actor_id, expiry)
        # Check in headers for OAuth2 bearer token
        auth_hdr = request.headers.get("authorization", None)
        if auth_hdr:
            valid, req = self.process.oauth.verify_request([self.process.oauth_scope])
            if valid:
                actor_id = flask.g.oauth_user.get("actor_id", "")
                if actor_id:
                    log.info("Request associated with actor_id=%s, expiry=%s from OAuth token", actor_id, expiry)
                    return actor_id, DEFAULT_EXPIRY
        # Try to find auth token override
        if not authtoken:
            if json_params:
                if "authtoken" in json_params:
                    authtoken = json_params["authtoken"]
            else:
                if "authtoken" in request.args:
                    authtoken = str(request.args["authtoken"])
        # Enable temporary authentication tokens to resolve to actor ids
        if authtoken:
            try:
                if authtoken.startswith(("Bearer_")):
                    # Backdoor way for OAuth2 access tokens as request args for GET URLs
                    authtoken = authtoken[7:]
                    token_id = "access_token_" + str(authtoken)
                    token_obj = self.process.container.object_store.read(token_id)
                    token = OAuthTokenObj.from_security_token(token_obj)
                    if token.is_valid(check_expiry=True):
                        actor_id = token.user["actor_id"]
                        expiry = str(token._token_obj.expires)
                        log.info("Resolved OAuth2 token %s into actor_id=%s expiry=%s", authtoken, actor_id, expiry)
                else:
                    # Plain temporary token: ask identity management to resolve it
                    token_info = self.idm_client.check_authentication_token(authtoken, headers=self._get_gateway_headers())
                    actor_id = token_info.get("actor_id", actor_id)
                    expiry = token_info.get("expiry", expiry)
                    log.info("Resolved token %s into actor_id=%s expiry=%s", authtoken, actor_id, expiry)
            except NotFound:
                log.info("Provided authentication token not found: %s", authtoken)
            except Unauthorized:
                log.info("Authentication token expired or invalid: %s", authtoken)
            except Exception as ex:
                # Token resolution failures fall back to the values gathered so far
                log.exception("Problem resolving authentication token")
        return actor_id, expiry
def in_login_whitelist(self, category, svc, op):
    """Check whether the given service operation may be invoked anonymously.

    The whitelist holds "category/svc/op" entries; membership means the
    operation is exempt from login requirements.
    """
    candidate = "%s/%s/%s" % (category, svc, op)
    return candidate in self.no_login_whitelist
def validate_request(self, ion_actor_id, expiry, in_whitelist=False):
    """Validate a resolved actor id and expiry for the current request.

    Returns an (actor_id, expiry) tuple, downgrading to the anonymous
    defaults where that is permitted, and raises Unauthorized when
    anonymous or expired access is not allowed (i.e. not whitelisted and
    require_login is set). Raises Inconsistent if expiry is not an int.
    """
    # There is no point in looking up an anonymous user - so return default values.
    if ion_actor_id == DEFAULT_ACTOR_ID:
        # Since this is an anonymous request, there really is no expiry associated with it
        if not in_whitelist and self.require_login:
            raise Unauthorized("Anonymous access not permitted")
        else:
            return DEFAULT_ACTOR_ID, DEFAULT_EXPIRY
    try:
        user = self.idm_client.read_actor_identity(actor_id=ion_actor_id, headers=self._get_gateway_headers())
    except NotFound as e:
        if not in_whitelist and self.require_login:
            # This could be a restart of the system with a new preload.
            # TODO: Invalidate Flask sessions on relaunch/bootstrap with creating new secret
            user_session = get_auth()
            if user_session.get("actor_id", None) == ion_actor_id:
                # The stale session references an identity that no longer exists - clear it
                clear_auth()
            raise Unauthorized("Invalid identity", exc_id="01.10")
        else:
            # If the user isn't found default to anonymous
            return DEFAULT_ACTOR_ID, DEFAULT_EXPIRY
    # Need to convert to int first in order to compare against current time.
    try:
        int_expiry = int(expiry)
    except Exception as ex:
        raise Inconsistent("Unable to read the expiry value in the request '%s' as an int" % expiry)
    # The user has been validated as being known in the system, so not check the expiry and raise exception if
    # the expiry is not set to 0 and less than the current time.
    if 0 < int_expiry < current_time_millis():
        if not in_whitelist and self.require_login:
            raise Unauthorized("User authentication expired")
        else:
            log.warn("User authentication expired")
            return DEFAULT_ACTOR_ID, DEFAULT_EXPIRY
    return ion_actor_id, expiry
# -------------------------------------------------------------------------
# Service call (messaging) helpers
def register_request_callback(self, cb_func):
    """Install the request lifecycle callback, or clear it when cb_func is None.

    Replacing a previously registered callback is allowed but logged.
    """
    if cb_func is not None and self.request_callback:
        log.warn("Callback already registered")
    self.request_callback = cb_func
def _call_request_callback(self, action, req_info):
if not self.request_callback:
return
try:
self.request_callback(action, req_info)
except Exception:
log.exception("Error calling request callback")
def _add_cors_headers(self, resp):
# Set CORS headers so that a Swagger client on a different domain can read spec
resp.headers["Access-Control-Allow-Headers"] = "Origin, X-Atmosphere-tracking-id, X-Atmosphere-Framework, X-Cache-Date, Content-Type, X-Atmosphere-Transport, *"
resp.headers["Access-Control-Allow-Methods"] = "POST, GET, OPTIONS , PUT"
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Request-Headers"] = "Origin, X-Atmosphere-tracking-id, X-Atmosphere-Framework, X-Cache-Date, Content-Type, X-Atmosphere-Transport, *"
def _log_request_start(self, req_type="SG"):
    """Begin per-request bookkeeping: assign a global sequence number, store
    start time/URL on flask.g for later log lines, and fire the 'start'
    callback."""
    global req_seqnum
    req_seqnum += 1
    req_info = dict(request_id=req_seqnum, start_time=time.time(), req_type=req_type, req_url=request.url)
    flask.g.req_info = req_info
    webapi_log.info("%s REQUEST (%s) - %s", req_type, req_info["request_id"], request.url)
    self._call_request_callback("start", req_info)
def _log_request_response(self, content_type, result="", content_length=-1, status_code=200):
    """Record response metadata on the per-request info dict, if a request
    was started; otherwise do nothing."""
    req_info = flask.g.get("req_info", None)
    if not req_info:
        return
    req_info["resp_content_type"] = content_type
    req_info["resp_content_length"] = content_length
    req_info["resp_result"] = result
    # Keep a status already set earlier (e.g. by the error path)
    req_info.setdefault("resp_status", status_code)
def _log_request_error(self, result, status_code):
    """Record and log an error outcome for the current request.

    result is the gateway error dict (exception name, message, exc id).
    Falls back to a reduced log line when no request start info exists.
    """
    req_info = flask.g.get("req_info", None)
    if req_info:
        req_info["resp_error"] = True
        req_info["resp_status"] = status_code
        webapi_log.warn("%s REQUEST (%s) ERROR (%s%s) - %s: %s",
                        req_info["req_type"], req_info["request_id"],
                        status_code,
                        "/id="+result[GATEWAY_ERROR_EXCID] if result[GATEWAY_ERROR_EXCID] else "",
                        result[GATEWAY_ERROR_EXCEPTION],
                        result[GATEWAY_ERROR_MESSAGE])
        self._call_request_callback("error", req_info)
    else:
        # No _log_request_start() was recorded for this request
        webapi_log.warn("REQUEST ERROR (%s%s) - %s: %s",
                        status_code,
                        "/id="+result[GATEWAY_ERROR_EXCID] if result[GATEWAY_ERROR_EXCID] else "",
                        result[GATEWAY_ERROR_EXCEPTION],
                        result[GATEWAY_ERROR_MESSAGE])
def _log_request_end(self):
    """Close out per-request bookkeeping: record end time, emit a summary log
    line (status, duration, size, content type) and fire the 'end' callback.
    Warns if no start info was recorded."""
    req_info = flask.g.get("req_info", None)
    if req_info:
        req_info["end_time"] = time.time()
        webapi_log.info("%s REQUEST (%s) RESP (%s) - %.3f s, %s bytes, %s",
                        req_info["req_type"], req_info["request_id"],
                        req_info.get("resp_status", ""),
                        req_info["end_time"] - req_info["start_time"],
                        req_info.get("resp_content_length", ""),
                        req_info.get("resp_content_type", "")
                        )
        self._call_request_callback("end", req_info)
    else:
        webapi_log.warn("REQUEST END - missing start info")
def _get_request_args(self):
    """Extracts service request arguments from HTTP request. Supports various
    methods and forms of encoding. Separates arguments for special parameters
    from service operation parameters.
    Returns a dict with the service request arguments, containing key params
    with the actual values for the service operation parameters.
    """
    str_args = False
    request_args = {}
    if request.method == "POST" or request.method == "PUT":
        # Use only body args and ignore any args from query string
        if request.headers.get("content-type", "").startswith(CONT_TYPE_JSON):
            # JSON body request
            if request.data:
                request_args = json_loads(request.data)
                if GATEWAY_ARG_PARAMS not in request_args:
                    # Magic fallback: Directly use JSON first level as args if params key not present
                    request_args = {GATEWAY_ARG_PARAMS: request_args}
        elif request.form:
            # Form encoded payload
            if GATEWAY_ARG_JSON in request.form:
                payload = request.form[GATEWAY_ARG_JSON]
                request_args = json_loads(payload)
                if GATEWAY_ARG_PARAMS not in request_args:
                    # Magic fallback: Directly use JSON first level as args if params key not present
                    request_args = {GATEWAY_ARG_PARAMS: request_args}
            else:
                # Fallback: Directly use form values
                str_args = True
                request_args = {GATEWAY_ARG_PARAMS: request.form.to_dict(flat=True)}
        else:
            # No args found in body
            request_args = {GATEWAY_ARG_PARAMS: {}}
        # Extract file args (read whole file content into the param value)
        for file_arg in request.files:
            try:
                file_handle = request.files[file_arg]
                arg_val = file_handle.read()
                request_args[GATEWAY_ARG_PARAMS][file_arg] = arg_val
            except Exception as ex:
                log.exception("Error reading request file argument %s", file_arg)
    elif request.method == "GET":
        str_args = True
        # Reserved query-string keys routed outside the service op params
        REQ_ARGS_SPECIAL = {"authtoken", "timeout", "headers"}
        args_dict = request.args.to_dict(flat=True)
        request_args = {k: request.args[k] for k in args_dict if k in REQ_ARGS_SPECIAL}
        req_params = {k: request.args[k] for k in args_dict if k not in REQ_ARGS_SPECIAL}
        request_args[GATEWAY_ARG_PARAMS] = req_params
    request_args["str_args"] = str_args  # Indicate downstream that args are str (GET or form encoded)
    #log.info("Request args: %s" % request_args)
    return request_args
def _get_typed_arg_value(self, given_value, param_def, strict):
    """Returns a service operation argument value, based on a given value and param schema definition.

    Raises BadRequest if the schema type is neither an ION object type nor
    a supported builtin type name.
    """
    param_type = param_def["type"]
    # NOTE: `unicode` is Python 2 only; values are normalized to UTF-8 byte strings
    if isinstance(given_value, unicode):
        # Convert all unicode to str in UTF-8
        given_value = given_value.encode("utf8")  # Make all unicode into str
    if isinstance(given_value, IonObjectBase) and (given_value._get_type() == param_type or
                                                   param_type in given_value._get_extends()):
        # Already the right object type (or a subtype) - pass through
        return given_value
    elif is_ion_object_dict(given_value) and (param_type == "NoneType" or hasattr(objects, param_type)):
        # Dict with a type_ key: instantiate the corresponding ION object
        return self.create_ion_object(given_value)
    elif param_type in ("str", "bool", "int", "float", "list", "dict", "NoneType"):
        # Builtin type: coerce (strict controls whether str->type casts are allowed)
        arg_val = get_typed_value(given_value, targettype=param_type, strict=strict)
        return arg_val
    else:
        raise BadRequest("Cannot convert param value to type %s" % param_type)
def create_parameter_list(self, service_def, operation, request_args, id_param=None):
    """Build service call parameter list dynamically from service operation definition

    When id_param is given, it is coerced into the operation's first
    declared parameter and all other request args are ignored.
    """
    service_schema = service_def.schema
    service_op_schema = service_schema["operations"][operation]
    svc_op_param_list = service_op_schema["in_list"]
    svc_params = {}
    if id_param:
        # Magic shorthand: if one argument is given, fill the first service argument
        if svc_op_param_list:
            fill_par = svc_op_param_list[0]
            fill_par_def = service_op_schema["in"][fill_par]
            arg_val = self._get_typed_arg_value(id_param, fill_par_def, strict=False)
            svc_params[fill_par] = arg_val
        return svc_params
    request_args = request_args or {}
    # Cannot be strict for a URL string query arguments or directly form encoded
    strict_types = False if request_args.get("str_args", False) else self.strict_types
    req_op_args = request_args.get(GATEWAY_ARG_PARAMS, None) or {}
    # Coerce each declared operation parameter present in the request
    for param_name in svc_op_param_list:
        param_def = service_op_schema["in"][param_name]
        if param_name in req_op_args:
            arg_val = self._get_typed_arg_value(req_op_args[param_name], param_def, strict=strict_types)
            svc_params[param_name] = arg_val
    if "timeout" in request_args:
        svc_params["timeout"] = float(request_args["timeout"])
    # Anything the caller sent that is not a declared parameter
    optional_args = [param for param in req_op_args if param not in svc_params and param != "timeout"]
    if optional_args and "optional_args" in svc_op_param_list:
        # Only support basic strings for these optional params for now
        svc_params["optional_args"] = {arg: str(req_op_args[arg]) for arg in optional_args}
    #log.info("Service params: %s" % svc_params)
    return svc_params
def _get_gateway_headers(self):
    """Returns the headers that the service gateway uses to make service calls on behalf of itself
    (not a user passing through), e.g. for identity management purposes"""
    # The gateway acts under its own name with a non-expiring validity
    return {MSG_HEADER_ACTOR: self.name,
            MSG_HEADER_VALID: DEFAULT_EXPIRY}
def get_secure_service_def(self, service_name):
    """Checks whether the service indicated by given service_name exists and/or
    is exposed after white and black listing. Returns service registry entry.

    Raises Unauthorized when filtered out, BadRequest when unknown, and
    Inconsistent when the registry entry lacks a client or schema.
    """
    # Whitelist (if configured) takes effect first, then blacklist
    if self.service_whitelist:
        if service_name not in self.service_whitelist:
            raise Unauthorized("Service access not permitted")
    if self.service_blacklist:
        if service_name in self.service_blacklist:
            raise Unauthorized("Service access not permitted")
    # Retrieve service definition
    target_service = get_service_registry().get_service_by_name(service_name)
    if not target_service:
        raise BadRequest("The requested service (%s) is not available" % service_name)
    if not target_service.client:
        raise Inconsistent("Cannot find a client class for the specified service: %s" % service_name)
    if not target_service.schema:
        raise Inconsistent("Cannot find a schema for the specified service: %s" % service_name)
    return target_service
def build_message_headers(self, actor_id, expiry):
    """Returns the headers that the service gateway uses to make service calls on behalf of a
    user, based on the user session or request arguments

    Role lookup results are cached per actor id; any failure during role
    resolution degrades to an empty role header rather than failing the call.
    """
    headers = dict()
    headers[MSG_HEADER_ACTOR] = actor_id
    headers[MSG_HEADER_VALID] = expiry
    req_info = flask.g.get("req_info", None)
    if req_info:
        headers["request-id"] = str(req_info["request_id"])
    # If this is an anonymous requester then there are no roles associated with the request
    if actor_id == DEFAULT_ACTOR_ID:
        headers[MSG_HEADER_ROLES] = dict()
        return headers
    try:
        # Check to see if the user's roles are cached already - keyed by user id
        # NOTE(review): has_key() here is the cache object's own API (Python 2 style) - confirm before porting
        if self.user_role_cache.has_key(actor_id):
            role_header = self.user_role_cache.get(actor_id)
            if role_header is not None:
                headers[MSG_HEADER_ROLES] = role_header
                return headers
        # The user's roles were not cached so hit the datastore to find it.
        role_list = self.org_client.list_actor_roles(actor_id, headers=self._get_gateway_headers())
        org_roles = {}
        # Group roles by org governance name before building the header
        for role in role_list:
            org_roles.setdefault(role.org_governance_name, []).append(role)
        role_header = get_role_message_headers(org_roles)
        # Cache the roles by user id
        self.user_role_cache.put(actor_id, role_header)
    except Exception:
        role_header = dict()  # Default to empty dict if there is a problem finding roles for the user
    headers[MSG_HEADER_ROLES] = role_header
    return headers
def create_ion_object(self, object_params):
    """Create and initialize an ION object from a dictionary of parameters coming via HTTP,
    ready to be passed on to services/messaging. The object is validated after creation.
    Note: This is not called for service operation argument signatures

    object_params must contain a "type_" key naming the object type.
    """
    new_obj = IonObject(object_params["type_"])
    # Iterate over the parameters to add to object; have to do this instead
    # of passing a dict to get around restrictions in object creation on setting _id, _rev params
    for param in object_params:
        self.set_object_field(new_obj, param, object_params.get(param))
    new_obj._validate()  # verify that all of the object fields were set with proper types
    return new_obj
def set_object_field(self, obj, field, field_val):
    """Recursively set sub object field values.
    TODO: This may be an expensive operation. May also be redundant with object code

    Dict values (except under a "kwargs" field) are walked into nested ION
    objects; a dict carrying a different compatible "type_" replaces the
    existing sub-object with a fresh instance of that subtype.
    """
    if isinstance(field_val, dict) and field != "kwargs":
        sub_obj = getattr(obj, field)
        if isinstance(sub_obj, IonObjectBase):
            if "type_" in field_val and field_val["type_"] != sub_obj.type_:
                if issubtype(field_val["type_"], sub_obj.type_):
                    # Payload declares a subtype - replace sub object with that type
                    sub_obj = IonObject(field_val["type_"])
                    setattr(obj, field, sub_obj)
                else:
                    raise Inconsistent("Unable to walk the field %s - types don't match: %s %s" % (
                        field, sub_obj.type_, field_val["type_"]))
            # Recurse into the (possibly replaced) sub object
            for sub_field in field_val:
                self.set_object_field(sub_obj, sub_field, field_val.get(sub_field))
        elif isinstance(sub_obj, dict):
            # Plain dict attribute - assign wholesale
            setattr(obj, field, field_val)
        else:
            for sub_field in field_val:
                self.set_object_field(sub_obj, sub_field, field_val.get(sub_field))
    else:
        # type_ already exists in the class.
        if field != "type_":
            setattr(obj, field, field_val)
# -------------------------------------------------------------------------
# Response content helpers
def json_response(self, response_data):
    """Private implementation of standard flask jsonify to specify the use of an encoder to walk ION objects
    """
    # Pretty-print (indent=2) unless the request came via XHR
    # NOTE(review): request.is_xhr was removed in newer Flask/Werkzeug - confirm version pin
    resp_obj = json_dumps(response_data, default=encode_ion_object, indent=None if request.is_xhr else 2)
    resp = self.response_class(resp_obj, mimetype=CONT_TYPE_JSON)
    # CORS only in develop mode, when configured or when an api_key is supplied
    if self.develop_mode and (self.set_cors_headers or ("api_key" in request.args and request.args["api_key"])):
        self._add_cors_headers(resp)
    self._log_request_response(CONT_TYPE_JSON, resp_obj, len(resp_obj))
    return resp
def gateway_json_response(self, response_data):
    """Returns the normal service gateway response as JSON or as media in case the response
    is a media response

    MediaResponse bodies may be base64 encoded, plain utf8, or a server-side
    filename that is read and then deleted.
    """
    if isinstance(response_data, MediaResponse):
        log.info("Media response. Content mimetype:%s", response_data.media_mimetype)
        content = response_data.body
        if response_data.internal_encoding == "base64":
            import base64
            content = base64.decodestring(content)
        elif response_data.internal_encoding == "utf8":
            pass
        elif response_data.internal_encoding == "filename":
            # Server side file name for download and then delete
            filename = content
            if not os.path.exists(filename):
                raise BadRequest("File not found")
            # Simple way of solving file download problem
            # TODO: Use flask send_file and after request hook
            with open(filename, "rb") as f:
                content = f.read()
            os.remove(filename)
        resp_attrs = dict(mimetype=response_data.media_mimetype)
        if response_data.response_headers:
            resp_attrs["headers"] = response_data.response_headers
        if response_data.response_code:
            resp_attrs["status_code"] = response_data.response_code
        # NOTE(review): the positional arg uses response_data.code while the checks
        # above use response_data.response_code - confirm both attributes exist on MediaResponse
        resp = self.response_class(content, response_data.code, **resp_attrs)
        self._log_request_response(response_data.media_mimetype, "raw", len(content), response_data.code)
        return resp
    # Caller may force a raw response with a specific mimetype
    if RETURN_MIMETYPE_PARAM in request.args:
        return_mimetype = str(request.args[RETURN_MIMETYPE_PARAM])
        return self.response_class(response_data, mimetype=return_mimetype)
    result = {
        GATEWAY_RESPONSE: response_data,
        GATEWAY_STATUS: 200,
    }
    return self.json_response(result)
def gateway_error_response(self, exc):
    """Forms a service gateway error response.
    Can extract multiple stacks from a multi-tier RPC service call exception

    Returns a JSON error body with exception name, message and id; the HTTP
    status is taken from the exception's status_code (default 400).
    """
    if hasattr(exc, "get_stacks"):
        # Process potentially multiple stacks.
        full_error, exc_stacks = "", exc.get_stacks()
        for i in range(len(exc_stacks)):
            full_error += exc_stacks[i][0] + "\n"
            if i == 0:
                # Local stack for the outermost exception
                full_error += "".join(traceback.format_exception(*sys.exc_info()))
            else:
                # Remote stacks captured from downstream services
                entry = ApplicationException.format_stack(exc_stacks[i][1])
                full_error += entry + "\n"
        exec_name = exc.__class__.__name__
    else:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        exec_name = exc_type.__name__
        full_error = "".join(traceback.format_exception(*sys.exc_info()))
    status_code = getattr(exc, "status_code", 400)
    if self.log_errors:
        # 401s are expected auth failures - log at lower severity to reduce noise
        if self.develop_mode:
            if status_code == 401:
                log.warn("%s: %s", exec_name, exc)
            else:
                log.error(full_error)
        else:
            if status_code == 401:
                log.info("%s: %s", exec_name, exc)
            else:
                log.info(full_error)
    result = {
        GATEWAY_ERROR_EXCEPTION: exec_name,
        GATEWAY_ERROR_MESSAGE: str(exc.message),  # NOTE: exc.message is Python 2 only
        GATEWAY_ERROR_EXCID: getattr(exc, "exc_id", "") or ""
    }
    if self.develop_mode:
        result[GATEWAY_ERROR_TRACE] = full_error
    if RETURN_MIMETYPE_PARAM in request.args:
        return_mimetype = str(request.args[RETURN_MIMETYPE_PARAM])
        return self.response_class(result, mimetype=return_mimetype)
    self._log_request_error(result, status_code)
    resp = self.json_response({GATEWAY_ERROR: result, GATEWAY_STATUS: status_code})
    # Q: Should HTTP status be the error code of the exception?
    resp.status_code = status_code
    return resp
# Setting Flask app config when blueprint is initialized
@sg_blueprint.record
def record_params(setup_state):
    """Blueprint registration hook: copy gateway settings onto the Flask app."""
    app = setup_state.app
    if sg_instance.max_content_length:
        app.config["MAX_CONTENT_LENGTH"] = int(sg_instance.max_content_length)
# -------------------------------------------------------------------------
# Generic route handlers
# Checks to see if the remote_addr in the request is in the list of specified trusted addresses, if any.
@sg_blueprint.before_request
def is_trusted_request():
    """Abort with 403 unless the request originates from a trusted address."""
    if sg_instance.develop_mode:
        # Python 2 print statements - develop-mode request tracing only
        print "----------------------------------------------------------------------------------"
        print "URL:", request.url
    if request.remote_addr is not None:
        log.debug("%s from: %s: %s", request.method, request.remote_addr, request.url)
    if not sg_instance.is_trusted_address(request.remote_addr):
        abort(403)
@sg_blueprint.errorhandler(403)
def custom_403(error):
    """Render the 403 denial as a gateway JSON error body."""
    result = {GATEWAY_ERROR: "The request has been denied since it did not originate from a trusted originator."}
    return sg_instance.json_response(result)
# -------------------------------------------------------------------------
# Service calls
# ROUTE: Ping with gateway version
@sg_blueprint.route("/")
def sg_index():
    """Ping endpoint: delegate to the gateway instance's index/version info."""
    return sg_instance.sg_index()
@sg_blueprint.route("/spec/<spec_name>", methods=["GET"])
@sg_blueprint.route("/spec/<service_name>/<spec_name>", methods=["GET"])
def get_service_spec(service_name=None, spec_name=None):
    """Serve an interface spec, optionally scoped to a single service."""
    return sg_instance.get_service_spec(service_name, spec_name)
# ROUTE: Make a service request
# Accepts arguments passed as query string parameters; like:
# http://hostname:port/service/request/resource_registry/find_resources?restype=TestInstrument&id_only=False
# Also accepts arguments form encoded and as JSON; example:
# curl --data "payload={"params": { "restype": "TestInstrument", "name": "", "id_only": true } }" http://localhost:4000/service/request/resource_registry/find_resources
@sg_blueprint.route("/request", methods=["GET", "POST"])
@sg_blueprint.route("/request/<service_name>", methods=["GET", "POST"])
@sg_blueprint.route("/request/<service_name>/<operation>", methods=["GET", "POST"])
@sg_blueprint.route("/request/<service_name>/<operation>/<id_param>", methods=["GET", "POST"])
def process_gateway_request(service_name=None, operation=None, id_param=None):
    """Dispatch a generic service operation request to the gateway instance."""
    return sg_instance.process_gateway_request(service_name, operation, id_param)
# ROUTE: Make a service request in REST style
# Arguments to POST, PUT must be form encoded and as JSON; example:
# curl --data "payload={"data": { "type_": "TestInstrument", "name": "" } }" http://localhost:4000/service/rest/service/res_type
@sg_blueprint.route("/rest/<service_name>/<res_type>", methods=["GET", "POST"])
@sg_blueprint.route("/rest/<service_name>/<res_type>/<id_param>", methods=["GET", "PUT"])
def rest_gateway_request(service_name, res_type, id_param=None):
    """Dispatch a REST-style (resource type based) service request."""
    return sg_instance.rest_gateway_request(service_name, res_type, id_param)
# ROUTE: Returns a json object for a specified resource type with all default values.
@sg_blueprint.route("/resource_type_schema/<resource_type>")
def get_resource_schema(resource_type):
    """Return the JSON schema (with defaults) for the given resource type."""
    return sg_instance.get_resource_schema(resource_type)
# ROUTE: Get attachment for a specific attachment id
@sg_blueprint.route("/attachment/<attachment_id>", methods=["GET"])
def get_attachment(attachment_id):
    """Retrieve the attachment with the given id."""
    return sg_instance.get_attachment(attachment_id)
# ROUTE:
@sg_blueprint.route("/attachment", methods=["POST"])
def create_attachment():
    """Create an attachment from the posted request body."""
    return sg_instance.create_attachment()
# ROUTE:
@sg_blueprint.route("/attachment/<attachment_id>", methods=["DELETE"])
def delete_attachment(attachment_id):
    """Delete the attachment with the given id."""
    return sg_instance.delete_attachment(attachment_id)
# ROUTE: Get version information about this copy of ScionCC
@sg_blueprint.route("/version")
@sg_blueprint.route("/version/<pack>")
def get_version_info(pack=None):
    """Return version information, optionally for a specific package."""
    return sg_instance.get_version_info(pack)
import pytest
import torch
from torch.utils.data.dataloader import DataLoader
from pytorch_lightning import Trainer
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities.data import (
_get_dataloader_init_kwargs,
_replace_dataloader_init_method,
_update_dataloader,
extract_batch_size,
get_len,
has_iterable_dataset,
has_len,
has_len_all_ranks,
warning_cache,
)
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.deprecated_api import no_warning_call
from tests.helpers.boring_model import BoringModel, RandomDataset, RandomIterableDataset
def test_extract_batch_size():
    """Tests the behavior of extracting the batch size."""

    def _check_warning_not_raised(data, expected):
        # Unambiguous batches are inferred silently
        with no_warning_call(match="Trying to infer the `batch_size`"):
            assert extract_batch_size(data) == expected

    def _check_warning_raised(data, expected):
        # BUG FIX: the helper previously closed over the outer `batch` variable
        # instead of using its `data` argument, so it checked a stale object.
        with pytest.warns(UserWarning, match=f"Trying to infer the `batch_size` .* we found is {expected}."):
            assert extract_batch_size(data) == expected
        warning_cache.clear()

    def _check_error_raised(data):
        # BUG FIX: same closure bug - use the argument, not the outer `batch`.
        with pytest.raises(MisconfigurationException, match="We could not infer the batch_size"):
            extract_batch_size(data)

    # Warning not raised
    batch = torch.zeros(11, 10, 9, 8)
    _check_warning_not_raised(batch, 11)

    batch = {"test": torch.zeros(11, 10)}
    _check_warning_not_raised(batch, 11)

    batch = [torch.zeros(11, 10)]
    _check_warning_not_raised(batch, 11)

    batch = {"test": [{"test": [torch.zeros(11, 10)]}]}
    _check_warning_not_raised(batch, 11)

    # Warning raised (ambiguous sizes: first found wins)
    batch = {"a": [torch.tensor(1), torch.tensor(2)], "b": torch.tensor([1, 2, 3, 4])}
    _check_warning_raised(batch, 1)

    batch = {"test": [{"test": [torch.zeros(11, 10), torch.zeros(10, 10)]}]}
    _check_warning_raised(batch, 11)

    batch = {"test": [{"test": [torch.zeros(10, 10), torch.zeros(11, 10)]}]}
    _check_warning_raised(batch, 10)

    batch = [{"test": torch.zeros(10, 10), "test_1": torch.zeros(11, 10)}]
    _check_warning_raised(batch, 10)

    # Error raised (no tensor to infer from)
    batch = "test string"
    _check_error_raised(batch)

    data = {"test": ["some text"] * 7}
    _check_error_raised(data)

    class CustomBatch:
        def __init__(self):
            self.x = torch.randn(7, 2)

    data = CustomBatch()
    _check_error_raised(data)
def test_has_iterable_dataset():
    """`has_iterable_dataset` must detect only true IterableDataset instances."""
    assert has_iterable_dataset(DataLoader(RandomIterableDataset(1, 1)))

    assert not has_iterable_dataset(DataLoader(RandomDataset(1, 1)))

    class MockDatasetWithoutIterableDataset(RandomDataset):
        # Defines __iter__ without subclassing IterableDataset - must not count
        def __iter__(self):
            yield 1
            return self

    assert not has_iterable_dataset(DataLoader(MockDatasetWithoutIterableDataset(1, 1)))
def test_has_len():
    """`has_len` is True for sized loaders, raises on zero length, and is
    False for iterable datasets."""
    assert has_len(DataLoader(RandomDataset(1, 1)))

    with pytest.raises(ValueError, match="`Dataloader` returned 0 length."):
        assert has_len(DataLoader(RandomDataset(0, 0)))

    assert not has_len(DataLoader(RandomIterableDataset(1, 1)))
def test_get_len():
    """`get_len` returns the concrete length for sized loaders and
    float('inf') for iterable-dataset loaders."""
    sized_loader = DataLoader(RandomDataset(1, 1))
    assert get_len(sized_loader) == 1

    iterable_loader = DataLoader(RandomIterableDataset(1, 1))
    inferred = get_len(iterable_loader)
    assert isinstance(inferred, float)
    assert inferred == float("inf")
def test_has_len_all_rank():
    """Zero total length across ranks raises; non-zero length passes."""
    trainer = Trainer(fast_dev_run=True)
    model = BoringModel()

    with pytest.raises(MisconfigurationException, match="Total length of `Dataloader` across ranks is zero."):
        assert not has_len_all_ranks(DataLoader(RandomDataset(0, 0)), trainer.strategy, model)

    assert has_len_all_ranks(DataLoader(RandomDataset(1, 1)), trainer.strategy, model)
def test_update_dataloader_typerror_custom_exception():
    """Subclasses whose __init__ arguments collide with DataLoader's own
    parameters should produce a helpful MisconfigurationException."""

    class BadImpl(DataLoader):
        def __init__(self, foo, *args, **kwargs):
            self.foo = foo
            # positional conflict with `dataset`
            super().__init__(foo, *args, **kwargs)

    dataloader = BadImpl([1, 2, 3])
    with pytest.raises(MisconfigurationException, match="`DataLoader` implementation has an error.*`dataset`"):
        _update_dataloader(dataloader, dataloader.sampler)

    class BadImpl2(DataLoader):
        def __init__(self, randomize, *args, **kwargs):
            self.randomize = randomize
            # keyword conflict with `shuffle`
            super().__init__(*args, shuffle=randomize, **kwargs)

    dataloader = BadImpl2(False, [])
    with pytest.raises(MisconfigurationException, match="`DataLoader` implementation has an error.*`shuffle`"):
        _update_dataloader(dataloader, dataloader.sampler)

    class GoodImpl(DataLoader):
        def __init__(self, randomize, *args, **kwargs):
            # fixed implementation, kwargs are filtered
            self.randomize = randomize or kwargs.pop("shuffle", False)
            super().__init__(*args, shuffle=randomize, **kwargs)

    dataloader = GoodImpl(False, [])
    new_dataloader = _update_dataloader(dataloader, dataloader.sampler)
    assert isinstance(new_dataloader, GoodImpl)
def test_replace_dataloader_init_method():
    """Test that context manager intercepts arguments passed to custom subclasses of torch.utils.DataLoader and
    sets them as attributes."""

    class DataLoaderSubclass1(DataLoader):
        def __init__(self, attribute1, *args, **kwargs):
            # intentionally not setting this attribute, calling super with different args
            # self.attribute1 = attribute1
            super().__init__(*args, **kwargs)

    class DataLoaderSubclass2(DataLoaderSubclass1):
        def __init__(self, attribute1, attribute2, *args, **kwargs):
            # intentionally not setting this attribute, calling super with different args
            # self.attribute2 = attribute2
            super().__init__(attribute1, *args, **kwargs)

    # The patched __init__ must capture the extra ctor arg as an attribute
    with _replace_dataloader_init_method():
        dataloader = DataLoaderSubclass1("attribute1", dataset=range(4), batch_size=2)
    assert dataloader.attribute1 == "attribute1"

    # Capture must also work through a subclass chain
    with _replace_dataloader_init_method():
        dataloader = DataLoaderSubclass2("attribute1", "attribute2", dataset=range(4), batch_size=2)
    assert dataloader.attribute1 == "attribute1"
    assert dataloader.attribute2 == "attribute2"
@pytest.mark.parametrize("mode", [RunningStage.TRAINING, RunningStage.PREDICTING, RunningStage.TESTING])
def test_dataloader_kwargs_replacement_with_iterable_dataset(mode):
    """Test that DataLoader kwargs are not replaced when using Iterable Dataset."""
    dataset = RandomIterableDataset(7, 100)
    dataloader = DataLoader(dataset, batch_size=32)
    dl_kwargs = _get_dataloader_init_kwargs(dataloader, dataloader.sampler, mode=mode)
    # Iterable datasets have no sampler/batch_sampler to swap out
    assert dl_kwargs["sampler"] is None
    assert dl_kwargs["batch_sampler"] is None
    # Remaining kwargs must pass through unchanged
    assert dl_kwargs["batch_size"] is dataloader.batch_size
    assert dl_kwargs["dataset"] is dataloader.dataset
    assert dl_kwargs["collate_fn"] is dataloader.collate_fn
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
## Create your models here.
from django.utils import timezone
from django.contrib.postgres.fields import JSONField
class Movie(models.Model):
    """A film record aggregating IMDb/TMDb metadata, credits, tags and
    community-curated similarity links."""

    def __str__(self):
        return "%s" % self.title

    title = models.CharField('Movie Title', max_length=300, default='')
    imdb_id = models.CharField('IMDB ID', max_length=9, default='')
    rating = models.CharField('Rating', max_length=25, default='', null=True)
    # BUG FIX: 'runtime' was declared twice (a CharField here and a
    # PositiveSmallIntegerField further down). Python keeps only the last
    # class attribute, so the CharField never became a model field; the
    # surviving integer field is kept here to preserve the field order.
    runtime = models.PositiveSmallIntegerField(null=True)
    imdb_rating = models.DecimalField('IMDb Rating', max_digits=3, decimal_places=1, default=0, null=True)
    imdb_votes = models.IntegerField('IMDb Votes', default=0, null=True)
    metacritic_rating = models.IntegerField('Metacritic Rating', default=0, null=True)
    lastUpdated = models.DateField('lastUpdated', default=timezone.now)
    date = models.DateField('Date', default=timezone.now)
    plot = models.TextField('Plot', default='', null=True)
    tagline = models.TextField('Tagline', default='', null=True)
    fullplot = models.TextField('FullPlot', default='', null=True)
    poster = models.TextField('Poster', default='', null=True)
    postersaved = models.BooleanField(default=False)
    awards = models.TextField('Awards', default='', null=True)
    youtubeid = models.TextField('YouTube ID', max_length=24, default='')
    tmdbdata = models.BooleanField(default=False)
    adult = models.BooleanField(default=False)
    # BUG FIX: verbose name was 'YouTube ID' (copy-paste from youtubeid above).
    language = models.TextField('Language', max_length=2, default='')
    similar = models.ManyToManyField('self', through='SimilarMovieRel', symmetrical=False)

    def actors(self, N=False):
        """Return up to N credited actors ordered by billing, each annotated
        with the character played in this movie. N=False returns all."""
        actorset = Crew.objects.filter(credit=self.pk, job='a').order_by('crewcredit__order')
        if not N:
            N = actorset.count()
        actorset = list(actorset[0:N])
        # BUG FIX: iterate the materialized list instead of range(0, N) -
        # a caller-supplied N larger than the credit count raised IndexError.
        for member in actorset:
            member.character = member.crewcredit_set.filter(movie=self.pk).first().character
        return actorset

    def tags(self, type='t'):
        """Return this movie's tags filtered by tag type ('all' for every type)."""
        m = Movie.objects.get(pk=self.pk)
        if type == 'all':
            return m.movietags_set.all()
        else:
            return m.movietags_set.filter(type=type)

    def actor_preview(self):
        """First two billed actor credits, for list displays."""
        return self.crewcredit_set.filter(crew__job="a").order_by('order')[0:2]

    def director_preview(self):
        """All director credits ordered by billing."""
        return self.crewcredit_set.filter(crew__job="d").order_by('order')
class Genre(models.Model):
    """A genre label shared across movies (many-to-many)."""
    def __str__(self):
        return "%s" % self.genre
    movie = models.ManyToManyField(Movie)
    genre = models.CharField('Genre', max_length=25, default='')
class SimilarMovieRel(models.Model):
    """Directed 'similar to' link between two movies, with community voting
    and an original-poster rating/message."""
    def __str__(self):
        return "%s to %s" % (self.linkfrom.title, self.linkto.title)
    linkfrom = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='linkfrom', null=True)
    linkto = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='linkto', null=True)
    basedon = models.CharField('Based On', max_length=5, default='')
    score = models.FloatField('Score')
    votes = models.IntegerField('Votes', default=0)
    # Submitter of the link; default pk=1 - presumably a system/admin user, TODO confirm
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=1)
    reason = models.CharField('Reason', max_length=200, default='None given')
    # Users who have voted on this link
    votefrom = models.ManyToManyField(User, related_name="simvotefrom")
    # NOTE(review): 99 appears to be a "not yet rated" sentinel - verify against callers
    op_rating = models.SmallIntegerField('Rate Similar', default=99)
    op_message = models.TextField(default='')
class Actor(models.Model):
    """An actor keyed by an externally assigned integer id, with aggregate
    IMDb-derived scoring fields and a many-to-many credit relation."""

    def __str__(self):
        return "%s" % self.name

    id = models.IntegerField('ActorID', primary_key=True)
    name = models.CharField('Actor Name', max_length=50)
    profilepath = models.TextField('Poster', default='', null=True)
    score = models.FloatField('Derived Actor Score', default=0)
    imdb_mean = models.FloatField('Average IMDb Score', default=0)
    # BUG FIX: verbose name was 'Order' (copy-paste); this field stores vote counts.
    imdb_votes = models.IntegerField('IMDb Votes', default=1)
    # NOTE(review): verbose name duplicates imdb_mean's - presumably a
    # composite/weighted score; confirm the intended label.
    imdb_comp = models.FloatField('Average IMDb Score', default=0)
    credit = models.ManyToManyField(Movie, through='ActorCredit')
class ActorCredit(models.Model):
    """Through-model linking an Actor to a Movie with billing order and
    the character played."""
    def __str__(self):
        return "%s" % self.character
    character = models.CharField('Character', max_length=50)
    actor = models.ForeignKey(Actor, on_delete=models.CASCADE)
    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    # Billing order within the movie's cast list
    order = models.IntegerField('Order', default=0)
    castid = models.IntegerField('CastID', default=0)
class Crew(models.Model):
    """A crew member (job 'a' = actor, 'd' = director per usage in Movie)
    with aggregate IMDb-derived scoring fields.

    NOTE(review): several verbose names look copy-pasted ('Order' on
    imdb_votes, 'ActorID' on crewid) - confirm the intended labels.
    """
    def __str__(self):
        return "%s as %s" % (self.name, self.character)
    crewid = models.IntegerField('ActorID', unique=True, null=True)
    credit = models.ManyToManyField(Movie, through='CrewCredit')
    job = models.CharField('Job', max_length=1, default='')
    name = models.CharField('Crew Name', max_length=50)
    score = models.FloatField('Derived Score', default=0)
    imdb_mean = models.FloatField('Average IMDb Score', default=0)
    imdb_votes = models.IntegerField('Order', default=1)
    imdb_comp = models.FloatField('Average IMDb Score', default=0)
    profilepath = models.TextField('Poster', default='', null=True)
    # Class-level placeholder read by __str__ when no per-credit character was
    # attached; author marked it 'removeme' - do not delete without fixing __str__.
    character = 'removeme'
class CrewCredit(models.Model):
    """Through model joining Crew and Movie: one crew credit on a film."""
    crew = models.ForeignKey(Crew, on_delete=models.CASCADE)
    character = models.CharField('Character', max_length=50, null=True)
    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    order = models.PositiveSmallIntegerField('order', default=0)
class Question(models.Model):
    """A question posted by a user."""

    question_text = models.CharField('Question Text', max_length=200)
    user = models.ForeignKey(User, on_delete=models.CASCADE)

    def __str__(self):
        return "%s" % self.question_text
class Suggestion(models.Model):
    """A movie suggested in answer to a Question, with community voting."""

    # Same answer can apply to many questions/users, hence separate FKs.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    answer = models.ForeignKey(Movie, on_delete=models.CASCADE, null=True, blank=True)
    reason = models.CharField('Reason', max_length=200, default='None given')
    vote = models.IntegerField('Votes', default=0)
    votefrom = models.ManyToManyField(User, related_name="votefrom")
    # 99 looks like a "not yet rated" sentinel -- TODO confirm
    op_rating = models.SmallIntegerField('Rate Suggestion', default=99)
    op_message = models.TextField(default='')

    def __str__(self):
        return "Suggestion"
class MovieTags(models.Model):
    """A tag applied to movies by a user, with an occurrence count ``n``."""

    movie = models.ManyToManyField(Movie)
    tag = models.CharField('Tag', max_length=30, default='')
    type = models.CharField('Tag Type', max_length=1, default='t')
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=1)
    n = models.IntegerField('Count', default=0)

    def __str__(self):
        return "%s" % self.tag
class UserData(models.Model):
    """Per-user data record; ``type`` discriminates the payload kind."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    type = models.CharField('Data Type', max_length=8, default='')
    # data = JSONField()
class UserSettings(models.Model):
    """One-to-one user preference flags."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    displayseen = models.BooleanField(default=True)
class UserMovieRating(models.Model):
    """A user's watched flag and rating for a single movie."""

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    watched = models.BooleanField()
    # 99 looks like an "unrated" sentinel -- TODO confirm
    rating = models.PositiveSmallIntegerField(default=99)

    def __str__(self):
        return "Pref: %s" % self.user
class ContactUs(models.Model):
    """A contact-form submission.

    BUG FIX: every field's verbose_name was copy-pasted as 'Subject';
    labels now match the field they describe (admin/forms display only --
    column names and types are unchanged, so callers are unaffected).
    """

    subject = models.CharField('Subject', max_length=100)
    sender = models.CharField('Sender', max_length=100)
    cc_myself = models.CharField('CC Myself', max_length=100)
    recipients = models.CharField('Recipients', max_length=100)
    message = models.TextField('Message', max_length=1000)

    def __str__(self):
        return "Contacts"
|
997,579 | a78f9e9144390b0d1c05fcf30aaa68ac5a59f30a | import csv
import json
from app.database import db_session, Base, engine
from app.models import App, AppBundle, Target, AppType, Organization, Department, Tag, Connection, Header, DNS
from app.serializer import \
TagSerializer, ConnectionSerializer, HeaderSerializer,\
AppTypeSerializer, OrganizationSerializer, AppSerializer, AppBundleSerializer,\
DepartmentSerializer, TargetSerializer, DNSSerializer
from utils.insert import AppTypeInsert, AppInsert, AppBundleInsert, TargetInsert, DepartmentInsert,\
OrganizationInsert, TagInsert, ConnectionInsert, HeaderInsert, DNSInsert
# Create table <-> models mapping:
# table -> (<model>, <serializer>, <insert>)
# Keys are the lowercase table names accepted by export_tables()/insert_data().
table_models_map = {
    'apptype': {
        'model': AppType,
        'serializer': AppTypeSerializer,
        'insert': AppTypeInsert
    },
    'application': {
        'model': App,
        'serializer': AppSerializer,
        'insert': AppInsert
    },
    'appbundle': {
        'model': AppBundle,
        'serializer': AppBundleSerializer,
        'insert': AppBundleInsert
    },
    'organization': {
        'model': Organization,
        'serializer': OrganizationSerializer,
        'insert': OrganizationInsert
    },
    'department': {
        'model': Department,
        'serializer': DepartmentSerializer,
        'insert': DepartmentInsert
    },
    'tag': {
        'model': Tag,
        'serializer': TagSerializer,
        'insert': TagInsert
    },
    'connection': {
        'model': Connection,
        'serializer': ConnectionSerializer,
        'insert': ConnectionInsert
    },
    'header': {
        'model': Header,
        'serializer': HeaderSerializer,
        'insert': HeaderInsert
    },
    'target': {
        'model': Target,
        'serializer': TargetSerializer,
        'insert': TargetInsert
    },
    'dns': {
        'model': DNS,
        'serializer': DNSSerializer,
        'insert': DNSInsert
    }
}
def get_csv_data(file_path):
    """Return the rows of a tab-separated file as a list of dicts.

    Rows are keyed by the file's header line.

    BUG FIX: the file handle was opened with a bare ``open`` and never
    closed; it is now closed deterministically via ``with``.
    """
    with open(file_path) as csv_file:
        # Materialize inside the with-block: DictReader is lazy.
        return list(csv.DictReader(csv_file, delimiter="\t"))
def populate_from_samples():
    """ Read data from CSV files and init DB.

    Loads tags, organizations, departments, app types, applications and
    connections/headers from tab-separated files under ``samples/``.

    NOTE(review): each try/finally commits even when a row raises, so a
    partially-loaded batch is persisted and the exception still propagates
    -- confirm this is intended rather than rollback-on-error.
    """
    # Tags
    try:
        for row in get_csv_data('samples/tags.csv'):
            tag = Tag(name=row['Name'], desc=row['Description'])
            db_session.add(tag)
    finally:
        db_session.commit()
    # Organizations
    try:
        for row in get_csv_data('samples/organizations.csv'):
            org = Organization(desc=row['Name'])
            db_session.add(org)
    finally:
        db_session.commit()
    # Departments
    try:
        for row in get_csv_data('samples/departments.csv'):
            org = db_session.query(Organization).filter_by(desc=row['Organization']).one()
            dpt = Department(desc=row['Department'], org=org)
            db_session.add(dpt)
    finally:
        db_session.commit()
    # Application types
    try:
        for row in get_csv_data('samples/apptypes.csv'):
            apptype = AppType(desc=row['Name'])
            db_session.add(apptype)
    finally:
        db_session.commit()
    # Applications
    try:
        for row in get_csv_data('samples/applications.csv'):
            apptype = db_session.query(AppType).filter_by(desc=row['AppType']).one()
            # A department name is only unique within its organization,
            # so the lookup joins and filters on both.
            dpt = db_session.query(Department).join(Organization).\
                filter(Department.desc==row['Department']).\
                filter(Organization.desc==row['Organization']).\
                one()
            app = App(desc=row['Application'],
                      app_type=apptype,
                      department=dpt,
                      version=row['Version'],
                      environment=row['Environment'],
                      platform=row['Platform']
                      )
            db_session.add(app)
    finally:
        db_session.commit()
    # Connections and Headers
    try:
        for row in get_csv_data('samples/connections.csv'):
            conn = Connection(conn_type=row['Type'], url=row['URL'], port=row['Port'], answer=row['Answer'])
            header = Header(conn_id=conn.id, header=row['Header'], value=row['Value'], conn=conn)
            db_session.add(conn)
            db_session.add(header)
    finally:
        db_session.commit()
def export_tables(output=None):
    """Export every table in ``table_models_map`` to a JSON file.

    Writes ``<output>/<table>.json`` for each mapped table; prints an abort
    message and does nothing when *output* is not given.

    Cleanup: removed a dead ``Base.metadata.tables`` assignment that was
    immediately overwritten, and a redundant list comprehension around
    ``query(...).all()`` (which already returns a list).
    """
    if not output:
        print("[!] output folder not specified. Aborted.")
        return
    for t in table_models_map:
        print("Exporting %s ..." % t)
        rows = db_session.query(table_models_map[t]['model']).all()
        serialized = table_models_map[t]['serializer'](rows, many=True)
        # Write to JSON file
        with open(output + "/" + t + ".json", 'w') as outfile:
            json.dump(serialized.data, outfile, sort_keys=True, indent=2)
def insert_data(table, jsonfile):
    """Load *jsonfile* and hand its contents to *table*'s registered insert helper."""
    with open(jsonfile) as infile:
        payload = json.load(infile)
        table_models_map[table]['insert'](payload)
997,580 | 3e04605324eb45076a140f4e66aa877162046598 | from Question import Question
from info_gain import info_gain
from Question_gain import Question_gain
from partition import partition
import random
def choose_split(data,treshold):
    """Find the best question to ask by iterating over every feature / value
    and calculating the information gain.

    Returns ``(gain, question)``; ``(inf, nan)`` signals that no usable
    split exists.  NOTE(review): the misspelled parameter name ``treshold``
    is part of the public interface and is kept as-is.
    """
    n_features = len(data[0]) - 1  # number of columns
    quest_gain = []  # keep track of the gains and questions
    # NOTE(review): iteration starts at column 1, so column 0 is skipped --
    # presumably it holds a label/id column; confirm against the data layout.
    for col in range(1,n_features):  # for each feature
        values = set([row[col] for row in data])  # unique values in the column
        for val in values:  # for each value
            question = Question(col, val)
            # try splitting the dataset
            true_rows, false_rows = partition(data, question)
            # Skip this split if it doesn't divide the dataset.
            if len(true_rows) == 0 or len(false_rows) == 0:
                continue
            # Calculate the information gain from this split
            gain = info_gain(data, true_rows, false_rows)
            quest_gain.append(Question_gain(gain,question))
    possible_question = []  # possible questions to ask
    n_quest_gain = len(quest_gain)
    if n_quest_gain == 0:
        return float('Inf'), float('NaN')  # no candidate split at all
    # Keep only candidates whose gain clears the threshold.
    for x in range(n_quest_gain):
        if (quest_gain[x].gain >= treshold):
            possible_question.append(Question_gain(quest_gain[x].gain,quest_gain[x].question))
    n_possible_question = len(possible_question)
    if n_possible_question == 0:
        return float('Inf'), float('NaN')
    # Randomly sample two surviving candidates and keep the better one
    # (randomisation in the spirit of random-forest split selection).
    if n_possible_question>=2:
        [i, j] = random.sample(range(0, n_possible_question), 2)
    else:
        i = j = random.randint(0,n_possible_question-1)
    if possible_question[i].gain>=possible_question[j].gain:
        return possible_question[i].gain, possible_question[i].question
    else:
        return possible_question[j].gain, possible_question[j].question
997,581 | b4e9a7f598efedb3f8c03c4a40123cf0059afbba | import random
# Build the four suit decks (card name -> numeric value 1..13) and combine
# them into a single 52-card hand.
deck1 = dict()
deck2 = dict()
deck3 = dict()
deck4 = dict()

for n in range(1, 14):
    if n == 1:
        card = "A"
    elif n == 11:
        card = "J"
    elif n == 12:
        card = "Q"
    elif n == 13:
        # BUG FIX: was lowercase "k" -- a typo, since A/J/Q are uppercase.
        card = "K"
    else:
        card = str(n)
    deck4["Diamond" + card] = n
    deck3["Club" + card] = n
    deck2["Heart" + card] = n
    deck1["Spade" + card] = n

# Combine the suits into one hand (insertion order is not meaningful).
# BUG FIX: `hand` was never initialised before use, raising NameError.
hand = dict()
hand.update(deck1)
hand.update(deck2)
hand.update(deck3)
hand.update(deck4)
def shuffle(hand):
    """Shuffle *hand* in place, six times over: empty it into two randomly
    chosen piles, then stack both piles back into the dict (the key/value
    mapping is preserved; only insertion order changes)."""
    left = dict()
    right = dict()
    for _ in range(1, 7):
        while hand:
            key, value = hand.popitem()
            target = left if random.randint(0, 1) == 0 else right
            target[key] = value
        for pile in (left, right):
            while pile:
                key, value = pile.popitem()
                hand[key] = value
997,582 | 252645dbd3d606920fcb46acdcbde0a10f101606 | from mainapp.models import *
# def get_all_discussions:
# for q in Question.objects.order_by(timestamp):
|
997,583 | fc284e5bdcc79bc574b0ec2820884d378c39bb02 | #!/usr/bin/env python
# coding:utf-8
# vi:tabstop=4:shiftwidth=4:expandtab:sts=4
import neon
import random
import numpy as np
import math
from ..stacked import Layers, register_layers_class
from ..stacked import register_concat_handler, register_inputs_handler
from ..stacked import register_flag_handler, register_network_wrapper
from ..stacked import register_macro_handler, register_nonlinearities
from ..stacked import *
from neon.layers.layer import Affine, Activation, Linear, SkipNode
from neon.layers.layer import Bias, BranchNode, GeneralizedCost
from neon.layers.layer import Reshape, Pooling, Conv, Layer, LRN
from neon.layers.container import Sequential, MergeSum, MergeBroadcast
from neon.initializers import GlorotUniform as NeonGlorotUniform
import utils
def get_output_shape(network):
    """Return *network*'s output shape as (batch_size,) + out_shape,
    configuring the layer first unless it is a DataTransform."""
    needs_configure = not isinstance(network, neon.layers.layer.DataTransform)
    if needs_configure:
        network.configure(None)
    batch = network.be.bsz
    return (batch,) + network.out_shape
class GaussianNoiseLayer(Layer):
    """Layer that adds zero-mean Gaussian noise (std *sigma*) to its input.

    NOTE(review): noise is added in fprop regardless of the ``inference``
    flag, i.e. also at inference time -- confirm this is intended.
    """
    def __init__(self, sigma=0.1, name=None):
        super(GaussianNoiseLayer, self).__init__(name)
        self.sigma = sigma
        self.owns_delta = True
        self.is_mklop = True

    def fprop(self, inputs=None, inference=False, beta=0):
        # Refill the noise buffer, copy the input through, then add the noise.
        self.be.fill_normal(self.noisebuf, stdv=self.sigma)
        self.be.fprop_skipnode(inputs, self.outputs, beta)
        self.outputs[:] = self.outputs + self.noisebuf
        return self.outputs

    def configure(self, in_obj):
        # Shape-preserving; allocate the noise buffer once the shape is known.
        super(GaussianNoiseLayer, self).configure(in_obj)
        self.out_shape = self.in_shape
        self.noisebuf = self.be.iobuf(self.in_shape, dtype=np.float32)
        # self.noisebuf = self.be.iobuf(self.in_shape)
        return self

    def bprop(self, error, alpha=1.0, beta=0.0):
        # for better performance, mkl do nothing
        # otherwise, convert back and deal with beta and alpha.
        self.be.bprop_skipnode(error, self.deltas, alpha, beta)
        return self.deltas
class DimshuffleLayer(Layer):
    """Permute/broadcast input dimensions according to *pattern*
    (theano/lasagne-style dimshuffle; the entry 'x' inserts a broadcast axis).

    BUG FIX: ``__init__`` and ``configure`` previously called
    ``super(GaussianNoiseLayer, self)`` -- a copy-paste slip that raised
    TypeError at construction; both now reference DimshuffleLayer.
    """
    def __init__(self, pattern, name=None):
        super(DimshuffleLayer, self).__init__(name)
        self.pattern = pattern
        self.owns_delta = True
        self.is_mklop = True

    def fprop(self, inputs=None, inference=False, beta=0):
        # Forward pass is a copy-transpose following the requested pattern.
        self.be.copy_transpose(inputs, self.outputs, axis=self.pattern)
        return self.outputs

    def configure(self, in_obj):
        super(DimshuffleLayer, self).configure(in_obj)
        input_shape = (self.be.bsz,)+self.in_shape
        # Copy from lasagne/layers/shape.py
        #
        # Build output shape while keeping track of the dimensions that we are
        # attempting to collapse, so we can ensure that they are broadcastable
        output_shape = []
        dims_used = [False] * len(input_shape)
        for p in self.pattern:
            if isinstance(p, int):
                if p < 0 or p >= len(input_shape):
                    raise ValueError("pattern contains {0}, but input shape "
                                     "has {1} dimensions "
                                     "only".format(p, len(input_shape)))
                # Dimension p
                o = input_shape[p]
                dims_used[p] = True
            elif p == 'x':
                # Broadcast; will be of size 1
                o = 1
            output_shape.append(o)
        for i, (dim_size, used) in enumerate(zip(input_shape, dims_used)):
            if not used and dim_size != 1 and dim_size is not None:
                raise ValueError(
                    "pattern attempted to collapse dimension "
                    "{0} of size {1}; dimensions with size != 1/None are not"
                    "broadcastable and cannot be "
                    "collapsed".format(i, dim_size))
        ###
        # NOTE(review): unlike the other layers here, out_shape is derived
        # from a shape that includes the batch axis -- confirm downstream use.
        self.out_shape = tuple(output_shape)
        return self

    def bprop(self, error, alpha=1.0, beta=0.0):
        # Backward pass applies the inverse permutation.
        self.be.copy_transpose(
            error, self.deltas, axis=np.argsort(self.pattern))
        return self.deltas
class ConstantLayer(Layer):
    """Pass-through layer that blocks gradients (bprop emits zeros),
    effectively treating its input as a constant."""
    def __init__(self, name=None):
        super(ConstantLayer, self).__init__(name)
        self.owns_delta = True
        self.is_mklop = True

    def fprop(self, inputs=None, inference=False, beta=0):
        self.be.fprop_skipnode(inputs, self.outputs, beta)
        return self.outputs

    def configure(self, in_obj):
        # Shape-preserving.
        super(ConstantLayer, self).configure(in_obj)
        self.out_shape = self.in_shape
        return self

    def bprop(self, error, alpha=1.0, beta=0.0):
        # Zero the gradient so nothing upstream of this layer is trained.
        self.deltas[:] = 0
        return self.deltas
# Map from a Sequential ending in a BranchNode to that BranchNode, plus the
# set of branches that have been handed out once already (see NeonLayers).
network_branch = {}
branch_notfirst = set()
class NeonLayers(Layers):
    """Layer registry resolving references; re-uses BranchNodes for fan-out
    and wraps single-element-list references in a gradient-blocking
    ConstantLayer."""
    def get_layer(self, k):
        # A one-element list reference means "use as a constant" (no grads).
        constant_flag = False
        if type(k) == list and len(k) == 1:
            constant_flag = True
        layer = super(NeonLayers, self).get_layer(k)
        if layer in network_branch:
            print 'Found branch', layer
            b = network_branch[layer]
            # First consumer keeps the full branch; later consumers attach
            # directly to the shared BranchNode.
            if b in branch_notfirst:
                layer = b
            else:
                branch_notfirst.add(b)
        if constant_flag:
            layer = sequential(layers=(layer, ConstantLayer()))
        return layer
register_layers_class(NeonLayers)
def sequential(layers):
    """Build a neon Sequential from *layers*, un-nesting any Sequential
    containers one level so the result is a flat layer list."""
    flat = []
    for item in layers:
        if type(item) == Sequential:
            flat.extend(item.layers)
        else:
            flat.append(item)
    return Sequential(layers=tuple(flat))
def concat_handler(layers, flags, stacks, this_model):
    """Concatenate branch outputs along the channel ("depth") axis, hoisting
    any shared BranchNode prefix out front (see split_merge_layers)."""
    head, ls = split_merge_layers(layers)
    return Sequential(layers=head+(MergeBroadcast(ls, merge="depth"),))
    # return MergeBroadcast(layers=layers, merge="depth")
    # network = Tree(layers=layers)
    # return MergeMultistream(layers=network, merge="depth")
def merge_handler(layers, flags, stacks, this_model):
    """Generic 'op' merging is not supported by this neon backend port."""
    raise NotImplementedError
def split_list(a, d):
    """Split list *a* on delimiter value *d*, like ``str.split`` for lists:
    returns the runs between occurrences of *d* (empty runs included).

    BUG FIX: removed three leftover Python-2 debug ``print`` statements
    that polluted stdout on every call.
    """
    res = [[], ]
    for t in a:
        if t != d:
            res[-1] += [t]
        else:
            res += [[], ]
    return res
def split_merge_layers(layers):
    """Split each incoming branch into the shared prefix before the common
    BranchNode ("head") and the per-branch tails ("ls").

    At most one distinct BranchNode may appear across all branches; a branch
    without one contributes its whole layer list as its tail.
    (Python-2 debug prints left as in the original.)
    """
    bs = []
    ls = []
    for layer in layers:
        if type(layer) == BranchNode:
            # The branch itself: tail is a pass-through.
            b = layer
            l = SkipNode()
        elif type(layer.layers[0]) == BranchNode:
            # Branch node at the front: tail is everything after it.
            b = layer.layers[0]
            l = Sequential(tuple(layer.layers[1:]))
        else:
            b = None
            l = None
        bs += [b]
        ls += [l]
    bset = set(bs)-set({None})
    if len(bset) > 1:
        print bset
    assert len(bset) <= 1
    if len(bset) == 1:
        # Bind `b` to the single shared BranchNode for the loop below.
        for b in bset:
            pass
    print 'bs:', bs
    print 'ls:', ls
    head = ()
    for i, layer in enumerate(layers):
        if ls[i] is None:
            # The branch node is buried inside this branch: split around it.
            ll = split_list(layers[i].layers, b)
            assert len(ll) <= 2
            if len(ll) == 2:
                assert head == ()
                head, l = ll
                head = tuple(head)
            # NOTE(review): if len(ll) == 1 here, `l` carries a stale value
            # from the earlier loop -- confirm this path cannot occur.
            l = tuple(l)
            ls[i] = l
        else:
            ls[i] = layers[i].layers
    print 'bs:', bs
    print 'head:', head
    print 'ls:', ls
    return head, tuple(ls)
def add_handler(layers, flags, stacks, this_model):
    """Element-wise sum of the incoming branches, hoisting any shared
    BranchNode prefix out front (see split_merge_layers).

    Cleanup: removed the dead commented-out prototype that this
    implementation replaced.
    """
    head, ls = split_merge_layers(layers)
    return Sequential(layers=head+(MergeSum(ls),))
def sub_handler(layers, flags, stacks, this_model):
    """Element-wise subtraction: first branch minus the sum of the rest,
    implemented as a MergeSum with the subtrahend negated via a
    divisor=-1 Normalizer activation."""
    head, layers = split_merge_layers(layers)
    if len(layers) > 2:
        left = layers[0]
        right = sequential(
            layers=(
                MergeSum(layers[1:]),
                Activation(neon.transforms.Normalizer(divisor=-1))))
        network = Sequential(layers=head+(MergeSum(layers=(left, right)),))
    elif len(layers) == 2:
        left = layers[0]
        right = sequential(
            layers=(
                layers[1],
                Activation(neon.transforms.Normalizer(divisor=-1))))
        network = Sequential(layers=head+(MergeSum(layers=(left, right)),))
    else:
        # Single operand: nothing to subtract.
        network = layers[0]
    return network
# Wire the merge-style input handlers into the stacked framework.
register_concat_handler(concat_handler)
register_inputs_handler('op', merge_handler)
register_inputs_handler('add', add_handler)
register_inputs_handler('sub', sub_handler)
def reshape_handler(network, flags, stacks, this_model):
    """Append a Reshape layer using the shape in flags['reshape']."""
    reshaped = Reshape(reshape=flags['reshape'])
    return sequential(layers=(network, reshaped)), ()
def slice_handler(network, flags, stacks, this_model):
    """Slicing is not supported by this neon backend port."""
    raise NotImplementedError
def maxpool_handler(network, flags, stacks, this_model):
    """Append a max-pooling layer sized/strided/padded from *flags*.

    NOTE(review): when flags['stride'] is negative and no explicit 'pad' is
    given, ``pad`` is never assigned and this raises NameError -- confirm
    negative strides are never combined with pooling.
    """
    # num_filters=flags['num_filters']
    layername = flags.get('layername', None)
    filter_size = flags.get('filter_size', 0)
    conv_stride = flags.get('stride', 0)
    # Same-style padding, except no padding when the filter tiles exactly.
    if conv_stride == 0 or conv_stride == 1:
        pad = filter_size//2
    elif conv_stride > 0:
        if filter_size == conv_stride:
            pad = 0
        else:
            pad = filter_size//2
    if 'pad' in flags:
        pad = flags['pad']
    dim = len(get_output_shape(network))-2
    print 'pooling debug:',filter_size,max(1, conv_stride),pad
    assert filter_size > 0
    network = sequential(layers=(network, Pooling(
        fshape=(filter_size,)*dim if dim>=2 else filter_size,
        strides=max(1, conv_stride),
        padding=pad,
        op='max',
        name=layername,
        )))
    return network, ()
def meanpool_handler(network, flags, stacks, this_model):
    """Append an average-pooling layer sized/strided/padded from *flags*.

    NOTE(review): same padding quirk as maxpool_handler -- ``pad`` is unbound
    for a negative stride without an explicit 'pad' flag.
    """
    # num_filters=flags['num_filters']
    layername = flags.get('layername', None)
    filter_size = flags.get('filter_size', 0)
    conv_stride = flags.get('stride', 0)
    if conv_stride == 0 or conv_stride == 1:
        pad = filter_size//2
    elif conv_stride > 0:
        if filter_size == conv_stride:
            pad = 0
        else:
            pad = filter_size//2
    if 'pad' in flags:
        pad = flags['pad']
    dim = len(get_output_shape(network))-2
    assert filter_size > 0
    network = sequential(layers=(network, Pooling(
        fshape=(filter_size,)*dim if dim>=2 else filter_size,
        strides=max(1, conv_stride),
        padding=pad,
        op='avg',
        name=layername,
        )))
    return network, ()
def upscale_handler(network, flags, stacks, this_model):
    """Upscaling is not supported by this neon backend port."""
    raise NotImplementedError
class GlorotUniform(NeonGlorotUniform):
    """Glorot/Xavier uniform initialiser with an adjustable gain; passing
    the string 'relu' selects a sqrt(2) gain."""
    def __init__(self, name="autouniformInit", gain=1.0):
        super(GlorotUniform, self).__init__(name)
        self.gain = np.sqrt(2) if gain == 'relu' else gain

    def fill(self, param):
        # Uniform in [-k, k], with k scaled by fan-in + fan-out.
        fan_sum = param.shape[0] + param.shape[1]
        k = self.gain * np.sqrt(6.0 / fan_sum)
        param[:] = self.be.rng.uniform(-k, k, param.shape)
def num_filters_handler(network, flags, stacks, this_model):
    """Append a learnable layer (Affine when 'dense' or the input is 1-D,
    otherwise Conv) with flags['num_filters'] outputs, handling padding,
    stride/dilation, weight init, optional batch-norm, nonlinearity and
    weight-sharing groups.  Returns (network, [paramlayer])."""
    paramlayers = []
    if 'sharegroup2params' not in this_model:
        this_model['sharegroup2params'] = {}
    sharegroup2params = this_model['sharegroup2params']
    if 'layer2sharegroup' not in this_model:
        this_model['layer2sharegroup'] = {}
    layer2sharegroup = this_model['layer2sharegroup']
    if 'constlayer2sharegroup' not in this_model:
        this_model['constlayer2sharegroup'] = {}
    constlayer2sharegroup = this_model['constlayer2sharegroup']
    num_filters = flags['num_filters']
    conv_stride = flags.get('stride', 0)
    layername = flags.get('layername', None)
    filter_size = flags.get('filter_size', 0)
    bn = flags.get('bn', False)
    # Same-padding by default; no padding when the filter tiles exactly.
    if conv_stride == 0 or conv_stride == 1:
        pad = filter_size//2
    elif conv_stride > 0:
        if filter_size == conv_stride:
            pad = 0
        else:
            pad = filter_size//2
    else: # conv_stride<0
        # Negative stride encodes dilation; filter count is scaled up.
        num_filters = num_filters*(-conv_stride)*(-conv_stride)
        if 'nopad' not in flags:
            pad = filter_size//2
        else:
            pad = 0
    if 'pad' in flags:
        pad = flags['pad']
    nonlinearity = None
    if 'linear' in flags:
        pass
    elif 'nonlinearity' in flags:
        nonlinearity = flags['nonlinearity']
    else:
        nonlinearity = this_model.get('relu', neon.transforms.Rectlin())
    sharegroup = flags.get('sharegroup', 0)
    # if sharegroup and sharegroup in sharegroup2params:
    #     paramlayer = None # sharegroup2params[sharegroup]
    # else:
    #     paramlayer = None
    init = this_model.get('init', GlorotUniform())
    if 'init' in flags:
        init = flags['init']
    if 'init_gain' in flags:
        init = GlorotUniform(gain=flags['init_gain'])
    else:
        # NOTE(review): these compare a nonlinearity *instance* against the
        # Rectlin *class* with ==, which can never match, so both gain
        # branches look unreachable -- probably meant isinstance(); confirm.
        if nonlinearity == neon.transforms.Rectlin and nonlinearity.slope > 0:
            alpha = nonlinearity.slope
            init = GlorotUniform(gain=math.sqrt(2/(1+alpha**2)))
        elif nonlinearity == neon.transforms.Rectlin:
            init = GlorotUniform(gain='relu')
        else:
            pass
    if 'nobias' in flags:
        bias = None
    else:
        bias = neon.initializers.Constant(0.0)
    # utils.walk(network)
    dim = len(get_output_shape(network))-2
    if 'dense' in flags or dim <= 1:
        paramlayer = sequential(layers=Affine(
            nout=num_filters,
            init=init,
            bias=bias,
            batch_norm=bn,
            activation=nonlinearity))
        if sharegroup:
            if 'const' in flags:
                constlayer2sharegroup[paramlayer] = sharegroup
            else:
                layer2sharegroup[paramlayer] = sharegroup
        network = sequential(layers=(
            network,
            paramlayer,
        ))
    else:
        # input_shape = lasagne.layers.get_output_shape(network)
        if 'local' not in flags:
            assert filter_size > 0
            paramlayer = sequential(layers=Conv(
                fshape=(filter_size,)*dim+(num_filters,),
                init=init,
                bias=bias,
                strides=max(1, conv_stride),
                padding=pad,
                activation=nonlinearity,
                name=layername,
                batch_norm=bn,
                # NOTE(review): {} as the "no dilation" value looks odd --
                # confirm neon's Conv accepts a dict here.
                dilation=-conv_stride if conv_stride < 0 else {}
            ))
            if sharegroup:
                if 'const' in flags:
                    constlayer2sharegroup[paramlayer] = sharegroup
                else:
                    layer2sharegroup[paramlayer] = sharegroup
            network = sequential(layers=(
                network,
                paramlayer,
            ))
        else: # local
            raise NotImplementedError
    paramlayers += [paramlayer]
    if sharegroup and sharegroup not in sharegroup2params:
        sharegroup2params[sharegroup] = ['W', 'b']
    if 'saveparamlayer' in flags and flags['saveparamlayer'] is not None:
        g = flags['saveparamlayer']
        if g not in stacks:
            stacks[g] = []
        stacks[g] += [paramlayer]
    return network, paramlayers
def dimshuffle_handler(network, flags, stacks, this_model):
    """Append a DimshuffleLayer with the pattern from flags['dimshuffle']."""
    shuffled = DimshuffleLayer(flags['dimshuffle'])
    return sequential(layers=(network, shuffled)), ()
def noise_handler(network, flags, stacks, this_model):
    """Append additive Gaussian noise; flags['noise'] is the sigma, with
    the literal True meaning the default 0.1."""
    sigma = flags['noise']
    sigma = 0.1 if sigma is True else sigma
    return sequential(layers=(network, GaussianNoiseLayer(sigma))), ()
def lrn_handler(network, flags, stacks, this_model):
    """Append a local-response-normalisation layer, translating lasagne-style
    keyword names (alpha/beta/n/k) to neon's LRN (ascale/bpower/depth).

    BUG FIX: ``sequential`` takes a single ``layers`` sequence; the original
    called ``sequential(network, LRN(...))`` with two positional arguments,
    which raised TypeError when the flag was used.
    """
    if type(flags['lrn']) == dict:
        lasagne_lru = flags['lrn']
    else:
        lasagne_lru = {}
    N = network.be.bsz  # 1e4 # XXX scale tied to batch size
    kwargs = {}
    k = lasagne_lru.get('k', 2)
    # NOTE(review): only k == 1 is accepted, yet the default above is 2 --
    # confirm callers always pass k=1 explicitly.
    assert k == 1
    kwargs['ascale'] = lasagne_lru.get('alpha', 1e-4)*N  # XXX
    kwargs['bpower'] = lasagne_lru.get('beta', 0.75)
    kwargs['depth'] = lasagne_lru.get('n', 5)
    return sequential(layers=(network, LRN(**kwargs))), ()
def dropout_handler(network, flags, stacks, this_model):
    """Append a Dropout layer; flags['dropout'] is the drop probability
    (True selects 0.5), clamped to at most 1.0.  neon's Dropout takes the
    *keep* probability, hence the 1.0 - p."""
    p = flags['dropout']
    if p is True:
        p = 0.5
    p = min(p, 1.0)
    return sequential(layers=(network, Dropout(1.0 - p))), ()
def watch_handler(network, flags, stacks, this_model):
    """Watchpoint support from the lasagne backend has not been ported to
    this neon backend; the commented-out lasagne prototype that followed the
    raise was removed as dead code."""
    raise NotImplementedError
def equal_handler(network, flags, stacks, this_model):
    """Register a matching loss between *network* and a target layer.

    flags['equal'] is (to, group) or (to, group, cost); the default cost is
    mean squared error.  The resulting (cost, delta) pair is appended to
    this_model['errors'][group].  The target 'zeros' means the network's
    own output is driven towards zero.
    """
    get_layer = this_model['get_layer']
    if 'errors' not in this_model:
        this_model['errors'] = {}
    errors = this_model['errors']
    if len(flags['equal']) == 2:
        to, g = flags['equal']
        eq = neon.transforms.cost.MeanSquared()
    else:
        to, g, eq = flags['equal']
    if g not in errors:
        errors[g] = []
    if to == 'zeros':
        delta = network
    else:
        target = get_layer(to)
        if isinstance(target, neon.layers.layer.DataTransform):
            delta = [network, target]
        else:
            # Express network - target as network + (-1 * target) so the
            # MeanSquared cost measures their difference.
            assert isinstance(eq, neon.transforms.cost.MeanSquared)
            tmp = sequential(
                layers=(
                    target,
                    Activation(neon.transforms.Normalizer(divisor=-1))))
            delta = MergeSum(layers=(network, tmp))
    cost = GeneralizedCost(eq, name=g)
    # tmp=lasagne.layers.ElemwiseMergeLayer((network, target), eq)
    if 'sum' in flags:
        raise NotImplementedError
        # if type(flags['sum'])==int:
        #     n=flags['sum']
        # else:
        #     n=1
        # shape=lasagne.layers.get_output_shape(tmp)[:n]
        # tmp=lasagne.layers.ExpressionLayer(
        #     tmp,
        #     curry(
        #         lambda n, shape, x:x.flatten(ndim=n+1).sum(axis=n), n, shape),
        #     output_shape=shape)
    errors[g] += [(cost, delta)]
    return network, ()
def relu_handler(network, flags, stacks, this_model):
    """Append the model-wide default nonlinearity.  flags['relu'] must be
    the literal True; a per-shape variant is not supported."""
    assert flags['relu'] is True
    transform = this_model.get('relu', neon.transforms.Rectlin())
    if 'shape' in flags:
        raise NotImplementedError
    act = Activation(transform=transform)
    return sequential(layers=(network, act)), ()
def nonlinearity_handler(network, flags, stacks, this_model):
    """Append an Activation layer: uses flags['nonlinearity'] when it is a
    callable, otherwise the model-wide default rectifier."""
    default = this_model.get('relu', neon.transforms.Rectlin())
    nonlinearity = default
    if type(flags) == dict and 'nonlinearity' in flags:
        candidate = flags['nonlinearity']
        if callable(candidate):
            nonlinearity = candidate
    if 'shape' in flags:
        raise NotImplementedError
    act = Activation(transform=nonlinearity)
    return sequential(layers=(network, act)), ()
def argmax_handler(network, flags, stacks, this_model):
    """argmax is not supported by this neon backend port."""
    raise NotImplementedError
def unargmax_handler(network, flags, stacks, this_model):
    """unargmax is not supported by this neon backend port."""
    raise NotImplementedError
def max_handler(network, flags, stacks, this_model):
    """max is not supported by this neon backend port."""
    raise NotImplementedError
def branch_handler(network, flags, stacks, this_model):
    """Insert a BranchNode so later layers can fan out from this point.

    The resulting Sequential is recorded in the module-level
    ``network_branch`` map, which NeonLayers.get_layer consults so the
    branch is only materialised once.
    """
    global network_branch
    b = BranchNode(name='branch_'+str(flags['branch']))
    network = sequential(layers=(network, b))
    network_branch[network] = b
    return network, ()
# Register all flag handlers with the stacked framework.  The optional tuple
# argument lists other flags associated with the handler -- presumably ones
# that take precedence over it; TODO confirm against register_flag_handler.
register_flag_handler('equal', equal_handler)
register_flag_handler('watch', watch_handler)
register_flag_handler('branch', branch_handler)
register_flag_handler('relu', relu_handler)
register_flag_handler('nonlinearity', nonlinearity_handler, ('num_filters', ))
register_flag_handler('noise', noise_handler)
register_flag_handler('lrn', lrn_handler)
register_flag_handler('dropout', dropout_handler)
register_flag_handler('unargmax', unargmax_handler)
register_flag_handler('argmax', argmax_handler)
register_flag_handler('max', max_handler)
register_flag_handler('dimshuffle', dimshuffle_handler)
register_flag_handler('num_filters', num_filters_handler, (
    'maxpool', 'meanpool', 'upscale'))
register_flag_handler('upscale', upscale_handler)
register_flag_handler('meanpool', meanpool_handler)
register_flag_handler('maxpool', maxpool_handler)
register_flag_handler('slice', slice_handler)
register_flag_handler('reshape', reshape_handler)
def layer_handler(network):
    """Debug hook run per constructed layer (Python-2 print statement)."""
    print 'output_shape:', get_output_shape(network)
register_layer_handler(layer_handler)
class LayerSelector(neon.layers.Sequential):
    """Sequential container implementing randomized weight sharing.

    Layers are grouped by "sharegroup".  Each time the optimizer asks for
    ``layers_to_optimize``, the previously selected member's W/b tensors are
    first copied to every other member of its group (so all members stay in
    sync), then one member per group is picked at random to receive the next
    round of updates.

    BUG FIXES: ``done[sharegroup]`` raised KeyError on the first encounter
    of a group (now ``done.get``); ``b``/``W`` could be referenced unbound
    when a selected layer had no Bias/Linear sublayer (now initialised to
    None).
    """
    def __init__(self, *args, **kwargs):
        assert 'layer2sharegroup' in kwargs
        assert 'constlayer2sharegroup' in kwargs
        self.layer2sharegroup = kwargs.pop('layer2sharegroup')
        self.constlayer2sharegroup = kwargs.pop('constlayer2sharegroup')
        self.last_selected_layers = {}
        super(LayerSelector, self).__init__(*args, **kwargs)
        # Invert the layer->group maps into group->layers maps.
        self.sharegroup2layers = {}
        self.sharegroup2constlayers = {}
        for layer in self.layer2sharegroup:
            group = self.layer2sharegroup[layer]
            self.sharegroup2layers.setdefault(group, [])
            self.sharegroup2constlayers.setdefault(group, [])
            self.sharegroup2layers[group] += [layer]
        for layer in self.constlayer2sharegroup:
            group = self.constlayer2sharegroup[layer]
            self.sharegroup2layers.setdefault(group, [])
            self.sharegroup2constlayers.setdefault(group, [])
            self.sharegroup2constlayers[group] += [layer]

    @property
    def layers_to_optimize(self):
        # 1) Propagate the previously selected layer's parameters to every
        #    other member of its sharegroup.
        for g in self.last_selected_layers:
            l = self.last_selected_layers[g]
            b = None
            W = None
            for layer in l.layers_fprop():
                if isinstance(layer, Bias):
                    b = layer.W
                if isinstance(layer, Linear):
                    W = layer.W
            for ll in self.sharegroup2layers[g]+self.sharegroup2constlayers[g]:
                if ll != l:
                    b_done = False
                    W_done = False
                    for layer in ll.layers_fprop():
                        if isinstance(layer, Bias):
                            assert not b_done
                            if b:
                                assert layer.W.shape == b.shape
                                layer.W = b
                                layer.dW = layer.be.empty_like(layer.W)
                            b_done = True
                        if isinstance(layer, Linear):
                            assert not W_done
                            if W:
                                assert layer.W.shape == W.shape
                                layer.W = W
                                layer.dW = layer.be.empty_like(layer.W)
                            W_done = True
        # 2) Pick a fresh random representative per sharegroup.
        self.last_selected_layers = {}
        select = self.last_selected_layers
        for g in self.sharegroup2layers:
            k = int(random.random()*len(self.sharegroup2layers[g]))
            select[g] = self.sharegroup2layers[g][k]
        # 3) Filter the base list so only the selected member of each group
        #    is handed to the optimizer.
        a = super(LayerSelector, self).layers_to_optimize
        lto = []
        done = {}
        for l in a:
            if l in self.layer2sharegroup:
                sharegroup = self.layer2sharegroup[l]
                # BUG FIX: was done[sharegroup] -> KeyError on first access.
                if not done.get(sharegroup) and l == select[sharegroup]:
                    lto += [l]
                    done[sharegroup] = True
            else:
                lto += [l]
        return lto
def network_wrapper(network, stacks, this_model):
    """Wrap the finished network in a LayerSelector so sharegroup weight
    sharing is applied during optimization."""
    return LayerSelector(
        network,
        layer2sharegroup=this_model['layer2sharegroup'],
        constlayer2sharegroup=this_model['constlayer2sharegroup'])
register_network_wrapper(network_wrapper)
def branchs(a):
    """Macro pass: count how many later layers reference each layer in the
    spec list *a*, and tag multiply-referenced layers with a 'branch' flag
    so a BranchNode gets inserted there during construction.  Returns the
    (mutated) spec list.  (Python-2 print/map semantics left in place.)
    """
    refs = {}
    network = (-1, )
    refs[network] = []
    stacks = {}
    all_layers = Layers(network, stacks)
    get_layer = all_layers.get_layer
    count = 0
    for info in a:
        inputs = info[0]
        flags = info[-1]
        # Resolve this entry's input reference(s) to layer keys.
        if type(inputs) == int or type(inputs) == str or type(inputs) == list:
            layers = [get_layer(inputs)]
        elif type(inputs) == tuple:
            layers = map(get_layer, inputs)
        else:
            print type(inputs)
            raise Exception
        network = count
        refs[network] = []
        # Maintain named stacks for push/pop-style references.
        if 'push' in flags:
            push = flags['push']
            if type(push) == str:
                push = [push]
            for t in push:
                if t not in stacks:
                    stacks[t] = []
                stacks[t] += [network]
        if 'pop' in flags:
            stacks[flags['pop']] = stacks[flags['pop']][:-1]
        for t in layers:
            refs[t] += [network]
        count += 1
        all_layers.add(network)
    # Any layer referenced more than once becomes a branch point.
    for t in refs:
        if len(refs[t]) > 1:
            a[t][-1]['branch'] = t
    return a
register_macro_handler(branchs)
# Spec-name -> neon activation instance available to network definitions.
register_nonlinearities({
    'softmax': neon.transforms.activation.Softmax(),
    'rectify': neon.transforms.activation.Rectlin(),
    'sigmoid': neon.transforms.activation.Logistic(),
    'tanh': neon.transforms.activation.Tanh(),
    'linear': neon.transforms.activation.Identity(),
    })
|
997,584 | 6c71b754cb3e332f5b5f413017dbc03528bf7ee6 | # 문제 : https://programmers.co.kr/learn/courses/30/lessons/76503
# 아이디어 : https://prgms.tistory.com/47?category=882795
# 18개 중 11개 성공, 7개 실패..ㅠㅠ
# 그리디로 접근
# 시작 리프 노드, 교환 횟수, 간선 정보
def greedy(start, a):
    """Push leaf node *start*'s weight onto its single neighbour and return
    the cost of doing so (|a[start]| moves).

    Uses the module-level ``visited`` flags and ``edge_info`` adjacency map;
    the leaf and its edge are removed from ``edge_info`` afterwards.
    """
    global visited, edge_info
    # print("start node", start, a[start], "target node", edge_info[start])
    temp = 0
    temp += abs(a[start])
    a[edge_info[start][0]] += a[start]
    a[start] = 0
    visited[start] = 1
    edge_info[edge_info[start][0]].remove(start)
    del edge_info[start]
    # print("state after removal", edge_info, "===== a =====", a, visited)
    # print(temp)
    # print()
    return temp
def solution(a, edges):
    """Greedily drain leaf nodes of the tree until every value is zero.

    Returns the total number of moves, or -1 when the values cannot be
    balanced (their sum is non-zero). NOTE(review): per the header comment
    this greedy approach fails 7 of 18 official test cases, so it is not a
    complete solution.
    """
    global visited, edge_info
    if sum(a) != 0:  # impossible: the values cannot cancel out
        return -1
    answer = 0
    # build an undirected adjacency map
    edge_info = dict()
    for s, e in edges:
        if s not in edge_info.keys():
            edge_info[s] = [e]
        else: edge_info[s].append(e)
        if e not in edge_info.keys():
            edge_info[e] = [s]
        else: edge_info[e].append(s)
    # print("edge_info", edge_info)
    visited = [0] * len(a)
    # repeatedly strip leaves until every node's value is zero
    while a.count(0) != len(a):
        for s in range(len(a)):
            if s not in edge_info.keys():
                continue
            if visited[s]: continue  # skip nodes already visited
            if len(edge_info[s]) == 1:
                # s is a leaf of the remaining tree
                visited[s] = 1
                if a[s] == 0:
                    edge_info[edge_info[s][0]].remove(s)
                    del edge_info[s]
                    continue  # already zero: just detach it
                # print("edge_info before removal", edge_info, "===== a =====", a, visited)
                # print()
                answer += greedy(s, a)
    return answer
# Manual smoke checks against the problem's examples.
print(solution([-5,0,2,1,2], [[0,1],[3,4],[2,3],[0,3]]))
print(solution([0,1,0], [[0,1],[1,2]]))
# Reference solutions to study:
# https://sinawi.tistory.com/262?category=649580
# https://bladejun.tistory.com/120
997,585 | f6dac5ae3907625774819b96781d8e2319cc05a1 | import socket
import sys
# When True, internal errors are masked from API clients (production mode).
MASK_ERRORS = True
# AFINN word-valence list used for sentiment scoring.
AFINN_FILE = '/usr/local/metaLayer-sentiment/resources/AFINN.txt'
# Canned response for requests missing the mandatory 'text' POST field.
ERROR_NOTEXT = {'status':'failed', 'code':101, 'error':'The required POST field \'text\' was not supplied' }
# Developer-machine override: show full errors and use a local AFINN copy.
if socket.gethostname() == 'matt-griffiths':
    MASK_ERRORS = False
    AFINN_FILE = '/home/matt/code/metaLayer/sentiment/resources/AFINN.txt'
|
997,586 | 2289b503aa0d95e034ee43aab9e69bf65de65aba | # coding=utf-8
# Copyright 2019 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""YellowFin Test Module for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensor2tensor.utils.yellowfin import YellowFinOptimizer
import tensorflow as tf
# Dimensionality of the synthetic linear model used by the tests.
n_dim = 1000000
# Number of optimizer iterations each test runs. NOTE(review): set to 0,
# which makes every `for i in range(n_iter)` loop below a no-op — the
# measurement assertions are effectively disabled. Presumably lowered to
# keep CI fast; confirm intent.
n_iter = 0
class YellowFinTest(tf.test.TestCase):
  """Checks YellowFin's curvature/variance/distance measurements and its
  lr/momentum auto-tuning against a NumPy re-implementation on a toy
  linear model fed with deterministic synthetic gradients.
  """

  def tune_everything(self, x0squared, c, t, gmin, gmax):
    """NumPy reference computation of the optimal (lr, momentum) pair."""
    del t
    # First tune based on dynamic range
    if c == 0:
      dr = gmax / gmin
      mustar = ((np.sqrt(dr) - 1) / (np.sqrt(dr) + 1))**2
      alpha_star = (1 + np.sqrt(mustar))**2/gmax
      return alpha_star, mustar

    dist_to_opt = x0squared
    grad_var = c
    max_curv = gmax
    min_curv = gmin
    # solve the cubic from the YellowFin paper for the momentum root
    const_fact = dist_to_opt * min_curv**2 / 2 / grad_var
    coef = [-1, 3, -(3 + const_fact), 1]
    roots = np.roots(coef)
    roots = roots[np.real(roots) > 0]
    roots = roots[np.real(roots) < 1]
    root = roots[np.argmin(np.imag(roots))]

    assert root > 0 and root < 1 and np.absolute(root.imag) < 1e-6

    dr = max_curv / min_curv
    assert max_curv >= min_curv
    mu = max(((np.sqrt(dr) - 1) / (np.sqrt(dr) + 1))**2, root**2)

    lr_min = (1 - np.sqrt(mu))**2 / min_curv

    alpha_star = lr_min
    mustar = mu

    return alpha_star, mustar

  def testMeasurement(self):
    """Compares the optimizer's running statistics (EMA decay 0.999)
    against NumPy targets while injecting gradients via placeholders."""
    opt = YellowFinOptimizer(zero_debias=False)
    w = tf.Variable(np.ones([n_dim,]),
                    dtype=tf.float32,
                    name="w",
                    trainable=True)
    b = tf.Variable(np.ones([1,], dtype=np.float32),
                    dtype=tf.float32,
                    name="b",
                    trainable=True)
    x = tf.constant(np.ones([n_dim,], dtype=np.float32),
                    dtype=tf.float32)
    _ = tf.multiply(w, x) + b  # loss
    tvars = tf.trainable_variables()

    w_grad_val = tf.placeholder(tf.float32, shape=(n_dim,))
    b_grad_val = tf.placeholder(tf.float32, shape=(1,))
    apply_op = opt.apply_gradients(zip([w_grad_val, b_grad_val], tvars))

    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
      sess.run(init_op)
      target_h_max = 0.0
      target_h_min = 0.0
      g_norm_squared_avg = 0.0
      g_norm_avg = 0.0
      g_avg = 0.0
      target_dist = 0.0
      for i in range(n_iter):
        # every coordinate's gradient is (i + 1) at step i
        feed_dict = {w_grad_val: (i + 1) * np.ones([n_dim,], dtype=np.float32),
                     b_grad_val: (i + 1) * np.ones([1,], dtype=np.float32)}
        res = sess.run([opt._curv_win,
                        opt._h_max,
                        opt._h_min,
                        opt._grad_var,
                        opt._dist_to_opt_avg,
                        apply_op], feed_dict=feed_dict)

        # NumPy reference EMAs with decay 0.999 over n_dim + 1 coordinates
        g_norm_squared_avg = (
            0.999 * g_norm_squared_avg +
            0.001 * np.sum(((i + 1) * np.ones([n_dim + 1,]))**2))
        g_norm_avg = (0.999 * g_norm_avg +
                      0.001 * np.linalg.norm((i + 1)*np.ones([n_dim + 1,])))
        g_avg = 0.999 * g_avg + 0.001 * (i + 1)
        target_h_max = 0.999 * target_h_max + 0.001 * (i + 1)**2*(n_dim + 1)
        # NOTE(review): the `i + 2 - 20` term presumably mirrors the
        # optimizer's 20-step curvature window — confirm against yellowfin.py
        target_h_min = (0.999 * target_h_min +
                        0.001 * max(1, i + 2 - 20)**2 * (n_dim + 1))
        target_var = g_norm_squared_avg - g_avg**2 * (n_dim + 1)
        target_dist = (0.999 * target_dist +
                       0.001 * g_norm_avg / g_norm_squared_avg)

        # 0.1% relative tolerance on every tracked statistic
        assert np.abs(target_h_max - res[1]) < np.abs(target_h_max) * 1e-3
        assert np.abs(target_h_min - res[2]) < np.abs(target_h_min) * 1e-3
        assert np.abs(target_var - res[3]) < np.abs(res[3]) * 1e-3
        assert np.abs(target_dist - res[4]) < np.abs(res[4]) * 1e-3

  def testLrMu(self):
    """Additionally checks the auto-tuned learning rate and momentum
    against the tune_everything() reference."""
    opt = YellowFinOptimizer(learning_rate=0.5, momentum=0.5, zero_debias=False)
    w = tf.Variable(np.ones([n_dim,]),
                    dtype=tf.float32,
                    name="w",
                    trainable=True)
    b = tf.Variable(np.ones([1,],
                            dtype=np.float32),
                    dtype=tf.float32,
                    name="b",
                    trainable=True)
    x = tf.constant(np.ones([n_dim,], dtype=np.float32), dtype=tf.float32)
    _ = tf.multiply(w, x) + b  # loss
    tvars = tf.trainable_variables()

    # gradients injected through non-trainable variables this time
    w_grad_val = tf.Variable(np.zeros([n_dim,]),
                             dtype=tf.float32,
                             trainable=False)
    b_grad_val = tf.Variable(np.zeros([1,]),
                             dtype=tf.float32,
                             trainable=False)
    apply_op = opt.apply_gradients(zip([w_grad_val, b_grad_val], tvars))

    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
      sess.run(init_op)
      target_h_max = 0.0
      target_h_min = 0.0
      g_norm_squared_avg = 0.0
      g_norm_avg = 0.0
      g_avg = 0.0
      target_dist = 0.0
      target_lr = 0.5
      target_mu = 0.5
      for i in range(n_iter):
        sess.run(tf.assign(w_grad_val, (i + 1) * np.ones([n_dim,],
                                                         dtype=np.float32)))
        sess.run(tf.assign(b_grad_val, (i + 1) * np.ones([1,],
                                                         dtype=np.float32)))
        res = sess.run([opt._curv_win,
                        opt._h_max,
                        opt._h_min,
                        opt._grad_var,
                        opt._dist_to_opt_avg,
                        opt._lr_var,
                        opt._mu_var,
                        apply_op])
        # re-read lr/mu after apply_op has updated them
        res[5] = opt._lr_var.eval()
        res[6] = opt._mu_var.eval()

        # NumPy reference EMAs (decay 0.999), same as in testMeasurement
        g_norm_squared_avg = (
            0.999 * g_norm_squared_avg +
            0.001 * np.sum(((i + 1) * np.ones([n_dim + 1,]))**2))
        g_norm_avg = (0.999 * g_norm_avg +
                      0.001 * np.linalg.norm((i + 1)*np.ones([n_dim + 1,])))
        g_avg = 0.999 * g_avg + 0.001 * (i + 1)
        target_h_max = 0.999 * target_h_max + 0.001 * (i + 1)**2 * (n_dim + 1)
        target_h_min = (0.999 * target_h_min +
                        0.001 * max(1, i + 2 - 20)**2 * (n_dim + 1))
        target_var = g_norm_squared_avg - g_avg**2 * (n_dim + 1)
        target_dist = (0.999 * target_dist +
                       0.001 * g_norm_avg / g_norm_squared_avg)

        if i > 0:
          # tuning only starts after the first measurement step
          lr, mu = self.tune_everything(target_dist**2,
                                        target_var,
                                        1,
                                        target_h_min,
                                        target_h_max)
          target_lr = 0.999 * target_lr + 0.001 * lr
          target_mu = 0.999 * target_mu + 0.001 * mu

        assert np.abs(target_h_max - res[1]) < np.abs(target_h_max) * 1e-3
        assert np.abs(target_h_min - res[2]) < np.abs(target_h_min) * 1e-3
        assert np.abs(target_var - res[3]) < np.abs(res[3]) * 1e-3
        assert np.abs(target_dist - res[4]) < np.abs(res[4]) * 1e-3
        assert (target_lr == 0.0 or
                (np.abs(target_lr - res[5]) < np.abs(res[5]) * 1e-3))
        assert (target_mu == 0.0 or
                (np.abs(target_mu - res[6]) < np.abs(res[6]) * 5e-3))
if __name__ == "__main__":
tf.test.main()
|
997,587 | 3624779fb4a89c6ff1a9199d68ab13ae092d3697 | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/epicBattle/EpicBattlesAfterBattleView.py
import SoundGroups
from gui.Scaleform.daapi.view.lobby.missions.awards_formatters import EpicCurtailingAwardsComposer
from gui.Scaleform.daapi.view.meta.EpicBattlesAfterBattleViewMeta import EpicBattlesAfterBattleViewMeta
from gui.Scaleform.locale.EPIC_BATTLE import EPIC_BATTLE
from gui.Scaleform.locale.RES_ICONS import RES_ICONS
from gui.server_events.awards_formatters import AWARDS_SIZES, getEpicViewAwardPacker
from gui.shared.utils import toUpper
from helpers import dependency, int2roman
from helpers.i18n import makeString as _ms
from skeletons.gui.game_control import IEpicBattleMetaGameController
from skeletons.gui.server_events import IEventsCache
from gui.shared.formatters import text_styles
from gui.Scaleform.daapi.view.lobby.epicBattle.epic_meta_level_icon import getEpicMetaIconVODict
from gui.sounds.epic_sound_constants import EPIC_METAGAME_WWISE_SOUND_EVENTS
from gui.server_events.bonuses import CreditsBonus, CrystalBonus, ItemsBonus, GoodiesBonus, PremiumDaysBonus
from web_stubs import i18n
# Quest token granted on each epic metagame level-up; %d is the level number.
_LEVELUP_TOKEN_TEMPLATE = 'epicmetagame:levelup:%d'
def _AccumulateBonuses(bonuses):
    """Merge duplicate bonuses of the same type into single entries.

    Integral bonuses (credits, crystals, premium days) are summed; item and
    goodie bonuses merge their per-id counts. Bonus types without a
    registered accumulator are passed through unchanged.
    """

    def __accumulateIntegralBonus(integralBonusType, bonuses):
        # sum plain numeric bonus values into one bonus of the same type
        return integralBonusType(bonuses[0].getName(), sum([ b.getValue() for b in bonuses ]))

    def accumulateCredits(bonuses):
        return __accumulateIntegralBonus(CreditsBonus, bonuses)

    def accumulateCrystals(bonuses):
        return __accumulateIntegralBonus(CrystalBonus, bonuses)

    def accumulatePremiumDays(bonuses):
        return __accumulateIntegralBonus(PremiumDaysBonus, bonuses)

    def accumulateItems(bonuses):
        # merge per-item-id counts across all item bonuses
        values = dict()
        for b in bonuses:
            for bid, cnt in b.getValue().iteritems():
                values[bid] = values.get(bid, 0) + cnt
        return ItemsBonus(bonuses[0].getName(), values)

    def accumulateGoodies(bonuses):
        values = dict()
        for b in bonuses:
            for bid, value in b.getValue().iteritems():
                if bid in values:
                    cnt = values[bid].get('count', 0)
                    values[bid]['count'] = cnt + value.get('count', 0)
                else:
                    # Bug fix (decompilation artifact): this assignment used
                    # to run unconditionally, discarding accumulated counts.
                    values[bid] = {'count': value.get('count', 0)}
        return GoodiesBonus(bonuses[0].getName(), values)

    typeToAccumulator = {CreditsBonus: accumulateCredits,
     CrystalBonus: accumulateCrystals,
     ItemsBonus: accumulateItems,
     GoodiesBonus: accumulateGoodies,
     PremiumDaysBonus: accumulatePremiumDays}
    accumulatedBonuses = []
    for bonusType in set((type(b) for b in bonuses)):
        bonusesOfType = [ b for b in bonuses if isinstance(b, bonusType) ]
        if bonusType not in typeToAccumulator:
            # no accumulator registered: keep these bonuses as-is
            accumulatedBonuses.extend(bonusesOfType)
        else:
            # Bug fix (decompilation artifact): the append used to run for
            # every type, so unregistered types crashed calling
            # typeToAccumulator.get(bonusType) -> None.
            accumulatedBonuses.append(typeToAccumulator[bonusType](bonusesOfType))
    return accumulatedBonuses
class EpicBattlesAfterBattleView(EpicBattlesAfterBattleViewMeta):
    """Post-battle popup for Epic Battles: shows the achieved rank, fame
    point progress and any level-up awards."""
    _MAX_VISIBLE_AWARDS = 6
    _awardsFormatter = EpicCurtailingAwardsComposer(_MAX_VISIBLE_AWARDS, getEpicViewAwardPacker())
    __eventsCache = dependency.descriptor(IEventsCache)
    __epicMetaGameCtrl = dependency.descriptor(IEpicBattleMetaGameController)

    def __init__(self, ctx=None):
        super(EpicBattlesAfterBattleView, self).__init__()
        self.__ctx = ctx  # battle-results context supplied by the caller

    def onIntroStartsPlaying(self):
        # audio cue for the rank intro animation
        SoundGroups.g_instance.playSound2D(EPIC_METAGAME_WWISE_SOUND_EVENTS.EB_ACHIEVED_RANK)

    def onRibbonStartsPlaying(self):
        # audio cue for the level-up ribbon animation
        SoundGroups.g_instance.playSound2D(EPIC_METAGAME_WWISE_SOUND_EVENTS.EB_LEVEL_REACHED)

    def onEscapePress(self):
        self.__close()

    def onCloseBtnClick(self):
        self.__close()

    def onWindowClose(self):
        self.destroy()

    def _populate(self):
        """Collect rank/level/award data from the battle results and push it
        to the Flash view via as_setDataS."""
        super(EpicBattlesAfterBattleView, self)._populate()
        extInfo = self.__ctx['reusableInfo'].personal.avatar.extensionInfo
        epicMetaGame = extInfo['epicMetaGame']
        pPrestigeLevel, pMetaLevel, pFamePts = epicMetaGame.get('metaLevel', (None, None, None))
        _, prevPMetaLevel, prevPFamePts = epicMetaGame.get('prevMetaLevel', (None, None, None))
        maxMetaLevel = self.__epicMetaGameCtrl.getMaxPlayerLevel()
        famePtsToProgress = self.__epicMetaGameCtrl.getLevelProgress()
        # fame gained = full levels crossed plus the within-level delta
        famePointsReceived = sum(famePtsToProgress[prevPMetaLevel:pMetaLevel]) + pFamePts - prevPFamePts
        achievedRank = extInfo['playerRank'].get('rank', -1)
        rankName = toUpper(_ms(getattr(EPIC_BATTLE, 'RANK_RANK{}'.format(achievedRank))))
        awardsVO = self._awardsFormatter.getFormattedBonuses(self.__getBonuses(pMetaLevel), size=AWARDS_SIZES.BIG)
        maxLevelText = ''
        fameBarVisible = True
        maxPrestigeIconVisible = pPrestigeLevel == self.__epicMetaGameCtrl.getMaxPlayerPrestigeLevel()
        if prevPMetaLevel >= maxMetaLevel or pMetaLevel >= maxMetaLevel or pPrestigeLevel >= self.__epicMetaGameCtrl.getStageLimit():
            # level/prestige cap reached: show the "max" title and possibly
            # hide the progress bar
            lvlReachedText = toUpper(_ms(EPIC_BATTLE.EPIC_BATTLES_AFTER_BATTLE_LEVEL_UP_MAX_TITLE))
            maxLevelText = self.__getMaxLevelInfoStr(pPrestigeLevel, pMetaLevel)
            if prevPMetaLevel >= maxMetaLevel or pPrestigeLevel >= self.__epicMetaGameCtrl.getStageLimit():
                fameBarVisible = False
        else:
            lvlReachedText = toUpper(_ms(EPIC_BATTLE.EPIC_BATTLES_AFTER_BATTLE_LEVEL_UP_TITLE))
        data = {'awards': awardsVO,
         'progress': self.__getProgress(pMetaLevel, pFamePts, prevPMetaLevel, prevPFamePts, maxMetaLevel),
         'barText': '+' + str(famePointsReceived),
         'epicMetaLevelIconData': getEpicMetaIconVODict(pPrestigeLevel, pMetaLevel),
         'rank': achievedRank + 1,
         'rankText': text_styles.heroTitle(rankName),
         'rankTextBig': text_styles.epicTitle(rankName),
         'rankSubText': text_styles.highTitle(EPIC_BATTLE.EPIC_BATTLES_AFTER_BATTLE_ACHIEVED_RANK),
         'levelUpText': text_styles.heroTitle(lvlReachedText),
         'levelUpTextBig': text_styles.epicTitle(lvlReachedText),
         'backgroundImageSrc': RES_ICONS.MAPS_ICONS_EPICBATTLES_BACKGROUNDS_META_BG,
         'maxLevelText': maxLevelText,
         'fameBarVisible': fameBarVisible,
         'maxPrestigeIconVisible': maxPrestigeIconVisible,
         'maxLevel': maxMetaLevel}
        self.as_setDataS(data)
        return None

    def __getBonuses(self, level):
        """Return accumulated bonuses for the level-up quest, or []."""
        questsProgressData = self.__ctx['reusableInfo'].personal.getQuestsProgress()
        allQuests = self.__eventsCache.getAllQuests()
        currentLevelQuest = allQuests.get(_LEVELUP_TOKEN_TEMPLATE % level, None)
        if currentLevelQuest and questsProgressData:
            # NOTE(review): gathers bonuses from every progressed quest even
            # though only the current level's quest gated the branch —
            # confirm this is intended.
            bonuses = sum([ allQuests.get(q).getBonuses() for q in questsProgressData ], [])
            bonuses = _AccumulateBonuses(bonuses)
        else:
            bonuses = []
        return bonuses

    def __getProgress(self, curLevel, curFamePoints, prevLevel, prevFamePoints, maxLevel):
        """Return (previous, current) fractional meta-levels for the bar."""
        getPointsProgressForLevel = self.__epicMetaGameCtrl.getPointsProgressForLevel
        pLevel = prevLevel + float(prevFamePoints) / float(getPointsProgressForLevel(prevLevel)) if prevLevel != maxLevel else maxLevel
        cLevel = curLevel + float(curFamePoints) / float(getPointsProgressForLevel(curLevel)) if curLevel != maxLevel else maxLevel
        return (pLevel, cLevel)

    def __getMaxLevelInfoStr(self, prestige, level):
        """Return the styled "cap reached" info string for the view."""
        season = self.__epicMetaGameCtrl.getCurrentSeason() or self.__epicMetaGameCtrl.getPreviousSeason()
        levelStr = ''
        if prestige >= self.__epicMetaGameCtrl.getMaxPlayerPrestigeLevel():
            levelStr = _ms(EPIC_BATTLE.EPIC_BATTLES_AFTER_BATTLE_MAX_PRESTIGE_IN_SEASON_INFO, season=i18n.makeString(EPIC_BATTLE.getSeasonName(season.getSeasonID())))
        elif prestige >= self.__epicMetaGameCtrl.getStageLimit():
            currCycleID = season.getCycleInfo().getEpicCycleNumber()
            prestige = self.__epicMetaGameCtrl.getStageLimit()
            prestigeStr = int2roman(prestige) if prestige else ''
            levelStr = _ms(EPIC_BATTLE.EPIC_BATTLES_AFTER_BATTLE_MAX_PRESTIGE_IN_CYCLE_INFO, currCycle=currCycleID, prestige=prestigeStr, nextCycle=currCycleID + 1)
        elif level >= self.__epicMetaGameCtrl.getMaxPlayerLevel():
            levelStr = _ms(EPIC_BATTLE.EPIC_BATTLES_AFTER_BATTLE_MAX_LEVEL_INFO, level=self.__epicMetaGameCtrl.getMaxPlayerLevel())
        return text_styles.highTitle(levelStr)

    def __close(self):
        self.destroy()
|
997,588 | b493188c0e0cfe8b4ed25cf04357dfcae00a6cc3 | """
The purpose of this file is to train our networks.
This is the file you want to run from root folder with
specified dataset and options.
"""
# Importing Python packages
import argparse
import json
import torch
import torchvision
import torchvision.transforms as transforms
# Importing our own files and classes
from gan.artgan import ArtGAN
from datasets.wikiart import Wikiart
if __name__ == "__main__":
    # Command lines
    parser = argparse.ArgumentParser(description="Main file to train and evaluate ArtGAN.")
    parser.add_argument("data_type", type=str, help="Please choose a dataset from those supported.")
    parser.add_argument("-v", "--version", type=str, help="Please choose a version for saving results. Default: temp.")
    parser.add_argument("-d", "--duration", type=int, help="You can choose a number of epochs for training. Default: 0.")
    parser.add_argument("-r", "--retrain", type=int, help="Choose an epoch from which you want to continue training. Default: None.")
    parser.add_argument("-l", "--loss", type=int, help="Type any number if you want to save loss in a file. Default: False.")
    parser.add_argument("-s", "--score", type=int, help="Type any number if you want to save score in a file. Default: False.")
    args = parser.parse_args()
    data_type = args.data_type
    version = args.version if args.version else "temp"
    training_epochs = args.duration if args.duration else 0
    retrain_epoch = args.retrain if args.retrain else None
    # absent flags parse as None, which bool() maps to False
    save_loss = bool(args.loss)
    save_score = bool(args.score)

    # Turning on CUDA globally
    USE_CUDA = torch.cuda.is_available()
    print("Will we use CUDA? {}".format(USE_CUDA))
    DEVICE = torch.device("cuda" if USE_CUDA else "cpu")

    # Global parameters for our GANs
    batch_size = 128
    img_size = 64
    save_model_step = 1  # checkpoint interval, in epochs

    if data_type == "cifar":
        with open("src/datasets/cifar.json", "r") as f:
            CIFAR10_CLASSES = json.load(f)
        transform = transforms.Compose([
            transforms.Resize(64),
            transforms.ToTensor(),
        ])
        trainset = torchvision.datasets.CIFAR10(root="data/", train=True,
                                                download=True, transform=transform)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size,
                                                  shuffle=True, num_workers=2)
        data_classes = CIFAR10_CLASSES
        nb_classes = len(data_classes)
        # extra "FAKE" label for the discriminator's (K+1)-way output
        data_classes.append("FAKE")
    elif data_type in {"artist", "genre", "style"}:
        with open("src/datasets/wikiart.json", "r") as f:
            WIKIART_CLASSES = json.load(f)
        # Data is expected to be resized prior, in order to accelerate training
        transform = transforms.Compose([
            transforms.ToTensor(),
        ])
        trainset = Wikiart(data_type, transform, train=True)
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size,
                                                  shuffle=True, num_workers=2)
        data_classes = WIKIART_CLASSES[data_type]
        nb_classes = len(data_classes)
        data_classes.append("FAKE")
    else:
        raise ValueError("This dataset is not supported!")

    # Modified GAN parameters (optional, but can yield better results)
    # presumably 100-dim noise plus a one-hot class vector — confirm in ArtGAN
    start_channels = 100 + nb_classes

    # GAN initialization
    artgan = ArtGAN(data_type, version, img_size, nb_classes,
                    start_channels=start_channels, retrain_epoch=retrain_epoch,
                    device=DEVICE)
    if USE_CUDA:
        artgan.cuda()

    # Modified training parameters (optional, but can yield better results)
    initial_lr = 2e-4
    lr_ratio = 0.5
    G_decrease_epoch = 50
    G_decrease_rate = 5
    D_decrease_epoch = 50
    D_decrease_rate = 5

    # Training
    if training_epochs > 0:
        loss_list = artgan.train_model(trainloader, DEVICE,
            epochs=training_epochs, initial_lr=initial_lr, lr_ratio=lr_ratio,
            G_decrease_epoch=G_decrease_epoch, G_decrease_rate=G_decrease_rate,
            D_decrease_epoch=D_decrease_epoch, D_decrease_rate=D_decrease_rate,
            data_classes=data_classes, save_loss=save_loss, save_score=save_score,
            save_model_step=save_model_step)

    # Printing images
    artgan.show_img(data_classes, DEVICE)
|
997,589 | d7e90a55a8b8473f7da997df08b434ab8c0022ef | #!/usr/bin/env python3 -u
'''
Тулза для кодирования/декодирования base64/base85/base32/ascii85/hex,
которая в отличие от некоторых специализированных инструментов
(не будем показывать пальцем на утилиту base64)
не пытается запихать в память обрабатываемые данные полностью
'''
from base64 import *
import sys
# seems unfinished, maybe (or not)
class CoderWriter:
    """Buffers writes so *func* only ever receives whole coding units.

    *func* transforms chunks whose length must be a multiple of *min_size*
    (e.g. 3 input bytes per base64 unit); leftover bytes are held in an
    internal buffer until enough data arrives or flush() is called.
    """

    def __init__(self, func, min_size, outstream):
        self.func = func            # chunk transformer (encoder/decoder)
        self.min_size = min_size    # smallest unit func handles cleanly
        self.buffer = bytearray()   # carry-over bytes between code() calls
        self.outstream = outstream

    def code(self, data):
        data = memoryview(data)
        if len(self.buffer) != 0:
            # top up the pending partial unit first
            needed_size = self.min_size - len(self.buffer)
            self.buffer.extend(data[:needed_size])
            data = data[needed_size:]
            if len(self.buffer) == self.min_size:
                self.outstream.write(self.func(self.buffer))
                self.buffer.clear()
            else:
                # still short of a full unit: all input consumed, wait
                return
        d, m = divmod(len(data), self.min_size)
        if d:
            # emit the largest min_size-aligned prefix in one call
            d *= self.min_size
            self.outstream.write(self.func(data[:d]))
            data = data[d:]
        if m:
            # stash the unaligned tail for the next call / flush
            self.buffer.extend(data)

    def flush(self):
        # emit any trailing partial unit (encoders pad as needed)
        if len(self.buffer) > 0:
            self.outstream.write(self.func(self.buffer))
# Encoding ratios (input bytes -> output characters):
#   base64: 3 bytes -> 4 chars
#   base85 / ascii85: 4 bytes -> 5 chars
#   base32: 5 bytes -> 8 chars
#   hex: 1 byte -> 2 chars
# Each entry maps a coder name to ((encode_func, encode_unit),
# (decode_func, decode_unit)) where the unit is the minimal chunk the
# function must receive to produce correct, padding-free output.
CODERS = {
    'b64': ((b64encode, 3),
            (b64decode, 4)),
    'b85': ((b85encode, 4),
            (b85decode, 5)),
    'a85': ((a85encode, 4),
            (a85decode, 5)),
    'b32': ((b32encode, 5),
            (b32decode, 8)),
    # Bug fix: bytes.hex returns str (the streams are binary) and
    # bytes.fromhex expects str but receives bytes; the unbound bytes.hex
    # also rejected the bytearray/memoryview chunks CoderWriter passes.
    # Wrap both so they consume and produce bytes.
    'hex': ((lambda b: bytes(b).hex().encode('ascii'), 1),
            (lambda b: bytes.fromhex(bytes(b).decode('ascii')), 2)),
}
def get_optimal_size(size_limit, min_size):
    """Return the largest multiple of *min_size* not exceeding *size_limit*."""
    return (size_limit // min_size) * min_size
def run(block_size, func, min_size, inp, outp):
    """Stream *inp* through *func* in min_size-aligned chunks into *outp*,
    printing a progress line after every chunk."""
    # determine the total size for progress reporting, then rewind
    inp.seek(0, 2)
    total_bytes = inp.tell()
    inp.seek(0, 0)
    passed_bytes = 0
    # Bug fix: never read 0 bytes at a time — if block_size < min_size the
    # aligned size was 0, read(0) returned b'' and the loop silently wrote
    # an empty output file. min_size itself is still a valid aligned chunk.
    size = max(get_optimal_size(block_size, min_size), min_size)
    while True:
        data = inp.read(size)
        if not data:
            break
        outp.write(func(data))
        passed_bytes += len(data)
        print(f'{passed_bytes} / {total_bytes} [{int(passed_bytes / total_bytes * 100)}%]'.ljust(30, ' '), end='\r')
def main():
    """Parse CLI arguments and run the streaming encode/decode."""
    if len(sys.argv) == 6:
        mode, coder_name, fin_name, fout_name, block_size = sys.argv[1:]
        block_size = int(block_size)
    else:
        print('Usage:', sys.argv[0], '[mode] coder input output block_size')
        print(' mode - [d]ecode/[e]ncode (decode by default)')
        print(' coder - one of this (', ', '.join(CODERS), ')')
        print(' input - input file name')
        print(' output - output file name')
        print(' block_size - max size of blocks to read')
        return
    # Robustness fix: an unknown coder used to die with a bare KeyError
    # traceback; report it like the other argument errors instead.
    if coder_name not in CODERS:
        print('unknown coder `%s`, must be one of: %s' % (coder_name, ', '.join(CODERS)))
        return
    if mode == 'e':
        func, min_size = CODERS[coder_name][0]
    elif mode == 'd':
        func, min_size = CODERS[coder_name][1]
    else:
        print('mode must be `e` or `d`')
        return
    with open(fin_name, 'rb') as fin, open(fout_name, 'wb') as fout:
        run(block_size, func, min_size, fin, fout)
if __name__ == '__main__':
main()
|
997,590 | b96fbbf8a150b0d26603e84344a6bccd0fad09b3 | # Image processing functions
import os
import numpy as np
from PIL import Image
from scipy.ndimage import zoom
#%% Functions used in the SSL method for transforming
def get_identity(n=2):
    """Return the n-by-n identity matrix.

    Bug fix: np.identity() requires the size argument, so the original
    raised TypeError on every call. NOTE(review): the default of 2 is
    assumed from the 2D transforms used elsewhere in this module — confirm
    the intended size.
    """
    return np.identity(n)
def get_random_gaussian_noise(shape, sigma):
    """Sample zero-mean i.i.d. Gaussian noise with std *sigma* and the given shape."""
    noise = np.random.normal(scale=sigma, size=shape)
    return noise
def get_random_shift_displacement_map(shape, max_shift):
    """Build per-sample constant displacement maps from random 2D shifts.

    *shape* is (batch, channels, H, W); returns an int array of shape
    (batch, 2, H, W) where each (sample, axis) plane is filled with one
    random shift drawn from [-max_shift, max_shift).
    """
    batch_size = shape[0]
    image_size = shape[-2:]
    # one random (dy, dx) shift vector per sample
    shift = np.random.randint(-max_shift, max_shift, size=(batch_size, 2))
    # broadcast each shift value over the full image plane
    per_pixel = shift.reshape(batch_size, 2, 1, 1)
    displacement = np.tile(per_pixel, [1, 1] + list(image_size))
    return displacement
#%% Functions for loading and preprocessing images and segmentations
# loads a batch of JSRT images
def load_batch_JSRT(p, IDs, batchIDs):
    """Load a batch of JSRT chest X-rays with 6-class segmentation masks.

    p: config object with `batch_size` and `data_path`.
    IDs: sequence of JSRT image identifiers; *batchIDs* indexes into it.
    Returns (images, GT): images is (batch, 1, 512, 512) scaled to [-1, 1];
    GT is a boolean (batch, 6, 512, 512) one-hot stack with background in
    channel 0.
    """
    original_image_shape = (2048, 2048)
    target_image_shape = (512, 512)
    images = np.zeros([p.batch_size, 1] + list(target_image_shape))
    GT = np.zeros([p.batch_size, 6] + list(target_image_shape), dtype = bool)
    for idx, imID in enumerate(batchIDs):
        ID = IDs[imID]
        path = os.path.join(p.data_path, 'JSRT/images/%s.IMG' % ID) # path to an image
        # Bug fix: the original opened the file without ever closing it,
        # leaking one descriptor per image.
        with open(path, 'rb') as fid:
            # raw JSRT files are big-endian unsigned 16-bit pixels
            data = np.fromfile(fid, '>u2')
        image = data.reshape(original_image_shape)
        # downsample 4x and invert intensities into [0, 4096]
        image = (4096. - np.clip(np.round(zoom(image, 0.25)), 0, 4096))
        image = image.reshape([1] + list(image.shape))
        # rescale between -1 and 1
        image = image / 4096. * 2 - 1
        masks = np.zeros([6] + list(image.shape[-2:]))
        for cidx, st in enumerate(['heart', 'left clavicle', 'right clavicle', 'left lung', 'right lung']):
            path2 = os.path.join(p.data_path, 'JSRT/masks/%s/%s.gif' % (st, ID))
            # NOTE(review): the 0.5 zoom implies masks ship at 1024x1024 — confirm
            masks[cidx+1, :, :] = zoom(np.array(Image.open(path2)) > 0, 0.5, order = 0)
        # background class
        masks[0, :, :] = np.sum(masks, axis = 0) == 0
        # subtract heart and clavicles from lungs
        masks = nonoverlapJSRT(masks)
        assert(np.all(np.sum(masks, axis = 0) == 1))
        images[idx, ...] = image
        GT[idx, ...] = masks
    return images, GT
# this ensures class masks don't overlap
def nonoverlapJSRT(masks):
    """Make the 6 JSRT class masks mutually exclusive, in channel order.

    Channel order is (background, heart, l-clavicle, r-clavicle, l-lung,
    r-lung); earlier channels win — pixels of channel i+1 are cleared from
    all later channels, then background is recomputed as "no class".
    Mutates and returns *masks*.
    """
    for idx in range(4):
        mask = masks[(idx+1):(idx+2), ...]
        # zero this class's pixels out of every lower-priority channel
        masks[(idx+2):, ...][np.tile(mask, (4 - idx, 1, 1)).astype(bool)] = 0
    # background = pixels belonging to no foreground class
    masks[0, :, :] = np.sum(masks[1:, ...], axis = 0) == 0
    return masks
#%% Visualizing images and GT
def plot_batch_sample(p, batch_x, batch_y, path, n_images = 3):
    """Save a side-by-side preview of the first *n_images* samples (and,
    when *batch_y* carries pixel-wise labels, their class maps) to *path*."""
    # normalise intensities into [0, 1] for display
    batch_x = (batch_x - batch_x.min()) / (batch_x.max() - batch_x.min())
    # if pixel-wise labels:
    if batch_y.ndim == batch_x.ndim:
        # collapse one-hot masks to class indices scaled into [0, 1]
        batch_y = np.tile(np.expand_dims(np.argmax(batch_y, axis = 1), axis = 1), (1, batch_x.shape[1], 1, 1)) / p.num_classes
        # stack each image above its label map
        images = [np.concatenate((batch_x[idx, ...], batch_y[idx, ...]), axis = -2) \
                  for idx in range(n_images)]
    else:
        images = [batch_x[idx, ...] for idx in range(n_images)]
    images = np.concatenate(images, axis = -1)  # tile samples horizontally
    images = np.moveaxis(images, source = [0], destination = [-1])
    # replicate grayscale to 3 channels so PIL saves an RGB image
    images = np.tile(images, (1, 1, 3 // images.shape[-1]))
    images = (images * 256).astype(np.uint8)
    Image.fromarray(images).save(path)
997,591 | aec5b6ce42f615d7add4a1ae703b0ca29b1efadf | __FILENAME__ = console
from . import Event, get_timestamp
from ..shared import console_repr
class Console(Event):
    """A line of console input/output flowing through mark2."""
    contains = ('line', 'time', 'user', 'source', 'kind', 'data', 'level')
    requires = ('line',)

    line = Event.Arg(required=True)
    kind = Event.Arg()
    time = Event.Arg()
    user = Event.Arg(default='')
    source = Event.Arg(default='mark2')
    data = Event.Arg()
    level = Event.Arg()

    def setup(self):
        # default missing fields from the current time / the raw line
        if not self.time:
            self.time = get_timestamp(self.time)
        if not self.data:
            self.data = self.line

    def value(self):
        # canonical printable representation shared with the UI code
        return console_repr(self)
########NEW FILE########
__FILENAME__ = error
from . import Event
class Error(Event):
    """Generic, recoverable error event."""
    pass

class FatalError(Event):
    """Unrecoverable error; carries the exception and a human-readable reason."""
    exception = Event.Arg()
    reason = Event.Arg()
########NEW FILE########
__FILENAME__ = hook
from . import Event
class Hook(Event):
    """Named hook event, raised for console "~name args" commands or
    directly by plugins."""
    name = Event.Arg()
    is_command = Event.Arg()
    args = Event.Arg()
    line = Event.Arg()

    def setup(self):
        # derive name/args from a raw "~name args..." console line
        # when they were not supplied explicitly
        if not self.name and self.line:
            parts = self.line.split(" ", 1)
            self.name = parts[0][1:]
            self.is_command = True
            if len(parts) == 2:
                self.args = parts[1]

    def prefilter(self, name, public=False, doc=None):
        """Return True when this hook matches the handler's registration."""
        if self.name != name:
            return False
        # console-typed commands only reach handlers registered as public
        return not (self.is_command and not public)
########NEW FILE########
__FILENAME__ = player
from . import Event
class PlayerEvent(Event):
    """Base for per-player events; normalises the username to ascii bytes."""
    def setup(s):
        s.username = s.username.encode('ascii')

#Raised in manager
class PlayerJoin(PlayerEvent):
    username = Event.Arg(required=True)
    ip = Event.Arg(required=True)

class PlayerQuit(PlayerEvent):
    username = Event.Arg(required=True)
    reason = Event.Arg(required=True)

class PlayerChat(PlayerEvent):
    username = Event.Arg(required=True)
    message = Event.Arg(required=True)

class PlayerDeath(PlayerEvent):
    """Player death; `format` is rendered into `text` during setup."""
    text = Event.Arg()
    username = Event.Arg(required=True)
    cause = Event.Arg(required=True)
    killer = Event.Arg()
    weapon = Event.Arg()
    format = Event.Arg(default="{username} died")

    def get_text(self, **kw):
        # format the death message, letting callers override fields via kw
        d = dict(((k, getattr(self, k)) for k in ('username', 'killer', 'weapon')))
        d.update(kw)
        return self.format.format(**d)

    def setup(self):
        self.text = self.get_text()
########NEW FILE########
__FILENAME__ = server
import re
from . import Event, get_timestamp
# input/output
# Parses a server log line into (time, level, message). Tolerates an
# optional leading date and bracket/thread-prefix variations —
# presumably covering both vanilla and Bukkit-style formats; confirm.
output_exp = re.compile(
    r'^(?:\d{4}-\d{2}-\d{2} |)\[?(\d{2}:\d{2}:\d{2})\]? \[?(?:[^\]]+?/|)([A-Z]+)\]:? (.*)')
class ServerInput(Event):
    """Send data to the server's stdin. In plugins, a shortcut
    is available: self.send("say hello")"""
    line = Event.Arg(required=True)

class ServerOutput(Event):
    """Issued when the server gives us a line on stdout. Note
    that to handle this, you must specify both the 'level'
    (e.g. INFO or SEVERE) and a regex pattern to match"""
    line = Event.Arg(required=True)
    time = Event.Arg()
    level = Event.Arg()
    data = Event.Arg()

    def setup(self):
        # split the raw line into timestamp, level and message body
        m = output_exp.match(self.line)
        if m:
            g = m.groups()
            self.time = g[0]
            self.level= g[1]
            self.data = g[2]
        else:
            # unparsable line: keep it whole, with an unknown level
            self.level= "???"
            self.data = self.line.strip()
        self.time = get_timestamp(self.time)

    def prefilter(self, pattern, level=None):
        # handler matches only when level agrees and the pattern matches;
        # the match object is stashed on the event for the handler's use
        if level and level != self.level:
            return False
        m = re.match(pattern, self.data)
        if not m:
            return False
        self.match = m
        return True
# start
class ServerStart(Event):
    """Issue this event to start the server"""
    pass

class ServerStarting(Event):
    """Issued by the ServerStart handler to alert listening plugins
    that the server process has started"""
    pid = Event.Arg()

class ServerStarted(Event):
    """Issued when we see the "Done! (1.23s)" line from the server

    This event has a helper method in plugins - just overwrite
    the server_started method.
    """

class ServerStop(Event):
    """Issue this event to stop the server."""
    reason = Event.Arg(required=True)
    respawn = Event.Arg(required=True)
    kill = Event.Arg(default=False)
    announce = Event.Arg(default=True)
    # only one handler should act on a stop request
    dispatch_once = True

class ServerStopping(Event):
    """Issued by the ServerStop handler to alert listening plugins
    that the server is going for a shutdown

    This event has a helper method in plugins - just overwrite
    the server_started method."""
    reason = Event.Arg(required=True)
    respawn = Event.Arg(required=True)
    kill = Event.Arg(default=False)

class ServerStopped(Event):
    """When the server process finally dies, this event is raised"""
    pass

class ServerEvent(Event):
    """Tell plugins about something happening to the server"""
    cause = Event.Arg(required=True)
    friendly = Event.Arg()
    data = Event.Arg(required=True)
    priority = Event.Arg(default=0)

    def setup(self):
        # default the human-readable name to the machine-readable cause
        if not self.friendly:
            self.friendly = self.cause
########NEW FILE########
__FILENAME__ = stat
from . import Event
class StatEvent(Event):
    """Base for periodic statistics samples; `source` names the provider."""
    source = Event.Arg()

#provider: ping
class StatPlayerCount(StatEvent):
    players_current = Event.Arg(required=True)
    players_max = Event.Arg(required=True)

#provider: console tracking
class StatPlayers(StatEvent):
    players = Event.Arg(required=True)

#provider: psutil
class StatProcess(StatEvent):
    cpu = Event.Arg(required=True)
    memory = Event.Arg(required=True)
########NEW FILE########
__FILENAME__ = user
from . import Event
#All these are raised in user_server
class UserInput(Event):
    """A console line typed by an attached user."""
    user = Event.Arg(required=True)
    line = Event.Arg(required=True)

class UserAttach(Event):
    """A user attached to the mark2 session."""
    user = Event.Arg(required=True)

class UserDetach(Event):
    """A user detached from the mark2 session."""
    user = Event.Arg(required=True)
########NEW FILE########
__FILENAME__ = launcher
import re
import os
import sys
import glob
import stat
import time
import errno
#start:
import subprocess
import getpass
import pwd
import tempfile
from . import manager
#config:
from .shared import find_config, open_resource
#attach:
from . import user_client
#send/stop/kill
import json
import socket
#jar-list/jar-get
from . import servers
from twisted.internet import reactor
usage_text = "usage: mark2 command [options] [...]"
help_text = """
mark2 is a minecraft server wrapper
{usage}
commands:
{commands}
examples:
mark2 start /home/you/mcservers/pvp
mark2 attach
mark2 send say hello!
mark2 stop
"""
help_sub_text = """
mark2 {subcommand}: {doc}
usage: mark2 {subcommand} {value_spec}
"""
class Mark2Error(Exception):
    """Base class for user-facing mark2 CLI errors."""

    def __init__(self, error):
        self.error = error

    def __str__(self):
        return "error: {0}".format(self.error)

class Mark2ParseError(Mark2Error):
    """Command-line parse failure; the message leads with the usage string."""

    def __str__(self):
        return "{0}\nparse error: {1}".format(usage_text, self.error)
class Command(object):
    """Base class for mark2 CLI sub-commands.

    Subclasses define `name`, `value_spec` and `options_spec` and implement
    run(); start() walks the inheritance chain calling do_start hooks from
    the root base down, then run(), then do_end hooks back up.
    """
    name = ""
    value_spec = ""
    options_spec = tuple()

    def __init__(self):
        pass

    def do_start(self):
        pass

    def do_end(self):
        pass

    @classmethod
    def get_bases(cls):
        # collect the single-inheritance ancestor chain (excluding object),
        # nearest base first
        o = []
        while True:
            cls = cls.__bases__[0]
            if cls is object:
                break
            o.append(cls)
        return o

    @classmethod
    def get_options_spec(cls):
        # the command's own options first, then inherited specs
        return sum([list(b.options_spec) for b in [cls] + cls.get_bases()[::-1]], [])

    def parse_options(self, c_args):
        """Split *c_args* into recognised options and a free-form value.

        Sets self.options (flag name -> value or True) and self.value
        (remaining args joined, or None). Raises Mark2ParseError /
        Mark2Error on malformed input.
        """
        options = {}
        options_tys = {}
        #transform
        for opt in self.__class__.get_options_spec():
            for flag in opt[1]:
                options_tys[flag] = opt
        while len(c_args) > 0:
            head = c_args[0]
            if head[0] != '-':
                break
            elif head == '--':
                # explicit end of options
                c_args.pop(0)
                break
            elif head in options_tys:
                opt = options_tys[c_args.pop(0)]
                try:
                    # options with a VALUE spec consume the next token;
                    # bare flags are recorded as True
                    options[opt[0]] = c_args.pop(0) if opt[2] != '' else True
                except IndexError:
                    raise Mark2ParseError("option `%s` missing argument" % opt[0])
            else:
                raise Mark2Error("%s: unknown option %s" % (self.name, head))
        self.options = options
        self.value = ' '.join(c_args) if len(c_args) else None

    def start(self):
        # run do_start hooks root-first, then the command, then do_end
        # hooks nearest-first
        bases = self.__class__.get_bases()
        for b in bases[::-1]:
            b.do_start(self)
        self.run()
        for b in bases:
            b.do_end(self)

    def run(self):
        raise NotImplementedError
class CommandTyStateful(Command):
    """Command mixin that discovers running servers via pid files kept in a
    shared base directory (default /tmp/mark2)."""
    options_spec = (('base', ('-b', '--base'), 'PATH', 'the directory to put mark2-related temp files (default: /tmp/mark2)'),)

    def do_start(self):
        self.shared_path = self.options.get('base', '/tmp/mark2')
        self.make_writable(self.shared_path)

        #get servers
        o = []
        for path in glob.glob(self.shared('pid', '*')):
            with open(path) as fp:
                pid = int(fp.read())
            try:
                # signal 0 only probes whether the process exists
                os.kill(pid, 0)
            except OSError as err:
                if err.errno == errno.ESRCH:
                    # stale pid file: the process is gone, clean it up
                    os.remove(path)
                    continue
            f = os.path.basename(path)
            f = os.path.splitext(f)[0]
            o.append(f)
        self.servers = sorted(o)

    def shared(self, ty, name=None):
        # path of a per-server shared file, e.g. /tmp/mark2/<name>.pid
        if name is None:
            name = self.server_name
        return os.path.join(self.shared_path, "%s.%s" % (name, ty))

    def make_writable(self, directory):
        """Ensure *directory* exists with modes usable by any local user."""
        need_modes = stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH | stat.S_IRWXG | stat.S_IRWXO
        # world-writable with the sticky bit, like /tmp itself
        good_modes = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO | stat.S_ISVTX
        if not os.path.exists(directory):
            os.makedirs(directory, good_modes)
        st = os.stat(directory)
        if (st.st_mode & need_modes) == need_modes:
            return True
        try:
            os.chmod(directory, good_modes)
            return True
        except Exception:
            raise Mark2Error('%s does not have the necessary modes to run mark2 and I do not have permission to change them!' % directory)
class CommandTySelective(CommandTyStateful):
    """Command mixin that targets a single server by name.

    With name_should_exist (the default), a missing --name selects the
    first running server; commands that create servers (CommandStart)
    set name_should_exist = False instead.
    """
    options_spec = (('name', ('-n', '--name'), 'NAME', 'create or select a server with this name'),)
    name_should_exist = True
    server_name = None  # filled in by do_start() (or by CommandStart.run())
    def do_start(self):
        # NOTE: CommandTyStateful.do_start has already populated self.servers
        # by the time Command.start() invokes this hook.
        name = self.options.get('name', None)
        if self.name_should_exist:
            if name is None:
                if len(self.servers) > 0:
                    name = self.servers[0]
                else:
                    raise Mark2Error("no servers running!")
            elif name not in self.servers:
                raise Mark2Error("server not running: %s" % name)
        else:
            if name is None:
                pass #CommandStart will fill it.
            elif name in self.servers:
                raise Mark2Error("server already running: %s" % name)
        self.server_name = name
    def do_send(self, data):
        """Send one line of console input to the server's unix socket."""
        d = {
            'type': 'input',
            'user': '@external',
            'line': data
        }
        d = json.dumps(d) + "\n"
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        s.connect(self.shared('sock'))
        s.send(d)
        s.close()
class CommandTyTerminal(CommandTySelective):
    """Command mixin that can tail the server log until a pattern appears."""
    options_spec = (
        ('wait', ('-w', '--wait'), 'REGEX', 'wait for this line of output to appear on console before returning.'),
        ('only', ('-o', '--only'), '', 'print the matched line and no others'),
        ('immediate', ('-i', '--immediate'), '', 'don\'t wait for any output'))
    wait = None              # regex to wait for; None means don't wait
    wait_from_start = False  # tail from the beginning of the log, not the end
    only = False             # suppress non-matching [mark2] lines
    def do_end(self):
        # command-line flags override whatever run() set
        if 'wait' in self.options:
            self.wait = self.options['wait']
        if 'only' in self.options:
            self.only = True
        if 'immediate' in self.options:
            self.wait = None
        try:
            self.do_wait()
        except KeyboardInterrupt:
            # ^C while waiting just returns quietly
            pass
    def do_wait(self):
        """Block, echoing log output, until self.wait matches a [mark2] line."""
        if self.wait is None:
            return
        # the log file may not exist yet right after daemonization
        while not os.path.exists(self.shared('log')):
            time.sleep(0.1)
        with open(self.shared('log'), 'r') as f:
            if not self.wait_from_start:
                f.seek(0,2)  # seek to end: only watch new output
            while True:
                line = f.readline().rstrip()
                if not line:
                    time.sleep(0.1)
                    continue
                # continuation lines (e.g. traceback bodies) start with whitespace
                if line[0] in (" ", "\t"):
                    print line
                    continue
                # log format assumed: "<date> <time> [system] <message>"
                line = line.split(" ", 3)
                if line[2] == '[mark2]':
                    line2 = line[3].split(" ", 2)
                    if re.search(self.wait, line2[2]):
                        print line[3]
                        return
                elif not self.only:
                    print line[3]
class CommandHelp(Command):
    """display help and available options"""
    name = 'help'
    value_spec = "[COMMAND]"
    def run(self):
        if self.value is None:
            # no argument: print the general help page with a command table
            print help_text.format(
                usage=usage_text,
                commands=self.columns([(c.name, c.value_spec, c.__doc__) for c in commands]))
        elif self.value in commands_d:
            # per-subcommand help built from the class docstring and option specs
            cls = commands_d[self.value]
            print help_sub_text.format(
                subcommand = self.value,
                doc = cls.__doc__,
                value_spec = cls.value_spec
            )
            opts = cls.get_options_spec()
            if len(opts) > 0:
                print "options:"
                print self.columns([(' '.join(o[1]), o[2], o[3]) for o in opts]) + "\n"
        else:
            raise Mark2Error("Unknown command: %s" % self.value)
    def columns(self, data):
        """Format rows of tokens into 12-character columns, indented one space."""
        o = []
        for tokens in data:
            line = ""
            for i, token in enumerate(tokens):
                line += token
                line += " "*(((i+1)*12)-len(line))
            o.append(line)
        return "\n".join((" "+l for l in o))
class CommandStart(CommandTyTerminal):
    """start a server"""
    name = 'start'
    value_spec='[PATH]'
    name_should_exist = False
    def get_server_path(self):
        """Resolve self.value into self.server_path (and self.jar_file for .jar paths)."""
        self.jar_file = None
        self.server_path = os.path.realpath("" if self.value is None else self.value)
        if os.path.isdir(self.server_path):
            pass
        elif os.path.isfile(self.server_path):
            if self.server_path.endswith('.jar'):
                self.server_path, self.jar_file = os.path.split(self.server_path)
            else:
                raise Mark2Error("unknown file type: " + self.server_path)
        else:
            raise Mark2Error("path does not exist: " + self.server_path)
    def check_config(self):
        """Fail with a helpful message if mark2.properties has not been set up."""
        new_cfg = find_config('mark2.properties', ignore_errors=True)
        if os.path.exists(new_cfg):
            return
        if os.path.exists(os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'config'))):
            # an old-layout config dir exists: tell the user where files moved
            new_dir = os.path.dirname(new_cfg)
            raise Mark2Error("mark2's configuration location has changed! move your config files to {0}".format(new_dir))
        else:
            raise Mark2Error("mark2 is unconfigured! run `mark2 config`")
    def check_ownership(self):
        """Refuse to start unless mark2 runs as the server directory's owner."""
        d_user = pwd.getpwuid(os.stat(self.server_path).st_uid).pw_name
        m_user = getpass.getuser()
        if d_user != m_user:
            e = "server directory is owned by '{d_user}', but mark2 is running as '{m_user}'. " + \
                "please start mark2 as `sudo -u {d_user} mark2 start ...`"
            raise Mark2Error(e.format(d_user=d_user,m_user=m_user))
    def daemonize(self):
        """Double-fork into the background.

        Returns 1 in the original (parent) process and 0 in the daemonized
        grandchild, whose standard streams point at /dev/null.
        """
        if os.fork() > 0:
            return 1
        os.chdir(".")
        os.setsid()
        os.umask(0)
        if os.fork() > 0:
            sys.exit(0)
        null = os.open('/dev/null', os.O_RDWR)
        # Redirect stdin/stdout/stderr (fds 0-2). This previously targeted
        # (1, 2, 3), which left stdin attached to the controlling terminal
        # and pointlessly clobbered fd 3.
        for fileno in (0, 1, 2):
            try:
                os.dup2(null, fileno)
            except OSError:
                pass
        if null > 2:
            os.close(null)  # the originals now point at /dev/null
        return 0
    def run(self):
        # parse the server path
        self.get_server_path()
        # get server name
        if self.server_name is None:
            self.server_name = os.path.basename(self.server_path)
            if self.server_name in self.servers:
                raise Mark2Error("server already running: %s" % self.server_name)
        # check for mark2.properties
        self.check_config()
        # check we own the server dir
        self.check_ownership()
        # clear old stuff
        for x in ('log', 'sock', 'pid'):
            if os.path.exists(self.shared(x)):
                os.remove(self.shared(x))
        i = 1
        while True:
            p = self.shared("log.%d" % i)
            if not os.path.exists(p):
                break
            os.remove(p)
            i += 1
        if self.daemonize() == 0:
            # child: record our pid and run the Manager under the reactor
            with open(self.shared('pid'), 'w') as f:
                f.write("{0}\n".format(os.getpid()))
            mgr = manager.Manager(self.shared_path, self.server_name, self.server_path, self.jar_file)
            reactor.callWhenRunning(mgr.startup)
            reactor.run()
            sys.exit(0)
        # parent: tail the log until the manager reports startup (or failure)
        self.wait = '# mark2 started|stopped\.'
        self.wait_from_start = True
class CommandConfig(Command):
    """configure mark2"""
    options_spec = (('ask', ('-a', '--ask'), '', 'Ask before starting an editor'),)
    name = 'config'
    def check_executable(self, cmd):
        """Return True if `cmd` resolves to an executable (via `command -v`)."""
        return subprocess.call(
            ["command", "-v", cmd],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        ) == 0
    def copy_config(self, src, dest, header=''):
        """Copy `src` into `dest`, skipping the leading '### ###' banner lines.

        NOTE(review): if `src` contains only banner/blank lines, readline()
        keeps returning '' at EOF and this loop never exits -- confirm that
        the default properties resource always has real content.
        """
        f0 = src
        f1 = dest
        l0 = ''
        # skip blank lines and the '### ###' banner at the top
        while l0.strip() == '' or l0.startswith('### ###'):
            l0 = f0.readline()
        f1.write(header)
        while l0 != '':
            f1.write(l0)
            l0 = f0.readline()
        f0.close()
        f1.close()
    def diff_config(self, src, dest):
        """Return only the lines the user added/changed relative to the defaults."""
        diff = ""
        d0 = src.readlines()
        d1 = dest.readlines()
        import difflib
        ignore = " \t\f\r\n"
        s = difflib.SequenceMatcher(lambda x: x in ignore, d0, d1)
        for tag, i0, i1, j0, j1 in s.get_opcodes():
            if tag in ('replace', 'insert'):
                for l1 in d1[j0:j1]:
                    if l1.strip(ignore) != '':
                        diff += l1
        return diff
    def run(self):
        path_old = 'resources/mark2.default.properties'
        path_new = find_config('mark2.properties')
        def write_config(data=''):
            # always prepend a pointer to the defaults file
            data = "# see resources/mark2.default.properties for details\n" + data
            with open(path_new, 'w') as file_new:
                file_new.write(data)
        if "MARK2_TEST" not in os.environ and self.options.get('ask', False):
            response = raw_input('would you like to configure mark2 now? [yes] ') or 'yes'
            if response != 'yes':
                # declined: make sure at least an empty config exists
                return write_config() if not os.path.exists(path_new) else None
        editors = ["editor", "nano", "vim", "vi", "emacs"]
        if "EDITOR" in os.environ:
            editors.insert(0, os.environ["EDITOR"])
        for editor in editors:
            if self.check_executable(editor):
                break
        else:
            # no usable editor found
            if not os.path.exists(path_new):
                write_config()
            raise Mark2Error("no editor found. please set the $EDITOR environment variable.")
        if os.path.exists(path_new):
            subprocess.call([editor, path_new])
        else:
            #launch our editor
            fd_tmp, path_tmp = tempfile.mkstemp(prefix='mark2.properties.', text=True)
            with open_resource(path_old) as src:
                with open(path_tmp, 'w') as dst:
                    self.copy_config(src, dst)
            subprocess.call([editor, path_tmp])
            #diff the files
            with open_resource(path_old) as src:
                with open(path_tmp, 'r') as dst:
                    write_config(self.diff_config(src, dst))
            os.remove(path_tmp)
class CommandList(CommandTyStateful):
"""list running servers"""
name = 'list'
def run(self):
for s in self.servers:
print s
class CommandAttach(CommandTySelective):
    """attach to a server"""
    name = 'attach'
    def run(self):
        # hand control over to the interactive console client
        factory = user_client.UserClientFactory(self.server_name, self.shared_path)
        factory.main()
class CommandStop(CommandTyTerminal):
    """stop mark2"""
    name = 'stop'
    def run(self):
        # ask the manager to shut down gracefully, then (in do_end) tail the
        # log until it confirms the stop
        self.wait = '# mark2 stopped\.'
        self.do_send('~stop')
class CommandKill(CommandTyTerminal):
    """kill mark2"""
    name = 'kill'
    def run(self):
        # forcibly terminate the server, then (in do_end) wait for confirmation
        self.wait = '# mark2 stopped\.'
        self.do_send('~kill')
class CommandSend(CommandTyTerminal):
    """send a console command"""
    name = 'send'
    value_spec='INPUT...'
    def run(self):
        line = self.value
        if line is None:
            raise Mark2ParseError("nothing to send!")
        self.do_send(line)
class CommandJarList(Command):
    """list server jars"""
    name = 'jar-list'
    def run(self):
        def err(what):
            # `what` is a twisted Failure; stop the reactor before reporting
            if reactor.running: reactor.stop()
            print "error: %s" % what.value
        def handle(listing):
            if reactor.running: reactor.stop()
            if len(listing) == 0:
                print "error: no server jars found!"
            else:
                print "The following server jars/zips are available:"
                print listing
        def start():
            # servers.jar_list() returns a Deferred
            d = servers.jar_list()
            d.addCallbacks(handle, err)
        reactor.callWhenRunning(start)
        reactor.run()
class CommandJarGet(Command):
    """download a server jar"""
    name = 'jar-get'
    value_spec = 'NAME'
    def run(self):
        if self.value is None:
            raise Mark2ParseError("missing jar type!")
        def err(what):
            # NOTE(review): the reactor is left running on failure (the stop
            # call below is commented out), so the process will not exit on
            # its own in the error path -- confirm this is intentional.
            #reactor.stop()
            print "error: %s" % what.value
        def handle((filename, data)):
            # success callback receives a (filename, bytes) tuple
            reactor.stop()
            if os.path.exists(filename):
                print "error: %s already exists!" % filename
            else:
                f = open(filename, 'wb')
                f.write(data)
                f.close()
                print "success! saved as %s" % filename
        def start():
            d = servers.jar_get(self.value)
            d.addCallbacks(handle, err)
        reactor.callWhenRunning(start)
        reactor.run()
# Registry of all launcher subcommands; commands_d maps each CLI name to its class.
commands = (CommandHelp, CommandStart, CommandList, CommandAttach, CommandStop, CommandKill, CommandSend, CommandJarList, CommandJarGet, CommandConfig)
commands_d = {c.name: c for c in commands}
def main():
    """Entry point: dispatch sys.argv to a Command; return a shell exit status."""
    try:
        c_args = sys.argv[1:]
        if len(c_args) == 0:
            command_name = 'help'  # bare `mark2` shows help
        else:
            command_name = c_args.pop(0)
        command_cls = commands_d.get(command_name, None)
        if command_cls is None:
            raise Mark2ParseError("unknown command: %s" % command_name)
        command = command_cls()
        command.parse_options(c_args)
        command.start()
        return 0
    except Mark2Error as e:
        # user-facing errors print their message and exit non-zero
        print e
        return 1
########NEW FILE########
__FILENAME__ = manager
import os
import traceback
import signal
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.python import log, logfile
#mark2 things
from . import events, properties, plugins
from .events import EventPriority
from .services import process
from .shared import find_config, open_resource
"""
This is the 'main' class that handles most of the logic
"""
class Manager(object):
    """Core coordinator for one wrapped server.

    Created by the launcher after daemonizing; startup() is scheduled on
    the twisted reactor. It wires up the event dispatcher, loads config,
    logging, services and plugins, then dispatches ServerStart.
    """
    name = "manager"
    started = False        # set after the first ServerStarted event
    shutting_down = False  # guard so shutdown() only fires once
    def __init__(self, shared_path, server_name, server_path, jar_file=None):
        self.shared_path = shared_path
        self.server_name = server_name
        self.server_path = server_path
        self.jar_file = jar_file
        self.players = set()  # currently-online player names
    def startup(self):
        """Reactor entry point: run really_start(), shutting down on any error."""
        reactor.addSystemEventTrigger('before', 'shutdown', self.before_reactor_stop)
        try:
            self.really_start()
        except Exception:
            for l in traceback.format_exc().split("\n"):
                print l
                self.console(l, kind='error')
            self.shutdown()
    def before_reactor_stop(self):
        # announce the stop so log watchers (e.g. `mark2 stop --wait`) see it
        self.console("mark2 stopped.")
    def really_start(self):
        #start event dispatcher
        self.events = events.EventDispatcher(self.handle_dispatch_error)
        #add some handlers
        self.events.register(self.handle_server_output, events.ServerOutput, priority=EventPriority.MONITOR, pattern="")
        self.events.register(self.handle_console, events.Console, priority=EventPriority.MONITOR)
        self.events.register(self.handle_fatal, events.FatalError, priority=EventPriority._HIGH)
        self.events.register(self.handle_server_started,events.ServerStarted, priority=EventPriority.MONITOR)
        self.events.register(self.handle_user_attach, events.UserAttach, priority=EventPriority.MONITOR)
        self.events.register(self.handle_user_detach, events.UserDetach, priority=EventPriority.MONITOR)
        self.events.register(self.handle_user_input, events.UserInput, priority=EventPriority.MONITOR)
        self.events.register(self.handle_player_join, events.PlayerJoin, priority=EventPriority.MONITOR)
        self.events.register(self.handle_player_quit, events.PlayerQuit, priority=EventPriority.MONITOR)
        self.events.register(self.handle_server_stopped,events.ServerStopped, priority=EventPriority.MONITOR)
        #change to server directory
        os.chdir(self.server_path)
        #load config
        self.load_config()
        #start logging
        self.start_logging()
        #chmod log and pid
        for ext in ('log', 'pid'):
            os.chmod(os.path.join(self.shared_path, "%s.%s" % (self.server_name, ext)), self.config.get_umask(ext))
        self.console("mark2 starting...")
        #find jar file
        if self.jar_file is None:
            self.jar_file = process.find_jar(
                self.config['mark2.jar_path'].split(';'),
                self.jar_file)
            if self.jar_file is None:
                return self.fatal_error("Couldn't find server jar!")
        #load server.properties
        self.properties = properties.load(properties.Mark2Properties, open_resource('resources/server.default.properties'), 'server.properties')
        if self.properties is None:
            return self.fatal_error(reason="couldn't find server.properties")
        # unix socket path; matches what CommandTySelective.do_send connects to
        self.socket = os.path.join(self.shared_path, "%s.sock" % self.server_name)
        # load services (only those not explicitly disabled in config)
        self.services = plugins.PluginManager(self,
                                              search_path='services',
                                              name='service',
                                              get_config=self.get_service_config)
        for name in self.services.find():
            cfg = self.get_service_config(name)
            if not cfg.get('enabled', True):
                continue
            result = self.services.load(name)
            if not result:
                return self.fatal_error(reason="couldn't load service: '{0}'".format(name))
        #load plugins
        self.plugins = plugins.PluginManager(self,
                                             search_path='plugins',
                                             name='plugin',
                                             get_config=self.get_plugin_config,
                                             require_config=True)
        self.load_plugins()
        #start the server
        self.events.dispatch(events.ServerStart())
    def handle_dispatch_error(self, event, callback, failure):
        """Log an exception raised inside an event handler without crashing."""
        o = "An event handler threw an exception: \n"
        o += " Callback: %s\n" % callback
        o += " Event: \n"
        o += "".join((" %s: %s\n" % (k, v) for k, v in event.serialize().iteritems()))
        # log the message and a very verbose exception log to the log file
        log.msg(o)
        failure.printDetailedTraceback()
        # log a less verbose exception to the console
        o += "\n".join(" %s" % l for l in failure.getTraceback().split("\n"))
        self.console(o)
    #helpers
    def start_logging(self):
        """Route twisted's log to <shared>/<name>.log with configured rotation."""
        log_rotate = self.config['mark2.log.rotate_mode']
        log_size = self.config['mark2.log.rotate_size']
        log_limit = self.config['mark2.log.rotate_limit']
        if log_rotate == 'daily':
            log_obj = logfile.DailyLogFile("%s.log" % self.server_name, self.shared_path)
        elif log_rotate in ('off', 'size'):
            log_obj = logfile.LogFile("%s.log" % self.server_name, self.shared_path,
                                      rotateLength=log_size if log_rotate == 'size' else None,
                                      maxRotatedFiles=log_limit if log_limit != "" else None)
        else:
            raise ValueError("mark2.log.rotate-mode is invalid.")
        log.startLogging(log_obj)
    def load_config(self):
        """Load mark2.properties layered over the packaged defaults."""
        self.config = properties.load(properties.Mark2Properties,
                                      open_resource('resources/mark2.default.properties'),
                                      find_config('mark2.properties'),
                                      'mark2.properties')
        if self.config is None:
            return self.fatal_error(reason="couldn't find mark2.properties")
    def get_plugin_config(self, name):
        return dict(self.config.get_plugins()).get(name, {})
    def get_service_config(self, name):
        return dict(self.config.get_service(name))
    def load_plugins(self):
        for name, _ in self.config.get_plugins():
            self.plugins.load(name)
    def shutdown(self):
        """Stop the whole process (idempotent); SIGINT makes the reactor exit."""
        if not self.shutting_down:
            self.shutting_down = True
            reactor.callInThread(lambda: os.kill(os.getpid(), signal.SIGINT))
    def console(self, line, **k):
        """Dispatch a Console event for each line of `line`."""
        for l in unicode(line).split(u"\n"):
            k['line'] = l
            self.events.dispatch(events.Console(**k))
    def fatal_error(self, *a, **k):
        k.setdefault('reason', a[0] if a else None)
        self.events.dispatch(events.FatalError(**k))
    def send(self, line):
        """Feed one line of input to the server process."""
        self.events.dispatch(events.ServerInput(line=line))
    #handlers
    def handle_server_output(self, event):
        # mirror raw server output as a Console event
        self.events.dispatch(events.Console(source='server',
                                            line=event.line,
                                            time=event.time,
                                            level=event.level,
                                            data=event.data))
    def handle_console(self, event):
        # every console event also goes to the twisted log (the .log file)
        for line in event.value().encode('utf8').split("\n"):
            log.msg(line, system="mark2")
    def handle_fatal(self, event):
        s = "fatal error: %s" % event.reason
        self.console(s, kind="error")
        self.shutdown()
    def handle_server_started(self, event):
        # re-read server.properties: the server may have (re)written it
        properties_ = properties.load(properties.Mark2Properties, open_resource('resources/server.default.properties'), 'server.properties')
        if properties_:
            self.properties = properties_
        if not self.started:
            self.console("mark2 started.")
            self.started = True
    def handle_user_attach(self, event):
        self.console("%s attached" % event.user, kind="joinpart")
    def handle_user_detach(self, event):
        self.console("%s detached" % event.user, kind="joinpart")
    @inlineCallbacks
    def handle_user_input(self, event):
        # ~lines are mark2 commands, #lines are comments, rest goes to the server
        self.console(event.line, user=event.user, source="user")
        if event.line.startswith("~"):
            handled = yield self.events.dispatch(events.Hook(line=event.line))
            if not handled:
                self.console("unknown command.")
        elif event.line.startswith('#'):
            pass
        else:
            self.events.dispatch(events.ServerInput(line=event.line))
    def handle_command(self, user, text):
        self.console(text, prompt=">", user=user)
        self.send(text)
    def handle_player_join(self, event):
        self.players.add(str(event.username))
        self.events.dispatch(events.StatPlayers(players=list(self.players)))
    def handle_player_quit(self, event):
        self.players.discard(str(event.username))
        self.events.dispatch(events.StatPlayers(players=list(self.players)))
    def handle_server_stopped(self, event):
        self.players.clear()
        self.events.dispatch(events.StatPlayers(players=[]))
########NEW FILE########
__FILENAME__ = alert
import os
import random
from mk2.plugins import Plugin
from mk2.events import Hook, StatPlayerCount
class Alert(Plugin):
    """Plugin that periodically broadcasts a random message.

    Messages are read from `path` (one per line); every `interval` seconds
    one is sent through `command`, but only while at least `min_pcount`
    players are online.
    """
    interval = Plugin.Property(default=200)
    command = Plugin.Property(default="say {message}")
    path = Plugin.Property(default="alerts.txt")
    min_pcount = Plugin.Property(default=0)
    messages = []
    requirements_met = True
    def setup(self):
        self.register(self.count_check, StatPlayerCount)
        # Bind a fresh list on the instance: the class-level `messages = []`
        # is shared, so appending to it directly would accumulate duplicates
        # across plugin reloads/instances.
        self.messages = []
        if self.path and os.path.exists(self.path):
            with open(self.path, 'r') as f:
                for l in f:
                    l = l.strip()
                    if l:
                        self.messages.append(l)
    def count_check(self, event):
        # gate alerts on the current player count
        self.requirements_met = event.players_current >= self.min_pcount
    def server_started(self, event):
        if self.messages:
            self.repeating_task(self.repeater, self.interval)
    def repeater(self, event):
        if self.requirements_met:
            self.send_format(self.command, message=random.choice(self.messages))
########NEW FILE########
__FILENAME__ = backup
import time
import glob
import os
from twisted.internet import protocol, reactor, defer
from mk2.plugins import Plugin
from mk2.events import Hook, ServerOutput, ServerStopped, EventPriority
import shlex
class Backup(Plugin):
    """Plugin that tars up the world (or a configured file set) on ~backup.

    Autosaving is turned off first and the server given `flush_wait` seconds
    to flush chunks before tar runs; the previous autosave state is restored
    when the archive completes.
    """
    path = Plugin.Property(default="backups/{timestamp}.tar.gz")
    mode = Plugin.Property(default="include")
    spec = Plugin.Property(default="world*")
    tar_flags = Plugin.Property(default='-hpczf')
    flush_wait = Plugin.Property(default=5)
    backup_stage = 0       # 0: idle, 1: waiting for save-off ack, 2: tar pending/running
    autosave_enabled = True
    proto = None           # ProcessProtocol of a running tar, if any
    done_backup = None     # Deferred fired when the current backup finishes
    def setup(self):
        self.register(self.backup, Hook, public=True, name='backup', doc='backup the server to a .tar.gz')
        self.register(self.autosave_changed, ServerOutput, pattern="(?P<username>[A-Za-z0-9_]{1,16}): (?P<action>Enabled|Disabled) level saving\.\.")
        self.register(self.autosave_changed, ServerOutput, pattern="Turned (?P<action>on|off) world auto-saving")
        self.register(self.server_stopped, ServerStopped, priority=EventPriority.HIGHEST)
    def server_started(self, event):
        self.autosave_enabled = True
    @EventPriority.HIGH
    @defer.inlineCallbacks
    def server_stopping(self, event):
        # don't let the server die mid-backup
        if self.backup_stage > 0:
            self.console("backup: delaying server stop until backup operation completes.")
            yield self.done_backup
        self.stop_tasks()
        self.autosave_enabled = False
    def server_stopped(self, event):
        self.autosave_enabled = False
    def save_state(self):
        """Abort any in-flight tar before a reload; persist the autosave flag."""
        if self.proto:
            self.console("stopping in-progress backup!")
            self.proto.transport.signalProcess('KILL')
        if self.done_backup:
            self.done_backup.callback(None)
        return self.autosave_enabled
    def load_state(self, state):
        self.autosave_enabled = state
    def autosave_changed(self, event):
        self.autosave_enabled = event.match.groupdict()['action'].lower() in ('on', 'enabled')
        if self.backup_stage == 1 and not self.autosave_enabled:
            # server acknowledged save-off: give it flush_wait seconds to flush
            self.backup_stage = 2
            self.delayed_task(self.do_backup, self.flush_wait)
        elif self.backup_stage == 2:
            self.console("warning: autosave changed while backup was in progress!")
    def backup(self, event):
        """~backup hook: returns a Deferred that fires when the archive is done."""
        if self.backup_stage > 0:
            self.console("backup already in progress!")
            return
        self.done_backup = defer.Deferred()
        self.console("map backup starting...")
        self.autosave_enabled_prev = self.autosave_enabled
        if self.autosave_enabled:
            self.backup_stage = 1
            self.send('save-off')
        else:
            self.backup_stage = 2
            self.do_backup()
        return self.done_backup
    def do_backup(self, *a):
        timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.gmtime())
        path = self.path.format(timestamp=timestamp, name=self.parent.server_name)
        if not os.path.exists(os.path.dirname(path)):
            try:
                os.makedirs(os.path.dirname(path))
            # os.makedirs raises OSError; the original `except IOError:`
            # never matched it, so a failed mkdir crashed instead of warning
            except (IOError, OSError):
                self.console("Warning: {0} doesn't exist and I can't create it".format(os.path.dirname(path)),
                             kind='error')
                return
        # build the file set to archive from the include/exclude globs
        if self.mode == "include":
            add = set()
            for e in self.spec.split(";"):
                add |= set(glob.glob(e))
        elif self.mode == "exclude":
            add = set(glob.glob('*'))
            for e in self.spec.split(";"):
                add -= set(glob.glob(e))
        cmd = ['tar']
        cmd.extend(shlex.split(self.tar_flags))
        cmd.append(path)
        cmd.extend(add)
        def p_ended(path):
            # tar finished: restore autosave and release any waiting Deferred
            self.console("map backup saved to %s" % path)
            if self.autosave_enabled_prev:
                self.send('save-on')
            self.backup_stage = 0
            self.proto = None
            if self.done_backup:
                d = self.done_backup
                self.done_backup = None
                d.callback(None)
        self.proto = protocol.ProcessProtocol()
        self.proto.processEnded = lambda reason: p_ended(path)
        self.proto.childDataReceived = lambda fd, d: self.console(d.strip())
        reactor.spawnProcess(self.proto, "/bin/tar", cmd)
########NEW FILE########
__FILENAME__ = irc
import re
import os.path as path
from twisted.words.protocols import irc
from twisted.internet import defer
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet.interfaces import ISSLTransport
from twisted.python.util import InsensitiveDict
from mk2.plugins import Plugin
from mk2.events import PlayerChat, PlayerJoin, PlayerQuit, PlayerDeath, ServerOutput, ServerStopping, ServerStarting, StatPlayers, Hook
try:
    from OpenSSL import SSL
    from twisted.internet import ssl
    have_ssl = True

    class Mark2ClientContextFactory(ssl.ClientContextFactory):
        """SSL context factory with optional fingerprint pinning and client cert."""
        def __init__(self, parent, fingerprint=None, cert=None):
            self.parent = parent
            self.fingerprint = fingerprint
            self.cert = path.expanduser(cert) if cert else None
        @staticmethod
        def stripfp(fp):
            # normalize "AA:BB:CC" -> "aabbcc" for comparison
            return fp.replace(':', '').lower()
        def verify(self, conn, cert, errno, errdepth, rc):
            # accept only if the peer cert's sha1 digest matches the pinned one
            ok = self.stripfp(cert.digest("sha1")) == self.stripfp(self.fingerprint)
            if self.parent and self.parent.factory.reconnect and not ok:
                self.parent.console("irc: server certificate verification failed")
                self.parent.factory.reconnect = False
            return ok
        def getContext(self):
            ctx = ssl.ClientContextFactory.getContext(self)
            if self.fingerprint:
                ctx.set_verify(SSL.VERIFY_PEER, self.verify)
            if self.cert:
                ctx.use_certificate_file(self.cert)
                ctx.use_privatekey_file(self.cert)
            return ctx
except Exception:
    # pyOpenSSL / twisted SSL unavailable (or incompatible): run without SSL.
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.
    have_ssl = False
class IRCUser(object):
    """Lightweight record of a user seen in the IRC channel."""
    username = ""
    hostname = ""
    status = ""   # channel-status prefix characters, e.g. '@+'
    oper = False
    away = False
    def __init__(self, parent, nick):
        self.parent = parent
        self.nick = nick
    @property
    def priority(self):
        """Best (numerically lowest) rank among this user's status prefixes.

        Returns None when the user holds no status at all.
        """
        if not self.status:
            return None
        ranks = self.parent.priority
        return min(ranks[prefix] for prefix in self.status)
class SASLExternal(object):
    """SASL EXTERNAL mechanism: identity comes from the transport (client cert)."""
    name = "EXTERNAL"

    def __init__(self, username, password):
        # credentials are intentionally unused for EXTERNAL
        pass

    def is_valid(self):
        return True

    def respond(self, data):
        # EXTERNAL sends an empty response regardless of the challenge
        return ""
class SASLPlain(object):
    """SASL PLAIN mechanism: authzid NUL authcid NUL password, in one shot."""
    name = "PLAIN"

    def __init__(self, username, password):
        self.response = "\0".join((username, username, password))

    def is_valid(self):
        # invalid only when both username and password are empty
        return self.response != "\0\0"

    def respond(self, data):
        # PLAIN is single-shot: any server-sent challenge data is an error
        if data:
            return False
        return self.response
# SASL mechanisms to attempt, in order of preference (see IRCBot.sasl_next)
SASL_MECHANISMS = (SASLExternal, SASLPlain)
class IRCBot(irc.IRCClient):
sasl_buffer = ""
sasl_result = None
sasl_login = None
def __init__(self, factory, plugin):
self.factory = factory
self.nickname = plugin.nickname.encode('ascii')
self.realname = plugin.realname.encode('ascii')
self.username = plugin.ident.encode('ascii')
self.ns_username = plugin.username
self.ns_password = plugin.password
self.password = plugin.server_password.encode('ascii')
self.channel = plugin.channel.encode('ascii')
self.key = plugin.key.encode('ascii')
self.console = plugin.console
self.irc_message = plugin.irc_message
self.irc_action = plugin.irc_action
self.irc_chat_status = plugin.irc_chat_status
self.mangle_username = plugin.mangle_username
self.users = InsensitiveDict()
self.cap_requests = set()
def register(self, nickname, hostname="foo", servername="bar"):
self.sendLine("CAP LS")
return irc.IRCClient.register(self, nickname, hostname, servername)
def sendLine(self, line):
irc.IRCClient.sendLine(self, line.encode('ascii', 'replace'))
def _parse_cap(self, cap):
mod = ''
while cap[0] in "-~=":
mod, cap = mod + cap[0], cap[1:]
if '/' in cap:
vendor, cap = cap.split('/', 1)
else:
vendor = None
return (cap, mod, vendor)
def request_cap(self, *caps):
self.cap_requests |= set(caps)
self.sendLine("CAP REQ :{0}".format(' '.join(caps)))
@defer.inlineCallbacks
def end_cap(self):
if self.sasl_result:
yield self.sasl_result
self.sendLine("CAP END")
def irc_CAP(self, prefix, params):
self.supports_cap = True
identifier, subcommand, args = params
args = args.split(' ')
if subcommand == "LS":
self.sasl_start(args)
if not self.cap_requests:
self.sendLine("CAP END")
elif subcommand == "ACK":
ack = []
for cap in args:
if not cap:
continue
cap, mod, vendor = self._parse_cap(cap)
if '-' in mod:
if cap in self.capabilities:
del self.capabilities[cap]
continue
self.cap_requests.remove(cap)
if cap == 'sasl':
self.sasl_next()
if ack:
self.sendLine("CAP ACK :{0}".format(' '.join(ack)))
if not self.cap_requests:
self.end_cap()
elif subcommand == "NAK":
# this implementation is probably not compliant but it will have to do for now
for cap in args:
self.cap_requests.remove(cap)
if not self.cap_requests:
self.end_cap()
def signedOn(self):
if ISSLTransport.providedBy(self.transport):
cert = self.transport.getPeerCertificate()
fp = cert.digest("sha1")
verified = "verified" if self.factory.parent.server_fingerprint else "unverified"
self.console("irc: connected securely. server fingerprint: {0} ({1})".format(fp, verified))
else:
self.console("irc: connected")
if self.ns_username and self.ns_password and not self.sasl_login:
self.msg('NickServ', 'IDENTIFY {0} {1}'.format(self.ns_username, self.ns_password))
self.join(self.channel, self.key)
def irc_JOIN(self, prefix, params):
nick = prefix.split('!')[0]
channel = params[-1]
if nick == self.nickname:
self.joined(channel)
else:
self.userJoined(prefix, channel)
def joined(self, channel):
self.console('irc: joined channel')
self.factory.client = self
def who(a):
self.sendLine("WHO " + channel)
self.factory.parent.repeating_task(who, 30, now=True)
def isupport(self, args):
self.compute_prefix_names()
def compute_prefix_names(self):
KNOWN_NAMES = {"o": "op", "h": "halfop", "v": "voice"}
prefixdata = self.supported.getFeature("PREFIX", {"o": ("@", 0), "v": ("+", 1)}).items()
op_priority = ([priority for mode, (prefix, priority) in prefixdata if mode == "o"] + [None])[0]
self.prefixes, self.statuses, self.priority = {}, {}, {}
for mode, (prefix, priority) in prefixdata:
name = "?"
if mode in KNOWN_NAMES:
name = KNOWN_NAMES[mode]
elif priority == 0:
if op_priority == 2:
name = "owner"
else:
name = "admin"
else:
name = "+" + mode
self.prefixes[mode] = prefix
self.statuses[prefix] = name
self.priority[name] = priority
self.priority[mode] = priority
self.priority[prefix] = priority
def parse_prefixes(self, user, nick, prefixes=''):
status = []
prefixdata = self.supported.getFeature("PREFIX", {"o": ("@", 0), "v": ("+", 1)}).items()
for mode, (prefix, priority) in prefixdata:
if prefix in prefixes + nick:
nick = nick.replace(prefix, '')
status.append((prefix, priority))
if nick == self.nickname:
return
user.status = ''.join(t[0] for t in sorted(status, key=lambda t: t[1]))
def irc_RPL_WHOREPLY(self, prefix, params):
_, channel, username, host, server, nick, status, hg = params
if nick == self.nickname:
return
hops, gecos = hg.split(' ', 1)
user = IRCUser(self, nick)
user.username = username
user.hostname = host
user.oper = '*' in status
user.away = status[0] == 'G'
self.users[nick] = user
self.parse_prefixes(user, nick, status[1:].replace('*', ''))
def modeChanged(self, user, channel, _set, modes, args):
args = list(args)
if channel.lower() != self.channel.lower():
return
for m, arg in zip(modes, args):
if m in self.prefixes and arg != self.nickname:
u = self.users.get(arg, None)
if u:
u.status = u.status.replace(self.prefixes[m], '')
if _set:
u.status = ''.join(sorted(list(u.status + self.prefixes[m]),
key=lambda k: self.priority[k]))
def has_status(self, nick, status):
if status != 0 and not status:
return True
if status not in self.priority:
return False
priority = self.priority[status]
u = self.users.get(nick, None)
return u and (u.priority is not None) and u.priority <= priority
def userJoined(self, user, channel):
nick = user.split('!')[0]
user = IRCUser(self, nick)
self.users[nick] = user
def userRenamed(self, oldname, newname):
if oldname not in self.users:
return
u = self.users[oldname]
u.nick = newname
self.users[newname] = u
del self.users[oldname]
def userLeft(self, user, channel):
if user not in self.users:
return
del self.users[user]
def userKicked(self, kickee, channel, kicker, message):
if kickee not in self.users:
return
del self.users[kickee]
def userQuit(self, user, quitMessage):
if user not in self.users:
return
del self.users[user]
def privmsg(self, user, channel, msg):
if channel != self.channel:
return
if '!' not in user:
return
nick = user.split('!')[0]
p = self.factory.parent
if not self.has_status(nick, self.irc_chat_status):
return
if p.irc_players_enabled and msg.lower() == p.irc_command_prefix + "players":
self.say(self.channel, p.irc_players_format.format(
players=', '.join(map(self.mangle_username, p.players))))
elif p.irc_command_prefix and msg.startswith(p.irc_command_prefix) and p.irc_command_status and self.has_status(nick, p.irc_command_status):
argv = msg[len(p.irc_command_prefix):].split(' ')
command = argv[0]
if command.startswith('~'):
if p.irc_command_mark2 and (command.lower() in p.irc_command_allow.lower().split(',') or p.irc_command_allow == '*'):
p.dispatch(Hook(line=' '.join(argv)))
else:
if command.lower() in p.irc_command_allow.lower().split(',') or p.irc_command_allow == '*':
p.send(' '.join(argv))
else:
self.irc_message(nick, msg)
def action(self, user, channel, msg):
self.console("%s %s %s" % (user, channel, msg))
if channel != self.channel:
return
if '!' not in user:
return
nick = user.split('!')[0]
if self.has_status(nick, self.irc_chat_status):
self.irc_action(nick, msg)
def irc_AUTHENTICATE(self, prefix, params):
self.sasl_continue(params[0])
def sasl_send(self, data):
while data and len(data) >= 400:
en, data = data[:400].encode('base64').replace('\n', ''), data[400:]
self.sendLine("AUTHENTICATE " + en)
if data:
self.sendLine("AUTHENTICATE " + data.encode('base64').replace('\n', ''))
else:
self.sendLine("AUTHENTICATE +")
def sasl_start(self, cap_list):
    """Begin SASL negotiation if the server advertises the capability.

    Requests the 'sasl' cap, prepares a deferred that fires when
    authentication concludes, and queues the configured mechanisms.
    (Removed a stray debug ``print cap_list`` left in the no-SASL branch.)
    """
    if 'sasl' not in cap_list:
        # server doesn't support SASL; nothing to do
        return
    self.request_cap('sasl')
    self.sasl_result = defer.Deferred()
    self.sasl_mechanisms = list(SASL_MECHANISMS)
def sasl_next(self):
    # Instantiate mechanisms from the queue until one reports is_valid();
    # returns False when every candidate is exhausted.
    mech = None
    while not mech or not mech.is_valid():
        if not self.sasl_mechanisms:
            return False
        self.sasl_auth = mech = self.sasl_mechanisms.pop(0)(self.ns_username, self.ns_password)
    self.sendLine("AUTHENTICATE " + self.sasl_auth.name)
    return True
def sasl_continue(self, data):
    # Handle one AUTHENTICATE payload from the server. '+' is an empty
    # challenge; a full 400-byte chunk means more chunks follow, so buffer.
    if data == '+':
        data = ''
    else:
        data = data.decode('base64')
    if len(data) == 400:
        self.sasl_buffer += data
    else:
        response = self.sasl_auth.respond(self.sasl_buffer + data)
        if response is False: # abort
            self.sendLine("AUTHENTICATE *")
        else:
            self.sasl_send(response)
        self.sasl_buffer = ""
def sasl_finish(self):
    """Resolve the pending SASL deferred with success, if one is waiting."""
    pending = self.sasl_result
    if pending:
        pending.callback(True)
        self.sasl_result = None
def sasl_failed(self, whine=True):
    # A mechanism failed: try the next one, or give up on SASL entirely.
    if self.sasl_login is False:
        # already gave up
        return
    if self.sasl_next():
        # another mechanism is now being attempted
        return
    self.sasl_login = False
    self.sendLine("AUTHENTICATE *")
    self.sasl_finish()
    if whine:
        self.console("irc: failed to log in.")
def irc_904(self, prefix, params):
    """ERR_SASLFAIL: current mechanism rejected; fall through to the next.

    (Removed a leftover debug ``print params``.)
    """
    self.sasl_failed()
def irc_905(self, prefix, params):
    """ERR_SASLTOOLONG: message too long; treat like a mechanism failure.

    (Removed a leftover debug ``print params``.)
    """
    self.sasl_failed()
def irc_906(self, prefix, params):
    """SASL aborted by the server: record failure, but stay quiet."""
    self.sasl_failed(whine=False)
def irc_907(self, prefix, params):
    """Already authenticated: record failure without complaining."""
    self.sasl_failed(whine=False)
def irc_900(self, prefix, params):
    # RPL_LOGGEDIN: remember the account name we authenticated as.
    self.sasl_login = params[2]
    self.console("irc: logged in as '{0}' (using {1})".format(self.sasl_login, self.sasl_auth.name))
def irc_903(self, prefix, params):
    # RPL_SASLSUCCESS: authentication complete -- fire the pending deferred.
    self.sasl_finish()
def alterCollidedNick(self, nickname):
    """Nick collision handler: retry with a trailing underscore appended."""
    return '%s_' % nickname
def irc_relay(self, message):
    # Forward a line from the game into the IRC channel, UTF-8 encoded.
    self.say(self.channel, message.encode('utf8'))
class IRCBotFactory(protocol.ClientFactory):
    """Twisted factory that (re)connects the relay bot to the IRC server."""
    protocol = IRCBot
    # live IRCBot instance; presumably set by the protocol on signon -- not
    # visible in this file chunk (TODO confirm)
    client = None
    # cleared on plugin teardown so a deliberate quit stays disconnected
    reconnect = True

    def __init__(self, parent):
        self.parent = parent

    def clientConnectionLost(self, connector, reason):
        # Auto-reconnect unless the plugin is being unloaded.
        if self.reconnect:
            self.parent.console("irc: lost connection with server: %s" % reason.getErrorMessage())
            self.parent.console("irc: reconnecting...")
            connector.connect()

    def clientConnectionFailed(self, connector, reason):
        self.parent.console("irc: connection attempt failed: %s" % reason.getErrorMessage())

    def buildProtocol(self, addr):
        p = IRCBot(self, self.parent)
        return p

    def irc_relay(self, message):
        # Pass a game line to the connected bot, if there is one.
        if self.client:
            self.client.irc_relay(message)
class IRC(Plugin):
    """Two-way relay between the game server and an IRC channel."""
    #connection
    host = Plugin.Property(required=True)
    port = Plugin.Property(required=True)
    server_password = Plugin.Property()
    channel = Plugin.Property(required=True)
    key = Plugin.Property()
    certificate = Plugin.Property()
    ssl = Plugin.Property(default=False)
    server_fingerprint = Plugin.Property()

    #user
    nickname = Plugin.Property(default="RelayBot")
    realname = Plugin.Property(default="mark2 IRC relay")
    ident = Plugin.Property(default="RelayBot")
    username = Plugin.Property(default="")
    password = Plugin.Property(default="")

    #general
    cancel_highlight = Plugin.Property(default=False, type_=False)
    cancel_highlight_str = Plugin.Property(default=u"_")

    #game -> irc settings
    game_columns = Plugin.Property(default=True)
    game_status_enabled = Plugin.Property(default=True)
    game_status_format = Plugin.Property(default=u"!, | server {what}.")
    game_chat_enabled = Plugin.Property(default=True)
    game_chat_format = Plugin.Property(default=u"{username}, | {message}")
    game_chat_private = Plugin.Property(default=None)
    game_join_enabled = Plugin.Property(default=True)
    game_join_format = Plugin.Property(default=u"*, | --> {username}")
    game_quit_enabled = Plugin.Property(default=True)
    game_quit_format = Plugin.Property(default=u"*, | <-- {username}")
    game_death_enabled = Plugin.Property(default=True)
    game_death_format = Plugin.Property(default=u"*, | {text}")
    game_server_message_enabled = Plugin.Property(default=True)
    game_server_message_format = Plugin.Property(default=u"#server, | {message}")

    #bukkit only
    game_me_enabled = Plugin.Property(default=True)
    game_me_format = Plugin.Property(default=u"*, | {username} {message}")

    #irc -> game settings
    irc_chat_enabled = Plugin.Property(default=True)
    irc_chat_command = Plugin.Property(default=u"say [IRC] <{nickname}> {message}")
    irc_action_command = Plugin.Property(default=u"say [IRC] * {nickname} {message}")
    irc_chat_status = Plugin.Property(default=None)
    irc_command_prefix = Plugin.Property(default="!")
    irc_command_status = Plugin.Property(default=None)
    irc_command_allow = Plugin.Property(default="")
    irc_command_mark2 = Plugin.Property(default=False)
    irc_players_enabled = Plugin.Property(default=True)
    irc_players_format = Plugin.Property(default=u"*, | players currently in game: {players}")

    def setup(self):
        # Connect to IRC (TLS or plain) and wire game events to relay lines.
        self.players = []
        self.factory = IRCBotFactory(self)
        if self.ssl:
            if have_ssl:
                cf = Mark2ClientContextFactory(self,
                                               cert=self.certificate,
                                               fingerprint=self.server_fingerprint)
                reactor.connectSSL(self.host, self.port, self.factory, cf)
            else:
                self.parent.console("Couldn't load SSL for IRC!")
                return
        else:
            reactor.connectTCP(self.host, self.port, self.factory)

        if self.game_status_enabled:
            self.register(self.handle_stopping, ServerStopping)
            self.register(self.handle_starting, ServerStarting)

        self.column_width = 16
        if self.cancel_highlight == "insert":
            self.column_width += len(self.cancel_highlight_str)

        def register(event_type, format, filter_=None, *a, **k):
            # Bind a game event type to an IRC relay line built from `format`.
            def handler(event, format):
                d = event.match.groupdict() if hasattr(event, 'match') else event.serialize()
                if filter_ and 'message' in d:
                    if filter_.match(d['message']):
                        return
                # only munge names of users actually present in the channel
                if self.cancel_highlight and 'username' in d and d['username'] in self.factory.client.users:
                    d['username'] = self.mangle_username(d['username'])
                line = self.format(format, **d)
                self.factory.irc_relay(line)
            self.register(lambda e: handler(e, format), event_type, *a, **k)

        if self.game_chat_enabled:
            if self.game_chat_private:
                try:
                    filter_ = re.compile(self.game_chat_private)
                    register(PlayerChat, self.game_chat_format, filter_=filter_)
                except:
                    self.console("plugin.irc.game_chat_private must be a valid regex")
                    register(PlayerChat, self.game_chat_format)
            else:
                register(PlayerChat, self.game_chat_format)

        if self.game_join_enabled:
            register(PlayerJoin, self.game_join_format)

        if self.game_quit_enabled:
            register(PlayerQuit, self.game_quit_format)

        if self.game_death_enabled:
            def handler(event):
                # death messages can name both victim and killer
                d = event.serialize()
                for k in 'username', 'killer':
                    if k in d and d[k] and d[k] in self.factory.client.users:
                        d[k] = self.mangle_username(d[k])
                text = event.get_text(**d)
                line = self.format(self.game_death_format, text=text)
                self.factory.irc_relay(line)
            self.register(handler, PlayerDeath)

        # skip [Server] messages when our own relay uses `say` (avoids echo)
        if self.game_server_message_enabled and not (self.irc_chat_enabled and self.irc_chat_command.startswith('say ')):
            register(ServerOutput, self.game_server_message_format, pattern=r'\[(?:Server|SERVER)\] (?P<message>.+)')

        if self.game_me_enabled:
            register(ServerOutput, self.game_me_format, pattern=r'\* (?P<username>[A-Za-z0-9_]{1,16}) (?P<message>.+)')

        if self.irc_chat_enabled:
            self.register(self.handle_players, StatPlayers)

    def teardown(self):
        # Stop reconnecting and say goodbye.
        self.factory.reconnect = False
        if self.factory.client:
            self.factory.client.quit("Plugin unloading.")

    def mangle_username(self, username):
        # Optionally munge in-game names so they don't highlight IRC users.
        if not self.cancel_highlight:
            return username
        elif self.cancel_highlight == "insert":
            return username[:-1] + self.cancel_highlight_str + username[-1:]
        else:
            return self.cancel_highlight_str + username[1:]

    def format(self, format, **data):
        # Render a relay line; "left, | right" formats are columnised with
        # the left part right-justified to column_width.
        if self.game_columns:
            f = unicode(format).split(',', 1)
            f[0] = f[0].format(**data)
            if len(f) == 2:
                f[0] = f[0].rjust(self.column_width)
                f[1] = f[1].format(**data)
            return ''.join(f)
        else:
            return format.format(**data)

    def handle_starting(self, event):
        self.factory.irc_relay(self.format(self.game_status_format, what="starting"))

    def handle_stopping(self, event):
        self.factory.irc_relay(self.format(self.game_status_format, what="stopping"))

    def handle_players(self, event):
        self.players = sorted(event.players)

    def irc_message(self, user, message):
        # IRC chat -> game.
        if self.irc_chat_enabled:
            self.send_format(self.irc_chat_command, nickname=user, message=message)

    def irc_action(self, user, message):
        # IRC /me -> game.
        if self.irc_chat_enabled:
            self.console("{} {}".format(user, message))
            self.send_format(self.irc_action_command, nickname=user, message=message)
########NEW FILE########
__FILENAME__ = log
import time
import gzip
import os
import re
from mk2.plugins import Plugin
from mk2.events import Console, ServerStopped, ServerStopping, ServerOutput
class Log(Plugin):
    """Accumulate console output and write it to an (optionally gzipped)
    log file when the server stops."""
    gzip = Plugin.Property(default=True)
    path = Plugin.Property(default="logs/server-{timestamp}-{status}.log.gz")
    vanilla = Plugin.Property(default=False)

    # accumulated log text (persisted across restarts via `restore`)
    log = u""
    # why the server stopped; used in the output filename
    reason = "unknown"
    time_re = re.compile(r'(?:\d{2}:\d{2}:\d{2}) (.*)')
    restore = ('log',)

    def setup(self):
        if self.vanilla:
            self.register(self.vanilla_logger, ServerOutput, pattern='.*')
        else:
            self.register(self.logger, Console)
        self.register(self.shutdown, ServerStopped)
        self.register(self.pre_shutdown, ServerStopping)

    def vanilla_logger(self, event):
        # Re-stamp raw server output with the event's own timestamp.
        m = self.time_re.match(event.line)
        if m:
            self.log += u"{0} {1}\n".format(event.time, m.group(1))
        else:
            self.log += u"{0}\n".format(event.line)

    def logger(self, event):
        self.log += u"{0}\n".format(event.value())

    def pre_shutdown(self, event):
        # Remember the stop reason before the server actually goes down.
        self.reason = event.reason

    def shutdown(self, event):
        # Write the accumulated log to disk and reset the buffer.
        reason = self.reason
        if reason == None:
            reason = "ok"
        timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.gmtime())
        path = self.path.format(timestamp=timestamp, name=self.parent.name, status=reason)
        if not os.path.exists(os.path.dirname(path)):
            try:
                os.makedirs(os.path.dirname(path))
            except IOError:
                self.console("Warning: {0} does't exist and I can't create it".format(os.path.dirname(path)),
                             kind='error')
                return
        if self.gzip:
            f = gzip.open(path, 'wb')
        else:
            f = open(path, 'w')
        f.write(self.log.encode('utf8'))
        f.close()
        self.console("server.log written to %s" % os.path.realpath(path))
        self.log = ""
########NEW FILE########
__FILENAME__ = mcbouncer
from twisted.python import log
import urllib
import json
from twisted.web.client import HTTPClientFactory, getPage
HTTPClientFactory.noisy = False
from mk2.plugins import Plugin
from mk2.events import ServerOutput
class BouncerAPI:
    """Minimal async client for the MCBouncer HTTP API.

    Attribute access for any name in `methods` returns a function that
    performs GET {api_base}/{method}/{api_key}/{arg1}/{arg2}/...
    Pass callback= to receive the JSON-decoded response.
    """
    methods = ['addBan', 'removeBan', 'getBanReason', 'getIPBanReason', 'updateUser']

    def __init__(self, api_base, api_key, errback):
        self.api_key = api_key
        self.api_base = api_base
        self.errback = errback

    def __getattr__(self, method):
        if not method in self.methods:
            raise AttributeError
        def inner(*args, **kwargs):
            # quote each path segment; "" as safe-chars also quotes '/'
            args = [urllib.quote(a.encode('utf8'), "") for a in args]
            callback = kwargs.get('callback', None)
            addr = '/'.join([self.api_base, method, self.api_key] + args)
            deferred = getPage(addr)
            if callback:
                deferred.addCallback(lambda d: callback(json.loads(str(d))))
            deferred.addErrback(self.errback)
        # NOTE(review): inner returns None, not the deferred -- callers can't
        # chain on the request
        return inner
class MCBouncer(Plugin):
    """Sync server bans/pardons/logins with an MCBouncer instance."""
    api_base = Plugin.Property(default='http://mcbouncer.com/api')
    api_key = Plugin.Property(default=None)
    reason = Plugin.Property(default="Banned by an operator")
    proxy_mode = Plugin.Property(default=False)

    def setup(self):
        # Watch console output for login/ban/pardon lines.
        self.bouncer = BouncerAPI(self.api_base, self.api_key, self.on_error)
        self.register(self.on_login, ServerOutput, pattern='([A-Za-z0-9_]{1,16})\[/([0-9\.]+):\d+\] logged in with entity id .+')
        self.register(self.on_ban, ServerOutput, pattern='\[([A-Za-z0-9_]{1,16}): Banned player ([A-Za-z0-9_]{1,16})\]')
        self.register(self.on_ban, ServerOutput, pattern='Banned player ([A-Za-z0-9_]{1,16})')
        self.register(self.on_pardon, ServerOutput, pattern='\[[A-Za-z0-9_]{1,16}: Unbanned player ([A-Za-z0-9_]{1,16})\]')
        self.register(self.on_pardon, ServerOutput, pattern='Unbanned player ([A-Za-z0-9_]{1,16})')

    def on_error(self, error):
        self.console("Couldn't contact mcbouncer! %s" % error.getErrorMessage())

    def on_ban(self, event):
        # Two capture groups means the pattern included the issuer.
        g = event.match.groups()
        player = g[-1]
        issuer = g[0] if len(g) == 2 else 'console'
        o = self.bouncer.addBan(issuer, player, self.reason)

    def on_pardon(self, event):
        g = event.match.groups()
        self.bouncer.removeBan(g[0])

    def on_login(self, event):
        # Check ban status on join; in proxy mode, skip IP bookkeeping
        # (the proxy's address would be recorded instead of the player's).
        g = event.match.groups()
        self.bouncer.getBanReason(g[0], callback=lambda d: self.ban_reason(g[0], d))
        if not self.proxy_mode:
            self.bouncer.updateUser(g[0], g[1])
            self.bouncer.getIPBanReason(g[1], callback=lambda d: self.ip_ban_reason(g[0], d))

    def ban_reason(self, user, details):
        if details['is_banned']:
            self.send('kick %s Banned: %s' % (user, details['reason']))

    def ip_ban_reason(self, user, details):
        if details['is_banned']:
            self.send('kick %s Banned: %s' % (user, details['reason']))
########NEW FILE########
__FILENAME__ = monitor
from mk2.plugins import Plugin
from mk2.events import ServerOutput, StatPlayerCount, ServerStop, ServerEvent, Event
class Check(object):
    """A single watchdog condition for Monitor.

    The owning Monitor sets `alive` True whenever evidence of life arrives;
    step() (called once a minute) consumes that flag and counts consecutive
    minutes without it, warning at `warn` minutes and restarting the server
    at `timeout` minutes.
    """
    alive = True
    timeout = 0
    time = 0
    warn = 0

    def __init__(self, parent, **kw):
        self.dispatch = parent.dispatch
        self.console = parent.console
        # extra keywords (name, message, warning, event, stop_reason, ...)
        # become attributes
        for k, v in kw.items():
            setattr(self, k, v)

    def check(self):
        # Consume the alive flag: True means we saw life since last step.
        if self.alive:
            self.alive = False
            return True
        return False

    def step(self):
        if self.check():
            return
        self.time += 1
        if self.timeout and self.time == self.timeout:
            # deadline hit: announce and trigger a restart
            timeout = "{0} minutes".format(self.timeout)
            self.console("{0} -- restarting.".format(self.message.format(timeout=timeout)))
            self.dispatch(ServerEvent(cause="server/error/" + self.event[0],
                                      data="REBOOTING SERVER: " + self.event[1].format(timeout=timeout),
                                      priority=1))
            self.dispatch(ServerStop(reason=self.stop_reason, respawn=True))
        elif self.warn and self.time == self.warn:
            # warning threshold hit: announce and emit a warning event
            if self.timeout:
                self.console("{0} -- auto restart in {1} minutes".format(self.warning, self.timeout - self.time))
            else:
                self.console(self.warning)
            time = "{0} minutes".format(self.warn)
            self.dispatch(ServerEvent(cause="server/warning/" + self.event[0],
                                      data="WARNING: " + self.event[1].format(timeout=time),
                                      priority=1))
        else:
            # in-between minutes: console-only reminder
            if self.timeout:
                self.console("{0} -- auto restart in {1} minutes".format(self.warning, self.timeout - self.time))
            else:
                self.console(self.warning)

    def reset(self):
        # Mark alive and restart the countdown.
        self.alive = True
        self.time = 0
class Monitor(Plugin):
    """Watchdog plugin: detect crashes/hangs, out-of-memory errors, dead
    ping responses and long-empty servers, restarting when a check trips."""
    crash_enabled = Plugin.Property(default=True)
    crash_timeout = Plugin.Property(default=3)
    crash_warn = Plugin.Property(default=0)
    crash_unknown_cmd_message = Plugin.Property(default="Unknown command.*")
    crash_check_command = Plugin.Property(default="")

    oom_enabled = Plugin.Property(default=True)

    ping_enabled = Plugin.Property(default=True)
    ping_timeout = Plugin.Property(default=3)
    ping_warn = Plugin.Property(default=0)

    pcount_enabled = Plugin.Property(default=False)
    pcount_timeout = Plugin.Property(default=3)
    pcount_warn = Plugin.Property(default=0)

    def setup(self):
        # Build the enabled Check objects; do_step records whether any
        # check needs the once-a-minute step task.
        do_step = False
        self.checks = {}
        if self.oom_enabled:
            self.register(self.handle_oom, ServerOutput, level='SEVERE', pattern='java\.lang\.OutOfMemoryError.*')

        if self.crash_enabled:
            do_step = True
            self.checks['crash'] = Check(self, name="crash",
                                         timeout=self.crash_timeout,
                                         warn=self.crash_warn,
                                         message="server has crashed",
                                         warning="server might have crashed",
                                         event=("hang", "server didn't respond for {timeout}"),
                                         stop_reason="crashed")

        if self.ping_enabled:
            self.register(self.handle_ping, StatPlayerCount)
            do_step = True
            self.checks['ping'] = Check(self, name="ping",
                                        timeout=self.ping_timeout,
                                        warn=self.ping_warn,
                                        message="server is not accepting connections",
                                        warning="server might have stopped accepting connections",
                                        event=("ping", "server didn't respond for {timeout}"),
                                        stop_reason="not accepting connections")

        if self.pcount_enabled:
            self.register(self.handle_pcount, StatPlayerCount)
            do_step = True
            self.checks['pcount'] = Check(self, name="pcount",
                                          timeout=self.pcount_timeout,
                                          warn=self.pcount_warn,
                                          message="server has had 0 players for {timeout}, something is wrong",
                                          warning="server has 0 players, might be inaccessible",
                                          event=("player-count", "server had 0 players for {timeout}"),
                                          stop_reason="zero players")

        self.do_step = do_step

    def server_started(self, event):
        self.reset_counts()
        if self.do_step:
            self.repeating_task(self.step, 60)

    def load_state(self, state):
        # Restored from a previous mark2 session: behave as if just started.
        self.server_started(None)

    def step(self, *a):
        # Once a minute: advance every check, then probe console liveness by
        # sending a command that should provoke "Unknown command" output.
        for c in self.checks.values():
            c.step()
        if self.crash_enabled:
            self.register(self.handle_crash_ok, ServerOutput,
                          pattern=self.crash_unknown_cmd_message,
                          track=False)
            self.send(self.crash_check_command) # Blank command to trigger 'Unknown command'

    def reset_counts(self):
        for c in self.checks.values():
            c.reset()

    ### handlers

    # crash
    def handle_crash_ok(self, event):
        # Console responded: server is alive; consume and unregister.
        self.checks["crash"].reset()
        return Event.EAT | Event.UNREGISTER

    # out of memory
    def handle_oom(self, event):
        self.console('server out of memory, restarting...')
        self.dispatch(ServerEvent(cause='server/error/oom',
                                  data="server ran out of memory",
                                  priority=1))
        self.dispatch(ServerStop(reason='out of memory', respawn=True))

    # ping
    def handle_ping(self, event):
        if event.source == 'ping':
            self.checks["ping"].reset()

    # pcount
    def handle_pcount(self, event):
        if event.players_current > 0:
            self.checks["pcount"].reset()
        else:
            self.checks["pcount"].alive = False
########NEW FILE########
__FILENAME__ = mumble
import re
import struct
from twisted.application.internet import UDPServer
from twisted.internet import reactor, defer
from twisted.internet.defer import TimeoutError
from twisted.internet.protocol import DatagramProtocol
from mk2.plugins import Plugin
from mk2.events import ServerOutput
class MumbleProtocol(DatagramProtocol):
    """UDP client that pings a Murmur (Mumble) server for status."""
    buff = ""

    def __init__(self, parent, host, port):
        self.parent = parent
        self.host = host
        self.port = port

    def ping(self, *a):
        # Send the 12-byte all-zero ping datagram.
        self.transport.write('\x00'*12, addr=(self.host, self.port))

    def datagramReceived(self, data, (host, port)):
        # Python 2 tuple-unpacking parameter syntax (not valid in Python 3).
        # Accumulate until we have a full 24-byte response, sanity-check the
        # 12-byte header, then unpack three big-endian uint32 stats.
        self.buff += data
        if len(self.buff) < 24:
            return
        if not self.buff.startswith('\x00\x01\x02\x03' + '\x00' * 8):
            self.parent.console("the mumble server gave us crazy data!")
            self.buff = ""
            return
        d = dict(zip(('users_current', 'users_max', 'bandwidth'), struct.unpack('>III', self.buff[12:24])))
        self.buff = self.buff[24:]
        self.parent.got_response(d)
class Mumble(Plugin):
    """Answer an in-game `!mumble` trigger with the voice server's status."""
    host = Plugin.Property(required=True)
    port = Plugin.Property(default=64738)
    timeout = Plugin.Property(default=10)
    trigger = Plugin.Property(default="!mumble")

    command_up = Plugin.Property(default='''
        msg {username} &2host: &a{host}
        msg {username} &2port: &a{port}
        msg {username} &2status: &aup! users: {users_current}/{users_max}
        '''.strip())

    command_down = Plugin.Property(default='''
        msg {username} &2host: &a{host}
        msg {username} &2port: &a{port}
        msg {username} &2status: &adown.
        '''.strip())

    def setup(self):
        # `users` holds one deferred per player awaiting a status reply.
        self.users = []
        self.protocol = MumbleProtocol(self, self.host, self.port)
        self.register(self.handle_trigger, ServerOutput, pattern="<([A-Za-z0-9_]{1,16})> "+re.escape(self.trigger))
        reactor.listenUDP(0, self.protocol)

    def teardown(self):
        self.protocol.transport.loseConnection()

    def handle_trigger(self, event):
        # Queue a reply for this player and ping the mumble server.
        username = event.match.group(1).encode('utf8')
        d = defer.Deferred()
        d.addCallback(lambda d: self.send_response(self.command_up, username=username, **d))
        d.addErrback (lambda d: self.send_response(self.command_down, username=username))
        #add a timeout
        self.delayed_task(self.got_timeout, self.timeout)
        self.users.append(d)
        self.protocol.ping()

    def got_response(self, d):
        # Server answered: resolve every waiting request with the stats dict.
        for u in self.users:
            u.callback(d)
        self.users = []
        self.stop_tasks()

    def got_timeout(self, e):
        # No answer in time: fail every waiting request.
        for u in self.users:
            u.errback(TimeoutError())
        self.users = []
        self.stop_tasks()

    def send_response(self, command, **d):
        self.send_format(command, host=self.host, port=self.port, **d)
########NEW FILE########
__FILENAME__ = push
from mk2.plugins import Plugin
from mk2.events import ServerEvent, EventPriority
from twisted.internet import reactor
from twisted.internet.defer import Deferred, DeferredList
from twisted.mail import smtp, relaymanager
from twisted.web.client import getPage
from cStringIO import StringIO
from email.mime.text import MIMEText
from urllib import urlencode
import re
_endpoint = {}
_plugin = None
def endpoint(s):
    """Class decorator: register `cls` in the scheme -> endpoint-class table."""
    def _wrapper(cls):
        _endpoint[s] = cls
        cls.scheme = s
        return cls
    return _wrapper
class Endpoint(object):
    """Base class for push destinations.

    `causes` and `priority` are per-endpoint filters ("*" = accept all);
    they may be overridden from the config line via setattr.
    """
    causes = "*"
    priority = "*"

    def __init__(self, plugin, uri):
        pass

    def push(self, event):
        pass

    def filter(self, event):
        # True if this endpoint wants the event.
        if self.priority != "*":
            if int(self.priority) > event.priority:
                return False
        if self.causes != "*":
            for cause in self.causes.split(","):
                if cause == event.cause:
                    return True
                # a trailing slash matches a whole cause subtree
                if cause.endswith("/") and event.cause.startswith(cause):
                    return True
            return False
        return True

    def wait(self, defer):
        # Track an in-flight delivery so shutdown can wait for it to drain.
        def done_waiting(a):
            _plugin.pending.remove(defer)
            return a
        _plugin.pending.add(defer)
        defer.addBoth(done_waiting)

    def __str__(self):
        return "<{0} {1} causes={2} priority={3}>".format(self.__class__.__name__,
                                                          self.url,
                                                          self.causes, self.priority)
class HTTPEndpoint(Endpoint):
    """Endpoint delivered as an HTTP form POST.

    Subclasses define `endpoint` (the URL) and fill `postdata` in setup().
    """
    method = "POST"
    postdata = {}

    def push(self, event):
        self.setup(event)
        defer = getPage(self.endpoint,
                        method=self.method,
                        postdata=urlencode(self.postdata),
                        headers={"Content-type": "application/x-www-form-urlencoded"})
        self.wait(defer)
@endpoint("nma")
class NMAEndpoint(HTTPEndpoint):
    """Notify My Android; the URI remainder is the API key."""
    endpoint = "https://www.notifymyandroid.com/publicapi/notify"
    method = "POST"

    def __init__(self, plugin, url):
        self.postdata = {
            "apikey": url,
            "application": "mark2: {0}".format(plugin.parent.server_name),
        }

    def setup(self, event):
        # Fill per-event fields just before sending.
        self.postdata.update(priority=event.priority,
                             event=event.friendly,
                             description=event.data)
@endpoint("prowl")
class ProwlEndpoint(HTTPEndpoint):
    """Prowl (iOS push); the URI remainder is the API key."""
    endpoint = "https://api.prowlapp.com/publicapi/add"
    method = "POST"

    def __init__(self, plugin, url):
        self.postdata = {
            "apikey": url,
            "application": "mark2: {0}".format(plugin.parent.server_name),
        }

    def setup(self, event):
        # Fill per-event fields just before sending.
        self.postdata.update(priority=event.priority,
                             event=event.friendly,
                             description=event.data)
@endpoint("pushover")
class PushoverEndpoint(HTTPEndpoint):
    """Pushover; the URI remainder is the user key, plus a configured app
    token. Optional `device=` from the config line targets one device."""
    endpoint = "https://api.pushover.net/1/messages.json"
    method = "POST"
    device = None

    def __init__(self, plugin, url):
        if not plugin.pushover_token:
            raise Exception("pushover token is not configured")
        self.postdata = {
            "user": url,
            "token": plugin.pushover_token,
        }

    def setup(self, event):
        # Pushover priorities start at -1; clamp lower values.
        self.postdata.update(priority=max(-1, event.priority),
                             title=event.friendly,
                             message=event.data)
        if self.device:
            self.postdata["device"] = self.device
@endpoint("smtp")
class SMTPEndpoint(Endpoint):
    """Send the event as an email, via a configured smarthost or, failing
    that, directly to the recipient domain's MX."""
    def __init__(self, plugin, url):
        self.smtp_host, self.smtp_user, self.smtp_password =\
            plugin.email_smtp_server, plugin.email_smtp_user, plugin.email_smtp_password
        self.smtp_security = plugin.email_smtp_security
        # "host:port" overrides the default port 25
        if ':' in self.smtp_host:
            host = self.smtp_host.split(':')
            self.smtp_host, self.smtp_port = host[0], int(host[1])
        else:
            self.smtp_port = 25
        self.from_addr = plugin.email_address
        self.from_name = "mark2: {0}".format(plugin.parent.server_name)
        self.to_addr = url

    def getMailExchange(self, host):
        # Resolve the recipient domain's MX record name.
        mxc = relaymanager.MXCalculator()
        def cbMX(mxRecord):
            return str(mxRecord.name)
        return mxc.getMX(host).addCallback(cbMX)

    def sendEmail(self, from_, from_name, to, msg_, subject=""):
        def send(host, user=None, pw=None, require_security=False):
            msg = MIMEText(msg_)
            msg['From'] = "\"{0}\" <{1}>".format(from_name, from_)
            msg['To'] = to
            msg['Subject'] = subject
            msgfile = StringIO(msg.as_string())
            d = Deferred()
            factory = smtp.ESMTPSenderFactory(user, pw, from_, to, msgfile, d,
                                              requireAuthentication=(user is not None),
                                              requireTransportSecurity=require_security)
            reactor.connectTCP(host, self.smtp_port, factory)
            self.wait(d)
            return d
        if self.smtp_host:
            # configured smarthost
            return send(self.smtp_host, self.smtp_user, self.smtp_password, self.smtp_security)
        else:
            # direct delivery: look up the MX, then send unauthenticated
            return self.getMailExchange(to.split("@")[1]).addCallback(send)

    def push(self, event):
        defer = self.sendEmail(self.from_addr, self.from_name, self.to_addr, event.data, event.friendly)
        self.wait(defer)
class Push(Plugin):
    """Fan ServerEvents out to configured notification endpoints."""
    endpoints = Plugin.Property(default="")
    email_address = Plugin.Property(default="mark2@fantastic.minecraft.server")
    email_smtp_server = Plugin.Property(default="")
    email_smtp_user = Plugin.Property(default="")
    email_smtp_password = Plugin.Property(default="")
    email_smtp_security = Plugin.Property(default=False)
    pushover_token = Plugin.Property(default="")

    def setup(self):
        # Module-level `_plugin` lets Endpoint.wait() track pending sends.
        global _plugin
        _plugin = self
        self.pending = set()
        self.configure_endpoints()
        self.register(self.send_alert, ServerEvent, priority=EventPriority.MONITOR)
        self.eventid = reactor.addSystemEventTrigger('before', 'shutdown', self.finish)

    def teardown(self):
        reactor.removeSystemEventTrigger(self.eventid)

    def finish(self):
        # Let in-flight notifications drain before the reactor stops.
        return DeferredList(list(self.pending))

    def configure_endpoints(self):
        # Parse one endpoint per line: "scheme:uri key=value key=value ...".
        eps = self.endpoints.split("\n")
        self._endpoints = []
        for ep in eps:
            if not ep.strip():
                continue
            try:
                bits = re.split("\s+", ep)
                url, md = bits[0], bits[1:]
                scheme, ee = re.split(":(?://)?", url)
                if scheme not in _endpoint:
                    self.console("undefined endpoint requested: {0}".format(url))
                    continue
                cls = _endpoint[scheme]
                inst = cls(self, ee)
                inst.url = url
                # remaining tokens are attribute overrides (causes=, priority=, ...)
                for k, v in [d.split("=") for d in md]:
                    setattr(inst, k, v)
                self._endpoints.append(inst)
            except Exception as e:
                self.console("push: ERROR ({0}) adding endpoint: {1}".format(e, ep))

    def send_alert(self, event):
        for ep in self._endpoints:
            if ep.filter(event):
                ep.push(event)
########NEW FILE########
__FILENAME__ = redis
import json
from twisted.internet import protocol
from twisted.internet import reactor
from mk2.plugins import Plugin
from mk2 import events
class RedisProtocol(protocol.Protocol):
    """Write-only Redis client: just enough RESP encoding to PUBLISH."""
    def __init__(self, parent):
        self.parent = parent

    def request(self, *args):
        self.transport.write(self.encode_request(args))

    def encode_request(self, args):
        # Encode a command as a RESP multi-bulk frame:
        # *<count>\r\n then $<len>\r\n<payload>\r\n per argument.
        lines = []
        lines.append('*' + str(len(args)))
        for a in args:
            if isinstance(a, unicode):
                a = a.encode('utf8')
            lines.append('$' + str(len(a)))
            lines.append(a)
        lines.append('')
        return '\r\n'.join(lines)
class RedisFactory(protocol.ReconnectingClientFactory):
    """Maintains the Redis connection and publishes relay payloads."""
    def __init__(self, parent, channel):
        self.parent = parent
        self.channel = channel

    def buildProtocol(self, addr):
        self.protocol = RedisProtocol(self.parent)
        return self.protocol

    def relay(self, data, channel=None):
        # Publish `data` as JSON on `channel` (default: the configured one).
        # NOTE(review): raises AttributeError if called before the first
        # connection (self.protocol unset) -- confirm event ordering.
        channel = channel or self.channel
        self.protocol.request("PUBLISH", channel, json.dumps(data))
class Redis(Plugin):
    """Publish selected game events to a Redis pub/sub channel as JSON."""
    host = Plugin.Property(default="localhost")
    port = Plugin.Property(default=6379)
    channel = Plugin.Property(default="mark2-{server}")
    relay_events = Plugin.Property(default="StatPlayers,PlayerJoin,PlayerQuit,PlayerChat,PlayerDeath")

    def setup(self):
        channel = self.channel.format(server=self.parent.server_name)
        self.factory = RedisFactory(self, channel)
        reactor.connectTCP(self.host, self.port, self.factory)
        # Bind each configured event name, warning on unknown ones.
        for ev in self.relay_events.split(','):
            ty = events.get_by_name(ev.strip())
            if ty:
                self.register(self.on_event, ty)
            else:
                self.console("redis: couldn't bind to event: {0}".format(ev))

    def on_event(self, event):
        self.factory.relay(event.serialize())
########NEW FILE########
__FILENAME__ = rss
import feedparser
import re
from twisted.web.client import getPage
from mk2.plugins import Plugin
reddit_link = re.compile('http://(?:www\.)?redd(?:\.it/|it\.com/(?:tb|(?:r/[\w\.]+/)?comments)/)(\w+)(/.+/)?(\w{7})?')
#Many thanks to Adam Wight for this
class FeedPoller(object):
    """Yields only feed entries newer than the last one previously seen.

    The very first parse yields nothing (it only records the newest id), so
    a freshly started poller does not replay old entries.
    NOTE(review): if `last_seen_id` drops out of the feed entirely, nothing
    is yielded until the ids realign -- confirm this is acceptable.
    """
    last_seen_id = None

    def parse(self, data):
        result = feedparser.parse(data)
        # entries arrive newest-first; reverse to walk oldest -> newest
        result.entries.reverse()
        skipping = True
        for entry in result.entries:
            if (self.last_seen_id == entry.id):
                skipping = False
            elif not skipping:
                yield entry
        if result.entries:
            self.last_seen_id = result.entries[-1].id
class RSS(Plugin):
    """Poll an RSS/Atom feed and announce new entries in-game."""
    url = Plugin.Property(default="")
    check_interval = Plugin.Property(default=60)
    command = Plugin.Property(default="say {link} - {title}")

    def setup(self):
        self.poller = FeedPoller()

    def server_started(self, event):
        if self.url != "":
            self.repeating_task(self.check_feeds, self.check_interval)

    def check_feeds(self, event):
        d = getPage(self.url)
        d.addCallback(self.update_feeds)

    def update_feeds(self, data):
        for entry in self.poller.parse(data):
            # shorten reddit links to redd.it form
            m = reddit_link.match(entry['link'])
            if m:
                entry['link'] = "http://redd.it/" + m.group(1)
            self.send_format(self.command, **entry)
########NEW FILE########
__FILENAME__ = save
from mk2.plugins import Plugin
from mk2.events import Hook
class Save(Plugin):
    """Provide a ~save command that (optionally after a warning delay)
    runs save-all on the server."""
    warn_message = Plugin.Property(default="WARNING: saving map in {delay}.")
    message = Plugin.Property(default="MAP IS SAVING.")

    def setup(self):
        self.register(self.save, Hook, public=True, name='save', doc='save the map')

    def warn(self, delay):
        # In-game countdown announcement.
        self.send_format("say %s" % self.warn_message, delay=delay)

    def save(self, event):
        # With args (e.g. "~save 30s"), chain warnings before the real save.
        action = self.save_real
        if event.args:
            warn_length, action = self.action_chain(event.args, self.warn, action)
        action()
        event.handled = True

    def save_real(self):
        if self.message:
            self.send('say %s' % self.message)
        self.send('save-all')
########NEW FILE########
__FILENAME__ = script
import re
import os.path
import pwd
from time import localtime
from collections import namedtuple
from twisted.internet import protocol, reactor, defer
from mk2.plugins import Plugin
from mk2 import events
time_bounds = [(0, 59), (0, 23), (1, 31), (1, 12), (1, 7)]
class ScriptEntry(object):
    """One line of scripts.txt: either '@EventName command' (fires when the
    event occurs) or a cron-style 'min hour dom mon dow command' entry."""
    event = None
    ranges = None

    def __init__(self, plugin, line):
        self.plugin = plugin
        line = line.strip()
        if line.startswith('@'):
            self.type = "event"
            event_name, command = re.match(r'^@([^\s]+)\s+(.+)$', line).groups()
            event = events.get_by_name(event_name)
            if not event:
                raise ValueError("unknown event: %s" % event_name)
            self.plugin.register(lambda e: self.execute(command), event)
        else:
            self.type = "time"
            # first five whitespace-separated fields are the cron spec
            bits = re.split(r'\s+', line, 5)
            time_spec, self.command = bits[:5], bits[5]
            self.ranges = self.parse_time(time_spec)

    def parse_time(self, time_spec):
        # Parse the five cron fields ('N', 'N-M', '*', optional '/skip')
        # into (min, max, skip) ranges against time_bounds.
        Range = namedtuple('Range', ('min', 'max', 'skip'))
        ranges = []
        for spec_i, bound_i in zip(time_spec, time_bounds):
            n, top, skip = re.match(r'^(\d{1,2}|\*)(?:-(\d{1,2}))?(?:/(\d{1,2}))?$', spec_i).groups()
            if n == '*':
                if top:
                    raise ValueError("can't use * in a range expression")
                ranges.append(Range(bound_i[0], bound_i[1], int(skip or 1)))
            else:
                ranges.append(Range(int(n), int(top or n), int(skip or 1)))
        return ranges

    def execute(self, cmd):
        # '$' prefix runs a shell command whose stdout lines are fed back in
        # as further script input; anything else is executed directly.
        execute = defer.succeed(None)
        def execute_next(fn, *a, **kw):
            execute.addCallback(lambda r: fn(*a, **kw))
            execute.addErrback(lambda f: True)
        if cmd.startswith('$'):
            cmd = cmd[1:]
            d = defer.Deferred()
            p = protocol.ProcessProtocol()
            p.outReceived = lambda d: [execute_next(self.execute_reduced, l, cmd) for l in d.split("\n")]
            p.processEnded = lambda r: d.callback(None)
            reactor.spawnProcess(p, self.plugin.shell, [self.plugin.shell, '-c', cmd])
            d.addCallback(lambda r: execute)
            return d
        else:
            return self.execute_reduced(cmd)

    @defer.inlineCallbacks
    def execute_reduced(self, cmd, source='script'):
        # '~' -> mark2 command, '/' -> server console, '#' -> chat message.
        if cmd.startswith('~'):
            handled = yield self.plugin.dispatch(events.Hook(line=cmd))
            if not handled:
                self.plugin.console("unknown command in script: %s" % cmd)
        elif cmd.startswith('/'):
            self.plugin.send(cmd[1:])
        elif cmd.startswith('#'):
            self.plugin.console("#{0}".format(cmd[1:]), user=source, source="user")
        elif cmd:
            self.plugin.console("couldn't understand script input: %s" % cmd)

    def step(self):
        # Fire the command iff the current minute matches every cron field.
        if self.type != 'time':
            return
        time = localtime()
        time = [time.tm_min, time.tm_hour, time.tm_mday, time.tm_mon, time.tm_wday + 1]
        for r, t in zip(self.ranges, time):
            if not t in range(r.min, r.max + 1, r.skip):
                return
        self.execute(self.command)
class Script(Plugin):
    """Load scripts.txt and run its event- and time-triggered entries."""
    path = Plugin.Property(default='scripts.txt')
    shell = Plugin.Property(default='/bin/sh')

    def setup(self):
        self.scripts = []
        if not os.path.isfile(self.path):
            return
        with open(self.path, 'r') as f:
            for line in f:
                line = line.strip()
                # skip comments and blank lines
                if line.startswith('#') or line == '':
                    continue
                try:
                    self.scripts.append(ScriptEntry(self, line))
                except Exception as e:
                    self.console('invalid script line: %s' % line, kind='error')
                    self.console(str(e))
        # If any time entry exists, align the minute-ticker to the next
        # wall-clock minute before starting it.
        for script in self.scripts:
            if script.type == 'time':
                self.delayed_task(lambda a: self.repeating_task(self.step, 60, now=True),
                                  max(0, 60 - localtime().tm_sec) % 60 + 1)
                break

    def step(self, event):
        for script in self.scripts:
            script.step()

    def server_stopping(self, event):
        pass # don't cancel tasks
########NEW FILE########
__FILENAME__ = shutdown
from mk2.plugins import Plugin
from mk2.events import Hook, ServerStop, StatPlayers, StatPlayerCount
class Shutdown(Plugin):
    """Provides the ~stop, ~restart, ~kill and ~cancel console commands.

    Stops/restarts may be delayed (e.g. `~restart 30s`), in which case
    players are warned first. Pending shutdowns can be cancelled with
    ~cancel; a ~cancel issued while nothing is pending pre-emptively
    cancels the next shutdown request instead.
    """
    # Broadcast templates; {delay}/{reason}/{player}/{message} are filled in.
    restart_warn_message = Plugin.Property(default="WARNING: planned restart in {delay}.")
    stop_warn_message = Plugin.Property(default="WARNING: server going down for planned maintainence in {delay}.")
    restart_message = Plugin.Property(default="Server restarting.")
    stop_message = Plugin.Property(default="Server going down for maintainence.")
    restart_cancel_message = Plugin.Property(default="WARNING: planned restart cancelled.")
    restart_cancel_reason = Plugin.Property(default="WARNING: planned restart cancelled ({reason}).")
    stop_cancel_message = Plugin.Property(default="WARNING: planned maintenance cancelled.")
    stop_cancel_reason = Plugin.Property(default="WARNING: planned maintenance cancelled ({reason}).")
    kick_command = Plugin.Property(default="kick {player} {message}")
    # 'all' kicks every known player individually; 'once' runs kick_command once.
    kick_mode = Plugin.Property(default="all")
    failsafe = None
    # Number of ~cancel invocations made while nothing was pending; each one
    # swallows the next ~stop/~restart request.
    cancel_preempt = 0
    restart_on_empty = False
    # Attributes preserved across plugin reloads.
    restore = ('cancel_preempt', 'cancel', 'restart_on_empty')
    def setup(self):
        self.players = []
        # List of (kind, cancel_callback) pairs for in-flight delayed shutdowns.
        self.cancel = []
        self.register(self.handle_players, StatPlayers)
        self.register(self.handle_player_count, StatPlayerCount)
        self.register(self.h_stop, Hook, public=True, name="stop", doc='cleanly stop the server. specify a delay like `~stop 2m`')
        self.register(self.h_restart, Hook, public=True, name="restart", doc='cleanly restart the server. specify a delay like `~restart 30s`')
        self.register(self.h_restart_empty, Hook, public=True, name="restart-empty",doc='restart the server next time it has 0 players')
        self.register(self.h_kill, Hook, public=True, name="kill", doc='kill the server')
        self.register(self.h_kill_restart, Hook, public=True, name="kill-restart", doc='kill the server and bring it back up')
        self.register(self.h_cancel, Hook, public=True, name="cancel", doc='cancel an upcoming shutdown or restart')
    def server_started(self, event):
        # A fresh server invalidates deferred restarts and pre-emptive cancels.
        self.restart_on_empty = False
        self.cancel_preempt = 0
    def warn_restart(self, delay):
        self.send_format("say %s" % self.restart_warn_message, delay=delay)
    def warn_stop(self, delay):
        self.send_format("say %s" % self.stop_warn_message, delay=delay)
    def warn_cancel(self, reason, thing):
        # Pick the with-reason or plain template depending on what we know.
        if reason:
            message = self.restart_cancel_reason if thing == "restart" else self.stop_cancel_reason
        else:
            message = self.restart_cancel_message if thing == "restart" else self.stop_cancel_message
        self.send_format("say %s" % message, reason=reason)
    def nice_stop(self, respawn, kill):
        """Kick players (unless killing) and dispatch the ServerStop event."""
        if not kill:
            message = self.restart_message if respawn else self.stop_message
            if self.kick_mode == 'all':
                for player in self.players:
                    self.send_format(self.kick_command, player=player, message=message)
            elif self.kick_mode == 'once':
                self.send_format(self.kick_command, message=message)
        self.dispatch(ServerStop(reason='console', respawn=respawn, kill=kill))
    def handle_players(self, event):
        self.players = event.players
    def handle_player_count(self, event):
        # Trigger a deferred restart once the server empties.
        if event.players_current == 0 and self.restart_on_empty:
            self.restart_on_empty = False
            self.nice_stop(True, False)
    def cancel_something(self, reason=None):
        # Cancel the oldest pending shutdown/restart.
        thing, cancel = self.cancel.pop(0)
        cancel(reason, thing)
    def should_cancel(self):
        # Consume one pre-emptive ~cancel, if any are outstanding.
        if self.cancel_preempt:
            self.cancel_preempt -= 1
            return True
        else:
            return False
    #Hook handlers:
    def h_stop(self, event=None):
        if self.should_cancel():
            self.console("I'm not stopping because this shutdown was cancelled with ~cancel")
            return
        action = lambda: self.nice_stop(False, False)
        if event and event.args:
            # A delay was given: schedule warnings and make the stop cancellable.
            warn_length, action, cancel = self.action_chain_cancellable(event.args, self.warn_stop, action, self.warn_cancel)
            self.cancel.append(("stop", cancel))
        action()
    def h_restart(self, event=None):
        if self.should_cancel():
            self.console("I'm not restarting because this shutdown was cancelled with ~cancel")
            return
        action = lambda: self.nice_stop(True, False)
        if event and event.args:
            warn_length, action, cancel = self.action_chain_cancellable(event.args, self.warn_restart, action, self.warn_cancel)
            self.cancel.append(("restart", cancel))
        action()
    def h_restart_empty(self, event):
        if self.restart_on_empty:
            self.console("I was already going to do that")
        else:
            self.console("I will restart the next time the server empties")
        self.restart_on_empty = True
    def h_kill(self, event):
        self.nice_stop(False, True)
    def h_kill_restart(self, event):
        self.nice_stop(True, True)
    def h_cancel(self, event):
        if self.cancel:
            self.cancel_something(event.args or None)
        else:
            # Nothing pending: remember to cancel the next request instead.
            self.cancel_preempt += 1
            self.console("I will cancel the next thing")
########NEW FILE########
__FILENAME__ = su
from mk2.plugins import Plugin
from mk2.events import UserInput
class Su(Plugin):
    """Rewrites selected user console commands through a sudo-style wrapper."""
    command = Plugin.Property(default="sudo -su {user} -- {command}")
    mode = Plugin.Property(default="include")
    proc = Plugin.Property(default="ban;unban")

    def setup(self):
        self.register(self.uinput, UserInput)

    def uinput(self, event):
        # Does the input line start with one of the configured command names?
        matched = any(event.line.startswith(prefix)
                      for prefix in self.proc.split(";"))
        # 'include' mode wraps matching lines; 'exclude' mode wraps all others.
        if (self.mode == 'exclude') ^ matched:
            event.line = self.command.format(user=event.user, command=event.line)
########NEW FILE########
__FILENAME__ = trigger
import os
import re
from mk2.plugins import Plugin
from mk2.events import ServerOutput
class Trigger(Plugin):
    """Responds to in-game "!word" chat triggers with canned messages.

    Trigger definitions are read from `path`, one per line, in the form
    "name,response" (an optional leading "!" on the name is ignored). A
    name may appear on several lines; each response is sent in order.
    """
    command = Plugin.Property(default="msg {user} {message}")
    path = Plugin.Property(default="triggers.txt")
    # Class-level default kept for compatibility; setup() always rebinds an
    # instance-level dict (see below).
    triggers = {}

    def setup(self):
        # Bug fix: rebind as an instance attribute. The original mutated the
        # class-level dict, which is shared by every instance and survives
        # ~reload-plugin, so reloading duplicated every trigger response.
        self.triggers = {}
        if self.path and os.path.exists(self.path):
            # `with` guarantees the file is closed even if a line fails to parse.
            with open(self.path, 'r') as f:
                for l in f:
                    m = re.match(r'^\!?([^,]+),(.+)$', l)
                    if m:
                        name, response = m.groups()
                        self.triggers.setdefault(name, []).append(response)
        if self.triggers:
            self.register(self.trigger, ServerOutput, pattern=r'<([A-Za-z0-9_]{1,16})> \!(\w+)')

    def trigger(self, event):
        """Send every configured response for a matched "!trigger" chat line."""
        user, trigger = event.match.groups()
        if trigger in self.triggers:
            for line in self.triggers[trigger]:
                self.send(self.command.format(user=user, message=line))
########NEW FILE########
__FILENAME__ = properties
import os
import re
import shlex
import zipfile
def load(cls, *files):
    """Build a `cls` properties object by layering *files* in order.

    Each argument may be a filesystem path (silently skipped when it does
    not exist) or an open file-like object. Later files inherit from, and
    override, earlier ones. Returns None when nothing was loaded.
    """
    o = None
    for f in files:
        if isinstance(f, basestring):
            if os.path.isfile(f):
                with open(f) as fp:
                    o = cls(fp, o)
        else:
            # Bug fix: pass the accumulated object as the parent (was `0`,
            # which silently dropped inheritance for file-like arguments).
            o = cls(f, o)
    return o
def load_jar(jar, *path):
    """Return a Lang built from the first of *path* found inside the jar.

    Each candidate member name is tried in order; members missing from the
    archive are skipped. Returns None when no candidate exists.
    """
    for member in path:
        try:
            archive = zipfile.ZipFile(jar, 'r')
            lang = Lang(archive.open(member, 'r'))
            archive.close()
            return lang
        except KeyError:
            # Member not present in this jar; try the next candidate.
            pass
    return None
class Properties(dict):
    """Parser for Java .properties-style files.

    Keys map to decoded values; a parallel `types` dict remembers each
    key's inferred type ('int', 'bool' or 'string') so that empty values
    can reuse the type recorded by a parent Properties object.
    """
    def __init__(self, f, parent=None):
        # f: open file(-like) object to parse; parent: Properties to inherit from.
        dict.__init__(self)
        if parent:
            self.update(parent)
            self.types = dict(parent.types)
        else:
            self.types = {}
        # Converters from the raw string value to its typed form.
        decoder = {
            'int': int,
            'bool': lambda a: a == 'true',
            'string': lambda a: a
        }
        # Character classes from the .properties grammar.
        c_seperator = (':', '=')
        c_whitespace = (' ', '\t', '\f')
        c_escapes = ('t','n','r','f')
        c_comment = ('#','!')
        # Matches an even number of preceding backslashes, i.e. the next
        # character is NOT escaped.
        r_unescaped = '(?<!\\\\)(?:\\\\\\\\)*'
        r_whitespace = '[' + re.escape(''.join(c_whitespace)) + ']*'
        r_seperator = r_unescaped + r_whitespace + r_unescaped + '[' + re.escape(''.join(c_seperator + c_whitespace)) + ']'
        #This handles backslash escapes in keys/values
        def parse(input):
            # Decode \t \n \r \f, \uXXXX and literal pass-through escapes.
            # Returns a unicode string when a \u escape was seen, otherwise
            # a byte string (Python 2 semantics).
            token = list(input)
            out = u""
            uni = False
            while len(token) > 0:
                c = token.pop(0)
                if c == '\\':
                    try:
                        c = token.pop(0)
                        if c in c_escapes:
                            out += ('\\'+c).decode('string-escape')
                        elif c == 'u':
                            b = ""
                            for i in range(4):
                                b += token.pop(0)
                            out += unichr(int(b, 16))
                            uni = True
                        else:
                            # Unknown escape: keep the character literally.
                            out += c
                    except IndexError:
                        raise ValueError("Invalid escape sequence in input: %s" % input)
                else:
                    out += c
            if not uni:
                out = out.encode('ascii')
            return out
        d = f.read()
        #Deal with Windows / Mac OS linebreaks
        d = d.replace('\r\n','\n')
        d = d.replace('\r', '\n')
        #Strip leading whitespace
        d = re.sub('(?m)\n\s*', '\n', d)
        #Split logical lines (an escaped newline does not end a line)
        d = re.split('(?m)' + r_unescaped + '\n', d)
        for line in d:
            #Strip comments and empty lines
            if line == '' or line[0] in c_comment:
                continue
            #Strip escaped newlines
            line = re.sub('(?m)' + r_unescaped + '(\\\\\n)', '', line)
            assert not '\n' in line
            #Split into k,v
            x = re.split(r_seperator, line, maxsplit=1)
            #No seperator, parse as empty value.
            if len(x) == 1:
                k, v = x[0], ""
            else:
                k, v = x
            k = parse(k).replace('-', '_')
            v = parse(v)
            # Infer the value's type; empty values fall back to the type
            # recorded for this key by a parent (or default to string).
            if re.match('^\-?\d+$', v):
                ty = 'int'
            elif v in ('true', 'false'):
                ty = 'bool'
            elif v != '':
                ty = 'string'
            elif k in self.types:
                ty = self.types[k]
            else:
                ty = 'string'
            self.types[k] = ty
            self[k] = decoder[ty](v)
        f.close()
    def get_by_prefix(self, prefix):
        """Yield (key-suffix, value) for every key starting with `prefix`."""
        for k, v in self.iteritems():
            if k.startswith(prefix):
                yield k[len(prefix):], v
class Mark2Properties(Properties):
def get_plugins(self):
plugins = {}
enabled = []
for k, v in self.iteritems():
m = re.match('^plugin\.(.+)\.(.+)$', k)
if m:
plugin, k2 = m.groups()
if plugin not in plugins:
plugins[plugin] = {}
if k2 == 'enabled':
if v:
enabled.append(plugin)
else:
plugins[plugin][k2] = v
return [(n, plugins[n]) for n in sorted(enabled)]
def get_service(self, service):
return self.get_by_prefix('mark2.service.{0}.'.format(service))
def get_jvm_options(self):
options = []
for k, v in self.iteritems():
m = re.match('^java\.cli\.([^\.]+)\.(.+)$', k)
if m:
a, b = m.groups()
if a == 'D':
options.append('-D%s=%s' % (b, v))
elif a == 'X':
options.append('-X%s%s' % (b, v))
elif a == 'XX':
if v in (True, False):
options.append('-XX:%s%s' % ('+' if v else '-', b))
else:
options.append('-XX:%s=%s' % (b, v))
else:
print "Unknown JVM option type: %s" % a
if self.get('java.cli_extra', '') != '':
options.extend(shlex.split(self['java.cli_extra']))
return options
def get_format_options(self):
options = {}
for k, v in self.iteritems():
m = re.match('^mark2\.format\.(.*)$', k)
if m:
options[m.group(1)] = v
return options
def get_umask(self, ext):
return int(str(self['mark2.umask.' + ext]), 8)
class ClientProperties(Properties):
    """Properties accessors used by the mark2 client UI."""

    def get_palette(self):
        """Return palette entries ([name, attr, attr, ...]) for the configured theme."""
        prefix = 'theme.%s.' % self['theme']
        return [[name] + [part.strip() for part in spec.split(',')]
                for name, spec in self.get_by_prefix(prefix)]

    def get_player_actions(self):
        return self['player_actions'].split(',')

    def get_player_reasons(self):
        return self.get_by_prefix('player_actions.reasons.')

    def get_apps(self):
        return self.get_by_prefix('stats.app.')

    def get_interval(self, name):
        return self['task.%s' % name]
class Lang(Properties):
    """Properties view over Minecraft's language (translation) file."""
    def get_deaths(self):
        """Yield (key, (regex, format)) for each distinct death message.

        Vanilla death strings use positional placeholders like %1$s; each
        is rewritten both into a named regex group (username/killer/weapon)
        for recognising the message in server output, and into a
        str.format template for re-rendering it. Duplicate message strings
        are yielded only once.
        """
        seen = []
        for k, v in self.get_by_prefix('death.'):
            if not v in seen:
                seen.append(v)
                # Replace each escaped %N$s with a named capture group.
                regex = reduce(lambda a, r: a.replace(*r),
                               ((r"\%{0}\$s".format(i + 1),
                                 "(?P<{0}>[A-Za-z0-9]{{1,32}})".format(x))
                                for i, x in enumerate(("username", "killer", "weapon"))),
                               re.escape(v))
                # Replace each %N$s with a {username}/{killer}/{weapon} field.
                format = reduce(lambda a, r: a.replace(*r),
                                (("%{0}$s".format(i + 1),
                                  "{{{0}}}".format(x))
                                 for i, x in enumerate(("username", "killer", "weapon"))),
                                v)
                yield k, ("^{0}$".format(regex), format)
########NEW FILE########
__FILENAME__ = bukkit
import json
from . import JarProvider
class Bukkit(JarProvider):
    """Lists craftbukkit builds from each dl.bukkit.org release channel."""

    def work(self):
        self.get('http://dl.bukkit.org/api/1.0/downloads/channels/?_accept=application/json', self.handle_channels)

    def handle_channels(self, data):
        for channel in json.loads(data)['results']:
            slug = channel['slug']
            self.add(('Bukkit', channel['name']), (None, slug),
                     'http://dl.bukkit.org/latest-%s/craftbukkit.jar' % slug)
        self.commit()

# Convention: each provider module exposes its class as `ref`.
ref = Bukkit
########NEW FILE########
__FILENAME__ = feed_the_beast
import re
from hashlib import md5
from xml.dom import minidom
from . import JarProvider
class FeedTheBeast(JarProvider):
    """Lists FTB server packs from creeperrepo's modpack index."""
    base = 'http://www.creeperrepo.net/'

    def work(self):
        # Download URLs embed a token derived from the repo's clock.
        self.get(self.base + 'getdate', self.handle_date)

    def handle_date(self, data):
        digest = md5()
        digest.update('mcepoch1' + data)
        self.token = digest.hexdigest()
        self.get(self.base + 'static/FTB2/modpacks.xml', self.handle_packs)

    def handle_packs(self, data):
        dom = minidom.parseString(data)
        for node in dom.getElementsByTagName('modpack'):
            attr = lambda name: node.attributes[name].value
            filename = attr('serverPack')
            # Packs without a server download are skipped.
            if filename == "":
                continue
            # Strip common prefixes/suffixes to get a short artifact name.
            artifact = attr('name')
            for pattern, replacement in ((' Pack$', ''),
                                         ('^Feed The Beast ', ''),
                                         ('^FTB ', '')):
                artifact = re.sub(pattern, replacement, artifact)
            url = self.base + 'direct/FTB2/' + self.token + '/' + '^'.join((
                'modpacks',
                attr('dir'),
                attr('version').replace('.', '_'),
                filename))
            self.add(('Feed The Beast', artifact), ('ftb', None), url)
        self.commit()

# Convention: each provider module exposes its class as `ref`.
ref = FeedTheBeast
########NEW FILE########
__FILENAME__ = forge
from . import JarProvider
class Forge(JarProvider):
    """Offers the latest and recommended Minecraft Forge universal builds."""
    base = 'http://files.minecraftforge.net/minecraftforge/minecraftforge-universal-{0}.zip'

    def work(self):
        for channel in ('latest', 'recommended'):
            self.add(('Forge', channel.title()), (None, None), self.base.format(channel))
        self.commit()

# Convention: each provider module exposes its class as `ref`.
ref = Forge
########NEW FILE########
__FILENAME__ = mcpcplus
from . import JenkinsJarProvider
# Jar listing for MCPC-Plus builds published on the md-5 Jenkins server.
class MCPCPlus(JenkinsJarProvider):
    name = 'MCPC-Plus'            # display name shown to the user
    base = 'http://ci.md-5.net/'  # Jenkins instance root URL
    project = 'MCPC-Plus'         # Jenkins job to query

# Convention: each provider module exposes its class as `ref`.
ref = MCPCPlus
########NEW FILE########
__FILENAME__ = spigot
from . import JenkinsJarProvider
# Jar listing for Spigot builds published on the md-5 Jenkins server.
class Spigot(JenkinsJarProvider):
    name = 'Spigot'               # display name shown to the user
    base = 'http://ci.md-5.net/'  # Jenkins instance root URL
    project = 'Spigot'            # Jenkins job to query

# Convention: each provider module exposes its class as `ref`.
ref = Spigot
########NEW FILE########
__FILENAME__ = technic
import json
from . import JarProvider
class Technic(JarProvider):
    """Lists server zips for the Technic platform modpacks."""
    api_base = 'http://solder.technicpack.net/api/modpack/?include=full'
    # (solder slug, server-zip filename template)
    packs = (
        ('bigdig', 'BigDigServer-v{0}.zip'),
        ('tekkit', 'Tekkit_Server_{0}.zip'),
        ('tekkitlite', 'Tekkit_Lite_Server_{0}.zip'),
        ('voltz', 'Voltz_Server_v{0}.zip'))
    builds = ('recommended', 'latest')

    def work(self):
        self.get(self.api_base, self.handle_data)

    def handle_data(self, data):
        listing = json.loads(data)
        mirror = listing['mirror_url']
        for slug, filename in self.packs:
            pack = listing['modpacks'][slug]
            title = pack['display_name']
            if title == 'Tekkit':
                # Disambiguate from the newer pack of the same name.
                title = 'Tekkit Classic'
            for build in self.builds:
                url = mirror + 'servers/' + slug + '/' + filename.format(pack[build])
                self.add(('Technic', title, build.title()), (None, None, None), url)
        self.commit()

# Convention: each provider module exposes its class as `ref`.
ref = Technic
########NEW FILE########
__FILENAME__ = vanilla
import json
from . import JarProvider
class Vanilla(JarProvider):
    """Offers the official Mojang server jars (latest release and snapshot)."""
    base = 'http://s3.amazonaws.com/Minecraft.Download/versions/'

    def work(self):
        self.get(self.base + 'versions.json', self.handle_data)

    def handle_data(self, data):
        latest = json.loads(data)['latest']
        for channel, version in latest.iteritems():
            url = '{0}{1}/minecraft_server.{1}.jar'.format(self.base, version)
            self.add(('Vanilla', channel.title()), (None, None), url)
        self.commit()

# Convention: each provider module exposes its class as `ref`.
ref = Vanilla
########NEW FILE########
__FILENAME__ = builtin
from mk2 import events, properties
from mk2.services import process
from mk2.shared import find_config, open_resource
from mk2.plugins import Plugin
import os
class Builtin(Plugin):
    """Implements mark2's core console commands (~help, ~reload, ~jar, ...)."""
    def setup(self):
        self.register(self.handle_cmd_help, events.Hook, public=True, name="help", doc="displays this message")
        self.register(self.handle_cmd_events, events.Hook, public=True, name="events", doc="lists events")
        self.register(self.handle_cmd_plugins, events.Hook, public=True, name="plugins", doc="lists running plugins")
        self.register(self.handle_cmd_reload_plugin, events.Hook, public=True, name="reload-plugin", doc="reload a plugin")
        self.register(self.handle_cmd_rehash, events.Hook, public=True, name="rehash", doc="reload config and any plugins that changed")
        self.register(self.handle_cmd_reload, events.Hook, public=True, name="reload", doc="reload config and all plugins")
        self.register(self.handle_cmd_jar, events.Hook, public=True, name="jar", doc="wrap a different server jar")
    def table(self, v):
        """Print (name, doc) pairs as an aligned two-column table, sorted by name."""
        m = 0
        for name, doc in v:
            m = max(m, len(name))
        for name, doc in sorted(v, key=lambda x: x[0]):
            self.console(" ~%s | %s" % (name.ljust(m), doc))
    def handle_cmd_help(self, event):
        # Collect every public ~command registered with the dispatcher.
        o = []
        for _, callback, args in self.parent.events.get(events.Hook):
            if args.get('public', False):
                o.append((args['name'], args.get('doc', '')))
        self.console("The following commands are available:")
        self.table(o)
    def handle_cmd_events(self, event):
        self.console("The following events are available:")
        self.table([(n, c.doc) for n, c in events.get_all()])
    def handle_cmd_plugins(self, events):
        # NOTE(review): the parameter name `events` shadows the module-level
        # `events` import; harmless here since the module isn't used, but
        # worth renaming to `event` for consistency with the other handlers.
        self.console("These plugins are running: " + ", ".join(sorted(self.parent.plugins.keys())))
    def handle_cmd_reload_plugin(self, event):
        if event.args in self.parent.plugins:
            self.parent.plugins.reload(event.args)
            self.console("%s reloaded." % event.args)
        else:
            self.console("unknown plugin.")
    def handle_cmd_rehash(self, event):
        """Reload the config, then reload only plugins whose config changed."""
        # make a dict of old and new plugin list
        plugins_old = dict(self.parent.config.get_plugins())
        self.parent.load_config()
        plugins_new = dict(self.parent.config.get_plugins())
        # reload the union of old plugins and new plugins
        requires_reload = set(plugins_old.keys()) | set(plugins_new.keys())
        # (except plugins whose config is exactly the same)
        for k in list(requires_reload):
            if plugins_old.get(k, False) == plugins_new.get(k, False):
                requires_reload.remove(k)
        requires_reload = list(requires_reload)
        # actually reload
        for p in requires_reload:
            self.parent.plugins.reload(p)
        # Drop any falsy entries before reporting.
        reloaded = filter(None, requires_reload)
        self.console("%d plugins reloaded: %s" % (len(reloaded), ", ".join(reloaded)))
    def handle_cmd_reload(self, event):
        self.parent.plugins.unload_all()
        self.parent.load_config()
        self.parent.load_plugins()
        self.console("config + plugins reloaded.")
    def handle_cmd_jar(self, event):
        # Switch the jar used at the next (re)start; the running server is untouched.
        new_jar = process.find_jar(
            self.parent.config['mark2.jar_path'].split(';'),
            event.args)
        if new_jar:
            self.console("I will switch to {0} at the next restart".format(new_jar))
            self.parent.jar_file = new_jar
        else:
            self.console("Can't find a matching jar file.")
########NEW FILE########
__FILENAME__ = console_tracking
from mk2 import properties
from mk2.events import PlayerChat, PlayerDeath, PlayerJoin, PlayerQuit, ServerOutput
from mk2.plugins import Plugin
import re
class ConsoleTracking(Plugin):
    """Turns raw server output into structured player events.

    Join/quit/chat lines are matched with configurable regexes
    (mark2.regex.*); death messages are recognised using the translation
    strings extracted from the server jar, when available.
    """
    deaths = tuple()
    chat_events = tuple()
    def setup(self):
        # Death-message patterns come from the jar's en_US.lang, if present.
        lang = properties.load_jar(self.parent.jar_file, 'assets/minecraft/lang/en_US.lang', 'lang/en_US.lang')
        if lang is not None:
            self.deaths = tuple(lang.get_deaths())
            self.register(self.death_handler, ServerOutput, pattern=".*")
        self.register_chat()
    def register_chat(self):
        ev = []
        for key, e_ty in (('join', PlayerJoin),
                          ('quit', PlayerQuit),
                          ('chat', PlayerChat)):
            pattern = self.parent.config['mark2.regex.' + key]
            # Validate the user-supplied regex before registering it.
            try:
                re.compile(pattern)
            except:
                return self.fatal_error(reason="mark2.regex.{0} isn't a valid regex!".format(key))
            # e_ty is bound as a default argument to avoid Python's
            # late-binding closure pitfall inside this loop.
            ev.append(self.register(lambda e, e_ty=e_ty: self.dispatch(e_ty(**e.match.groupdict())),
                                    ServerOutput,
                                    pattern=pattern))
        self.chat_events = tuple(ev)
    def death_handler(self, event):
        # Try each known death-message regex against the line and dispatch a
        # PlayerDeath for the first match.
        for name, (pattern, format) in self.deaths:
            m = re.match(pattern, event.data)
            if m:
                self.dispatch(PlayerDeath(cause=None,
                                          format=format,
                                          **m.groupdict()))
                break
########NEW FILE########
__FILENAME__ = ping
import struct
from twisted.internet import task, reactor
from twisted.internet.protocol import Protocol, ClientFactory
from mk2.events import Event, StatPlayerCount, ServerOutput
from mk2.plugins import Plugin
class PingProtocol(Protocol):
    """Speaks the legacy Minecraft server-list ping (0xFE 0x01).

    Connects, sends the two-byte ping, parses the kick-packet reply and
    dispatches a StatPlayerCount event with the current/max player counts.
    """
    def connectionMade(self):
        self.buff = ""
        # 0xFE = server list ping, 0x01 = the extended ping variant.
        self.transport.write('\xFE\x01')
    def dataReceived(self, data):
        self.buff += data
        if len(self.buff) >= 3:
            # Byte 0 is the packet id; bytes 1-2 give the reply's length in
            # UTF-16 code units (big-endian short).
            l = struct.unpack('>h', self.buff[1:3])[0]
            if len(self.buff) >= 3 + l * 2:
                # Skip the first 6 payload bytes (header preamble — TODO
                # confirm against the protocol spec) and split the
                # null-delimited fields; fields 3 and 4 are current/max players.
                data = self.buff[9:].decode('utf-16be').split('\x00')
                self.dispatch(StatPlayerCount(source='ping', players_current=int(data[3]), players_max=int(data[4])))
                self.transport.loseConnection()
class PingFactory(ClientFactory):
    """Builds PingProtocol instances wired to the mark2 event dispatcher."""
    noisy = False

    def __init__(self, dispatch):
        self.dispatch = dispatch

    def buildProtocol(self, addr):
        proto = PingProtocol()
        proto.dispatch = self.dispatch
        return proto
class Ping(Plugin):
    """Polls the server with a status ping every `interval` seconds."""
    alive = False
    event_id = None
    interval = Plugin.Property(default=10)

    def setup(self):
        self.host = self.parent.properties['server_ip'] or '127.0.0.1'
        self.task = task.LoopingCall(self.loop)
        self.task.start(self.interval, now=False)

    def server_started(self, event):
        # Swallow the console noise our own pings generate, replacing any
        # handler left over from a previous server run.
        ping_pattern = r"\s*(?:/{0}:\d+ lost connection|Reached end of stream for /{0})"
        if self.event_id:
            self.parent.events.unregister(self.event_id)
        self.event_id = self.parent.events.register(
            lambda ev: Event.EAT, ServerOutput,
            pattern=ping_pattern.format(self.host))

    def loop(self):
        target = self.parent.properties['server_ip'] or '127.0.0.1'
        port = self.parent.properties['server_port']
        reactor.connectTCP(target, port,
                           PingFactory(self.parent.events.dispatch),
                           bindAddress=(self.host, 0))
########NEW FILE########
__FILENAME__ = process
import locale
from twisted.internet import protocol, reactor, error, defer, task
import glob
import psutil
import shlex
from mk2 import events
from mk2.events import EventPriority
from mk2.plugins import Plugin
class ProcessProtocol(protocol.ProcessProtocol):
    """Bridges the minecraft server subprocess to mark2 events.

    Decodes the child's output into lines and re-emits them as
    ServerOutput events; reports process start and exit as events too.
    """
    obuff = u""     # partial line carried over between data chunks
    alive = True    # flips to False once the child has exited
    def __init__(self, dispatch, locale):
        self.dispatch = dispatch
        self.locale = locale
    def output(self, line):
        self.dispatch(events.ServerOutput(line=line))
    def childDataReceived(self, fd, data):
        # Some servers emit backspaces to redraw their prompt; strip them
        # before decoding.
        if data[0] == '\b':
            data = data.lstrip(' \b')
        data = data.decode(self.locale)
        data = data.split("\n")
        # The first fragment continues the previous chunk's partial line;
        # the final fragment may itself be incomplete, so buffer it.
        data[0] = self.obuff + data[0]
        self.obuff = data.pop()
        for l in data:
            self.output(l.strip('\r'))
    def makeConnection(self, transport):
        self.dispatch(events.ServerStarting(pid=transport.pid))
    def processEnded(self, reason):
        self.alive = False
        if isinstance(reason.value, error.ProcessTerminated) and reason.value.exitCode:
            # Abnormal exit: raise an alert and shut mark2 down.
            self.dispatch(events.ServerEvent(cause='server/error/exit-failure',
                                             data="server exited abnormally: {0}".format(reason.getErrorMessage()),
                                             priority=1))
            self.dispatch(events.FatalError(reason=reason.getErrorMessage()))
        else:
            self.dispatch(events.ServerStopped())
class Process(Plugin):
    """Owns the minecraft server subprocess: spawning, stdin and shutdown.

    Translates ServerStart/ServerStop events into process management and
    emits ServerStarted/ServerStopped as the child changes state.
    """
    name = "process"
    protocol = None
    respawn = False          # restart the server after it stops?
    service_stopping = None  # Deferred fired once the child has exited
    transport = None
    failsafe = None          # delayed forced-kill, armed during a clean stop
    stat_process = None      # LoopingCall sampling the child's cpu/memory
    done_pattern = Plugin.Property(default='Done \\(([0-9\\.]+)s\\)\\!.*')
    stop_cmd = Plugin.Property(default='stop\n')
    java_path = Plugin.Property(default='java')
    server_args = Plugin.Property(default='')
    def setup(self):
        self.register(self.server_input, events.ServerInput, priority=EventPriority.MONITOR)
        self.register(self.server_start, events.ServerStart, priority=EventPriority.MONITOR)
        self.register(self.server_starting, events.ServerStarting)
        self.register(self._server_started, events.ServerOutput, pattern=self.done_pattern)
        self.register(self.server_stop, events.ServerStop, priority=EventPriority.MONITOR)
        self.register(self.server_stopping, events.ServerStopping, priority=EventPriority.MONITOR)
        self.register(self.server_stopped, events.ServerStopped, priority=EventPriority.MONITOR)
        reactor.addSystemEventTrigger('before', 'shutdown', self.before_reactor_stop)
    def build_command(self):
        """Assemble the java command line used to launch the server."""
        cmd = []
        cmd.append(self.java_path)
        #cmd.append('-server')
        cmd.extend(self.parent.config.get_jvm_options())
        cmd.append('-jar')
        cmd.append(self.parent.jar_file)
        cmd.append('nogui')
        cmd.extend(shlex.split(self.server_args))
        return cmd
    def server_start(self, e=None):
        self.parent.console("starting minecraft server")
        self.locale = locale.getpreferredencoding()
        self.protocol = ProcessProtocol(self.parent.events.dispatch, self.locale)
        cmd = self.build_command()
        self.transport = reactor.spawnProcess(self.protocol, cmd[0], cmd, env=None)
        if e:
            e.handled = True
    def server_input(self, e):
        """Forward a console line to the server's stdin."""
        if self.protocol and self.protocol.alive:
            l = e.line
            if not l.endswith('\n'):
                l += '\n'
            self.transport.write(l.encode(self.locale, 'ignore'))
            e.handled = True
    def server_starting(self, e):
        # Begin sampling the child's cpu/memory usage.
        self.stat_process = task.LoopingCall(self.update_stat, psutil.Process(e.pid))
        self.stat_process.start(self.parent.config['java.ps.interval'])
    def _server_started(self, e):
        self.parent.events.dispatch(events.ServerStarted())
    @defer.inlineCallbacks
    def server_stop(self, e):
        e.handled = True
        if self.protocol is None or not self.protocol.alive:
            return
        if e.announce:
            yield self.parent.events.dispatch(events.ServerStopping(respawn=e.respawn, reason=e.reason, kill=e.kill))
        if e.kill:
            self.failsafe = None
            self.parent.console("killing minecraft server")
            self.transport.signalProcess('KILL')
        else:
            self.parent.console("stopping minecraft server")
            self.transport.write(self.stop_cmd)
            # If the server hasn't exited by the timeout, force-kill it.
            self.failsafe = self.parent.events.dispatch_delayed(events.ServerStop(respawn=e.respawn, reason=e.reason, kill=True, announce=False), self.parent.config['mark2.shutdown_timeout'])
    def server_stopping(self, e):
        self.respawn = e.respawn
    def server_stopped(self, e):
        if self.stat_process and self.stat_process.running:
            self.stat_process.stop()
        if self.failsafe:
            self.failsafe.cancel()
            self.failsafe = None
        if self.respawn:
            self.parent.events.dispatch(events.ServerStart())
            self.respawn = False
        elif self.service_stopping:
            # The reactor is waiting on this exit before shutting down.
            self.service_stopping.callback(0)
        else:
            print "I'm stopping the reactor now"
            reactor.stop()
    def update_stat(self, process):
        try:
            self.parent.events.dispatch(events.StatProcess(cpu=process.get_cpu_percent(interval=0), memory=process.get_memory_percent()))
        except psutil.error.NoSuchProcess:
            pass
    def before_reactor_stop(self):
        # Ask the server to stop and delay reactor shutdown until it has.
        if self.protocol and self.protocol.alive:
            self.parent.events.dispatch(events.ServerStop(reason="SIGINT", respawn=False))
            self.service_stopping = defer.Deferred()
            return self.service_stopping
def find_jar(search_patterns, hint=None):
    """Return the first file matching any glob pattern, or None.

    `hint` (when given) is tried before `search_patterns`. Bug fix: the
    original insert()ed the hint into the caller's list, mutating a shared
    argument; we now work on a copy.
    """
    patterns = list(search_patterns)
    if hint:
        patterns.insert(0, hint)
    for pattern in patterns:
        matches = glob.glob(pattern)
        if matches:
            return matches[0]
    return None
########NEW FILE########
__FILENAME__ = user_server
from twisted.internet import reactor
from twisted.internet.protocol import Factory
from twisted.protocols.basic import LineReceiver
import os
import json
from mk2 import events
from mk2.plugins import Plugin
class Scrollback:
    """Fixed-capacity line buffer that keeps the most recent `length` entries."""

    def __init__(self, length):
        self.length = length
        self.data = []

    def put(self, line):
        """Append a line, evicting the oldest once capacity is exceeded."""
        self.data.append(line)
        while len(self.data) > self.length:
            del self.data[0]

    def get(self, max_items=None):
        """Return a copy of the buffer, optionally only the newest `max_items`."""
        return self.data[:] if max_items is None else self.data[-max_items:]
class UserServerProtocol(LineReceiver):
    """JSON-lines protocol spoken over the mark2 control socket.

    Each line is a JSON object with a "type" field; clients attach as a
    named user, send console input, and query scrollback/users/stats.
    """
    MAX_LENGTH = 999999
    delimiter = '\n'
    tab_last = None
    tab_index = 0
    attached_user = None
    def connectionMade(self):
        # Relay console output and user presence changes to this client.
        self._handlers = []
        for callback, ty in (
                (self.console_helper, events.Console),
                (self.handle_attach, events.UserAttach),
                (self.handle_detach, events.UserDetach)):
            self._handlers.append(self.register(callback, ty))
    def connectionLost(self, reason):
        # Announce the detach and drop our event registrations.
        if self.attached_user:
            self.dispatch(events.UserDetach(user=self.attached_user))
        for i in self._handlers:
            self.unregister(i)
        self._handlers = []
    def lineReceived(self, line):
        """Decode one JSON message from the client and act on its type."""
        msg = json.loads(str(line))
        ty = msg["type"]
        if ty == "attach":
            self.attached_user = msg['user']
            self.dispatch(events.UserAttach(user=msg['user']))
        elif ty == "input":
            self.dispatch(events.UserInput(user=msg['user'], line=msg['line']))
        elif ty == "get_scrollback":
            # Send the console regexes first, then the buffered lines.
            self.send_helper("regex", patterns=dict(self.factory.parent.config.get_by_prefix('mark2.regex.')))
            self.send_helper("scrollback", lines=[e.serialize() for e in self.factory.scrollback.get()])
        elif ty == "get_users":
            for u in self.factory.users:
                self.send_helper("user_status", user=u, online=True)
        elif ty == "get_stats":
            self.send_helper("stats", stats=self.factory.stats)
        elif ty == "get_players":
            self.send_helper("players", players=self.factory.players)
        else:
            self.factory.parent.console("unknown packet: %s" % str(msg))
    def send_helper(self, ty, **k):
        # Serialize a message of the given type back to the client.
        k["type"] = ty
        self.sendLine(json.dumps(k))
    def console_helper(self, event):
        self.send_helper("console", **event.serialize())
    def handle_attach(self, event):
        self.send_helper("user_status", user=event.user, online=True)
    def handle_detach(self, event):
        self.send_helper("user_status", user=event.user, online=False)
class UserServerFactory(Factory):
    """Tracks attached users, scrollback and server stats for the control socket."""
    def __init__(self, parent):
        self.parent = parent
        # Bug fix: `players` used to be a mutable class attribute shared by
        # every factory instance; it is now per-instance state.
        self.players = []
        self.scrollback = Scrollback(200)
        self.users = set()
        self.parent.events.register(self.handle_console, events.Console)
        self.parent.events.register(self.handle_attach, events.UserAttach)
        self.parent.events.register(self.handle_detach, events.UserDetach)
        self.parent.events.register(self.handle_player_count, events.StatPlayerCount)
        self.parent.events.register(self.handle_players, events.StatPlayers)
        self.parent.events.register(self.handle_process, events.StatProcess)
        # Placeholder stats shown until real figures arrive.
        self.stats = dict((k, '___') for k in ('memory', 'cpu', 'players_current', 'players_max'))
    def buildProtocol(self, addr):
        # Wire the protocol straight into the event dispatcher.
        p = UserServerProtocol()
        p.register = self.parent.events.register
        p.unregister = self.parent.events.unregister
        p.dispatch = self.parent.events.dispatch
        p.factory = self
        return p
    def handle_console(self, event):
        self.scrollback.put(event)
    def handle_attach(self, event):
        self.users.add(event.user)
    def handle_detach(self, event):
        self.users.discard(event.user)
    #stat handlers
    def handle_player_count(self, event):
        self.stats['players_current'] = event.players_current
        self.stats['players_max'] = event.players_max
    def handle_players(self, event):
        # Keep the list alphabetical for display purposes.
        self.players = sorted(event.players, key=str.lower)
    def handle_process(self, event):
        for n in ('cpu', 'memory'):
            self.stats[n] = '{0:.2f}'.format(event[n])
class UserServer(Plugin):
    """Exposes the unix control socket that mark2 user clients attach to."""

    def setup(self):
        sock_path = self.parent.socket
        # A stale socket file from a previous run would block the bind.
        if os.path.exists(sock_path):
            os.remove(sock_path)
        self.factory = UserServerFactory(self.parent)
        reactor.listenUNIX(sock_path, self.factory,
                           mode=self.parent.config.get_umask('sock'))

    def save_state(self):
        # Preserve the player list across plugin reloads.
        return self.factory.players

    def load_state(self, state):
        self.factory.players = state
########NEW FILE########
__FILENAME__ = shared
import os
import pkg_resources
def open_resource(name):
    """Open a file bundled inside the mk2 package as a read-only stream."""
    return pkg_resources.resource_stream('mk2', name)
# Locate the directory that holds mark2's config files. Resolution order:
# explicit $MARK2_CONFIG_DIR, then a per-virtualenv directory, then
# ~/.config/mark2 when mark2 lives under /home, and finally /etc/mark2.
_config_found = False
if "MARK2_CONFIG_DIR" in os.environ:
    _config_base = os.environ["MARK2_CONFIG_DIR"]
elif "VIRTUAL_ENV" in os.environ:
    _config_base = os.path.join(os.environ["VIRTUAL_ENV"], ".config", "mark2")
elif __file__.startswith(os.path.realpath('/home/')):
    # Installed from a user's home directory: use their personal config dir.
    _config_base = os.path.join(os.path.expanduser("~"), ".config", "mark2")
else:
    _config_base = os.path.join(os.path.join("/etc/mark2"))
def find_config(name, create=True, ignore_errors=False):
    """Return the path of `name` inside mark2's config directory.

    When `create` is true the directory is created on demand. Raises
    ValueError if the directory is still missing, unless `ignore_errors`.
    """
    global _config_base, _config_found
    if not _config_found and os.path.exists(_config_base):
        _config_found = True
    if create and not _config_found:
        try:
            os.makedirs(_config_base)
        except OSError:
            # Creation failed (e.g. permissions); leave _config_found unset.
            pass
        else:
            _config_found = True
    if not (_config_found or ignore_errors):
        raise ValueError
    return os.path.join(_config_base, name)
def console_repr(e):
    """Render a console event dict as a one-line display string."""
    markers = {'server': '|', 'mark2': '#', 'user': '>'}
    out = u"%s %s " % (e['time'], markers.get(e['source'], '?'))
    if e['source'] == 'server' and e['level'] != 'INFO':
        # Surface non-routine server log levels (WARN, ERROR, ...).
        out += u"[%s] " % e['level']
    elif e['source'] == 'user':
        out += u"(%s) " % e['user']
    out += u"%s" % e['data']
    return out
########NEW FILE########
__FILENAME__ = test_events
from .. import events
from ..events import Event, EventPriority
from twisted.trial import unittest
class TestEvent(Event):
    """Minimal event used by the dispatcher tests."""
    name = Event.Arg()
    def prefilter(self, name=None):
        # Deliver only to handlers registered with a matching `name` filter.
        return self.name == name
class EventWithArgs(Event):
    """Event with one required and one defaulted argument, for Arg tests."""
    required = Event.Arg(required=True)
    default = Event.Arg(default='foo')
class PrefilterTest_1(Event):
    """Event whose prefilter takes a required and an optional arg (no **kwargs)."""
    def prefilter(self, require, optional=None):
        pass
class PrefilterTest_2(Event):
    """Like PrefilterTest_1, but also accepts arbitrary excess keyword args."""
    def prefilter(self, require, optional=None, **excess):
        pass
class EventsTestCase(unittest.TestCase):
    """Unit tests for the EventDispatcher: dispatch, priorities, eating,
    (un)registration, event arguments and prefilter argument checking."""
    def setUp(self):
        # The dispatcher takes an error callback; the tests discard it.
        self.events = events.EventDispatcher(lambda *a: None)
    @staticmethod
    def eating_handler(event):
        # Consumes the event so later (lower-priority) handlers never see it.
        return Event.EAT
    @staticmethod
    def unregistering_handler(event):
        # Handles the event once, then removes itself.
        return Event.UNREGISTER
    def test_dispatch(self):
        """
        Test basic event dispatching.
        """
        self.hit = False
        def handler(event):
            self.hit = True
        self.events.register(handler, TestEvent)
        self.events.dispatch(TestEvent())
        self.assertTrue(self.hit)
    def test_priority(self):
        """
        Test event priority ordering.
        """
        self.hit_1, self.hit_2 = False, False
        def handler_1(event):
            self.hit_1 = True
        def handler_2(event):
            # True only if handler_1 (higher priority) already ran.
            self.hit_2 = self.hit_1
        self.events.register(handler_1, TestEvent, priority=EventPriority.HIGH)
        self.events.register(handler_2, TestEvent, priority=EventPriority.LOW)
        self.events.dispatch(TestEvent())
        self.assertTrue(self.hit_2)
    def test_priority_decorator(self):
        """
        Test event priority decorators (like @EventPriority.HIGH)
        """
        self.hit_1, self.hit_2 = False, False
        @EventPriority.HIGH
        def handler_1(event):
            self.hit_1 = True
        @EventPriority.LOW
        def handler_2(event):
            self.hit_2 = self.hit_1
        self.events.register(handler_1, TestEvent)
        self.events.register(handler_2, TestEvent)
        self.events.dispatch(TestEvent())
        self.assertTrue(self.hit_2)
    def test_eat(self):
        """
        Test Event.EAT
        """
        self.hit = False
        def handler(event):
            self.hit = True
        self.events.register(self.eating_handler, TestEvent, priority=EventPriority.HIGH)
        self.events.register(handler, TestEvent, priority=EventPriority.LOW)
        self.events.dispatch(TestEvent())
        self.assertFalse(self.hit)
    def test_unregister(self):
        """
        Test unregistering events.
        """
        id_ = self.events.register(lambda event: None, TestEvent)
        # it should be handled now
        handled = self.events.dispatch(TestEvent())
        self.assertTrue(self.successResultOf(handled))
        # but not once we unregister it
        self.events.unregister(id_)
        handled = self.events.dispatch(TestEvent())
        self.assertFalse(self.successResultOf(handled))
    def test_unregister_from_event(self):
        """
        Test Event.UNREGISTER
        """
        self.events.register(self.unregistering_handler, TestEvent)
        handled = self.events.dispatch(TestEvent())
        self.assertTrue(self.successResultOf(handled))
        handled = self.events.dispatch(TestEvent())
        self.assertFalse(self.successResultOf(handled))
    def test_event_args(self):
        """
        Test Event.Arg
        """
        # Omitting a required arg raises; defaulted args are filled in.
        self.assertRaises(Exception, EventWithArgs)
        ev = EventWithArgs(required=True)
        self.assertEqual(ev.default, 'foo')
    def test_prefilter_check(self):
        """
        Test Event.prefilter() arg checking
        """
        def handler(event):
            pass
        # Registration must supply the prefilter's required arguments...
        self.assertRaises(Exception, self.events.register, handler, PrefilterTest_1)
        self.assertRaises(Exception, self.events.register, handler, PrefilterTest_2)
        self.events.register(handler, PrefilterTest_1, require='foo')
        self.events.register(handler, PrefilterTest_2, require='foo')
        self.events.register(handler, PrefilterTest_1, require='foo', optional='bar')
        self.events.register(handler, PrefilterTest_2, require='foo', optional='bar')
        # ...and may pass extras only when the prefilter accepts **kwargs.
        self.assertRaises(Exception, self.events.register, handler, PrefilterTest_1,
                          require='foo', optional='bar', fooarg='excess argument')
        self.events.register(handler, PrefilterTest_2,
                             require='foo', optional='bar', fooarg='excess argument')
########NEW FILE########
__FILENAME__ = test_plugins
from mk2 import events, plugins
import sys
from twisted.internet import task
from twisted.internet.task import Clock
from twisted.trial import unittest
class TestEventDispatcher(events.EventDispatcher):
    """EventDispatcher bound to a twisted Clock so tests control time."""

    def __init__(self):
        events.EventDispatcher.__init__(self, lambda a: None)
        self.clock = Clock()
        # Expose the fake clock's advance() directly on the dispatcher.
        self.advance = self.clock.advance

    def dispatch_delayed(self, event, delay):
        # Schedule on the fake clock rather than the real reactor.
        return self.clock.callLater(delay, self.dispatch, event)

    def dispatch_repeating(self, event, interval, now=False):
        loop = task.LoopingCall(self.dispatch, event)
        loop.clock = self.clock
        loop.start(interval, now)
        return loop
class TestPlugin(plugins.Plugin):
    """Plugin stub whose state round-trip is observable via foo/bar."""

    foo = 'foo'
    bar = 'bar'

    def setup(self):
        # Report failure so no real setup work runs.
        return False

    def save_state(self):
        # Persist foo; a later load_state() call stores it into bar.
        return self.foo

    def load_state(self, state):
        self.bar = state
class TestPluginLoader(plugins.PluginLoader):
    """Loader that serves plugins from a fixed in-memory registry."""

    plugins = {'test': TestPlugin}

    def load_plugin(self, name):
        # Unknown names yield False rather than raising.
        if name not in self.plugins:
            return False
        return self.plugins[name], None

    def find_plugins(self):
        return list(self.plugins.keys())
class PluginTestBase:
    """Mixin standing in for the mk2 core: the test case doubles as its own
    config object and wires up a fake event dispatcher + plugin manager."""
    def setUp(self):
        # This object acts as the config (see get_plugins below).
        self.config = self
        self.fatal_error = lambda *a: None
        self.events = TestEventDispatcher()
        self.plugins = plugins.PluginManager(self, loaders=(TestPluginLoader,))
    def console(self, *a, **kw):
        # Console sink; just echoes whatever the plugins print.
        print a, kw
    def get_plugins(self):
        # Config hook: plugin names -> their settings dicts.
        return {'test_plugins': {}}
class PluginLoading(PluginTestBase, unittest.TestCase):
    """Smoke tests for loading and reloading a plugin by name."""
    def test_load(self):
        self.assertTrue(self.plugins.load('test') is not None)
    def test_reload(self):
        self.plugins.reload('test')
class PluginTestCase(PluginTestBase, unittest.TestCase):
    """Behavioural tests for a loaded plugin: state round-trips, time
    parsing, warn/action chains and scheduled tasks (fake-clock driven)."""
    def setUp(self):
        PluginTestBase.setUp(self)
        self.plugins.load('test')
    @property
    def plugin(self):
        # The TestPlugin instance under test.
        return self.plugins['test']
    def test_load_save_state(self):
        self.assertEqual(self.plugin.foo, 'foo')
        self.assertEqual(self.plugin.bar, 'bar')
        # reload() saves state (foo) and restores it into bar.
        self.plugins.reload('test')
        self.assertEqual(self.plugin.bar, 'foo')
    def test_parse_time(self):
        name, time = self.plugin.parse_time("37s")
        self.assertEqual(time, 37)
    def test_action_chain(self):
        warn = [0]
        action = [False]
        # evil: point the plugin module's `reactor` at our fake clock
        sys.modules[plugins.Plugin.__module__].reactor = self.events.clock
        def callbackWarn(a):
            warn[0] += 1
        def callbackAction():
            action[0] = True
        act = self.plugin.action_chain("10h;10m;10s",
                                       callbackWarn,
                                       callbackAction)[1]
        act()
        # Each step warns once, then waits out the remaining interval.
        for i, time in enumerate((36000, 590, 10)):
            self.assertEqual(warn[0], i + 1)
            self.events.advance(time)
        self.assertEqual(warn[0], 3)
        self.assertTrue(action[0])
    def test_action_cancel(self):
        action = [False]
        cancelled = [False]
        # evil: point the plugin module's `reactor` at our fake clock
        sys.modules[plugins.Plugin.__module__].reactor = self.events.clock
        def callbackCancel():
            cancelled[0] = True
        def callbackAction():
            action[0] = True
        act, cancel = self.plugin.action_chain_cancellable("1s",
                                                          lambda a: None,
                                                          callbackAction,
                                                          callbackCancel)[-2:]
        act()
        self.assertFalse(action[0])
        self.assertFalse(cancelled[0])
        cancel()
        self.assertTrue(cancelled[0])
        # Once cancelled, advancing past the deadline must not fire it.
        self.events.advance(2)
        self.assertFalse(action[0])
    def test_delayed_task(self):
        calls = [0]
        def task(ev):
            calls[0] += 1
        self.plugin.delayed_task(task, 10)
        # Fires exactly once at t=10 and never again.
        self.events.advance(9)
        self.assertEqual(calls[0], 0)
        self.events.advance(1)
        self.assertEqual(calls[0], 1)
        self.events.advance(100)
        self.assertEqual(calls[0], 1)
    def test_repeating_task(self):
        calls = [0]
        def task(ev):
            calls[0] += 1
        self.plugin.repeating_task(task, 10)
        for i in xrange(100):
            self.events.advance(10)
        self.assertEqual(calls[0], 100)
    def test_stop_tasks(self):
        calls = [0]
        def task(ev):
            calls[0] += 1
        self.plugin.repeating_task(task, 10)
        for i in xrange(100):
            self.events.advance(10)
        # After stop_tasks(), further clock advances must do nothing.
        self.plugin.stop_tasks()
        for i in xrange(100):
            self.events.advance(10)
        self.assertEqual(calls[0], 100)
########NEW FILE########
__FILENAME__ = test_process
from mk2 import events
from mk2.services import process
import random
from twisted.internet import error
from twisted.python.failure import Failure
from twisted.trial import unittest
class ProcessProtocolTestCase(unittest.TestCase):
    """Tests for the server ProcessProtocol: line re-assembly from
    arbitrary read chunks, and event dispatch on process exit."""
    def setUp(self):
        self.dispatched = []
        self.proto = process.ProcessProtocol(self.dispatch, 'utf8')
    def dispatch(self, event):
        # Capture dispatched events for later inspection.
        self.dispatched.append(event)
    def test_output(self):
        random.seed()
        data = '''a line of output
and another line of output
this line is incomplete'''
        lines = data.split('\n')
        # Feed the data in random-sized chunks to exercise buffering.
        while data:
            index = random.randint(1, min(len(data), 18))
            bit, data = data[:index], data[index:]
            self.proto.childDataReceived(1, bit)
        self.assertTrue(self.dispatched)
        # Every complete line must come out as a ServerOutput, in order.
        while self.dispatched:
            event = self.dispatched.pop(0)
            self.assertIsInstance(event, events.ServerOutput)
            self.assertEqual(event['data'], lines.pop(0))
        self.assertEqual(len(lines), 1) # the data after the final \n
    def test_process_success(self):
        # Clean exit: alive goes False and exactly one ServerStopped fires.
        fail = Failure(error.ProcessDone(None))
        self.proto.processEnded(fail)
        self.assertFalse(self.proto.alive)
        self.assertEqual(len(self.dispatched), 1)
        self.assertIsInstance(self.dispatched[0], events.ServerStopped)
    def test_process_failure(self):
        # Non-zero exit: a FatalError must be among the dispatched events.
        fail = Failure(error.ProcessTerminated(exitCode=1))
        self.proto.processEnded(fail)
        self.assertFalse(self.proto.alive)
        self.assertTrue(any(isinstance(event, events.FatalError) for event in self.dispatched))
########NEW FILE########
__FILENAME__ = user_client
import getpass
import glob
import json
import os
from string import Template
from twisted.internet import reactor
from twisted.internet.protocol import ClientFactory, ProcessProtocol
from twisted.internet.task import LoopingCall
from twisted.protocols.basic import LineReceiver
import properties
import psutil
import re
import sys
import urwid
from shared import console_repr, open_resource
class TabEvent:
    """Cycles through player-name completions for the last word of a line.

    The line is split at its last space: ``left`` is kept verbatim while
    the trailing word is completed against the player list. If nothing
    matches, next() keeps returning the original line unchanged.
    """
    fail = None

    def __init__(self, line, players):
        pos = line.rfind(' ') + 1
        if pos == 0:
            self.left, right = "", line
        else:
            self.left, right = line[:pos], line[pos:]
        # BUG FIX: materialize the matches as a list. Under Python 3,
        # filter() returns a lazy iterator, breaking the len() check here
        # and the indexing in next(); on Python 2 this is identical.
        self.players = [p for p in players if p.startswith(right)]
        if len(self.players) == 0:
            self.fail = line
        self.index = 0

    def next(self):
        """Return the next completion, wrapping around the match list."""
        if self.fail:
            return self.fail
        i = self.index % len(self.players)
        self.index += 1
        return self.left + self.players[i]
class Prompt(urwid.Edit):
    """Command input box with shell-style history and tab-completion of
    player names."""
    def __init__(self, get_players, run_command, *a, **k):
        # history[-1] is always the line currently being edited.
        self.history = ['']
        self.history_pos = 0
        self.tab = None                 # active TabEvent while tab-cycling
        self.get_players = get_players  # callable -> list of player names
        self.run_command = run_command  # callable taking the command line
        urwid.Edit.__init__(self, *a, **k)
    def get_prompt(self):
        return self.get_edit_text()
    def set_prompt(self, x):
        self.set_edit_text(x)
        self.set_edit_pos(len(x))
    def save_prompt(self):
        # Store the edit box contents into the current history slot.
        self.history[self.history_pos] = self.get_prompt()
    def load_prompt(self):
        self.set_prompt(self.history[self.history_pos])
    def keypress(self, size, key):
        # Any key other than tab cancels an in-progress completion cycle.
        if key != 'tab':
            self.tab = None
        if key == 'up':
            if self.history_pos > 0:
                self.save_prompt()
                self.history_pos -= 1
                self.load_prompt()
        elif key == 'down':
            if self.history_pos < len(self.history) - 1:
                self.save_prompt()
                self.history_pos += 1
                self.load_prompt()
        elif key == 'enter':
            text = self.get_prompt()
            self.run_command(text)
            self.history_pos = len(self.history) - 1
            # Avoid recording consecutive duplicate history entries.
            if self.history[self.history_pos - 1] == text:
                self.set_prompt('')
                self.cursor = 0
                self.save_prompt()
            else:
                self.save_prompt()
                self.history.append('')
                self.history_pos += 1
                self.load_prompt()
        elif key == 'tab':
            text = self.get_prompt()
            if text == '':
                # Empty prompt: tab inserts the common 'say ' prefix.
                self.set_prompt('say ')
            else:
                # Start or continue cycling through matching player names.
                if self.tab is None:
                    self.tab = TabEvent(text, self.get_players())
                self.set_prompt(self.tab.next())
        else:
            return urwid.Edit.keypress(self, size, key)
class PMenuButton(urwid.Button):
    """Button rendered without urwid's default '< >' decorations."""
    def __init__(self, caption, *a):
        super(PMenuButton, self).__init__(caption, *a)
        # Replace the decorated widget with a bare selectable label.
        self._w = urwid.SelectableIcon(caption, 0)
class PMenuWrap(urwid.WidgetPlaceholder):
    """Three-stage player menu: pick a player, then an action, then (for
    kick/ban, when reasons exist) a reason; the assembled command string
    is handed to `dispatch`."""
    names = ('players', 'actions', 'reasons')
    def __init__(self, actions, reasons, dispatch, escape):
        self.dispatch = dispatch  # callable taking the final command string
        self.escape = escape      # callable invoked to leave the menu
        self._pmenu_lists = [ (n, urwid.SimpleListWalker([])) for n in self.names ]
        self._pmenu_widgets = [ (n, urwid.ListBox(l)) for n, l in self._pmenu_lists ]
        self.fill(1, zip(actions, actions))
        self.fill(2, reasons)
        self.first()
        super(PMenuWrap, self).__init__(self._pmenu_widgets[0][1])
    def fill(self, index, items):
        # Replace the contents of list `index` with (label, value) buttons.
        name, contents = self._pmenu_lists[index]
        del contents[0:len(contents)]
        for name, result in items:
            e = urwid.AttrMap(PMenuButton(name, self.next, result), 'menu_item', 'menu_item_focus')
            contents.append(e)
    def first(self):
        # Reset to the first stage (player list) with no accumulated picks.
        self._pmenu_acc = []
        self._pmenu_stage = 0
        self.original_widget = self._pmenu_widgets[0][1]
    def next(self, widget, result):
        acc = self._pmenu_acc
        acc.append(result)
        #run command? acc is [player, action, reason?]; dispatch once we
        # have player+action, unless the action is kick/ban and reasons
        # are configured (then one more stage). Emitted as "action player reason...".
        if (self._pmenu_stage == 1 and not (result in ('kick', 'ban') and len(self._pmenu_lists[2][1]) > 0)) or\
           (self._pmenu_stage == 2):
            self.dispatch(' '.join([acc[1]] + [acc[0]] + acc[2:]))
            self.first()
        #next menu
        else:
            self._pmenu_stage += 1
            self.original_widget = self._pmenu_widgets[self._pmenu_stage][1]
    def prev(self):
        # Step back one stage, dropping the most recent pick.
        self._pmenu_acc.pop()
        self._pmenu_stage -= 1
        self.original_widget = self._pmenu_widgets[self._pmenu_stage][1]
    def keypress(self, size, key):
        if key == 'esc':
            if self._pmenu_stage == 0:
                self.escape()
            else:
                self.first()
        elif key == 'backspace':
            if self._pmenu_stage == 0:
                self.escape()
            else:
                self.prev()
        else:
            return self.original_widget.keypress(size, key)
    def set_players(self, players):
        # Sync the player list with the server, keeping buttons sorted
        # (case-insensitively) and reusing existing widgets when possible.
        content = self._pmenu_lists[0][1]
        diff = lambda a, b: [[e for e in d if not e in c] for c, d in ((a, b), (b, a))]
        add, remove = diff([b.original_widget.label for b in list(content)], players)
        #first remove players who logged off
        for b in list(content):
            if b.original_widget.label in remove:
                content.remove(b)
        #now add new players
        i = 0
        while len(add) > 0:
            a = add.pop(0)
            # advance to the sorted insertion point
            while i < len(content) - 1 and content[i].original_widget.label.lower() < a.lower():
                i += 1
            content.insert(i, urwid.AttrMap(PMenuButton(a, self.next, a), 'menu_item', 'menu_item_focus'))
            i += 1
class UI:
    """urwid terminal UI: server/user header row, scrolling console,
    stats pane, player menu sidebar and a command prompt footer."""
    loop = None
    def __init__(self, palette, get_players, run_command, switch_server, connect_to_server, pmenu_actions, pmenu_reasons):
        self.palette = palette
        self.get_players = get_players
        self.run_command = run_command
        self.switch_server = switch_server
        self.connect_to_server = connect_to_server
        self.pmenu_actions = pmenu_actions
        self.pmenu_reasons = pmenu_reasons
        self.lines = []                 # scrollback (bounded in append_output)
        self.filters = {}               # filter name -> LineFilter
        self.filter = lambda *a: True   # active predicate over console lines
        self.g_output_list = urwid.SimpleListWalker([])
        self.build()
    def build(self):
        """Construct the widget tree (header, console, sidebar, prompt)."""
        #header
        self.g_servers = urwid.Columns([])
        self.g_users = urwid.Columns([])
        g_head = urwid.AttrMap(urwid.Columns((self.g_servers, self.g_users)), 'head')
        #main
        self.g_output = urwid.ListBox(self.g_output_list)
        self.g_stats = urwid.Text("")
        #player menu
        def escape():
            # Leaving the player menu returns focus to the prompt.
            self.g_frame.focus_position='footer'
        self.g_pmenu = PMenuWrap(self.pmenu_actions, self.pmenu_reasons, self.run_command, escape)
        g_sidebar = urwid.Pile((
            ('pack', urwid.AttrMap(urwid.LineBox(self.g_stats, title='stats'), 'stats')),
            urwid.AttrMap(urwid.LineBox(self.g_pmenu, title="players"), 'menu')))
        g_main = urwid.Columns((
            urwid.WidgetDisable(urwid.AttrMap(urwid.LineBox(self.g_output, title='server'), 'console')),
            ('fixed', 31, g_sidebar)))
        #foot
        self.g_prompt = Prompt(self.get_players, self.run_command, ' > ')
        g_prompt = urwid.AttrMap(self.g_prompt, 'prompt', 'prompt_focus')
        self.g_frame = urwid.Frame(g_main, g_head, g_prompt, focus_part='footer')
        self.g_main = urwid.AttrMap(urwid.Padding(self.g_frame, left=1, right=1), 'frame')
        #log.addObserver(lambda m: self.append_output(str(m['message'])))
    def main(self):
        # Run on twisted's reactor via urwid's TwistedEventLoop.
        self.loop = urwid.MainLoop(
            self.g_main,
            self.palette,
            input_filter=self.filter_input,
            event_loop=urwid.TwistedEventLoop()
        )
        self.loop.run()
    def stop(self):
        # ExitMainLoop must be raised from inside the loop, hence the alarm.
        def exit(*a):
            raise urwid.ExitMainLoop
        self.loop.set_alarm_in(0, exit)
    def filter_input(self, keys, raw):
        """Intercept global hotkeys; pass everything else to the widgets."""
        passthru = []
        for key in keys:
            if key in ('page up', 'page down'):
                # Scroll the console regardless of focus.
                self.g_output.keypress((0, 16), key)
            elif key == 'ctrl left':
                self.switch_server(-1)
            elif key == 'ctrl right':
                self.switch_server(1)
            elif key == 'ctrl p':
                # Jump into the player menu.
                self.g_frame.focus_position = 'body'
            elif key == 'f8':
                raise urwid.ExitMainLoop
            else:
                passthru.append(key)
        return passthru
    def redraw(self):
        if self.loop:
            self.loop.draw_screen()
    def set_servers(self, servers, current=None):
        # Rebuild the header row of server tabs, highlighting `current`.
        new = []
        for s in sorted(servers):
            e = PMenuButton(" %s " % s, lambda button, _s=s: self.connect_to_server(_s))
            e = urwid.AttrMap(e, 'server_current' if s == current else 'server')
            new.append((e, self.g_servers.options('pack')))
        contents = self.g_servers.contents
        del contents[0:len(contents)]
        sep = u'\u21C9 ' if urwid.supports_unicode() else u':'
        contents.append((urwid.AttrMap(urwid.Text(u' mark2 %s' % sep), 'mark2'), self.g_servers.options('pack')))
        contents.extend(new)
        contents.append((urwid.Divider(), self.g_users.options()))
    def set_users(self, users):
        # users: iterable of (name, attached) pairs.
        new = []
        for user, attached in users:
            e = urwid.Text(" %s " % user)
            e = urwid.AttrMap(e, 'user_attached' if attached else 'user')
            new.append((e, self.g_users.options('pack')))
        contents = self.g_users.contents
        del contents[0:len(contents)]
        contents.append((urwid.Divider(), self.g_users.options()))
        contents.extend(new)
    def safe_unicode(self, text):
        # Terminals without unicode support get an ASCII approximation.
        if urwid.supports_unicode():
            return text
        else:
            return text.encode('ascii', errors='replace')
    def append_output(self, line):
        """Append one console line; keep the view pinned to the bottom
        only when it already was at the bottom."""
        scroll = False
        del self.lines[:-999]   # bound the scrollback buffer
        self.lines.append(line)
        if not self.filter(line):
            return
        try:
            p = self.g_output.focus_position
            try:
                self.g_output.body.next_position(p)
            except IndexError: # scrolled to end
                scroll = True
        except IndexError: # nothing in listbox
            pass
        self.g_output_list.append(urwid.Text(self.safe_unicode(console_repr(line))))
        if scroll:
            self.g_output.focus_position += 1
        self.redraw()
    def set_output(self, lines=None):
        # Re-render the whole console (e.g. after a filter change).
        contents = self.g_output_list
        del contents[0:len(contents)]
        lines = lines or self.lines
        lines = [l for l in lines if self.filter(l)]
        for line in lines:
            contents.append(urwid.Text(self.safe_unicode(console_repr(line))))
        try:
            self.g_output.focus_position = len(lines) - 1
        except IndexError: # nothing in list
            pass
        self.redraw()
    def set_filter(self, filter_):
        # Accepts either a filter name or a LineFilter instance.
        if isinstance(filter_, basestring):
            return self.set_filter(self.filters[filter_])
        self.filter = filter_.apply
        self.set_output()
    def set_players(self, players):
        self.g_pmenu.set_players(players)
        self.redraw()
    def set_stats(self, stats):
        self.g_stats.set_text(stats)
        self.redraw()
class SystemUsers(set):
    """Set of login names of users currently logged in to this machine.

    Also records the name of the user running this process in ``self.me``.
    """

    def __init__(self):
        # The local user; used to identify ourselves to the server.
        self.me = getpass.getuser()
        set.__init__(self)

    def update_users(self):
        """Refresh the set from the OS session table."""
        self.clear()
        # BUG FIX: psutil renamed get_users() to users() in 2.0 and later
        # removed the old name; support both so modern installs work.
        get_users = getattr(psutil, 'users', None) or psutil.get_users
        for u in get_users():
            self.add(u.name)
class App(object):
    """Runs a shell command on a fixed interval and reports its stripped
    stdout via the `update` callback (feeds external stats into the UI)."""

    def __init__(self, name, interval, update, shell, command):
        self.name = name
        self.interval = interval
        self.update = update
        self.cmd = [shell, '-c', command]
        self.stopping = False
        self.start()

    def start(self):
        # Spawn the command and accumulate its stdout until it exits.
        proto = ProcessProtocol()
        self.buff = ""
        self.protocol = proto
        proto.outReceived = self.got_out
        proto.processEnded = self.got_exit
        reactor.spawnProcess(proto, self.cmd[0], self.cmd)

    def got_out(self, data):
        self.buff += data

    def got_exit(self, *ignored):
        # Publish the output, then reschedule unless shutting down.
        self.update(self.name, self.buff.strip())
        if not self.stopping:
            reactor.callLater(self.interval, self.start)
class LineFilter:
    """Ordered show/hide rules applied to console lines.

    Each rule pairs an action with a set of predicates; apply() returns
    True when the line should be shown. The fallback action is fixed by
    the first rule added (see setdefault)."""
    HIDE = 1
    SHOW = 2

    def __init__(self):
        self._actions = []
        self._default = self.SHOW

    def append(self, action, *predicates):
        """Add a rule: `action` applies when every predicate matches."""
        self.setdefault(action)
        def rule(msg):
            for predicate in predicates:
                if not predicate(msg):
                    return None
            return action
        self._actions.append(rule)

    def setdefault(self, action):
        # Only meaningful before the first rule is added.
        if not self._actions:
            self._default = self.SHOW if action == self.SHOW else self.HIDE

    def apply(self, msg):
        """Return True when msg should be shown; later rules win."""
        verdict = self._default
        for rule in self._actions:
            verdict = rule(msg) or verdict
        return verdict == LineFilter.SHOW
class UserClientFactory(ClientFactory):
    """Top-level client: owns the UI, external stat apps, periodic polling
    tasks and the connection to one mark2 server at a time."""
    def __init__(self, initial_name, shared_path='/tmp/mark2'):
        # Map server name <-> unix socket path under shared_path.
        self.socket_to = lambda n: os.path.join(shared_path, n + ".sock")
        self.socket_from = lambda p: os.path.splitext(os.path.basename(p))[0]
        self.client = None
        self.stats = {}
        self.system_users = SystemUsers()
        #read the config (defaults overlaid with the user's ~/.mark2rc.properties)
        self.config = properties.load(properties.ClientProperties, open_resource('resources/mark2rc.default.properties'), os.path.expanduser('~/.mark2rc.properties'))
        assert not self.config is None
        self.stats_template = Template(self.config['stats'])
        #start apps
        self.apps = []
        #start ui
        self.ui = UI(self.config.get_palette(), self.get_players, self.run_command, self.switch_server, self.connect_to_server, self.config.get_player_actions(), self.config.get_player_reasons())
        for name, command in self.config.get_apps():
            app = App(name, self.config.get_interval('apps'), self.app_update, self.config['stats.app_shell'], command)
            self.apps.append(app)
        #tasks: periodic polling of servers/users/players/stats
        t = LoopingCall(self.update_servers)
        t.start(self.config.get_interval('servers'))
        t = LoopingCall(self.update_users)
        t.start(self.config.get_interval('users'))
        t = LoopingCall(self.update_players)
        t.start(self.config.get_interval('players'))
        t = LoopingCall(self.update_stats)
        t.start(self.config.get_interval('stats'))
        self.connect_to_server(initial_name)
    def log(self, w):
        self.ui.append_output(str(w))
    def main(self):
        self.ui.main()
    def buildProtocol(self, addr):
        self.client = UserClientProtocol(self.socket_from(addr.name), self.system_users.me, self)
        self.update_servers()
        return self.client
    def switch_server(self, delta=1):
        """Cycle to the next/previous running server (by sorted name)."""
        self.update_servers()
        if len(self.servers) == 0: # no running servers
            return self.ui.stop()
        if len(self.servers) == 1: # don't switch with only one server
            return
        index = self.servers.index(self.client.name)
        name = self.servers[(index + delta) % len(self.servers)]
        self.connect_to_server(name)
    def connect_to_server(self, name):
        if self.client:
            self.client.close()
        reactor.connectUNIX(self.socket_to(name), self)
    def update_servers(self):
        # Running servers are discovered by globbing for their sockets.
        servers = []
        for f in glob.glob(self.socket_to('*')):
            servers.append(self.socket_from(f))
        self.servers = sorted(servers)
        self.ui.set_servers(self.servers, current=self.client.name if self.client else None)
    def update_users(self):
        self.system_users.update_users()
        if self.client:
            self.client.get_users()
    def update_players(self):
        if self.client:
            self.client.get_players()
    def update_stats(self):
        if self.client:
            self.client.get_stats()
    def app_update(self, name, data):
        # Called by App instances with fresh command output.
        self.stats[name] = data
    def get_players(self):
        if self.client:
            return self.client.players
        else:
            return []
    def run_command(self, command):
        if self.client:
            return self.client.run_command(command)
    def server_connected(self, client):
        pass
    def server_disconnected(self, client):
        # Hop to another running server (or quit the UI if none remain).
        self.switch_server()
    def server_output(self, line):
        self.ui.append_output(line)
    def server_scrollback(self, lines):
        self.ui.set_output(lines)
    def server_players(self, players):
        self.ui.set_players(players)
    def server_users(self, users_a):
        # Merge locally logged-in users with server-attached ones; the
        # second tuple element flags attachment.
        users_l = list(self.system_users)
        users = []
        for u in sorted(set(users_l + users_a), key=str.lower):
            users.append((u, u in users_a))
        self.ui.set_users(users)
    def server_stats(self, stats):
        self.stats.update(stats)
        self.ui.set_stats(self.stats_template.safe_substitute(self.stats))
    def server_regex(self, patterns):
        self.make_filters(patterns)
    def make_filters(self, server_patterns={}):
        """Compile console-line filters from server patterns + config.

        NOTE(review): the mutable default dict is shared between calls
        but never mutated here, so it is harmless as written."""
        cfg = {}
        cfg.update(server_patterns)
        cfg.update(self.config.get_by_prefix('pattern.'))
        # read patterns from config to get a dict of name: filter function
        def makefilter(p):
            ppp = p
            p = re.compile(p)
            def _filter(msg):
                # Require a full match of the line's data.
                m = p.match(msg['data'])
                return m and m.end() == len(msg['data'])
            return _filter
        patterns = dict((k, makefilter(p)) for k, p in cfg.iteritems())
        patterns['all'] = lambda a: True
        # read filters: comma-separated pattern names with optional
        # 'show:'/'hide:' prefixes switching the action mid-spec.
        self.ui.filters = {}
        for name, spec in self.config.get_by_prefix('filter.'):
            filter_ = LineFilter()
            action = LineFilter.SHOW
            for pattern in spec.split(','):
                pattern = pattern.strip().replace('-', '_')
                if ':' in pattern:
                    a, pattern = pattern.split(':', 1)
                    action = {'show': LineFilter.SHOW, 'hide': LineFilter.HIDE}.get(a)
                    filter_.setdefault(action)
                if not pattern:
                    continue
                filter_.append(action, patterns[pattern])
            self.ui.filters[name] = filter_
        self.ui.set_filter(self.config['use_filter'])
class NullFactory(object):
    """Absorbs any method call and returns None -- used to mute callbacks
    from a connection that has been closed."""

    def __getattr__(self, name):
        def _noop(*args, **kwargs):
            return None
        return _noop
class UserClientProtocol(LineReceiver):
    """Newline-delimited JSON protocol between this client and a mark2
    server over its unix socket."""
    MAX_LENGTH = 999999
    delimiter = '\n'
    enabled = False
    def __init__(self, name, user, factory):
        self.name = name        # server name (socket basename)
        self.user = user        # local username we attach as
        self.users = set()      # users currently attached to the server
        self.players = list()   # players currently on the server
        self.factory = factory
    def close(self):
        self.transport.loseConnection()
        # Swallow any further factory callbacks for this dead connection.
        self.factory = NullFactory()
    def connectionMade(self):
        self.alive = 1
        self.send("attach", user=self.user)
        self.send("get_scrollback")
        self.factory.server_connected(self)
    def connectionLost(self, reason):
        self.alive = 0
        self.factory.server_disconnected(self)
    def lineReceived(self, line):
        """Decode one JSON message and route it by its 'type' field."""
        #log.msg(line)
        msg = json.loads(line)
        ty = msg["type"]
        if ty == "console":
            self.factory.server_output(msg)
        elif ty == "scrollback":
            self.factory.server_scrollback(msg['lines'])
        elif ty == "user_status":
            user = str(msg["user"])
            if msg["online"]:
                self.users.add(user)
            else:
                self.users.discard(user)
            self.factory.server_users(list(self.users))
        elif ty == "players":
            self.players = msg['players']
            self.factory.server_players(self.players)
        elif ty == "stats":
            self.factory.server_stats(msg['stats'])
        elif ty == "regex":
            self.factory.server_regex(msg['patterns'])
        else:
            self.factory.log("wat")
    def send(self, ty, **d):
        # Serialise and send one message, but only while connected.
        d['type'] = ty
        if self.alive:
            self.sendLine(json.dumps(d))
    def run_command(self, command):
        self.send("input", line=command, user=self.user)
    def get_players(self):
        self.send("get_players")
    def get_stats(self):
        self.send("get_stats")
    def get_users(self):
        self.send("get_users")
# Manual entry point: attach to a server named 'testserver'.
if __name__ == '__main__':
    thing = UserClientFactory('testserver')
    thing.main()
########NEW FILE########
|
997,592 | 358108fc75a0e3450126d4c2e3c06332341cff61 | import difflib
import lxml.html
import sys
def main():
    """Compare the tag structure of two HTML files and print a similarity
    percentage (0-100) based on difflib's sequence ratio."""
    if len(sys.argv) != 3:
        usage = "Usage: %s <file 1> <file 2>"
        sys.stderr.write(usage % sys.argv[0])
        sys.exit(1)
    path1, path2 = sys.argv[1], sys.argv[2]
    matcher = difflib.SequenceMatcher()
    matcher.set_seq1(get_tags(lxml.html.parse(path1)))
    matcher.set_seq2(get_tags(lxml.html.parse(path2)))
    print(matcher.ratio() * 100)
def get_tags(doc):
    """Flatten an lxml HTML document into a list of tag names.

    Comment nodes are represented by the string 'comment'; any other node
    type raises ValueError."""
    names = []
    for node in doc.getroot().iter():
        if isinstance(node, lxml.html.HtmlElement):
            names.append(node.tag)
        elif isinstance(node, lxml.html.HtmlComment):
            names.append('comment')
        else:
            raise ValueError('Don\'t know what to do with element: %s' % node)
    return names
# Script entry point.
if __name__ == '__main__':
    main()
|
997,593 | 37720369c76093ceaebb4b86ff59071422a41f65 | import sys
import logging
import PySimpleGUI as sg
import numpy as np
import datetime
import expenseJSONFile, variables
logging.basicConfig(stream=sys.stderr, level=logging.CRITICAL)
#
# Default global variables
#
dictExpenses = {}
#
# First tab layout
#
# One radio button per expense category; "other" is pre-selected.
# BUG FIX: the original used `key is not "other"` -- identity comparison
# on a string literal is implementation-dependent and can silently make
# every (or no) category the default. Compare by value instead.
categories = [[sg.Radio(value, "CAT", key=variables.T1_KEY+variables.CAT+key)]
              if key != "other" else [sg.Radio(value, "CAT", key=variables.T1_KEY+variables.CAT+key, default=True)]
              for key, value in variables.dictOfCategories.items()]
#
# CONTINUATION OF First tab layout
# ..we use T1_key for labeling our inputs
# First tab: form for entering a new expense. Widget keys are namespaced
# with T1_KEY so valuesOfTab() can pick out this tab's inputs.
tab1_layout = [
    [sg.Text('Expense Name', size=(15, 1)),
     sg.InputText('Expense Name', key=variables.T1_KEY+variables.EXP)],
    [sg.Text('Quantity', size=(15, 1)), sg.InputText(100, key=variables.T1_KEY + variables.QTY)],
    [sg.Text('Frequency', size=(15, 1)),
     sg.Radio('Monthly', "FREQ", key=variables.T1_KEY+variables.FREQ+"Monthly", default=True),
     sg.Radio('Yearly', "FREQ", key=variables.T1_KEY+variables.FREQ+"Yearly"),
     # BUG FIX: 'Weekly' also carried default=True, giving the FREQ radio
     # group two defaults; 'Monthly' (listed first) is the intended one.
     sg.Radio('Weekly', "FREQ", key=variables.T1_KEY+variables.FREQ+"Weekly")],
    [sg.Frame("Categories", [[sg.Column(categories)]])],
    [sg.Text('Date', size=(15, 1)), sg.InputText(str(datetime.date.today()), key=variables.T1_KEY + variables.DATE)],
    [sg.Text('Income/Outcome', size=(15, 1)),
     sg.Checkbox('Expense?', size=(10,1), default=False, key=variables.T1_KEY+variables.INCOME)],
    [sg.Submit(key=variables.T1_KEY+'_SUBMIT_'), sg.Cancel(key=variables.T1_KEY+'_CANCEL_')]
]
#
# Second tab layout (placeholder; rebuilt at runtime by showMonthlyGraph)
# WE HAVE TO USE KEY TAB_2
#
tab2_layout = [[sg.T('This is inside tab 2')],
               [sg.In(key=variables.T2_KEY+'_IN_')],
               [sg.Submit(key=variables.T2_KEY+'_SUBMIT_'),
                sg.Cancel(key=variables.T2_KEY+'_CANCEL_')]]
#
# Third tab layout (placeholder; rebuilt at runtime by printMatrixExpenses)
#
tab3_layout = [[sg.T('Please press refresh to update your values.')],
               [sg.Submit('Refresh!', key=variables.T3_KEY+'_SUBMIT_'),
                sg.Cancel(key=variables.T3_KEY+'_CANCEL_')]]
#
# ALL TABS' LAYOUTs TOGETHER
#
layout = [[sg.TabGroup([[sg.Tab('New Expense', tab1_layout),
                         sg.Tab('Expense Report', tab2_layout),
                         sg.Tab('List of Expenses', tab3_layout)]])]]
# The live window; created in main() and swapped out on tab rebuilds.
window = None
#
# printMatrixExpenses
# We print tab3 with all our file expenses.
def printMatrixExpenses():
    """Rebuild tab 3 as a grid of all stored expenses and swap in a new
    window: editable name/qty columns, read-only category/date/frequency."""
    global dictExpenses
    global tab1_layout, tab2_layout, tab3_layout, layout
    global window
    global header, button, values
    # Unmodificable values
    inmutableList = [variables.category, variables.date, variables.frequency]
    # 'expenseID', 'Expense?'
    # Writable values
    writableList = [variables.expenseName, variables.qty]
    dictExpenses = variables.jsonData['expensesList']
    header = [[sg.Text(' ')] + [sg.Text(key, size=(15,1)) for key in writableList]
              + [sg.Text(key, size=(15,1)) for key in inmutableList]] # build header layout
    matrix = header
    for i in range(len(dictExpenses)):
        # current Expense in our dictionary
        expense = dictExpenses[i]
        # One row per expense: InputText for writable fields, plain Text
        # for immutable ones; widget keys encode field name + expense ID.
        row = [[sg.Text(' ')]
               + [sg.InputText(expense[writableList[elemKey]], key=variables.T3_KEY+variables.dictJSON[writableList[elemKey]]+str(expense[variables.expenseID]), size=(15,1)) for elemKey in range(len(writableList))]
               + [sg.Text(value, key=variables.T3_KEY+variables.dictJSON[key]+str(expense[variables.expenseID]), size=(15,1)) for key, value in dictExpenses[i].items() if key in inmutableList]]
        matrix = matrix + row
    tab3_layout = matrix + [[sg.Submit('Update values!',
                                       key=variables.T3_KEY+variables.UPDEXPS),
                             sg.Cancel(key=variables.T3_KEY+'_CANCEL_')]]
    # tab3_layout = header + input_rows
    layout = [[sg.TabGroup([[sg.Tab('New Expense', tab1_layout),
                             sg.Tab('Expense Report', tab2_layout),
                             sg.Tab('List of Expenses', tab3_layout)]])]]
    # Replace the live window with one built from the new layout.
    windowNew = sg.Window('Hello {}!! Please, type in all your expenses'.format(variables.username)).Layout(layout)
    window.close()
    #button, values = windowNew.Read()
    window = windowNew
#
#
def updateExpenseData(tab3Data):
    """Copy the edited name/qty fields from tab 3 back into the JSON
    expense model and persist it to disk."""
    expenses = variables.jsonData['expensesList']
    for i, expense in enumerate(expenses):
        # Widget keys are T3_KEY + field-code + expense index.
        expense[variables.expenseName] = tab3Data[variables.T3_KEY + variables.EXP + str(i)]
        expense[variables.qty] = tab3Data[variables.T3_KEY + variables.QTY + str(i)]
    expenseJSONFile.writeExpensesList()
#
# valuesOfTab
# IN: We receive a tab preffix (string) and a dict of values (entries)
# OUT: We return ONLY a dict with key?values for this particular tab
def valuesOfTab(tab, allValues):
    """Return only the entries of allValues belonging to one tab.

    Widget keys are namespaced with a tab prefix, so membership reduces
    to a startswith test on the key.
    """
    selected = {}
    for key, val in allValues.items():
        if key.startswith(tab):
            selected[key] = val
    return selected
#
# thirtyDaysExpenseList
# Function that is used for our second tab
#
def thirtyDaysExpenseList():
    """Aggregate all expenses into a {day-of-month: monthly amount} dict.

    Each entry is normalised to a monthly amount (yearly values are spread
    over 12 months, weekly values counted ~4x via the 0.25 divisor),
    negated when it is an outgoing expense, and summed per day of month.
    """
    listExpenses = variables.jsonData['expensesList']
    result = {}
    logging.debug(listExpenses)
    for expense in listExpenses:
        # BUG FIX: the Monthly/Yearly/Weekly value is stored under the
        # 'frequency' key (see the tab-1 FREQ radios), not 'category';
        # the old lookup never matched, so everything counted as Monthly.
        if expense[variables.frequency] == "Yearly":
            frequency = 12
        elif expense[variables.frequency] == "Weekly":
            frequency = 0.25
        else:  # Monthly
            frequency = 1
        # Outgoing expenses count negative, income positive.
        value = int(expense[variables.qty])
        logging.debug("income=%s qty=%s", expense[variables.income], expense[variables.qty])
        if expense[variables.income] is False:
            value = -int(expense[variables.qty])
        # Day of the month this expense falls on.
        day = int(datetime.datetime.strptime(expense[variables.date], '%Y-%m-%d').strftime("%d"))
        # Sum amounts that land on the same day.
        result[day] = result.get(day, 0) + value / frequency
    return result
#
#
#
def showMonthlyGraph():
    """Rebuild tab 2 as a canvas plotting the cumulative account balance
    over the days of the month, swap in a new window and draw the axes,
    points and connecting line segments. Returns the per-day totals."""
    global tab1_layout, tab2_layout, tab3_layout, layout
    global window
    global header, button, values
    # Per-day monthly totals, e.g. {21: 5900.0, 22: -700.0}
    dailyExpense = thirtyDaysExpenseList()
    logging.debug(dailyExpense)
    # Sum all income and all expenses separately, plus the grand total.
    allIncome=sum(value for value in dailyExpense.values() if value>0)
    allOutcome=sum(value for value in dailyExpense.values() if value<0)
    grandTotal=allIncome + allOutcome
    # The y axis is symmetric around zero, sized by the largest magnitude.
    maxABSValue=max(max(dailyExpense.values()), abs(min(dailyExpense.values())))
    # maxIncome = max(dailyExpense.values())
    # minIncome = min(dailyExpense.values())
    maxIncome = maxABSValue
    minIncome = -maxABSValue
    # Canvas size in pixels
    wide = 650 # x
    tall = 400 # y
    # Graph origin and extents in graph coordinates
    xZero = int(wide*-0.85)
    xEnd = abs(xZero)
    yZero = int(tall*(-0.8))
    yEnd = abs(yZero)
    tab2_layout = [[sg.T('Your Monthly Expenses graph!')],
                   [sg.Graph(canvas_size=(wide, tall),
                             graph_bottom_left=(-wide, -tall),
                             graph_top_right=(wide, tall),
                             background_color='white', key='graph',
                             tooltip='Your daily account status!')],
                   [sg.Text("Total Income: "+ str(allIncome))],
                   [sg.Text("Total Outcome: "+ str(allOutcome))],
                   [sg.Text("Grand Total: "+ str(grandTotal))],
                   [sg.Submit(key=variables.T2_KEY+'_SUBMIT_'), sg.Cancel(key=variables.T2_KEY+'_CANCEL_')]]
    layout = [[sg.TabGroup([[sg.Tab('New Expense', tab1_layout),
                             sg.Tab('Expense Report', tab2_layout),
                             sg.Tab('List of Expenses', tab3_layout)]])]]
    # Swap in the new window (Finalize so the graph can be drawn on).
    windowNew = sg.Window('Hello {}!! Please, type in all your expenses'.format(variables.username)).Layout(layout).Finalize()
    window.close()
    #button, values = windowNew.Read()
    window = windowNew
    graph = window.Element('graph')
    # Horizontal axis with days of the month
    graph.DrawLine((xZero, yZero), (xEnd, yZero))
    # One tick + label per day of month
    day = 0
    for x in range(xZero, xEnd, 35):
        graph.DrawLine((x,yZero-5), (x,yZero+5))
        graph.DrawText(day, (x,yZero-20), color='green')
        day += 1
    # Vertical axis for the amount
    graph.DrawLine((xZero, yZero + 25), (xZero, yEnd-25))
    graph.DrawLine((xZero-5, 0), (xZero+5, 0))
    graph.DrawText(0, (xZero-20,0), color='green')
    for y in range(yZero, yEnd-25, 50):
        graph.DrawLine((xZero-5, y), (xZero+5, y))
        # Label in money units: scale canvas y back to an amount.
        graph.DrawText(int(y*maxIncome/yEnd), (xZero-30,y), color='green')
    # Horizontal zero line with day ticks
    graph.DrawLine((xZero, 0), (xEnd, 0))
    for x in range(xZero, xEnd, 35):
        graph.DrawLine((x,-5), (x,+5))
    # Starting point of the balance line
    pointA_X=xZero
    pointA_Y=0
    currentStatus=0
    # Walk the days of the month, accumulating the running balance and
    # drawing a point + amount label + connecting segment for active days.
    for key in range(1,31):
        if key in dailyExpense:
            logging.debug(key)
            logging.debug(dailyExpense[key]*yEnd/maxIncome)
            xValue=(2*xEnd/32) * (key - 15)
            currentStatus=currentStatus+dailyExpense[key]
            logging.debug(xValue)
            logging.debug(currentStatus)
            graph.DrawCircle((xValue, currentStatus*yEnd/maxIncome), 3,
                             fill_color='black',
                             line_color='black')
            # Green label for income, red for an expense.
            if dailyExpense[key]>0:
                graph.DrawText(dailyExpense[key],
                               (xValue+40, currentStatus*yEnd/maxIncome),
                               color='green')
            else:
                graph.DrawText(dailyExpense[key],
                               (xValue+40, currentStatus*yEnd/maxIncome),
                               color='red')
            graph.DrawLine((pointA_X, pointA_Y),
                           (xValue, currentStatus*yEnd/maxIncome))
            pointA_X = xValue
            pointA_Y = currentStatus*yEnd/maxIncome
    # Extend the final balance flat to the right edge of the graph.
    graph.DrawLine((pointA_X, pointA_Y), (xEnd, pointA_Y))
    return dailyExpense
def main():
    """Entry point: authenticate the user, build the GUI and run the event loop."""
    # Globals shared with the tab-handling helpers.
    global dictExpenses
    global layout, window
    # Load the expense file once so credentials can be checked.
    variables.jsonData = expenseJSONFile.readJSON(variables.filepath)
    # Abort unless the supplied email/password match the stored ones.
    if not (expenseJSONFile.userAndPassCorrect(variables.email, variables.clearPassword,
                                               variables.jsonData["email"], variables.jsonData["password"])):
        sg.popup("USER AND/OR DO NOT MATCH!!!")
        exit()
    else:
        sg.popup("USER AND PASSWORD are MATCHING!")
    # Build the initial window, then populate the tabs and the graph.
    window = sg.Window('Hello {}!! Please, type in all your expenses'.format(variables.username)).Layout(layout)
    printMatrixExpenses()
    showMonthlyGraph()
    while True:
        button, values = window.Read()
        # Re-read the expense list on every event so edits are picked up.
        dictExpenses = expenseJSONFile.readJSON(variables.filepath)['expensesList']
        # Dispatch on which tab's SUBMIT (or other button) was pressed.
        if (button == variables.T1_KEY+'_SUBMIT_'):  # FIRST TAB: new expense
            # expenseJSONFile.writeExpense(variables.filepath, jsonData, expense):
            # Keep only the values belonging to tab 1.
            res = valuesOfTab(variables.T1_KEY, values)
            # Persist the new expense; report success or failure to the user.
            if expenseJSONFile.writeExpense(res):
                sg.popup("New Expense created succesfuly!")
            else:
                sg.PopupError("Error creating new Expense!")
            printMatrixExpenses()
            showMonthlyGraph()
        elif (button == variables.T2_KEY+'_SUBMIT_'):  # SECOND TAB: report
            # Keep only the values belonging to tab 2.
            res = valuesOfTab(variables.T2_KEY, values)
            showMonthlyGraph()
        elif (button == variables.T3_KEY+'_SUBMIT_'):  # THIRD TAB: expense list
            # Keep only the values belonging to tab 3.
            res = valuesOfTab(variables.T3_KEY, values)
            printMatrixExpenses()
            showMonthlyGraph()
        elif (button == variables.T3_KEY+variables.UPDEXPS):  # THIRD TAB: update rows
            # logging.debug("Refresh update values")
            # logging.debug(values)  # All values!
            res = valuesOfTab(variables.T3_KEY, values)
            # Write the edited expense rows back, then refresh the views.
            # expenseJSONFile.writeExpense(res)
            # printMatrixExpenses()
            updateExpenseData(res)
            printMatrixExpenses()
            showMonthlyGraph()
        elif ('_CANCEL_' in button) or (button is None):
            # Cancel button pressed or window closed.
            logging.debug("Cancel button has been pressed!")
            break
        else:
            logging.error("Button has not been captured right")
            break
    window.close()
    exit()
# Run the application only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
997,594 | cbccbc85653a81e1081e8f25ae4957af04812261 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-12-24 05:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add AddrObj.plancode.

    Adds the FIAS planning-structure element code to the address object
    table.
    """

    dependencies = [
        ('fias', '0013_auto_20160825_0524'),
    ]

    operations = [
        migrations.AddField(
            model_name='addrobj',
            name='plancode',
            # Existing rows are backfilled with '0000';
            # preserve_default=False means this default is used only for
            # the migration and is not kept on the field afterwards.
            field=models.CharField(default='0000', max_length=4, verbose_name='Код элемента планировочной структуры'),
            preserve_default=False,
        ),
    ]
|
997,595 | 0c0bbc63c603bc4ec536b972b274687eaeea9dfb | from kivy.app import App
from kivy.base import runTouchApp
from kivy.lang import Builder
runTouchApp(Builder.load_string('''
Label:
Button:
text: 'Hello'
color: .8, .9, 0, 1
font_size: 32
pos: 50, 300
size: 100, 150
Button:
text: 'World'
color: .8, .9, 0, 1
font_size: 32
pos: 100, 0
size: 300, 150
'''))
|
997,596 | c2f8f5ffca5efadcb6127c52cd36a56d0c00fe92 | import math
def solve(pairs):
    """Return the minimum total number of presses so every total is a
    multiple of its button value.

    Processes the (A, B) pairs from last to first: each A is increased by
    the presses already scheduled for later steps, then rounded up to the
    next multiple of B.

    The rounding uses exact integer arithmetic: the original
    ``B * math.ceil(A / B)`` performs float division, which silently loses
    precision once A exceeds 2**53.

    Args:
        pairs: list of (A, B) integer tuples, in input order.

    Returns:
        int: minimum number of extra presses.
    """
    ans = 0
    for a, b in reversed(pairs):
        a += ans
        # Smallest multiple of b that is >= a, without floats.
        goal = -(-a // b) * b
        ans += goal - a
    return ans


def main():
    """Read N pairs from stdin and print the answer."""
    n = int(input())
    pairs = [tuple(map(int, input().split())) for _ in range(n)]
    print(solve(pairs))


if __name__ == "__main__":
    main()
997,597 | c33fa06f167b61a07c0a0dae1757a88af4afa3f8 | from math import *
class ArmConversion:
    """Kinematic conversion between arm joint angles and linear-actuator
    extension percentages for a two-link arm with fixed mounting geometry.

    Linkage dimensions are in inches; angles are in radians.
    """

    def __init__(self):
        # Shoulder actuator travel limits (inches).
        self.las_min = 12                # was 13
        self.las_max = 17.0 + 3/16.0     # was 16
        # Elbow actuator travel limits (inches).
        self.lae_min = 15                # was 15
        self.lae_max = 20.0 + 15/16.0    # was 20
        # Mounting offsets and link dimensions.
        self.sx = 2.5
        self.sy = 3
        self.A = 15.375
        self.la = 20
        self.eh = 3.0 + 1/16.0
        self.d1 = 0.5
        self.d2 = 0.75
        self.d3 = 2.25
        self.d4 = 2
        # Derived constant lengths/angles of the linkage triangles.
        self.sh = sqrt(self.sx**2 + self.sy**2)
        self.D = sqrt(self.d1**2 + (self.la - self.d4)**2)
        self.gamma1 = asin(self.d1 / self.D)
        self.E = sqrt(self.d3**2 + self.eh**2)
        self.gamma6 = acos(self.eh / self.E)
        self.B = sqrt(self.A**2 + self.d2**2)
        self.beta2 = acos(self.A / self.B)
        self.alpha1 = acos(self.sx / self.sh)

    def getLengths(self, theta2, theta3):
        """Map joint angles (radians) to [shoulder, elbow] actuator
        extension percentages."""
        elbow_angle = theta3 + pi/2
        # Interior angles of the shoulder and elbow linkage triangles.
        shoulder_open = pi - self.alpha1 - theta2
        shoulder_tri = shoulder_open - self.beta2
        elbow_tri = elbow_angle - self.gamma1 - self.gamma6
        # Law of cosines gives each actuator's current length.
        lae = sqrt(self.D**2 - 2*cos(elbow_tri)*self.D*self.E + self.E**2)
        las = sqrt(self.B**2 - 2*cos(shoulder_tri)*self.B*self.sh + self.sh**2)
        # Normalise each length into its travel range.
        shoulder_pct = -(las - self.las_min) / (self.las_min - self.las_max)
        elbow_pct = -(lae - self.lae_min) / (self.lae_min - self.lae_max)
        return [shoulder_pct, elbow_pct]

    def getAngles(self, las_percent, lae_percent):
        """Map actuator extension percentages back to [theta2, theta3]
        joint angles (radians)."""
        # Denormalise the percentages to actuator lengths.
        las = self.las_min + las_percent * (self.las_max - self.las_min)
        lae = self.lae_min + lae_percent * (self.lae_max - self.lae_min)
        # Invert the law-of-cosines relations.
        shoulder_tri = acos((-las**2 + self.sh**2 + self.B**2) / (2*self.B*self.sh))
        elbow_tri = acos((self.D**2 + self.E**2 - lae**2) / (2*self.D*self.E))
        theta2 = pi - self.alpha1 - (shoulder_tri + self.beta2)
        theta3 = self.gamma1 + elbow_tri + self.gamma6
        return [theta2, theta3]
# Quick manual smoke test when run directly.
if __name__ == "__main__":
    a = ArmConversion()
    # print a.getLengths(0, radians(90))
|
997,598 | 1f2c424cbeee712273f81691fdbf331819eb75d3 | #!/usr/bin/python
import csv
import json
import requests
import time
from sys import argv
# Usage: script.py <club-directory-name>
script, club = argv

# Strava segment this report is built around.
segmentid = 20545879

# File layout under the ubuntu home directory; most paths are per-club,
# the webhook log is shared.
path = ("/home/ubuntu/")
memberlist = (path + club + "/input/membersdiv.csv")
webhook = (path + "Strava/log/webhook.log")
segmentlist = (path + club + "/segmentlist.csv")
segmentsummary = (path + club + "/input/segmentsummary.csv")
activitystream = (path + club + "/input/activity.csv")
def getdata(athleteid, access_token, f):
    """Collect one athlete's efforts on ``segmentid`` and append CSV rows to f.

    Scans webhook.log for events belonging to this athlete to build the
    list of unique activity ids (delete events remove the id again), then
    fetches each activity from the Strava API and writes one CSV row per
    effort on the configured segment.

    Note: Python 2 source (print statements).
    """
    # This reads webhook.log and finds the unique activity ids...
    # may not work if an activity was deleted.
    print ("before open webhook.log")
    wh = open(webhook,'r')
    lines = wh.readlines()
    print ("open webhook.log")
    activitylist = []
    for (line) in (lines):
        if str(athleteid) in (line):
            print (line)
            print (line.strip("Start:POST: (args):"))
            # NOTE(review): str.strip removes any of these CHARACTERS from
            # both ends -- it does not remove the prefix string. Confirm the
            # log format actually yields valid JSON after this.
            stripped = (line.strip("Start:POST: (args):"))
            print (stripped)
            jsonline = json.loads(stripped)
            print str(jsonline)
            print str(jsonline["aspect_type"])
            print str(jsonline["object_id"])
            print str(jsonline["owner_id"])
            # Record each activity once; drop it again on a delete event.
            if (jsonline["object_id"]) not in activitylist:
                activitylist.append(jsonline["object_id"])
            if (jsonline["aspect_type"]) == ("delete"):
                activitylist.remove(jsonline["object_id"])
    wh.close()
    print (activitylist)
    #time.sleep(60)
    headers = {"Authorization":"Bearer %s" % (access_token) }
    data = {}
    dataStream = {}
    #f.write(str(athleteid) + " Activity Data\n")
    for (activity) in (activitylist):
        # Fetch the full activity (including segment efforts) from Strava.
        activityEndpoint = ("https://www.strava.com/api/v3/activities/" + str(activity))
        activityGet = requests.get(activityEndpoint,headers=headers,data=data)
        activityInfo=json.loads(activityGet.text)
        #f.write("in the for loop")
        try:
            #f.write("in the try statement")
            print ("Activity ID: " + str(activityInfo["id"]))
            print ("Activity Name: " + activityInfo["name"])
            # One CSV row per effort that matches the configured segment.
            for efforts in (activityInfo["segment_efforts"]):
                if (efforts["segment"]["id"]) == segmentid:
                    print (efforts["name"])
                    print (efforts["elapsed_time"])
                    print (efforts["segment"]["id"])
                    print (efforts["segment"]["distance"])
                    elev = (efforts["segment"]["elevation_high"]) - (efforts["segment"]["elevation_low"])
                    f.write(str(efforts["segment"]["id"]) + ',')
                    f.write(str(athleteid) + ',')
                    f.write(str(activityInfo["id"]) + ',')
                    f.write(str(efforts["elapsed_time"]) + ',')
                    f.write(str(efforts["start_date"]) + ',')
                    f.write(str(efforts["name"]) + ',')
                    f.write(str(elev) + ',')
                    f.write(str(efforts["kom_rank"]) + ',')
                    f.write(str(efforts["pr_rank"]) + ',')
                    f.write("\n")
        except:
            # NOTE(review): bare except hides real errors (bad JSON, missing
            # keys, auth failure) -- consider catching KeyError/ValueError.
            print ("something was excepted")
        #f.write ("something was excepted")
        #f.write("\n\n")
        #f.write("Activity Stream\n")
        #streamEndpoint = ("https://www.strava.com/api/v3/activities/" + str(activity) + "/streams")
        #streamGet = requests.get(streamEndpoint,headers=headers,data=dataStream)
        #streamInfo=json.loads(streamGet.text)
        #json.dump(streamInfo,f)
def initialise():
    """Read the club member list and append each member's segment efforts
    to the activity-stream CSV.

    Fix: the output file was not closed if getdata raised, and the member
    list file was never closed at all -- both are now managed by ``with``.
    """
    with open(activitystream, 'w') as f, open(memberlist, 'r') as members:
        data = csv.reader(members)
        for member in data:
            # Column 0 is the athlete id, column 6 the OAuth access token.
            athleteid = member[0]
            access_token = member[6]
            print(member)
            print((athleteid, access_token))
            getdata(athleteid, access_token, f)
    #time.sleep(60)
# Run the export only when executed as a script (not on import).
if __name__ == '__main__':
    initialise()
|
997,599 | 686b3d72bec9643b57ab1ca1238717304d866e93 | # Generated by Django 2.2.5 on 2019-09-04 11:27
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the posts app.

    Creates two review boards (Review1/Review2: post, comment and extra
    image tables each) and four simple boards (QnA, Promotion, One-on-one,
    Notice: post and comment tables each).
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Review boards: comments/images reference their post and author by
        # plain Integer/CharFields rather than ForeignKeys.
        migrations.CreateModel(
            name='Review1_post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=100, null=True)),
                ('university', models.CharField(blank=True, max_length=100, null=True)),
                ('major', models.CharField(blank=True, max_length=100, null=True)),
                ('major_type', models.CharField(blank=True, max_length=100, null=True)),
                ('region', models.CharField(blank=True, max_length=100, null=True)),
                ('posted_date', models.DateTimeField(auto_now_add=True, null=True)),
                ('user', models.CharField(blank=True, max_length=100, null=True)),
                ('heart_count', models.IntegerField(blank=True, default=0, null=True)),
                ('content', models.TextField(blank=True, null=True)),
                ('image_url', models.URLField(blank=True, default='', max_length=1000, null=True, verbose_name='imageurl')),
            ],
        ),
        migrations.CreateModel(
            name='Review1_post_comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('commented_post', models.IntegerField(blank=True, null=True)),
                ('content', models.TextField(blank=True, null=True)),
                ('commented_date', models.DateTimeField(auto_now_add=True, null=True)),
                ('user', models.CharField(blank=True, max_length=100, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Review1_post_image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('imaged_post', models.IntegerField(blank=True, null=True)),
                ('image_url', models.URLField(blank=True, default='', max_length=1000, null=True, verbose_name='imageurl')),
            ],
        ),
        migrations.CreateModel(
            name='Review2_post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=100, null=True)),
                ('university', models.CharField(blank=True, max_length=100, null=True)),
                ('major', models.CharField(blank=True, max_length=100, null=True)),
                ('major_type', models.CharField(blank=True, max_length=100, null=True)),
                ('region', models.CharField(blank=True, max_length=100, null=True)),
                ('posted_date', models.DateTimeField(auto_now_add=True, null=True)),
                ('user', models.CharField(blank=True, max_length=100, null=True)),
                ('heart_count', models.IntegerField(blank=True, default=0, null=True)),
                ('content', models.TextField(blank=True, null=True)),
                ('image_url', models.URLField(blank=True, default='', max_length=1000, null=True, verbose_name='imageurl')),
            ],
        ),
        migrations.CreateModel(
            name='Review2_post_comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('commented_post', models.IntegerField(blank=True, null=True)),
                ('content', models.TextField(blank=True, null=True)),
                ('commented_date', models.DateTimeField(auto_now_add=True, null=True)),
                ('user', models.CharField(blank=True, max_length=100, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Review2_post_image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('imaged_post', models.IntegerField(blank=True, null=True)),
                ('image_url', models.URLField(blank=True, default='', max_length=1000, null=True, verbose_name='imageurl')),
            ],
        ),
        # Simple boards: posts/comments use real ForeignKeys to the user
        # model and to their parent post.
        migrations.CreateModel(
            name='QnA_post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='QnA', max_length=500)),
                ('body', models.TextField(blank=True, null=True)),
                ('image', models.ImageField(blank=True, null=True, upload_to='images/')),
                ('pub_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='QnA_comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.TextField()),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.QnA_post')),
            ],
        ),
        migrations.CreateModel(
            name='Promotion_post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='홍보', max_length=500)),
                ('body', models.TextField(blank=True, null=True)),
                ('image', models.ImageField(blank=True, null=True, upload_to='images/')),
                ('pub_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Promotion_comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.TextField()),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.Promotion_post')),
            ],
        ),
        migrations.CreateModel(
            name='Oneonone_post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='일대일상담신청', max_length=500)),
                ('body', models.TextField(blank=True, null=True)),
                ('post_password', models.CharField(max_length=500)),
                ('pub_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Oneonone_comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.TextField()),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.Oneonone_post')),
            ],
        ),
        migrations.CreateModel(
            name='Notice_post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='공지', max_length=500)),
                ('body', models.TextField(blank=True, null=True)),
                ('image', models.ImageField(blank=True, null=True, upload_to='images/')),
                ('pub_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Notice_comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.TextField()),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.Notice_post')),
            ],
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.