| commit | subject | old_file | new_file | old_contents | lang | proba | diff |
|---|---|---|---|---|---|---|---|
c372d7197afef840e5a1f550f947ecbc2c931573
|
Correct name for mediatailor
|
tests/functional/test_endpoints.py
|
tests/functional/test_endpoints.py
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import json

from nose.tools import assert_equal

from botocore.session import get_session


# Several services have names that don't match for one reason or another.
SERVICE_RENAMES = {
    'application-autoscaling': 'autoscaling',
    'appstream': 'appstream2',
    'autoscaling-plans': 'autoscaling',
    'dynamodbstreams': 'streams.dynamodb',
    'cloudwatch': 'monitoring',
    'efs': 'elasticfilesystem',
    'elb': 'elasticloadbalancing',
    'elbv2': 'elasticloadbalancing',
    'emr': 'elasticmapreduce',
    'iot1click-devices': 'devices.iot1click',
    'iot1click-projects': 'projects.iot1click',
    'iot-data': 'data.iot',
    'meteringmarketplace': 'metering.marketplace',
    'opsworkscm': 'opsworks-cm',
    'ses': 'email',
    'stepfunctions': 'states',
    'lex-runtime': 'runtime.lex',
    'mturk': 'mturk-requester',
    'resourcegroupstaggingapi': 'tagging',
    'sagemaker-runtime': 'runtime.sagemaker',
    'lex-models': 'models.lex',
    'marketplace-entitlement': 'entitlement.marketplace',
    'pricing': 'api.pricing',
    'mediastore-data': 'data.mediastore',
    'iot-jobs-data': 'data.jobs.iot',
    'kinesis-video-media': 'kinesisvideo',
    'kinesis-video-archived-media': 'kinesisvideo',
    'alexaforbusiness': 'a4b',
    'neptune': 'rds',
}

BLACKLIST = [
    'mobileanalytics',
]


def test_endpoint_matches_service():
    backwards_renames = dict((v, k) for k, v in SERVICE_RENAMES.items())
    session = get_session()
    loader = session.get_component('data_loader')
    expected_services = set(loader.list_available_services('service-2'))

    pdir = os.path.dirname
    endpoints_path = os.path.join(pdir(pdir(pdir(__file__))),
                                  'botocore', 'data', 'endpoints.json')
    with open(endpoints_path, 'r') as f:
        data = json.loads(f.read())
    for partition in data['partitions']:
        for service in partition['services'].keys():
            service = backwards_renames.get(service, service)
            if service not in BLACKLIST:
                yield _assert_endpoint_is_service, service, expected_services


def _assert_endpoint_is_service(service, expected_services):
    assert service in expected_services


def test_service_name_matches_endpoint_prefix():
    # Generates tests for each service to verify that the endpoint prefix
    # matches the service name unless there is an explicit exception.
    session = get_session()
    loader = session.get_component('data_loader')

    # Load the list of available services. The names here represent what
    # will become the client names.
    services = loader.list_available_services('service-2')

    for service in services:
        yield _assert_service_name_matches_endpoint_prefix, loader, service


def _assert_service_name_matches_endpoint_prefix(loader, service_name):
    # Load the service model and grab its endpoint prefix
    service_model = loader.load_service_model(service_name, 'service-2')
    endpoint_prefix = service_model['metadata']['endpointPrefix']

    # Handle known exceptions where we have renamed the service directory
    # for one reason or another.
    expected_endpoint_prefix = SERVICE_RENAMES.get(service_name, service_name)
    assert_equal(
        endpoint_prefix, expected_endpoint_prefix,
        "Service name `%s` does not match expected endpoint "
        "prefix `%s`, actual: `%s`" % (
            service_name, expected_endpoint_prefix, endpoint_prefix))
|
Python
| 0.000006
|
@@ -1836,16 +1836,54 @@
 'rds',
+    'mediatailor': 'api.mediatailor',
 }

 BLACK
|
932e7ea649dad6b9b563eaac90bfc617ba3d10ca
|
Add ResourceTest
|
cal/tests/unit/test_wsgi.py
|
cal/tests/unit/test_wsgi.py
|
"""
Test WSGI basics and provide some helper functions for other WSGI tests.
"""
import routes
import webob
import webob.exc
from cal import wsgi
from cal.tests import base
class Test(base.NoDBTestCase):
def test_debug(self):
class Application(wsgi.Application):
"""Dummy application to test debug."""
def __call__(self, environ, start_response):
start_response("200", [("X-Test", "checking")])
return ['Test result']
application = wsgi.Debug(Application())
result = webob.Request.blank('/').get_response(application)
self.assertEqual(result.body, "Test result")
def test_router(self):
class Application(wsgi.Application):
"""Test application to call from router."""
def __call__(self, environ, start_response):
start_response("200", [])
return ['Router result']
class Router(wsgi.Router):
"""Test router."""
def __init__(self):
mapper = routes.Mapper()
mapper.connect("/test", controller=Application())
super(Router, self).__init__(mapper)
result = webob.Request.blank('/test').get_response(Router())
self.assertEqual(result.body, "Router result")
result = webob.Request.blank('/bad').get_response(Router())
self.assertNotEqual(result.body, "Router result")
class JSONRequestDeserializerTest(base.NoDBTestCase):
def setUp(self):
super(JSONRequestDeserializerTest, self).setUp()
self.deserializer = wsgi.JSONRequestDeserializer()
def test_has_body_return_false(self):
request = wsgi.Request.blank(
"/", headers={'Content-Length': 0})
self.assertFalse(self.deserializer.has_body(request))
def test_has_body_return_true(self):
request = wsgi.Request.blank(
"/", headers={'Content-Length': 1})
self.assertTrue(self.deserializer.has_body(request))
def test_default_with_has_body_return_false(self):
request = wsgi.Request.blank(
"/", headers={'Content-Length': 0})
self.assertEqual({},
self.deserializer.default(request))
def test_default_success(self):
data = """{"a": {
"a1": "1",
"a2": "2",
"bs": ["1", "2", "3", {"c": {"c1": "1"}}],
"d": {"e": "1"},
"f": "1"}}"""
as_dict = {
'body': {
u'a': {
u'a1': u'1',
u'a2': u'2',
u'bs': [u'1', u'2', u'3', {u'c': {u'c1': u'1'}}],
u'd': {u'e': u'1'},
u'f': u'1'}}}
request = webob.Request.blank("/", body=data)
self.assertEqual(as_dict,
self.deserializer.default(request))
def test_default_raise_Malformed_Exception(self):
request = wsgi.Request.blank("/", body=b"{mal:formed")
self.assertRaises(
webob.exc.HTTPBadRequest,
self.deserializer.default,
request)
class JSONResponseSerializerTest(base.NoDBTestCase):
def setUp(self):
super(JSONResponseSerializerTest, self).setUp()
self.serializer = wsgi.JSONResponseSerializer()
def test_default(self):
result = {
'a': {
'a1': '1',
'a2': '2',
'bs': ['1', '2', '3', {'c': {'c1': '1'}}],
'd': {'e': '1'},
'f': '1'}
}
expected_body = '{"a": {"a1": "1", "a2": "2", ' \
'"bs": ["1", "2", "3", ' \
'{"c": {"c1": "1"}}], '\
'"d": {"e": "1"}, "f": "1"}}'
response = webob.Response()
self.serializer.default(response, result)
self.assertEqual("application/json",
response.content_type)
self.assertEqual(response.body, expected_body)
|
Python
| 0
|
@@ -4043,8 +4043,1425 @@
 ed_body)
+
+
+class ResouceTest(base.NoDBTestCase):
+
+    def setUp(self):
+        super(ResouceTest, self).setUp()
+        self.resource = wsgi.Resource(self.Controller())
+
+    class Controller(object):
+        def index(self, req, index=None):
+            return index
+
+    def test_dispatch(self):
+        actual = self.resource.dispatch(self.resource.controller, 'index',
+                                        None, 'off')
+        expected = 'off'
+        self.assertEqual(actual, expected)
+
+    def test_dispatch_unknown_action(self):
+        self.assertRaises(
+            AttributeError, self.resource.dispatch,
+            self.resource.controller, 'create', {})
+
+    def test_get_action_args(self):
+        env = {
+            'wsgiorg.routing_args': [None, {
+                'controller': None,
+                'format': None,
+                'action': 'update',
+                'id': 12,
+            }],
+        }
+
+        expected = {'action': 'update', 'id': 12}
+
+        self.assertEqual(self.resource.get_action_args(env),
+                         expected)
+
+    # def test_malformed_request_body_throws_bad_request(self):
+    #     resource = wsgi.Resource(None)
+    #     request = wsgi.Request.blank(
+    #         "/", body=b"{mal:formed", method='POST',
+    #         headers={'Content-Type': "application/json"})
+
+    #     response = resource(request)
+    #     self.assertEqual(400, response.status_int)
|
587c4603d3ab379e4ee22f2dcda7d7798cd35dcf
|
fix spacing around arguments
|
db_credentials/DBCredentials.py
|
db_credentials/DBCredentials.py
|
#! /usr/local/bin/python

import sys
import re


class DBCredentials:

    def __init__( self ):

        self.creds = {
            'host':'',
            'port':'',
            'username':'',
            'password':'',
            'database':'',
        }
        return;

    # Load credentials from a file: no input validation.
    #
    def load_file( self, filename ):

        f = open( filename, 'r' )
        text = f.read()
        f.close
        #print text

        tuples = re.findall( r'(\w+)=([^\s]+)', text )
        #print tuples
        #[('host', 'localhost'), ('username', 'foo'), ('password', 'bar')]

        for tuple in tuples:
            self.creds[ tuple[0] ] = tuple[1]
        #print self.creds

        return

    def get_host( self ):
        return self.creds['host']

    def set_host( self, host ):
        self.creds['host'] = host

    # listener port - return if specified, otherwise default to 3306
    #
    def get_port( self ):
        if self.creds['port']:
            return self.creds['port']
        else:
            return '3306'

    def set_port( self, port ):
        self.creds['port'] = port

    def get_username( self ):
        return self.creds['username']

    def set_username( self, username ):
        self.creds['sid'] = username

    def get_password( self ):
        return self.creds['password']

    def set_password( self, password ):
        self.creds['password'] = password

    # database
    #
    def get_database( self ):
        if self.creds['database'] == '' and self.creds['host'] != '':
            self.creds['database'] = self.creds['host']
        return self.creds['database']

    def set_database( self, database ):
        self.creds['database'] = database
|
Python
| 0.000151
|
@@ -80,22 +80,20 @@
 _init__(
- 
 self
- 
 ):
@@ -347,17 +347,16 @@
 ad_file(
- 
 self, fi
@@ -361,17 +361,16 @@
 filename
- 
 ):
@@ -382,17 +382,16 @@
 = open(
- 
 filename
@@ -395,17 +395,16 @@
 ame, 'r'
- 
 )
@@ -487,17 +487,16 @@
 findall(
- 
 r'(\w+)=
@@ -510,17 +510,16 @@
 )', text
- 
 )
@@ -747,22 +747,20 @@
 et_host(
- 
 self
- 
 ):
@@ -806,17 +806,16 @@
 et_host(
- 
 self, ho
@@ -816,17 +816,16 @@
 lf, host
- 
 ):
@@ -946,22 +946,20 @@
 et_port(
- 
 self
- 
 ):
@@ -1080,17 +1080,16 @@
 et_port(
- 
 self, po
@@ -1090,17 +1090,16 @@
 lf, port
- 
 ):
@@ -1149,22 +1149,20 @@
 sername(
- 
 self
- 
 ):
@@ -1216,17 +1216,16 @@
 sername(
- 
 self, us
@@ -1230,17 +1230,16 @@
 username
- 
 ):
@@ -1292,22 +1292,20 @@
 assword(
- 
 self
- 
 ):
@@ -1359,17 +1359,16 @@
 assword(
- 
 self, pa
@@ -1373,17 +1373,16 @@
 password
- 
 ):
@@ -1465,14 +1465,12 @@
 ase(
- 
 self
- 
 ):
@@ -1655,17 +1655,16 @@
 atabase(
- 
 self, da
@@ -1669,17 +1669,16 @@
 database
- 
 ):
|
b939558f3d4bd0fa90f3f467ca85f698c4813046
|
Update __init__.py
|
comps/__init__.py
|
comps/__init__.py
|
"""
A simple application that provides an entry point for integrating
front end designers into a django project
"""
__version__ = '0.2.0'
|
Python
| 0.000001
|
@@ -131,9 +131,9 @@
 '0.
-2
+3
 .0'
|
ebcf8afa86caa243148397645870f35f51bb0739
|
Move imgur config to be a CMD argument
|
cats4all/cats_downloader.py
|
cats4all/cats_downloader.py
|
import os
import time
import imgurpython
import requests
import sqlite3
import itertools
from collections import namedtuple
import json
import argparse

DIR_NAME_FRMT = 'cats-%s'
tags = ['cat', 'cats', 'lolcat', 'lolcats']
DEFAULT_DB_FILE_PATH = './cats2.db'

ImageData = namedtuple('ImageData', ['id', 'link', 'title', 'height', 'width'])
ImgurConfig = namedtuple('ImgurConfig', ['id', 'secret'])


def get_config(config_path='./config.json'):
    with open(config_path, 'r') as config_file:
        config = json.load(config_file)
        # Perhaps: return ImgurConfig(**config)
        return ImgurConfig(config['id'], config['secret'])


def init_db(db_file_path, table_name='cats'):
    db_connection = sqlite3.connect(db_file_path)
    cursor = db_connection.cursor()
    cursor.execute('create table %s(image_id, date)' % (table_name,))
    db_connection.commit()
    cursor.close()
    db_connection.close()


def get_all_from__db(table_name='cats', db_file_name=DEFAULT_DB_FILE_PATH):
    db_connection = sqlite3.connect(db_file_name)
    cursor = db_connection.cursor()
    cursor.execute('select * from %s' % (table_name,))
    db_connection.commit()
    cursor.close()
    db_connection.close()


def add_to_db(image_id, date, db_file_path, table_name='cats'):
    db_connection = sqlite3.connect(db_file_path)
    cursor = db_connection.cursor()
    cursor.execute('insert into %s values (?, ?)' % (table_name,), (image_id, date))
    db_connection.commit()
    db_connection.close()


def add_bulk_to_db(image_iter, db_file_path=DEFAULT_DB_FILE_PATH, table_name='cats'):
    db_connection = sqlite3.connect(db_file_path)
    cursor = db_connection.cursor()
    cursor.execute('insert into %s values (?, ?)' % (table_name,), image_iter)
    db_connection.commit()
    db_connection.close()


def does_image_exist(image_id, db_file_path, table_name='cats'):
    db_connection = sqlite3.connect(db_file_path)
    try:
        cursor = db_connection.cursor()
        cursor.execute('select * from %s where image_id=?' % (table_name,), (image_id,))
        rows = cursor.fetchall()
        cursor.close()
        return len(rows) >= 1
    finally:
        db_connection.close()


def predicate(image):
    return True
    if image.height < 500 or image.width < 300:
        return False
    if image.height > 1200:
        return False
    return True


def get_todays_dir(dir_frmt=DIR_NAME_FRMT):
    return dir_frmt % (time.strftime('%Y-%m-%d'))


def remove_existing(images_data, db_file_path):
    print str(len(images_data)) + '!!!'
    nonexisiting_images = []
    for i in images_data:
        if not does_image_exist(i.id, db_file_path):
            nonexisiting_images.append(i)
    return nonexisiting_images


def flatten_items(items, client):
    for item in items:
        if item.is_album:
            album_images = client.get_album_images(item.id)
            for album_image in album_images:
                yield ImageData(album_image.id, album_image.link, item.title, album_image.height, album_image.width)
        elif type(item) is imgurpython.imgur.models.gallery_image.GalleryImage:
            yield ImageData(item.id, item.link, item.title, item.height, item.width)


def get_images_data_by_tag(imgur_config, tag, num=150, sort='viral'):
    client = imgurpython.ImgurClient(imgur_config.id, imgur_config.secret)
    images_by_tag = client.gallery_tag(tag, sort=sort)
    images_data = [i for i in flatten_items(images_by_tag.items, client) if predicate(i)]
    # return itertools.islice(images_data, num)
    print len(images_data), '???'
    return images_data[:num]


def get_images_of_tag(imgur_config, tag, db_file_path, num=150, sort='viral'):
    current_page = 1
    continue_download = True
    count_images = 0
    while continue_download:
        images_data = get_images_data_by_tag(imgur_config, tag, num, sort)
        new_images_data = remove_existing(images_data, db_file_path)
        print len(new_images_data)
        count_images += len(new_images_data)
        curr_date = time.strftime('%Y-%m-%d')
        for i in new_images_data:
            file_name = '%s\\%s.jpg'%(get_todays_dir(), i.id)
            try:
                print i.title
            except UnicodeEncodeError as e:
                print 'Could not print image name. ID ' + i.id
            with open(file_name,'wb') as f:
                f.write(requests.get(i.link).content)
            add_to_db(i.id, curr_date, db_file_path)
        current_page += 1
        if len(new_images_data) == 0 or count_images >= num:
            continue_download = False


def parse_arguments():
    parser = argparse.ArgumentParser()
    parser.add_argument('tags', nargs='+', default=['cat'], help='The tags the should be examined.')
    parser.add_argument('--db-file', dest='db_file_path', default=DEFAULT_DB_FILE_PATH, help='The path to the DB file contains the already downloaded pictures.')
    return parser.parse_args()


def main():
    args = parse_arguments()
    if not os.path.isfile(args.db_file_path):
        init_db(args.db_file_path)
    cats_dir = get_todays_dir()
    if not os.path.isdir(cats_dir):
        os.makedirs(cats_dir)
    imgur_config = get_config()
    for tag in args.tags:
        print 'Downloading images for tag ' + tag
        get_images_of_tag(imgur_config, tag, args.db_file_path, sort='time')


if __name__ == '__main__':
    main()
|
Python
| 0.000001
|
@@ -251,16 +251,60 @@
 ats2.db'
+
+DEFAULT_IMGUR_CONFIG_PATH = './config.json'
 
 ImageD
@@ -464,31 +464,41 @@
 ig_path=
-'./config.json'
+DEFAULT_IMGUR_CONFIG_PATH
 ):
     w
@@ -4954,24 +4954,207 @@
 pictures.')
+    parser.add_argument('--imgur-config', dest='imgur_config_path', default=DEFAULT_IMGUR_CONFIG_PATH, help='The path to the JSON file contains the imgur secret and application ID.')
 return p
@@ -5426,16 +5426,38 @@
 _config(
+args.imgur_config_path
 )
 fo
|
86f143863fd9f0786fe83a5038b970b4782306ce
|
Check table exist
|
erpnext/patches/v7_0/update_missing_employee_in_timesheet.py
|
erpnext/patches/v7_0/update_missing_employee_in_timesheet.py
|
from __future__ import unicode_literals
import frappe

def execute():
	timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
			tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
		from
			`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
		where
			tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
			and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
			and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)

	for data in timesheet:
		ts_doc = frappe.get_doc('Timesheet', data.name)
		if len(ts_doc.time_logs) == 1:
			frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
				owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
				employee = %(employee)s where name = %(name)s""", data)
|
Python
| 0
|
@@ -63,16 +63,57 @@
 cute():
+	if frappe.db.table_exists("Time Log"):
+	
 	timeshe
@@ -186,16 +186,17 @@
 ame,
+	
 tl.modif
@@ -292,14 +292,16 @@
 r
+	
 from
+	
 			`
@@ -362,16 +362,17 @@
 Log` tl
+	
 		where
@@ -375,16 +375,17 @@
 ere
+	
 tsd.pare
@@ -459,24 +459,25 @@
 to_time
+	
 and tl.hours
@@ -543,16 +543,17 @@
 .idx=1
+	
 			and t
@@ -632,16 +632,17 @@
 t=1)
+	
 	for dat
@@ -657,16 +657,17 @@
 esheet:
+	
 		ts_doc
@@ -708,16 +708,17 @@
 a.name)
+	
 		if len
@@ -745,16 +745,17 @@
 = 1:
+	
 frappe.d
@@ -811,24 +811,25 @@
 creation)s,
+	
 				owner =
@@ -891,24 +891,25 @@
 ified_by)s,
+	
 				employee
|
2d3b97e150f96ed4e7295d4e5b288951c3a7ee50
|
fix freq-check plug&play problem
|
catsnapshot/snapschedule.py
|
catsnapshot/snapschedule.py
|
import time,schedule,datetime
import signal,os,sys

task_list = list()
write_list = []
feqcheck_list = []

def clean_task_list():
    global task_list
    global write_list
    for i in range(len(task_list)):
        task_list[i] = None
    write_list = []

class schedule_sig_handler(object):
    def __init__(self,status):
        self.status = status
    def handler_func(self,signum,frame):
        if self.status == "working":
            self.status = "exit"
        else:
            sys.exit(0)

def schedule_loop(interval=1,scheduler=schedule.default_scheduler):
    sch_sig = schedule_sig_handler("idle")
    # Set the signal handler
    sigint_dh = signal.signal(signal.SIGINT,sch_sig.handler_func)
    sigterm_dh = signal.signal(signal.SIGTERM,sch_sig.handler_func)
    while True:
        sch_sig.status = "working"
        clean_task_list()
        scheduler.run_pending()
        # write snaplogs
        for need_write in write_list:
            need_write.logs.write(need_write.snaplog_file)
        # feqcheck
        schedule_feqcheck_work()
        if sch_sig.status == "exit": sys.exit(0)
        else: sch_sig.status = "idle"
        sys.stdout.flush()
        time.sleep(interval)
    # Set signal handler to default handler
    signal.signal(signal.SIGINT,sigint_dh)
    signal.signal(signal.SIGTERM,sigterm_dh)

def schedule_check_path(snapmang):
    if snapmang.check_path:
        for path in snapmang.check_path:
            if not os.path.exists(path):
                return False
    return True

def schedule_rerun(job):
    """Run the job , but not reschedule it."""
    job.job_func()
    job.last_run = datetime.datetime.now() # refresh last_run

def schedule_feqcheck_work():
    global feqcheck_list
    for snapmang in feqcheck_list:
        if snapmang.latest_undone!=None and schedule_check_path(snapmang):
            schedule_rerun(snapmang.latest_undone)

def schedule_work(snapmang,labels,index,job):
    global task_list
    global write_list
    # check-path
    if schedule_check_path(snapmang) is False:
        snapmang.latest_undone = job
        return
    if task_list[index] == None:
        task_list[index] = snapmang.snapshot(labels,auto_write=False)
        write_list += [snapmang] # add to write_list
    else: # if there is a snapshot in this time , just add label to it.(not take a new snapshot)
        for label in labels:
            task_list[index].labels.add(label)
        snapmang.limit_check()
    snapmang.latest_undone = None

def schedule_task(snapmang):
    """ add a task to the scheduler """
    global task_list
    global feqcheck_list
    if "schedule-time" in snapmang.configs:
        schedule_time = snapmang.configs["schedule-time"]
        # add to task_list
        task_list += [None]
        index = len(task_list)-1
        # init undone and add to feqcheck_list
        snapmang.latest_undone = None
        if "feqcheck" in snapmang.configs:
            if snapmang.configs["feqcheck"] == True:
                feqcheck_list += [snapmang]
        # schedule
        for unit in schedule_time:
            if unit in {"second","minute","hour","day"}:
                job = schedule.every(int(schedule_time[unit]))
                job.unit = unit+"s"
                if "schedule-labels" in snapmang.configs and\
                        unit in snapmang.configs["schedule-labels"]:
                    job.do(schedule_work,snapmang,
                           snapmang.configs["schedule-labels"][unit],index,job)
                else:
                    job.do(schedule_work,snapmang,["node"],index,job)
|
Python
| 0.000001
|
@@ -1871,24 +1871,63 @@
 (snapmang):
+    sleep(10) # waiting device
5ffc79ca3d431eaaa3795f8297790997082b49c8
|
fix bug of generate password
|
genpass/lib/person.py
|
genpass/lib/person.py
|
# coding=utf-8
from __future__ import print_function

import re
from itertools import product

from genpass.rules import combinations

__all__ = ['Person']


class Person(object):
    source_dict = {}

    def __init__(self, information=None, field_map=()):
        self.information = {} if information is None else information
        self.field_map = field_map

    @classmethod
    def generator_map(cls, data, formatter_list):
        '''generate passwords fragment by formatting function

        :param data: data will be formatted
        :param formatter_list: formatting function
        :return: strings list
        '''
        if not data:
            return set()
        result = set()
        for format_func in formatter_list:
            if not callable(format_func):
                raise TypeError('formatter is not callable')
            if not isinstance(data, (list, set, tuple)):
                data = [data]
            result.update(map(format_func, data))
        return result

    def generate_source_dict(self):
        '''generate source dictionary `source_dict`.

        `source_dict` is a dictionary which the value contains all
        the situation of provided.

        The map of generate `source_dict` named `field_map`.
        `field_map` is tuple.

        ```
        field_map = (
            ('test', None),
            (('test1', 'test'), built_in.generate_formats),
            ('test2', built_in.generate_formats, generate.custom_function),
        )
        ```

        The first element of tuple is the field name, and if the first element
        is tuple, then the first element of the tuple is the field name from
        user information, the second element is a alias name in the `source_dict`.

        The second element of tuple is the rules for formatting and transferring
        the data which user provided, which is a sequence of lambda.

        The third element of tuple is the custom formatting function. The function
        has 2 parameters. First of the parameters is the data of user information,
        and the type is a list. The second of the parameters is the rule. The function
        returned a set of password fragment.

        A sample of custom formatting function is:
        ```
        def custom_generator(data, rule):
            return set(data)
        ```

        :return: None
        '''
        source_dict = {}
        for row in self.field_map:
            if len(row) == 3:
                field, rule, method = row
            elif len(row) == 2:
                field, rule = row
                method = None
            else:
                raise ValueError('Invalid map')
            if isinstance(field, tuple):
                field, alias = field
            else:
                alias = field

            if not rule and not method:
                returned = self.information.get(field, set())
            elif rule and not method:
                returned = self.generator_map(self.information.get(field, set()), rule)
            elif method:
                if not callable(method):
                    raise TypeError('Process function is not callable')
                returned = method(self.information.get(field, []), rule)
                if not isinstance(returned, set):
                    raise TypeError('UDF returned value should be a set.')
            else:
                returned = []
            if alias in source_dict:
                source_dict[alias].update(returned)
            else:
                source_dict[alias] = returned

        for key, value in source_dict.iteritems():
            if value:
                self.source_dict[key] = value

    def combined_zip(self, dependence):
        '''A generator yield passwords by combining the password fragment.

        `dependence` is a list of field. The function will combine the values of
        each key in the `source_dict`, while the key provided by `dependence`.

        :param dependence:
        :return:
        '''
        for res in (zip(dependence.keys(), i) for i in product(*dependence.itervalues())):
            yield dict(res)

    def generate_password(self):
        '''A generator yield passwords

        :return:
        '''
        self.generate_source_dict()
        match_keys = re.compile('\{(%s)\}' % '|'.join(self.information.keys()))
        for rule in combinations.rules:
            dependent_keys = filter(lambda x: x if x in self.information.keys() else False, match_keys.findall(rule))
            if all(map(lambda x: x in self.source_dict.keys(), dependent_keys)):
                dependence = {i: self.source_dict[i] for i in dependent_keys}
                for i in self.combined_zip(dependence):
                    yield rule.format(**i)

    def generate_password_with_dict(self):
        pass

    def __str__(self):
        return '<Person object>'
|
Python
| 0.000003
|
@@ -4592,16 +4592,35 @@
 t_keys))
+ and dependent_keys
 :
|
14bc45b6447424da4c84f84c40f2d897198c73ab
|
use proper end boundary
|
custom/icds_reports/utils/aggregation_helpers/aww_incentive.py
|
custom/icds_reports/utils/aggregation_helpers/aww_incentive.py
|
from __future__ import absolute_import
from __future__ import unicode_literals

from corehq.apps.userreports.models import StaticDataSourceConfiguration, get_datasource_config
from corehq.apps.userreports.util import get_table_name
from custom.icds_reports.const import AWW_INCENTIVE_TABLE
from custom.icds_reports.utils.aggregation_helpers import BaseICDSAggregationHelper, month_formatter


class AwwIncentiveAggregationHelper(BaseICDSAggregationHelper):
    aggregate_parent_table = AWW_INCENTIVE_TABLE
    aggregate_child_table_prefix = 'icds_db_aww_incentive_'

    @property
    def ccs_record_case_ucr_tablename(self):
        doc_id = StaticDataSourceConfiguration.get_doc_id(self.domain, 'static-ccs_record_cases')
        config, _ = get_datasource_config(doc_id, self.domain)
        return get_table_name(self.domain, config.table_id)

    def aggregation_query(self):
        month = self.month.replace(day=1)
        tablename = self.generate_child_tablename(month)

        query_params = {
            "month": month_formatter(month),
            "state_id": self.state_id
        }

        return """
        INSERT INTO "{tablename}" (
            state_id, month, awc_id, block_id, state_name, district_name, block_name,
            supervisor_name, awc_name, aww_name, contact_phone_number, wer_weighed,
            wer_eligible, awc_num_open, valid_visits, expected_visits
        ) (
            SELECT
                %(state_id)s AS state_id,
                %(month)s AS month,
                awcm.awc_id,
                awcm.block_id,
                awcm.state_name,
                awcm.district_name,
                awcm.block_name,
                awcm.supervisor_name,
                awcm.awc_name,
                awcm.aww_name,
                awcm.contact_phone_number,
                awcm.wer_weighed,
                awcm.wer_eligible,
                awcm.awc_days_open,
                sum(ccsm.valid_visits),
                sum(ccsm.expected_visits)
            FROM agg_awc_monthly as awcm
            INNER JOIN agg_ccs_record_monthly AS ccsm
                ON ccsm.month=awcm.month AND ccsm.awc_id=awcm.awc_id AND ccsm.aggregation_level=awcm.aggregation_level
            WHERE awcm.month = %(month)s AND awcm.state_id = %(state_id)s and awcm.aggregation_level=5
            GROUP BY awcm.awc_id, awcm.block_id, awcm.state_name, awcm.district_name,
                awcm.block_name, awcm.supervisor_name, awcm.awc_name, awcm.aww_name,
                awcm.contact_phone_number, awcm.wer_weighed, awcm.wer_eligible,
                awcm.awc_days_open
        );
        /* update expected visits for cf cases (not in agg_ccs_record */
        UPDATE {tablename} perf
        SET expected_visits = expected_visits + ucr.expected
        FROM (
            SELECT SUM(0.39) AS expected, awc_id
            FROM {ccs_record_case_ucr}
            WHERE %(month)s - add > 183 AND (closed_on IS NULL OR date_trunc('month', closed_on)::DATE >= %(month)s)
            GROUP BY awc_id
        ) ucr
        WHERE ucr.awc_id = perf.awc_id
        """.format(
            tablename=tablename,
            ccs_record_case_ucr=self.ccs_record_case_ucr_tablename
        ), query_params
|
Python
| 0.000083
|
@@ -2914,17 +2914,16 @@
 ::DATE >
-=
 %(month
|
f04a7d6ac961f1ebb5ac065c8e87d7b45119c288
|
Add an integration test for charmhub.list_resources
|
tests/integration/test_charmhub.py
|
tests/integration/test_charmhub.py
|
import pytest

from .. import base
from juju.errors import JujuAPIError, JujuError
from juju import jasyncio


@base.bootstrapped
@pytest.mark.asyncio
async def test_info(event_loop):
    async with base.CleanModel() as model:
        _, name = await model.charmhub.get_charm_id("hello-juju")
        assert name == "hello-juju"


@base.bootstrapped
@pytest.mark.asyncio
@pytest.mark.skip('CharmHub facade no longer exists')
async def test_info_with_channel(event_loop):
    async with base.CleanModel() as model:
        result = await model.charmhub.info("hello-juju", "latest/stable")
        assert result.result.name == "hello-juju"
        assert "latest/stable" in result.result.channel_map


@base.bootstrapped
@pytest.mark.asyncio
@pytest.mark.skip('CharmHub facade no longer exists')
async def test_info_not_found(event_loop):
    async with base.CleanModel() as model:
        try:
            await model.charmhub.info("badnameforapp")
        except JujuAPIError as e:
            assert e.message == "badnameforapp not found"
        else:
            assert False


@base.bootstrapped
@pytest.mark.asyncio
@pytest.mark.skip('CharmHub facade no longer exists')
async def test_find(event_loop):
    async with base.CleanModel() as model:
        result = await model.charmhub.find("kube")
        assert len(result.result) > 0
        for resp in result.result:
            assert resp.name != ""
            assert resp.type_ in ["charm", "bundle"]


@base.bootstrapped
@pytest.mark.asyncio
@pytest.mark.skip('CharmHub facade no longer exists')
async def test_find_bundles(event_loop):
    async with base.CleanModel() as model:
        result = await model.charmhub.find("kube", charm_type="bundle")
        assert len(result.result) > 0
        for resp in result.result:
            assert resp.name != ""
            assert resp.type_ in ["bundle"]


@base.bootstrapped
@pytest.mark.asyncio
@pytest.mark.skip('CharmHub facade no longer exists')
async def test_find_all(event_loop):
    async with base.CleanModel() as model:
        result = await model.charmhub.find("")
        assert len(result.result) > 0
        for resp in result.result:
            assert resp.name != ""
            assert resp.type_ in ["charm", "bundle"]


@base.bootstrapped
@pytest.mark.asyncio
async def test_subordinate_charm_zero_units(event_loop):
    # normally in pylibjuju deploy num_units defaults to 1, we switch
    # that to 0 behind the scenes if we see that the charmhub charm
    # we're deploying is a subordinate charm
    async with base.CleanModel() as model:
        # rsyslog-forwarder-ha is a subordinate charm
        app = await model.deploy('rsyslog-forwarder-ha')
        await jasyncio.sleep(5)
        assert len(app.units) == 0
        await app.destroy()
        await jasyncio.sleep(5)

        # note that it'll error if the user tries to use num_units
        with pytest.raises(JujuError):
            await model.deploy('rsyslog-forwarder-ha', num_units=175)

        # (full disclosure: it'll quitely switch to 0 if user enters
        # num_units=1, instead of erroring)
        app2 = await model.deploy('rsyslog-forwarder-ha', num_units=1)
        await jasyncio.sleep(5)
        assert len(app2.units) == 0
|
Python
| 0
|
@@ -3231,20 +3231,280 @@
 en(app2.units) == 0
+
+
+@base.bootstrapped
+@pytest.mark.asyncio
+async def test_list_resources(event_loop):
+    async with base.CleanModel() as model:
+        resources = await model.charmhub.list_resources('postgresql')
+        assert type(resources) == list and len(resources) > 0
|
ff03fb51d751c3dc820c2aee9b8ba2a7c45f1f0b
|
Split up assertions in `user_create` test #580
|
tests/lib/user/test_create_user.py
|
tests/lib/user/test_create_user.py
|
import dataclasses
from datetime import date

import pytest

from pycroft.lib.user import create_user
from tests import factories

from .assertions import assert_account_name, assert_membership_groups, assert_logmessage_startswith


@dataclasses.dataclass
class UserData:
    name: str
    login: str
    email: str
    mac: str
    birthdate: date


class TestUserCreation:
    @pytest.fixture(scope="class")
    def member_group(self, class_session):
        return factories.property.MemberPropertyGroupFactory.create()

    @pytest.fixture(scope="class")
    def room(self, class_session):
        return factories.RoomFactory.create(patched_with_subnet=True)

    @pytest.fixture(scope="class")
    def user_data(self) -> UserData:
        return UserData(
            name="Hans",
            login="hans66",
            email="hans@hans.de",
            mac="12:11:11:11:11:11",
            birthdate=date.fromisoformat("1990-01-01"),
        )

    @pytest.fixture(scope="class")
    def user_mail_capture(self):
        # TODO actually test whether mails are sent out correctly instead of mocking
        # mocking is only done because we don't test for the mails anyway
        from unittest.mock import patch

        with patch("pycroft.lib.user.user_send_mails") as p:
            yield p

    @pytest.fixture(scope="class", autouse=True)
    def new_user(self, class_session, user_data, room, processor, member_group, user_mail_capture):
        new_user, _ = create_user(
            user_data.name,
            user_data.login,
            user_data.email,
            user_data.birthdate,
            processor=processor,
            groups=(member_group,),
            address=room.address,
        )
        return new_user

    def test_user_create(self, new_user, user_data, member_group, user_mail_capture):
        # needs: new_user, self.user (the initiating data),
        # self.config.member_group
        assert new_user.name == user_data.name
        assert new_user.login == user_data.login
        assert new_user.email == user_data.email
        # TODO fix signature and check for explicitly supplied address.
        # assert new_user.address == config.dummy_address
        assert_account_name(new_user.account, f"User {new_user.id}")
        assert_membership_groups(new_user.active_memberships(), [member_group])
        assert new_user.unix_account.home_directory == f"/home/{new_user.login}"

        assert len(new_user.log_entries) == 2
        first, second = new_user.log_entries
        assert_logmessage_startswith(first, "Added to group Mitglied")
        assert_logmessage_startswith(second, "User created")

        assert new_user.account is not None
        assert new_user.account.balance == 0
        user_mail_capture.assert_called()
|
Python
| 0.002351
|
@@ -1753,22 +1753,25 @@
 st_user_
-create
+base_data
 (self, n
@@ -1794,136 +1794,14 @@
 ta, 
-member_group, user_mail_capture):
-        # needs: new_user, self.user (the initiating data),
-        # self.config.member_group
+room):
@@ -1941,16 +1941,77 @@
 ta.email
+
+    def test_user_address(self, new_user, user_data, room):
@@ -2036,11 +2036,33 @@
 ure 
-and
+of `create_user` and also
 che
@@ -2104,18 +2104,16 @@
-        # 
 assert
@@ -2136,21 +2136,13 @@
 == 
-config.dummy_
+room.
 addr
@@ -2163,62 +2163,102 @@
 sert
-_account_name(new_user.account, f"User {new_user.id}")
+ not new_user.has_custom_address
+
+    def test_user_memberships(self, new_user, member_group):
@@ -2333,16 +2333,60 @@
 _group])
+
+    def test_unix_account(self, new_user):
@@ -2458,16 +2458,59 @@
 .login}"
+
+    def test_log_entries(self, new_user):
@@ -2720,24 +2720,140 @@
 er created")
+
+    def test_finance_account(self, new_user):
+        assert_account_name(new_user.account, f"User {new_user.id}")
@@ -2929,16 +2929,70 @@
 nce == 0
+
+    def test_one_mail_sent(self, user_mail_capture):
|
f2060ccfb3aecceef027031a42e0f8367d3c2618
|
Handle the corner case when there are no user inputs
|
emission/analysis/classification/inference/labels/inferrers.py
|
emission/analysis/classification/inference/labels/inferrers.py
|
# This file encapsulates the various prediction algorithms that take a trip and return a label data structure
# Named "inferrers.py" instead of "predictors.py" to avoid a name collection in our abbreviated import convention

import logging
import random

import emission.analysis.modelling.tour_model.load_predict as lp

# A set of placeholder predictors to allow pipeline development without a real inference algorithm.
# For the moment, the system is configured to work with two labels, "mode_confirm" and
# "purpose_confirm", so I'll do that.

# The first placeholder scenario represents a case where it is hard to distinguish between
# biking and walking (e.g., because the user is a very slow biker) and hard to distinguish
# between work and shopping at the grocery store (e.g., because the user works at the
# grocery store), but whenever the user bikes to the location it is to work and whenever
# the user walks to the location it is to shop (e.g., because they don't have a basket on
# their bike), and the user bikes to the location four times more than they walk there.
# Obviously, it is a simplification.
def placeholder_predictor_0(trip):
    return [
        {"labels": {"mode_confirm": "bike", "purpose_confirm": "work"}, "p": 0.8},
        {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.2}
    ]


# The next placeholder scenario provides that same set of labels in 75% of cases and no
# labels in the rest.
def placeholder_predictor_1(trip):
    return [
        {"labels": {"mode_confirm": "bike", "purpose_confirm": "work"}, "p": 0.8},
        {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.2}
    ] if random.random() > 0.25 else []


# This third scenario provides labels designed to test the soundness and resilience of
# the client-side inference processing algorithms.
def placeholder_predictor_2(trip):
    # Timestamp2index gives us a deterministic way to match test trips with labels
    # Hardcoded to match "test_july_22" -- clearly, this is just for testing
    timestamp2index = {494: 5, 565: 4, 795: 3, 805: 2, 880: 1, 960: 0}
    timestamp = trip["data"]["start_local_dt"]["hour"]*60+trip["data"]["start_local_dt"]["minute"]
    index = timestamp2index[timestamp] if timestamp in timestamp2index else 0

    return [
        [
        ],
        [
            {"labels": {"mode_confirm": "bike", "purpose_confirm": "work"}, "p": 0.8},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.2}
        ],
        [
            {"labels": {"mode_confirm": "drove_alone"}, "p": 0.8},
        ],
        [
            {"labels": {"mode_confirm": "bike", "purpose_confirm": "work"}, "p": 0.8},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.2}
        ],
        [
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.45},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "entertainment"}, "p": 0.35},
            {"labels": {"mode_confirm": "drove_alone", "purpose_confirm": "work"}, "p": 0.15},
            {"labels": {"mode_confirm": "shared_ride", "purpose_confirm": "work"}, "p": 0.05}
        ],
        [
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.45},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "entertainment"}, "p": 0.35},
            {"labels": {"mode_confirm": "drove_alone", "purpose_confirm": "work"}, "p": 0.15},
            {"labels": {"mode_confirm": "shared_ride", "purpose_confirm": "work"}, "p": 0.05}
        ]
    ][index]


# This fourth scenario provides labels designed to test the expectation and notification system.
def placeholder_predictor_3(trip):
    timestamp2index = {494: 5, 565: 4, 795: 3, 805: 2, 880: 1, 960: 0}
    timestamp = trip["data"]["start_local_dt"]["hour"]*60+trip["data"]["start_local_dt"]["minute"]
    index = timestamp2index[timestamp] if timestamp in timestamp2index else 0

    return [
        [
            {"labels": {"mode_confirm": "bike", "purpose_confirm": "work"}, "p": 0.80},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.20}
        ],
        [
            {"labels": {"mode_confirm": "bike", "purpose_confirm": "work"}, "p": 0.80},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.20}
        ],
        [
            {"labels": {"mode_confirm": "drove_alone", "purpose_confirm": "entertainment"}, "p": 0.70},
        ],
        [
            {"labels": {"mode_confirm": "bike", "purpose_confirm": "work"}, "p": 0.96},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.04}
        ],
        [
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.45},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "entertainment"}, "p": 0.35},
            {"labels": {"mode_confirm": "drove_alone", "purpose_confirm": "work"}, "p": 0.15},
            {"labels": {"mode_confirm": "shared_ride", "purpose_confirm": "work"}, "p": 0.05}
        ],
        [
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "shopping"}, "p": 0.60},
            {"labels": {"mode_confirm": "walk", "purpose_confirm": "entertainment"}, "p": 0.25},
            {"labels": {"mode_confirm": "drove_alone", "purpose_confirm": "work"}, "p": 0.11},
            {"labels": {"mode_confirm": "shared_ride", "purpose_confirm": "work"}, "p": 0.04}
        ]
    ][index]


# Placeholder that is suitable for a demo.
# Finds all unique label combinations for this user and picks one randomly
def placeholder_predictor_demo(trip):
    import random

    import emission.core.get_database as edb
    user = trip["user_id"]
    unique_user_inputs = edb.get_analysis_timeseries_db().find({"user_id": user}).distinct("data.user_input")
    random_user_input = random.choice(unique_user_inputs) if random.randrange(0,10) > 0 else []

    logging.debug(f"In placeholder_predictor_demo: ound {len(unique_user_inputs)} for user {user}, returning value {random_user_input}")
    return [{"labels": random_user_input, "p": random.random()}]


# Non-placeholder implementation. First bins the trips, and then clusters every bin
# See emission.analysis.modelling.tour_model for more details
# Assumes that pre-built models are stored in working directory
# Models are built using evaluation_pipeline.py and build_save_model.py
def predict_two_stage_bin_cluster(trip):
    return lp.predict_labels(trip)
|
Python
| 0.998659
|
@@ -5904,16 +5904,71 @@
 input")
+    if len(unique_user_inputs) == 0:
+        return []
 rand
|
b8c05a7ea6abefa3014f8703864031876c211679
|
Add link for total malaria cases for year 2015 for Indonesia
|
src/data/download_scripts/ID_malaria_down.py
|
src/data/download_scripts/ID_malaria_down.py
|
# This script downloads yearly malaria statistics from data.go.id
# It uses urllib and is compatible with both Python 2 and 3

import os
import sys
import logging #logs what goes on

DIRECTORY = '../../Data/raw/disease_ID'
OUTFILE = "yearly-malaria.csv"
URL = "http://data.go.id/dataset/cef9b348-91a9-4270-be1d-3cf64eb9d5b0/resource/42f31bb0-af59-4c96-9a74-db3283f9e316/download/kasusmalaria.csv"

logger = logging.getLogger(__name__)


def download():
    # compatibility check between python 2 and 3
    if sys.version_info < (3, 0):
        # for python 2, use this
        try:
            os.makedirs(DIRECTORY)
        except OSError as e:
            pass
        import urllib as downloader
        from urllib2 import URLError, HTTPError
    else:
        # for python 3, use this
        os.makedirs(DIRECTORY, exist_ok=True)
        import urllib.request as downloader
        from urllib.error import URLError, HTTPError

    output_path = os.path.join(DIRECTORY, OUTFILE)

    # now retrieve the file
    try:
        downloader.urlretrieve(URL, output_path)
        logger.info('Downloaded successfully to %s', os.path.abspath(output_path))
    except (HTTPError, URLError) as e:
        logger.error('Failed to download: %s', e.reason)


if __name__ == "__main__":
    DIRECTORY = '../../../Data/raw/disease_ID'
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    download()
|
Python
| 0
|
@@ -390,16 +390,206 @@
 ia.csv"
+URL2015 = "http://data.go.id/dataset/cef9b348-91a9-4270-be1d-3cf64eb9d5b0/resource/2965b760-0f7f-4bd7-9dbe-8d261729e12f/download/jumlahkasusangkakesakitanmalariaper1000pendudukberisiko.xlsx"
 
 logger
|
193cc8025910b92f764e6e1339ce2ec213b20cc5
|
Fix duck punching unit test.
|
tests/qtcore/duck_punching_test.py
|
tests/qtcore/duck_punching_test.py
|
#!/usr/bin/python
'''Test case for duck punching new implementations of C++ virtual methods into object instances.'''

import unittest
import types

from PySide.QtCore import QObject, QEvent
from helper import UsesQCoreApplication


class Duck(QObject):
    def __init__(self):
        QObject.__init__(self)

    def childEvent(self, event):
        QObject.childEvent(self, event)


class TestDuckPunchingOnQObjectInstance(UsesQCoreApplication):
    '''Test case for duck punching new implementations of C++ virtual methods into object instances.'''

    def setUp(self):
        #Acquire resources
        self.duck_childEvent_called = False
        UsesQCoreApplication.setUp(self)

    def tearDown(self):
        #Release resources
        del self.duck_childEvent_called
        UsesQCoreApplication.tearDown(self)

    def testChildEventMonkeyPatch(self):
        #Test if the new childEvent injected on QObject instance is called from C++
        parent = QObject()

        def childEvent(obj, event):
            self.duck_childEvent_called = True
            QObject.childEvent(obj, event)

        parent.event = types.MethodType(childEvent, parent, QObject)
        child = QObject()
        child.setParent(parent)
        self.assert_(self.duck_childEvent_called)

    def testChildEventMonkeyPatchWithInheritance(self):
        #Test if the new childEvent injected on a QObject's extension class instance is called from C++
        parent = Duck()

        def childEvent(obj, event):
            QObject.childEvent(obj, event)
            self.duck_childEvent_called = True

        child = QObject()
        child.setParent(parent)
        parent.event = types.MethodType(childEvent, parent, QObject)
        child = QObject()
        child.setParent(parent)
        self.assert_(self.duck_childEvent_called)


if __name__ == '__main__':
    unittest.main()
|
Python
| 0
|
@@ -1099,33 +1099,38 @@
 parent.
-e
+childE
 vent = types.Met
@@ -1658,17 +1658,22 @@
 parent.
-e
+childE
 vent = t
|
f7f754c4280858305c42699c46dccdfb939e01fb
|
allow setting which plugins should be used via config
|
voltverine/app.py
|
voltverine/app.py
|
import argparse
import inspect
import logging
import os
import sys

import yaml

import voltverine.plugins
import voltverine.actions

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

_PLUGIN_BLACKLIST = ['BasePlugin', 'BaseDbusPlugin']
_DEFAULT_CONFIG = {'action': 'LogindPoweroff'}


class VoltverineApp(object):
    def __init__(self):
        self._parse_args()
        if self.args.verbose:
            # get the "root" logger and set it to DEBUG
            logging.getLogger().setLevel(logging.DEBUG)
        self._parse_config()
        self._find_plugins()
        self._find_action()

    def _parse_args(self):
        parser = argparse.ArgumentParser(description='maybe shutdown the machine')
        parser.add_argument('-d', '--daemonize', action='store_true')
        parser.add_argument('-f', '--foreground', action='store_true')
        parser.add_argument('-n', '--dry-run', action='store_true')
        parser.add_argument('-v', '--verbose', action='store_true')
        parser.add_argument('-c', '--config', action='store')
        parser.add_argument('-a', '--all-plugins', action='store_true')
        parser.add_argument('--version', action='version', version='%(prog)s 0.1.0')
        self.args = parser.parse_args()

    def _parse_config(self):
        self.config = _DEFAULT_CONFIG
        if not self.args.config:
            for f in ['voltverine.conf', os.path.expanduser('~/.config/voltverine/voltverine.conf'), '/etc/voltverine/voltverine.conf']:
                if os.path.isfile(f) and os.access(f, os.R_OK):
                    self.args.config = f
                    break
        elif not (os.path.isfile(self.args.config) and os.access(self.args.config, os.R_OK)):
            logger.error("The configuration file is not readable, exiting.")
            sys.exit(1)
        if self.args.config:
            with open(self.args.config) as configfile:
                self.config.update(yaml.safe_load(configfile))
        _args_config = {
            'daemonize': self.args.daemonize,
            'foreground': self.args.foreground,
            'dry_run': self.args.dry_run,
            'verbose': self.args.verbose,
            'all_plugins': self.args.all_plugins,
        }
        self.config.update(_args_config)

    def _find_plugins(self):
        self._plugins = inspect.getmembers(voltverine.plugins,
                                           lambda x: inspect.isclass(x) and
                                           x.__name__ not in _PLUGIN_BLACKLIST)

    def _find_action(self):
        if hasattr(voltverine.actions, self.config['action']):
            self._action = getattr(voltverine.actions, self.config['action'])()
        else:
            logger.error("Could not find defined action %s, exiting", self.config['action'])
            sys.exit(1)

    def run(self):
        if self.args.daemonize:
            # do something to run self._run() as a daemon
            pass
        elif self.args.foreground:
            # do something to run self._run() in a loop in foreground
            pass
        else:
            self._run()

    def _run(self):
        results = {voltverine.plugins.NOT_OK: 0, voltverine.plugins.OK: 0, voltverine.plugins.DUNNO: 0}
        for plugin in self._plugins:
            logger.debug("Trying %s", plugin[0])
            pobj = plugin[1]()
            (result, info) = pobj.analyze()
            if result is voltverine.plugins.NOT_OK and not self.args.all_plugins:
                logger.info("%s decided we cannot shutdown now, skipping the other plugins", plugin[0])
                return
            logger.info((result, info))
            results[result] += 1
        if results[voltverine.plugins.NOT_OK] > 0:
            logger.debug("plugins decided not to take action")
        elif results[voltverine.plugins.OK] > 0:
            logger.debug("executing action")
            if not self.args.dry_run:
                self._action.execute()
        else:
            logger.info("nobody said it is ok to execute an action")
|
Python
| 0
|
@@ -301,16 +301,31 @@
 oweroff'
+, 'plugins': []
 }

 clas
@@ -2333,21 +2333,21 @@
 
-self.
+found
 _plugins
@@ -2543,16 +2543,203 @@
 ACKLIST)
+
+        if self.config['plugins']:
+            self._plugins = [p for p in found_plugins if p[0] in self.config['plugins'].keys()]
+        else:
+            self._plugins = found_plugins
 
 de
|
b95c5dfad2956eca7b0891d3692c140a54b9db84
|
Fix tests.
|
tests/test_google_image_handler.py
|
tests/test_google_image_handler.py
|
# -*- coding: utf-8 -*-
"""
    robo.tests.test_handler_google_image
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Tests for robo.handlers.google_image

    :copyright: (c) 2015 Shinya Ohyanagi, All rights reserved.
    :license: BSD, see LICENSE for more details.
"""
import os
import logging
import requests
from mock import patch
from unittest import TestCase
from robo.robot import Robot
from robo.handlers.google_image import Client, GoogleImage


def dummy_response(m, filename=None):
    response = requests.Response()
    response.status_code = 200
    if filename is None:
        response._content = ''
    else:
        root_path = os.path.dirname(os.path.abspath(__file__))
        file_path = os.path.join(root_path, filename)
        with open(file_path, 'r') as f:
            data = f.read()
            response._content = data

    m.return_value = response


class NullAdapter(object):
    def __init__(self, signal):
        self.signal = signal
        self.responses = []

    def say(self, message, **kwargs):
        self.responses.append(message)
        return message


class TestClient(TestCase):
    @classmethod
    def setUpClass(cls):
        os.environ['ROBO_GOOGLE_CSE_KEY'] = 'foo'
        os.environ['ROBO_GOOGLE_CSE_ID'] = 'bar'
        cls.client = Client()

    @patch('robo.handlers.google_image.requests.get')
    def test_generate_url(self, m):
        """ Client().generate() should generate google search url. """
        dummy_response(m, 'fixture.json')
        ret = self.client.generate('cat')
        self.assertTrue(ret.startswith('http://'))

    @patch('robo.handlers.google_image.requests.get')
    def test_search_resource(self, m):
        """ Client().search_resource() should search from Google. """
        dummy_response(m, 'fixture.json')
        ret = self.client.search_resource('cat')
        self.assertTrue(isinstance(ret, dict))
        self.assertTrue('unescapedUrl' in ret)


class TestGoogleImageHandler(TestCase):
    @classmethod
    def setUpClass(cls):
        logger = logging.getLogger('robo')
        logger.level = logging.ERROR
        cls.robot = Robot('test', logger)

        os.environ['ROBO_GOOGLE_CSE_KEY'] = 'foo'
        os.environ['ROBO_GOOGLE_CSE_ID'] = 'bar'
        client = GoogleImage()
        client.signal = cls.robot.handler_signal
        method = cls.robot.parse_handler_methods(client)
        cls.robot.handlers.extend(method)

        adapter = NullAdapter(cls.robot.handler_signal)
        cls.robot.adapters['null'] = adapter

    @patch('robo.handlers.google_image.requests.get')
    def test_should_google_image(self, m):
        """ GoogleImage().get() should search google. """
        dummy_response(m, 'fixture.json')
        self.robot.handler_signal.send('test image cat')
        import sys
        if sys.version_info[0] == 2:
            self.assertRegexpMatches(self.robot.adapters['null'].responses[0],
                                     r'^(http|https)://*')
        else:
            self.assertRegex(self.robot.adapters['null'].responses[0],
                             r'^(http|https)://*')
        self.robot.adapters['null'].responses = []
|
Python
| 0
|
@@ -1576,19 +1576,16 @@
 th('http
-://
 '))
@@ -1911,20 +1911,12 @@
 ue('
-unescapedUrl
+link
 ' in
|
3d237a6bf3a3dff684e08496f800a8957a9e3352
|
Fix pep error.
|
hr_contract_hourly_rate/models/hr_hourly_rate_class.py
|
hr_contract_hourly_rate/models/hr_hourly_rate_class.py
|
# -*- coding:utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved.
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as published
#    by the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import models, fields, api, exceptions, _
from itertools import permutations


class hr_hourly_rate_class(models.Model):
    _name = 'hr.hourly.rate.class'
    _description = 'Hourly rate class'

    name = fields.Char(string='Class Name', required=True, index=True)
    line_ids = fields.One2many('hr.hourly.rate',
                               'class_id',
                               string='Hourly Rates')
    contract_job_ids = fields.One2many('hr.contract.job',
                                       'hourly_rate_class_id',
                                       string='Contract Jobs')

    @api.model
    @api.constrains('line_ids')
    def _check_overlapping_rates(self):
        """
        Checks if a class has two rates that overlap in time.
        """
        for hourly_rate_class in self:
            for r1, r2 in permutations(hourly_rate_class.line_ids, 2):
                if r1.date_end and (r1.date_start <= r2.date_start <= r1.date_end):
                    raise exceptions.Warning(
                        _("Error! You cannot have overlapping rates"))
                elif not r1.date_end and (r1.date_start <= r2.date_start):
                    raise exceptions.Warning(
                        _("Error! You cannot have overlapping rates"))

        return True
|
Python
| 0.000001
|
@@ -1852,32 +1852,53 @@
 r1.date_end and 
+\
+                    
 (r1.date_start <
|
fa00db22832d43cfb4ed5b79be32e31153a5e570
|
Include new app
|
core/settings.py
|
core/settings.py
|
"""
Django settings for core project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'fy5#xmxaf&@-30c_nm)0te&@=-g9y+45i6r03+%2(1q@vfztr_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
SITE_ID = 1
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.flatpages',
'polls',
'login',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
)
ROOT_URLCONF = 'core.urls'
WSGI_APPLICATION = 'core.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
os.path.join(BASE_DIR, 'templates/core'),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Configure Templates
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'debug': DEBUG,
},
},
]
# Needed for login
import django.contrib.auth
django.contrib.auth.LOGIN_URL = '/'
|
Python
| 0
|
@@ -1042,24 +1042,73 @@
.flatpages',
+%0A%0A # 3rd Party%0A%0A # Own Apps%0A 'fineants',
%0A 'polls'
|
f5fd631cb4571930cf71513db2622861fb4fcc39
|
simplify put requests
|
lib/test_util.py
|
lib/test_util.py
|
import hashlib
from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient, HTTPRequest
from newebe.settings import TORNADO_PORT
from newebe.profile.models import UserManager, User
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
class NewebeClient(HTTPClient):
'''
Tornado client wrapper to write requests to Newebe faster.
'''
def login(self, password):
'''
Grab authentication cookie from login request.
'''
response = self.fetch(self.root_url + "login/json/",
method="POST", body='{"password":"%s"}' % password)
assert response.headers["Set-Cookie"].startswith("password=")
self.cookie = response.headers["Set-Cookie"]
def set_default_user(self, url=ROOT_URL):
'''
Set to DB default user. This is useful for automatic login.
'''
self.root_url = url
user = UserManager.getUser()
if user:
user.delete()
user = User(
name = "John Doe",
password = hashlib.sha224("password").hexdigest(),
key = None,
authorKey = "authorKey",
url = url,
description = "my description"
)
user.save()
def get(self, url):
'''
Perform a GET request.
'''
request = HTTPRequest(url)
if hasattr(self, "cookie") and self.cookie:
request.headers["Cookie"] = self.cookie
return HTTPClient.fetch(self, request)
def post(self, url, body):
'''
Perform a POST request.
'''
request = HTTPRequest(url, method="POST", body=body)
if hasattr(self, "cookie") and self.cookie:
request.headers["Cookie"] = self.cookie
return HTTPClient.fetch(self, request)
def put(self, url, body):
'''
Perform a PUT request.
'''
request = HTTPRequest(url, method="PUT", body=body)
if hasattr(self, "cookie") and self.cookie:
request.headers["Cookie"] = self.cookie
return HTTPClient.fetch(self, request)
def delete(self, url):
'''
Perform a DELETE request.
'''
request = HTTPRequest(url, method="DELETE")
if self.cookie:
request.headers["Cookie"] = self.cookie
return HTTPClient.fetch(self, request)
def fetch_document_from_url(self, url):
'''
Retrieve newebe document from a given url
'''
response = self.get(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
return json_decode(response.body)
def fetch_documents_from_url(self, url):
'''
Retrieve newebe document list from a given url
'''
response = self.get(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_document(self, path):
'''
Retrieve document from path located on localhost server.
'''
return self.fetch_document_from_url(self.root_url + path)
def fetch_documents(self, path):
'''
Retrieve document list from path located on localhost server.
'''
return self.fetch_documents_from_url(self.root_url + path)
|
Python
| 0.000002
|
@@ -1938,32 +1938,127 @@
t.%0A '''%0A%0A
+ if hasattr(self, %22root_url%22) and self.root_url:%0A url = self.root_url + url%0A%0A
request
@@ -2197,32 +2197,45 @@
%5D = self.cookie%0A
+ %0A
return H
|
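The simplification here, boiled down to a standalone sketch (HTTP plumbing omitted; the class name and URLs are illustrative): relative paths are resolved against a stored root_url in one place, with the same hasattr guard the patched put() uses.

class Client(object):
    def __init__(self, root_url=None):
        self.root_url = root_url

    def _resolve(self, url):
        # Only prefix when a root_url was actually configured.
        if hasattr(self, "root_url") and self.root_url:
            return self.root_url + url
        return url

client = Client("http://localhost:8888/")
print(client._resolve("news/news/"))  # http://localhost:8888/news/news/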
469b20ebd9d9287c8b956555374523cf083eb11a
|
create optional argument group and change --bucket-id argument to --bucket-name
|
aws_ir/cli.py
|
aws_ir/cli.py
|
#!/usr/bin/env python
import datetime
import pprint
import sys
import argparse
import logging
import json
#Add the AWS_IR Object
import aws_ir
#Support for multiple incident plans coming soon
from plans import key
from plans import host
"""Basic arg parser for AWS_IR cli"""
class cli():
def __init__(self):
self.config = None
self.prog = sys.argv[0]
"""Throw an error on missing modules"""
def module_missing(self, module_name):
try:
__import__(module_name)
except ImportError as e:
return True
else:
return False
"""Parent parser for top level flags"""
def parse_args(self, args):
parser = argparse.ArgumentParser(
description="""
Incident Response command line for Amazon Web Services.
This command line interface is designed to process host and key
based incursions without delay or error.
"""
)
parser.add_argument(
'-n',
'--case-number',
default=None,
help="""
The case number to use., usually of the form "cr-16-053018-2d2d"
"""
)
parser.add_argument(
'-e',
'--examiner-cidr-range',
default='0.0.0.0/0',
help="""
The IP/CIDR for the examiner and/or the tool.
This will be added as the only allowed range
in the isolated security group.
"""
)
parser.add_argument(
'-b',
'--bucket-id',
default=None,
help="""
Optional.
The id of the s3 bucket to use.
This must already exist
"""
)
parser.add_argument(
'-d',
'--dry-run',
default=None,
help="""
Dry run. Pass dry run
parameter to perform API calls
but will not modify any resources.
"""
)
subparsers = parser.add_subparsers()
host_compromise_parser = subparsers.add_parser(
'host_compromise', help=''
)
host_compromise_parser.add_argument('ip', help='')
host_compromise_parser.add_argument(
'user',
help="""
this is the privileged ssh user
for acquiring memory from the instance.
"""
)
host_compromise_parser.add_argument(
'ssh_key_file',
help='provide the path to the ssh private key for the user.'
)
host_compromise_parser.set_defaults(func="host_compromise")
key_compromise_parser = subparsers.add_parser(
'key_compromise',
help=''
)
key_compromise_parser.add_argument(
'compromised_access_key_id', help=''
)
key_compromise_parser.add_argument(
'region',
help='Choose a region to store your case logs. Example: us-east-1'
)
key_compromise_parser.set_defaults(func="key_compromise")
return parser.parse_args(args)
"""Logic to decide on host or key compromise"""
def run(self):
self.config = self.parse_args(sys.argv[1:])
case_number = self.config.case_number
bucket = self.config.bucket_id
compromise_object = None
if self.config.func == 'host_compromise':
hc = plans.key.Compromise(
self.config.user,
self.config.ssh_key_file,
self.config.examiner_cidr_range,
self.config.ip,
case_number = self.config.case_number,
bucket = self.config.bucket_id,
prog = self.prog
)
case_number = hc.case_number
compromise_object = hc
try:
hc.mitigate()
except KeyboardInterrupt:
pass
elif self.config.func == 'key_compromise':
kc = plans.host.Compromise(
self.config.examiner_cidr_range,
self.config.compromised_access_key_id,
case_number = self.config.case_number,
bucket = self.config.bucket_id,
region = self.config.region
)
case_number = kc.case_number
compromise_object = kc
try:
kc.mitigate()
except KeyboardInterrupt:
pass
if __name__=='__main__':
c = cli()
if c.prog is not None:
c.run()
|
Python
| 0.000001
|
@@ -983,32 +983,48 @@
)%0A%0A
+ optional_args =
parser.add_argu
@@ -1031,27 +1031,53 @@
ment
-(%0A '-n',
+_group()%0A%0A optional_args.add_argument(
%0A
@@ -1257,38 +1257,45 @@
)%0A%0A
-parser
+optional_args
.add_argument(%0A
@@ -1297,26 +1297,8 @@
nt(%0A
- '-e',%0A
@@ -1582,38 +1582,45 @@
)%0A%0A
-parser
+optional_args
.add_argument(%0A
@@ -1622,26 +1622,8 @@
nt(%0A
- '-b',%0A
@@ -1640,18 +1640,20 @@
-bucket-
-id
+name
',%0A
@@ -1843,22 +1843,29 @@
-parser
+optional_args
.add_arg
@@ -1875,26 +1875,8 @@
nt(%0A
- '-d',%0A
@@ -1904,36 +1904,43 @@
-default=None
+action='store_true'
,%0A
@@ -3455,18 +3455,20 @@
.bucket_
-id
+name
%0A
@@ -3830,34 +3830,36 @@
f.config.bucket_
-id
+name
,%0A
@@ -4367,18 +4367,20 @@
.bucket_
-id
+name
,%0A
|
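The argparse pattern applied by this diff, as a standalone sketch (only two of the flags are reproduced; defaults and help text are illustrative): optional flags move into a group created with add_argument_group(), and --dry-run becomes a store_true flag instead of taking a value.

import argparse

parser = argparse.ArgumentParser(description="demo")
optional_args = parser.add_argument_group()
optional_args.add_argument('--bucket-name', default=None,
                           help="The name of an existing S3 bucket.")
optional_args.add_argument('--dry-run', action='store_true',
                           help="Perform API calls without modifying resources.")

args = parser.parse_args(['--bucket-name', 'case-logs', '--dry-run'])
print(args.bucket_name, args.dry_run)  # case-logs True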
d818d497fdf2f361d5ea4160061440b6329d6ee4
|
Fix clang-format vim integration issue with non-ascii characters
|
tools/clang-format/clang-format.py
|
tools/clang-format/clang-format.py
|
# This file is a minimal clang-format vim-integration. To install:
# - Change 'binary' if clang-format is not on the path (see below).
# - Add to your .vimrc:
#
# map <C-I> :pyf <path-to-this-file>/clang-format.py<cr>
# imap <C-I> <c-o>:pyf <path-to-this-file>/clang-format.py<cr>
#
# The first line enables clang-format for NORMAL and VISUAL mode, the second
# line adds support for INSERT mode. Change "C-I" to another binding if you
# need clang-format on a different key (C-I stands for Ctrl+i).
#
# With this integration you can press the bound key and clang-format will
# format the current line in NORMAL and INSERT mode or the selected region in
# VISUAL mode. The line or region is extended to the next bigger syntactic
# entity.
#
# You can also pass in the variable "l:lines" to choose the range for
# formatting. This variable can either contain "<start line>:<end line>" or
# "all" to format the full file. So, to format the full file, write a function
# like:
# :function FormatFile()
# : let l:lines="all"
# : pyf <path-to-this-file>/clang-format.py
# :endfunction
#
# It operates on the current, potentially unsaved buffer and does not create
# or save any files. To revert a formatting, just undo.
from __future__ import print_function
import difflib
import json
import subprocess
import sys
import vim
# set g:clang_format_path to the path to clang-format if it is not on the path
# Change this to the full path if clang-format is not on the path.
binary = 'clang-format'
if vim.eval('exists("g:clang_format_path")') == "1":
binary = vim.eval('g:clang_format_path')
# Change this to format according to other formatting styles. See the output of
# 'clang-format --help' for a list of supported styles. The default looks for
# a '.clang-format' or '_clang-format' file to indicate the style that should be
# used.
style = 'file'
fallback_style = None
if vim.eval('exists("g:clang_format_fallback_style")') == "1":
fallback_style = vim.eval('g:clang_format_fallback_style')
def main():
# Get the current text.
encoding = vim.eval("&encoding")
buf = vim.current.buffer
text = '\n'.join(buf)
# Determine range to format.
if vim.eval('exists("l:lines")') == '1':
lines = vim.eval('l:lines')
else:
lines = '%s:%s' % (vim.current.range.start + 1, vim.current.range.end + 1)
# Determine the cursor position.
cursor = int(vim.eval('line2byte(line("."))+col(".")')) - 2
if cursor < 0:
print('Couldn\'t determine cursor position. Is your file empty?')
return
# Avoid flashing an ugly, ugly cmd prompt on Windows when invoking clang-format.
startupinfo = None
if sys.platform.startswith('win32'):
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = subprocess.SW_HIDE
# Call formatter.
command = [binary, '-style', style, '-cursor', str(cursor)]
if lines != 'all':
command.extend(['-lines', lines])
if fallback_style:
command.extend(['-fallback-style', fallback_style])
if vim.current.buffer.name:
command.extend(['-assume-filename', vim.current.buffer.name])
p = subprocess.Popen(command,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
stdin=subprocess.PIPE, startupinfo=startupinfo)
stdout, stderr = p.communicate(input=text.encode(encoding))
# If successful, replace buffer contents.
if stderr:
print(stderr)
if not stdout:
print(
'No output from clang-format (crashed?).\n'
'Please report to bugs.llvm.org.'
)
else:
lines = stdout.decode(encoding).split('\n')
output = json.loads(lines[0])
lines = lines[1:]
sequence = difflib.SequenceMatcher(None, vim.current.buffer, lines)
for op in reversed(sequence.get_opcodes()):
if op[0] != 'equal':
vim.current.buffer[op[1]:op[2]] = lines[op[3]:op[4]]
if output.get('IncompleteFormat'):
print('clang-format: incomplete (syntax errors)')
vim.command('goto %d' % (output['Cursor'] + 1))
main()
|
Python
| 0.999972
|
@@ -2106,16 +2106,24 @@
text =
+unicode(
'%5Cn'.joi
@@ -2128,16 +2128,27 @@
oin(buf)
+, encoding)
%0A%0A # De
|
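The root cause, shown without the vim module (the sample bytes are illustrative): offset math on encoded bytes miscounts multi-byte characters, so the fix decodes the buffer text once, up front, via unicode(..., encoding).

lines = [b"int main() {", b"  return 0;  // caf\xc3\xa9", b"}"]
encoding = "utf-8"

raw = b"\n".join(lines)
text = raw.decode(encoding)  # decode once at the boundary, as the fix does

# Byte length and character length diverge on non-ASCII input, which is
# what skewed the cursor/offset arithmetic before the fix.
print(len(raw), len(text))  # 36 35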
c88314f935d9bf1e65c2a4f6d3eb6931fee5c4f5
|
fix evaluate.py
|
Utils/py/BallDetection/RegressionNetwork/evaluate.py
|
Utils/py/BallDetection/RegressionNetwork/evaluate.py
|
#!/usr/bin/env python3
import argparse
import pickle
import tensorflow.keras as keras
import numpy as np
parser = argparse.ArgumentParser(description='Train the network given ')
parser.add_argument('-b', '--database-path', dest='imgdb_path',
help='Path to the image database containing test data.'
'Default is img.db in current folder.')
parser.add_argument('-m', '--model-path', dest='model_path',
help='Store the trained model using this path. Default is model.h5.')
args = parser.parse_args()
imgdb_path = "img.db"
model_path = "model.h5"
res = {"x": 16, "y": 16}
if args.model_path is not None:
model_path = args.model_path
if args.imgdb_path is not None:
imgdb_path = args.imgdb_path
with open(imgdb_path, "rb") as f:
mean = pickle.load(f)
print("mean=" + str(mean))
x = pickle.load(f)
y = pickle.load(f)
model = keras.models.load_model(model_path)
print(model.summary())
x = np.array(x)
y = np.array(y)
result = model.evaluate(x, y)
print("Evaluation result")
print("=================")
for idx in range(0, len(result)):
print(model.metrics_names[idx] + ":", result[idx])
|
Python
| 0.000002
|
@@ -99,17 +99,227 @@
y as np%0A
-%0A
+from pathlib import Path%0A%0ADATA_DIR = Path(Path(__file__).parent.absolute() / %22data%22).resolve()%0AMODEL_DIR = Path(Path(__file__).parent.absolute() / %22models/best_models%22).resolve()%0A%0Aif __name__ == '__main__':%0A
parser =
@@ -360,13 +360,16 @@
on='
-Train
+Evaluate
the
@@ -387,16 +387,20 @@
ven ')%0A%0A
+
parser.a
@@ -450,25 +450,66 @@
imgdb_path',
-%0A
+ default=str(DATA_DIR / 'imgdb.pkl'),%0A
@@ -597,16 +597,20 @@
+
'Default
@@ -620,22 +620,26 @@
img
-.
db
- in current
+.pkl in the data
fol
@@ -645,16 +645,20 @@
lder.')%0A
+
parser.a
@@ -709,17 +709,64 @@
l_path',
-%0A
+ default=str(MODEL_DIR / 'fy1500_conf.h5'),%0A
@@ -839,21 +839,27 @@
is
-model
+fy1500_conf
.h5.')%0A%0A
args
@@ -854,16 +854,20 @@
.h5.')%0A%0A
+
args = p
@@ -890,222 +890,56 @@
()%0A%0A
-imgdb_path = %22img.db%22%0Amodel_path = %22model.h5%22%0Ares = %7B%22x%22: 16, %22y%22: 16%7D%0A%0Aif args.model_path is not None:%0A model_path = args.model_path%0A%0Aif args.imgdb_path is not None:%0A imgdb_path = args.imgdb_path%0A%0Awith open(
+ res = %7B%22x%22: 16, %22y%22: 16%7D%0A with open(args.
imgd
@@ -962,16 +962,20 @@
f:%0A
+
+
mean = p
@@ -984,24 +984,28 @@
kle.load(f)%0A
+
print(%22m
@@ -1023,16 +1023,20 @@
(mean))%0A
+
x =
@@ -1046,24 +1046,28 @@
kle.load(f)%0A
+
y = pick
@@ -1078,16 +1078,20 @@
oad(f)%0A%0A
+
model =
@@ -1114,16 +1114,21 @@
d_model(
+args.
model_pa
@@ -1132,16 +1132,20 @@
_path)%0A%0A
+
print(mo
@@ -1160,16 +1160,20 @@
ary())%0A%0A
+
x = np.a
@@ -1180,16 +1180,20 @@
rray(x)%0A
+
y = np.a
@@ -1201,16 +1201,20 @@
ray(y)%0A%0A
+
result =
@@ -1236,16 +1236,20 @@
(x, y)%0A%0A
+
print(%22E
@@ -1267,16 +1267,20 @@
esult%22)%0A
+
print(%22=
@@ -1299,16 +1299,20 @@
====%22)%0A%0A
+
for idx
@@ -1337,16 +1337,20 @@
sult)):%0A
+
prin
|
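The path handling introduced here, as a standalone sketch (directory layout and file names mirror the diff but are illustrative): defaults are anchored to the script's own location via pathlib instead of depending on the current working directory.

import argparse
from pathlib import Path

DATA_DIR = Path(Path(__file__).parent.absolute() / "data").resolve()

parser = argparse.ArgumentParser(description="Evaluate the network")
parser.add_argument('-b', '--database-path', dest='imgdb_path',
                    default=str(DATA_DIR / 'imgdb.pkl'),
                    help="Path to the image database containing test data.")

args = parser.parse_args([])
print(args.imgdb_path)  # resolves under the script's directory, not the cwd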
cc7e3e5ef9d9c59b6b1ac80826445839ede73092
|
Revert mast dev host change
|
astroquery/mast/__init__.py
|
astroquery/mast/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
|
Python
| 0
|
@@ -370,11 +370,8 @@
mast
-dev
.sts
|
3ef82203daebd532af2f8effebe8fa31cec11e76
|
fix error message encoding
|
atest/robot/tidy/TidyLib.py
|
atest/robot/tidy/TidyLib.py
|
from __future__ import with_statement
import os
from os.path import abspath, dirname, join
from subprocess import call, STDOUT
import tempfile
from robot.utils.asserts import assert_equals
ROBOT_SRC = join(dirname(abspath(__file__)), '..', '..', '..', 'src')
class TidyLib(object):
def __init__(self, interpreter):
self._cmd = [interpreter, '-m', 'robot.tidy']
self._interpreter = interpreter
def run_tidy_and_return_output(self, options, input, command=None):
"""Runs tidy in the operating system and returns output."""
options = options.split(' ') if options else []
with tempfile.TemporaryFile() as output:
rc = call(self._cmd + options + [self._path(input)], stdout=output,
stderr=STDOUT, cwd=ROBOT_SRC, shell=os.sep=='\\')
output.seek(0)
content = output.read()
if rc:
raise RuntimeError(content)
return content
def run_tidy_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
result = self.run_tidy_and_return_output(options, input)
self._assert_result(result, open(self._path(expected)).read())
def run_tidy_as_a_script_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
cmd = [self._interpreter, join(ROBOT_SRC, 'robot', 'tidy.py')]
result = self.run_tidy_and_return_output(options, input, cmd)
self._assert_result(result, open(self._path(expected)).read())
def _path(self, path):
return path.replace('/', os.sep)
def _assert_result(self, result, expected):
result = result.decode('UTF-8')
expected = expected.decode('UTF-8')
for line1, line2 in zip(result.splitlines(), expected.splitlines()):
msg = "\n%s\n!=\n%s\n" % (result, expected)
assert_equals(repr(unicode(line1)), repr(unicode(line2)), msg)
|
Python
| 0.000001
|
@@ -1819,32 +1819,122 @@
decode('UTF-8')%0A
+ result_lines = result.splitlines()%0A expected_lines = expected.splitlines()%0A
for line
@@ -1955,29 +1955,22 @@
p(result
-.split
+_
lines
-()
, expect
@@ -1971,29 +1971,22 @@
expected
-.split
+_
lines
-()
):%0A
|
3904a7ac75318de08452fcea1a49b6d6e681da4e
|
remove debug.
|
lib/acli/output/route53.py
|
lib/acli/output/route53.py
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, print_function, unicode_literals)
from acli.output import (output_ascii_table, dash_if_none)
def output_route53_list(output_media=None, zones=None):
"""
@type output_media: unicode
@type zones: list | dict
"""
if isinstance(zones, dict):
zones = [zones]
for hosted_zone_dict in zones:
if output_media == 'console':
td = list()
td.append(['id', 'name',
'count', 'comment',
'private zone'])
for hosted_zone in hosted_zone_dict.get('HostedZones'):
zone_id = dash_if_none(hosted_zone.get('Id'))
zone_name = hosted_zone.get('Name')
record_count = str(hosted_zone.get('ResourceRecordSetCount'))
comment = hosted_zone.get('Config').get('Comment')
private_zone = str(hosted_zone.get('Config').get('PrivateZone'))
td.append([zone_id,
zone_name,
record_count,
comment,
private_zone])
output_ascii_table(table_title="Route53 Zones",
table_data=td,
inner_heading_row_border=True)
exit(0)
def get_record_set_values(resource_records):
"""
@type resource_records: list
"""
print(resource_records.__class__.__name__)
out = list()
for record in resource_records:
out.append(record.get('Value'))
return out
def output_route53_info(output_media=None, zone=None, record_sets=None):
"""
@type output_media: unicode
@type zone: zone
@type record_sets: ResourceRecordSets
"""
if output_media == 'console':
td = list()
td.append(['id', zone['HostedZone']['Id']])
td.append(['Name', zone['HostedZone']['Name']])
td.append(['Count', str(zone['HostedZone']['ResourceRecordSetCount'])])
td.append(['Comment', zone['HostedZone']['Config']['Comment']])
td.append(['Private', str(zone['HostedZone']['Config']['PrivateZone'])])
td.append(['Name Servers', "\n".join(zone['DelegationSet']['NameServers'])])
td.append(['Records', ' '])
td.append(['{0}'.format("-" * 12), '{0}'.format("-" * 20)])
for record_set in record_sets['ResourceRecordSets']:
td.append(['Name', record_set['Name']])
td.append([' Type', record_set['Type']])
td.append([' TTL', str(record_set['TTL'])])
td.append([' Values', "\n".join(get_record_set_values(record_set['ResourceRecords']))])
output_ascii_table(table_title="Zone Info",
table_data=td)
exit(0)
|
Python
| 0
|
@@ -1516,55 +1516,8 @@
%22%22%22%0A
- print(resource_records.__class__.__name__)%0A
|
ce1169df5393588dea1f6d4abfea99b03d5b732c
|
Fix a sorting bug
|
bank2ynab.pyw
|
bank2ynab.pyw
|
from pathlib import Path
from tkinter import Tk, StringVar, Toplevel, Message
from tkinter.ttk import Combobox, Frame, Button, Label
from tkinter.filedialog import askopenfilename
from src.converter import bank2ynab
from src.config import BankConfig
PADX = 12
PADY = 10
BANK_DIR = Path('./banks')
###################################
# GUI-code
###################################
class Application(Tk):
"""
Main window that can replace the frame that is shown
https://stackoverflow.com/questions/7546050/switch-between-two-frames-in-tkinter
"""
def __init__(self):
Tk.__init__(self)
self._frame = None
self.switch_frame(BankSelection)
def switch_frame(self, frame_class, args=None):
"""Destroys current frame and replaces it with a new one."""
new_frame = frame_class(self, args)
if self._frame is not None:
self._frame.destroy()
self._frame = new_frame
self._frame.grid(padx=PADX, pady=PADY)
self._frame.master.title("Bank2YNAB4")
self._frame.master.resizable(False, False)
class BankSelection(Frame):
def __init__(self, master=None, args=None):
Frame.__init__(self, master)
self.banks = [BankConfig.from_file(b) for b in BANK_DIR.iterdir() if b.suffix == '.toml']
self.createWidgets()
def createWidgets(self):
self.label = Label(self, text="Choose Your Bank:")
self.label.grid(column=0, row=0, sticky='W', ipadx=2)
banknames = self.getNames()
self.bankName = StringVar()
self.bankName.set(banknames[0])
self.bankChosen = Combobox(self, width=12, textvariable=self.bankName)
self.bankChosen['values'] = banknames
self.bankChosen.grid(column=1, row=0)
self.confirm = Button(self, text="Ok", command=self.convert)
self.confirm.grid(column=0, row=1, columnspan=2, sticky='E', pady=5)
def convert(self):
try:
inputPath = self.getFile()
bank = self.banks[self.bankChosen.current()]
except ValueError as e:
pass # No file selected
else:
try:
result = bank2ynab(bank, inputPath)
self.master.switch_frame(Report, result)
except (NameError, OSError, ValueError, TypeError) as e:
Error(self, e)
def getNames(self):
names = [b.name for b in self.banks]
names.sort()
return names
def getFile(self) -> Path:
inputPath = askopenfilename(
filetypes=[('CSV files', '*.csv')],
initialdir='.')
if inputPath:
return Path(inputPath)
else:
raise ValueError('No file selected')
class Report(Frame):
def __init__(self, master, args):
Frame.__init__(self, master)
self.createLabels(args)
self.exitButton = Button(self, text="Exit", command=self.master.destroy)
self.exitButton.grid(column=0, row=4, sticky='E')
def createLabels(self, args):
success, blankRows, ignoredRows, linesRead, rowsParsed = args
readStats = f'{linesRead}/{linesRead+blankRows+ignoredRows} rows read'
if(blankRows+ignoredRows != 0):
ignoreStats = f' (ignored {blankRows} blank rows and ' \
f'{ignoredRows} transactions found in accignore).'
readStats = readStats + ignoreStats
parsedStats = f'{rowsParsed}/{linesRead} rows parsed '
if not success:
self.status = "Conversion failed."
else:
self.status = "YNAB csv-file successfully written."
self.statusLabel = Label(self, text=self.status)
self.statusLabel.grid(column=0, row=0, sticky='W', pady=5)
self.readStatsLabel = Label(self, text=readStats)
self.readStatsLabel.grid(column=0, row=1, sticky='W')
self.parsedStatsLabel = Label(self, text=parsedStats)
self.parsedStatsLabel.grid(column=0, row=3, sticky='W')
class Error(Toplevel):
"""Pop-up for displaying an error message"""
def __init__(self, master, arg):
Toplevel.__init__(self, master, padx=PADX, pady=PADY)
self.title("Error")
self.msg = Message(self, text=arg, aspect=380)
self.msg.grid(column=0, row=0, pady=5)
self.columnconfigure(0, minsize=180)
button = Button(self, text="Dismiss", command=self.destroy)
button.grid(column=0, row=1)
if __name__ == "__main__":
app = Application()
app.mainloop()
|
Python
| 0
|
@@ -1218,29 +1218,24 @@
er)%0A
-self.
banks = %5BBan
@@ -1307,16 +1307,84 @@
.toml'%5D%0A
+ banks.sort(key=lambda b: b.name)%0A self.banks = banks%0A
@@ -1580,23 +1580,36 @@
s =
-self.getNames()
+%5Bb.name for b in self.banks%5D
%0A
@@ -2446,120 +2446,8 @@
e)%0A%0A
- def getNames(self):%0A names = %5Bb.name for b in self.banks%5D%0A names.sort()%0A return names%0A%0A
|
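The sorting bug, reduced to a standalone sketch (the Bank class is a stand-in for BankConfig): sorting a separate list of names left self.banks in its original order, so combobox indices pointed at the wrong bank; the fix sorts the objects once and derives the names from the sorted list.

class Bank:
    def __init__(self, name):
        self.name = name

banks = [Bank("Zeta"), Bank("Alpha"), Bank("Mid")]
banks.sort(key=lambda b: b.name)   # sort the objects themselves
names = [b.name for b in banks]    # display order now matches storage order

chosen_index = names.index("Mid")
print(banks[chosen_index].name)    # Mid -- index lookups stay aligned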
fe67796130854d83b3dfaa085d67d9eabe35a155
|
Allow getdate for Energy Point Rule condition
|
frappe/social/doctype/energy_point_rule/energy_point_rule.py
|
frappe/social/doctype/energy_point_rule/energy_point_rule.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.cache_manager
from frappe.model.document import Document
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import create_energy_points_log, revert
class EnergyPointRule(Document):
def on_update(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def on_trash(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def apply(self, doc):
if frappe.safe_eval(self.condition, None, {'doc': doc.as_dict()}):
multiplier = 1
if self.multiplier_field:
multiplier = doc.get(self.multiplier_field) or 1
points = round(self.points * multiplier)
reference_doctype = doc.doctype
reference_name = doc.name
user = doc.get(self.user_field)
rule = self.name
# in case of zero as result after roundoff
if not points: return
# if user_field has no value
if not user or user == 'Administrator': return
try:
create_energy_points_log(reference_doctype, reference_name, {
'points': points,
'user': user,
'rule': rule
})
except Exception as e:
frappe.log_error(frappe.get_traceback(), 'apply_energy_point')
def process_energy_points(doc, state):
if (frappe.flags.in_patch
or frappe.flags.in_install
or not is_energy_point_enabled()):
return
old_doc = doc.get_doc_before_save()
# check if doc has been cancelled
if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
return revert_points_for_cancelled_doc(doc)
for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,
dict(reference_doctype = doc.doctype, enabled=1)):
frappe.get_doc('Energy Point Rule', d.get('name')).apply(doc)
def revert_points_for_cancelled_doc(doc):
energy_point_logs = frappe.get_all('Energy Point Log', {
'reference_doctype': doc.doctype,
'reference_name': doc.name,
'type': 'Auto'
})
for log in energy_point_logs:
revert(log.name, _('Reference document has been cancelled'))
def get_energy_point_doctypes():
return [
d.reference_doctype for d in frappe.get_all('Energy Point Rule',
['reference_doctype'], {'enabled': 1})
]
|
Python
| 0
|
@@ -724,16 +724,68 @@
, doc):%0A
+%09%09whitelisted_globals = %7B%0A%09%09%09%22getdate%22: getdate%0A%09%09%7D%0A
%09%09if fra
@@ -818,12 +818,27 @@
on,
-None
+whitelisted_globals
, %7B'
|
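The whitelisting pattern this diff adds, as a standalone sketch: a plain eval with stripped builtins stands in for frappe.safe_eval, and the getdate below is a hypothetical stand-in for frappe's helper (the real one parses many formats; this one only handles ISO strings).

from datetime import date

def getdate(value):
    # Hypothetical stand-in for frappe's getdate.
    return date.fromisoformat(value)

whitelisted_globals = {"getdate": getdate}
condition = "getdate(doc['posting_date']) > getdate('2024-01-01')"
doc = {"posting_date": "2024-02-10"}

# Same shape as the patched call:
# frappe.safe_eval(self.condition, whitelisted_globals, {'doc': doc})
print(eval(condition, {"__builtins__": {}, **whitelisted_globals}, {"doc": doc}))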
09a075891cf881571055538d7b18cb2e684e7d9e
|
Use log.exception instead of traceback module in hook.py
|
libqtile/hook.py
|
libqtile/hook.py
|
import traceback
import utils
subscriptions = {}
SKIPLOG = set()
qtile = None
def init(q):
global qtile
qtile = q
def clear():
subscriptions.clear()
class Subscribe:
def __init__(self):
hooks = set([])
for i in dir(self):
if not i.startswith("_"):
hooks.add(i)
self.hooks = hooks
def _subscribe(self, event, func):
lst = subscriptions.setdefault(event, [])
if not func in lst:
lst.append(func)
def startup(self, func):
"""
Called when Qtile has initialized
"""
return self._subscribe("startup", func)
def setgroup(self, func):
"""
Called when group is changed.
"""
return self._subscribe("setgroup", func)
def addgroup(self, func):
"""
Called when group is added.
"""
return self._subscribe("addgroup", func)
def delgroup(self, func):
"""
Called when group is deleted.
"""
return self._subscribe("delgroup", func)
def changegroup(self, func):
"""
Called whenever a group change occurs.
"""
return self._subscribe("changegroup", func)
def focus_change(self, func):
"""
Called when focus is changed.
"""
return self._subscribe("focus_change", func)
def float_change(self, func):
"""
Called when a change in float state is made
"""
return self._subscribe("float_change", func)
def group_window_add(self, func):
"""
Called when a new window is added to a group.
"""
return self._subscribe("group_window_add", func)
def window_name_change(self, func):
"""
Called whenever a window's name changes.
"""
return self._subscribe("window_name_change", func)
def client_new(self, func):
"""
Called before Qtile starts managing a new client. Use this hook to
declare windows static, or add them to a group on startup. This
hook is not called for internal windows.
- arguments: window.Window object
## Example:
def func(c):
if c.name == "xterm":
c.togroup("a")
elif c.name == "dzen":
c.static(0)
libqtile.hook.subscribe.client_new(func)
"""
return self._subscribe("client_new", func)
def client_managed(self, func):
"""
Called after Qtile starts managing a new client. That is, after a
window is assigned to a group, or when a window is made static.
This hook is not called for internal windows.
- arguments: window.Window object
"""
return self._subscribe("client_managed", func)
def client_killed(self, func):
"""
Called after a client has been unmanaged.
- arguments: window.Window object of the killed window.
"""
return self._subscribe("client_killed", func)
def client_state_changed(self, func):
"""
Called whenever client state changes.
"""
return self._subscribe("client_state_changed", func)
def client_type_changed(self, func):
"""
Called whenever window type changes.
"""
return self._subscribe("client_type_changed", func)
def client_focus(self, func):
"""
Called whenever focus changes.
- arguments: window.Window object of the new focus.
"""
return self._subscribe("client_focus", func)
def client_mouse_enter(self, func):
"""
Called when the mouse enters a client.
"""
return self._subscribe("client_mouse_enter", func)
def client_name_updated(self, func):
"""
Called when the client name changes.
"""
return self._subscribe("client_name_updated", func)
def client_urgent_hint_changed(self, func):
"""
Called when the client urgent hint changes.
"""
return self._subscribe("client_urgent_hint_changed", func)
def layout_change(self, func):
"""
Called on layout change.
"""
return self._subscribe("layout_change", func)
def net_wm_icon_change(self, func):
"""
Called on _NET_WM_ICON change.
"""
return self._subscribe("net_wm_icon_change", func)
def screen_change(self, func):
"""
Called when a screen is added or screen configuration is changed
(via xrandr). The hook should take two arguments: the root qtile
object and the ``xproto.randr.ScreenChangeNotify`` event. Common
usage is simply to call ``qtile.cmd_restart()`` on each event (to
restart qtile when there is a new monitor):
## Example:
def restart_on_randr(qtile, ev):
qtile.cmd_restart()
"""
return self._subscribe("screen_change", func)
subscribe = Subscribe()
class Unsubscribe(Subscribe):
"""
This class mirrors subscribe, except the _subscribe member has been
overridden to remove calls from hooks.
"""
def _subscribe(self, event, func):
lst = subscriptions.setdefault(event, [])
try:
lst.remove(func)
except ValueError:
raise utils.QtileError(
"Tried to unsubscribe a hook that was not"
" currently subscribed"
)
unsubscribe = Unsubscribe()
def fire(event, *args, **kwargs):
if event not in subscribe.hooks:
raise utils.QtileError("Unknown event: %s" % event)
if not event in SKIPLOG:
qtile.log.info(
"Internal event: %s(%s, %s)" %
(event, args, kwargs)
)
for i in subscriptions.get(event, []):
try:
i(*args, **kwargs)
except Exception as e:
qtile.log.error("Error in hook %s:\n%s" % (
event, traceback.format_exc()))
|
Python
| 0.000001
|
@@ -1,21 +1,4 @@
-import traceback%0A
impo
@@ -6087,23 +6087,8 @@
cept
- Exception as e
:%0A
@@ -6108,20 +6108,24 @@
le.log.e
-rror
+xception
(%22Error
@@ -6138,63 +6138,18 @@
k %25s
-:%5Cn%25s%22 %25 (%0A event, traceback.format_exc()
+%22 %25 (event,
))%0A
|
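The logging change, as a standalone sketch: logging's exception() records the active traceback by itself, so the explicit traceback.format_exc() call and the unused `as e` binding both become unnecessary.

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("hook")

def fire(event, func):
    try:
        func()
    except Exception:
        # log.exception() appends the current traceback automatically.
        log.exception("Error in hook %s" % event)

fire("startup", lambda: 1 / 0)  # logs the error plus the full traceback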
6236ee8344add06f6adbfef9df5ab224ea19b1fe
|
Remove unused import
|
moocng/courses/backends.py
|
moocng/courses/backends.py
|
# Copyright (C) 2010-2012 Yaco Sistemas (http://www.yaco.es)
# Copyright (C) 2009 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.contrib.auth.models import Group
from django.contrib.auth.models import SiteProfileNotAvailable
from django.core.exceptions import ObjectDoesNotExist
from djangosaml2.backends import Saml2Backend
from moocng.courses.models import Course
from moocng.teacheradmin.models import Invitation
class Saml2BackendExtension(Saml2Backend):
# This function is called when a new user is created
# we will check here if a pending teacher invitation
# exists for this user
def configure_user(self, user, attributes, attribute_mapping):
"""Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
"""
user.set_unusable_password()
user = self.update_user(user, attributes, attribute_mapping,
force_save=True)
user_pendings = Invitation.objects.filter(email=user.email)
for user_pending in user_pendings:
user_pending.course.teachers.add(user)
user_pending.delete()
return user
def update_user(self, user, attributes, attribute_mapping,
force_save=False):
"""Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set, otherwise it will try to set
it in the profile object.
"""
if not attribute_mapping:
return user
try:
profile = user.get_profile()
except ObjectDoesNotExist:
profile = None
except SiteProfileNotAvailable:
profile = None
user_modified = False
profile_modified = False
for saml_attr, django_attrs in attribute_mapping.items():
try:
for attr in django_attrs:
if hasattr(user, attr):
if attr == 'groups':
user.groups = Group.objects.filter(name__in=attributes[saml_attr])
else:
setattr(user, attr, attributes[saml_attr][0])
user_modified = True
elif profile is not None and hasattr(profile, attr):
setattr(profile, attr, attributes[saml_attr][0])
profile_modified = True
except KeyError:
# the saml attribute is missing
pass
if user_modified or force_save:
user.save()
if profile is not None and (profile_modified or force_save):
profile.save()
return user
|
Python
| 0.000001
|
@@ -893,49 +893,8 @@
end%0A
-from moocng.courses.models import Course%0A
from
|
aa8e55858997d30ca507091672951a99f5fc4ef4
|
Remove mozaik_event_barcode from mozaik_all to be able to uninstall it
|
mozaik_all/__manifest__.py
|
mozaik_all/__manifest__.py
|
# Copyright 2018 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Mozaik: All Modules Loader",
"summary": """
Loads all Mozaik modules""",
"version": "14.0.1.1.9",
"license": "AGPL-3",
"author": "ACSONE SA/NV",
"website": "https://github.com/OCA/mozaik",
"category": "Political Association",
"depends": [
# 'disable_tracking_installation',
# 'disable_user_welcome_message',
"inherit_abstract_view",
# 'ir_rule_child_of',
# 'mail_job_priority',
"mozaik_abstract_model",
"mozaik_account",
"mozaik_address",
"mozaik_address_local_street",
"mozaik_ama_attachment",
"mozaik_automatic_supporter",
"mozaik_mass_mailing_automation",
"mass_mailing_distribution_list",
"mozaik_communication",
"mozaik_committee",
"mozaik_duplicate",
"mozaik_dynamical_time_filter",
"mozaik_event_barcode",
"mozaik_event_chatter",
"mozaik_event_description",
"mozaik_event_involvement_category",
"mozaik_event_question_involvement_category",
"mozaik_event_membership_request_involvement",
"mozaik_event_partner_firstname",
"mozaik_event_publish_date",
"mozaik_event_registration_add_zip",
"mozaik_event_security",
"mozaik_event_thesaurus",
"mozaik_event_tickbox_question",
"mozaik_graphql",
"mozaik_involvement",
"mozaik_involvement_followup",
"mozaik_mass_mailing_access_rights",
"mozaik_mass_mailing_automation",
"mozaik_mass_mailing_bounce_counter",
"mozaik_mass_mailing_dynamic_placeholder",
"mozaik_mass_mailing_immediate_sending",
"mozaik_mass_mailing_int_instance",
"mozaik_mass_mailing_mail_creation",
"mozaik_mass_mailing_multi_sending",
"mozaik_mass_mailing_template",
"mozaik_membership",
"mozaik_partner_assembly",
"mozaik_partner_button_sms",
"mozaik_partner_disabled",
"mozaik_partner_fields",
"mozaik_partner_global_opt_out",
"mozaik_partner_unemployed",
"mozaik_partner_website",
# 'mozaik_partner_unauthorized',
"mozaik_person",
"mozaik_person_deceased",
# 'mozaik_relation_coordinate',
"mozaik_structure",
# 'mozaik_subscription_price',
"mozaik_thesaurus",
"mozaik_tools",
"mozaik_virtual_assembly_instance",
"mozaik_virtual_partner_mandate",
"mozaik_virtual_partner_involvement",
"mozaik_virtual_partner_instance",
"mozaik_virtual_partner_mass_mailing",
"mozaik_virtual_partner_membership",
"mozaik_virtual_partner_relation",
# 'partner_usual_firstname',
"mozaik_mandate",
"mozaik_mandate_email",
"mozaik_mandate_female_label",
"mozaik_mandate_category_sequence",
"mozaik_mandate_show_website",
"mozaik_membership_mandate",
"mozaik_membership_price",
"mozaik_membership_request",
"mozaik_membership_request_autovalidate",
"mozaik_membership_request_from_registration",
"mozaik_petition",
"mozaik_petition_membership_request_involvement",
"mozaik_petition_involvement_category",
"mozaik_petition_question_involvement_category",
"mozaik_petition_thesaurus",
"mozaik_retrocession_mode",
"mozaik_survey_chatter",
"mozaik_survey_involvement_category",
"mozaik_survey_export_csv",
"mozaik_survey_security",
"mozaik_survey_membership_request_involvement",
"mozaik_survey_publish_date",
"mozaik_survey_question_involvement_category",
"mozaik_survey_scoring",
"mozaik_survey_thesaurus",
"mozaik_website_event_track",
"mozaik_membership_payment",
"mozaik_membership_payment_stripe",
# "mass_mail_queue_job",
],
"data": [
# 'views/mail_followers.xml',
"views/res_partner.xml",
],
"installable": True,
"external_dependencies": {"python": ["openupgradelib"]},
}
|
Python
| 0
|
@@ -962,32 +962,34 @@
filter%22,%0A
+ #
%22mozaik_event_b
|
096f45694c741a03d444b4a83bc2d1386c11b0d5
|
Fix data entry backend
|
datapoints/api/resources/base_model.py
|
datapoints/api/resources/base_model.py
|
import json
from django.http import HttpResponse
from tastypie.authorization import Authorization
from tastypie.authentication import ApiKeyAuthentication, MultiAuthentication
from tastypie.resources import ModelResource
from tastypie import http
from datapoints.models import LocationPermission
from datapoints.api.serialize import CustomSerializer
from datapoints.api.custom_session_authentication import CustomSessionAuthentication
from datapoints.api.custom_cache import CustomCache
from datapoints.api.exceptions import DatapointsException
from datapoints.api.resources.base import BaseResource
class BaseModelResource(ModelResource, BaseResource):
'''
This applies to only the V1 API. This method inherits from Tastypie's
model resource.
This resource strips down almost all of the tastypie functions which
drastically slow down the API performance.
IMPORTANT: if you note, all of the resources use the .values() option for
each queryset. That returns the model as JSON, so the idea is that the
API does not need to serialize or dehydrate the resource.
The models are set up so that the API does as little transformation as
possible. That means however, that a few of our metadata models ( see
campaign / indicator ) are cached and contain related information making
the job of the API easy.
'''
class Meta:
authentication = MultiAuthentication(CustomSessionAuthentication(), ApiKeyAuthentication())
authorization = Authorization()
always_return_data = True
allowed_methods = ['get', 'post', 'delete', 'patch']
cache = CustomCache()
serializer = CustomSerializer()
def dispatch(self, request_type, request, **kwargs):
'''
'''
return super(BaseModelResource, self).dispatch(request_type, request, **kwargs)
def convert_post_to_patch(request):
'''
'''
return super(BaseModelResource, self).convert_post_to_patch(request)
def patch_detail(self, request, **kwargs):
"""
Updates a resource in-place.
Calls ``obj_update``.
If the resource is updated, return ``HttpAccepted`` (202 Accepted).
If the resource did not exist, return ``HttpNotFound`` (404 Not Found).
"""
# request = self.convert_post_to_patch(request)
basic_bundle = self.build_bundle(request=request)
# We want to be able to validate the update, but we can't just pass
# the partial data into the validator since all data needs to be
# present. Instead, we basically simulate a PUT by pulling out the
# original data and updating it in-place.
# So first pull out the original object. This is essentially
# ``get_detail``.
try:
obj = self._meta.object_class.objects.get(id=kwargs['pk'])
except ObjectDoesNotExist:
return http.HttpNotFound()
except MultipleObjectsReturned:
return http.HttpMultipleChoices("More than one resource is found at this URI.")
bundle = self.build_bundle(obj=obj, request=request)
bundle = self.full_dehydrate(bundle)
bundle = self.alter_detail_data_to_serialize(request, bundle)
# Now update the bundle in-place.
# deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
# self.update_in_place(request, bundle, deserialized)
if not self._meta.always_return_data:
return http.HttpAccepted()
else:
# Invalidate prefetched_objects_cache for bundled object
# because we might have changed a prefetched field
bundle.obj._prefetched_objects_cache = {}
bundle = self.full_dehydrate(bundle)
bundle = self.alter_detail_data_to_serialize(request, bundle)
return self.create_response(request, bundle, response_class=http.HttpAccepted)
def get_detail(self, request, **kwargs):
"""
Returns a single serialized resource.
Calls ``cached_obj_get/obj_get`` to provide the data, then handles that result
set and serializes it.
Should return a HttpResponse (200 OK).
"""
try:
obj = self._meta.object_class.objects.get(id=kwargs['pk'])
except ObjectDoesNotExist:
return http.HttpNotFound()
except MultipleObjectsReturned:
return http.HttpMultipleChoices("More than one resource is found at this URI.")
bundle = self.build_bundle(obj=obj, request=request)
bundle = self.full_dehydrate(bundle)
bundle = self.alter_detail_data_to_serialize(request, bundle)
return self.create_response(request, bundle)
def get_list(self, request, **kwargs):
"""
Overridden from Tastypie.
"""
base_bundle = self.build_bundle(request=request)
objects = self.obj_get_list(bundle=base_bundle, **self.remove_api_resource_names(kwargs))
bundles = []
# this is a temporary hack to get data_entry working ##
# long term fix is to make DatapointEntryResource a NonModelResource
# https://trello.com/c/skxxpzYj/327-rp-bug-2005-cannot-load-entry-form-in-enter-data-via-form
if self.Meta.resource_name == 'datapointentry':
return super(ModelResource, self).get_list(request, **kwargs)
if len(objects) > 0:
# find json_fields ( should be explicit here and check data type)
# of the field, but for this works..
json_obj_keys = [k for k, v in objects[0].items() if 'json' in k]
for obj in objects:
# serialize json fields ##
for json_key in json_obj_keys:
obj[json_key] = json.loads(obj[json_key])
# hack lvl attribute
if 'location_type_id' in obj:
obj['lvl'] = obj['location_type_id'] - 1
bundles.append(obj)
response_meta = self.get_response_meta(len(objects))
response_data = {
'objects': bundles,
'meta': response_meta, # add paginator info here..
'error': None,
}
return self.create_response(request, response_data)
def get_response_meta(self, object_len):
meta_dict = {
'top_lvl_location_id': self.top_lvl_location_id,
'limit': None, # paginator.get_limit(),
'offset': None, # paginator.get_offset(),
'total_count': object_len,
}
return meta_dict
|
Python
| 0.000054
|
@@ -3288,18 +3288,16 @@
%0A
- #
deseria
@@ -3412,18 +3412,16 @@
%0A
- #
self.up
|
6a2a0667179a78e2c56dff551b0d010db6ed0150
|
fix imports
|
chainerrl/initializers/__init__.py
|
chainerrl/initializers/__init__.py
|
from chainerrl.initializers.constant import VarianceScalingConstant # NOQA
from chainerrl.initializers.normal import LeCunNormal # NOQA
|
Python
| 0.000002
|
@@ -123,16 +123,90 @@
nNormal # NOQA%0A
+from chainerrl.initializers.uniform import VarianceScalingUniform # NOQA%0A
|
863629f65bf151afa0b37bc3485ec37c9dcea84d
|
Disable logging until optional
|
changes_lxc_wrapper/cli/wrapper.py
|
changes_lxc_wrapper/cli/wrapper.py
|
#!/usr/bin/env python3
import argparse
import logging
import sys
from raven.handlers.logging import SentryHandler
from threading import Thread
from uuid import UUID
from ..api import ChangesApi
from ..container import Container
from ..log_reporter import LogReporter
DESCRIPTION = "LXC Wrapper for running Changes jobs"
DEFAULT_RELEASE = 'precise'
class WrapperCommand(object):
def __init__(self, argv=None):
self.argv = argv
self.stdout = sys.stdout
self.stderr = sys.stderr
def get_arg_parser(self):
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument('--snapshot', '-s', type=UUID,
help="Snapshot ID of the container")
parser.add_argument('--release', '-r',
help="Ubuntu release (default: {})".format(DEFAULT_RELEASE))
parser.add_argument('--keep', action='store_true', default=False,
help="Don't destroy the container after running cmd/build")
parser.add_argument('--no-validate', action='store_false', default=True, dest='validate',
help="Don't validate downloaded images")
parser.add_argument('--save-snapshot', action='store_true', default=False,
help="Create an image from this container")
parser.add_argument('--clean', action='store_true', default=False,
help="Use a fresh container from Ubuntu minimal install")
parser.add_argument('--flush-cache', action='store_true', default=False,
help="Rebuild Ubuntu minimal install cache")
parser.add_argument('--api-url',
help="API URL to Changes (i.e. https://changes.example.com/api/0/)")
parser.add_argument('--jobstep-id',
help="Jobstep ID for Changes")
parser.add_argument('--pre-launch',
help="Command to run before container is launched")
parser.add_argument('--post-launch',
help="Command to run after container is launched")
parser.add_argument('--user', '-u', default='ubuntu',
help="User to run command (or script) as")
parser.add_argument('--script',
help="Script to execute as command")
parser.add_argument('--s3-bucket',
help="S3 Bucket to store/fetch images from")
parser.add_argument('--log-level', default='INFO')
parser.add_argument('cmd', nargs=argparse.REMAINDER,
help="Command to run inside the container")
return parser
def configure_logging(self, level):
logging.basicConfig(level=level)
root = logging.getLogger()
root.addHandler(SentryHandler())
def patch_system_logging(self, reporter):
sys.stdout = sys.stderr = reporter
def run(self):
parser = self.get_arg_parser()
args = parser.parse_args(self.argv)
try:
args.cmd.remove('--')
except ValueError:
pass
self.configure_logging(args.log_level)
if args.api_url:
api = ChangesApi(args.api_url)
else:
assert not args.jobstep_id, "jobstep_id passed without api_url"
api = None
jobstep_id = args.jobstep_id
# setup log capturing
if jobstep_id:
reporter = LogReporter(api, args.jobstep_id)
reporter_thread = Thread(target=reporter.process)
reporter_thread.start()
self.patch_system_logging(reporter)
else:
reporter_thread = None
if jobstep_id:
# fetch build information to set defaults for things like snapshot
# TODO(dcramer): make this support a small amount of downtime
# TODO(dcramer): make this verify the snapshot
resp = api.get_jobstep(args.jobstep_id)
assert resp['status']['id'] != 'finished', \
'JobStep already marked as finished, aborting.'
release = resp['data'].get('release') or DEFAULT_RELEASE
# If we're expected a snapshot output we need to override
# any snapshot parameters, and also ensure we're creating a clean
# image
if resp['expectedSnapshot']:
snapshot = str(UUID(resp['expectedSnapshot']['id']))
save_snapshot = True
clean = True
else:
if resp['snapshot']:
snapshot = str(UUID(resp['snapshot']['id']))
else:
snapshot = None
save_snapshot = False
clean = False
else:
clean = args.clean
snapshot = str(args.snapshot) if args.snapshot else None
save_snapshot = args.save_snapshot
release = args.release or DEFAULT_RELEASE
assert clean or not (save_snapshot and snapshot), \
"You cannot create a snapshot from an existing snapshot"
container = Container(
snapshot=snapshot,
release=release,
validate=args.validate,
s3_bucket=args.s3_bucket,
)
try:
if args.jobstep_id:
api.update_jobstep(args.jobstep_id, {"status": "in_progress"})
container.launch(args.pre_launch, args.post_launch, clean, args.flush_cache)
# TODO(dcramer): we should assert only one type of command arg is set
if args.cmd:
container.run(args.cmd, user=args.user)
if args.script:
container.run_script(args.script, user=args.user)
if args.api_url and args.jobstep_id:
container.run(['changes-client',
'--server', args.api_url,
'--jobstep_id', args.jobstep_id], user=args.user)
if save_snapshot:
snapshot = container.create_image()
print(" ==> Snapshot saved: {}".format(snapshot))
container.upload_image(snapshot=snapshot)
api.update_snapshot_image(snapshot, {"status": "active"})
except Exception as e:
if args.jobstep_id:
api.update_jobstep(args.jobstep_id, {"status": "finished", "result": "failed"})
if save_snapshot:
api.update_snapshot_image(snapshot, {"status": "failed"})
logging.exception(e)
raise e
finally:
if args.jobstep_id:
api.update_jobstep(args.jobstep_id, {"status": "finished"})
if not args.keep:
container.destroy()
else:
print(" ==> Container kept at {}".format(container.rootfs))
print(" ==> SSH available via:")
print(" ==> $ sudo lxc-attach --name={}".format(container.name))
if reporter_thread:
reporter.close()
reporter_thread.join()
def main():
command = WrapperCommand()
command.run()
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -3646,24 +3646,26 @@
%0A
+ #
self.patch_
|
6da77c8ae7d16e2afc24f0b0e906fe5ff86ef6fd
|
put the destroy in the right place
|
channelguide/guide/views/submit.py
|
channelguide/guide/views/submit.py
|
from django.conf import settings
from channelguide import util
from channelguide.guide import forms
from channelguide.guide.auth import login_required
from channelguide.guide.models import Channel
SESSION_KEY = 'submitted-feed'
def destroy_submit_url_session(request):
if SESSION_KEY in request.session:
del request.session[SESSION_KEY]
@login_required
def submit_feed(request):
destroy_submit_url_session(request)
if request.method != 'POST':
form = forms.FeedURLForm(request.connection)
else:
form = forms.FeedURLForm(request.connection, request.POST.copy())
if form.is_valid():
if form.cleaned_data['url']:
request.session[SESSION_KEY] = form.get_feed_data()
else:
request.session[SESSION_KEY] = form.cleaned_data
return util.redirect("submit/step2")
else:
for error in form.error_list():
if (error['name'] == 'RSS Feed' and
'is already a channel in the guide' in error['message']):
url = form.data['url'].strip()
try:
channel = Channel.query(url=url).get(request.connection)
except LookupError:
raise # not sure what to do here
return util.render_to_response(request,
'submit-feed-exists.html',
{'channel': channel})
return util.render_to_response(request, 'submit-feed-url.html',
{'form': form})
def check_session_key(function):
def check(request, *args, **kw):
if SESSION_KEY not in request.session:
return util.redirect('submit/step1')
return function(request, *args, **kw)
return check
@login_required
@check_session_key
def submit_channel(request):
"""
Called when the user is submitting a channel. If the SESSION_KEY
cookie isn't set, then we redirect back to the first step.
XXX: check for clients that don't support cookies
If the submisstion used the GET method, we create a form that allows
the submitter to describe the feed in more detail (languages, categories,
tags, etc.).
If the submission used the POST method, we check to see if the submitted
form is valid; if it is we create the channel and redirect to the
post-submission page. Otherwise, redisplay the form with the errors
highlighted.
"""
session_dict = request.session[SESSION_KEY]
if request.method != 'POST':
form = forms.SubmitChannelForm(request.connection)
form.set_defaults(session_dict)
session_dict['detected_thumbnail'] = form.set_image_from_feed
request.session.modified = True
else:
form = forms.SubmitChannelForm(request.connection,
util.copy_post_and_files(request))
if form.user_uploaded_file():
session_dict['detected_thumbnail'] = False
request.session.modified = True
if form.is_valid():
feed_url = request.session[SESSION_KEY]['url']
channel = form.save_channel(request.user, feed_url)
request.session[SESSION_KEY]['subscribe'] = channel.get_subscription_url()
destroy_submit_url_session(request)
return util.redirect(settings.BASE_URL_FULL + "submit/after")
else:
form.save_submitted_thumbnail()
context = form.get_template_data()
if session_dict.get('detected_thumbnail'):
context['thumbnail_description'] = _("Current image (from the feed)")
else:
context['thumbnail_description'] = _("Current image (uploaded)")
return util.render_to_response(request, 'submit-channel.html', context)
@login_required
@check_session_key
def after_submit(request):
subscribe = request.session[SESSION_KEY]['subscribe']
def img(url):
return "<img src='%s' alt='Miro Video Player' border='0' class='one-click-image' />" % url
def link(inside):
return "<a href='%s' title='Miro: Internet TV'>%s</a>" % (subscribe, inside)
textLink = '%s' % link("Your 1-Click Subscribe URL")
buttons = [
'http://subscribe.getmiro.com/img/buttons/one-click-subscribe-88X34.png',
'http://subscribe.getmiro.com/img/buttons/one-click-subscribe-109X34.png']
buttonHTML = [link(img(url)) for url in buttons]
context = {
'buttons': buttonHTML,
'subscribe': subscribe,
}
return util.render_to_response(request, 'after-submit.html', context)
|
Python
| 0.99976
|
@@ -3322,102 +3322,29 @@
-destroy_submit_url_session(request)%0A return util.redirect(settings.BASE_URL_FULL +
+return util.redirect(
%22sub
@@ -3857,16 +3857,56 @@
cribe'%5D%0A
+ destroy_submit_url_session(request)%0A
def
|
fb19411797ae7ac00e022a9409459c0f42969a91
|
Remove unused code
|
backend/api/helpers/i18n.py
|
backend/api/helpers/i18n.py
|
from typing import Optional
from django.conf import settings
from django.utils import translation
def make_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
return getattr(root, field_name).localize(language)
return resolver
def make_dict_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
field = getattr(root, field_name)
return field.get(language, field["en"])
return resolver
|
Python
| 0.000006
|
@@ -379,321 +379,4 @@
ver%0A
-%0A%0Adef make_dict_localized_resolver(field_name: str):%0A def resolver(root, info, language: Optional%5Bstr%5D = None) -%3E str:%0A language = language or translation.get_language() or settings.LANGUAGE_CODE%0A field = getattr(root, field_name)%0A return field.get(language, field%5B%22en%22%5D)%0A%0A return resolver%0A
|
2af841027c17256964ce92b0459d32a9c210e357
|
remove unneeded check
|
mythril/analysis/solver.py
|
mythril/analysis/solver.py
|
from z3 import Solver, simplify, sat, unknown, FuncInterp, UGE
from mythril.exceptions import UnsatError
from mythril.laser.ethereum.transaction.transaction_models import (
ContractCreationTransaction,
)
import logging
def get_model(constraints):
s = Solver()
s.set("timeout", 100000)
for constraint in constraints:
s.add(constraint)
result = s.check()
if result == sat:
return s.model()
elif result == unknown:
logging.info("Timeout encountered while solving expression using z3")
raise UnsatError
def pretty_print_model(model):
ret = ""
for d in model.decls():
if type(model[d]) == FuncInterp:
condition = model[d].as_list()
ret += "%s: %s\n" % (d.name(), condition)
continue
try:
condition = "0x%x" % model[d].as_long()
except:
condition = str(simplify(model[d]))
ret += "%s: %s\n" % (d.name(), condition)
return ret
def get_transaction_sequence(global_state, constraints):
"""
Generate concrete transaction sequence
:param global_state: GlobalState to generate transaction sequence for
:param constraints: list of constraints used to generate transaction sequence
"""
transaction_sequence = global_state.world_state.transaction_sequence
# gaslimit & gasprice don't exist yet
tx_template = {
"calldata": None,
"call_value": None,
"caller": "0xCA11EDEADBEEF37E636E6CA11EDEADBEEFCA11ED",
}
txs = {}
creation_tx_ids = []
tx_constraints = constraints.copy()
for transaction in transaction_sequence:
tx_id = str(transaction.id)
if not isinstance(transaction, ContractCreationTransaction):
# Constrain calldatasize
max_calldatasize = 5000
if max_calldatasize is not None:
tx_constraints.append(
UGE(max_calldatasize, transaction.call_data.calldatasize)
)
txs[tx_id] = tx_template.copy()
else:
creation_tx_ids.append(tx_id)
model = get_model(tx_constraints)
for transaction in transaction_sequence:
if not isinstance(transaction, ContractCreationTransaction):
tx_id = str(transaction.id)
txs[tx_id]["calldata"] = "0x" + "".join(
[
hex(b)[2:] if len(hex(b)) % 2 == 0 else "0" + hex(b)[2:]
for b in transaction.call_data.concretized(model)
]
)
for d in model.decls():
name = d.name()
if "call_value" in name:
tx_id = name.replace("call_value", "")
if not tx_id in creation_tx_ids:
call_value = "0x%x" % model[d].as_long()
txs[tx_id]["call_value"] = call_value
if "caller" in name:
tx_id = name.replace("caller", "")
if not tx_id in creation_tx_ids:
caller = "0x" + ("%x" % model[d].as_long()).zfill(64)
txs[tx_id]["caller"] = caller
return txs
|
Python
| 0.000001
|
@@ -2697,16 +2697,45 @@
d.name()
+%0A logging.warn(d.name)
%0A%0A
@@ -3015,92 +3015,103 @@
-tx_id = name.replace(%22caller%22, %22%22)%0A if not tx_id in creation_tx_ids:%0A
+# caller is 'creator' for creation transactions%0A tx_id = name.replace(%22caller%22, %22%22)%0A
@@ -3173,20 +3173,16 @@
ll(64)%0A%0A
-
|
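get_model above is a thin wrapper over z3's check/model cycle with a 100-second timeout. A self-contained sketch of the same pattern against plain z3 (pip install z3-solver); the x > 3, x < 5 constraint is invented for illustration:

from z3 import Int, Solver, sat

x = Int("x")
s = Solver()
s.set("timeout", 100000)  # milliseconds, as in get_model
s.add(x > 3, x < 5)
if s.check() == sat:
    print(s.model()[x])  # -> 4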
77a53584062e0a0b51d972b1fe08b2efa12b90a8
|
Remove get_notebook_list function
|
nbgrader/html/formgrade.py
|
nbgrader/html/formgrade.py
|
import json
import os
import glob
from flask import Flask, request, abort, redirect, url_for, render_template
app = Flask(__name__, static_url_path='/static')
def get_notebook_list():
suffix = ".autograded.html"
notebooks = glob.glob(os.path.join(app.notebook_dir, "*{}".format(suffix)))
notebooks = [os.path.split(x)[1][:-len(suffix)] for x in notebooks]
return sorted(notebooks)
def get_notebook_score(_id):
score = 0
max_score = 0
needs_manual_grade = False
notebook = app.gradebook.find_notebook(_id=_id)
grades = app.gradebook.find_grades(notebook=notebook)
for grade in grades:
if grade.score is not None:
score += grade.score
elif grade.autoscore is not None:
score += grade.autoscore
else:
needs_manual_grade = True
if grade.max_score is not None:
max_score += grade.max_score
return {
"score": score,
"max_score": max_score,
"needs_manual_grade": needs_manual_grade
}
@app.route("/fonts/<filename>")
def fonts(filename):
return redirect(url_for('static', filename=os.path.join("fonts", filename)))
@app.route("/")
def home():
return render_template("notebook_list.html")
@app.route("/<nb>")
def notebook(nb):
filename = os.path.join(app.notebook_dir, nb)
if not os.path.exists(filename):
abort(404)
with open(filename, "r") as fh:
contents = fh.read()
return contents
@app.route("/api/notebooks")
def get_notebooks():
notebooks = [x.to_dict() for x in app.gradebook.notebooks]
for nb in notebooks:
nb.update(get_notebook_score(nb["_id"]))
student = app.gradebook.find_student(_id=nb["student"])
nb["student_name"] = "{last_name}, {first_name}".format(**student.to_dict())
nb["student_id"] = student.student_id
return json.dumps(notebooks)
@app.route("/api/notebook/<_id>/next")
def next_notebook(_id):
ids = [x._id for x in app.gradebook.notebooks]
index = ids.index(_id)
if index == (len(ids) - 1):
return json.dumps(None)
return app.gradebook.find_notebook(_id=ids[index + 1]).to_json()
@app.route("/api/notebook/<_id>/prev")
def prev_notebook(_id):
ids = [x._id for x in app.gradebook.notebooks]
index = ids.index(_id)
if index == 0:
return json.dumps(None)
return app.gradebook.find_notebook(_id=ids[index - 1]).to_json()
@app.route("/api/notebook/<_id>/grades")
def get_all_grades(_id):
notebook = app.gradebook.find_notebook(_id=_id)
grades = app.gradebook.find_grades(notebook=notebook)
return json.dumps([x.to_dict() for x in grades])
@app.route("/api/notebook/<_id>/comments")
def get_all_comments(_id):
notebook = app.gradebook.find_notebook(_id=_id)
comments = app.gradebook.find_comments(notebook=notebook)
return json.dumps([x.to_dict() for x in comments])
@app.route("/api/grade/<_id>", methods=["GET", "PUT"])
def get_grade(_id):
grade = app.gradebook.find_grade(_id=_id)
if request.method == "PUT":
grade.score = request.json.get("score", None)
app.gradebook.update_grade(grade)
return grade.to_json()
@app.route("/api/comment/<_id>", methods=["GET", "PUT"])
def get_comment(_id):
comment = app.gradebook.find_comment(_id=_id)
if request.method == "PUT":
comment.comment = request.json.get("comment", None)
app.gradebook.update_comment(comment)
return comment.to_json()
if __name__ == "__main__":
app.run(debug=True)
|
Python
| 0.000031
|
@@ -18,20 +18,8 @@
t os
-%0Aimport glob
%0A%0Afr
@@ -147,248 +147,8 @@
)%0A%0A%0A
-def get_notebook_list():%0A suffix = %22.autograded.html%22%0A notebooks = glob.glob(os.path.join(app.notebook_dir, %22*%7B%7D%22.format(suffix)))%0A notebooks = %5Bos.path.split(x)%5B1%5D%5B:-len(suffix)%5D for x in notebooks%5D%0A return sorted(notebooks)%0A%0A%0A
def
|
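After this change the notebook list comes solely from app.gradebook rather than from globbing *.autograded.html files. The roll-up in get_notebook_score reduces to the following plain-data sketch (the dicts stand in for Grade records and are invented here):

grades = [
    {"score": 2.0, "autoscore": None, "max_score": 2.0},
    {"score": None, "autoscore": 1.5, "max_score": 3.0},
    {"score": None, "autoscore": None, "max_score": 1.0},  # needs manual grading
]
score = sum(g["score"] if g["score"] is not None else (g["autoscore"] or 0)
            for g in grades)
max_score = sum(g["max_score"] for g in grades)
needs_manual = any(g["score"] is None and g["autoscore"] is None for g in grades)
print(score, max_score, needs_manual)  # 3.5 6.0 True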
8054d460bd22c8301d234f8165f906b5b60c34c6
|
Fix incorrect data being sent in judge_flask
|
judge/judge_flask.py
|
judge/judge_flask.py
|
from flask import *
import threading
import lcm
import forseti2 as fs2
import sys
sys.path.append('../src')
import settings
import util
import time
import datetime
app = Flask(__name__)
@app.route('/')
def serve_console():
return render_template('judge_console.html')
@app.route('/clock')
def serve_clock():
return render_template('clock.html')
@app.route('/api/v1/score-delta', methods=["PUT", "POST"])
def score_delta():
global seq
args = {}
for k, v in request.form.items():
args[k] = int(v)
seq.publish(**args)
return "{success: true}"
# data container for persistent state
class FlaskInfo(object):
def __init__(self):
self._last_update_time = time.time()
self.stored_a = 0
self.game_time = 0.0
self.stage_time = 0.0
self.total_stage_time = 0.0
self.stage_name = "none"
self.bonus_time = 0.0
self.blue_points = ['?', '?', '?', '?', 0]
self.gold_points = ['?', '?', '?', '?', 0]
self.bonus_possession = '?'
self.bonus_points = '?'
self.team_numbers = [0, 0, 0, 0]
self.team_names = ['', '', '', '']
def __setattr__(self, name, value):
self.__dict__["_last_update_time"] = time.time()
self.__dict__[name] = value
def time_since_last_update(self):
return time.time() - self._last_update_time
fi = FlaskInfo()
def comms_status():
return "COMMS_UP" if fi.time_since_last_update() < 1 else "COMMS_DOWN"
@app.route('/api/v1/all-info')
def all_info():
data = {
'stored_a' : fi.stored_a,
'comms_status' : comms_status(),
'game_time' : fi.game_time,
'stage_time' : fi.stage_time,
'total_stage_time' : fi.total_stage_time,
'stage_name' : fi.stage_name,
'bonus_time' : fi.bonus_time,
'blue_points' : fi.blue_points,
'gold_points' : fi.gold_points,
'bonus_possession' : fi.bonus_possession,
'bonus_points' : fi.bonus_points,
'team_numbers' : fi.team_numbers,
'team_names' : fi.team_names
}
js = json.dumps(data)
print js
resp = Response(js, status=200, mimetype='application/json')
return resp
def handle_xbox(channel, data):
msg = fs2.xbox_joystick_state.decode(data)
fi.stored_a = msg.buttons[fs2.xbox_joystick_state.GUIDE]
def handle_score(channel, data):
m = fs2.score_state.decode(data)
fi.blue_points = [m.blue_total, m.blue_autonomous_points, m.blue_normal_points, m.blue_permanent_points, m.blue_penalty]
fi.gold_points = [m.gold_total, m.blue_autonomous_points, m.gold_normal_points, m.gold_permanent_points, m.gold_penalty]
fi.bonus_possession = m.bonus_possession
fi.bonus_points = m.bonus_points
fi.bonus_time = m.bonus_time_remaining
def handle_time(channel, data):
m = fs2.Time.decode(data)
fi.game_time = m.game_time_so_far
fi.stage_time = m.stage_time_so_far
fi.total_stage_time = m.total_stage_time
fi.stage_name = m.stage_name
def handle_match_init(channel, data):
m = fs2.Match.decode(data)
fi.team_numbers = m.team_numbers
fi.team_names = m.team_names
def main():
global lc, seq
lc = lcm.LCM(settings.LCM_URI)
lc.subscribe("xbox/state/debug/0", handle_xbox)
lc.subscribe("score/state", handle_score)
lc.subscribe("Timer/Time", handle_time)
lc.subscribe("Match/Init", handle_match_init)
seq = util.LCMSequence(lc, fs2.score_delta, "score/delta")
try:
while True:
lc.handle()
except KeyboardInterrupt:
pass
def run_flask_app():
app.run(host = '0.0.0.0')
if __name__ == '__main__':
t = threading.Thread(target = main)
t.daemon = True
t.start()
run_flask_app()
|
Python
| 0.000563
|
@@ -2566,36 +2566,36 @@
m.gold_total, m.
-blue
+gold
_autonomous_poin
|
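Decoded, the one-word fix replaces m.blue_autonomous_points with m.gold_autonomous_points in the gold_points list, a classic copy-paste slip. One hedged way to make this class of bug structurally impossible (an untested sketch; the field names mirror the handler above):

def points_for(m, color):
    return [getattr(m, "%s_%s" % (color, field))
            for field in ("total", "autonomous_points", "normal_points",
                          "permanent_points", "penalty")]

# fi.blue_points = points_for(m, "blue")
# fi.gold_points = points_for(m, "gold")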
3f8be6be92c69acb71f1fbff2510fcbc2d9d6bf6
|
Move topic when there is no NodeStatus
|
june/models/topic.py
|
june/models/topic.py
|
# coding: utf-8
from datetime import datetime
from werkzeug import cached_property
from ._base import db, JuneQuery
from ..markdown import rich_markdown
from .account import Account
from .node import Node, NodeStatus
__all__ = ['Topic', 'Reply']
class Topic(db.Model):
query_class = JuneQuery
id = db.Column(db.Integer, primary_key=True)
account_id = db.Column(db.Integer, nullable=False, index=True)
node_id = db.Column(db.Integer, nullable=False, index=True)
title = db.Column(db.String(100), nullable=False)
content = db.Column(db.Text)
hits = db.Column(db.Integer, default=0)
reply_count = db.Column(db.Integer, default=0)
created = db.Column(db.DateTime, default=datetime.utcnow)
updated = db.Column(
db.DateTime,
default=datetime.utcnow,
onupdate=datetime.utcnow,
index=True,
)
def __str__(self):
return self.title
def __repr__(self):
return '<Topic: %s>' % self.id
@cached_property
def html(self):
if self.content is None:
return ''
return rich_markdown(self.content)
def save(self, user=None, node=None):
if self.id:
# update topic
db.session.add(self)
db.session.commit()
return self
# insert a topic
if user:
self.account_id = user.id
user.active = datetime.utcnow()
db.session.add(user)
if node:
self.node_id = node.id
node.topic_count += 1
db.session.add(node)
ns = NodeStatus.query.filter_by(
node_id=self.node_id, account_id=self.account_id
).first()
if not ns:
ns = NodeStatus(
node_id=self.node_id,
account_id=self.account_id,
topic_count=0,
)
ns.topic_count += 1
db.session.add(ns)
db.session.add(self)
db.session.commit()
return self
def move(self, node=None):
if self.node_id == node.id:
return self
# clear status in pre node
pre = Node.query.get(self.node_id)
pre.topic_count -= 1
db.session.add(pre)
pre_ns = NodeStatus.query.filter_by(
node_id=self.node_id, account_id=self.account_id
).first()
pre_ns.topic_count -= 1
db.session.add(pre_ns)
# increase status in post node
node.topic_count += 1
db.session.add(node)
ns = NodeStatus.query.filter_by(
node_id=node.id, account_id=self.account_id
).first()
ns.topic_count += 1
db.session.add(ns)
self.node_id = node.id
db.session.add(self)
db.session.commit()
return self
def delete(self, user=None, node=None):
if not user:
user = Account.query.get(self.account_id)
if not node:
node = Node.query.get(self.node_id)
user.active = datetime.utcnow()
db.session.add(user)
node.topic_count -= 1
db.session.add(node)
ns = NodeStatus.query.filter_by(
node_id=self.node_id, account_id=self.account_id
).first()
if ns and ns.topic_count:
ns.topic_count -= 1
db.session.add(ns)
db.session.delete(self)
db.session.commit()
return self
class Reply(db.Model):
query_class = JuneQuery
id = db.Column(db.Integer, primary_key=True)
account_id = db.Column(db.Integer, nullable=False)
topic_id = db.Column(db.Integer, index=True)
content = db.Column(db.Text)
created = db.Column(db.DateTime, default=datetime.utcnow)
flags = db.Column(db.Integer, default=0)
def __str__(self):
return self.content
@cached_property
def html(self):
if self.content is None:
return ''
return rich_markdown(self.content)
def save(self, user=None, topic=None):
if self.id:
# update
db.session.add(self)
db.session.commit()
return self
if user:
self.account_id = user.id
user.active = datetime.utcnow()
db.session.add(user)
if topic:
self.topic_id = topic.id
topic.reply_count += 1
db.session.add(topic)
db.session.add(self)
db.session.commit()
return self
def delete(self, user=None, topic=None):
if not topic:
topic = Topic.query.get(self.topic_id)
topic.reply_count -= 1
db.session.add(topic)
db.session.delete(self)
db.session.commit()
return self
|
Python
| 0.000002
|
@@ -2181,19 +2181,21 @@
-pre
+node1
= Node.
@@ -2226,19 +2226,21 @@
-pre
+node1
.topic_c
@@ -2272,19 +2272,21 @@
ion.add(
-pre
+node1
)%0A
@@ -2287,22 +2287,19 @@
-pre_
ns
+1
= NodeS
@@ -2408,22 +2408,19 @@
-pre_
ns
+1
.topic_c
@@ -2456,14 +2456,11 @@
add(
-pre_
ns
+1
)%0A%0A
@@ -2645,32 +2645,32 @@
self.account_id%0A
-
).first(
@@ -2663,32 +2663,207 @@
).first()%0A
+ if not ns:%0A ns = NodeStatus(%0A node_id=self.node_id,%0A account_id=self.account_id,%0A topic_count=0,%0A )%0A
ns.topic
|
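Decoded, the fix renames the locals (pre to node1, pre_ns to ns1) and, more importantly, mirrors the get-or-create logic from save(): when the destination node has no NodeStatus row for this account yet, one is created before incrementing. The added branch is:

ns = NodeStatus.query.filter_by(
    node_id=node.id, account_id=self.account_id
).first()
if not ns:
    ns = NodeStatus(
        node_id=self.node_id,
        account_id=self.account_id,
        topic_count=0,
    )
ns.topic_count += 1
db.session.add(ns)

Note that the created row uses self.node_id, which at this point still holds the old node's id (it is only reassigned to node.id further down), so node_id=node.id may have been intended.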
91b51267f0290ebfaf09da3d2e2ff385a2dc7ce7
|
Test API user detail endpoint with DELETE method is not public
|
billjobs/tests/tests_api.py
|
billjobs/tests/tests_api.py
|
from django.urls import reverse
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient
class APIStatusCode(TestCase):
"""
Test user API response status code
"""
fixtures=['test_api_user.yaml']
def setUp(self):
self.admin = User.objects.get(pk=1)
self.user = User.objects.get(pk=2)
self.client = APIClient()
def test_admin_auth_token(self):
"""
Test status code is 200 when admin uses correct credentials
"""
data = {'username': self.admin.username, 'password': 'jobs'}
response = self.client.post(reverse('api-token-auth'), data,
format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_user_auth_token(self):
"""
Test status code is 200 when user uses correct credentials
"""
data = {'username': self.user.username, 'password': 'jobs'}
response = self.client.post(reverse('api-token-auth'), data,
format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_invalid_user(self):
"""
Test invalid user gets 400
"""
data = {'username': 'foo', 'password': 'bar'}
response = self.client.post(reverse('api-token-auth'), data,
format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class APITokenAuthentication(TestCase):
"""
Test API token authentication
"""
fixtures=['test_api_user.yaml']
def setUp(self):
self.admin = User.objects.get(pk=1)
self.user = User.objects.get(pk=2)
self.client = APIClient()
self.url = reverse('api-token-auth')
def test_admin_token_auth(self):
"""
Test admin token auth returns a valid token
"""
data = {'username': self.admin.username, 'password': 'jobs' }
response = self.client.post(self.url, data)
self.assertTrue(len(response.data['token']), 20)
def test_user_token_auth(self):
"""
Test user api-token-auth returns a valid token
"""
data = {'username': self.user.username, 'password': 'jobs' }
response = self.client.post(self.url, data)
self.assertTrue(len(response.data['token']), 20)
def test_invalid_user_get_token_error(self):
"""
Test an invalid user does not get a token
"""
data = {'username': 'invalid', 'password': 'jobs'}
response = self.client.post(self.url, data)
self.assertIn('Unable to log in with provided credentials.',
response.data['non_field_errors'] )
class APIAnonymousPermission(TestCase):
"""
Test API anonymous level permission to endpoints
"""
def setUp(self):
self.client = APIClient()
def test_api_auth_get_is_public(self):
"""
Test api login GET method is public
"""
response = self.client.get(reverse('rest_framework:login'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_api_auth_post_is_public(self):
"""
Test api login POST method is public
"""
response = self.client.post(reverse('rest_framework:login'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_api_auth_token_post_is_public(self):
"""
Test api token POST method is public
Method returns 400 with bad credentials; we test that the response
status code is not 403
"""
response = self.client.post(reverse('api-token-auth'))
self.assertNotEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_api_user_get_is_not_public(self):
"""
Test api user endpoint with GET method is not public
Anonymous users cannot list users
"""
response = self.client.get(reverse('user'))
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_api_user_post_is_not_public(self):
"""
Test api user endpoint with POST method is not public
Anonymous users cannot create a user
"""
response = self.client.post(reverse('user'))
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_api_user_detail_get_is_not_public(self):
"""
Test api user detail endpoint with GET method is not public
Anonymous users cannot retrieve user information
"""
response = self.client.get(reverse('user-detail', args=(1,)))
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_api_user_detail_put_is_not_public(self):
"""
Test api user detail endpoint with PUT method is not public
Anonymous users cannot update a user instance
"""
response = self.client.put(reverse('user-detail', args=(1,)), {'password': 'inject'})
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
|
Python
| 0.000001
|
@@ -5084,28 +5084,387 @@
atus.HTTP_401_UNAUTHORIZED)%0A
+%0A def test_api_user_detail_delete_is_not_public(self):%0A %22%22%22%0A Test api user detail endpoint with DELETE method is not public%0A Anonymous user can not delete an user instance%0A %22%22%22%0A response = self.client.delete(reverse('user-detail', args=(1,)))%0A self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)%0A%0A
|
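Decoded from the hunk above (with the docstring grammar tidied), the added test is:

def test_api_user_detail_delete_is_not_public(self):
    """
    Test api user detail endpoint with DELETE method is not public
    Anonymous users cannot delete a user instance
    """
    response = self.client.delete(reverse('user-detail', args=(1,)))
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)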
b3c41e891252504954dcf08d1cfe909375af63be
|
Add alias for gettext as underscore to fix graders with translated messages.
|
grader_support/run.py
|
grader_support/run.py
|
#!/usr/bin/env python
"""
Run a set of tests on a submission, printing the outputs to stdout as a json
string.
Note: this command will run student code, so should be run in a sandbox, or
(and!) the code should be sanitized first. Because this just runs the code on
various sample inputs, and does not have answers, bad student code can only hurt
itself.
"""
import gettext
import json
import random
import sys
import six
from . import gradelib # to set the random seed
from . import graderutil
usage = "Usage: run.py GRADER SUBMISSION seed" # pylint: disable=invalid-name
# Install gettext for translation support. This gettext install works within the sandbox,
# so the path to graders/conf/locale can be relative.
# LANGUAGE is set in graderutil.py
trans = gettext.translation( # pylint: disable=invalid-name
'graders',
localedir='conf/locale',
fallback=True,
languages=[graderutil.LANGUAGE]
)
trans.install(names=None)
def run(grader_name, submission_name, seed=1):
"""
`grader_name`: importable module name of the grader
`submission_name`: importable module name of the submission
`seed`: A value to seed randomness with.
Returns a data structure:
{
'grader': {
'status': 'ok', # or 'error', 'nograder'
'stdout': 'whatever grader printed',
'exception': 'a stack trace if error'
},
'submission': {
'status': 'ok', # or 'error', 'caught'
'stdout': 'whatever the submission printed',
'exception': 'a stack trace if error'
},
'results': [
["Test short desc", "test detailed description", "test output..."],
...
],
'exceptions': 0, # or however many were caught.
}
"""
output = {
'grader': {
'status': 'notrun',
},
'submission': {
'status': 'notrun',
},
'results': [],
'exceptions': 0,
}
# Use a private random number generator, so student code won't accidentally
# mess it up. (if they mess it up deliberately, we don't care--it only
# hurts them).
gradelib.rand = random.Random(seed)
# Also seed the random singleton in case the exercise uses random numbers.
random.seed(seed + 1)
grader_mod, results = import_captured(grader_name, our_code=True)
if grader_mod:
try:
grader = grader_mod.grader
except: # pylint: disable=bare-except
results['status'] = 'error'
results['exception'] = graderutil.format_exception()
output['exceptions'] += 1
else:
output['exceptions'] += 1
output['grader'].update(results)
if output['grader']['status'] == 'ok':
submission, results = import_captured(submission_name)
output['submission'].update(results)
if submission and output['submission']['status'] == 'ok':
# results is a list of ("short description", "detailed desc", "output") tuples.
try:
for test in grader.tests():
with graderutil.captured_stdout() as test_stdout:
try:
exception_output = ""
test(submission)
except gradelib.EndTest:
grader.caught_end_test()
except: # pylint: disable=bare-except
# The error could be either the grader code or the submission code,
# so hide information.
exception_output = graderutil.format_exception(
main_file=submission_name,
hide_file=True
)
output['exceptions'] += 1
else:
exception_output = ""
# Get the output, including anything printed, and any exception.
test_output = test_stdout.getvalue()
if test_output and test_output[-1] != '\n':
test_output += '\n'
test_output += exception_output
output['results'].append(
(test.short_description, test.detailed_description, test_output)
)
except: # pylint: disable=bare-except
output['grader']['status'] = 'error'
output['grader']['exception'] = graderutil.format_exception()
output['exceptions'] += 1
else:
output['exceptions'] += 1
if grader.uncaught_end_tests():
# We raised EndTest more than we caught them, the student must be
# catching them, inadvertently or not.
output['submission']['exception'] = _(
"Your code interfered with our grader. Don't use bare 'except' clauses.") # pylint: disable=line-too-long
output['submission']['status'] = 'caught'
return output
def import_captured(name, our_code=False):
"""
Import the module `name`, capturing stdout, and any exceptions that happen.
Returns the module, and a dict of results.
If `our_code` is true, then the code is edX-authored, and any exception output
can include full context. If `our_code` is false, then this is student-submitted
code, and should have only student-provided information visible in exception
traces. This isn't a security precaution, it just keeps us from showing confusing
and unhelpful information to students.
"""
result = {
'status': 'notrun',
}
try:
with graderutil.captured_stdout() as stdout:
mod = __import__(name)
except: # pylint: disable=bare-except
result['status'] = 'error'
if our_code:
exc = graderutil.format_exception()
else:
exc = graderutil.format_exception(main_file=name, hide_file=True)
result['exception'] = exc
mod = None
else:
result['status'] = 'ok'
result['stdout'] = stdout.getvalue()
return mod, result
def main(args): # pragma: no cover
"""
Execute the grader from the command line
"""
if len(args) != 3:
print(usage)
return
(grader_path, submission_path, seed) = args
seed = int(seed)
# strip off .py
grader_name = grader_path[:-3]
submission_name = submission_path[:-3]
output = run(grader_name, submission_name, seed)
print(json.dumps(output))
if __name__ == '__main__': # pragma: no cover
main(sys.argv[1:])
|
Python
| 0
|
@@ -915,16 +915,34 @@
UAGE%5D%0A)%0A
+_ = trans.gettext%0A
trans.in
|
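Decoded, the whole fix is one alias, binding the conventional underscore name explicitly instead of relying on trans.install() to inject _ into builtins. A runnable sketch of the pattern (fallback=True degrades to an identity NullTranslations when no catalog is found):

import gettext

trans = gettext.translation('graders', localedir='conf/locale', fallback=True)
_ = trans.gettext
print(_("Your code interfered with our grader. Don't use bare 'except' clauses."))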
4563e383962690cc196f4551f217d488501b660e
|
Support MySQL as well
|
bin/count_users_in_rooms.py
|
bin/count_users_in_rooms.py
|
import sys
import os
import yaml
import redis
import psycopg2
dino_env = sys.argv[1]
dino_home = sys.argv[2]
if dino_home is None:
raise RuntimeError('need environment variable DINO_HOME')
if dino_env is None:
raise RuntimeError('need environment variable DINO_ENVIRONMENT')
def load_secrets_file(config_dict: dict) -> dict:
from string import Template
import ast
secrets_path = dino_home + '/secrets/%s.yaml' % dino_env
# first substitute environment variables, which take precedence over the yaml config (if it exists)
template = Template(str(config_dict))
template = template.safe_substitute(os.environ)
if os.path.isfile(secrets_path):
try:
secrets = yaml.safe_load(open(secrets_path))
except Exception as e:
raise RuntimeError("Failed to open secrets configuration {0}: {1}".format(secrets_path, str(e)))
template = Template(template)
template = template.safe_substitute(secrets)
return ast.literal_eval(template)
config = yaml.safe_load(open(dino_home + '/dino.yaml'))[dino_env]
config = load_secrets_file(config)
dbtype = config['database']['type']
if dbtype == 'rdbms':
dbname = config['database']['db']
dbhost = config['database']['host']
dbport = config['database']['port']
dbuser = config['database']['user']
dbpass = config['database']['password']
try:
conn = psycopg2.connect("dbname='%s' user='%s' host='%s' port='%s' password='%s'" % (
dbname, dbuser, dbhost, dbport, dbpass)
)
except:
raise RuntimeError('could not connect to db')
cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]
r_host, r_port = config['cache']['host'].split(':')
r_db = config['cache']['db']
r_server = redis.Redis(host=r_host, port=r_port, db=r_db)
r_server.set('users:online:inrooms', the_count)
|
Python
| 0
|
@@ -43,24 +43,8 @@
dis%0A
-import psycopg2%0A
%0A%0Adi
@@ -1143,16 +1143,31 @@
type'%5D%0A%0A
+the_count = 0%0A%0A
if dbtyp
@@ -1180,16 +1180,60 @@
rdbms':%0A
+ dbdriver = config%5B'database'%5D%5B'driver'%5D%0A
dbna
@@ -1427,28 +1427,84 @@
word'%5D%0A%0A
-try:
+if dbdriver.startswith('postgres'):%0A import psycopg2%0A
%0A con
@@ -1656,71 +1656,332 @@
-except:%0A raise RuntimeError('could not connect to db')%0A%0A
+ cur = conn.cursor()%0A cur.execute(%22%22%22select count(*) from rooms_users_association_table%22%22%22)%0A the_count = cur.fetchone()%5B0%5D%0A%0A if dbtype == 'rdbms' and dbdriver.startswith('mysql'):%0A import MySQLdb%0A%0A conn = MySQLdb.connect(passwd=dbpass, db=dbname, user=dbuser, host=dbhost, port=dbport)%0A
@@ -1996,24 +1996,28 @@
nn.cursor()%0A
+
cur.exec
@@ -2074,24 +2074,28 @@
n_table%22%22%22)%0A
+
the_coun
@@ -2117,20 +2117,16 @@
e()%5B0%5D%0A%0A
-
r_host,
@@ -2169,20 +2169,16 @@
it(':')%0A
-
r_db = c
@@ -2199,20 +2199,16 @@
%5B'db'%5D%0A%0A
-
r_server
@@ -2257,20 +2257,16 @@
b=r_db)%0A
-
r_server
|
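A condensed sketch of the decoded branching (the actual patch repeats the cursor/count block per driver, and the config variables are assumed bound as in the script above):

if dbdriver.startswith('postgres'):
    import psycopg2
    conn = psycopg2.connect("dbname='%s' user='%s' host='%s' port='%s' password='%s'" % (
        dbname, dbuser, dbhost, dbport, dbpass))
elif dbdriver.startswith('mysql'):
    import MySQLdb
    # MySQLdb expects an int port, so the config must provide one
    conn = MySQLdb.connect(passwd=dbpass, db=dbname, user=dbuser,
                           host=dbhost, port=dbport)

cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]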
952a342cc160f7b994e7a06c7836d1319414a30e
|
Fix bitcointxn.disassemble so it actually works
|
bitcointxn.py
|
bitcointxn.py
|
from bitcoinvarlen import varlenDecode, varlenEncode
from util import dblsha
from struct import pack, unpack
_nullprev = b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'
class Txn:
def __init__(self, data=None):
if data:
self.data = data
self.idhash()
@classmethod
def new(cls):
o = cls()
o.version = 1
o.inputs = []
o.outputs = []
o.locktime = 0
return o
def setCoinbase(self, sigScript, seqno = 0xffffffff):
self.inputs = ( ((_nullprev, 0xffffffff), sigScript, seqno), )
def addInput(self, prevout, sigScript, seqno = 0xffffffff):
self.inputs.append( (prevout, sigScript, seqno) )
def addOutput(self, amount, pkScript):
self.outputs.append( (amount, pkScript) )
def disassemble(self):
self.version = unpack('<L', self.data[:4])[0]
(inputCount, data) = varlenDecode(self.data[4:])
inputs = []
for i in range(inputCount):
prevout = (data[:32], unpack('<L', data[32:36])[0])
(sigScript, data) = varlenDecode(data[36:])
sigScript = data[:sigScript]
seqno = unpack('<L', data[sigScript:sigScript + 4])[0]
data = data[sigScript + 4:]
inputs.append( (prevout, sigScript, seqno) )
self.inputs = inputs
(outputCount, data) = varlenDecode(self.data[4:])
outputs = []
for i in range(outputCount):
amount = unpack('<Q', data[:8])[0]
(pkScript, data) = varlenDecode(data[8:])
pkScript = data[:pkScript]
data = data[pkScript:]
outputs.append( (amount, pkScript) )
self.outputs = outputs
assert len(data) == 4
self.locktime = unpack('<L', data)[0]
def isCoinbase(self):
return len(self.inputs) == 1 and self.inputs[0][0][1] == 0xffffffff and self.inputs[0][0][0] == _nullprev
def assemble(self):
data = pack('<L', self.version)
inputs = self.inputs
data += varlenEncode(len(inputs))
for prevout, sigScript, seqno in inputs:
data += prevout[0] + pack('<L', prevout[1])
data += varlenEncode(len(sigScript)) + sigScript
data += pack('<L', seqno)
outputs = self.outputs
data += varlenEncode(len(outputs))
for amount, pkScript in outputs:
data += pack('<Q', amount)
data += varlenEncode(len(pkScript)) + pkScript
data += pack('<L', self.locktime)
self.data = data
self.idhash()
def idhash(self):
self.txid = dblsha(self.data)
|
Python
| 0.000001
|
@@ -954,24 +954,27 @@
%09%09(sigScript
+Len
, data) = va
@@ -1021,24 +1021,27 @@
a%5B:sigScript
+Len
%5D%0A%09%09%09seqno =
@@ -1068,16 +1068,19 @@
igScript
+Len
:sigScri
@@ -1077,24 +1077,27 @@
en:sigScript
+Len
+ 4%5D)%5B0%5D%0A%09%09
@@ -1118,16 +1118,19 @@
igScript
+Len
+ 4:%5D%0A%09
@@ -1235,37 +1235,28 @@
arlenDecode(
-self.data%5B4:%5D
+data
)%0A%09%09outputs
@@ -1341,16 +1341,19 @@
pkScript
+Len
, data)
@@ -1405,16 +1405,19 @@
pkScript
+Len
%5D%0A%09%09%09dat
@@ -1433,16 +1433,19 @@
pkScript
+Len
:%5D%0A%09%09%09ou
|
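Decoded, the fix separates the length returned by varlenDecode from the bytes it prefixes (the old code reused one name for both) and makes the output loop continue from data instead of rewinding to self.data[4:]. The corrected per-input parse reads:

(sigScriptLen, data) = varlenDecode(data[36:])
sigScript = data[:sigScriptLen]
seqno = unpack('<L', data[sigScriptLen:sigScriptLen + 4])[0]
data = data[sigScriptLen + 4:]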
33acc8d495e5a71a6d8ef99162920d5076e40db5
|
Shorten the text in the WordPress post button
|
blog/admin.py
|
blog/admin.py
|
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.utils.html import format_html
from django.conf.urls import url
from django import forms
from django.http import HttpResponseRedirect
from django.template.response import TemplateResponse
from mptt.admin import MPTTModelAdmin
from .models import Post, Category, Media, Comment, Commenter
from .forms import BulkMediaForm
# Register your models here.
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
prepopulated_fields = { 'slug': ['title'] }
search_fields = ['title']
list_display = ('title', 'get_full_url', 'wordpress_action', 'pub_date', 'admin_first_image', )
# add a link to the blog post on the admin list display to make it easier to preview the post
def get_full_url(self, instance):
return "<a href='%s'>%s</a>" % (instance.get_absolute_url(), instance.get_absolute_url())
get_full_url.short_description = 'Link'
get_full_url.allow_tags = True
# show the first image on the admin list so we can make sure it gets set
def admin_first_image(self, instance):
if not instance.first_image:
return u'None'
return u'<img src="%s" height="150" />' % (instance.first_image.url)
admin_first_image.short_description = 'First Image'
admin_first_image.allow_tags = True
def get_urls(self):
urls = super().get_urls()
custom_urls = [
url(
r'^(?P<post_id>.+)/wordpress/$',
self.admin_site.admin_view(self.process_wordpress),
name='post_wordpress',
),
]
return custom_urls + urls
def process_wordpress(self, request, post_id):
post = self.get_object(request, post_id)
context = self.admin_site.each_context(request)
context['opts'] = self.model._meta
context['post'] = post
return TemplateResponse(request, 'admin/blog/wordpress.html', context)
def wordpress_action(self, obj):
return format_html(
'<a class="button" href="{}">View Wordpress Post</a>',
reverse('admin:post_wordpress', args=[obj.pk])
)
wordpress_action.short_description = 'Wordpress'
wordpress_action.allow_tags = True
@admin.register(Category)
class CategoryAdmin(MPTTModelAdmin):
prepopulated_fields = { 'slug': ['title'] }
@admin.register(Media)
class MediaAdmin(admin.ModelAdmin):
search_fields = ['image_name']
list_display = ('image_name', 'pub_date', 'admin_url', 'admin_thumbnail', 'admin_full', )
change_list_template = 'admin/blog/bulk_upload_list.html'
def generate_data_for_file(self, request, field_name, field_file, index):
if field_name == 'full_image':
index_string = '{0:02d}'.format(index+1)
return dict(image_name=index_string)
def admin_url(self, instance):
return "<a href='%s'>%s</a>" % (instance.get_blog_url(), instance.get_blog_url())
admin_url.short_description = 'Image URL'
admin_url.allow_tags = True
# this stuff is to show a preview of the image in the admin list
def admin_thumbnail(self, instance):
if not instance.scale_image:
return u'None'
return u'<img src="%s" height="150" />' % (instance.scale_image.url)
admin_thumbnail.short_description = 'Image'
admin_thumbnail.allow_tags = True
def admin_full(self, instance):
return u'<img src="%s" height="150" />' % (instance.full_image.url)
admin_full.short_description = 'Full Image'
admin_full.allow_tags = True
def get_urls(self):
urls = super().get_urls()
custom_urls = [
url(
r'^bulk-upload/$',
self.admin_site.admin_view(self.process_bulk_upload),
name='bulk_upload',
),
]
return custom_urls + urls
def process_bulk_upload(self, request, *args, **kwargs):
if request.method != 'POST':
form = BulkMediaForm()
else:
form = BulkMediaForm(request.POST, request.FILES)
if form.is_valid():
counter = 0
base_name = form.cleaned_data['name']
for img in request.FILES.getlist('images'):
counter += 1
new_med = Media()
new_med.image_name = base_name + '-{0:02d}'.format(counter)
new_med.full_image = img
new_med.save()
self.message_user(request, "Success")
else:
self.message_user(request, "Error occurred")
url = reverse(
'admin:blog_media_changelist',
current_app=self.admin_site.name,
)
return HttpResponseRedirect(url)
context = self.admin_site.each_context(request)
context['opts'] = self.model._meta
context['form'] = form
return TemplateResponse(
request,
'admin/blog/bulk_upload.html',
context,
)
@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
list_display = ('author', 'pub_date', 'text', 'approved', 'notify', 'spam')
list_filter = ['approved', 'spam']
search_fields = ['author']
actions = ['mark_approved', 'mark_not_approved', 'mark_spam', 'mark_not_spam']
ordering = ['-pub_date']
def mark_approved(self, request, queryset):
for comment in queryset:
comment.approve()
mark_approved.short_description = "Approve the selected comments"
def mark_not_approved(self, request, queryset):
for comment in queryset:
comment.unapprove()
mark_not_approved.short_description = "Unapprove the selected comments"
def mark_spam(self, request, queryset):
for comment in queryset:
comment.spam = True
comment.save()
mark_spam.short_description = "Mark the comments as spam"
def mark_not_spam(self, request, queryset):
for comment in queryset:
comment.spam = False
comment.save()
mark_not_spam.short_description = "Mark the comments as safe"
@admin.register(Commenter)
class CommenterAdmin(admin.ModelAdmin):
list_display = ('username', 'approved', 'spam')
list_filter = ['approved', 'spam']
search_fields = ['username']
actions = ['mark_approved', 'mark_not_approved', 'mark_spam', 'mark_not_spam']
def mark_approved(self, request, queryset):
for commenter in queryset:
commenter.approve()
mark_approved.short_description = "Approve the selected users"
def mark_not_approved(self, request, queryset):
for commenter in queryset:
commenter.unapprove()
mark_not_approved.short_description = "Unapprove the selected users"
def mark_spam(self, request, queryset):
for commenter in queryset:
commenter.mark_spam()
mark_spam.short_description = "Mark the users as spammers"
def mark_not_spam(self, request, queryset):
for commenter in queryset:
commenter.mark_safe()
mark_not_spam.short_description = "Mark the users as safe"
|
Python
| 0.999999
|
@@ -2120,23 +2120,8 @@
View
- Wordpress Post
%3C/a%3E
|
81de62d46d7daefb2e1eef0d0cc4f5ca5c8aef2f
|
Use the GCBV queryset to get the PostGetMixin object.
|
blog/utils.py
|
blog/utils.py
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
|
Python
| 0
|
@@ -116,16 +116,33 @@
b_date'%0A
+ model = Post%0A
mont
@@ -318,16 +318,81 @@
slug.%22,%0A
+ 'not_exist':%0A %22No %7B%7D by that date and slug.%22,%0A
%7D%0A%0A
@@ -1090,63 +1090,380 @@
-return get_object_or_404(%0A Post, **filter_dict)
+if queryset is None:%0A queryset = self.get_queryset()%0A queryset = queryset.filter(**filter_dict)%0A try:%0A obj = queryset.get()%0A except queryset.model.DoesNotExist:%0A raise Http404(%0A self.errors%5B'not_exist'%5D.format(%0A queryset.model%0A ._meta.verbose_name))%0A return obj
%0A
|
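Decoded, get_object now resolves through the view's queryset (so subclasses can override get_queryset) and raises Http404 with the new 'not_exist' message instead of delegating to get_object_or_404. The replacement tail is below; note it relies on Http404, so the module also needs from django.http import Http404, which this hunk does not add:

if queryset is None:
    queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
    obj = queryset.get()
except queryset.model.DoesNotExist:
    raise Http404(
        self.errors['not_exist'].format(
            queryset.model._meta.verbose_name))
return obj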
146dc2567b7349c807fbef3b2c812c04c46d7525
|
Fix abbreviation test on *nix
|
keyboard/keyboard.py
|
keyboard/keyboard.py
|
# -*- coding: utf-8 -*-
import time
import platform
if platform.system() == 'Windows':
from. import winkeyboard as os_keyboard
else:
from. import nixkeyboard as os_keyboard
from .keyboard_event import KeyboardEvent, KEY_DOWN, KEY_UP, normalize_name
from .generic import GenericListener
_pressed_events = {}
class KeyboardListener(GenericListener):
def callback(self, event):
if not event.scan_code:
return
if event.event_type == KEY_UP:
if event.scan_code in _pressed_events:
del _pressed_events[event.scan_code]
else:
_pressed_events[event.scan_code] = event
return self.invoke_handlers(event)
def listen(self):
os_keyboard.listen(self.callback)
listener = KeyboardListener()
@listener.wrap
def is_pressed(key):
""" Returns True if the key (by name or code) is pressed. """
if isinstance(key, int):
return key in _pressed_events
elif len(key) and '+' in key:
return all(is_pressed(part) for part in key.split('+'))
else:
for event in _pressed_events.values():
if event.matches(key):
return True
return False
def _split_combination(hotkey):
if isinstance(hotkey, int) or len(hotkey) == 1:
return [[hotkey]]
else:
return [step.split('+') for step in hotkey.split(', ')]
hotkeys = {}
@listener.wrap
def add_hotkey(hotkey, callback, args=(), blocking=True, timeout=1):
"""
Adds a hotkey handler that invokes callback each time the hotkey is
detected. Returns a handler that can be used to unregister it later. The
hotkey must be in the format "ctrl+shift+a, s". This would trigger when the
user presses "ctrl+shift+a", releases, and then presses "s".
`blocking` defines if the system should block processing other hotkeys
after a match is found. This feature is Windows-only.
`timeout` is the amount of time allowed to pass between key strokes before
the combination state is reset.
"""
steps = _split_combination(hotkey)
# Just a dynamic object to store attributes for the `handler` closure.
state = lambda: None
state.step = 0
state.time = time.time()
def handler(event):
if event.event_type == KEY_UP:
return
timed_out = state.step > 0 and timeout and event.time - state.time > timeout
unexpected = not any(event.matches(part) for part in steps[state.step])
if unexpected or timed_out:
if state.step > 0:
state.step = 0
# Could be start of hotkey again.
handler(event)
else:
state.step = 0
else:
state.time = event.time
if all(is_pressed(part) for part in steps[state.step]):
state.step += 1
if state.step == len(steps):
state.step = 0
callback(*args)
return blocking
hotkeys[hotkey] = handler
listener.add_handler(handler)
return handler
@listener.wrap
def remove_hotkey(hotkey):
""" Removes a previously registered hotkey. """
listener.remove_handler(hotkeys[hotkey])
def add_abbreviation(src, dst):
"""
Registers a hotkey that replaces one typed text with another. For example
add_abbreviation('tm', '™')
Replaces every "tm" followed by a space with a ™ symbol.
"""
return add_hotkey(', '.join(src + ' '), lambda: write('\b'*len(src) + dst), timeout=0, blocking=False)
remove_abbreviation = remove_hotkey
@listener.wrap
def write(text, delay=0):
"""
Sends artificial keyboard events to the OS, simulating the typing of a given
text. Characters not available on the keyboard are typed as explicit unicode
characters using OS-specific functionality, such as alt+codepoint.
Delay is a number of seconds to wait between keypresses.
"""
for letter in text:
try:
if letter in '\n\b\t ':
letter = normalize_name(letter)
scan_code, shifted = os_keyboard.map_char(letter)
if shifted:
send('shift', True, False)
os_keyboard.press(scan_code)
os_keyboard.release(scan_code)
if shifted:
send('shift', False, True)
except ValueError:
os_keyboard.type_unicode(letter)
if delay:
time.sleep(delay)
@listener.wrap
def send(combination, do_press=True, do_release=True):
"""
Performs a given hotkey combination.
Ex: "ctrl+alt+del", "alt+F4, enter", "shift+s"
"""
for step in _split_combination(combination):
scan_codes = [os_keyboard.map_char(normalize_name(part))[0] for part in step]
if do_press:
for scan_code in scan_codes:
os_keyboard.press(scan_code)
if do_release:
for scan_code in reversed(scan_codes):
os_keyboard.release(scan_code)
@listener.wrap
def press(combination):
send(combination, True, False)
@listener.wrap
def release(combination):
send(combination, False, True)
@listener.wrap
def wait(combination):
"""
Blocks the program execution until a key combination is activated.
"""
from threading import Lock
lock = Lock()
lock.acquire()
hotkey_handler = add_hotkey(combination, lock.release)
lock.acquire()
listener.remove_handler(hotkey_handler)
@listener.wrap
def record(until='escape'):
"""
Records and returns all keyboard events until the user presses the given
key combination.
"""
recorded = []
listener.add_handler(recorded.append)
wait(until)
listener.remove_handler(recorded.append)
return recorded
@listener.wrap
def play(events, speed_factor=1.0):
"""
Plays a sequence of recorded events, maintaining the relative time
intervals. If speed_factor is invalid (<= 0) the actions are replayed
instantly.
"""
last_time = None
for event in events:
if speed_factor > 0 and last_time is not None:
time.sleep((event.time - last_time) / speed_factor)
last_time = event.time
if event.event_type == KEY_DOWN:
os_keyboard.press(event.scan_code)
else:
os_keyboard.release(event.scan_code)
def get_typed_strings(events, allow_backspace=True):
"""
Given a sequence of events, tries to deduce what strings were typed.
Strings are separated when an unencodable key is pressed (such as tab
or enter). Characters are converted to uppercase according to shift
and capslock status. If `allow_backspace` is True, backspaces remove the
last character typed.
get_typed_strings(record()) -> ['', 'This is what', 'I recorded', '']
"""
shift_pressed = False
capslock_pressed = False
strings = ['']
for event in events:
if event.matches('shift'):
shift_pressed = event.event_type == 'down'
elif event.matches('caps lock') and event.event_type == 'down':
capslock_pressed = not capslock_pressed
elif event.matches('backspace') and event.event_type == 'down':
strings[-1] = strings[-1][:-1]
elif event.event_type == 'down':
if len(event.name) == 1:
single_char = event.name
if shift_pressed ^ capslock_pressed:
single_char = single_char.upper()
strings[-1] = strings[-1] + single_char
else:
strings.append('')
return strings
if __name__ == '__main__':
add_abbreviation('tm', '™')
input()
#print('Press esc twice to replay keyboard actions.')
#play(record('esc, esc'), 3)
|
Python
| 0.000024
|
@@ -7667,24 +7667,25 @@
ation('tm',
+u
'%E2%84%A2')%0A inp
|
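The one-character fix: under Python 2 a bare '™' in this source is a byte string of three UTF-8 bytes, so write() iterates bytes instead of one character; the u prefix makes it a single-character text literal on both 2 and 3. Equivalently:

src, dst = 'tm', u'\u2122'  # u'\u2122' == u'™'
print(len(dst))             # 1 (one code point, not three UTF-8 bytes)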
2b419d499c37597094379f524d8347f35eeda57c
|
Fix the tinycss CSS validator
|
src/checker/plugin/checkers/tinycss_css_validator_plugin.py
|
src/checker/plugin/checkers/tinycss_css_validator_plugin.py
|
from common import PluginType
import tinycss
from yapsy.IPlugin import IPlugin
import logging
class CssValidator(IPlugin):
category = PluginType.CHECKER
id = "tinycss"
def __init__(self):
self.journal = None
def setJournal(self, journal):
self.journal = journal
def check(self, transaction):
"""Pusti validator, ulozi chyby.
"""
try:
parser = tinycss.make_parser('page3')
data = str(transaction.getContent(), 'utf-8')
stylesheet = parser.parse_stylesheet(data)
for error in stylesheet.errors:
self.journal.foundDefect(transaction.idno, "stylesheet", "Stylesheet error", [error.line, error.reason], 0.7)
except UnicodeDecodeError as e:
logging.getLogger(__name__).debug("Unicode decode error: "+format(e))
return
|
Python
| 0.000209
|
@@ -456,16 +456,135 @@
page3')%0A
+ c = transaction.getContent()%0A if type(c) == str:%0A data = c%0A else:%0A
|
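Decoded against the surrounding code, the fix fetches the content once and only decodes when it is not already a str; the pre-existing str(transaction.getContent(), 'utf-8') line becomes the else branch, so the effective logic is:

c = transaction.getContent()
if type(c) == str:
    data = c
else:
    data = str(transaction.getContent(), 'utf-8')

(isinstance(c, str) would be the more idiomatic test, and the else branch could reuse c instead of fetching the content a second time.)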
08f9575a0de95432729cbf1a9649148998030e17
|
Fix error output on Windows (#50)
|
cmake/legacy/wafstyleout.py
|
cmake/legacy/wafstyleout.py
|
#!/usr/bin/env python
import subprocess
import sys
import os
import argparse
def unicodeWrite(out, str):
try:
out.write(str)
except UnicodeEncodeError:
bytes = str.encode(out.encoding or 'ascii', 'replace')
if hasattr(sys.stdout, 'buffer'):
out.buffer.write(bytes)
else:
out.write(bytes.decode(out.encoding or 'ascii', 'replace'))
try:
p = subprocess.Popen(sys.argv[1:], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
except Exception as e:
print('Execution failure: %s' % str(e))
sys.exit(-1)
includes = ''
msg = ''
if out:
out = out.decode(sys.stdout.encoding or 'ascii', 'replace')
includes = '\n'.join([l for l in out.split(os.linesep) if l.startswith('Note: including file:')])
out = '\n'.join([l for l in out.split(os.linesep) if not l.startswith('Note: including file:')])
msg = msg + out
unicodeWrite(sys.stdout, includes) # Ninja relies on result of /showIncludes when compiling with cl
if err:
err = err.decode(sys.stderr.encoding or 'ascii', 'replace')
msg = msg + err
if msg:
parser = argparse.ArgumentParser()
parser.add_argument('-o')
parser.add_argument('-c')
(args, unparsed) = parser.parse_known_args(sys.argv[2:])
src_str = None
for opt in [args.c, args.o]:
if opt:
src_str = opt
break
if not src_str:
linkOutArg = '/out:'
for arg in unparsed:
if arg.startswith(linkOutArg):
src_str = arg[len(linkOutArg):]
break
if not src_str:
src_str = sys.argv[-1]
try:
src_str = os.path.basename(src_str)
except:
pass
# The Visual Studio compiler always prints name of the input source
# file when compiling and "Creating library <file>.lib and object
# <file>.exp" when linking an executable. We try to ignore those
# outputs using a heuristic.
if p.returncode == 0 and (
msg.strip() == src_str or
msg.strip().startswith('Creating library ')):
sys.exit(p.returncode)
if p.returncode == 0:
marker_str = 'WARNING'
else:
if 'bde_runtest' in sys.argv[2]:
marker_str = 'TEST'
else:
marker_str = 'ERROR'
# This logic handles unicode in the output.
status_str = u'[{} ({})] <<<<<<<<<<\n{}>>>>>>>>>>\n'.format(src_str, marker_str, msg)
unicodeWrite(sys.stderr, status_str)
sys.exit(p.returncode)
|
Python
| 0
|
@@ -70,16 +70,32 @@
argparse
+%0Aimport platform
%0A%0Adef un
@@ -2393,16 +2393,18 @@
str = u'
+%7B%7D
%5B%7B%7D (%7B%7D)
@@ -2440,16 +2440,64 @@
.format(
+'%5Cn' if platform.system() == 'Windows' else '',
src_str,
|
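Decoded, the fix prepends a newline to the status banner on Windows only, so the marker block starts at column 0 even when the preceding tool output did not end with a line break (src_str, marker_str, and msg as defined above):

import platform

newline = '\n' if platform.system() == 'Windows' else ''
status_str = u'{}[{} ({})] <<<<<<<<<<\n{}>>>>>>>>>>\n'.format(
    newline, src_str, marker_str, msg)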
b4764268302c71481e6a9194b4def0617b7af74d
|
version 0.4.1
|
cmscloud_client/__init__.py
|
cmscloud_client/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '0.4.0'
|
Python
| 0.000002
|
@@ -36,11 +36,11 @@
= '0.4.
-0
+1
'%0A
|
04a255e5747a76cbc4a29626a3ecc23dd23b42b3
|
Add stop words dictionary
|
code/helper/dictionaries.py
|
code/helper/dictionaries.py
|
#! /usr/bin/env python3
from helper.easierlife import BASE_DIR
# Load an example dictionary
# First column is doc id, second is sentence id, third is entity
def load_examples_dictionary(filename):
examples = set()
with open(filename, 'rt') as examples_dict_file:
for line in examples_dict_file:
examples.add(frozenset(line.rstrip().split("\t")))
return examples
# Load the genes dictionary
def load_genes_dictionary(filename):
genes_dict = dict()
with open(filename, 'rt') as genes_dict_file:
for line in genes_dict_file:
tokens = line.strip().split("\t")
# first token is name, the rest are synonyms
name = tokens[0]
for synonym in tokens:
genes_dict[synonym] = name
return genes_dict
# Load the HPOterms dictionary
# Terms are converted to lower case
def load_hpoterms_dictionary(filename):
hpoterms_dict = dict()
with open(filename, 'rt') as hpoterms_dict_file:
for line in hpoterms_dict_file:
tokens = line.strip().split("\t")
# 1st token is name, 2nd is description, 3rd is 'C' and 4th is
# (presumably) the distance from the root of the DAG.
name = tokens[0]
description = tokens[1]
# Skip "All"
# XXX (Matteo) There may be more generic terms that we want to skip
if description == "All":
continue
description_words = description.split()
variants = get_variants(description_words)
for variant in variants:
hpoterms_dict[variant.lower()] = name
return hpoterms_dict
# Load a dictionary which is a set.
def load_set(filename):
_set = set()
with open(filename, 'rt') as set_file:
for line in set_file:
line = line.rstrip()
_set.add(line)
return _set
# Load a dictionary which is a set, but convert the entries to lower case
def load_set_lower_case(filename):
case_set = load_set(filename)
lower_case_set = set()
for entry in case_set:
lower_case_set.add(entry.lower())
return lower_case_set
## Dictionaries
GENES_DICT_FILENAME = BASE_DIR + "/dicts/hugo_synonyms.tsv"
ENGLISH_DICT_FILENAME = BASE_DIR + "/dicts/english_words.tsv"
HPOTERMS_DICT_FILENAME = BASE_DIR + "/dicts/hpo_terms.tsv"
MED_ACRONS_DICT_FILENAME = BASE_DIR + "/dicts/med_acronyms_pruned.tsv"
NIH_GRANTS_DICT_FILENAME = BASE_DIR + "/dicts/grant_codes_nih.tsv"
NSF_GRANTS_DICT_FILENAME = BASE_DIR + "/dicts/grant_codes_nsf.tsv"
POS_GENE_MENTIONS_DICT_FILENAME = BASE_DIR + "/dicts/positive_gene_mentions.tsv"
NEG_GENE_MENTIONS_DICT_FILENAME = BASE_DIR + "/dicts/negative_gene_mentions.tsv"
## Dictionary of dictionaries. First argument is the filename, second is the
## function to call to load the dictionary. The function must take the filename as
## input and return an object like a dictionary, or a set, or a list, ...
dictionaries = dict()
dictionaries["genes"] = [GENES_DICT_FILENAME, load_genes_dictionary]
dictionaries["english"] = [ENGLISH_DICT_FILENAME, load_set_lower_case]
dictionaries["hpoterms"] = [HPOTERMS_DICT_FILENAME,load_hpoterms_dictionary ]
dictionaries["nih_grants"] = [NIH_GRANTS_DICT_FILENAME, load_set]
dictionaries["nsf_grants"] = [NSF_GRANTS_DICT_FILENAME, load_set]
dictionaries["med_acrons"] = [MED_ACRONS_DICT_FILENAME, load_set]
dictionaries["pos_gene_mentions"] = [POS_GENE_MENTIONS_DICT_FILENAME, load_examples_dictionary]
dictionaries["neg_gene_mentions"] = [NEG_GENE_MENTIONS_DICT_FILENAME, load_examples_dictionary]
## Load a dictionary using the appropriate filename and load function
def load_dict(dict_name):
if dict_name not in dictionaries:
return None
filename = dictionaries[dict_name][0]
load = dictionaries[dict_name][1]
return load(filename)
## Given a list of words, return a list of variants built by splitting words that contain the separator.
## An example makes this clearer:
## given words = ["the", "cat/dog", "is", "mine"], the function returns
## ["the cat is mine", "the dog is mine"]
## XXX (Matteo) Maybe goes in a different module
def get_variants(words, separator="/"):
if len(words) == 0:
return []
variants = []
base = []
i = 0
# Look for a word containing a "/"
while words[i].find(separator) == -1:
base.append(words[i])
i += 1
if i == len(words):
break
# If we found a word containing a "/", call recursively
if i < len(words):
variants_starting_words = words[i].split("/")
following_variants = get_variants(words[i+1:])
for variant_starting_word in variants_starting_words:
variant_base = base + [variant_starting_word]
if len(following_variants) > 0:
for following_variant in following_variants:
variants.append(" ".join(variant_base +[following_variant]))
else:
variants.append(" ".join(variant_base))
else:
variants = [" ".join(base)]
return variants
|
Python
| 0.999373
|
@@ -2564,16 +2564,84 @@
sf.tsv%22%0A
+STOPWORDS_DICT_FILENAME = BASE_DIR + %22/dicts/english_stopwords.tsv%22%0A
POS_GENE
@@ -3458,32 +3458,96 @@
NAME, load_set%5D%0A
+dictionaries%5B%22stopwords%22%5D = %5BSTOPWORDS_DICT_FILENAME, load_set%5D%0A
dictionaries%5B%22po
|
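Decoded, the change registers one more set-valued dictionary; after it, callers can pull the stop words through the same load_dict path (the usage line is illustrative):

STOPWORDS_DICT_FILENAME = BASE_DIR + "/dicts/english_stopwords.tsv"
dictionaries["stopwords"] = [STOPWORDS_DICT_FILENAME, load_set]

stopwords = load_dict("stopwords")  # -> set of words, via load_set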
6f926d405028c849192003dfec46401728850bde
|
Fix the isdict_containing matcher to check entries in sorted order
|
src/hamcrest/library/collection/isdict_containingentries.py
|
src/hamcrest/library/collection/isdict_containingentries.py
|
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.core.helpers.hasmethod import hasmethod
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
__author__ = "Jon Reid"
__copyright__ = "Copyright 2011 hamcrest.org"
__license__ = "BSD, see License.txt"
class IsDictContainingEntries(BaseMatcher):
def __init__(self, value_matchers):
self.value_matchers = value_matchers
def _not_a_dictionary(self, dictionary, mismatch_description):
if mismatch_description:
mismatch_description.append_description_of(dictionary) \
.append_text(' is not a mapping object')
return False
def matches(self, dictionary, mismatch_description=None):
for key in self.value_matchers:
try:
if not key in dictionary:
if mismatch_description:
mismatch_description.append_text('no ') \
.append_description_of(key) \
.append_text(' key in ') \
.append_description_of(dictionary)
return False
except TypeError:
return self._not_a_dictionary(dictionary, mismatch_description)
value_matcher = self.value_matchers[key]
try:
actual_value = dictionary[key]
except TypeError:
return self._not_a_dictionary(dictionary, mismatch_description)
if not value_matcher.matches(actual_value):
if mismatch_description:
mismatch_description.append_text('value for ') \
.append_description_of(key) \
.append_text(' ')
value_matcher.describe_mismatch(actual_value, mismatch_description)
return False
return True
def describe_mismatch(self, item, mismatch_description):
self.matches(item, mismatch_description)
def describe_keyvalue(self, index, description):
"""Describes key-value pair at given index."""
description.append_description_of(index) \
.append_text(': ') \
.append_description_of(self.value_matchers[index])
def describe_to(self, description):
description.append_text('a dictionary containing {')
first = True
for key in self.value_matchers:
if not first:
description.append_text(', ')
self.describe_keyvalue(key, description)
first = False
description.append_text('}')
def has_entries(*keys_valuematchers, **kv_args):
"""Matches if dictionary contains entries satisfying a dictionary of keys
and corresponding value matchers.
:param matcher_dict: A dictionary mapping keys to associated value matchers,
or to expected values for
:py:func:`~hamcrest.core.core.isequal.equal_to` matching.
Note that the keys must be actual keys, not matchers. Any value argument
that is not a matcher is implicitly wrapped in an
:py:func:`~hamcrest.core.core.isequal.equal_to` matcher to check for
equality.
Examples::
has_entries({'foo':equal_to(1), 'bar':equal_to(2)})
has_entries({'foo':1, 'bar':2})
``has_entries`` also accepts a list of keyword arguments:
.. function:: has_entries(keyword1=value_matcher1[, keyword2=value_matcher2[, ...]])
:param keyword1: A keyword to look up.
:param valueMatcher1: The matcher to satisfy for the value, or an expected
value for :py:func:`~hamcrest.core.core.isequal.equal_to` matching.
Examples::
has_entries(foo=equal_to(1), bar=equal_to(2))
has_entries(foo=1, bar=2)
Finally, ``has_entries`` also accepts a list of alternating keys and their
value matchers:
.. function:: has_entries(key1, value_matcher1[, ...])
:param key1: A key (not a matcher) to look up.
:param valueMatcher1: The matcher to satisfy for the value, or an expected
value for :py:func:`~hamcrest.core.core.isequal.equal_to` matching.
Examples::
has_entries('foo', equal_to(1), 'bar', equal_to(2))
has_entries('foo', 1, 'bar', 2)
"""
if len(keys_valuematchers) == 1:
try:
base_dict = keys_valuematchers[0].copy()
for key in base_dict:
base_dict[key] = wrap_matcher(base_dict[key])
except AttributeError:
raise ValueError('single-argument calls to has_entries must pass a dict as the argument')
else:
if len(keys_valuematchers) % 2:
raise ValueError('has_entries requires key-value pairs')
base_dict = {}
for index in range(int(len(keys_valuematchers) / 2)):
base_dict[keys_valuematchers[2 * index]] = wrap_matcher(keys_valuematchers[2 * index + 1])
for key, value in kv_args.items():
base_dict[key] = wrap_matcher(value)
return IsDictContainingEntries(base_dict)
|
Python
| 0.000005
|
@@ -383,16 +383,23 @@
chers =
+sorted(
value_ma
@@ -404,16 +404,25 @@
matchers
+.items())
%0A%0A de
@@ -748,32 +748,47 @@
%0A for key
+, value_matcher
in self.value_m
@@ -1362,61 +1362,8 @@
n)%0A%0A
- value_matcher = self.value_matchers%5Bkey%5D%0A
@@ -2116,16 +2116,23 @@
, index,
+ value,
descrip
@@ -2387,34 +2387,13 @@
_of(
-self.value_matchers%5Bindex%5D
+value
)%0A%0A
@@ -2524,24 +2524,31 @@
for key
+, value
in self.val
@@ -2628,24 +2628,24 @@
_text(', ')%0A
-
@@ -2671,16 +2671,23 @@
lue(key,
+ value,
descrip
|
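A minimal usage sketch of the has_entries matcher defined in the record above, assuming the PyHamcrest package is installed; the three call styles from the docstring are interchangeable:

from hamcrest import assert_that, equal_to, has_entries

response = {'status': 'ok', 'count': 3}

# equivalent dict, keyword, and alternating key/value forms
assert_that(response, has_entries({'status': 'ok', 'count': equal_to(3)}))
assert_that(response, has_entries(status='ok', count=3))
assert_that(response, has_entries('status', 'ok', 'count', 3))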
88dd48eab612e89b956dea5600a999c78c61d5fb
|
fix lpproj algorithm
|
lpproj/lpproj.py
|
lpproj/lpproj.py
|
import numpy as np
from sklearn.neighbors import kneighbors_graph, NearestNeighbors
from sklearn.utils import check_array
from sklearn.base import BaseEstimator, TransformerMixin
class LocalityPreservingProjection(BaseEstimator, TransformerMixin):
def __init__(self, n_neighbors=5, n_components=2, eigen_solver='auto',
neighbors_algorithm='auto'):
self.n_neighbors = n_neighbors
self.n_components = n_components
self.eigen_solver = eigen_solver
self.neighbors_algorithm = neighbors_algorithm
def fit(self, X, y=None):
X = check_array(X)
self.nbrs_ = NearestNeighbors(n_neighbors=self.n_neighbors,
algorithm=self.neighbors_algorithm)
self.nbrs_.fit(X)
self.training_data_ = self.nbrs_._fit_X
# TODO: make this more efficient
# L = D - W
W = kneighbors_graph(self.nbrs_, self.n_neighbors,
mode='connectivity')
D = np.diag(W.sum(1))
L = D - W
evals, evecs = np.linalg.eigh(np.dot(X, np.dot(L, X.T)),
np.dot(X, np.dot(D, X.T)))
self.projection_ = evecs[:, :self.n_components]
return self
def transform(self, X):
X = check_array(X)
        return np.dot(self.projection_.T, X)
|
Python
| 0.000012
|
@@ -12,16 +12,42 @@
y as np%0A
+from scipy import linalg%0A%0A
from skl
@@ -84,16 +84,34 @@
rs_graph
+, NearestNeighbors
%0Afrom sk
@@ -268,17 +268,16 @@
rMixin):
-:
%0A def
@@ -391,12 +391,30 @@
uto'
+, kernel_width=None
):%0A
-%0A
@@ -584,16 +584,57 @@
lgorithm
+%0A self.kernel_width = kernel_width
%0A%0A de
@@ -952,26 +952,97 @@
#
-L = D - W%0A
+TODO: make duplicates behave correctly%0A if self.kernel_width is None:%0A
@@ -1121,16 +1121,20 @@
+
mode='co
@@ -1148,123 +1148,316 @@
ity'
-)%0A D = np.diag(W.sum(1))%0A L = D - W%0A%0A evals, evecs = np.linalg.eigh(np.dot(X, np.dot(L, X.T
+, include_self=True)%0A else:%0A W = kneighbors_graph(self.nbrs_, self.n_neighbors,%0A mode='distance')%0A W.data = np.exp(-W.data ** 2 / self.kernel_width ** 2)%0A W = W.toarray()%0A W = np.maximum(W, W.T)%0A %0A D = np.diag(W.sum(1
))
-,
%0A
@@ -1457,36 +1457,42 @@
1))%0A
-
+L = D - W%0A
@@ -1475,32 +1475,33 @@
- W%0A
+%0A
np
@@ -1488,25 +1488,57 @@
-
+A = np.dot(X.T, np.dot(L, X))%0A B =
np.dot(
@@ -1538,16 +1538,18 @@
np.dot(X
+.T
, np.dot
@@ -1557,13 +1557,72 @@
D, X
-.T)))
+))%0A %0A evals, evecs = linalg.eigh(A, B)
%0A
@@ -1761,18 +1761,18 @@
re
-u
t
+u
rn np.do
@@ -1773,16 +1773,19 @@
np.dot(
+X,
self.pro
@@ -1796,11 +1796,6 @@
ion_
-.T, X
)%0A
|
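The core of the fix above: numpy.linalg.eigh accepts a single matrix, so the generalized eigenproblem (X^T L X) v = lambda (X^T D X) v has to go through scipy.linalg.eigh. A self-contained sketch on synthetic data; the Gaussian affinity below is an assumption for illustration, not the library's exact graph construction:

import numpy as np
from scipy import linalg

rng = np.random.RandomState(0)
X = rng.rand(20, 5)  # 20 samples, 5 features

# dense pairwise affinity, Laplacian L = D - W
W = np.exp(-((X[:, None] - X[None, :]) ** 2).sum(-1))
D = np.diag(W.sum(1))
L = D - W

A = np.dot(X.T, np.dot(L, X))
B = np.dot(X.T, np.dot(D, X))
evals, evecs = linalg.eigh(A, B)      # generalized symmetric eigenproblem
projection = evecs[:, :2]             # eigenvectors of the two smallest eigenvalues
X_embedded = np.dot(X, projection)    # note X @ P, not P.T @ X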
8e58d7cccb837254cc433c7533bff119cc19645d
|
Use json instead of django.utils.simplejson.
|
javascript_settings/templatetags/javascript_settings_tags.py
|
javascript_settings/templatetags/javascript_settings_tags.py
|
from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
|
Python
| 0
|
@@ -1,31 +1,16 @@
-from django import template
+import json%0A
%0Afro
@@ -21,22 +21,16 @@
ango
-.utils
import
simp
@@ -25,26 +25,24 @@
import
-simplejson
+template
%0A%0Afrom j
@@ -661,14 +661,8 @@
' +
-simple
json
|
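The change above is a straight swap: the standard-library json module provides the same dumps interface as the removed django.utils.simplejson. A small sketch with a hypothetical configuration dict:

import json

configuration = {'DEBUG': False, 'API_URL': '/api/'}  # hypothetical settings
print('var configuration = ' + json.dumps(configuration) + ';')
# -> var configuration = {"DEBUG": false, "API_URL": "/api/"};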
afafb47d77fd673abf8d8ce9baa9824b985a943a
|
Add create_class_wrapper and class_wraps
|
undecorate.py
|
undecorate.py
|
"""Allow your decorations to be un-decorated.
In some cases, such as when testing, it can be useful to access the
decorated class or function directly, so as to not to use the behavior
or interface that the decorator might introduce.
Example:
>>> from functools import wraps
>>> from undecorate import unwrap, unwrappable
>>>
>>> @unwrappable
... def pack(func):
... @wraps(func)
... def wrapper(args, kwargs):
... return func(*args, **kwargs)
... return wrapper
...
>>> @pack
... def myfunc(a, b, c=None, d=None):
... return (a, b, c, d)
...
>>> myfunc('a', 'b', c='c')
Traceback (most recent call last):
...
TypeError: wrapper() got an unexpected keyword argument 'c'
>>>
>>> unwrap(myfunc)('a', 'b', c='c')
('a', 'b', 'c', None)
"""
from functools import wraps
def unwrappable(decorator):
"""Make a decorator able to be un-decorated.
This meta-decorator takes a decorator, and returns a new decorator
that allows the decoration to be used by unwrap().
"""
@wraps(decorator)
def wrapper(decoration):
decorated = decorator(decoration)
decorated.__decoration__ = decoration
return decorated
return wrapper
def unwrap(wrapped):
"""Remove all wrappers from this decorated object."""
while True:
decoration = getattr(wrapped, '__decoration__', None)
if decoration is None:
return wrapped
wrapped = decoration
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
doctest.testfile('README.rst', optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
|
Python
| 0.000021
|
@@ -788,16 +788,25 @@
rt wraps
+, partial
%0A%0A%0Adef u
@@ -1443,16 +1443,2041 @@
ation%0A%0A%0A
+CLASS_WRAPPER_DELETES = ('__dict__', '__doc__', '__weakref__')%0ACLASS_WRAPPER_ASSIGNMENTS = ('__module__',)%0A%0A%0Adef create_class_wrapper(wrapper,%0A wrapped,%0A deleted=CLASS_WRAPPER_DELETES,%0A assigned=CLASS_WRAPPER_ASSIGNMENTS):%0A %22%22%22Create a wrapper class that looks like the wrapped class.%0A%0A wrapper is the class used to override the wrapped class.%0A wrapped is the class has values overridden by the wrapper.%0A deleted is a tuple naming the __dict__ items to be removed from the%0A wrapper class (defaults to CLASS_WRAPPER_DELETES).%0A assigned is a tuple naming the __dict__ items to be copied directly%0A from the wrapped class (defaults to CLASS_WRAPPER_ASSIGNMENTS).%0A%0A A notable difference from update_wrapper is that is creates a new class%0A that does not appear to be exactly the same as the wrapped class, but%0A rather mimics the name and the module, and inherits from the original%0A class, relying on class inheritance to mimic the behavior.%0A %22%22%22%0A __dict__ = dict(wrapper.__dict__)%0A%0A for attr in deleted:%0A __dict__.pop(attr)%0A%0A for attr in assigned:%0A __dict__%5Battr%5D = getattr(wrapped, attr)%0A%0A __dict__%5B'__wrapped__'%5D = wrapped%0A%0A # Use the metaclass of the wrapped class%0A return wrapped.__class__(wrapped.__name__, (wrapped,), __dict__)%0A%0A%0Adef class_wraps(wrapped,%0A deleted=CLASS_WRAPPER_DELETES,%0A assigned=CLASS_WRAPPER_ASSIGNMENTS):%0A %22%22%22Decorator factory to apply create_class_wrapper() to a wrapper class.%0A%0A Return a decorator that invokes create_class_wrapper() with the decorated%0A class as the wrapper argument and the arguments to class_wraps() as the%0A remaining arguments. Default arguments are as for create_class_wrapper().%0A This is a convenience function to simplify applying partial() to%0A create_class_wrapper().%0A %22%22%22%0A return partial(create_class_wrapper, wrapped=wrapped,%0A deleted=deleted, assigned=assigned)%0A%0A%0A
if __nam
|
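A hedged sketch of the class_wraps decorator that the diff above introduces, assuming the patched undecorate module is importable; the generated class subclasses the wrapped one while mimicking its name and module:

from undecorate import class_wraps

class Greeter(object):
    def greet(self):
        return 'hello'

@class_wraps(Greeter)
class LoudGreeter(object):
    def greet(self):
        # The generated class inherits from Greeter, so the original
        # implementation stays reachable.
        return Greeter.greet(self).upper()

g = LoudGreeter()
print(g.greet())            # HELLO
print(type(g).__name__)     # Greeter -- the wrapper mimics the wrapped class
print(type(g).__wrapped__)  # the original Greeter class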
fd8cf4ec0705df9df192d66f444a927109da17c7
|
Use f-string
|
src/python/grpcio_tests/tests_aio/unit/timeout_test.py
|
src/python/grpcio_tests/tests_aio/unit/timeout_test.py
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests behavior of the timeout mechanism on client side."""
import asyncio
import logging
import platform
import random
import unittest
import datetime
import grpc
from grpc.experimental import aio
from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit import _common
_SLEEP_TIME_UNIT_S = datetime.timedelta(seconds=1).total_seconds()
_TEST_SLEEPY_UNARY_UNARY = '/test/Test/SleepyUnaryUnary'
_TEST_SLEEPY_UNARY_STREAM = '/test/Test/SleepyUnaryStream'
_TEST_SLEEPY_STREAM_UNARY = '/test/Test/SleepyStreamUnary'
_TEST_SLEEPY_STREAM_STREAM = '/test/Test/SleepyStreamStream'
_REQUEST = b'\x00\x00\x00'
_RESPONSE = b'\x01\x01\x01'
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self):
self._routing_table = {
_TEST_SLEEPY_UNARY_UNARY:
grpc.unary_unary_rpc_method_handler(self._test_sleepy_unary_unary
),
_TEST_SLEEPY_UNARY_STREAM:
grpc.unary_stream_rpc_method_handler(self._test_sleepy_unary_stream),
_TEST_SLEEPY_STREAM_UNARY:
grpc.stream_unary_rpc_method_handler(self._test_sleepy_stream_unary),
_TEST_SLEEPY_STREAM_STREAM:
grpc.stream_stream_rpc_method_handler(self._test_sleepy_stream_stream)
}
@staticmethod
async def _test_sleepy_unary_unary(unused_request, unused_context):
await asyncio.sleep(_SLEEP_TIME_UNIT_S)
return _RESPONSE
@staticmethod
async def _test_sleepy_unary_stream(unused_request, unused_context):
yield _RESPONSE
await asyncio.sleep(_SLEEP_TIME_UNIT_S)
yield _RESPONSE
@staticmethod
async def _test_sleepy_stream_unary(unused_request_iterator, context):
assert _REQUEST == await context.read()
await asyncio.sleep(_SLEEP_TIME_UNIT_S)
assert _REQUEST == await context.read()
return _RESPONSE
@staticmethod
async def _test_sleepy_stream_stream(unused_request_iterator, context):
assert _REQUEST == await context.read()
await asyncio.sleep(_SLEEP_TIME_UNIT_S)
await context.write(_RESPONSE)
def service(self, handler_call_details):
return self._routing_table.get(handler_call_details.method)
async def _start_test_server():
server = aio.server()
port = server.add_insecure_port('[::]:0')
server.add_generic_rpc_handlers((_GenericHandler(),))
await server.start()
return 'localhost:%d' % port, server
class TestTimeout(AioTestBase):
async def setUp(self):
address, self._server = await _start_test_server()
self._client = aio.insecure_channel(address)
async def tearDown(self):
await self._client.close()
await self._server.stop(None)
async def test_unary_unary_success_with_timeout(self):
multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
call = multicallable(_REQUEST, timeout=2*_SLEEP_TIME_UNIT_S)
self.assertEqual(_RESPONSE, await call)
self.assertEqual(grpc.StatusCode.OK, await call.code())
async def test_unary_unary_deadline_exceeded(self):
multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
call = multicallable(_REQUEST, timeout=0.5*_SLEEP_TIME_UNIT_S)
with self.assertRaises(aio.AioRpcError) as exception_context:
await call
rpc_error = exception_context.exception
self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
async def test_unary_stream_success_with_timeout(self):
multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
call = multicallable(_REQUEST, timeout=2*_SLEEP_TIME_UNIT_S)
self.assertEqual(_RESPONSE, await call.read())
self.assertEqual(_RESPONSE, await call.read())
self.assertEqual(grpc.StatusCode.OK, await call.code())
async def test_unary_stream_deadline_exceeded(self):
multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
call = multicallable(_REQUEST, timeout=0.5*_SLEEP_TIME_UNIT_S)
self.assertEqual(_RESPONSE, await call.read())
with self.assertRaises(aio.AioRpcError) as exception_context:
await call.read()
rpc_error = exception_context.exception
self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
async def test_stream_unary_success_with_timeout(self):
multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
call = multicallable(timeout=2*_SLEEP_TIME_UNIT_S)
await call.write(_REQUEST)
await call.write(_REQUEST)
self.assertEqual(grpc.StatusCode.OK, await call.code())
async def test_stream_unary_deadline_exceeded(self):
multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
call = multicallable(timeout=0.5*_SLEEP_TIME_UNIT_S)
await call.write(_REQUEST)
await call.write(_REQUEST)
with self.assertRaises(aio.AioRpcError) as exception_context:
await call
rpc_error = exception_context.exception
self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
async def test_stream_stream_success_with_timeout(self):
multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
call = multicallable(timeout=2*_SLEEP_TIME_UNIT_S)
await call.write(_REQUEST)
self.assertEqual(_RESPONSE, await call.read())
self.assertEqual(grpc.StatusCode.OK, await call.code())
async def test_stream_stream_deadline_exceeded(self):
multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
call = multicallable(timeout=0.5*_SLEEP_TIME_UNIT_S)
await call.write(_REQUEST)
with self.assertRaises(aio.AioRpcError) as exception_context:
await call.read()
rpc_error = exception_context.exception
self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
|
Python
| 0.001188
|
@@ -3048,16 +3048,17 @@
return
+f
'localho
@@ -3064,18 +3064,15 @@
ost:
-%25d' %25
+%7B
port
+%7D'
, se
|
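The whole change above in miniature; f-strings require Python 3.6 or newer:

port = 50051
assert 'localhost:%d' % port == f'localhost:{port}'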
551c4b971f1d18e232ba193cf486300d3490224b
|
add log
|
api/photo2song.py
|
api/photo2song.py
|
import asyncio
from collections import Counter
from api.bluemix_vision_recognition import VisionRecognizer
from api.echonest import Echonest
from api.spotify import Spotify
from machines.machine_loader import MachineLoader
import machines.photo_mood
def convert(image_urls):
vr = VisionRecognizer()
ec = Echonest()
sp = Spotify()
photo_to_mood = MachineLoader.load(machines.photo_mood)
TARGET_LABELS = ['Boat', 'Human', 'Insect', 'Invertebrate', 'Mammal', 'Man Made Scene', 'Outdoors', 'People Activity', 'Placental Mammal', 'Vertebrate']
    # analyze mood
moods = Counter()
matrix = vr.recognize(image_urls).to_matrix(TARGET_LABELS)
for r in matrix:
mood = photo_to_mood.predict(r)[0]
moods[int(mood)] += 1
target_mood = moods.most_common(1)[0][0] # get top and its score
target_mood = Echonest.MOOD[target_mood]
# choose song from mood
tracks = ec.search_songs(target_mood)
# load spotify info
@asyncio.coroutine
def load_spotify(t):
t.load_spotify(sp)
tasks = [load_spotify(t) for t in tracks]
done, _ = asyncio.get_event_loop().run_until_complete(asyncio.wait(tasks))
result = {
"mood": target_mood,
"tracks": [t.__dict__ for t in tracks]
}
return result
|
Python
| 0.000002
|
@@ -553,16 +553,29 @@
ebrate'%5D
+%0A log = %5B%5D
%0A%0A #
@@ -588,16 +588,59 @@
e moode%0A
+ log.append(%22begin vision recognition%22)%0A
mood
@@ -955,16 +955,60 @@
om mood%0A
+ log.append(%22begin search song by mood%22)%0A
trac
@@ -1142,16 +1142,62 @@
fy(sp)%0A%0A
+ log.append(%22begin load song information%22)%0A
task
@@ -1358,16 +1358,16 @@
t_mood,%0A
-
@@ -1400,24 +1400,44 @@
t in tracks%5D
+,%0A %22log%22: log
%0A %7D%0A%0A
|
c59c0911c5022291b38774bf407ca83557c78cc5
|
test login and logout views.
|
user/tests.py
|
user/tests.py
|
from django.test import TestCase
class ViewsTest(TestCase):
"""
TestCase to test all exposed views for anonymous users.
"""
def setUp(self):
pass
def testHome(self):
response = self.client.get('/user/')
self.assertEquals(response.status_code, 200)
def testLogin(self):
response = self.client.get('/login/')
self.assertEquals(response.status_code, 200)
def testLogout(self):
response = self.client.get('/logout/')
self.assertEquals(response.status_code, 200)
|
Python
| 0
|
@@ -352,16 +352,21 @@
t.get('/
+user/
login/')
@@ -483,16 +483,21 @@
t.get('/
+user/
logout/'
|
c6536da7fc1eda82922b286c096412e4371f6d4c
|
Bump version
|
graphysio/__init__.py
|
graphysio/__init__.py
|
"""Graphical time series visualizer and analyzer."""
__version__ = '2021.07.14'
__all__ = [
'algorithms',
'dialogs',
'exporter',
'legend',
'mainui',
'puplot',
'tsplot',
'utils',
'types',
'ui',
'transformations',
]
|
Python
| 0
|
@@ -72,16 +72,18 @@
21.07.14
+.1
'%0A%0A__all
|
23cdb0d62e44797f84aee61f1a4c2909df8221b0
|
Fix settings import and add an option to DjangoAppEngineMiddleware to allow setting up of signals on init
|
main/__init__.py
|
main/__init__.py
|
import logging
import os
def validate_models():
"""
Since BaseRunserverCommand is only run once, we need to call
    model validation here to ensure it is run every time the code
changes.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
logging.info("Validating models...")
s = StringIO()
num_errors = get_validation_errors(s, None)
if num_errors:
s.seek(0)
error_text = s.read()
logging.critical("One or more models did not validate:\n%s" %
error_text)
else:
logging.info("All models validated.")
from djangoappengine.utils import on_production_server
if not on_production_server:
validate_models()
from django.conf import settings
class DjangoAppEngineMiddleware:
def __init__(self, app):
self.settings_module = os.environ['DJANGO_SETTINGS_MODULE']
from djangoappengine.boot import setup_env
setup_env()
## In vanilla Django, staticfiles overrides runserver to use StaticFilesHandler
## if necessary. As we can't do this in our runserver (because we handover to dev_appserver)
## this has to be done here
if (not on_production_server and settings.DEBUG) and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
from django.contrib.staticfiles.handlers import StaticFilesHandler
app = StaticFilesHandler(app)
if getattr(settings, 'ENABLE_APPSTATS', False):
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app = appstats_wsgi_middleware(app)
self.wrapped_app = app
def __call__(self, environ, start_response):
#Always make sure the settings module is set - AppEngine sometimes loses it!
os.environ['DJANGO_SETTINGS_MODULE'] = self.settings_module
return self.wrapped_app(environ, start_response)
|
Python
| 0
|
@@ -17,16 +17,65 @@
mport os
+%0Afrom django.utils.importlib import import_module
%0A%0Adef va
@@ -887,42 +887,8 @@
()%0A%0A
-from django.conf import settings%0A%0A
clas
@@ -942,16 +942,37 @@
elf, app
+, setup_signals=False
):%0A
@@ -1107,16 +1107,405 @@
_env()%0A%0A
+ from django.conf import settings%0A%0A if setup_signals:%0A # Load all models.py to ensure signal handling installation or index%0A # loading of some apps.%0A for app_to_import in settings.INSTALLED_APPS:%0A try:%0A import_module('%25s.models' %25 app_to_import)%0A except ImportError:%0A pass%0A%0A
|
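The signal-setup loop the diff adds, in isolation: importing each installed app's models module forces module-level signal handlers to register. The stdlib importlib stands in here for the django.utils.importlib import used in the diff, and INSTALLED_APPS below is a hypothetical stand-in for Django settings:

from importlib import import_module

INSTALLED_APPS = ['myproject.blog', 'myproject.shop']  # hypothetical

for app in INSTALLED_APPS:
    try:
        import_module('%s.models' % app)
    except ImportError:
        pass  # app has no models module; nothing to register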
7b2e39a19865e72510f818033727a25ed2c7bf3a
|
Add function to create symlinks to npm/bower package dirs outside of the project git repo
|
blues/node.py
|
blues/node.py
|
"""
Node.js Blueprint
=================
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.node
settings:
node:
# version: latest # Install latest node version
packages: # List of npm packages to install (Optional)
# - coffee-script
# - yuglify
# - less
"""
import os
from fabric.contrib import files
from fabric.context_managers import cd, prefix
from fabric.decorators import task
from refabric.api import info
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from refabric.operations import run
from .application.project import git_repository_path, project_home, \
sudo_project, project_name
from .util import maybe_managed
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Setup Nodejs
"""
install()
configure()
@task
def configure():
"""
Install npm packages and, if bower is in the packages,
install bower dependencies.
"""
install_packages()
install_dependencies()
def get_version():
return blueprint.get('version')
def install(for_user=None):
version = get_version()
if version == 'latest':
info('Installing latest node from tarball', )
with sudo():
install_node_build_deps()
if for_user is not None:
cm = sudo(user=for_user)
else:
cm = None
with maybe_managed(cm):
return install_latest()
else:
info('Installing node from apt')
return install_deb()
def install_node_build_deps():
info('Installing build tools')
debian.apt_get_update()
debian.apt_get('install', 'build-essential node-rimraf')
def install_latest():
info('Installing latest node and NPM for {user}', user=run('whoami').stdout)
common = [
'set -x',
'set -o verbose',
'eval PREFIX=~/.local',
'eval PROFILE=~/.bash_profile',
'eval SRC=~/node-latest-install',
'source $PROFILE',
]
setup_env = [
'echo \'export PATH=$HOME/.local/bin:$PATH\' >> $PROFILE',
'echo \'export npm_config_userconfig=$HOME/.config/npmrc\' >> $PROFILE',
'source $PROFILE',
'mkdir $PREFIX || true',
'mkdir $SRC || true'
]
run(' && '.join(common + setup_env), shell=True)
install_node_and_npm = [
'cd $SRC',
('curl -z node-latest.tar.gz'
' -O http://nodejs.org/dist/node-latest.tar.gz'),
'tar xz --strip-components=1 --file node-latest.tar.gz',
'./configure --prefix=$PREFIX',
'make install',
'curl -L https://www.npmjs.org/install.sh | sh'
]
run(' && '.join(common + install_node_and_npm), shell=True)
def install_deb():
with sudo():
lbs_release = debian.lbs_release()
# 12.04 ships with really old nodejs, TODO: 14.04?
if lbs_release in ['10.04', '12.04']:
info('Adding ppa...')
debian.add_apt_ppa('chris-lea/node.js', src=True)
info('Installing Node.js')
debian.apt_get('install', 'nodejs')
if lbs_release == '14.04':
info('Installing NPM')
debian.apt_get('install', 'npm')
debian.ln('/usr/bin/nodejs', '/usr/bin/node')
def install_packages():
packages = blueprint.get('packages', [])
if packages:
info('Installing Packages')
npm('install', *packages)
def npm(command, *options):
info('Running npm {}', command)
with sudo():
run('npm {} -g {}'.format(command, ' '.join(options)))
def install_dependencies(path=None, production=True, changed=True):
"""
Install dependencies from "package.json" at path.
:param path: Package path, current directory if None. [default: None]
:param production:
Boolean flag to toggle `--production` parameter for npm
:param changed:
Boolean flag or tuple of two commit sha to check if package.json and
bower.json were changed.
:return:
"""
dependency_path_root = path or git_repository_path()
if not files.exists(os.path.join(dependency_path_root, 'package.json')):
return
with sudo_project(), cd(dependency_path_root):
npm_changed = bower_changed = changed
if isinstance(changed, tuple): # i.e. commits: (from_sha, to_sha)
changed = '{}..{}'.format(*changed)
from blues import git
npm_changed = git.diff_stat(
git_repository_path(), changed, 'package.json')[0]
bower_changed = git.diff_stat(
git_repository_path(), changed, 'bower.json')[0]
if npm_changed:
run('npm install' + (' --production' if production else ''))
if bower_changed:
run('test -f bower.json && '
'bower install --config.interactive=false')
|
Python
| 0
|
@@ -350,16 +350,28 @@
ss%0A%0A%22%22%22%0A
+import json%0A
import o
@@ -448,16 +448,8 @@
t cd
-, prefix
%0Afro
@@ -688,22 +688,8 @@
ath,
- project_home,
%5C%0A
@@ -4947,12 +4947,649 @@
ive=false')%0A
+%0A%0Adef create_symlinks(npm_path='../node_modules',%0A bower_path='../bower_components'):%0A%0A with cd(git_repository_path()):%0A # get bower components dir from config file%0A b = run('cat .bowerrc 2%3E/dev/null %7C%7C true') or '%7B%7D'%0A b = json.loads(b).get('directory') or 'bower_components'%0A%0A for src, dst in %5B%0A ('../bower_components', b),%0A ('../node_modules', ''),%0A %5D:%0A run('mkdir -p %7Bsrc%7D && ln -sf %7Bsrc%7D %7Bdst%7D'.format(%0A src=os.path.abspath(os.path.join(git_repository_path(), src)),%0A dst=dst,%0A ), user=project_name())%0A
|
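A standalone sketch of the .bowerrc lookup inside the new create_symlinks function: bower's component directory is configurable, with bower_components as the conventional default.

import json
import os

def bower_components_dir(repo_path):
    """Return the configured bower components directory, or the default."""
    try:
        with open(os.path.join(repo_path, '.bowerrc')) as fp:
            config = json.load(fp)
    except (IOError, ValueError):  # missing file or invalid JSON
        config = {}
    return config.get('directory') or 'bower_components'

print(bower_components_dir('.'))  # bower_components unless .bowerrc overrides it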
e4ad2863236cd36e5860f1d17a06ca05e30216d5
|
Store more stuff about songs in the queue
|
make_database.py
|
make_database.py
|
import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
|
Python
| 0
|
@@ -137,16 +137,160 @@
EFAULT 0
+,%0A name TEXT,%0A artist_name TEXT,%0A artist_uri TEXT,%0A artist_image TEXT,%0A album_name TEXT,%0A album_uri TEXT,%0A album_image TEXT
%0A);%0A'''%0A
|
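With the extra columns from the diff in place, queueing a song means filling in track and artwork metadata up front. A hedged sketch with hypothetical values, assuming the table has already been created by the script above:

import sqlite3

conn = sqlite3.connect('jukebox.db')
conn.execute(
    'INSERT INTO jukebox_song_queue (spotify_uri, name, artist_name, album_name) '
    'VALUES (?, ?, ?, ?)',
    ('spotify:track:123abc', 'Some Song', 'Some Artist', 'Some Album')  # hypothetical
)
conn.commit()
conn.close()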
43712c77f58ba49bbb34e1f1a6f6df8b993f7a39
|
Fix MSA agent.
|
bioagents/msa/msa_module.py
|
bioagents/msa/msa_module.py
|
import os
import sys
import re
import pickle
import requests
import logging
from kqml import KQMLPerformative
from indra.sources.trips.processor import TripsProcessor
from bioagents import Bioagent
logging.basicConfig(format='%(levelname)s: %(name)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger('MSA')
from indra import has_config
if has_config('INDRA_DB_REST_URL') and has_config('INDRADB_REST_API_KEY'):
CAN_CHECK_STATEMENTS = True
from indra.sources.indra_db_rest import get_statements
else:
logger.warning("Database web api not specified. Cannot get background.")
CAN_CHECK_STATEMENTS = False
def _read_signor_afs():
path = os.path.dirname(os.path.abspath(__file__)) + \
'/../resources/signor_active_forms.pkl'
with open(path, 'rb') as pkl_file:
stmts = pickle.load(pkl_file)
if isinstance(stmts, dict):
signor_afs = []
for _, stmt_list in stmts.items():
signor_afs += stmt_list
else:
signor_afs = stmts
return signor_afs
class MSA_Module(Bioagent):
name = 'MSA'
tasks = ['PHOSPHORYLATION-ACTIVATING']
signor_afs = _read_signor_afs()
def receive_tell(self, msg, content):
tell_content = content[0].to_string().upper()
if tell_content == 'START-CONVERSATION':
logger.info('MSA resetting')
def respond_phosphorylation_activating(self, content):
"""Return response content to phosphorylation_activating request."""
if not CAN_CHECK_STATEMENTS:
return self.make_failure(
'NO_KNOWLEDGE_ACCESS',
'Cannot access the database through the web api.'
)
heading = content.head()
m = re.match('(\w+)-(\w+)', heading)
if m is None:
return self.make_failure('UNKNOWN_ACTION')
action, polarity = [s.lower() for s in m.groups()]
target_ekb = content.gets('target')
if target_ekb is None or target_ekb == '':
return self.make_failure('MISSING_TARGET')
agent = self._get_agent(target_ekb)
logger.debug('Found agent (target): %s.' % agent.name)
residue = content.gets('residue')
position = content.gets('position')
related_result_dict = {}
for namespace, name in agent.db_refs.items():
# TODO: Remove this eventually, as it is a temporary work-around.
if namespace == 'FPLX':
namespace = 'BE'
stmts = get_statements(agents=['%s@%s' % (name, namespace)],
stmt_type='ActiveForm')
for s in stmts:
if self._matching(s, residue, position, action, polarity):
related_result_dict[s.matches_key()] = s
if not len(related_result_dict):
return self.make_failure(
'MISSING_MECHANISM',
"Could not find statement matching phosphorylation activating "
"%s, %s, %s, %s." % (agent.name, residue, position,
'phosphorylation')
)
else:
self.send_provenance_for_stmts(
related_result_dict.values(),
"Phosphorylation at %s%s activates %s." % (
residue,
position,
agent.name
)
)
msg = KQMLPerformative('SUCCESS')
msg.set('is-activating', 'TRUE')
return msg
@staticmethod
def _get_agent(agent_ekb):
tp = TripsProcessor(agent_ekb)
terms = tp.tree.findall('TERM')
term_id = terms[0].attrib['id']
agent = tp._get_agent_by_id(term_id, None)
return agent
def _matching(self, stmt, residue, position, action, polarity):
if stmt.is_active is not (polarity == 'activating'):
return False
matching_residues = any([
m.residue == residue
and m.position == position
and m.mod_type == action
for m in stmt.agent.mods])
return matching_residues
if __name__ == "__main__":
MSA_Module(argv=sys.argv[1:])
|
Python
| 0.000001
|
@@ -42,24 +42,8 @@
kle%0A
-import requests%0A
impo
@@ -53,16 +53,16 @@
logging%0A
+
%0Afrom kq
@@ -145,16 +145,45 @@
rocessor
+%0Afrom indra import has_config
%0A%0Afrom b
@@ -357,37 +357,8 @@
)%0A%0A%0A
-from indra import has_config%0A
if h
@@ -409,16 +409,17 @@
g('INDRA
+_
DB_REST_
@@ -437,36 +437,17 @@
-CAN_CHECK_STATEMENTS = True%0A
+try:%0A
@@ -501,16 +501,101 @@
tements%0A
+ CAN_CHECK_STATEMENTS = True%0A except:%0A CAN_CHECK_STATEMENTS = False%0A
else:%0A
|
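The optional-dependency guard the diff applies, in isolation: attempt the import, and downgrade to a capability flag on any failure instead of crashing at import time. The diff uses a bare except; Exception is the safer spelling:

try:
    from indra.sources.indra_db_rest import get_statements  # optional backend
    CAN_CHECK_STATEMENTS = True
except Exception:
    get_statements = None
    CAN_CHECK_STATEMENTS = False

print(CAN_CHECK_STATEMENTS)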
fd83aaa43e5ae59555ce8c259365d22adeb7bd90
|
Include results_file among dubins_traffic parameters
|
domains/dubins_traffic/trial-runner.py
|
domains/dubins_traffic/trial-runner.py
|
#!/usr/bin/env python
"""Run trials and collect results for the Problem domain: Traffic network of Dubins cars.
"""
from __future__ import print_function
import argparse
import random
import json
import tempfile
import subprocess
import os
import sys
from time import gmtime, strftime
from fmrb import dubins_traffic
def gen_roslaunch(worldsdf_filename, rnd_path, trialconf):
nl = '\n'
idt = ' '*2
output = '<launch>'+nl
output += '<include file="$(find gazebo_ros)/launch/empty_world.launch">'
output += '<arg name="world_name" value="'+worldsdf_filename+'" />'
output += """
<arg name="headless" value="true" />
<arg name="gui" value="false" />
</include>
<node pkg="dubins_traffic_utils" type="monitor" name="$(anon monitor)" />
<param name="robot_description" command="$(find xacro)/xacro.py '$(find dub_sim)/urdf/lasermounted.urdf.xacro'" />
<param name="dubins_traffic/rnd" textfile="{RND_PATH}" />
""".format(RND_PATH=rnd_path)
if 'e-agents' in trialconf:
eagent_names = []
for eagent in trialconf['e-agents']:
eagent_names.append(eagent['name'])
output += """
<include file="$(find dub_sim)/launch/includes/scopedbase.launch.xml">
<arg name="namespace" value="{EAGENT_NAME}" />
<arg name="init_pose" value="-x {X} -y {Y} -z 0 -Y 0" />
</include>
<node pkg="{EAGENT_PKG}" type="{EAGENT_TYPE}"
name="$(anon {EAGENT_NAME})" ns="{EAGENT_NAME}"
args="{RNDPATH}">
<remap from="cmd_vel" to="mobile_base/commands/velocity" />
</node>
""".format(EAGENT_NAME=eagent['name'],
EAGENT_PKG=eagent['type'].split('/')[0],
EAGENT_TYPE='/'.join(eagent['type'].split('/')[1:]),
X=int(random.random()*10), Y=int(random.random()*10),
RNDPATH=rnd_path)
output += '<param name="dubins_traffic/e_agents" value="{0}" />\n'.format(','.join(eagent_names))
else:
# Empty implies no e-agents.
output += '<param name="dubins_traffic/e_agents" value="" />\n'
return output+nl+'</launch>'
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('FILE', type=str, help='trials configuration file')
parser.add_argument('-f', type=str,
dest='DATAFILE', default=None,
help=('name of file in which to save trial data. '
'If not given, then data will not be saved. '
'If the file exists, quit without modifying it.'))
parser.add_argument('-F', type=str, metavar='DATAFILE',
dest='DATAFILE_fuerte', default=None,
help=('like `-f` switch but overwrite the file '
'if it already exists.'))
parser.add_argument('--rnd', type=str, metavar='FILE', dest='rnd_path',
help=('road network description file. The path given '
'here will override the road network '
'description or path in the configuration file '
'(if any).'))
args = parser.parse_args()
if (args.DATAFILE is not None) and os.path.exists(args.DATAFILE):
print('Requested file "'+args.DATAFILE+'" already exists.')
print('(Use `-F` if you want to overwrite it.)')
sys.exit(-1)
if args.DATAFILE_fuerte is not None:
args.DATAFILE = args.DATAFILE_fuerte
with open(args.FILE, 'r') as f:
trialconf = json.load(f)
assert trialconf['version'] == 0, 'Unrecognized version of the dubins_traffic trials configuration format: '+str(trialconf['version'])
assert trialconf['problem_domain'] == 'dubins_traffic', 'This trial-runner is for the dubins_traffic problem domain, but the given configuration file is for: '+str(trialconf['problem_domain'])
if args.DATAFILE is not None:
with open(args.DATAFILE, 'w') as f:
f.write('{"version": 0,\n')
f.write('"date": "'+strftime('%Y-%m-%d %H:%M:%S', gmtime())+'",\n')
f.write('"trialconf": ')
json.dump(trialconf, f)
f.write(',\n')
if args.rnd_path is not None:
rnd_path = args.rnd_path
elif 'rnd' not in trialconf:
print('ERROR: Road network description not provided in trials configuration file nor at command-line.')
sys.exit(-1)
else:
rnd_path = trialconf['rnd']
if isinstance(rnd_path, dict):
roads = dubins_traffic.RoadNetwork(rnd_path)
else:
        rnd_path = os.path.abspath(rnd_path)
with open(rnd_path, 'rt') as fp:
roads = dubins_traffic.RoadNetwork(fp)
tempfd_rnd, tempfname_rnd = tempfile.mkstemp()
tmprndfile = os.fdopen(tempfd_rnd, 'w+')
if isinstance(rnd_path, dict):
json.dump(rnd_path, tmprndfile)
else:
with open(rnd_path, 'rt') as fp:
tmprndfile.write(fp.read())
tmprndfile.close()
tempfd_sdf, tempfname_sdf = tempfile.mkstemp()
worldsdffile = os.fdopen(tempfd_sdf, 'w+')
worldsdffile.write(dubins_traffic.gen_worldsdf(roads))
worldsdffile.close()
tempfd, tempfname = tempfile.mkstemp()
launchfile = os.fdopen(tempfd, 'w+')
launchfile.write(gen_roslaunch(tempfname_sdf, rnd_path=tempfname_rnd,
trialconf=trialconf))
launchfile.seek(0)
try:
launchp = subprocess.Popen(['roslaunch', '-'], stdin=launchfile)
launchp.wait()
except KeyboardInterrupt:
launchp.terminate()
launchp.wait()
launchfile.close()
os.unlink(tempfname)
os.unlink(tempfname_sdf)
if args.DATAFILE is not None:
with open(args.DATAFILE, 'a') as f:
f.write('}')
|
Python
| 0
|
@@ -331,16 +331,27 @@
slaunch(
+trialconf,
worldsdf
@@ -371,25 +371,37 @@
d_path,
-trialconf
+results_filename=None
):%0A n
@@ -998,16 +998,205 @@
_path)%0A%0A
+ if results_filename is not None:%0A results_path = os.path.abspath(results_filename)%0A output += ' %3Cparam name=%22dubins_traffic/results_file%22 value=%22'+results_path+'%22 /%3E%5Cn'%0A%0A
if '
@@ -5488,16 +5488,62 @@
slaunch(
+trialconf,%0A
tempfnam
@@ -5612,27 +5612,38 @@
-trialconf=trialconf
+results_filename=args.DATAFILE
))%0A
|
8ff38f6eed889b2e5ab2bb04279fe57e8743bf0a
|
move input tools to vertical row
|
bokeh_plot.py
|
bokeh_plot.py
|
from bokeh.io import vform
from bokeh.models import CustomJS, ColumnDataSource
from bokeh.models import Select, Button #, MultiSelect
from bokeh.plotting import Figure, output_file, save
import shutil
# import pandas as pd
# Usage: import do_a_plot and feed it a table
# currently takes Pandas DataFrame as input; haven't tested
# other formats such as astropy tables
# output_file("templates/callback.html") # currently writes to a file
# should change to output JS and HTML strings to pass to template
def get_error_tuples(val,err,pos,alpha=0.6):
# val: the coordinates on the axis with error bars (i.e., y coordinates if plotting y errs, etc)
# err: the given error (assuming 1 sigma)
# pos: the coordinates on the axis opposite the error bars (x coordinates if y errs, etc)
err_width = [(i, j) for i, j in zip(x - xerr, x + xerr)]
err_pos = [(i, i) for i in pos]
#plot.multi_line(err_width, err_ypos, alpha=alpha)
def do_a_plot(table):
#print table.columns
table.columns = [c.strip() for c in table.columns]
#df.columns = ['a', 'b']
column_list = list(table)
#print column_list
#print table[column_list[0]]
table['blank_x'] = '' # add fake columns for plotting
table['blank_y'] = ''
#table['blank_x_err'] = ''
#table['blank_y_err'] = ''
source = ColumnDataSource(data=dict(table))
plot = Figure(plot_width=500, plot_height=500)
scatter = plot.scatter('blank_x', 'blank_y', source=source, _changing=True)
# line = plot.line('blank_x', 'blank_y', source=source, visible=False, _changing=True)
main_callback = CustomJS(args=dict(source=source,
xaxis=plot.xaxis[0],
yaxis=plot.yaxis[0]), code="""
var data = source.get('data');
var f = cb_obj.get('value').trim();
console.log(f);
for(var propertyName in data) {
console.log('name ' + propertyName + ', name_stripped ' + propertyName.trim());
}
var axis = cb_obj.get('title')[0].toLowerCase();
console.log(axis);
if (axis == 'x') {
xaxis.set({"axis_label": f});
} else if (axis == 'y') {
yaxis.set({"axis_label": f});
} else {
return false;
}
blank_data = data['blank_' + axis];
for (i = 0; i < blank_data.length; i++) {
blank_data[i] = data[f][i];
}
source.trigger('change');
""")
reverse_js = """
var start = range.get("start");
var end = range.get("end");
range.set({"start": end, "end": start});
return false;
"""
reverse_x_callback = CustomJS(args=dict(range=plot.x_range), code=reverse_js)
reverse_y_callback = CustomJS(args=dict(range=plot.y_range), code=reverse_js)
select_x = Select(title="X Options:", value=column_list[0], options=column_list, callback=main_callback)
select_y = Select(title="Y Options:", value=column_list[0], options=column_list, callback=main_callback)
reverse_x_button = Button(label="Reverse X range", type="success", callback=reverse_x_callback)
reverse_y_button = Button(label="Reverse Y range", type="success", callback=reverse_y_callback)
layout = vform(select_x, select_y, reverse_x_button, reverse_y_button, plot)
output_file('bokeh_plot.html') # currently writes to a file
save(layout)
shutil.copy('bokeh_plot.html', 'templates/')
|
Python
| 0
|
@@ -115,23 +115,24 @@
tton
- #, MultiSelect
+, HBox, VBoxForm
%0Afro
@@ -180,16 +180,44 @@
le, save
+%0Afrom bokeh.io import curdoc
%0A%0Aimport
@@ -1356,18 +1356,18 @@
t_width=
+6
5
-0
0, plot_
@@ -1373,18 +1373,18 @@
_height=
+6
5
-0
0)%0A%09scat
@@ -2881,32 +2881,251 @@
=main_callback)%0A
+%09select_c = Select(title=%22Color Weight:%22, value=column_list%5B0%5D, options=column_list, callback=main_callback)%09%0A%09select_r = Select(title=%22Size Weight:%22, value=column_list%5B0%5D, options=column_list, callback=main_callback)%09%0A
%09reverse_x_butto
@@ -3304,16 +3304,18 @@
back)%0A%0A%09
+#
layout =
@@ -3382,16 +3382,182 @@
n, plot)
+%0A%09%0A%09controls = %5Bselect_x, select_y, select_c, select_r, reverse_x_button, reverse_y_button%5D%0A%09inputs = HBox(VBoxForm(*controls))%0A%09curdoc().add_root(HBox(inputs, plot))
%0A%0A%09outpu
@@ -3617,22 +3617,24 @@
e%0A%09save(
-layout
+curdoc()
)%0A%09shuti
|
939ba609e6e7a527ef3325c4dd5b0a51c97d1af9
|
fix #29
|
djangocms_reversion2/signals.py
|
djangocms_reversion2/signals.py
|
# -*- coding: utf-8 -*-
from django.db.models import signals
def make_page_version_dirty(page, language):
pv = page.page_versions.filter(active=True, language=language)
if pv.count() > 0:
pv = pv.first()
if not pv.dirty:
pv.dirty = True
pv.save()
def mark_title_dirty(sender, instance, **kwargs):
page = instance.page
language = instance.language
make_page_version_dirty(page, language)
def handle_placeholder_change(**kwargs):
language = kwargs.get('language')
placeholder = kwargs.get('placeholder')
target_placeholder = kwargs.get('target_placeholder', None)
page = None
if placeholder:
page = placeholder.page
elif target_placeholder:
page = target_placeholder.page
if page:
make_page_version_dirty(page, language)
def handle_page_publish(**kwargs):
language = kwargs.get('language')
page = kwargs.get('instance')
# when the page is published create a backup automatically
from djangocms_reversion2.models import PageVersion
try:
PageVersion.create_version(page, language, version_parent=None,
comment='Auto before publish', title='auto')
make_page_version_dirty(page, language)
except AssertionError:
# AssertionError page is not dirty
pass
def handle_page_delete(sender, instance, **kwargs):
# deleting a real page will delete all of its hidden versions
page = instance
for pv in page.page_versions.iterator():
pv.hidden_page.delete()
pv.delete()
def delete_hidden_page(sender, **kwargs):
# deleting a PageVersion deletes its hidden page in the PageTree
# This signal handler deletes the hidden page associated to a PageVersion
# (reverse to on_delete=models.CASCADE)
# Problem was that an infinite loop can be originated
# if kwargs['instance'] and kwargs['instance'].hidden_page:
# hidden_page = kwargs['instance'].hidden_page
# try:
# hidden_page.delete()
# except Exception as e:
# print(e)
pass
def connect_all_plugins():
from cms.signals import post_placeholder_operation, post_publish
post_placeholder_operation.connect(handle_placeholder_change, dispatch_uid='reversion2_placeholder')
signals.post_save.connect(mark_title_dirty, sender='cms.Title', dispatch_uid='reversion2_title')
signals.pre_delete.connect(handle_page_delete, sender='cms.Page', dispatch_uid='reversion2_page')
signals.pre_delete.connect(delete_hidden_page, sender='djangocms_reversion2.PageVersion',
dispatch_uid='reversion2_page_version')
post_publish.connect(handle_page_publish, dispatch_uid='reversion2_page_publish')
|
Python
| 0.000001
|
@@ -17,16 +17,75 @@
f-8 -*-%0A
+from cms.operations import REVERT_PAGE_TRANSLATION_TO_LIVE%0A
from dja
@@ -113,16 +113,16 @@
signals%0A
-
%0A%0A%0A%0Adef
@@ -1411,24 +1411,809 @@
pass%0A%0A%0A
+def handle_page_reverted_to_live(**kwargs):%0A page = kwargs.get('obj')%0A translation = kwargs.get('translation')%0A language = translation.language%0A operation = kwargs.get('operation')%0A if operation == REVERT_PAGE_TRANSLATION_TO_LIVE:%0A from djangocms_reversion2.models import PageVersion%0A # if a page draft is replaced by the currently published page, then we have to make a backup and also%0A # set the active flag correctly%0A try:%0A PageVersion.create_version(page, language, version_parent=None,%0A comment='Auto before revert to live', title='auto')%0A make_page_version_dirty(page, language)%0A except AssertionError:%0A # AssertionError page is not dirty%0A pass%0A%0A%0A
def handle_p
@@ -3060,16 +3060,35 @@
_publish
+, pre_obj_operation
%0A pos
@@ -3550,24 +3550,24 @@
e_version')%0A
-
post_pub
@@ -3639,10 +3639,148 @@
ublish')
+%0A pre_obj_operation.connect(handle_page_reverted_to_live,%0A dispatch_uid='reversion2_page_revert_to_live')
%0A%0A
|
70448c7f4ea132376a6d3547edb99ec616501171
|
Implement Gref#parents in terms of Gref#direct_parents
|
groundstation/gref.py
|
groundstation/gref.py
|
import os
import groundstation.objects.object_factory as object_factory
from groundstation.objects.update_object import UpdateObject
from groundstation.objects.root_object import RootObject
import logger
log = logger.getLogger(__name__)
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
def tips(self):
return os.listdir(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip, silent=False):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
if not silent:
raise
def direct_parents(self, tip):
"""Return all parents of `tip` in the order they're written into the
object"""
obj = object_factory.hydrate_object(self.store[tip].data)
if isinstance(obj, RootObject):
# Roots can't have parents
return []
elif isinstance(obj, UpdateObject):
return obj.parents
else:
            raise Exception("Unknown object hydrated %s" % str(type(obj)))
def parents(self, tips=None):
"""Return all ancestors of `tip`, in an undefined order"""
# XXX This will asplode the stack at some point
parents = []
for tip in (tips or self.tips()):
obj = object_factory.hydrate_object(self.store[tip].data)
if isinstance(obj, UpdateObject):
for tip in obj.parents:
parents.append(tip)
parents.extend(self.parents([tip]))
elif isinstance(obj, RootObject):
return []
else:
                raise Exception("Unknown object hydrated %s" % str(type(obj)))
return parents
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
|
Python
| 0.002283
|
@@ -2131,18 +2131,21 @@
rents =
-%5B%5D
+set()
%0A
@@ -2145,26 +2145,27 @@
-for tip in
+this_iter =
(tips o
@@ -2178,17 +2178,16 @@
.tips())
-:
%0A
@@ -2191,190 +2191,105 @@
- obj = object_factory.hydrate_object(self.store%5Btip%5D.data)%0A if isinstance(obj, UpdateObject):%0A for tip in obj.parents:%0A parents.append
+while this_iter:%0A tip = this_iter.pop()%0A tips_parents = self.direct_parents
(tip
@@ -2306,49 +2306,48 @@
-
- parents.extend(self.
+parents = parents.union(set(tips_
parents
-(%5Btip%5D
))%0A
@@ -2361,154 +2361,37 @@
-elif isinstance(obj, RootObject):%0A return %5B%5D%0A else:%0A raise %22Unknown object hydrated %25s%22 %25 (str(type(obj))
+this_iter.extend(tips_parents
)%0A
|
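The shape of the rewrite in the diff above, as a standalone function: the recursive parents() becomes an iterative worklist that unions each tip's direct parents into a set. direct_parents here is any callable mapping a tip to its parent list:

def ancestors(direct_parents, tips):
    parents = set()
    worklist = list(tips)
    while worklist:
        tip = worklist.pop()
        tip_parents = direct_parents(tip)
        # note: revisits shared parents, like the original rewrite;
        # a cyclic history would still loop
        parents = parents.union(set(tip_parents))
        worklist.extend(tip_parents)
    return parents

graph = {'d': ['c'], 'c': ['a', 'b'], 'a': [], 'b': []}
print(sorted(ancestors(graph.get, ['d'])))  # ['a', 'b', 'c']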
f3195d0d41232c7655250dea15ba4ecbe1a7b036
|
append http:// if protocol is missing, sanitize the return value
|
Commands/Slurp.py
|
Commands/Slurp.py
|
# -*- coding: utf-8 -*-
"""
Created on Aug 31, 2015
@author: Tyranic-Moron
"""
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
from Utils import WebUtils
from bs4 import BeautifulSoup
class Slurp(CommandInterface):
triggers = ['slurp']
help = "slurp <attribute> <url> <css selector> - scrapes the given attribute from the tag selected at the given url"
def execute(self, message):
"""
@type message: IRCMessage
"""
if len(message.ParameterList) < 3:
return IRCResponse(ResponseType.Say, u"Not enough parameters, usage: {}".format(self.help), message.ReplyTo)
prop, url, selector = (message.ParameterList[0], message.ParameterList[1], u" ".join(message.ParameterList[2:]))
page = WebUtils.fetchURL(url)
if page is None:
return IRCResponse(ResponseType.Say, u"Problem fetching {}".format(url), message.ReplyTo)
soup = BeautifulSoup(page.body)
tag = soup.select_one(selector)
if tag is None:
return IRCResponse(ResponseType.Say,
u"'{}' does not select a tag at {}".format(selector, url),
message.ReplyTo)
specials = {
'name': tag.name,
'text': tag.text
}
if prop in specials:
value = specials[prop]
elif prop in tag:
value = tag[prop]
else:
return IRCResponse(ResponseType.Say,
u"The tag selected by '{}' ({}) does not have attribute '{}'".format(selector,
tag.name,
prop),
message.ReplyTo)
if not isinstance(value, basestring):
value = u" ".join(value)
return IRCResponse(ResponseType.Say, value, message.ReplyTo)
|
Python
| 0.000015
|
@@ -72,16 +72,44 @@
oron%0A%22%22%22
+%0Aimport HTMLParser%0Aimport re
%0A%0Afrom I
@@ -471,16 +471,58 @@
n url%22%0A%0A
+ htmlParser = HTMLParser.HTMLParser()%0A%0A
def
@@ -890,16 +890,103 @@
%5B2:%5D))%0A%0A
+ if not re.match(ur'%5E%5Cw+://', url):%0A url = u%22http://%7B%7D%22.format(url)%0A%0A
@@ -1470,16 +1470,19 @@
'
+tag
name': t
@@ -2135,16 +2135,16 @@
tring):%0A
-
@@ -2173,16 +2173,218 @@
value)%0A%0A
+ # sanitize the value%0A value = value.strip()%0A value = re.sub(ur'%5B%5Cr%5Cn%5D+', u' ', value)%0A value = re.sub(ur'%5Cs+', u' ', value)%0A value = self.htmlParser.unescape(value)%0A%0A
|
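The two sanitation steps the diff adds, sketched in isolation: prepend a default protocol when none is present, then strip and collapse whitespace in scraped text:

import re

def ensure_protocol(url):
    if not re.match(r'^\w+://', url):
        url = 'http://{}'.format(url)
    return url

def squash_whitespace(text):
    text = text.strip()
    text = re.sub(r'[\r\n]+', ' ', text)
    return re.sub(r'\s+', ' ', text)

print(ensure_protocol('example.com'))        # http://example.com
print(squash_whitespace(' a\n  b \r\n c '))  # a b c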
d475bf4da5af06e6b40e51e8d14df3fe31b5491b
|
Define a function for python version check
|
bindings/python-examples/sentence-check.py
|
bindings/python-examples/sentence-check.py
|
#!/usr/bin/env python
"""
Demo: Find unlinked or unknown words.
This demo is extremely simplified.
It can only work with link-grammar library version >= 5.3.10.
Input: English sentences, one per line.
Output: If there are any []-marked words in the linkage results,
the output contains unique combinations of the input sentence with
these words marked. No attempt is made to handle the walls.
Spell guesses are not handled in this demo.
Example:
This is a the test of bfgiuing and xxxvfrg
Output:
Sentence has 1 unlinked word:
1: LEFT-WALL this.p is.v [a] the test.n of bfgiuing[!].g and.j-n xxxvfrg[?].n RIGHT-WALL
2: LEFT-WALL this.p is.v a [the] test.n of bfgiuing[!].g and.j-n xxxvfrg[?].n RIGHT-WALL
3: LEFT-WALL this.p is.v [a] the test.n of bfgiuing[!].g and.j-n xxxvfrg[?].a RIGHT-WALL
4: LEFT-WALL this.p is.v a [the] test.n of bfgiuing[!].g and.j-n xxxvfrg[?].a RIGHT-WALL
"""
from __future__ import print_function
import sys
import re
import itertools
import argparse
import readline
from linkgrammar import (Sentence, ParseOptions, Dictionary,
LG_Error, LG_TimerExhausted, Clinkgrammar as clg)
get_input = input
# If this is Python 2, use raw_input()
if sys.version_info[:2] <= (2, 7):
get_input = raw_input
def nsuffix(q):
return '' if q == 1 else 's'
class Formatter(argparse.HelpFormatter):
""" Display the "lang" argument as a first one, as in link-parser. """
def _format_usage(self, usage, actions, groups, prefix):
usage_message = super(Formatter, self)._format_usage(usage, actions, groups, prefix)
return re.sub(r'(usage: \S+) (.*) \[lang]', r'\1 [lang] \2', str(usage_message))
#-----------------------------------------------------------------------------#
DISPLAY_GUESSES = True # Display regex and POS guesses
print ("Version:", clg.linkgrammar_get_version())
args = argparse.ArgumentParser(formatter_class=Formatter)
args.add_argument('lang', nargs='?', default='en',
help="language or dictionary location")
args.add_argument("-v", "--verbosity", type=int,default=0,
choices=range(0,199), metavar='[0-199]',
help= "1: Basic verbosity; 2-4: Trace; >5: Debug")
args.add_argument("-p", "--position", action="store_true",
help="show word sentence position")
args.add_argument("-nm", "--no-morphology", dest='morphology', action='store_false',
help="do not display morphology")
args.add_argument("-i", "--interactive", action="store_true",
help="interactive mode after each result")
arg = args.parse_args()
try:
lgdict = Dictionary(arg.lang)
except LG_Error:
# The default error handler will print the error message
args.print_usage()
sys.exit(2)
po = ParseOptions(verbosity=arg.verbosity)
po.max_null_count = 999 # > allowed maximum number of words
po.max_parse_time = 10 # actual parse timeout may be about twice bigger
po.spell_guess = True if DISPLAY_GUESSES else False
po.display_morphology = arg.morphology
# Read and check sentences interactively, one per line
while True:
sentence_text = get_input("sentence-check: ")
if sentence_text.strip() == '':
continue
sent = Sentence(str(sentence_text), lgdict, po)
try:
linkages = sent.parse()
except LG_TimerExhausted:
print('Sentence too complex for parsing in ~{} second{}.'.format(
po.max_parse_time,nsuffix(po.max_parse_time)))
continue
if not linkages:
print('Error occurred - sentence ignored.')
continue
if len(linkages) <= 0:
print('Cannot parse the input sentence')
continue
null_count = sent.null_count()
if null_count == 0:
print("Sentence parsed OK", end='')
linkages = list(linkages)
correction_found = False
# search for correction suggestions
for l in linkages:
for word in l.words():
if word.find(r'.#') > 0:
correction_found = True
                break
if correction_found:
break
if correction_found:
print(" - with correction", end='')
print(".")
guess_found = False
if DISPLAY_GUESSES:
# Check the first linkage for regexed/unknown words
for word in linkages[0].words():
# search for something[x]
if re.search(r'\S+\[[^]]+]', word):
guess_found = True
break
# Show results with unlinked words or guesses
if arg.position or guess_found or correction_found or null_count != 0:
if arg.position:
for p in range (0, len(sentence_text)):
print(p%10, end="")
print()
print('Sentence has {} unlinked word{}:'.format(
null_count, nsuffix(null_count)))
result_no = 0
        unique_parse = {}
        for linkage in linkages:
            words = list(linkage.words())
            if str(words) in unique_parse:
                continue
            result_no += 1
            unique_parse[str(words)] = True
if arg.position:
words_char = []
words_byte = []
wi = 0
for w in words:
if sys.version_info < (3, 0):
words[wi] = words[wi].decode('utf-8')
words_char.append(words[wi] + str((linkage.word_char_start(wi), linkage.word_char_end(wi))))
words_byte.append(words[wi] + str((linkage.word_byte_start(wi), linkage.word_byte_end(wi))))
wi += 1
print(u"{}: {}".format(result_no, ' '.join(words_char)))
print(u"{}: {}".format(result_no, ' '.join(words_byte)))
else:
print("{}: {}".format(result_no, ' '.join(words)))
if arg.interactive:
print("Interactive session (^D to end):")
import code
code.interact(local=locals())
|
Python
| 0.999478
|
@@ -1134,67 +1134,36 @@
g)%0A%0A
-get_input = input%0A# If this is Python 2, use raw_input()%0Aif
+def is_python2():%0A return
sys
@@ -1181,26 +1181,20 @@
fo%5B:
-2%5D %3C
+1%5D =
= (2,
- 7):%0A
+)%0A%0A
get_
@@ -1210,16 +1210,43 @@
aw_input
+ if is_python2() else input
%0A%0Adef ns
@@ -5225,32 +5225,19 @@
if
-sys.version_info %3C (3, 0
+is_python2(
):%0A
|
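The helper the diff defines, in isolation: comparing only the major component treats every 2.x release uniformly, where the old sys.version_info[:2] <= (2, 7) test would have missed a hypothetical 2.8:

import sys

def is_python2():
    return sys.version_info[:1] == (2,)

# Only the selected branch of a conditional expression is evaluated,
# so the raw_input name is never touched on Python 3.
get_input = raw_input if is_python2() else input  # noqa: F821
print(is_python2())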
ed97f1cdbcc5a00c2bf597ad921b17da652b0b07
|
add annotations to _pytesttester.py
|
bottleneck/_pytesttester.py
|
bottleneck/_pytesttester.py
|
"""
Generic test utilities.
Based on scipy._libs._testutils
"""
from __future__ import division, print_function, absolute_import
import os
import sys
__all__ = ["PytestTester"]
class PytestTester(object):
"""
Pytest test runner entry point.
"""
def __init__(self, module_name):
self.module_name = module_name
def __call__(
self,
label="fast",
verbose=1,
extra_argv=None,
doctests=False,
coverage=False,
tests=None,
parallel=None,
):
import pytest
module = sys.modules[self.module_name]
module_path = os.path.abspath(module.__path__[0])
pytest_args = ["-l"]
if doctests:
raise ValueError("Doctests not supported")
if extra_argv:
pytest_args += list(extra_argv)
if verbose and int(verbose) > 1:
pytest_args += ["-" + "v" * (int(verbose) - 1)]
if coverage:
pytest_args += ["--cov=" + module_path]
if label == "fast":
pytest_args += ["-m", "not slow"]
elif label != "full":
pytest_args += ["-m", label]
if tests is None:
tests = [self.module_name]
if parallel is not None and parallel > 1:
if _pytest_has_xdist():
pytest_args += ["-n", str(parallel)]
else:
import warnings
warnings.warn(
"Could not run tests in parallel because "
"pytest-xdist plugin is not available."
)
pytest_args += ["--pyargs"] + list(tests)
try:
code = pytest.main(pytest_args)
except SystemExit as exc:
code = exc.code
return code == 0
def _pytest_has_xdist():
"""
Check if the pytest-xdist plugin is installed, providing parallel tests
"""
# Check xdist exists without importing, otherwise pytests emits warnings
from importlib.util import find_spec
return find_spec("xdist") is not None
|
Python
| 0.000002
|
@@ -63,94 +63,62 @@
%22%22%0A%0A
-from __future__ import division, print_function, absolute_import%0A%0Aimport os%0Aimport sys
+import os%0Aimport sys%0Afrom typing import Optional, List
%0A%0A%0A_
@@ -260,17 +260,30 @@
ule_name
-)
+: str) -%3E None
:%0A
@@ -361,17 +361,24 @@
label
-=
+: str =
%22fast%22,%0A
@@ -392,17 +392,24 @@
verbose
-=
+: int =
1,%0A
@@ -421,17 +421,40 @@
tra_argv
-=
+: Optional%5BList%5Bstr%5D%5D =
None,%0A
@@ -467,17 +467,25 @@
doctests
-=
+: bool =
False,%0A
@@ -499,17 +499,25 @@
coverage
-=
+: bool =
False,%0A
@@ -528,17 +528,40 @@
tests
-=
+: Optional%5BList%5Bstr%5D%5D =
None,%0A
@@ -574,17 +574,34 @@
parallel
-=
+: Optional%5Bint%5D =
None,%0A
@@ -603,16 +603,24 @@
e,%0A )
+ -%3E bool
:%0A
@@ -1885,24 +1885,32 @@
_has_xdist()
+ -%3E bool
:%0A %22%22%22%0A
|
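The annotation style the diff adds, shown on a tiny standalone argument builder; Optional[List[str]] marks an argument that may be omitted:

from typing import List, Optional

def build_pytest_args(extra_argv: Optional[List[str]] = None,
                      verbose: int = 1) -> List[str]:
    args: List[str] = ['-l']
    if extra_argv:
        args += list(extra_argv)
    if verbose > 1:
        args += ['-' + 'v' * (verbose - 1)]
    return args

print(build_pytest_args(['--cov'], verbose=3))  # ['-l', '--cov', '-vv']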
16ddf7225bab12bf373bb7d5251713d58c7616c5
|
support linux mint package manager
|
lib/dotinstall/packages.py
|
lib/dotinstall/packages.py
|
import logging
import re
import sys
from dotinstall import util
logger = logging.getLogger(__name__)
class CygwinPackageInstaller:
name = "cygwin"
@property
def supported(self):
return util.has_executable("apt-cyg")
def has_installed(self, package):
return (
len(util.run_silent(["apt-cyg", "list", "^%s$" % package])[1]) > 0
)
def is_available(self, package):
return (
len(util.run_silent(["apt-cyg", "listall", "^%s$" % package])[1])
> 0
)
def install(self, package):
return util.run_verbose(["apt-cyg", "install", package])
class PacmanPackageInstaller:
name = "pacman"
@property
def supported(self):
return util.has_executable("pacman") and util.has_executable("sudo")
def has_installed(self, package):
return util.run_silent(["pacman", "-Q", package])[0]
def is_available(self, package):
return util.run_silent(["pacman", "-Ss", package])[0]
def install(self, package):
return util.run_verbose(["sudo", "-S", "pacman", "-S", package])
class PacaurPackageInstaller:
name = "pacaur"
@property
def supported(self):
return util.has_executable("pacaur")
def has_installed(self, package):
return util.run_silent(["pacaur", "-Q", package])[0]
def is_available(self, package):
return util.run_silent(["pacaur", "-Ss", package])[0]
def install(self, package):
return util.run_verbose(
["pacaur", "-S", package, "--noconfirm", "--noedit"]
)
class PipPackageInstaller:
name = "pip"
def __init__(self):
if "cygwin" in sys.platform:
self.executable = "pip3"
else:
self.executable = "pip"
@property
def supported(self):
return util.has_executable(self.executable)
def has_installed(self, package):
return (
re.search(
"^" + re.escape(package) + r"($|\s)",
util.run_silent([self.executable, "list"])[1],
re.MULTILINE,
)
is not None
)
def is_available(self, package):
return (
re.search(
"^" + re.escape(package) + r"($|\s)",
util.run_silent(
[
self.executable,
"search",
package,
]
)[1],
re.MULTILINE,
)
is not None
)
def install(self, package):
command = [
self.executable,
"install",
"--user",
package,
]
return util.run_verbose(command)
INSTALLERS = [
CygwinPackageInstaller(),
PacmanPackageInstaller(),
PacaurPackageInstaller(),
PipPackageInstaller(),
]
def try_install(package, method=None):
try:
install(package, method)
return True
except Exception as ex:
logger.info("Error installing %s: %s", package, ex)
return False
def has_installed(package, method=None):
chosen_installers = _choose_installers(method)
for installer in chosen_installers:
if installer.has_installed(package):
return True
return False
def install(package, method=None):
if has_installed(package, method):
logger.info("Package %s is already installed.", package)
return True
chosen_installers = _choose_installers(method)
for installer in chosen_installers:
if installer.is_available(package):
logger.info(
"Package %s is available, installing with %s",
package,
installer.name,
)
return installer.install(package)
if method is None:
raise RuntimeError(
"No package manager is capable of installing %s", package
)
else:
raise RuntimeError(
"%s is not capable of installing %s", method, package
)
def _choose_installers(method):
if method is None:
chosen_installers = INSTALLERS
else:
chosen_installers = [i for i in INSTALLERS if i.name == method]
chosen_installers = [i for i in chosen_installers if i.supported]
if len(chosen_installers) == 0:
if method is None:
raise RuntimeError(
"No package manager is supported on this system!"
)
else:
raise RuntimeError("%s is not supported on this system!", method)
return chosen_installers
|
Python
| 0.000001
|
@@ -630,32 +630,464 @@
l%22, package%5D)%0A%0A%0A
+class AptPackageInstaller:%0A name = %22apt%22%0A%0A @property%0A def supported(self):%0A return util.has_executable(%22apt%22)%0A%0A def has_installed(self, package):%0A return util.run_silent(%5B%22dpkg%22, %22-l%22, package%5D)%5B0%5D%0A%0A def is_available(self, package):%0A return util.run_silent(%5B%22apt%22, %22show%22, package%5D)%5B0%5D%0A%0A def install(self, package):%0A return util.run_verbose(%5B%22sudo%22, %22-S%22, %22apt%22, %22install%22, package%5D)%0A%0A%0A
class PacmanPack
@@ -3332,16 +3332,43 @@
ller(),%0A
+ AptPackageInstaller(),%0A
%5D%0A%0A%0Adef
|
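Decoded from the URL-encoded hunks (indentation normalized to four spaces), the first hunk adds the new installer class — util is the helper module already imported at the top of the file — and the second appends it to the registry:

class AptPackageInstaller:
    name = "apt"

    @property
    def supported(self):
        return util.has_executable("apt")

    def has_installed(self, package):
        # a zero exit status means dpkg knows the package
        return util.run_silent(["dpkg", "-l", package])[0]

    def is_available(self, package):
        return util.run_silent(["apt", "show", package])[0]

    def install(self, package):
        return util.run_verbose(["sudo", "-S", "apt", "install", package])


INSTALLERS = [
    CygwinPackageInstaller(),
    PacmanPackageInstaller(),
    PacaurPackageInstaller(),
    PipPackageInstaller(),
    AptPackageInstaller(),
]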
d333135c42b43a7d55acbc3c34a12cc7c4fbc40f
|
Replace write_log func with the logging module and update to Python 3
|
Dailyv2ex/v2ex.py
|
Dailyv2ex/v2ex.py
|
#!/usr/bin/env python
# coding:utf-8
import re
import sys
import logging
import requests
from bs4 import BeautifulSoup
def _log():
logging.basicConfig(level=logging.DEBUG,
filename='V2EX.log',
format='[%(levelname)s]: [%(asctime)s]: %(message)s',
datefmt='%d-%b-%Y %H:%M:%S')
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter('[%(levelname)s-%(asctime)s]: %(message)s')
handler.setFormatter(formatter)
logging.getLogger('').addHandler(handler)
return logging
class V2EX(object):
headers = {
'User-Agent': (
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
"(KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36"
),
'Origin': 'http://www.v2ex.com',
'Referer': 'http://www.v2ex.com/signin',
'Host': 'www.v2ex.com'
}
def __init__(self, usrname, usrpswd):
self.usrname = usrname
self.usrpswd = usrpswd
self.log = _log()
def login(self):
sess = requests.Session()
html_login = sess.get('http://www.v2ex.com/signin', headers=self.headers)
soup_login = BeautifulSoup(html_login.text, 'html.parser')
usrname_code = soup_login.find('input', {'class': 'sl'})['name']
usrpswdcode = soup_login.find('input', {'type': 'password'})['name']
once = soup_login.find('input', {'name': 'once'})['value']
form_data = {
usrname_code: self.usrname,
usrpswdcode: self.usrpswd,
'once': once,
'next': '/'
}
sess.post('http://www.v2ex.com/signin', form_data, headers=self.headers)
sethtml = sess.get('http://www.v2ex.com/settings', headers=self.headers)
soup = BeautifulSoup(sethtml.text, 'html.parser')
email = soup.find('input', {'type': 'email'})['value']
status = True if email else False
message = '登录成功!' if status else '登录失败!'
self.log.info('{0} {1}'.format(self.usrname, message))
return [sess, status]
def balance(self, sess):
"""
:param sess: 登录状态
:return: 获取签到奖励和余额
"""
html_balance = sess.get('http://www.v2ex.com/balance', headers={'Referer': 'http://www.v2ex.com/balance'}).text
today_gold = re.findall(u'>(\d+.+的每日.+)</span', html_balance)[0]
return today_gold
def daily(self, sess):
url_sing = 'http://www.v2ex.com/mission/daily'
html_daily = sess.get(url_sing, headers=self.headers)
soup_m = BeautifulSoup(html_daily.text, 'html.parser')
u = soup_m.find('input', {"type": 'button'})['onclick'].split('\'')[1]
sign_url = 'http://www.v2ex.com' + u # 签到 url
res = sess.get(sign_url, headers={'Referer': 'http://www.v2ex.com/mission/daily'})
des = self.balance(sess)
self.log.info(des)
if res.text.find(u'已成功领取每日登录奖励') > 0:
self.log.info('已成功领取每日登录奖励...')
else:
self.log.info('已经领取过每日登录奖励...')
if __name__ == '__main__':
try:
usrname = raw_input('用户名: ')
usrpswd = raw_input('密码: ')
except:
usrname = input('用户名: ')
usrpswd = input('密码: ')
foo = V2EX(usrname, usrpswd)
try:
sess = foo.login()
if sess[1] is True:
foo.daily(sess[0])
except:
foo.log.error('error...')
foo.log.error(sys.exc_info())
|
Python
| 0.000002
|
@@ -1252,35 +1252,28 @@
ogin.text, '
-html.parser
+lxml
')%0A u
@@ -1843,35 +1843,28 @@
html.text, '
-html.parser
+lxml
')%0A e
@@ -2625,19 +2625,12 @@
t, '
-html.parser
+lxml
')%0A
|
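The change simply swaps BeautifulSoup's parser from 'html.parser' to 'lxml' at all three call sites. A minimal runnable sketch of the new call; note that lxml is a third-party C extension, so this assumes pip install lxml has been run:

from bs4 import BeautifulSoup

# 'lxml' is generally faster than the stdlib 'html.parser', at the cost
# of an extra dependency.
soup = BeautifulSoup('<input class="sl" name="u"/>', 'lxml')
print(soup.find('input', {'class': 'sl'})['name'])  # -> u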
b0da39d309d0ca348f608e4954dc7cd23e75e02c
|
Change default keystone service to httpd
|
packstack/plugins/keystone_100.py
|
packstack/plugins/keystone_100.py
|
# -*- coding: utf-8 -*-
"""
Installs and configures Keystone
"""
import logging
import uuid
from packstack.installer import validators
from packstack.installer import processors
from packstack.installer import basedefs
from packstack.installer import utils
from packstack.modules.ospluginutils import (getManifestTemplate,
appendManifestFile,
createFirewallResources)
#------------------ oVirt installer initialization ------------------
PLUGIN_NAME = "OS-Keystone"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')
def initConfig(controller):
params = [
{"CMD_OPTION": "keystone-db-passwd",
"USAGE": "The password to use for the Keystone to access DB",
"PROMPT": "Enter the password for the Keystone DB access",
"OPTION_LIST": [],
"VALIDATORS": [validators.validate_not_empty],
"PROCESSORS": [processors.process_password],
"DEFAULT_VALUE": "PW_PLACEHOLDER",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_KEYSTONE_DB_PW",
"USE_DEFAULT": False,
"NEED_CONFIRM": True,
"CONDITION": False},
{"CMD_OPTION": "keystone-region",
"USAGE": "Region name",
"PROMPT": "Region name",
"OPTION_LIST": [],
"VALIDATORS": [validators.validate_not_empty],
"DEFAULT_VALUE": "RegionOne",
"MASK_INPUT": False,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_KEYSTONE_REGION",
"USE_DEFAULT": True,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "keystone-admin-token",
"USAGE": "The token to use for the Keystone service api",
"PROMPT": "The token to use for the Keystone service api",
"OPTION_LIST": [],
"VALIDATORS": [validators.validate_not_empty],
"DEFAULT_VALUE": uuid.uuid4().hex,
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_KEYSTONE_ADMIN_TOKEN",
"USE_DEFAULT": True,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "keystone-admin-passwd",
"USAGE": "The password to use for the Keystone admin user",
"PROMPT": "Enter the password for the Keystone admin user",
"OPTION_LIST": [],
"VALIDATORS": [validators.validate_not_empty],
"DEFAULT_VALUE": "PW_PLACEHOLDER",
"PROCESSORS": [processors.process_password],
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_KEYSTONE_ADMIN_PW",
"USE_DEFAULT": False,
"NEED_CONFIRM": True,
"CONDITION": False},
{"CMD_OPTION": "keystone-demo-passwd",
"USAGE": "The password to use for the Keystone demo user",
"PROMPT": "Enter the password for the Keystone demo user",
"OPTION_LIST": [],
"VALIDATORS": [validators.validate_not_empty],
"DEFAULT_VALUE": "PW_PLACEHOLDER",
"PROCESSORS": [processors.process_password],
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_KEYSTONE_DEMO_PW",
"USE_DEFAULT": False,
"NEED_CONFIRM": True,
"CONDITION": False},
{"CMD_OPTION": "keystone-token-format",
"USAGE": "Kestone token format. Use either UUID or PKI",
"PROMPT": "Enter the Keystone token format.",
"OPTION_LIST": ['UUID', 'PKI'],
"VALIDATORS": [validators.validate_options],
"DEFAULT_VALUE": 'UUID',
"MASK_INPUT": False,
"LOOSE_VALIDATION": False,
"CONF_NAME": 'CONFIG_KEYSTONE_TOKEN_FORMAT',
"USE_DEFAULT": True,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "keystone-service-name",
"USAGE": "Name of service to use to run keystone (keystone or httpd)",
"PROMPT": "Enter the Keystone service name.",
"OPTION_LIST": ['keystone', 'httpd'],
"VALIDATORS": [validators.validate_options],
"DEFAULT_VALUE": "keystone",
"MASK_INPUT": False,
"LOOSE_VALIDATION": False,
"CONF_NAME": 'CONFIG_KEYSTONE_SERVICE_NAME',
"USE_DEFAULT": True,
"NEED_CONFIRM": False,
"CONDITION": False},
]
group = {"GROUP_NAME": "KEYSTONE",
"DESCRIPTION": "Keystone Config parameters",
"PRE_CONDITION": lambda x: 'yes',
"PRE_CONDITION_MATCH": "yes",
"POST_CONDITION": False,
"POST_CONDITION_MATCH": True}
controller.addGroup(group, params)
def initSequences(controller):
keystonesteps = [
{'title': 'Adding Keystone manifest entries',
'functions': [create_manifest]},
]
controller.addSequence("Installing OpenStack Keystone", [], [],
keystonesteps)
#-------------------------- step functions --------------------------
def create_manifest(config, messages):
manifestfile = "%s_keystone.pp" % config['CONFIG_CONTROLLER_HOST']
manifestdata = getManifestTemplate("keystone.pp")
fw_details = dict()
key = "keystone"
fw_details.setdefault(key, {})
fw_details[key]['host'] = "ALL"
fw_details[key]['service_name'] = "keystone"
fw_details[key]['chain'] = "INPUT"
fw_details[key]['ports'] = ['5000', '35357']
fw_details[key]['proto'] = "tcp"
config['FIREWALL_KEYSTONE_RULES'] = fw_details
manifestdata += createFirewallResources('FIREWALL_KEYSTONE_RULES')
appendManifestFile(manifestfile, manifestdata)
|
Python
| 0.000001
|
@@ -4162,24 +4162,21 @@
ALUE%22: %22
-keystone
+httpd
%22,%0A
|
075c06a6360d8b88745e3bffd4883beead36c59b
|
Add orders_script
|
config_example.py
|
config_example.py
|
CHROMEDRIVER_PATH = '/usr/lib/chromium-browser/chromedriver'
FACEBOOK = {
'email': '',
'password': '',
}
HIPMENU = {
'restaurant_url': 'https://www.hipmenu.ro/#p1/rg/cluj-prod/group/98254//',
}
SKYPE = {
'username': '',
'password': '',
'conversation_title': '',
}
NEXMO = {
'api_key': '',
'api_secret': '',
'phone_number': '40744444444',
}
TEST = True
|
Python
| 0.000001
|
@@ -386,8 +386,648 @@
= True%0A
+%0Aorders_script = %22%22%22%0Avar orders = %5B%5D;%0Avar my_name = document.querySelector('#h-profilename').textContent;%0Avar name_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .header-left p'));%0Avar price_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .summary-total .value'));%0Aif (name_tags.length %3E price_tags.length) %7B%0A name_tags.splice(0, 1);%0A%7D%0Afor (var i = 0; i %3C name_tags.length; i++) %7B%0A orders.push(%7B%0A name: name_tags%5Bi%5D.textContent.replace('Selec%C8%9Biile mele', my_name).trim(),%0A price: price_tags%5Bi%5D.textContent.trim(),%0A %7D);%0A%7D%0Areturn orders;%0A%22%22%22%0A
|
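The added block is easier to judge decoded: it is a module-level Python string holding browser-side JavaScript ('Selecțiile mele' is Romanian for 'My selections'). The bare return statement suggests it is meant for something like Selenium's execute_script; the driver code is not part of this record, so that is an assumption:

orders_script = """
var orders = [];
var my_name = document.querySelector('#h-profilename').textContent;
var name_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .header-left p'));
var price_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .summary-total .value'));
if (name_tags.length > price_tags.length) {
    name_tags.splice(0, 1);
}
for (var i = 0; i < name_tags.length; i++) {
    orders.push({
        name: name_tags[i].textContent.replace('Selecțiile mele', my_name).trim(),
        price: price_tags[i].textContent.trim(),
    });
}
return orders;
"""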
60890b614132a8cfd48be3e001114275752e9ac4
|
Fix typo
|
megnet/config.py
|
megnet/config.py
|
"""Data types"""
import numpy as np
import tensorflow as tf
DTYPES = {'float32': {'numpy': np.float32, 'tf': tf.float32},
'float16': {'numpy': np.float16, 'tf': tf.float16},
'int32': {'numpy': np.int32, 'tf': tf.int32},
'int16': {'numpy': np.int32, 'tf': tf.int32}}
class DataType:
np_float = np.float32
np_int = np.int32
tf_float = tf.float32
tf_int = tf.int32
@classmethod
def set_dtype(cls, data_type: str) -> None:
"""
Class method to set the data types
Args:
data_type (str): '16' or '32'
"""
if data_type.endswith('32'):
float_key = 'float32'
int_key = 'int32'
elif data_type.endswith('16'):
float_key = 'float16'
int_key = 'int16'
else:
raise ValueError("Data type not known, choose '16' or '32'")
cls.np_float = DTYPES[float_key]['numpy']
cls.tf_float = DTYPES[float_key]['tf']
cls.np_int = DTYPES[int_key]['numpy']
cls.tf_int = DTYPES[int_key]['tf']
def set_global_dtypes(data_type) -> None:
"""
Function to set the data types
Args:
data_type (str): '16' or '32'
Returns:
"""
DataType.set_dtype(data_type)
|
Python
| 0.999991
|
@@ -262,34 +262,34 @@
%7B'numpy': np.int
-32
+16
, 'tf': tf.int32
@@ -286,18 +286,18 @@
: tf.int
-32
+16
%7D%7D%0A%0A%0Acla
|
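Decoded, the typo fix makes the 'int16' entry actually map to 16-bit types instead of silently aliasing 'int32':

DTYPES = {'float32': {'numpy': np.float32, 'tf': tf.float32},
          'float16': {'numpy': np.float16, 'tf': tf.float16},
          'int32': {'numpy': np.int32, 'tf': tf.int32},
          'int16': {'numpy': np.int16, 'tf': tf.int16}}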
2b622f2f0675581b63837bff568858b27a19a4a3
|
Move golden ratio as global variable.
|
latexipy/latexipy.py
|
latexipy/latexipy.py
|
'''
Automatically change matplotlib figures to LaTeX figures.
'''
from contextlib import contextmanager
import errno
import logging
import math
from pathlib import Path
import sys
import warnings
import matplotlib.pyplot as plt
logger = logging.getLogger('latexipy')
INCH_PER_POINT = 1/72.27
MAX_HEIGHT_INCH = 8
FONT_SIZE = 8
PARAMS = {
'pgf.texsystem': 'xelatex', # change this if using xetex or luatex
'text.usetex': True,
'font.family': 'serif',
'font.serif': [],
'font.sans-serif': [],
'font.monospace': [],
'pgf.preamble': [
r'\usepackage[utf8x]{inputenc}',
r'\usepackage[T1]{fontenc}',
],
'font.size': FONT_SIZE,
'axes.labelsize': FONT_SIZE,
'axes.titlesize': FONT_SIZE,
'legend.fontsize': FONT_SIZE,
'xtick.labelsize': FONT_SIZE,
'ytick.labelsize': FONT_SIZE,
}
def latexify(params=PARAMS):
'''
Set up matplotlib's RC params for LaTeX plotting.
Call this function before plotting the first figure.
Parameters
----------
params : Optional[dict]
A dictionary containing the RC params that need to be updated. Default
is `PARAMS`. The defaults should be okay for most cases, but `PARAMS`
can be updated via `.update()` as well.
Example
-------
>>> params = PARAMS.copy()
>>> params.update({'font.family': 'sans-serif'})
>>> latexify(params)
'''
plt.rcParams.update(params)
plt.switch_backend('pgf')
def fig_size(fig_width_tw=0.9, *, fig_ratio=None, fig_height=None, n_columns=1,
max_height=MAX_HEIGHT_INCH, doc_width_pt=345):
r'''
Get the necessary figure size.
Parameters
----------
fig_width_tw : Optional[float]
The width of the figure, as a proportion of the text width, between 0
and 1. Default is 0.9.
fig_ratio: Optional[float]
The ratio of the figure height to figure width. If `fig_height` is
specified, `fig_ratio` is calculated from that and `fig_width`. Default
is the golden ratio.
fig_height : Optional[float]
The height of the figure in inches. Default is the golden ratio of the
figure width.
n_columns : Optional[int]
The number of equally sized columns in the document. The figure will
never be larger than the width of one column. Default is 1.
max_height : Optional[float]
The maximum height of the figure, in inches. Default is
`MAX_HEIGHT_INCH`.
doc_width_pt : float
The text width of the document, in points. Can be obtained by typing
`\the\textwidth` in the LaTeX document. Default is 345.
Returns
-------
fig_width : float
The figure width, in inches.
fig_height : float
The figure height in inches.
'''
doc_width_in = doc_width_pt * INCH_PER_POINT
fig_width = doc_width_in * fig_width_tw / n_columns
if fig_ratio is None:
if fig_height is None:
golden_mean = (math.sqrt(5)-1.0)/2.0
fig_ratio = golden_mean
else:
fig_ratio = fig_height / fig_width
fig_height = fig_width * fig_ratio
if fig_height > max_height:
warnings.warn(f'fig_height too large at {fig_height} inches; '
                      f'will automatically reduce to {max_height} inches.')
fig_height = max_height
return fig_width, fig_height
def save_fig(filename, folder, exts, from_context=False, mkdir=True):
'''
Save the figure in each of the extensions.
Parameters
----------
filename : str
The base name of the file, without extensions.
folder : str
The name of the directory in which to store the saved files.
exts : Sequence
A list of all the extensions to be saved, without the dot.
from_context : Optional[bool]
Whether the function is being called from the context manager. This only
affects the logging output. Default is False.
mkdir : Optional[bool]
Whether the folder should be created automatically if it does not exist.
Default is True.
'''
folder = Path(folder)
if not from_context:
logger.info(f'Saving {filename}... ')
plt.tight_layout(0)
if mkdir:
if folder.is_file():
msg = 'A file exists at directory location'
e = NotADirectoryError(errno.ENOTDIR, msg, str(folder))
logger.error(e)
return
folder.mkdir(parents=True, exist_ok=True)
for ext in exts:
if from_context:
logger.info(f' Saving {ext}...')
try:
plt.savefig(str(folder/f'{filename}.{ext}'))
except FileNotFoundError as e:
logger.error(e)
logger.error('Create the directory, or set `mkdir` to True.')
break
except PermissionError as e:
logger.error(e)
break
@contextmanager
def figure(filename, *, folder='img', exts=['pgf', 'png'], size=None,
mkdir=True):
'''
The primary interface for creating figures.
Any matplotlib-derived code in the scope of this context manager is valid,
and should output as expected.
Parameters
----------
filename : str
The base name of the file, without extensions.
folder : Optional[str]
The name of the directory in which to store the saved files. Default is
'img'.
exts : Sequence
A list of all the extensions to be saved, without the dot. Default is
['pgf', 'png'].
size : Optional[Sequence[float, float]]
The width and height of the figure, in inches. Default is `fig_size()`.
mkdir : Optional[bool]
Whether the folder should be created automatically if it does not exist.
Default is True.
Notes
-----
When integrating with LaTeX, the recommended format is PGF. PNG can be used
externally, such as in blog posts or as embedded images, while PDF can be
standalone, or inserted into LaTeX documents. A full list of supported
formats can be found by calling
`plt.gcf().canvas.get_supported_filetypes_grouped()`
'''
logger.info(f'{filename}:')
logger.info(' Plotting...')
yield
plt.gcf().set_size_inches(*size)
save_fig(filename, folder=folder, exts=exts, from_context=True, mkdir=mkdir)
plt.close()
|
Python
| 0
|
@@ -291,16 +291,55 @@
1/72.27%0A
+GOLDEN_RATIO = (math.sqrt(5)-1.0)/2.0%0A%0A
MAX_HEIG
@@ -350,17 +350,16 @@
NCH = 8%0A
-%0A
FONT_SIZ
@@ -3007,80 +3007,32 @@
-golden_mean = (math.sqrt(5)-1.0)/2.0%0A fig_ratio = golden_mean
+fig_ratio = GOLDEN_RATIO
%0A
|
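Decoded, the refactor hoists the constant next to the other module-level constants, and the branch inside fig_size collapses to a lookup:

INCH_PER_POINT = 1/72.27
GOLDEN_RATIO = (math.sqrt(5)-1.0)/2.0

MAX_HEIGHT_INCH = 8
FONT_SIZE = 8

...

    if fig_ratio is None:
        if fig_height is None:
            fig_ratio = GOLDEN_RATIO
        else:
            fig_ratio = fig_height / fig_width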
5eb2c6f7e1bf0cc1b73b167a08085fccf77974fe
|
Tidy up and doc-comment AWSInstanceEnv class
|
app/config/aws.py
|
app/config/aws.py
|
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
|
Python
| 0
|
@@ -1,8 +1,102 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0ADictionary-like class for config settings from AWS credstash%0A%22%22%22%0A%0A
from bot
@@ -679,41 +679,47 @@
ble=
-%22credential-store%22, context=None,
+None, context=None, profile_name=None):
%0A
@@ -723,24 +723,28 @@
+%22%22%22%0A
profil
@@ -739,29 +739,157 @@
- profile_name=None):
+Low level API for fetching secrets for the current instance%0A %22%22%22%0A%0A if not table:%0A table = '%7B%7D-credentials'.format(self.env)%0A
%0A
@@ -1135,114 +1135,209 @@
-return self.getSecret(key, table='%7B%7D-credentials'.format(self.env))%0A%0A def get(self, key, default=None):
+%22%22%22%0A Enable dict-like access%0A %22%22%22%0A%0A return self.getSecret(key)%0A%0A def get(self, key, default=None):%0A %22%22%22%0A Return the value, or the default if not found%0A %22%22%22%0A
%0A
|
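This diff is mostly docstrings plus one behaviour change: the per-environment table name moves from __getitem__ into getSecret as the default. A hand-decoded sketch of the class after the change (unchanged bodies elided; note the class keeps its misspelled name AWSIntanceEnv, since the diff does not rename it):

# -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""

class AWSIntanceEnv(object):
    ...

    def getSecret(self, name, table=None, context=None, profile_name=None):
        """
        Low level API for fetching secrets for the current instance
        """

        if not table:
            table = '{}-credentials'.format(self.env)

        return credstash.getSecret(
            name,
            self.version,
            region=self.region,
            table=table,
            context=context,
            profile_name=profile_name)

    def __getitem__(self, key):
        """
        Enable dict-like access
        """

        return self.getSecret(key)

    def get(self, key, default=None):
        """
        Return the value, or the default if not found
        """

        try:
            return self.__getitem__(key)
        except credstash.ItemNotFound:
            return default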
9642b8f3d2f14b3a61054f68f05f4ef8eaca0803
|
add validation
|
molo/core/management/commands/add_translated_pages_to_pages.py
|
molo/core/management/commands/add_translated_pages_to_pages.py
|
from __future__ import absolute_import, unicode_literals
from django.core.management.base import BaseCommand
from molo.core.models import PageTranslation, SiteLanguage, Page
class Command(BaseCommand):
def handle(self, *args, **options):
# first add all the translations to the main language Page
# and add the main language page as a translated page
# to the translated pages
main_language = SiteLanguage.objects.get(is_main_language=True)
pages = Page.objects.all().exclude(depth__in=[1, 2, 3])
for page in pages:
if page.specific.language.pk == main_language.pk:
for translation in PageTranslation.objects.filter(page=page):
if translation.page and translation.translated_page:
page.specific.translated_pages.add(
translation.translated_page.specific)
translation.translated_page.specific.translated_pages\
.add(page.specific)
page.save()
translation.translated_page.save()
else:
self.stdout.write(self.style.NOTICE(
'Translation with pk "%s"'
'is missing page/translated_page'
% (translation.pk)))
# loop through all translated_pages on the main language page and
# add all the translations to the rest of the translated pages
# except the language that it is in
for page in Page.objects.all().exclude(depth__in=[1, 2, 3]):
if page.specific.language.pk == main_language.pk:
for translated_page in page.specific.translated_pages.all():
translations = page.specific.translated_pages.all().\
exclude(language__pk=translated_page.language.pk)
for translation in translations:
translated_page.translated_pages.add(translation)
translated_page.save()
|
Python
| 0.000001
|
@@ -1638,16 +1638,50 @@
2, 3%5D):%0A
+ if page.language:%0A
@@ -1742,32 +1742,36 @@
+
for translated_p
@@ -1773,24 +1773,54 @@
ted_page in
+%5C%0A
page.specifi
@@ -1845,16 +1845,20 @@
.all():%0A
+
@@ -1947,16 +1947,20 @@
+
exclude(
@@ -2013,32 +2013,36 @@
+
+
for translation
@@ -2058,16 +2058,20 @@
ations:%0A
+
@@ -2136,16 +2136,20 @@
lation)%0A
+
|
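Decoded, the validation wraps the body of the second loop in an if page.language: guard (skipping pages with no language set) and re-indents accordingly; indentation below is reconstructed by hand:

        for page in Page.objects.all().exclude(depth__in=[1, 2, 3]):
            if page.language:
                if page.specific.language.pk == main_language.pk:
                    for translated_page in \
                            page.specific.translated_pages.all():
                        translations = page.specific.translated_pages.all().\
                            exclude(language__pk=translated_page.language.pk)
                        for translation in translations:
                            translated_page.translated_pages.add(translation)
                            translated_page.save()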
cb4c91e3d109c939236f9581691f837fa0709108
|
Delete manage hackathon detail router
|
open-hackathon-client/src/client/views/route_manage.py
|
open-hackathon-client/src/client/views/route_manage.py
|
# -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
sys.path.append("..")
from client import app
from . import render
from flask_login import login_required
from client.functions import is_local
@app.route("/manage/create_event")
@login_required
def create_event():
return render("/create_event.html", islocal=is_local())
@app.route("/manage")
@login_required
def myhackathon():
return render("/manage/myhackathon.html", hackathon_name="")
# get registered user list of a hackathon
# @app.route("/manage/<hackathon_name>")
# @login_required
# def hackathon_manage_detail(hackathon_name):
# return render("/manage/detail.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/user")
@login_required
def registerusers(hackathon_name):
return render("/manage/registerusers.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/azurecert")
@login_required
def azurecert(hackathon_name):
return render("/manage/azurecert.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/organizers")
@login_required
def organizers(hackathon_name):
return render("/manage/organizers.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/edit")
@login_required
def edithackathon(hackathon_name):
return render("/manage/edithackathon.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/template")
@login_required
def template(hackathon_name):
return render("/manage/template.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/adminmgr")
@login_required
def adminmgr(hackathon_name):
return render("/manage/adminmgr.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/experiment")
@login_required
def experiment(hackathon_name):
return render("/manage/experiment.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/team")
@login_required
def team(hackathon_name):
return render("/manage/team.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/team/<team_id>")
@login_required
def team_award(hackathon_name, team_id):
return render("/manage/team_award.html", hackathon_name=hackathon_name, team_id=team_id)
@app.route("/manage/<hackathon_name>/award")
@login_required
def award(hackathon_name):
return render("/manage/award.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/host_server")
@login_required
def host_server(hackathon_name):
return render("/manage/host_server.html", hackathon_name=hackathon_name)
|
Python
| 0
|
@@ -1579,233 +1579,8 @@
)%0A%0A%0A
-# get registered user list of a hackathon%0A%0A# @app.route(%22/manage/%3Chackathon_name%3E%22)%0A# @login_required%0A# def hackathon_manage_detail(hackathon_name):%0A# return render(%22/manage/detail.html%22, hackathon_name=hackathon_name)%0A%0A%0A
@app
|
58d7592c603509f2bb625e4e2e5cb31ada4a8194
|
Change test for make_kernel(kerneltype='airy') from class to function
|
astropy/nddata/convolution/tests/test_make_kernel.py
|
astropy/nddata/convolution/tests/test_make_kernel.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
|
Python
| 0.000001
|
@@ -121,22 +121,8 @@
lose
-, assert_equal
%0A%0Afr
@@ -285,93 +285,8 @@
se%0A%0A
-class TestMakeKernel(object):%0A %22%22%22%0A Test the make_kernel function%0A %22%22%22%0A%0A
@pyt
@@ -318,20 +318,16 @@
SCIPY')%0A
-
def test
@@ -336,31 +336,19 @@
iry(
-self
):%0A
-
-
%22%22%22%0A
-
@@ -390,20 +390,16 @@
all%0A
-
-
Checks h
@@ -445,20 +445,16 @@
939%0A
-
%22%22%22%0A
@@ -449,20 +449,16 @@
%22%22%22%0A
-
k1 =
@@ -518,86 +518,8 @@
y')%0A
- k2 = make_kernel(%5B3, 3%5D, kernelwidth=0.5, kerneltype='brickwall')%0A
@@ -591,28 +591,24 @@
-
%5B 0.12992753
@@ -652,28 +652,24 @@
-
-
%5B 0.06375119
@@ -698,20 +698,16 @@
5119%5D%5D)%0A
-
asse
@@ -750,33 +750,4 @@
-7)%0A
- assert_equal(k1, k2)%0A
|
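Applied by hand, the hunks remove the one-method class wrapper (and the now-redundant brickwall cross-check), so pytest collects the test as a plain function:

from numpy.testing import assert_allclose

@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
    """
    Test kerneltype airy, a.k.a. brickwall

    Checks https://github.com/astropy/astropy/pull/939
    """
    k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
    ref = np.array([[ 0.06375119,  0.12992753,  0.06375119],
                    [ 0.12992753,  0.22528514,  0.12992753],
                    [ 0.06375119,  0.12992753,  0.06375119]])
    assert_allclose(k1, ref, rtol=0, atol=1e-7)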
e7647da318b2fc5f973080446882347a287aec3a
|
Use same cache for custom domain redirect
|
openedx/core/djangoapps/appsembler/sites/middleware.py
|
openedx/core/djangoapps/appsembler/sites/middleware.py
|
from django.conf import settings
from django.core.cache import cache
from django.contrib.redirects.models import Redirect
from django.shortcuts import redirect
from .models import AlternativeDomain
import logging
log = logging.getLogger(__name__)
class CustomDomainsRedirectMiddleware(object):
def process_request(self, request):
hostname = request.get_host()
if hostname.endswith(settings.SITE_NAME):
cache_key = '{prefix}-{site}'.format(prefix=settings.CUSTOM_DOMAINS_REDIRECT_CACHE_KEY_PREFIX, site=hostname)
custom_domain = cache.get(cache_key)
if custom_domain is None:
try:
alternative_domain = AlternativeDomain.objects.select_related('site').get(domain=hostname)
custom_domain = alternative_domain.site.domain
except AlternativeDomain.DoesNotExist:
custom_domain = ""
cache.set(cache_key, custom_domain, settings.CUSTOM_DOMAINS_REDIRECT_CACHE_TIMEOUT)
if custom_domain:
return redirect("https://" + custom_domain)
return
class RedirectMiddleware(object):
"""
Redirects requests for URLs persisted using the django.contrib.redirects.models.Redirect model.
With the exception of the main site.
"""
def process_request(self, request):
"""
Redirects the current request if there is a matching Redirect model
with the current request URL as the old_path field.
"""
site = request.site
try:
in_whitelist = any(map(
lambda p: p in request.path,
settings.MAIN_SITE_REDIRECT_WHITELIST))
if (site.id == settings.SITE_ID) and not in_whitelist:
return redirect("https://appsembler.com/tahoe/")
except Exception:
            # I'm not entirely sure this middleware gets called only in LMS or in other apps as well.
# Soooo just in case
pass
cache_key = '{prefix}-{site}'.format(prefix=settings.REDIRECT_CACHE_KEY_PREFIX, site=site.domain)
redirects = cache.get(cache_key)
if redirects is None:
redirects = {redirect.old_path: redirect.new_path for redirect in Redirect.objects.filter(site=site)}
cache.set(cache_key, redirects, settings.REDIRECT_CACHE_TIMEOUT)
redirect_to = redirects.get(request.path)
if redirect_to:
return redirect(redirect_to, permanent=True)
|
Python
| 0
|
@@ -61,17 +61,25 @@
rt cache
+, caches
%0A
-
from dja
@@ -336,25 +336,66 @@
, request):%0A
+ cache_general = caches%5B'general'%5D
%0A
-
host
@@ -617,32 +617,40 @@
m_domain = cache
+_general
.get(cache_key)%0A
@@ -980,16 +980,16 @@
in = %22%22%0A
-
@@ -993,32 +993,40 @@
cache
+_general
.set(cache_key,
|
21850d8ab44981b2bb02cb50386db717aacc730b
|
Fix poor coverage
|
paystackapi/tests/test_product.py
|
paystackapi/tests/test_product.py
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.product import Product
class TestProduct(BaseTestCase):
@httpretty.activate
def test_product_create(self):
"""Method defined to test product creation."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Product successfully created"}',
status=201,
)
response = Product.create(
name="Product pypaystack test", description="my test description",
price=500000, currency="NGN"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_product_list(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}',
status=201,
)
response = Product.list()
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.fetch(5499)
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.PUT,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.update(product_id=5499, name="Product pypaystack test",
description="my test description", price=500000000,
currency="USD"
)
self.assertEqual(response['status'], True)
|
Python
| 0.002037
|
@@ -1292,36 +1292,37 @@
to test Product
-list
+fetch
method.%22%22%22%0A
@@ -1708,37 +1708,38 @@
ef test_product_
-fetch
+update
(self):%0A
@@ -1766,36 +1766,38 @@
to test Product
-list
+update
method.%22%22%22%0A
|
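The coverage problem is that a Python class body keeps only the last binding of a name: the second def test_product_fetch silently replaced the first, so the update test never ran. Decoded, the diff renames it and fixes the copied docstrings:

    @httpretty.activate
    def test_product_fetch(self):
        """Function defined to test Product fetch method."""
        ...

    @httpretty.activate
    def test_product_update(self):
        """Function defined to test Product update method."""
        ...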
c11b5b2181434651e1979c6db328ba81ed19566d
|
Add debug output to investigate test-meson-helloworld.
|
meson_install.py
|
meson_install.py
|
#!/usr/bin/env python3
# Copyright 2015 wink saville
#
# licensed under the apache license, version 2.0 (the "license");
# you may not use this file except in compliance with the license.
# you may obtain a copy of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis,
# without warranties or conditions of any kind, either express or implied.
# see the license for the specific language governing permissions and
# limitations under the license.
import utils
import parseinstallargs
import subprocess
import sys
import os
import traceback
import shutil
APP='meson'
URL='https://github.com/mesonbuild/meson.git'
DEFAULT_VER='0.27.0'
#CHECKOUT=DEFAULT_VER
CHECKOUT='master'
class Installer:
'''Installer for meson.'''
def __init__(self, defaultVer=DEFAULT_VER, defaultCodePrefixDir=None,
defaultInstallPrefixDir=None, defaultForceInstall=None):
'''See parseinstallargs for defaults prefixes'''
self.args = parseinstallargs.InstallArgs(APP, defaultVer, defaultCodePrefixDir,
defaultInstallPrefixDir, defaultForceInstall)
def install(self):
dst_dir = os.path.join(self.args.installPrefixDir, 'bin')
os.makedirs(dst_dir, exist_ok=True)
retval = 0
try:
dst = os.path.join(dst_dir, self.args.app)
output = subprocess.check_output([dst, '-v'],
stderr=subprocess.STDOUT)
if output is None:
output = b''
except BaseException as err:
output = b''
if not self.args.forceInstall and bytes(self.args.ver, 'utf-8') in output:
print('{app} {ver} is already installed'
.format(app=self.args.app, ver=self.args.ver))
else:
print('compiling {app} {ver}'
.format(app=self.args.app, ver=self.args.ver))
code_dir = os.path.join(self.args.codePrefixDir, self.args.app)
if self.args.forceInstall:
shutil.rmtree(code_dir, ignore_errors=True)
os.makedirs(code_dir)
utils.git('clone', [URL, code_dir])
os.chdir(code_dir)
utils.git('checkout', [CHECKOUT])
# Not a list but a string
subprocess.check_call('./install_meson.py --prefix {}'
.format(self.args.installPrefixDir), shell=True)
return retval
if __name__ == '__main__':
if len(sys.argv) == 2 and sys.argv[1] == 'printVer':
print(DEFAULT_VER)
else:
installer = Installer()
installer.install()
|
Python
| 0
|
@@ -1389,16 +1389,56 @@
try:%0A
+ print('dst_dir =', dst_dir)%0A
@@ -1476,32 +1476,64 @@
self.args.app)%0A
+ print('dst =', dst)%0A
outp
@@ -1636,118 +1636,368 @@
-if output is None:%0A output = b''%0A except BaseException as err:%0A output = b''%0A
+print('output =', output)%0A if output is None:%0A print('output is None')%0A output = b''%0A except BaseException as err:%0A traceback.print_exc()%0A output = b''%0A%0A print('forceInstall =', self.args.forceInstall)%0A print('self.args.ver =', self.args.ver)%0A print('output =', output)
%0A
|
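Decoded, the debugging is a handful of temporary prints plus a traceback.print_exc(), so the previously swallowed exception becomes visible:

        try:
            print('dst_dir =', dst_dir)
            dst = os.path.join(dst_dir, self.args.app)
            print('dst =', dst)
            output = subprocess.check_output([dst, '-v'],
                                             stderr=subprocess.STDOUT)
            print('output =', output)
            if output is None:
                print('output is None')
                output = b''
        except BaseException as err:
            traceback.print_exc()
            output = b''

        print('forceInstall =', self.args.forceInstall)
        print('self.args.ver =', self.args.ver)
        print('output =', output)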
9c0ab20e62f8eb67001dcd165b55d30cf2f912c7
|
Add refresh in update_ui_lang
|
wxbreads/utils.py
|
wxbreads/utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division
import time
from datetime import datetime
import wx
import wx.richtext as rt
import windbreads.utils as wdu
RTC_ALIGNS = dict(default=wx.TEXT_ALIGNMENT_DEFAULT,
left=wx.TEXT_ALIGNMENT_LEFT,
centre=wx.TEXT_ALIGNMENT_CENTRE,
center=wx.TEXT_ALIGNMENT_CENTER,
right=wx.TEXT_ALIGNMENT_RIGHT,
)
def get_text_width(text, wgt):
font = wgt.GetFont()
dc = wx.WindowDC(wgt)
dc.SetFont(font)
return dc.GetTextExtent(text)
def get_adjust_size(size=(-1, -1), **kwargs):
if size == (-1, -1):
return size
ratio_size = kwargs.get('ratio_size', (1600, 900))
w, h = size
rw, rh = ratio_size
dw, dh = wx.GetDisplaySize()
if rw == dw and rh == dh:
return size
bw, bh = -1, -1
if w != -1:
bw = int((w * dw) / rw)
if h != -1:
bh = int((h * dh) / rh)
return (bw, bh)
def require_int(evt, min_value=1):
wgt = evt.GetEventObject()
value = wgt.GetValue().strip()
if not value.isdigit() or int(value) < min_value:
wgt.ChangeValue('')
else:
wgt.ChangeValue(value)
wgt.SetInsertionPointEnd()
def pydate2wxdate(date):
tt = date.timetuple()
dmy = (tt[2], tt[1] - 1, tt[0])
return wx.DateTimeFromDMY(*dmy)
def wxdate2pydate(date):
if date.IsValid():
return datetime.date(*(map(int, date.FormatISODate().split('-'))))
return None
def echo_text(rtc, text='', fg=None, bg=None, ts=True, nl=True, italic=False,
align=None, underline=False, bold=False, ts_style=False,
font=None, size=None, log_file=False, clear=False, **kwargs):
if clear:
rtc.Clear()
t = kwargs.pop('t', None)
ts_text = '[{}] '.format(datetime.now()) if ts else ''
if isinstance(text, basestring):
if not isinstance(text, unicode):
utext = text.decode(wdu.detect_encoding(text)['encoding'])
else:
utext = text
else:
utext = '{}'.format(text)
rtc.SetInsertionPointEnd()
rta = rt.RichTextAttr()
rta.SetAlignment(RTC_ALIGNS['default'])
rta.SetTextColour('black')
rta.SetBackgroundColour('white')
rta.SetFontStyle(wx.FONTSTYLE_NORMAL)
rta.SetFontWeight(wx.FONTWEIGHT_NORMAL)
rta.SetFontUnderlined(False)
rtc.SetDefaultStyle(rta)
if ts_text and not ts_style:
rtc.WriteText(ts_text)
align = RTC_ALIGNS.get(align)
if align:
rta.SetAlignment(align)
if fg:
rta.SetTextColour(fg)
if bg:
rta.SetBackgroundColour(bg)
if font:
rta.SetFontFaceName(font)
if size:
rta.SetFontSize(size)
if bold is True:
rta.SetFontWeight(wx.FONTWEIGHT_BOLD)
elif bold is False:
rta.SetFontWeight(wx.FONTWEIGHT_NORMAL)
if italic is True:
rta.SetFontStyle(wx.FONTSTYLE_ITALIC)
elif italic is False:
rta.SetFontStyle(wx.FONTSTYLE_NORMAL)
if underline is not None:
rta.SetFontUnderlined(underline)
rtc.BeginStyle(rta)
if ts_text and ts_style:
rtc.WriteText(ts_text)
rtc.WriteText(wdu.ttt(utext, t))
rtc.EndStyle()
if nl:
rtc.Newline()
rtc.ShowPosition(rtc.GetLastPosition())
if log_file:
with open(log_file, kwargs.pop('log_mode', 'a')) as f:
if ts_text:
f.write(ts_text)
if kwargs.pop('tff', None) and t: # t for file
text = wdu.ttt(utext, t)
if isinstance(text, unicode):
text = text.encode('utf-8')
f.write(text)
if nl:
f.write('\n')
def on_hide(self, evt=None):
self.Hide()
def on_echoing(self, **kwargs):
"""Default method for echoing text."""
if self.is_echoing or not self.echo_lines:
return
self.is_echoing = True
while self.echo_lines:
line = self.echo_lines.pop(0)
self.echo_text(line[0], **line[1])
self.is_echoing = False
def start_timer(timer, miliseconds=1000, one_shot=False):
if timer.IsRunning():
return
timer.Start(int(miliseconds), one_shot)
def stop_timer(timer):
if timer.IsRunning():
timer.Stop()
def stop_timers(timers=[]):
[stop_timer(timer) for timer in timers]
def update_clock_statusbar(sbar, ts_fmt='%d-%b-%Y %H:%M', idx=2):
set_status_text(sbar, time.strftime(ts_fmt), idx)
def set_status_text(sbar, text, idx, t=None):
sbar.SetStatusText(t(text) if t else text, idx)
def on_popup_lang(self, evt):
if not hasattr(self, 'english_id'):
self.english_id = wx.NewId()
self.chinese_id = wx.NewId()
self.Bind(wx.EVT_MENU, self.popup_lang, id=self.english_id)
self.Bind(wx.EVT_MENU, self.popup_lang, id=self.chinese_id)
menu = wx.Menu()
menu.Append(self.english_id, 'English', '', wx.ITEM_RADIO)
menu.Append(self.chinese_id, 'Chinese - 简体中文', '', wx.ITEM_RADIO)
if self.lang == 'zh':
menu.Check(self.chinese_id, True)
else:
menu.Check(self.english_id, True)
self.PopupMenu(menu)
menu.Destroy()
def popup_lang(self, evt):
lang = 'zh' if evt.GetId() == self.chinese_id else 'en'
if lang != self.lang:
self.lang = lang
self.update_t()
self.update_ui_lang()
self.save_lang(lang)
evt.Skip()
def update_ui_lang(self):
if hasattr(self, 'lang_wgts'):
for lwgt in self.lang_wgts:
tooltip = ''
if len(lwgt) == 2:
wgt, label = lwgt
else:
wgt, label, tooltip = lwgt
if tooltip:
wgt.SetToolTipString(self.tt(tooltip))
wgt.SetLabel(self.tt(label))
|
Python
| 0.000001
|
@@ -5488,16 +5488,30 @@
ang(self
+, refresh=True
):%0A i
@@ -5849,8 +5849,116 @@
label))%0A
+%0A if refresh:%0A if getattr(self, 'panel'):%0A self.panel.Layout()%0A%0A self.Refresh()%0A
|
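Decoded, the method grows a refresh flag that re-lays-out the panel and repaints after relabelling. Note that getattr(self, 'panel') is called with no default, so this assumes every instance actually has a panel attribute; getattr(self, 'panel', None) would be the defensive spelling:

def update_ui_lang(self, refresh=True):
    if hasattr(self, 'lang_wgts'):
        ...  # widget relabelling loop unchanged

    if refresh:
        if getattr(self, 'panel'):
            self.panel.Layout()

        self.Refresh()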
396fbb31fdfe212da9c531e5aa6240c554f0d86f
|
Refactor logging to use recommended pylint format
|
xboxapi/client.py
|
xboxapi/client.py
|
#-*- coding: utf-8 -*-
import requests
import logging
import json
import os
# Local libraries
from .gamer import Gamer
import xboxapi
logging.basicConfig()
class Client(object):
def __init__(self, api_key=None, timeout=None, lang=None):
self.api_key = api_key
self.timeout = timeout
self.endpoint = 'https://xboxapi.com/v2/'
self.timeout = timeout if timeout is not None else 3 # Seconds
self.lang = lang
self.last_method_call = None
self.continuation_token = None
# Debug logging can be triggered from environment variable
# XBOXAPI_DEBUG=1
self.logger = logging.getLogger('xboxapi')
log_level = logging.DEBUG if os.getenv(
'XBOXAPI_DEBUG') else logging.INFO
self.logger.setLevel(log_level)
if self.api_key is None:
raise ValueError('Api key is missing')
def gamer(self, gamertag=None, xuid=None):
''' return a gamer object '''
if gamertag is None:
raise ValueError('No gamertag given!')
return Gamer(gamertag=gamertag, client=self, xuid=xuid)
def api_get(self, method):
''' GET wrapper on requests library '''
headers = {'X-Auth': self.api_key,
'User-Agent': 'Python/XboxApi ' + xboxapi.__version__}
if self.lang is not None:
headers['Accept-Language'] = self.lang
url = self.endpoint + method
# Check for continuation token and the method match the last call
if method == self.last_method_call and self.continuation_token is not None:
url = url + '?continuationToken=' + self.continuation_token
self.logger.debug('{} {}'.format('GET', url))
self.logger.debug('Headers: {}'.format(headers))
res = requests.get(self.endpoint + method,
headers=headers, timeout=self.timeout)
self.logger.debug('Response: {}'.format(res.json()))
# Track method calls and peak for continuation token
self.last_method_call = method
self.continuation_token = None
if 'X-Continuation-Token' in res.headers:
self.continuation_token = res.headers['X-Continuation-Token']
return res
def api_post(self, method, body):
''' POST wrapper on requests library '''
headers = {
'X-AUTH': self.api_key,
'Content-Type': 'application/json'
}
url = '{}{}'.format(self.endpoint, method)
self.logger.debug('{} {}'.format('POST', url))
self.logger.debug('Headers: {}'.format(headers))
self.logger.debug('Body: {}'.format(body))
res = requests.post(self.endpoint + method, headers=headers, data=json.dumps(body),
timeout=self.timeout)
self.logger.debug('Response: {}'.format(res.json()))
return res
def calls_remaining(self):
''' Check on the limits from server '''
server_headers = self.api_get('accountxuid').headers
limit_headers = {}
limit_headers[
'X-RateLimit-Reset'] = server_headers['X-RateLimit-Reset']
limit_headers[
'X-RateLimit-Limit'] = server_headers['X-RateLimit-Limit']
limit_headers[
'X-RateLimit-Remaining'] = server_headers['X-RateLimit-Remaining']
return limit_headers
|
Python
| 0
|
@@ -719,29 +719,16 @@
.getenv(
-%0A
'XBOXAPI
@@ -754,16 +754,16 @@
ng.INFO%0A
+
@@ -1689,30 +1689,24 @@
.debug('
-%7B%7D %7B%7D'.format(
+%25s %25s',
'GET', u
@@ -1700,33 +1700,32 @@
%25s', 'GET', url)
-)
%0A self.lo
@@ -1745,35 +1745,29 @@
eaders:
-%7B%7D'.format(
+%25s',
headers)
)%0A%0A
@@ -1758,17 +1758,16 @@
headers)
-)
%0A%0A
@@ -1869,32 +1869,73 @@
t=self.timeout)%0A
+ self.xboxapi_response_error(res)%0A
self.log
@@ -1951,35 +1951,29 @@
('Response:
-%7B%7D'.format(
+%25s',
res.json()))
@@ -1963,33 +1963,32 @@
%25s', res.json())
-)
%0A%0A # Trac
@@ -2541,22 +2541,16 @@
ug('
-%7B%7D %7B%7D'.format(
+%25s %25s',
'POS
@@ -2557,17 +2557,16 @@
T', url)
-)
%0A
@@ -2598,27 +2598,21 @@
rs:
-%7B%7D'.format(
+%25s',
headers)
)%0A
@@ -2607,17 +2607,16 @@
headers)
-)
%0A
@@ -2645,25 +2645,18 @@
dy:
-%7B%7D'.format(
+%25s',
body)
-)
%0A%0A
@@ -2787,32 +2787,74 @@
t=self.timeout)%0A
+ self.xboxapi_response_error(res)%0A%0A
self.log
@@ -2874,27 +2874,21 @@
sponse:
-%7B%7D'.format(
+%25s',
res.json
@@ -2890,17 +2890,16 @@
.json())
-)
%0A%0A
@@ -3097,37 +3097,24 @@
mit_headers%5B
-%0A
'X-RateLimit
@@ -3178,37 +3178,24 @@
mit_headers%5B
-%0A
'X-RateLimit
@@ -3263,29 +3263,16 @@
headers%5B
-%0A
'X-RateL
@@ -3334,33 +3334,4 @@
g'%5D%0A
- return limit_headers%0A
|
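The core of this refactor is pylint's logging-format-interpolation warning: pass the arguments to the logger and let it interpolate lazily. A runnable sketch of the before/after call style (the diff also inserts calls to a self.xboxapi_response_error(res) helper that is not defined anywhere in this record, and drops the trailing return limit_headers):

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('xboxapi')

url = 'https://xboxapi.com/v2/accountxuid'

# Before: str.format builds the message eagerly, even when DEBUG is off.
logger.debug('{} {}'.format('GET', url))

# After: %-style arguments are interpolated only if the record is actually
# emitted, which is the form pylint recommends.
logger.debug('%s %s', 'GET', url)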
460c89f57875e06427ad15759065c219a600044e
|
Add missing import
|
byceps/blueprints/ticketing_admin/views.py
|
byceps/blueprints/ticketing_admin/views.py
|
"""
byceps.blueprints.ticketing_admin.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import abort, g, redirect, request, url_for
from ...services.party import service as party_service
from ...services.shop.order import service as order_service
from ...services.ticketing import exceptions as ticket_exceptions, \
ticket_bundle_service, ticket_service, ticket_user_management_service
from ...util.framework.blueprint import create_blueprint
from ...util.framework.flash import flash_error, flash_success
from ...util.framework.templating import templated
from ...util.views import respond_no_content
from ..authorization.decorators import permission_required
from ..authorization.registry import permission_registry
from .authorization import TicketingPermission
from .forms import SpecifyUserForm
from . import service
blueprint = create_blueprint('ticketing_admin', __name__)
permission_registry.register_enum(TicketingPermission)
@blueprint.route('/tickets/for_party/<party_id>', defaults={'page': 1})
@blueprint.route('/tickets/for_party/<party_id>/pages/<int:page>')
@permission_required(TicketingPermission.view)
@templated
def index_for_party(party_id, page):
"""List tickets for that party."""
party = party_service.find_party(party_id)
if party is None:
abort(404)
per_page = request.args.get('per_page', type=int, default=15)
search_term = request.args.get('search_term', default='').strip()
tickets = ticket_service.get_tickets_with_details_for_party_paginated(
party.id, page, per_page, search_term=search_term)
return {
'party': party,
'search_term': search_term,
'tickets': tickets,
}
@blueprint.route('/tickets/<uuid:ticket_id>')
@permission_required(TicketingPermission.view)
@templated
def view_ticket(ticket_id):
"""Show a ticket."""
ticket = ticket_service.get_ticket_with_details(ticket_id)
if ticket is None:
abort(404)
party = party_service.find_party(ticket.category.party_id)
if ticket.order_number:
order = order_service.find_order_by_order_number(ticket.order_number)
else:
order = None
events = service.get_events(ticket.id)
return {
'party': party,
'ticket': ticket,
'order': order,
'events': events,
}
@blueprint.route('/tickets/<uuid:ticket_id>/appoint_user')
@permission_required(TicketingPermission.checkin)
@templated
def appoint_user_form(ticket_id, erroneous_form=None):
"""Show a form to select a user to appoint for the ticket."""
ticket = _get_ticket_or_404(ticket_id)
form = erroneous_form if erroneous_form else SpecifyUserForm()
return {
'ticket': ticket,
'form': form,
}
@blueprint.route('/tickets/<uuid:ticket_id>/user', methods=['POST'])
@permission_required(TicketingPermission.checkin)
def appoint_user(ticket_id):
"""Appoint a user for the ticket."""
form = SpecifyUserForm(request.form)
if not form.validate():
return appoint_user_form(ticket_id, form)
ticket = _get_ticket_or_404(ticket_id)
user = form.user.data
manager = g.current_user
ticket_user_management_service.appoint_user(ticket.id, user.id, manager.id)
flash_success('{} wurde als Nutzer/in von Ticket {} eingetragen.',
user.screen_name, ticket.code)
return redirect(url_for('.view_ticket', ticket_id=ticket.id))
@blueprint.route('/tickets/<uuid:ticket_id>/flags/user_checked_in', methods=['POST'])
@permission_required(TicketingPermission.checkin)
@respond_no_content
def set_user_checked_in_flag(ticket_id):
"""Check the user in."""
ticket = _get_ticket_or_404(ticket_id)
initiator_id = g.current_user.id
try:
ticket_user_checkin_service.check_in_user(ticket.id, initiator_id)
except ticket_exceptions.UserAccountSuspended:
flash_error(
'Das dem Ticket zugewiesene Benutzerkonto ist gesperrt. '
'Der Check-In ist nicht erlaubt.')
return
flash_success("Benutzer '{}' wurde eingecheckt.", ticket.used_by.screen_name)
@blueprint.route('/tickets/<uuid:ticket_id>/flags/user_checked_in', methods=['DELETE'])
@permission_required(TicketingPermission.checkin)
@respond_no_content
def unset_user_checked_in_flag(ticket_id):
"""Revert the user check-in state."""
ticket = _get_ticket_or_404(ticket_id)
initiator_id = g.current_user.id
ticket_user_checkin_service.revert_user_check_in(ticket.id, initiator_id)
flash_success('Der Check-In wurde rückgängig gemacht.')
@blueprint.route('/bundles/<uuid:bundle_id>')
@permission_required(TicketingPermission.view)
@templated
def view_bundle(bundle_id):
"""Show a ticket bundle."""
bundle = ticket_bundle_service.find_bundle(bundle_id)
if bundle is None:
abort(404)
party = party_service.find_party(bundle.ticket_category.party_id)
tickets = ticket_bundle_service.find_tickets_for_bundle(bundle.id)
return {
'party': party,
'bundle': bundle,
'tickets': tickets,
}
def _get_ticket_or_404(ticket_id):
ticket = ticket_service.find_ticket(ticket_id)
if ticket is None:
abort(404)
return ticket
|
Python
| 0.000466
|
@@ -449,32 +449,67 @@
ticket_service,
+ ticket_user_checkin_service, %5C%0A
ticket_user_man
|
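Decoded, the fix simply adds the missing module to the multi-line import, since the check-in views below already call ticket_user_checkin_service:

from ...services.ticketing import exceptions as ticket_exceptions, \
    ticket_bundle_service, ticket_service, ticket_user_checkin_service, \
    ticket_user_management_service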
2726ec1c400a212b1cac13f20d65c1b43eb042b0
|
Fix formatting in download-google-smart-card-client-library.py
|
example_js_standalone_smart_card_client_app/download-google-smart-card-client-library.py
|
example_js_standalone_smart_card_client_app/download-google-smart-card-client-library.py
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downloads from GitHub the latest released version of the client library for
communicating to the Google Smart Card Connector app."""
import json
import os
import sys
import urllib2
GITHUB_REPO_OWNER = "GoogleChrome"
GITHUB_REPO = "chromeos_smart_card_connector"
CLIENT_LIBRARY_ASSET_NAME = "google-smart-card-client-library.js"
OUTPUT_FILE_NAME = "google-smart-card-client-library.js"
GITHUB_LATEST_RELEASE_URL_TEMPLATE = \
"https://api.github.com/repos/{owner}/{repo}/releases/latest"
def main():
sys.stderr.write('Accessing GitHub API...\n')
latest_release_url = GITHUB_LATEST_RELEASE_URL_TEMPLATE.format(
owner=GITHUB_REPO_OWNER, repo=GITHUB_REPO)
latest_release_info = json.load(urllib2.urlopen(latest_release_url))
client_library_download_url = None
for asset in latest_release_info.get("assets", []):
if asset["name"] == CLIENT_LIBRARY_ASSET_NAME:
client_library_download_url = asset["browser_download_url"]
if client_library_download_url is None:
raise RuntimeError("Asset with the client library not found in the latest "
"GitHub release")
sys.stderr.write('Downloading from "{0}"...\n'.format(
client_library_download_url))
client_library = urllib2.urlopen(client_library_download_url).read()
if os.path.dirname(__file__):
output_file_path = os.path.join(
os.path.relpath(os.path.dirname(__file__)), OUTPUT_FILE_NAME)
else:
output_file_path = OUTPUT_FILE_NAME
with open(output_file_path, "wt") as f:
f.write(client_library)
sys.stderr.write(
'Successfully finished. The library is stored at "{0}".\n'.format(
output_file_path))
if __name__ == '__main__':
main()
|
Python
| 0.999998
|
@@ -1925,26 +1925,24 @@
ile__):%0A
-
output_file_
@@ -1962,18 +1962,16 @@
h.join(%0A
-
@@ -2040,18 +2040,16 @@
else:%0A
-
outp
|
83549f9549a253fb7a86e8c051bd24fab91a0f5f
|
Make the output a little better
|
conda_build/main_inspect.py
|
conda_build/main_inspect.py
|
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import sys
import argparse
from collections import defaultdict
from conda.misc import which_package
from conda.lock import Locked
from conda_build.main_build import args_func
from conda_build.config import config
from conda_build.ldd import get_package_linkages
def main():
p = argparse.ArgumentParser(
description='tool for inspecting conda packages'
)
p.add_argument(
'packages',
action='store',
nargs='+',
help='conda packages to inspect',
)
p.add_argument(
'--linkages',
action="store_true",
help="inspect the linkages of the binary files in the package",
)
p.set_defaults(func=execute)
args = p.parse_args()
args_func(args, p)
def print_linkages(depmap):
# Print system and not found last
k = sorted(depmap.keys() - {'system', 'not found'})
for dep in k + ['system', 'not found']:
print(dep)
for lib, path in depmap[dep]:
print(" %s => %s" % (lib, path))
print()
def execute(args, parser):
with Locked(config.croot):
for pkg in args.packages:
if args.linkages:
linkages = get_package_linkages(pkg)
depmap = defaultdict(set)
for binary in linkages:
for lib, path in linkages[binary]:
if path.startswith(config.test_prefix):
deps = list(which_package(path))
if len(deps) > 1:
print("Warning: %s comes from multiple packages: %s" % (path, ' and '.join(deps)), file=sys.stderr)
for d in deps:
depmap[d].add((lib, path))
elif path == 'not found':
depmap['not found'].add((lib, path))
else:
depmap['system'].add((lib, path))
print_linkages(depmap)
|
Python
| 0.999998
|
@@ -1194,16 +1194,24 @@
print(
+%22%25s:%22 %25
dep)%0A
@@ -1232,16 +1232,23 @@
path in
+sorted(
depmap%5Bd
@@ -1250,16 +1250,17 @@
map%5Bdep%5D
+)
:%0A
@@ -1272,16 +1272,18 @@
print(%22
+
%25s =%3E
|
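Decoded, the output tweak adds a colon after each dependency name, sorts the (lib, path) pairs for stable output, and widens the indent (exact width approximated here):

def print_linkages(depmap):
    # Print system and not found last
    k = sorted(depmap.keys() - {'system', 'not found'})
    for dep in k + ['system', 'not found']:
        print("%s:" % dep)
        for lib, path in sorted(depmap[dep]):
            print("    %s => %s" % (lib, path))
        print()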
ff9e3c6ef604a47a616e111ee2a90fda77692977
|
Bump version to 3.3.2
|
src/jukeboxmaya/__init__.py
|
src/jukeboxmaya/__init__.py
|
__author__ = 'David Zuber'
__email__ = 'zuber.david@gmx.de'
__version__ = '3.3.1'
STANDALONE_INITIALIZED = None
"""After calling :func:`init` this is True, if maya standalone
has been initialized or False, if you are running
from within maya.
It is None, if initialized has not been called yet.
"""
|
Python
| 0.000002
|
@@ -76,9 +76,9 @@
3.3.
-1
+2
'%0A%0A%0A
|
94abb4f44fe9965053990265f79e0dc0356c2bad
|
Add default for bear_dirs
|
coalib/collecting/BearCollector.py
|
coalib/collecting/BearCollector.py
|
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import importlib
import inspect
import os
import re
import sys
from coalib.collecting.FileCollector import FileCollector
from coalib.output.ConsolePrinter import ConsolePrinter
class BearCollector(FileCollector):
def __init__(self,
bear_kinds,
bear_dirs,
bear_names=None,
ignored_bears=None,
regexs=None,
log_printer=ConsolePrinter()):
"""
This collector stores bear classes (not instances) in self._items
:param bear_kinds: the KINDs of bears to be collected
:param bear_dirs: list of strings: directories from which bears should be collected
:param bear_names: list of strings: names of bears that should be collected. Default is all.
:param ignored_bears: list of strings: names of bears that should not be collected. Default is none.
:param regexs: list of strings: regexs that match bears to be collected.
:param log_printer: LogPrinter to handle logging of debug, warning and error messages
"""
if bear_names is None:
bear_names = []
if ignored_bears is None:
ignored_bears = []
if regexs is None:
regexs = []
if not isinstance(bear_kinds, list):
raise TypeError("bear_kinds should be of type list")
if not isinstance(bear_names, list):
raise TypeError("bear_names should be of type list")
if not isinstance(ignored_bears, list):
raise TypeError("ignored should be of type list")
if not isinstance(regexs, list):
raise TypeError("regexs should be of type list")
FileCollector.__init__(self,
flat_dirs=bear_dirs,
allowed_types=["py"],
log_printer=log_printer)
self._bear_kinds = bear_kinds
self._bear_names = bear_names
self._ignored_bears = ignored_bears
self._regexs = regexs
def _is_target(self, file_path):
"""
:param file_path: absolute path to a file
:return: Bool value to determine if bears should be imported from this file
This method assumes that the given path lies in a directory that should be collected. However it will check if
the path is a subpath of an ignored directory.
"""
# type disallowed
if not os.path.splitext(file_path)[1].lower().lstrip('.') in self._allowed_types:
return False
bear_name = os.path.splitext(os.path.basename(file_path))[0]
# ignored bear
if bear_name in self._ignored_bears:
return False
# explicitly included
if bear_name in self._bear_names:
return True
# regex included
if any(re.match(regex, bear_name) for regex in self._regexs):
return True # specifically called
        # don't include if not everything is to be included
if self._bear_names or self._regexs:
return False # specific bears were called but not this one
# include everything
return True
def collect(self):
"""
:return: list of classes (not instances) of all collected bears
"""
files = FileCollector.collect(self) # needs to be upfront since it calls _unfold_params()
bears = []
for f_dir in self._flat_dirs:
if f_dir not in sys.path:
sys.path.insert(0, f_dir)
for file in files:
module_name = os.path.splitext(os.path.basename(file))[0]
module = importlib.import_module(module_name)
for name, p_object in inspect.getmembers(module):
if hasattr(p_object, "kind"):
if inspect.getfile(p_object) == file:
bear_kind = None
try:
bear_kind = p_object.kind()
except:
pass # Bear base class
if bear_kind in self._bear_kinds:
bears.append(p_object)
self._items = bears
return bears
|
Python
| 0
|
@@ -733,16 +733,72 @@
llector%0A
+from coalib.misc.StringConstants import StringConstants%0A
from coa
@@ -953,32 +953,68 @@
bear_dirs
+=%5BStringConstants.coalib_bears_root%5D
,%0A
|
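Decoded, the hunks above add one import and give bear_dirs a default; a sketch of the resulting signature (the "..." elides the unchanged body; note that a mutable list default is evaluated once and shared across calls):

from coalib.misc.StringConstants import StringConstants

class BearCollector(FileCollector):
    def __init__(self,
                 bear_kinds,
                 # default bear directory (a shared mutable default)
                 bear_dirs=[StringConstants.coalib_bears_root],
                 bear_names=None,
                 ignored_bears=None,
                 regexs=None,
                 log_printer=ConsolePrinter()):
        ...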
a9c7a6e441159bdf1fd13d70bcc91617dee93f03
|
revert revert.
|
lib/kodi65/selectdialog.py
|
lib/kodi65/selectdialog.py
|
# -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <phil65@kodi.tv>
# This program is Free Software see LICENSE file for details
import xbmcgui
import xbmc
from kodi65 import addon
C_LIST_SIMPLE = 3
C_LIST_DETAIL = 6
C_BUTTON_GET_MORE = 5
C_LABEL_HEADER = 1
class SelectDialog(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
xbmcgui.WindowXMLDialog.__init__(self)
self.items = kwargs.get('listing')
self.header = kwargs.get('header')
self.detailed = kwargs.get('detailed')
self.extrabutton = kwargs.get('extrabutton')
self.listitems = [i.get_listitem() for i in self.items] if self.items else []
self.index = -1
def onInit(self):
if not self.listitems:
            self.index = -1
self.close()
elif len(self.listitems) == 1:
self.index == 0
self.close()
self.list = self.getControl(C_LIST_DETAIL)
self.getControl(C_LIST_DETAIL).setVisible(self.detailed)
self.getControl(C_LIST_SIMPLE).setVisible(not self.detailed)
self.getControl(C_BUTTON_GET_MORE).setVisible(bool(self.extrabutton))
if self.extrabutton:
self.getControl(C_BUTTON_GET_MORE).setLabel(self.extrabutton)
self.getControl(C_LABEL_HEADER).setLabel(self.header)
self.list.addItems(self.listitems)
self.setFocus(self.list)
def onClick(self, control_id):
if control_id in [C_LIST_SIMPLE, C_LIST_DETAIL]:
self.index = int(self.list.getSelectedPosition())
elif control_id == C_BUTTON_GET_MORE:
self.index = -2
self.close()
def onFocus(self, control_id):
pass
def open(listitems, header, detailed=True, extrabutton=False):
"""
open selectdialog, return index (-1 for closing, -2 for extra button)
"""
xbmc.executebuiltin("Dialog.Close(busydialog)")
w = SelectDialog('DialogSelect.xml', addon.PATH,
listing=listitems,
header=header,
detailed=detailed,
extrabutton=extrabutton)
w.doModal()
return w.index
|
Python
| 0.00001
|
@@ -814,100 +814,8 @@
e()%0A
- elif len(self.listitems) == 1:%0A self.index == 0%0A self.close()%0A
|
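After this diff only the empty-list early exit remains in onInit; a best-effort reconstruction of the method's opening:

def onInit(self):
    if not self.listitems:
        self.index = -1  # nothing to select
        self.close()
    self.list = self.getControl(C_LIST_DETAIL)
    self.getControl(C_LIST_DETAIL).setVisible(self.detailed)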
7a83a9be7e2a986979cc898c3fd3aa3bb49442cc
|
modify dx model
|
cea/technologies/direct_expansion_units.py
|
cea/technologies/direct_expansion_units.py
|
# -*- coding: utf-8 -*-
"""
direct expansion units
"""
from __future__ import division
from scipy.interpolate import interp1d
from math import log, ceil
import pandas as pd
from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK
__author__ = "Shanshan Hsieh"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Shanshan Hsieh"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
# FIXME: this model is simplified and requires updating
PRICE_DX_PER_W = 1.373 #USD FIXME: to be moved to database
# operation costs
def calc_cop_DX(Q_load_W):
cop = 2.7
return cop
def calc_DX(mdot_kgpers, T_sup_K, T_re_K):
q_chw_W = mdot_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK * (T_re_K - T_sup_K)
cop_DX = calc_cop_DX(q_chw_W)
wdot_W = q_chw_W/cop_DX
return wdot_W
# investment and maintenance costs
def calc_Cinv_DX(Q_design_W):
"""
Assume the same cost as gas boilers.
:type Q_design_W : float
:param Q_design_W: Design Load of Boiler in [W]
:rtype InvCa : float
:returns InvCa: Annualized investment costs in CHF/a including Maintenance Cost
"""
Capex_a = 0
Opex_fixed = 0
if Q_design_W > 0:
InvC = Q_design_W * PRICE_DX_PER_W
Inv_IR = 5 / 100
Inv_LT = 25
Inv_OM = 5 / 100
Capex_a = InvC * (Inv_IR) * (1 + Inv_IR) ** Inv_LT / ((1 + Inv_IR) ** Inv_LT - 1)
Opex_fixed = Capex_a * Inv_OM
return Capex_a, Opex_fixed
|
Python
| 0
|
@@ -167,16 +167,35 @@
s as pd%0A
+import numpy as np%0A
from cea
@@ -597,11 +597,9 @@
= 1.
-373
+6
#US
@@ -695,9 +695,9 @@
= 2.
-7
+3
%0A%0A
@@ -751,24 +751,95 @@
, T_re_K):%0A%0A
+ if np.isclose(mdot_kgpers, 0.0):%0A%0A wdot_W = 0%0A else:%0A
q_chw_W
@@ -907,16 +907,20 @@
sup_K)%0A%0A
+
cop_
@@ -946,16 +946,20 @@
chw_W)%0A%0A
+
wdot
|
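Decoding the hunks: numpy is imported, the price and COP constants change, and calc_DX gains a zero-flow guard; a best-effort reconstruction of the affected code:

import numpy as np

PRICE_DX_PER_W = 1.6  # USD

def calc_cop_DX(Q_load_W):
    cop = 2.3
    return cop

def calc_DX(mdot_kgpers, T_sup_K, T_re_K):
    if np.isclose(mdot_kgpers, 0.0):
        wdot_W = 0  # no flow, no compressor work
    else:
        q_chw_W = mdot_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK * (T_re_K - T_sup_K)
        cop_DX = calc_cop_DX(q_chw_W)
        wdot_W = q_chw_W / cop_DX
    return wdot_W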
8b359d97e59d759bfd7711c8aacf9abc657fe457
|
fix demo
|
pipeline/demo/pipeline-homo-data-split-demo.py
|
pipeline/demo/pipeline-homo-data-split-demo.py
|
from pipeline.component.homo_data_split import HomoDataSplit
from pipeline.backend.config import Backend
from pipeline.backend.config import WorkMode
from pipeline.backend.pipeline import PipeLine
from pipeline.component.dataio import DataIO
from pipeline.component.input import Input
from pipeline.interface.data import Data
guest = 9999
host = 10000
arbiter = 10002
guest_train_data = {"name": "breast_homo_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_homo_host", "namespace": "experiment"}
input_0 = Input(name="train_data")
print ("get input_0's init name {}".format(input_0.name))
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
dataio_0 = DataIO(name="dataio_0")
dataio_0.get_party_instance(role='guest', party_id=guest).algorithm_param(with_label=True, output_format="dense")
dataio_0.get_party_instance(role='host', party_id=host).algorithm_param(with_label=True)
homo_data_split_0 = HomoDataSplit(name="homo_data_split_0", stratified=True, test_size=0.2, validate_size=0.1)
print ("get input_0's name {}".format(input_0.name))
pipeline.add_component(dataio_0, data=Data(data=input_0.data))
pipeline.add_component(homo_data_split_0, data=Data(data=dataio_0.output.data))
pipeline.compile()
pipeline.fit(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
# predict
pipeline.predict(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
print (pipeline.get_component("dataio_0").get_model_param())
print (pipeline.get_component("homo_data_split_0").summary())
|
Python
| 0
|
@@ -1645,368 +1645,8 @@
%7D)%0A%0A
-# predict%0A%0Apipeline.predict(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,%0A feed_dict=%7Binput_0:%0A %7B%22guest%22: %7B9999: guest_train_data%7D,%0A %22host%22: %7B%0A 10000: host_train_data%0A %7D%0A %7D%0A%0A %7D)%0A%0A
prin
|
7ad707e722eabefc989cfa41fbf17c8315d948fd
|
Add optional parameters for Django fields.
|
oauth2client/django_orm.py
|
oauth2client/django_orm.py
|
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utilities for Django.
Utilities for using OAuth 2.0 in conjunction with
the Django datastore.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import oauth2client
import base64
import pickle
from django.db import models
from oauth2client.client import Storage as BaseStorage
class CredentialsField(models.Field):
__metaclass__ = models.SubfieldBase
def db_type(self):
return 'VARCHAR'
def to_python(self, value):
if not value:
return None
if isinstance(value, oauth2client.client.Credentials):
return value
return pickle.loads(base64.b64decode(value))
def get_db_prep_value(self, value):
return base64.b64encode(pickle.dumps(value))
class FlowField(models.Field):
__metaclass__ = models.SubfieldBase
def db_type(self):
return 'VARCHAR'
def to_python(self, value):
if value is None:
return None
if isinstance(value, oauth2client.client.Flow):
return value
return pickle.loads(base64.b64decode(value))
def get_db_prep_value(self, value):
return base64.b64encode(pickle.dumps(value))
class Storage(BaseStorage):
"""Store and retrieve a single credential to and from
the datastore.
This Storage helper presumes the Credentials
  have been stored as a CredentialsField
on a db model class.
"""
def __init__(self, model_class, key_name, key_value, property_name):
"""Constructor for Storage.
Args:
      model_class: db.Model, model class
key_name: string, key name for the entity that has the credentials
key_value: string, key value for the entity that has the credentials
      property_name: string, name of the property that is a CredentialsProperty
"""
self.model_class = model_class
self.key_name = key_name
self.key_value = key_value
self.property_name = property_name
def get(self):
"""Retrieve Credential from datastore.
Returns:
oauth2client.Credentials
"""
credential = None
query = {self.key_name: self.key_value}
entities = self.model_class.objects.filter(**query)
if len(entities) > 0:
credential = getattr(entities[0], self.property_name)
if credential and hasattr(credential, 'set_store'):
credential.set_store(self.put)
return credential
def put(self, credentials):
"""Write a Credentials to the datastore.
Args:
credentials: Credentials, the credentials to store.
"""
args = {self.key_name: self.key_value}
entity = self.model_class(**args)
setattr(entity, self.property_name, credentials)
entity.save()
|
Python
| 0
|
@@ -962,32 +962,49 @@
def db_type(self
+, connection=None
):%0A return 'V
@@ -1380,24 +1380,41 @@
db_type(self
+, connection=None
):%0A retur
|
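Both hunks make the same change: each db_type method gains an optional connection parameter, which Django's multi-database API passes to fields; decoded:

class CredentialsField(models.Field):
    __metaclass__ = models.SubfieldBase

    # connection is accepted (and ignored) so the field works whether or
    # not the caller passes the active database connection
    def db_type(self, connection=None):
        return 'VARCHAR'

FlowField.db_type gets the identical signature.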
ed48555984886ff5ade23aeb23ad5f85e77e5b69
|
fix docs
|
chainercv/transforms/image/pca_lighting.py
|
chainercv/transforms/image/pca_lighting.py
|
import numpy
def pca_lighting(img, sigma, eigen_value=None, eigen_vector=None):
"""Alter the intensities of input image using PCA.
This is used in training of AlexNet [Krizhevsky]_.
.. [Krizhevsky] Alex Krizhevsky, Ilya Sutskever, Geoffrey E. Hinton. \
ImageNet Classification with Deep Convolutional Neural Networks. \
NIPS 2012.
Args:
        img (numpy.ndarray): An image array to be augmented. This is in
CHW format.
sigma (float): Standard deviation of the Gaussian. In AlexNet
[Krizhevsky]_, this value is 10% of the range of intensity
(25.5 if the range is [0, 255]).
        eigen_value (numpy.ndarray): An array of eigen values. The shape
            has to be (3,). If it is not specified, the values computed from
            ImageNet is used.
        eigen_vector (numpy.ndarray): An array of eigen vectors. The shape
            has to be (3, 3). If it is not specified, the vectors computed
            from ImageNet is used.
Returns:
An image in CHW format.
"""
if sigma <= 0:
return img
# these values are copied from facebook/fb.resnet.torch
if eigen_value is None:
eigen_value = numpy.array((0.2175, 0.0188, 0.0045))
if eigen_vector is None:
eigen_vector = numpy.array((
(0.4009, -0.814, 0.4203),
(0.7192, -0.0045, -0.6948),
(-0.5675, -0.5808, -0.5836)))
alpha = numpy.random.normal(0, sigma, size=3)
img = img.copy()
img += eigen_vector.dot(eigen_value * alpha).reshape(-1, 1, 1)
return img
|
Python
| 0.000001
|
@@ -812,26 +812,27 @@
ImageNet
-is
+are
used.%0A
@@ -1004,18 +1004,19 @@
mageNet
-is
+are
used.%0A%0A
|
6f04f1ed35635c08836f1eee67983abf9735f5db
|
handle more exceptions
|
channelstream/wsgi_views/error_handlers.py
|
channelstream/wsgi_views/error_handlers.py
|
from pyramid.view import exception_view_config
@exception_view_config(context='marshmallow.ValidationError', renderer='json')
def marshmallow_invalid_data(context, request):
request.response.status = 422
return context.messages
@exception_view_config(context='itsdangerous.BadTimeSignature', renderer='json')
def itsdangerous_signer_error(context, request):
request.response.status = 401
return {'request': 'Bad Signature'}
|
Python
| 0.000002
|
@@ -310,24 +310,101 @@
rer='json')%0A
+@exception_view_config(context='itsdangerous.BadSignature', renderer='json')%0A
def itsdange
|
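The added decorator stacks on the existing one, so both itsdangerous exceptions are routed to the same 401 handler; after the diff:

@exception_view_config(context='itsdangerous.BadTimeSignature', renderer='json')
@exception_view_config(context='itsdangerous.BadSignature', renderer='json')
def itsdangerous_signer_error(context, request):
    request.response.status = 401
    return {'request': 'Bad Signature'}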
155fd9ae952a4eba53521739589d5e3462108ed2
|
remove default statement per Gunther's comment
|
chatterbot/ext/django_chatterbot/models.py
|
chatterbot/ext/django_chatterbot/models.py
|
from django.db import models
class Statement(models.Model):
"""A short (<255) chat message, tweet, forum post, etc"""
text = models.CharField(
unique=True,
blank=False,
null=False,
default='<empty>',
max_length=255
)
def __str__(self):
if len(self.text.strip()) > 60:
return '{}...'.format(self.text[:57])
elif len(self.text.strip()) > 0:
return self.text
return '<empty>'
class Response(models.Model):
"""Connection between a response and the statement that triggered it
    Comparable to a ManyToMany "through" table, but without the M2M indexing/relations.
Only the text and number of times it has occurred are currently stored.
Might be useful to store additional features like language, location(s)/region(s),
first created datetime(s), username, user full name, user gender, etc.
    At the very least, occurrences should be an FK to a meta-data table with this info.
"""
statement = models.ForeignKey(
'Statement',
related_name='in_response_to'
)
response = models.ForeignKey(
'Statement',
related_name='+'
)
unique_together = (('statement', 'response'),)
occurrence = models.PositiveIntegerField(default=0)
def __str__(self):
s = self.statement.text if len(self.statement.text) <= 20 else self.statement.text[:17] + '...'
s += ' => '
s += self.response.text if len(self.response.text) <= 40 else self.response.text[:37] + '...'
return s
|
Python
| 0
|
@@ -213,35 +213,8 @@
se,%0A
- default='%3Cempty%3E',%0A
|
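With the default removed, the field definition becomes:

text = models.CharField(
    unique=True,
    blank=False,
    null=False,
    max_length=255
)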
4d81c88627b0f71c765112b9a814fe876239bcc5
|
Print stats for constant points to.
|
src/main/copper/analysis.py
|
src/main/copper/analysis.py
|
import os
from .project import ProjectManager
from .analysis_steps import *
from .analysis_stats import AnalysisStatisticsBuilder as StatBuilder
class Analysis(object):
def __init__(self, config, projects=ProjectManager()):
self.logger = logging.getLogger(__name__)
self._config = config
self._stats = None
self._pipeline = [
CleaningStep(),
FactGenerationStep(),
DatabaseCreationStep(),
SanityCheckStep(projects.SCHEMA),
LoadProjectStep(projects.SYMBOL_LOOKUP),
LoadProjectStep(projects.CALLGRAPH),
LoadProjectStep(projects.POINTS_TO),
]
@property
def pipeline(self):
return [step.check() for step in self._pipeline]
@property
def stats(self):
# Compute stats if needed
if self._stats is None:
self.compute_stats()
return self._stats
@property
def input_files(self):
return [os.path.abspath(f) for f in self._config.input_files]
@property
def output_directory(self):
return os.path.abspath(self._config.output_directory)
@property
def facts_directory(self):
return os.path.join(self.output_directory, 'facts')
@property
def database_directory(self):
return os.path.join(self.output_directory, 'db')
def load_project(self, project):
LoadProjectStep(project).apply(self)
def run(self):
# Run each step of pipeline
for step in self.pipeline:
step.apply(self)
# Compute stats
self.compute_stats()
def compute_stats(self):
self._stats = (
StatBuilder(self)
.count('instruction')
.count('reachable_function')
.count('callgraph:fn_edge', 'call-graph edges')
.count('var_points_to', 'var-points-to')
.count('ptr_points_to', 'ptr-points-to')
.build()
)
|
Python
| 0
|
@@ -1875,32 +1875,95 @@
var-points-to')%0A
+ .count('constant_points_to', 'constant-points-to')%0A
.cou
|
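The hunk inserts one more counter between the var- and ptr-points-to counts; compute_stats becomes:

def compute_stats(self):
    self._stats = (
        StatBuilder(self)
        .count('instruction')
        .count('reachable_function')
        .count('callgraph:fn_edge', 'call-graph edges')
        .count('var_points_to', 'var-points-to')
        .count('constant_points_to', 'constant-points-to')
        .count('ptr_points_to', 'ptr-points-to')
        .build()
    )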
76ec40ba3bcdd6805ac043d5cf3298ba312a15d7
|
Make sure Scatter Redistribute takes only one constraint
|
op_scatter/scatter_func.py
|
op_scatter/scatter_func.py
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# JewelCraft jewelry design toolkit for Blender.
# Copyright (C) 2015-2020 Mikhail Rachinskiy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
from mathutils import Matrix, Vector
from ..lib import mesh, asset
class Scatter:
def execute(self, context):
space_data = context.space_data
use_local_view = bool(space_data.local_view)
collection = context.collection
start = self.start
end = self.end
# Init
# ---------------------------
if self.is_scatter:
num = self.number - 1
curve = context.object
curve.select_set(False)
ob = context.selected_objects[0]
context.view_layer.objects.active = ob
else:
obs = [
(ob, con, con.offset)
for ob in context.selected_objects
for con in ob.constraints
if con.type == "FOLLOW_PATH"
]
obs.sort(key=lambda x: x[2], reverse=True)
num = len(obs) - 1
ob = context.object
for con in ob.constraints:
if con.type == "FOLLOW_PATH":
break
else:
ob, con, _ = obs[0]
curve = con.target
curve.data.use_radius = False
asset.apply_scale(curve)
# Offset
# ---------------------------
ofst = 0.0
if num:
if self.use_absolute_offset:
ob_size = ob.dimensions[1]
base_unit = 100.0 / self.curve_length
ofst = base_unit * (ob_size + self.spacing)
else:
closed_scattering = True if round(end - start, 1) == 100.0 else False
if self.cyclic and closed_scattering:
ofst = (end - start) / (num + 1)
else:
if not self.cyclic:
start = start if start >= 0.0 else 0.0
end = end if end <= 100.0 else 100.0
ofst = (end - start) / num
# Scatter/Redistribute
# ---------------------------
if self.is_scatter:
mat_sca = Matrix.Diagonal(ob.scale).to_4x4()
ob.matrix_world = mat_sca
if self.rot_x:
mat_rot = Matrix.Rotation(self.rot_x, 4, "X")
ob.matrix_world @= mat_rot
if self.rot_z:
mat_rot = Matrix.Rotation(self.rot_z, 4, "Z")
ob.matrix_world @= mat_rot
if self.loc_z:
mat_loc = Matrix.Translation((0.0, 0.0, self.loc_z))
ob.matrix_world @= mat_loc
ofst_fac = start + ofst
for _ in range(num):
ob_copy = ob.copy()
collection.objects.link(ob_copy)
if use_local_view:
ob_copy.local_view_set(space_data, True)
con = ob_copy.constraints.new("FOLLOW_PATH")
con.target = curve
con.offset = -ofst_fac
con.use_curve_follow = True
con.forward_axis = "FORWARD_X"
ofst_fac += ofst
if ob.children:
for child in ob.children:
child_copy = child.copy()
collection.objects.link(child_copy)
child_copy.parent = ob_copy
child_copy.matrix_parent_inverse = child.matrix_parent_inverse
con = ob.constraints.new("FOLLOW_PATH")
con.target = curve
con.offset = -start
con.use_curve_follow = True
con.forward_axis = "FORWARD_X"
else:
ofst_fac = start
world_loc = Vector()
for ob, con, _ in obs:
if self.rot_x:
ob_mat_rot = ob.matrix_basis.to_quaternion().to_matrix().to_4x4()
mat_rot = Matrix.Rotation(self.rot_x, 4, "X")
ob.matrix_basis @= ob_mat_rot.inverted() @ mat_rot @ ob_mat_rot
if self.rot_z:
mat_rot = Matrix.Rotation(self.rot_z, 4, "Z")
ob.matrix_basis @= mat_rot
if self.rot_x or self.loc_z:
dist = (ob.matrix_basis.translation - world_loc).length
mat_rot = ob.matrix_basis.to_quaternion().to_matrix()
ob.matrix_basis.translation = mat_rot @ Vector((0.0, 0.0, dist + self.loc_z))
con.offset = -ofst_fac
ofst_fac += ofst
return {"FINISHED"}
def invoke(self, context, event):
wm = context.window_manager
if self.is_scatter:
if len(context.selected_objects) < 2:
self.report({"ERROR"}, "At least two objects must be selected")
return {"CANCELLED"}
curve = context.object
if curve.type != "CURVE":
self.report({"ERROR"}, "Active object must be a curve")
return {"CANCELLED"}
self.cyclic = curve.data.splines[0].use_cyclic_u
self.curve_length = mesh.curve_length(curve)
wm.invoke_props_popup(self, event)
return self.execute(context)
values = []
curve = None
for ob in context.selected_objects:
for con in ob.constraints:
if con.type == "FOLLOW_PATH":
values.append(-con.offset)
curve = con.target
if not curve:
self.report({"ERROR"}, "Selected objects do not have Follow Path constraint")
return {"CANCELLED"}
self.start = min(values)
self.end = max(values)
self.cyclic = curve.data.splines[0].use_cyclic_u
self.curve_length = mesh.curve_length(curve)
return wm.invoke_props_popup(self, event)
|
Python
| 0
|
@@ -1438,16 +1438,17 @@
obs = %5B
+%5D
%0A
@@ -1456,38 +1456,26 @@
- (ob, con, con.offset)%0A
+app = obs.append%0A%0A
@@ -1516,16 +1516,17 @@
_objects
+:
%0A
@@ -1559,17 +1559,20 @@
straints
-%0A
+:%0A
@@ -1571,32 +1571,34 @@
+
if con.type == %22
@@ -1609,16 +1609,17 @@
OW_PATH%22
+:
%0A
@@ -1623,17 +1623,84 @@
-%5D
+ app((ob, con, con.offset))%0A break
%0A%0A
@@ -6299,32 +6299,58 @@
rve = con.target
+%0A break
%0A%0A if not
|
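Decoded, the first group of hunks rewrites the list comprehension in execute() as an explicit loop that stops at the first Follow Path constraint per object, and the final hunk adds the matching break in invoke(); a best-effort reconstruction of the execute() part:

obs = []
app = obs.append

for ob in context.selected_objects:
    for con in ob.constraints:
        if con.type == "FOLLOW_PATH":
            app((ob, con, con.offset))
            break  # take only the first Follow Path constraint

obs.sort(key=lambda x: x[2], reverse=True)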
ffe5b45cc1040357d517e63a7e46fe64705f60d1
|
Enable test (#17585)
|
python/ray/experimental/workflow/tests/test_virtual_actor_3.py
|
python/ray/experimental/workflow/tests/test_virtual_actor_3.py
|
import pytest
import ray
from ray import workflow
from typing import Optional, Dict, Tuple, List
@ray.workflow.virtual_actor
class InventoryPrice:
def __init__(self):
self._prices: Dict[str, float] = {}
@ray.workflow.virtual_actor.readonly
def get_price(self, name) -> Optional[float]:
return self._prices.get(name)
def update_price(self, name: str, price: float) -> None:
self._prices[name] = price
@ray.workflow.virtual_actor.readonly
def total_value(self, items) -> Tuple[float, List[str]]:
unknown_items = []
value = 0.0
for (name, num) in items.items():
price = self._prices.get(name)
if price is None:
unknown_items.append(name)
else:
value += price * num
return value, unknown_items
def __setstate__(self, prices):
self._prices = prices
def __getstate__(self):
return self._prices
@ray.workflow.virtual_actor
class UserAccount:
def __init__(self, inventory_id: str):
self._goods: Dict[str, int] = {}
self._balance: float = 0
self._inventory_id = inventory_id
def add_money(self, amount: float) -> float:
if amount < 0:
raise ValueError("amount can't be negative")
self._balance += amount
return self._balance
def withdraw_money(self, amount: float) -> float:
if amount < 0:
raise ValueError("amount can't be negative")
if amount > self._balance:
raise ValueError("Withdraw more money than balance")
self._balance -= amount
return self._balance
def buy(self, name: str, price: float):
if price > self._balance:
raise ValueError("Not enough balance")
self._balance -= price
if name not in self._goods:
self._goods[name] = 0
self._goods[name] += 1
actor = workflow.get_actor(self._inventory_id)
actor.update_price.run(name, price)
return self._balance
def sell(self, name: str, price: float):
if name not in self._goods:
raise ValueError("No such item")
self._goods[name] -= 1
if self._goods[name] == 0:
self._goods.pop(name)
self._balance += price
actor = workflow.get_actor(self._inventory_id)
actor.update_price.run(name, price)
return self._balance
def __setstate__(self, state):
self._goods, self._balance, self._inventory_id = state
def __getstate__(self):
return self._goods, self._balance, self._inventory_id
@workflow.virtual_actor.readonly
def goods_value(self):
actor = workflow.get_actor(self._inventory_id)
return actor.total_value.run(self._goods)
@workflow.virtual_actor.readonly
def balance(self):
return self._balance
@pytest.mark.parametrize(
"workflow_start_regular",
[{
"num_cpus": 4, # increase CPUs to add pressure
}],
indirect=True)
def test_writer_actor_pressure_test(workflow_start_regular):
inventory_actor = InventoryPrice.get_or_create("inventory")
ray.get(inventory_actor.ready())
user = UserAccount.get_or_create("user", "inventory")
ray.get(user.ready())
balance_1 = user.add_money.run_async(100)
balance_2 = user.buy.run_async("item_1", 10)
balance_3 = user.buy.run_async("item_1", 10)
balance_4 = user.sell.run_async("item_1", 5)
# get the result out of order
assert ray.get(balance_1) == 100
assert ray.get(balance_4) == 85
assert ray.get(balance_2) == 90
assert ray.get(balance_3) == 80
assert user.balance.run() == 85
assert user.goods_value.run() == (5, [])
|
Python
| 0
|
@@ -3730,8 +3730,96 @@
(5, %5B%5D)%0A
+%0A%0Aif __name__ == %22__main__%22:%0A import sys%0A sys.exit(pytest.main(%5B%22-v%22, __file__%5D))%0A
|
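The appended hunk decodes to the usual pytest entry point at the end of the test file:

if __name__ == "__main__":
    import sys
    sys.exit(pytest.main(["-v", __file__]))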
97dcb530c92d7f62eb69e5bea7696b8733cb326f
|
Fix StringIO call
|
src/crammit/__init__.py
|
src/crammit/__init__.py
|
###############################################################################
#
# Copyright (c) 2012 Ruslan Spivak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
__author__ = 'Ruslan Spivak <ruslan.spivak@gmail.com>'
import os
import fnmatch
import hashlib
import sys
import optparse
import gzip
from collections import defaultdict
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import yaml
import slimit
import cssmin
OUTPUT_DIR = 'assets'
CONFIG_FILE = 'assets.yaml'
ASSETS_INFO_FILE = 'assetsinfo.yaml'
def _log(msg):
sys.stderr.write('%s\n' % msg)
def load_config(path):
return yaml.load(open(path))
class AssetManager(object):
"""I manage assets bundles."""
def __init__(self, config, basedir=None):
self.config = config
self.basedir = basedir or os.getcwd()
def _get_bundles_by_type(self, type):
"""Get a dictionary of bundles for requested type.
Args:
type: 'javascript' or 'css'
"""
bundles = {}
bundle_definitions = self.config.get(type)
if bundle_definitions is None:
return bundles
# bundle name: common
for bundle_name, paths in bundle_definitions.items():
bundle_files = []
# path: static/js/vendor/*.js
for path in paths:
# pattern: /tmp/static/js/vendor/*.js
pattern = abspath = os.path.join(self.basedir, path)
# assetdir: /tmp/static/js/vendor
# assetdir contents:
# - /tmp/static/js/vendor/t1.js
# - /tmp/static/js/vendor/t2.js
# - /tmp/static/js/vendor/index.html
assetdir = os.path.dirname(abspath)
# expanded_fnames after filtering using the pattern:
# - /tmp/static/js/vendor/t1.js
# - /tmp/static/js/vendor/t2.js
fnames = [os.path.join(assetdir, fname)
for fname in os.listdir(assetdir)]
expanded_fnames = fnmatch.filter(fnames, pattern)
bundle_files.extend(expanded_fnames)
bundles[bundle_name] = bundle_files
return bundles
def _compress(self, data):
compresslevel = 9 # max
buffer = StringIO.StringIO()
with gzip.GzipFile(fileobj=buffer, mode='wb',
compresslevel=compresslevel) as fout:
fout.write(data)
return buffer.getvalue()
def _process_bundle(self, name, paths, type):
sha1, sep = '', ''
raw_data = ''.join(open(path).read() for path in paths)
if self.config.get('fingerprint'):
sha1, sep = hashlib.sha1(raw_data).hexdigest(), '-'
file_ext = {
'javascript': '.js',
'css': '.css',
}.get(type)
fname_template = '%s%s%s{suffix}%s{gz}' % (name, sep, sha1, file_ext)
raw_fname = fname_template.format(suffix='', gz='')
self.write(raw_fname, raw_data)
if type == 'javascript':
minified_data = slimit.minify(raw_data, mangle=True)
elif type == 'css':
minified_data = cssmin.cssmin(raw_data)
        minified_fname = fname_template.format(suffix='.min', gz='')
        self.write(minified_fname, minified_data)
gzipped_data = self._compress(minified_data)
gzipped_fname = fname_template.format(suffix='.min', gz='.gz')
self.write(gzipped_fname, gzipped_data)
bundle_info = {
name: {
'fingerprint': sha1,
'output': {
'raw': raw_fname,
                    'min': minified_fname,
'gz': gzipped_fname,
},
'size': {
'raw': len(raw_data),
'min': len(minified_data),
'gz': len(gzipped_data),
},
}
}
return bundle_info
def write(self, fname, data):
output = os.path.abspath(self.config.get('output', OUTPUT_DIR))
if not os.path.exists(output):
os.makedirs(output)
path = os.path.join(output, fname)
with open(path, 'w') as fout:
fout.write(data)
def write_info(self, bundles_info):
self.write(ASSETS_INFO_FILE,
yaml.dump(dict(bundles_info), default_flow_style=False))
def get_bundles(self):
bundles = {
'javascript': self._get_bundles_by_type('javascript'),
'css': self._get_bundles_by_type('css'),
}
return bundles
def process_bundles(self):
info = defaultdict(dict)
bundles = self.get_bundles()
for bundle_type in bundles:
for name, paths in bundles[bundle_type].items():
bundle_info = self._process_bundle(name, paths, bundle_type)
info[bundle_type].update(bundle_info)
return info
def main():
parser = optparse.OptionParser()
parser.add_option('-c', '--config', dest='config',
help='path to assets.yaml (default: ./assets.yaml)')
parser.add_option('-b', '--basedir', dest='basedir',
help=('base directory to which all '
'assets paths are relative (default: ./)'))
options, args = parser.parse_args()
config_path = options.config
if config_path is None:
config_path = os.path.join(os.getcwd(), CONFIG_FILE)
if not os.path.exists(config_path):
_log('Could not find the asset configuration file "%s"' % config_path)
sys.exit(1)
config = load_config(config_path)
manager = AssetManager(config, options.basedir)
bundles_info = manager.process_bundles()
manager.write_info(bundles_info)
|
Python
| 0.00001
|
@@ -3414,17 +3414,8 @@
r =
-StringIO.
Stri
|
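The fix drops the stray module prefix: "from cStringIO import StringIO" binds the class itself, so the buffer is created with a bare call. After the diff:

def _compress(self, data):
    compresslevel = 9  # max
    buffer = StringIO()  # StringIO is the imported class, not a module
    with gzip.GzipFile(fileobj=buffer, mode='wb',
                       compresslevel=compresslevel) as fout:
        fout.write(data)
    return buffer.getvalue()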