commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
1c6ea402b918ee23c13fc6dc5bef80b97f6f9d67 | Add check_tar script. | mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju | check_tar.py | check_tar.py | #!/usr/bin/env python3
from argparse import ArgumentParser
import logging
import os
import tarfile
import sys
class TarfileNotFound(Exception):
    """Error raised when the requested tar archive does not exist."""
class TestedDirNotFound(Exception):
    """Error raised when the directory of tested texts does not exist."""
def get_fpc_text(juju_tar):
    """Return the fallback-public-cloud.yaml text from a tarball.

    :param juju_tar: An open ``tarfile.TarFile`` to search.
    :return: Raw bytes of the single fallback-public-cloud.yaml member.
    :raises Exception: If the tarfile contains more or fewer than one
        fallback-public-cloud.yaml.
    """
    fpc_members = [
        m for m in juju_tar.getmembers()
        if os.path.basename(m.name) == 'fallback-public-cloud.yaml']
    if len(fpc_members) == 1:
        return juju_tar.extractfile(fpc_members[0]).read()
    if len(fpc_members) == 0:
        raise Exception('Tarfile has no fallback-public-cloud.')
    # Bug fix: the original message read 'Tarfile {:d} copies ...',
    # dropping the verb 'has'.
    raise Exception(
        'Tarfile has {:d} copies of fallback-public-cloud.'.format(
            len(fpc_members)))
def check_tar(tested_texts_dir, tar_filename):
    """Compare a tarball's fallback-public-cloud.yaml to tested copies.

    tested_texts_dir is the name of a directory holding the tested
    fallback-public-cloud texts.
    tar_filename is the filename of the tarfile.

    Returns 0 when a tested copy matches, 1 otherwise.
    """
    try:
        archive = tarfile.open(tar_filename, 'r:*')
    except FileNotFoundError:
        raise TarfileNotFound('Tarfile not found: "{}"'.format(tar_filename))
    with archive:
        fpc_text = get_fpc_text(archive)
    try:
        candidates = os.listdir(tested_texts_dir)
    except FileNotFoundError:
        raise TestedDirNotFound(
            'Tested dir not found: "{}"'.format(tested_texts_dir))
    for candidate in candidates:
        # Skip hidden files (e.g. editor backups, .gitignore).
        if candidate.startswith('.'):
            continue
        with open(os.path.join(tested_texts_dir, candidate), 'rb') as handle:
            if handle.read() == fpc_text:
                logging.info('Matched {}.'.format(candidate))
                return 0
    print(
        'fallback-public-clouds.yaml does not match a tested version.\n'
        'Please submit it to the QA team for testing before landing.',
        file=sys.stderr)
    return 1
def main():
    """CLI entry point: check a tarball, returning a process exit code."""
    logging.basicConfig(level=logging.INFO)
    parser = ArgumentParser()
    parser.add_argument('tested_texts_dir')
    parser.add_argument('tarfile')
    args = parser.parse_args()
    try:
        return check_tar(args.tested_texts_dir, args.tarfile)
    except (TarfileNotFound, TestedDirNotFound) as e:
        # Report expected lookup failures as a message, not a traceback.
        print(e, file=sys.stderr)
        return 1
if __name__ == '__main__':
sys.exit(main())
| agpl-3.0 | Python | |
bd1ad829908f0f0a029e4e00bdd2f12512fff297 | add formaction class | RasaHQ/rasa_core,RasaHQ/rasa_nlu,RasaHQ/rasa_core,RasaHQ/rasa_nlu,RasaHQ/rasa_core,RasaHQ/rasa_nlu | rasa_core/actions/forms.py | rasa_core/actions/forms.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.actions.action import Action
from rasa_core.dispatcher import Button
from rasa_core.events import ConversationPaused, SlotSet
import logging
import random
logger = logging.getLogger(__name__)
class FormField(object):
    """Base class for a single form input; subclasses implement extract()."""

    def validate(self, value):
        """A field is considered valid as soon as it holds any value."""
        return not (value is None)
class EntityFormField(FormField):
    """Form field filled from a named entity in the latest user message."""

    def __init__(self, entity_name, slot_name):
        self.entity_name = entity_name
        self.slot_name = slot_name

    def extract(self, tracker):
        """Return a SlotSet event when the entity is present, else no events.

        If the entity occurs several times, the last occurrence wins.
        """
        value = None
        for entity in tracker.latest_message.entities:
            if entity["entity"] == self.entity_name:
                value = entity["value"]
        if not value:
            return []
        return [SlotSet(self.slot_name, value)]
class BooleanFormField(FormField):
    """Form field set to True/False depending on the detected intent."""

    def __init__(self, slot_name, affirm_intent, deny_intent):
        self.slot_name = slot_name
        self.affirm_intent = affirm_intent
        self.deny_intent = deny_intent

    def extract(self, tracker):
        """Map the affirm/deny intents onto a boolean slot value.

        Any other intent leaves the slot value as None.
        """
        intent = tracker.latest_message.intent["name"]
        if intent == self.affirm_intent:
            value = True
        elif intent == self.deny_intent:
            value = False
        else:
            value = None
        return [SlotSet(self.slot_name, value)]
class FormAction(Action):
    """Action that fills a set of required slots before submitting.

    Subclasses define REQUIRED_FIELDS (a list of FormField instances)
    and override submit() with whatever should happen once every slot
    has been filled.
    """

    # NOTE: class-level attributes are shared by subclasses unless overridden.
    REQUIRED_FIELDS = []
    RANDOMIZE = True

    def should_request_slot(self, tracker, slot_name, events=None):
        """True if `slot_name` is unset and not about to be set by `events`."""
        existing_val = tracker.get_slot(slot_name)
        pending = [e.key for e in (events or []) if e.key == slot_name]
        return existing_val is None and slot_name not in pending

    def get_requested_slot(self, tracker):
        """Extract events for the slot the bot last asked the user about."""
        requested_slot = tracker.get_slot("requested_slot")
        if requested_slot is None:
            return []
        required = self.REQUIRED_FIELDS[:]
        if self.RANDOMIZE:
            # With several fields for the same slot, pick one at random.
            random.shuffle(required)
        fields = [f for f in required if f.slot_name == requested_slot]
        # Raises IndexError if no field matches the requested slot, like
        # the original (its try/except merely re-raised).
        return fields[0].extract(tracker)

    def ready_to_submit(self, tracker, events=None):
        """True once no required slot still needs to be requested.

        Bug fix: the original referenced an undefined free variable
        `events` (NameError at runtime); it is now an explicit optional
        parameter defaulting to no pending events.
        """
        return not any(
            self.should_request_slot(tracker, field.slot_name, events)
            for field in self.REQUIRED_FIELDS)

    def run(self, dispatcher, tracker, domain):
        """Ask for the next missing slot, or submit when all are filled."""
        events = self.get_requested_slot(tracker)
        if self.ready_to_submit(tracker, events):
            return self.submit(dispatcher, tracker, domain)
        for field in self.REQUIRED_FIELDS:
            if self.should_request_slot(tracker, field.slot_name, events):
                dispatcher.utter_template("utter_ask_{}".format(field.slot_name))
                events.append(SlotSet("requested_slot", field.slot_name))
                return events
        return self.submit(dispatcher, tracker, domain)

    def submit(self, dispatcher, tracker, domain):
        """Placeholder completion hook; subclasses should override."""
        dispatcher.utter_message("done!")
        return []
| apache-2.0 | Python | |
4836db9ba6686557ed005b8518098b9dd1a302e1 | Add drown* to offensive regex | hugovk/CyberPrefixer,molly/CyberPrefixer | offensive.py | offensive.py | import re
# Case-insensitive pattern of words (and their inflections) relating to
# death, violence, disasters, etc. Presumably used by the caller to filter
# out headlines unsuitable for the bot -- confirm against the consumer.
offensive = re.compile(
    r"\b(deaths?|dead(ly)?|die(s|d)?|hurts?|(sex|child)[ -]?(abuse|trafficking)|injur(e|i?es|ed|y)|"
    r"kill(ing|ed|er|s)?s?|wound(ing|ed|s)?|fatal(ly|ity)?|shoo?t(s|ing|er)?s?|crash(es|ed|ing)?|"
    r"attack(s|ers?|ing|ed)?|murder(s|er|ed|ing)?s?|hostages?|rap(e|es|ed|ing)|assault(s|ed)?|"
    r"pile-?ups?|massacre(s|d)?|assassinate(d|s)?|sla(y|in|yed|ys)|victims?|tortur(e|ed|ing|es)|"
    r"execut(e|ion|ed)s?|gun(man|men|ned)|suicid(e|al|es)|bomb(s|ed|ing|ings|er|ers)?|"
    r"mass[- ]?graves?|bloodshed|state[- ]?of[- ]?emergency|al[- ]?Qaeda|blasts?|violen(t|ce)|"
    r"lethal|cancer(ous)?|stab(bed|bing|ber)?|casualt(y|ies)|sla(y|ying|yer|in)|"
    r"drown(s|ing|ed|ings)?)\W?\b", flags=re.IGNORECASE)
| import re
# Case-insensitive pattern of words (and their inflections) relating to
# death, violence, disasters, etc. Presumably used by the caller to filter
# out headlines unsuitable for the bot -- confirm against the consumer.
offensive = re.compile(
    r"\b(deaths?|dead(ly)?|die(s|d)?|hurts?|(sex|child)[ -]?(abuse|trafficking)|injur(e|i?es|ed|y)|"
    r"kill(ing|ed|er|s)?s?|wound(ing|ed|s)?|fatal(ly|ity)?|shoo?t(s|ing|er)?s?|crash(es|ed|ing)?|"
    r"attack(s|ers?|ing|ed)?|murder(s|er|ed|ing)?s?|hostages?|rap(e|es|ed|ing)|assault(s|ed)?|"
    r"pile-?ups?|massacre(s|d)?|assassinate(d|s)?|sla(y|in|yed|ys)|victims?|tortur(e|ed|ing|es)|"
    r"execut(e|ion|ed)s?|gun(man|men|ned)|suicid(e|al|es)|bomb(s|ed|ing|ings|er|ers)?|"
    r"mass[- ]?graves?|bloodshed|state[- ]?of[- ]?emergency|al[- ]?Qaeda|blasts?|violen(t|ce)|"
    r"lethal|cancer(ous)?|stab(bed|bing|ber)?|casualt(y|ies)|sla(y|ying|yer|in))\W?\b",
    flags=re.IGNORECASE)
| mit | Python |
fdc31769208dc3a6fc81758083de1d98a968262b | Add new py-asserts package (#14038) | LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/py-asserts/package.py | var/spack/repos/builtin/packages/py-asserts/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyAsserts(PythonPackage):
    """Stand-alone Assertions."""

    homepage = "https://github.com/srittau/python-asserts"
    url      = "https://github.com/srittau/python-asserts/archive/v0.10.0.tar.gz"

    # sha256 checksums of the GitHub release tarballs, newest first.
    version('0.10.0', sha256='3466cf32c36d97ee2802121438003693546f590be81095317d0d698066bb5df7')
    version('0.9.1',  sha256='a73ea6b2ddc77364a5f0e13197f00662485944a6dd31c1f7555ff2f99c0f7319')
    version('0.9.0',  sha256='e3b8b06309234f9a7c6e4679e0f3cc127cf18da95c30fbb524ff47d439e22b17')
    version('0.8.6',  sha256='8a477746dbc501ac0d1fe9e593a1faafa7d361ceca79d994d3b2ebeecc7fbf32')

    depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
| lgpl-2.1 | Python | |
e101665e5e206c66198a9f30fcdf2a2ff53a128b | Update version to 1.5 (#5524) | skosukhin/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,skosukhin/spack,tmerrick1/spack,matthiasdiener/spack,lgarren/spack,mfherbst/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,skosukhin/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack,tmerrick1/spack,LLNL/spack,skosukhin/spack,lgarren/spack,krafczyk/spack,tmerrick1/spack,lgarren/spack,EmreAtes/spack,matthiasdiener/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,skosukhin/spack,matthiasdiener/spack,lgarren/spack,LLNL/spack,EmreAtes/spack,krafczyk/spack,iulian787/spack,iulian787/spack,LLNL/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,mfherbst/spack,krafczyk/spack | var/spack/repos/builtin/packages/r-jsonlite/package.py | var/spack/repos/builtin/packages/r-jsonlite/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RJsonlite(RPackage):
    """A fast JSON parser and generator optimized for statistical data and the
    web. Started out as a fork of 'RJSONIO', but has been completely rewritten
    in recent versions. The package offers flexible, robust, high performance
    tools for working with JSON in R and is particularly powerful for building
    pipelines and interacting with a web API. The implementation is based on
    the mapping described in the vignette (Ooms, 2014). In addition to
    converting JSON data from/to R objects, 'jsonlite' contains functions to
    stream, validate, and prettify JSON data. The unit tests included with the
    package verify that all edge cases are encoded and decoded consistently for
    use with dynamic data in systems and applications."""

    homepage = "https://github.com/jeroenooms/jsonlite"
    url      = "https://cran.rstudio.com/src/contrib/jsonlite_1.5.tar.gz"
    list_url = homepage

    # Archive checksums for each released CRAN tarball, newest first.
    version('1.5', '2a81c261a702fccbbd5d2b32df108f76')
    version('1.2', '80cd2678ae77254be470f5931db71c51')
    version('1.0', 'c8524e086de22ab39b8ac8000220cc87')
    version('0.9.21', '4fc382747f88a79ff0718a0d06bed45d')
| ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RJsonlite(RPackage):
    """A fast JSON parser and generator optimized for statistical data and the
    web. Started out as a fork of 'RJSONIO', but has been completely rewritten
    in recent versions. The package offers flexible, robust, high performance
    tools for working with JSON in R and is particularly powerful for building
    pipelines and interacting with a web API. The implementation is based on
    the mapping described in the vignette (Ooms, 2014). In addition to
    converting JSON data from/to R objects, 'jsonlite' contains functions to
    stream, validate, and prettify JSON data. The unit tests included with the
    package verify that all edge cases are encoded and decoded consistently for
    use with dynamic data in systems and applications."""

    homepage = "https://github.com/jeroenooms/jsonlite"
    url      = "https://cran.r-project.org/src/contrib/jsonlite_1.2.tar.gz"

    # Archive checksums for each released CRAN tarball, newest first.
    version('1.2', '80cd2678ae77254be470f5931db71c51')
    version('1.0', 'c8524e086de22ab39b8ac8000220cc87')
    version('0.9.21', '4fc382747f88a79ff0718a0d06bed45d')
| lgpl-2.1 | Python |
85a8741564266fae7f7e4b1ccd518324277ad58a | Create diss.py | Sitryk/Sitryk-Cogs | diss/diss.py | diss/diss.py | from __main__ import send_cmd_help
from discord.ext import commands
from random import choice
import discord
class Diss:
    """Example cog for dissing or respecting a user"""

    def __init__(self, bot):
        self.bot = bot
        # Canned lines keyed by user name.
        self.users = { 'name1' : ['disses', 'disses2'], 'name2' : ['more disses', 'another diss']}

    @commands.command(name="diss", no_pm=True)
    async def _diss(self, name):
        """Say a random canned diss for `name`, or report an unknown user."""
        # A direct membership test replaces the original's redundant
        # userInDict flag variable.
        if name in self.users:
            await self.bot.say(choice(self.users[name]))
        else:
            await self.bot.say("User is not listed.")
def setup(bot):
    """Cog entry point invoked by the bot framework on module load."""
    bot.add_cog(Diss(bot))
| mit | Python | |
a244ffe1814e8ad7966f90b212faebfaf35424be | Initialize P01_tablePrinter | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter06/PracticeProjects/P01_tablePrinter.py | books/AutomateTheBoringStuffWithPython/Chapter06/PracticeProjects/P01_tablePrinter.py | # This program displays a list of strings in a table
#
# Write a function named printTable() that takes a list of lists of strings and
# displays it in a well-organized table with each column right-justified. Assume
# that all the inner lists will contain the same number of strings.
#
# For example, the value could look like this:
# tableData = [['apples', 'oranges', 'cherries', 'banana'],
# ['Alice', 'Bob', 'Carol', 'David'],
# ['dogs', 'cats', 'moose', 'goose']]
#
# Your printTable() function would print the following:
# apples Alice dogs
# oranges Bob cats
# cherries Carol moose
# banana David goose
| mit | Python | |
feae414803ba3a6458660314f212e5e20820d979 | add core codes for distances | tech-teach/microservice-topology,tech-teach/microservice-topology,tech-teach/microservice-topology,tech-teach/microservice-topology | core/core.py | core/core.py | # internal libraries
import multiprocessing
from time import time
import csv
import sys
# external libraries
from sklearn.metrics import pairwise, accuracy_score
import numpy as np
NOT_BOOL_METRICS = [
'euclidean',
'l2',
'l1',
'manhattan',
'minkowski',
'canberra',
'cosine',
'cityblock',
'braycurtis',
'chebyshev',
'correlation',
'hamming',
'mahalanobis',
'seuclidean',
'sqeuclidean',
# "wminkowski"
]
BOOL_METRIC = [
'dice',
'jaccard',
'kulsinski',
'matching',
'rogerstanimoto',
'russellrao',
'sokalmichener',
'sokalsneath',
'yule',
]
def get_accuracy(file_handler, n_workers):
    """Leave-one-out 1-nearest-neighbour accuracy per distance metric.

    :param file_handler: Open handle (or path) of a CSV whose last column
        is the class label and remaining columns are features.
    :param n_workers: Worker count forwarded to pairwise_distances(n_jobs=...).
    :return: List of accuracies, one per metric in NOT_BOOL_METRICS (same order).
    """
    csv_content = np.loadtxt(
        file_handler,  # Csv file to process
        delimiter=","
    )
    data = csv_content[:, 0:-1]
    labels = csv_content[:, -1]
    accuracy_list = []
    for metric in NOT_BOOL_METRICS:
        matrix_distances = pairwise.pairwise_distances(
            data,
            metric=metric,
            n_jobs=n_workers  # Number of workers
        )
        # Exclude each sample from being its own nearest neighbour.
        np.fill_diagonal(matrix_distances, float("inf"))
        # Vectorised nearest-neighbour lookup replaces the per-row
        # Python loop of np.argmin calls.
        supposed_labels = labels[np.argmin(matrix_distances, axis=1)]
        accuracy_list.append(accuracy_score(labels, supposed_labels))
    return accuracy_list
| mit | Python | |
7c750650961a6aac4e189b46e1353a3ec003dd56 | add test-send-file-ipv6.py but disabled for now because of Python bug | freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut | tests/twisted/avahi/test-send-file-ipv6.py | tests/twisted/avahi/test-send-file-ipv6.py | import avahi
from saluttest import exec_test
from avahitest import AvahiAnnouncer, get_host_name
from xmppstream import setup_stream_listener6
from file_transfer_helper import SendFileTest, CHANNEL_TYPE_FILE_TRANSFER, FT_STATE_PENDING, \
FT_STATE_CHANGE_REASON_NONE, FT_STATE_OPEN
print "FIXME: This is disabled because of a bug in Python's httplib. http://bugs.python.org/issue5111"
# exiting 77 causes automake to consider the test to have been skipped
raise SystemExit(77)
class SendFileTransferIPv6(SendFileTest):
    """File-send test variant announcing the contact over IPv6 (PROTO_INET6)."""
    CONTACT_NAME = 'test-ft'

    def announce_contact(self, name=CONTACT_NAME):
        # Minimal presence TXT record for the announced contact.
        basic_txt = { "txtvers": "1", "status": "avail" }
        self.contact_name = '%s@%s' % (name, get_host_name())
        # IPv6 listener/announcer instead of the base class's IPv4 pair.
        self.listener, port = setup_stream_listener6(self.q, self.contact_name)
        self.contact_service = AvahiAnnouncer(self.contact_name, "_presence._tcp", port,
            basic_txt, proto=avahi.PROTO_INET6)

    def provide_file(self):
        SendFileTest.provide_file(self)
        # state is still Pending as remote didn't accept the transfer yet
        state = self.ft_props.Get(CHANNEL_TYPE_FILE_TRANSFER, 'State')
        assert state == FT_STATE_PENDING

    def client_request_file(self):
        SendFileTest.client_request_file(self)
        e = self.q.expect('dbus-signal', signal='InitialOffsetDefined')
        offset = e.args[0]
        # We don't support resume
        assert offset == 0
        # Channel is open. We can start to send the file
        e = self.q.expect('dbus-signal', signal='FileTransferStateChanged')
        state, reason = e.args
        assert state == FT_STATE_OPEN
        assert reason == FT_STATE_CHANGE_REASON_NONE
if __name__ == '__main__':
test = SendFileTransferIPv6()
exec_test(test.test)
| lgpl-2.1 | Python | |
bec2f43e9e6a6f858c97ae19c07eb35cd912457c | Create 3_flickrlikes_assembledata_directory.py | sharathchandra92/flickrapi_downloadfavorites,sharathchandra92/flickrapi_downloadfavorites | 3_flickrlikes_assembledata_directory.py | 3_flickrlikes_assembledata_directory.py | #!/usr/bin/env python
"""
This will create train and test txt files and a directory named images in flickr_likes parent directory,
where all the folders exist. Then, run following commands to copy all files into the images directory
# find ./ -name '*.jpg' -exec cp '{}' ./ \; To copy all images in subdirectories into the parent directory
# mv *.jpg images/
"""
import os
import urllib
import hashlib
import argparse
import numpy as np
import pandas as pd
import multiprocessing
# Flickr returns a special image if the request is unavailable.
MISSING_IMAGE_SHA1 = '6a92790b1c2a301c6e7ddef645dca1f53ea97ac2'
example_dirname = os.path.abspath('/home/sharathc001/caffe-master/data/flickr_likes/')
caffe_dirname = os.path.abspath(os.path.join(example_dirname, '../..'))
training_dirname = os.path.abspath('/home/sharathc001/caffe-master/data/flickr_likes/')
def download_image(args_tuple):
    """For use with multiprocessing map. Returns False on fail.

    Bug fixes vs. original: the docstring claimed the filename was
    returned on failure (it never was), and the file is now read in
    binary mode so hashing image bytes is correct.
    """
    try:
        url, filename = args_tuple
        if not os.path.exists(filename):
            urllib.urlretrieve(url, filename)
        with open(filename, 'rb') as f:
            # Flickr serves a known placeholder image for missing photos;
            # treat downloading it as a failure.
            assert hashlib.sha1(f.read()).hexdigest() != MISSING_IMAGE_SHA1
        return True
    except KeyboardInterrupt:
        raise Exception()  # multiprocessing doesn't catch keyboard exceptions
    except:
        return False
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Download a subset of Flickr Style to a directory')
    parser.add_argument(
        '-s', '--seed', type=int, default=0,
        help="random seed")
    parser.add_argument(
        '-i', '--images', type=int, default=-1,
        help="number of images to use (-1 for all [default])",
    )
    parser.add_argument(
        '-w', '--workers', type=int, default=-1,
        help="num workers used to download images. -x uses (all - x) cores [-1 default]."
    )
    args = parser.parse_args()
    # Seed NumPy so the shuffle below is reproducible.
    np.random.seed(args.seed)
    # Read data, shuffle order, and subsample.
    csv_filename = os.path.join(example_dirname, 'psychoflickr_originalset_output.csv')
    df = pd.read_csv(csv_filename, index_col=0)
    df = df.iloc[np.random.permutation(df.shape[0])]
    # Make directory for images and get local filenames.
    # NOTE(review): training_dirname is assigned at module level above, so
    # this None fallback can never fire -- confirm whether that is intended.
    if training_dirname is None:
        training_dirname = os.path.join(caffe_dirname, 'data/flickr_style')
    images_dirname = os.path.join(training_dirname, 'images')
    if not os.path.exists(images_dirname):
        os.makedirs(images_dirname)
    # Local filename is the last URL path component inside images_dirname.
    df['image_filename'] = [os.path.join(images_dirname, _.split('/')[-1]) for _ in df['image_url']]
    # Only keep rows with valid images, and write out training file lists.
    for split in ['train', 'test']:
        split_df = df[df['_split'] == split]
        filename = os.path.join(training_dirname, '{}.txt'.format(split))
        split_df[['image_filename', 'label']].to_csv(filename, sep=' ', header=None, index=None)
    print('Writing train/val for {} successfully downloaded images.'.format(df.shape[0]))
| mit | Python | |
bb7abbcd87bdce4709b37d83742289fbf9a155a3 | Add student dilemma pb | eliemichel/IRL | student.py | student.py | """
This module implements the "student dilemma" as briefly presented in RL lecture
as a gym module.
"""
# code structure is from https://github.com/openai/gym/blob/master/gym/envs/classic_control/cartpole.py
import sys
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
class StudentDilemmaEnv(gym.Env):
    """The "student dilemma" MDP (2 actions, 7 states) as a gym environment."""
    metadata = {'render.modes': ['human', 'ansi']}

    def __init__(self):
        k = 2  # number of actions
        n = 7  # number of states
        REST, WORK = 0, 1
        self.action_space = spaces.Discrete(k)
        self.state_space = range(n)
        # transition[a, s, s'] = P(s' | s, a)
        self.transition = np.ndarray((k, n, n))
        self.transition[REST] = np.array([
            [0.5, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.6, 0.0, 0.0, 0.4, 0.0, 0.0],
            [0.0, 0.4, 0.6, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.1, 0.0, 0.9, 0.0],
            [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
            [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
        ])
        self.transition[WORK] = np.array([
            [0.5, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0],
            [0.3, 0.0, 0.7, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
            [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        ])
        # Every row must be a probability distribution over next states.
        assert (self.transition.sum(axis=2) == 1.0).all(), "invalid transition matrix"
        self.reward = np.array([
            0, 1, -1, -10, -10, 100, -1000
        ])
        self._seed()

    def _seed(self, seed=None):
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def _step(self, action):
        assert self.action_space.contains(action), "%r (%s) invalid"%(action, type(action))
        # Bug fix: sample with the seeded RNG created in _seed() rather
        # than the global numpy RNG, so seeding actually controls rollouts.
        self.state = self.np_random.choice(self.state_space, p=self.transition[action, self.state])
        reward = self.reward[self.state]
        done = False
        return self.state, reward, done, {}

    def _reset(self):
        self.state = 0
        return self.state

    def _render(self, mode='human', close=False):
        if close:
            return
        # Bug fix: the original referenced StringIO without importing it,
        # so 'ansi' mode raised NameError; use io.StringIO instead.
        import io
        outfile = io.StringIO() if mode == 'ansi' else sys.stdout
        outfile.write("state: {}\n".format(self.state))
        return outfile
| mit | Python | |
9acd61f160035c240d6db3fca97821f08d397a02 | Add this one more train/validation data generator | kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,crista/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style | 36-dnn/generate_c2w_data.py | 36-dnn/generate_c2w_data.py | import os, sys
import collections
import numpy as np
import re, string
# Fixed width of each emitted character line / cap on words per line.
MAX_LINE_SIZE = 80
MAX_WORDS_IN_LINE = 20
# Load the whole corpus once at import time; newlines become spaces so the
# text can be sliced as a single continuous character stream.
all_chars = ""
with open('pride-and-prejudice.txt') as f:
    all_chars = f.read().replace('\n', ' ')
# Unique lowercase words of 2+ letters; note list(set(...)) gives an
# arbitrary ordering, so words[:VOCAB_SIZE] is an arbitrary vocabulary slice.
all_words = re.findall('[a-z]{2,}', all_chars.lower())
words = list(set(all_words))
def generate_pair():
    """Return one random (character line, word list) training sample.

    Relies on module globals: all_chars, words and VOCAB_SIZE (set by
    generate_data before this is called).
    """
    # Grab a slice of the input file of size MAX_LINE_SIZE
    index = np.random.randint(0, len(all_chars) - MAX_LINE_SIZE)
    cquery = ' ' + all_chars[index:index+MAX_LINE_SIZE - 2] + ' '
    # Replace unknown words with known ones
    wquery = set(re.findall('[a-z]{2,}', cquery.lower()))
    for w in wquery:
        # NOTE(review): words[:VOCAB_SIZE] is recomputed per word and the
        # replacement is drawn from the first half of the vocabulary --
        # presumably to favour common words; confirm.
        if w not in words[:VOCAB_SIZE]:
            # Replace ALL occurrences in query with the same replacement word
            other = words[np.random.randint(0, VOCAB_SIZE/2)]
            exp = '[^a-z]' + w + '[^a-z]'
            indices = [(m.start()+1, m.end()-1) for m in re.finditer(exp, cquery.lower())]
            # Replace right-to-left so earlier indices stay valid.
            for b, e in reversed(indices):
                cquery = cquery[0:b] + other + cquery[e:]
    # Make sure the size of all chars is less than MAX_LINE_SIZE
    if len(cquery) >= MAX_LINE_SIZE:
        # Truncate at the last space and pad back out with spaces.
        last_sp = cquery[:MAX_LINE_SIZE].rfind(' ')
        cquery = cquery[:last_sp] + ' ' * (MAX_LINE_SIZE - last_sp)
    # OK, now that we have the sequence of chars, find its sequence of words
    # [TODO] Remember to remove stop words
    list_of_words = re.findall('[a-z]{2,}', cquery.lower())
    return cquery.strip(), list_of_words
def generate_data(ntrain, nval, vocab_size, data_folder, train_x, train_y, val_x, val_y):
    """Write ntrain training and nval validation (chars, words) pairs.

    Sets the module-level VOCAB_SIZE used by generate_pair().
    """
    if not os.path.exists(data_folder):
        os.makedirs(data_folder)
    global VOCAB_SIZE
    VOCAB_SIZE = vocab_size
    # Same generation loop for both splits; only the count and targets differ.
    for count, x_path, y_path in ((ntrain, train_x, train_y), (nval, val_x, val_y)):
        with open(x_path, 'w') as fx, open(y_path, 'w') as fy:
            for _ in range(count):
                query, ans = generate_pair()
                fx.write(query + '\n')
                fy.write(','.join(ans) + '\n')
def main():
    """CLI: argv[1]=#train samples, argv[2]=#val samples, argv[3]=vocab size."""
    data_folder = 'c2w_data'
    if len(sys.argv) > 3:
        data_folder = data_folder + "_" + sys.argv[3]
    paths = {name: os.path.join(data_folder, name + '.txt')
             for name in ('train_x', 'train_y', 'val_x', 'val_y')}
    generate_data(int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]),
                  data_folder, paths['train_x'], paths['train_y'],
                  paths['val_x'], paths['val_y'])
main()
| mit | Python | |
68942512cd5bb9ea1c15a3dc663b8ea1a08df8f4 | Update the deprecation message of PageSet. | axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,Chilledheart/chromium,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk | tools/telemetry/telemetry/page/page_set.py | tools/telemetry/telemetry/page/page_set.py | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.page import page as page_module
from telemetry import decorators
from telemetry import story
PUBLIC_BUCKET = story.PUBLIC_BUCKET
PARTNER_BUCKET = story.PARTNER_BUCKET
INTERNAL_BUCKET = story.INTERNAL_BUCKET
@decorators.Deprecated(
    2015, 6, 30, 'Please use the StorySet class instead (crbug.com/439512). '
    'Instructions for conversion can be found in: https://goo.gl/JsaEez')
class PageSet(story.StorySet):
  """
  This class contains all Chromium-specific configurations necessary to run a
  Telemetry benchmark.
  """

  def __init__(self, base_dir=None, archive_data_file='', user_agent_type=None,
               serving_dirs=None, bucket=None):
    if base_dir and not os.path.isdir(base_dir):
      raise ValueError('Invalid base_dir value')
    super(PageSet, self).__init__(
        archive_data_file=archive_data_file, cloud_storage_bucket=bucket,
        base_dir=base_dir, serving_dirs=serving_dirs)
    # These attributes can be set dynamically by the page set.
    self.user_agent_type = user_agent_type

  @property
  def pages(self):
    # Legacy alias: pages are stored as user stories on the base class.
    return self.user_stories

  def AddUserStory(self, user_story):
    # Only Page instances that belong to this page set may be added.
    assert isinstance(user_story, page_module.Page)
    assert user_story.page_set is self
    super(PageSet, self).AddUserStory(user_story)
| # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.page import page as page_module
from telemetry import decorators
from telemetry import story
PUBLIC_BUCKET = story.PUBLIC_BUCKET
PARTNER_BUCKET = story.PARTNER_BUCKET
INTERNAL_BUCKET = story.INTERNAL_BUCKET
@decorators.Deprecated(
    2015, 6, 25, 'Please use the UserStory class instead (crbug.com/439512). '
    'Instructions for conversion can be found in: https://goo.gl/JsaEez')
class PageSet(story.StorySet):
  """
  This class contains all Chromium-specific configurations necessary to run a
  Telemetry benchmark.
  """

  def __init__(self, base_dir=None, archive_data_file='', user_agent_type=None,
               serving_dirs=None, bucket=None):
    if base_dir and not os.path.isdir(base_dir):
      raise ValueError('Invalid base_dir value')
    super(PageSet, self).__init__(
        archive_data_file=archive_data_file, cloud_storage_bucket=bucket,
        base_dir=base_dir, serving_dirs=serving_dirs)
    # These attributes can be set dynamically by the page set.
    self.user_agent_type = user_agent_type

  @property
  def pages(self):
    # Legacy alias: pages are stored as user stories on the base class.
    return self.user_stories

  def AddUserStory(self, user_story):
    # Only Page instances that belong to this page set may be added.
    assert isinstance(user_story, page_module.Page)
    assert user_story.page_set is self
    super(PageSet, self).AddUserStory(user_story)
| bsd-3-clause | Python |
e8c8464d36e91c9a8d61db0531a2e73dcdee88b7 | Add a test for check_inputs. | jason-neal/companion_simulations,jason-neal/companion_simulations | utilities/tests/test_simulation_utils.py | utilities/tests/test_simulation_utils.py | from utilities.simulation_utilities import check_inputs
import pytest
import numpy as np
@pytest.mark.parametrize("input,expected", [
(None, np.ndarray([0])),
([0], np.array([0])),
(1, np.array([1])),
(range(5), np.array([0,1,2,3,4]))
])
def test_check_inputs(input, expected):
assert np.allclose(check_inputs(input), expected) | mit | Python | |
0c5cbacab1c2925ed0a00d3af0415b76f50c1eec | Create wikipedia.py | fnielsen/dasem,fnielsen/dasem | dasem/wikipedia.py | dasem/wikipedia.py | """Wikipedia interface."""
| apache-2.0 | Python | |
ecef0ea7743f25326183d09622949682ce6feb3c | Test traverses a TS file quickly | johnoneil/arib,johnoneil/arib | test/ts.py | test/ts.py | #!/usr/bin/env python
'''
Module: test
Desc: Test to see how quickly I can parse TS es packets
Author: John O'Neil
Email: oneil.john@gmail.com
DATE: Thursday, October 20th 2016
'''
import os
import sys
import argparse
PACKET_SIZE = 188
# MPEG-TS sync marker (0x47). Stored as bytes and compared through
# one-byte slices so behavior is identical on Python 2 (str) and
# Python 3 (bytes, where indexing yields an int).
SYNC_BYTE = b'G'


def next_packet(filename):
    """Yield successive 188-byte MPEG-TS packets read from *filename*.

    If a packet window does not start with the sync byte, the stream is
    re-aligned on the next sync byte found inside the window.

    Raises:
        Exception: when a full packet-sized window contains no sync byte.
    """
    with open(filename, 'rb') as f:
        while True:
            packet = f.read(PACKET_SIZE)
            if not packet:
                break
            if packet[0:1] != SYNC_BYTE:
                # Lost alignment: look for the next sync byte in this window.
                # (A stray debug print of the bad byte was removed here.)
                start_byte = 0
                for i in range(start_byte, PACKET_SIZE):
                    if packet[i:i + 1] == SYNC_BYTE:
                        start_byte = i
                        break
                if start_byte == 0:
                    # No sync byte anywhere in the window: corrupt stream.
                    # (A dead ``continue`` after this raise was removed.)
                    raise Exception("failure to find sync byte in ts packet size.")
                # BUG FIX: we already hold PACKET_SIZE - start_byte bytes of
                # the re-aligned packet, so only start_byte more bytes are
                # needed. The old code read PACKET_SIZE - start_byte again,
                # yielding wrong-sized packets after every resync.
                remainder = f.read(start_byte)
                packet = packet[start_byte:] + remainder
            yield packet
def main():
    """CLI entry point: stream through the TS file, printing progress."""
    parser = argparse.ArgumentParser(description='Remove ARIB formatted Closed Caption information from an MPEG TS file and format the results as a standard .ass subtitle file.')
    parser.add_argument('infile', help='Input filename (MPEG2 Transport Stream File)', type=str)
    args = parser.parse_args()

    infilename = args.infile
    if not os.path.exists(infilename):
        print('Input filename :' + infilename + " does not exist.")
        # BUG FIX: the os module has no exit(); sys.exit() aborts properly.
        sys.exit(-1)

    total_filesize = os.path.getsize(infilename)
    read_size = 0
    percent_read = 0
    prev_percent_read = percent_read

    # CC data is not, in itself, timestamped, so byte progress through the
    # file is the only feedback we can give while scanning packets.
    # Show initial progress information.
    sys.stdout.write("progress: %d%% \r" % (percent_read))
    sys.stdout.flush()

    for packet in next_packet(infilename):
        read_size += PACKET_SIZE
        percent_read = ((read_size / float(total_filesize)) * 100)
        # Track progress in 1/100ths of a percent so the display only
        # refreshes when the shown value would actually change.
        new_percent_read = int(percent_read * 100)
        if new_percent_read != prev_percent_read:
            prev_percent_read = new_percent_read
            sys.stdout.write("progress: %.2f%% \r" % (percent_read))
            sys.stdout.flush()


if __name__ == "__main__":
    main()
| apache-2.0 | Python | |
2bbd2ac9b4620f33189fa202ee17dee7f7481330 | Create start.py | googleinterns/automated-windows-vms,googleinterns/automated-windows-vms | master_server/start.py | master_server/start.py | import os
import threading
import sys
port=5000
# Serializes updates to the shared ``port`` counter: several threads run
# new_serve() concurrently and ``port = port + 1`` is not atomic, so two
# servers could otherwise be launched on the same port.
_port_lock = threading.Lock()


def new_serve():
    """Claim the next port number and launch one dummy VM server on it.

    Blocks for the lifetime of the spawned process (os.system waits).
    """
    global port
    with _port_lock:
        port = port + 1
        my_port = port
    os.system('python dummy_vm_server.py ' + str(my_port))
def master_server():
    # Runs the master server in the foreground of the calling thread;
    # os.system blocks until the process exits.
    os.system('python master_server.py')
if __name__ == '__main__':
    # Launch the master server in the background, then spin up the
    # requested number of dummy VM servers (count = first CLI argument).
    threading.Thread(target=master_server).start()
    vm_count = int(sys.argv[1])
    for _ in range(vm_count):
        threading.Thread(target=new_serve).start()
| apache-2.0 | Python | |
b5c6e37bdcb88545d187ad1e3adbbbe1d466f874 | Add py solution for 688. Knight Probability in Chessboard | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | py/knight-probability-in-chessboard.py | py/knight-probability-in-chessboard.py | from collections import Counter
from math import log, exp
class Solution(object):
    def knightProbability(self, N, K, r, c):
        """Probability the knight is still on the N x N board after K moves.

        Dynamic programming over a Counter of positions counts the
        K-step move sequences that stay on the board; the count is then
        divided by 8**K in log space to avoid float overflow for large K.

        :type N: int
        :type K: int
        :type r: int
        :type c: int
        :rtype: float
        """
        p = Counter()
        p[r, c] += 1
        # The eight possible knight moves.
        ds = [(-1, -2), (-2, -1), (1, -2), (2, -1), (1, 2), (2, 1), (-1, 2), (-2, 1)]
        # range()/items() (instead of py2-only xrange/iteritems) iterate
        # identically on both Python 2 and Python 3.
        for _ in range(K):
            np = Counter()
            for (px, py), prev_p in p.items():
                for dx, dy in ds:
                    nx, ny = px + dx, py + dy
                    if 0 <= nx < N and 0 <= ny < N:
                        np[nx, ny] += prev_p
            p = np
        s = sum(p.values())
        # Reuse the already-computed total instead of summing again, and
        # return a float in the impossible case for a consistent type.
        return 0.0 if s == 0 else exp(log(s) - K * log(8))
| apache-2.0 | Python | |
b6ec697d0dd8baa8866fb6619275d556e8b0165a | Add clear_cache command to clear cache | scdoshi/djutils | djutils/management/commands/clear_cache.py | djutils/management/commands/clear_cache.py | ###############################################################################
## Imports
###############################################################################
# Django
from django.core.management.base import BaseCommand
from django.core.cache import cache
###############################################################################
## Command
###############################################################################
class Command(BaseCommand):
    """Management command that flushes Django's default cache backend."""

    def handle(self, *args, **kwargs):
        """Clear every entry from the configured cache and report it."""
        cache.clear()
        message = 'Cleared cache\n'
        self.stdout.write(message)
| bsd-3-clause | Python | |
2d663cf136b3bb2954ea1a53b17d70a28bfbd62f | add kattis/addingwords | mjenrungrot/competitive_programming,mjenrungrot/algorithm,mjenrungrot/competitive_programming,mjenrungrot/competitive_programming,mjenrungrot/competitive_programming | Kattis/addingwords.py | Kattis/addingwords.py | """
Problem: addingwords
Link: https://open.kattis.com/problems/addingwords
Source: Kattis
"""
import sys
lines = sys.stdin
# Maps variable name -> integer value for all current definitions.
memory = dict()


def define(variable, value):
    """Bind *variable* to *value*, overwriting any previous binding."""
    memory[variable] = value


def calc(args):
    """Evaluate an expression of alternating variable names and operators.

    Returns the name of some defined variable whose value equals the
    result, or "unknown" when an operand is undefined or no variable
    holds the resulting value.
    """
    resolved = []
    for position, token in enumerate(args):
        if position % 2 == 0:
            # Even positions are operand names; each must be defined.
            if token not in memory:
                return "unknown"
            resolved.append(str(memory[token]))
        else:
            resolved.append(token)
    result = eval("".join(resolved))
    for variable, val in memory.items():
        if val == result:
            return variable
    return "unknown"


def clear():
    """Forget every definition."""
    memory.clear()


def process(line):
    """Dispatch one input line: 'def', 'calc' or 'clear'."""
    tokens = line.split()
    command, args = tokens[0], tokens[1:]
    if command == 'def':
        define(args[0], int(args[1]))
    elif command == 'calc':
        output = calc(args[:-1])
        print("{:} = {:}".format(" ".join(args[:-1]), output))
    elif command == 'clear':
        clear()
# Interpreter loop: consume one command per stdin line until EOF.
for line in lines:
    process(line)
| mit | Python | |
f80acf05f7d492f3716be961b88c4e82d332500c | Create class to update instances with offering | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/util/update_instances_with_offering.py | dbaas/util/update_instances_with_offering.py | # coding: utf-8
class UpdateInstances(object):
    """One-off maintenance helper that backfills Instance.offering."""

    @staticmethod
    def do():
        """Assign an offering to every instance of every infra offering.

        Arbiter/sentinel instances get the plan's weaker offering; all
        other instances get the offering attached to the infra itself.
        """
        # Imported lazily so the module can be loaded before the Django
        # app registry is ready.
        from dbaas_cloudstack.models import DatabaseInfraOffering
        from dbaas_cloudstack.models import PlanAttr
        from physical.models import Instance
        infra_offerings = DatabaseInfraOffering.objects.all()
        for infra_offering in infra_offerings:
            plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan)
            strong_offering = infra_offering.offering
            weaker_offering = plan_attr.get_weaker_offering()
            for instance in infra_offering.databaseinfra.instances.all():
                if (instance.instance_type == Instance.MONGODB_ARBITER or
                        instance.instance_type == Instance.Sentinel):
                    instance.offering = weaker_offering
                else:
                    # BUG FIX: was ``instance.oferring`` (typo), which set a
                    # brand-new attribute and left ``offering`` untouched.
                    instance.offering = strong_offering
                instance.save()
| bsd-3-clause | Python | |
e6afab39aaf263525c5abf67d46a85ff13418340 | Add default_settings.py | pakal/django-compat-patcher,pakal/django-compat-patcher | django_compat_patcher/default_settings.py | django_compat_patcher/default_settings.py |
# django-compat-patcher fixer-selection defaults. All are None here,
# presumably meaning "no explicit include/exclude filtering" -- confirm
# against the settings loader that consumes these values.
DCP_INCLUDE_FIXER_IDS = None
DCP_INCLUDE_FIXER_FAMILIES = None
DCP_EXCLUDE_FIXER_IDS = None
DCP_EXCLUDE_FIXER_FAMILIES = None
445b6f254ed1fa6193b1c29a9012fe38d9bc768f | Create ClassStaticRegularMethods.py | madhurilalitha/Python-Projects | EmployeeManagementSystem/Findings/ClassStaticRegularMethods.py | EmployeeManagementSystem/Findings/ClassStaticRegularMethods.py | #class methods vs regular methods vs static methods
#regular methods -automatically take the instance as the first argument ("self" by convention)
#class methods - takes "cls" as the first argument ("a decorator class-method should be used)
#static methods - dont pass anything automatically - behave just like regular functions just because they have some connection
# with the classes
class Employee:
    """Toy employee record demonstrating regular/class/static methods."""

    # Class-level (shared) state.
    num_of_emps = 0
    raise_amount = 1.04

    def __init__(self, first, last, pay):
        self.fname = first
        self.lname = last
        self.epay = pay
        self.email = first + "." + last + "@company.com"
        # Count on the class itself so every instance shares one tally.
        Employee.num_of_emps += 1

    def fullname(self):
        """Return "<first> <last>"."""
        return '{} {}'.format(self.fname, self.lname)

    def apply_raise(self):
        """Raise this employee's pay by the current raise factor.

        Attribute lookup falls back from instance to class, so a
        per-instance override of raise_amount is honored here.
        """
        self.epay = int(self.epay * self.raise_amount)

    @classmethod
    def set_raise_amount(cls, amount):
        """Change the raise factor on the class (affects all instances)."""
        cls.raise_amount = amount

    @classmethod
    def from_string(cls, emp_str):
        """Alternative constructor: build from a "first-last-pay" string."""
        first, last, pay = emp_str.split('-')
        return cls(first, last, pay)

    @staticmethod
    def is_workday(day):
        """True unless *day* falls on a weekend (weekday 5 or 6)."""
        return day.weekday() not in (5, 6)
import datetime

# --- Static method demo: no instance or class state is touched. ---
my_date = datetime.date(2017,9,9)  # 2017-09-09 falls on a Saturday
print ("Whether the entered day is a weekday or not")
print (Employee.is_workday(my_date))

# --- Class method as alternative constructor. ---
print ("Employee 3 details")
emp3 = 'John-Doe-70000'
emp3 = Employee.from_string(emp3)
print(emp3.email)
print (emp3.epay)

# --- Class variable lookup: instances fall back to the class value. ---
emp1 = Employee('Manoj','Kumar',1000)
emp2 = Employee('Lalitha','Madhuri',2000)
print (Employee.raise_amount)
print(emp1.raise_amount)
print (emp2.raise_amount)

print ("After calling the raise_amount function")
Employee.set_raise_amount(1.05) # modifying the class variable by calling a classmethod - all instances see the change
print (Employee.raise_amount)
print(emp1.raise_amount)
print (emp2.raise_amount)

print ("Important : Calling through instance also modifies the class variable")
emp2.set_raise_amount(1.05) # classmethods receive the class even when invoked via an instance
print (Employee.raise_amount)
print(emp1.raise_amount)
print (emp2.raise_amount)

# Class methods may also be used as alternative constructors, providing
# multiple ways of creating our objects.
| mit | Python | |
febc64250995cc3e110b553697f615213c1c4d1f | implement RGBAColor | missionpinball/mpf,missionpinball/mpf | mpf/core/rgba_color.py | mpf/core/rgba_color.py | """RGBA Color."""
from typing import Tuple, Union
from mpf.core.rgb_color import RGBColor
class RGBAColor(RGBColor):
    """RGBColor extended with an opacity (alpha) channel."""

    def __init__(self, color: Union[RGBColor, Tuple[int, int, int, int]]):
        """Build from an RGBColor (fully opaque) or an (r, g, b, a) tuple."""
        if isinstance(color, RGBColor):
            # Plain RGB input: treat as fully opaque.
            self.opacity = 255
            super().__init__(color)
        else:
            # 4-tuple input: the last component is the alpha value.
            self.opacity = color[3]
            super().__init__((color[0], color[1], color[2]))

    def __iter__(self):
        """Iterate r, g, b and then the opacity."""
        components = [self._color[0], self._color[1], self._color[2], self.opacity]
        return iter(components)

    def __str__(self):
        """Human-readable color plus opacity."""
        return "{} Opacity: {}".format(self._color, self.opacity)
| mit | Python | |
fa244afcbd9d7f76f473c5deac8f1852cb04eda9 | add ODEProblem class and parser | theosysbio/means,lukauskas/means | MEA_package/ProgramFiles/ode_problem.py | MEA_package/ProgramFiles/ode_problem.py |
import sympy
class ODEProblem(object):
    """
    Stores the left and right hand side equations to be simulated
    """

    # Private (name-mangled) storage; read access goes through the
    # properties below so instances stay effectively immutable.
    __right_hand_side = None
    __left_hand_side = None
    __moment_dic = None
    __constants = None

    def __init__(self, left_hand_side, right_hand_side, constants, moments):
        """
        Creates a `ODEProblem` object that stores the problem to be simulated/used for inference

        :param left_hand_side: the left hand side of equations
        :param right_hand_side: the right hand side of equations
        :param constants: the constants of the model
        :param moments: the moments as a list of n-tuple, where n is the number of species
        """
        self.__left_hand_side = left_hand_side
        self.__right_hand_side = right_hand_side
        # Copy so later mutation of the caller's sequence cannot change us.
        self.__constants = constants[:]
        self.__moment_dic = self.make_moment_dic(moments)
        self.validate()

    def make_moment_dic(self, moments):
        """Map each moment n-tuple to its equation index."""
        dic_out = dict()
        for i, m in enumerate(moments):
            dic_out[m] = i
        return dic_out

    def validate(self):
        """
        Validates whether the particular model is created properly
        """
        # BUG FIX: both messages used the malformed placeholder "i%";
        # the first one had a single %i for two arguments, so raising it
        # failed with a TypeError instead of the intended ValueError.
        if self.left_hand_side.rows != self.right_hand_side.rows:
            raise ValueError(
                "There are %i left hand side equations and %i right hand side ones. "
                "The same number is expected." % (self.left_hand_side.rows, self.right_hand_side.rows))
        if self.left_hand_side.rows != len(self.__moment_dic):
            raise ValueError(
                "There are %i equations and %i moments. "
                "The same number is expected." % (self.left_hand_side.rows, len(self.__moment_dic)))

    # Expose public interface for the specified instance variables.
    # Note that all properties here are "getters" only, thus assignment
    # won't work.
    @property
    def left_hand_side(self):
        return self.__left_hand_side

    @property
    def right_hand_side(self):
        return self.__right_hand_side

    @property
    def constants(self):
        return self.__constants

    @property
    def moment_dic(self):
        return self.__moment_dic
def parse_model(input_filename):
    """
    Parses model from the `input_filename` file and returns it
    :param input_filename:
    :return: Parsed `ODEProblem` object
    :rtype: ODEProblem
    """
    # Strings to identify appropriate fields of the file
    STRING_RIGHT_HAND = 'RHS of equations:'
    STRING_LEFT_HAND = 'LHS:'
    STRING_CONSTANT = 'Constants:'
    STRING_MOM = 'List of moments:'

    infile = open(input_filename)
    try:
        lines = infile.readlines()  # read input data
    finally:
        infile.close()

    all_fields = dict()
    field = None
    # Cut the file into chunks. Lines containing ":" are field headers;
    # subsequent non-blank lines belong to the most recent header.
    for i, line in enumerate(lines):
        if ":" in line:
            field = line.rstrip()
            all_fields[field] = []
        elif field:
            rsline = line.rstrip()
            if rsline:
                all_fields[field].append(rsline)

    # Now we query all the fields we need. print() calls (rather than py2
    # print statements) keep this runnable on both Python 2 and 3.
    try:
        right_hand_side = sympy.Matrix([sympy.simplify(l) for l in all_fields[STRING_RIGHT_HAND]])
    except KeyError:
        print('The field "' + STRING_RIGHT_HAND + '" is not in the input file "' + input_filename + '"')
        raise
    try:
        left_hand_side = sympy.Matrix([l for l in all_fields[STRING_LEFT_HAND]])
    except KeyError:
        print('The field "' + STRING_LEFT_HAND + '" is not in the input file "' + input_filename + '"')
        raise
    try:
        constants = sympy.Matrix([l for l in all_fields[STRING_CONSTANT]])
    except KeyError:
        print('The field "' + STRING_CONSTANT + '" is not in the input file "' + input_filename + '"')
        raise
    try:
        # NOTE: eval() trusts the input file; each moments line must be an
        # n-tuple literal.
        moments = [tuple(eval(l)) for l in all_fields[STRING_MOM]]
    except KeyError:
        # BUG FIX: this message previously named STRING_CONSTANT although
        # the missing field is the list of moments.
        print('The field "' + STRING_MOM + '" is not in the input file "' + input_filename + '"')
        raise
    return ODEProblem(left_hand_side, right_hand_side, constants, moments)
parse_model("../Inoutput/ODEout.tmp")
| mit | Python | |
c0137e888f3ea073fcca7b4abe05e8a52357b75d | Add base.py | lltk/Koko,lltk/Koko | Koko/base.py | Koko/base.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
__author__ = 'Markus Beuckelmann'
__author_email__ = 'email@markus-beuckelmann.de'
__version__ = '0.0.1'

# Server configuration. NOTE(review): values are hard-coded; DEBUG=True
# and the development server must not be used outside local development.
DEBUG = True
HOST = '127.0.0.1'  # bind to localhost only
PORT = 5002
NAME = 'Koko'  # Flask application name

from flask import Flask

# The WSGI application object served by the __main__ block below.
app = Flask(NAME)
if __name__ == '__main__':
    if DEBUG:
        # Run the development server if debug mode is on
        app.run(debug = True , host = HOST, port = PORT);
    else:
        # Non-debug mode: prefer the Tornado HTTP server; if tornado is
        # not installed, the ImportError handler falls back to Flask's
        # builtin server with debugging off.
        try:
            from tornado.wsgi import WSGIContainer
            from tornado.httpserver import HTTPServer
            from tornado.ioloop import IOLoop
            from tornado.log import enable_pretty_logging
            enable_pretty_logging()
            http_server = HTTPServer(WSGIContainer(app))
            http_server.listen(PORT, HOST)
            # Blocks forever, serving requests.
            IOLoop.instance().start()
        except ImportError:
            app.run(debug = False, host = HOST, port = PORT);
| agpl-3.0 | Python | |
689fe4b59e339f15de7e8e1fd6bc9ed1a2588cf3 | Create two_sum.py | schana/random-hacking,lambeau/random-hacking,schana/random-hacking,schana/random-hacking | two_sum.py | two_sum.py | import random
# Generate a random sample, then report every pair summing to the target.
items = [random.randint(-50, 50) for _ in range(100)]
expected = 27

print(items)

# One pass: for each value, check whether its complement was seen earlier.
seen = set()
for value in items:
    complement = expected - value
    if complement in seen:
        print(value, complement)
    seen.add(value)

print(seen)
| apache-2.0 | Python | |
2e2f48ca37d18386a0faba23cbbdd94fff5727b2 | Añade model para event | migonzalvar/alpha,migonzalvar/alpha,abertal/alpha,abertal/alpha,abertal/alpha,migonzalvar/alpha,abertal/alpha,migonzalvar/alpha | core/migrations/0018_event.py | core/migrations/0018_event.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-26 14:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: introduces the Event model with a
    # name, start/end dates and an optional free-text comment.

    dependencies = [
        ('core', '0017_auto_20170426_1007'),
    ]

    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('event_name', models.TextField(verbose_name='Nombre actividad')),
                ('event_start', models.DateField(verbose_name='Fecha inicio')),
                ('event_end', models.DateField(verbose_name='Fecha fin')),
                # Optional: blank allowed, defaults to the empty string.
                ('comment', models.TextField(blank=True, default='', verbose_name='Observaciones')),
            ],
            options={
                'verbose_name': 'Actividades',
            },
        ),
    ]
| bsd-3-clause | Python | |
73ff8eb3a994741bb19384ae839f41a5125e3c7a | Add create_dev_account_roles | stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub | director/scripts/create_dev_account_roles.py | director/scripts/create_dev_account_roles.py | """
Assign account roles to the test users.
"""
from django.contrib.auth.models import User
from django.conf import settings
from accounts.models import Account, AccountUserRole, AccountRole
def run(*args):
    """Give every test user a random role on every account (dev only)."""
    # Ensure that this is only used in development.
    assert settings.DEBUG
    for user in User.objects.all():
        for account in Account.objects.all():
            # Pick a random role independently for each (user, account) pair.
            role = AccountRole.objects.order_by('?').first()
            AccountUserRole.objects.create(
                account=account,
                user=user,
                role=role
            )
| apache-2.0 | Python | |
38b26f038fb7855a6b4d80fad897997acd4f0ef2 | handle double deletion caused by httpretty | okomestudio/moto,ZuluPro/moto,kennethd/moto,william-richard/moto,whummer/moto,botify-labs/moto,william-richard/moto,zonk1024/moto,Brett55/moto,jotes/moto,dbfr3qs/moto,silveregg/moto,Brett55/moto,ZuluPro/moto,jrydberg/moto,kefo/moto,dbfr3qs/moto,Affirm/moto,andresriancho/moto,rocky4570/moto,heddle317/moto,botify-labs/moto,behanceops/moto,spulec/moto,heddle317/moto,spulec/moto,botify-labs/moto,ZuluPro/moto,mrucci/moto,gjtempleton/moto,EarthmanT/moto,im-auld/moto,2rs2ts/moto,Brett55/moto,okomestudio/moto,gjtempleton/moto,ludia/moto,kefo/moto,rocky4570/moto,rocky4570/moto,araines/moto,braintreeps/moto,dbfr3qs/moto,spulec/moto,2rs2ts/moto,okomestudio/moto,2rs2ts/moto,william-richard/moto,okomestudio/moto,dbfr3qs/moto,Brett55/moto,2mf/moto,ZuluPro/moto,botify-labs/moto,okomestudio/moto,rouge8/moto,rocky4570/moto,william-richard/moto,heddle317/moto,ZuluPro/moto,ZuluPro/moto,Brett55/moto,pior/moto,kefo/moto,kefo/moto,Brett55/moto,Affirm/moto,2rs2ts/moto,tootedom/moto,gjtempleton/moto,whummer/moto,spulec/moto,Affirm/moto,rocky4570/moto,alexdebrie/moto,whummer/moto,william-richard/moto,riccardomc/moto,dbfr3qs/moto,heddle317/moto,spulec/moto,Affirm/moto,gjtempleton/moto,DataDog/moto,kefo/moto,Affirm/moto,botify-labs/moto,whummer/moto,IlyaSukhanov/moto,rocky4570/moto,okomestudio/moto,whummer/moto,heddle317/moto,Affirm/moto,dbfr3qs/moto,jszwedko/moto,spulec/moto,botify-labs/moto,ImmobilienScout24/moto,gjtempleton/moto,whummer/moto,2rs2ts/moto,william-richard/moto | moto/route53/models.py | moto/route53/models.py | from moto.core import BaseBackend
from moto.core.utils import get_random_hex
class FakeZone:
    """In-memory stand-in for a Route53 hosted zone."""

    def __init__(self, name, id):
        self.name = name
        self.id = id
        # Record sets keyed by record-set name.
        self.rrsets = {}

    def add_rrset(self, name, rrset):
        """Store (or replace) the record set kept under *name*."""
        self.rrsets[name] = rrset

    def delete_rrset(self, name):
        """Remove *name* if present; deleting a missing set is a no-op."""
        if name in self.rrsets:
            del self.rrsets[name]
class Route53Backend(BaseBackend):
    """Tracks hosted zones for the mocked Route53 endpoint."""

    def __init__(self):
        # zone id -> FakeZone
        self.zones = {}

    def create_hosted_zone(self, name):
        """Create a zone under a random hex id and return it."""
        zone = FakeZone(name, get_random_hex())
        self.zones[zone.id] = zone
        return zone

    def get_all_hosted_zones(self):
        """All currently known zones."""
        return self.zones.values()

    def get_hosted_zone(self, id):
        """The zone for *id*, or None when unknown."""
        return self.zones.get(id)

    def delete_hosted_zone(self, id):
        """Delete and return the zone for *id*; None when unknown."""
        zone = self.zones.get(id)
        if zone:
            del self.zones[id]
            return zone
        return None


# Module-level singleton shared by the mocked endpoints.
route53_backend = Route53Backend()
| from moto.core import BaseBackend
from moto.core.utils import get_random_hex
class FakeZone:
    """In-memory hosted zone (record sets keyed by name)."""

    def __init__(self, name, id):
        self.name = name
        self.id = id
        self.rrsets = {}

    def add_rrset(self, name, rrset):
        """Insert or overwrite the record set called *name*."""
        self.rrsets[name] = rrset

    def delete_rrset(self, name):
        """Remove *name*; raises KeyError when it is absent."""
        self.rrsets.pop(name)
class Route53Backend(BaseBackend):
    # Backend state for the mocked Route53 service: maps zone id -> FakeZone.

    def __init__(self):
        self.zones = {}

    def create_hosted_zone(self, name):
        # Zone ids are random hex strings, mirroring AWS-generated ids.
        new_id = get_random_hex()
        new_zone = FakeZone(name, new_id)
        self.zones[new_id] = new_zone
        return new_zone

    def get_all_hosted_zones(self):
        return self.zones.values()

    def get_hosted_zone(self, id):
        # Returns None for unknown ids.
        return self.zones.get(id)

    def delete_hosted_zone(self, id):
        # Returns the removed zone, or None when the id is unknown.
        zone = self.zones.get(id)
        if zone:
            del self.zones[id]
            return zone
        return None


# Module-level singleton used by the response handlers.
route53_backend = Route53Backend()
| apache-2.0 | Python |
bc17847b9e9cc71ea7ba45ef7292547384c70883 | Create Remove_Nth_Node_From_End_of_List.py | UmassJin/Leetcode | Array/Remove_Nth_Node_From_End_of_List.py | Array/Remove_Nth_Node_From_End_of_List.py | Given a linked list, remove the nth node from the end of list and return its head.
For example,
Given linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes 1->2->3->5.
Note:
Given n will always be valid.
Try to do this in one pass
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    # @param {ListNode} head
    # @param {integer} n
    # @return {ListNode}
    def removeNthFromEnd(self, head, n):
        """Remove the n-th node from the end of the list in one pass.

        Two pointers are kept n nodes apart; when the leading pointer
        reaches the tail, the trailing one sits just before the node to
        unlink. ``n`` is assumed valid per the problem statement.
        """
        if (not head) or (n == 0):
            return head
        fast = head
        slow = head
        # range() instead of the py2-only xrange(): identical iteration
        # on both interpreter lines (the rest of the code is neutral).
        for _ in range(n):
            fast = fast.next
        if not fast:
            # The node to delete is the current head.
            return head.next
        while fast.next:
            slow = slow.next
            fast = fast.next
        slow.next = slow.next.next
        return head
| mit | Python | |
112e3516d7c77f23edfed704651e8c5e7f7d75e4 | Add example heat map script for data | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/DEV/adm/heatmap.py | scripts/DEV/adm/heatmap.py | import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from pyiem.plot import MapPlot
import sys
# Which year of visit data to plot (first CLI argument).
year = int(sys.argv[1])

x = []
y = []
for linenum, line in enumerate(open('visit_history_093013_st12.csv')):
    if linenum == 0:
        # Skip the CSV header row.
        continue
    tokens = line.split(",")
    if int(tokens[5]) != year:
        continue
    try:
        # Parse both coordinates before appending so a bad longitude can
        # never leave x and y with mismatched lengths (the old code
        # appended y first, inside a bare ``except`` that also hid
        # unrelated errors).
        lat = float(tokens[17].strip())
        lon = float(tokens[18].strip())
    except (ValueError, IndexError):
        continue
    y.append(lat)
    x.append(lon)

# 2D histogram over the CONUS bounding box (lat 25..50, lon -130..-60).
H2, xedges, yedges = np.histogram2d(y, x, bins=(50, 100), range=[[25, 50], [-130, -60]])

m = MapPlot(sector='conus', title='Heat Map of Location of Visitors, year=%s' % (year,),
            subtitle='from visit_history_093013_st12.csv', nologo=True)
# NOTE: x and y are reused here for the plotting meshgrid arrays.
x, y = np.meshgrid(yedges, xedges)
levels = [1, 2, 5, 7, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 90, 100, 200]
# Mask empty bins so they render as background rather than the lowest color.
H3 = ma.array(H2)
H3.mask = np.where(H2 < 1, True, False)
cmap = cm.get_cmap('jet')
cmap.set_under('white')
cmap.set_over('black')
m.pcolormesh(x, y, H3, levels, cmap=cmap, units='count')
# m.drawcounties()
m.postprocess(filename='conus_heatmap_%s.png' % (year,))
| mit | Python | |
a2ff82db981aa6fd29f0b2d2d66e7cb02fb1813c | Create ctci-connected-cell-in-a-grid.py | OursRonchon/hackerrank,OursRonchon/hackerrank | cracking-the-coding-interview/ctci-connected-cell-in-a-grid.py | cracking-the-coding-interview/ctci-connected-cell-in-a-grid.py | def getBiggestRegion(grid,m,n):
def size(i,j):
if 0 <= i < len(grid) and 0 <= j < len(grid[i]) and grid[i][j] == 1:
grid[i][j] = 0
return 1 + sum(size(i2,j2) for i2 in range(i - 1,i + 2) for j2 in range(j - 1,j + 2))
return 0
return max(size(i,j) for i in range(n) for j in range(m))
# Read the grid dimensions and rows from stdin (HackerRank format), then
# print the size of the largest 8-connected region of 1s.
n = int(input().strip())
m = int(input().strip())
grid = []
for grid_i in range(n):
    grid_t = list(map(int, input().strip().split(' ')))
    grid.append(grid_t)
print(getBiggestRegion(grid,m,n))
| mit | Python | |
fe4e1eb3cc4485af9a1a48bfb02be4f651b23b1d | add config/ | T620/globe,T620/globe,T620/globe | config/config.py | config/config.py | #SECRETS
print "[INFO] Env config vars loaded"
#DATABASE_URL is defined in the environment of the machine running the app
#for instance, if running locally, DATABASE_URL is set doing:
#export DATABASE_URL="psql://epb/" etc
#for the time being
DEBUG=True
DEVELOPMENT=True
BCRYPT_LOG_ROUNDS = 5
SQLALCHEMY_TRACK_MODIFICATIONS=False
#so secure lol
SECRET_KEY="dev"
#FLASK_DEBUG=Heroku
print "[INFO] Dev Mode=%s" % DEVELOPMENT
| mit | Python | |
f59bc6d034438ca85f4a9f55033523ac238c9e98 | Create Prototype.py | Xustyx/RouteSimulator | Prototype.py | Prototype.py | from math import radians, cos, sin, asin, sqrt
from random import randint
def haversine(point1, point2):
    """Great-circle distance in meters between two {'lat', 'lon'} points."""
    lon1 = point1['lon']
    lat1 = point1['lat']
    lon2 = point2['lon']
    lat2 = point2['lat']
    # Convert decimal degrees to radians
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
    # Haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a))
    r = 6371000  # Radius of earth in meters. Use 3956 for miles
    return c * r


def doRoute(vectorList):
    """Concatenate the interpolated legs between consecutive waypoints.

    Each waypoint is {'point': {...}, 'speed': meters-per-step}; the
    speed of the leg's starting waypoint is used for that leg.
    """
    routeList = []
    # range() rather than the py2-only xrange(): the rest of the file
    # already uses py3-style print() calls, and iteration is identical.
    for x in range(len(vectorList) - 1):
        v1 = vectorList[x]
        v2 = vectorList[x + 1]
        routeList += route(v1['point'], v2['point'], v1['speed'])
    return routeList


def route(point1, point2, speed):
    """Linearly interpolate [lat, lon] steps of ~*speed* meters from p1 to p2."""
    d = haversine(point1, point2)
    nSteps = d / speed
    dStepLon = (point2['lon'] - point1['lon']) / nSteps
    dStepLat = (point2['lat'] - point1['lat']) / nSteps
    routeList = []
    for x in range(int(nSteps)):
        rLon = (x + 1) * dStepLon + point1['lon']
        rLat = (x + 1) * dStepLat + point1['lat']
        routeList.append([rLat, rLon])
    return routeList
def doNoise(points, noise):
    """Return a new list with random noise applied to every [lat, lon]."""
    nPoints = []
    # range() replaces the py2-only xrange(); iteration is identical.
    for x in range(len(points)):
        nPoints.append(addNoise(points[x], noise))
    return nPoints


def addNoise(point, noise):
    """Perturb one [lat, lon] pair by two independent random offsets."""
    nPoint = []
    nPoint.append(point[0] + randomNoise(noise))
    nPoint.append(point[1] + randomNoise(noise))
    return nPoint


def randomNoise(noise):
    """Random offset in [-noise, +noise] micro-degrees, sign chosen 50/50.

    BUG FIX: the sign used to come from the truthiness of randint(0, 2),
    which picked the negative sign twice as often as the positive one.
    """
    if randint(0, 1):
        z = -1
    else:
        z = 1
    rNoise = (randint(0, noise) / 1000000.0) * z
    return rNoise
def routePrint(routeList):
    """Print one "index,Step,lat,lon" line per route point.

    BUG FIX: the original applied .format() to the *return value* of
    print(...), which only works as a py2 print statement and raises
    AttributeError on Python 3; the format call now happens inside
    print(). enumerate() also replaces the py2-only xrange(len(...)).
    """
    for x, point in enumerate(routeList):
        print("{0},Step,{1},{2}".format(x, point[0], point[1]))
def main():
    """Demo run: route through fixed waypoints, add noise, print the result."""
    waypoints = [
        {'point': {'lat': 41.967782, 'lon': 2.837736}, 'speed': 1.3},
        {'point': {'lat': 41.967691, 'lon': 2.837481}, 'speed': 0.1},
        {'point': {'lat': 41.967657, 'lon': 2.837486}, 'speed': 1},
        {'point': {'lat': 41.967175, 'lon': 2.836808}, 'speed': 1.3},
        # The final waypoint's speed is never used (no leg starts here).
        {'point': {'lat': 41.967418, 'lon': 2.836306}, 'speed': 0}
    ]
    routePrint(doNoise(doRoute(waypoints), 20))


if __name__ == "__main__":
    main()
| mit | Python | |
62a860112db8c263d26eb9e4cdb67d23ce61848a | Add a snippet (Python OpenCV). | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/opencv/opencv_2/write_image.py | python/opencv/opencv_2/write_image.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Write image: write an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#write-an-image
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
    """CLI tool: read an image in grayscale and write it back out."""
    # Command-line interface: input and output image paths are mandatory.
    parser = argparse.ArgumentParser(description='An opencv snippet.')
    parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
    parser.add_argument("--outfile", "-o", help="The picture file to write", required=True, metavar="FILE")
    args = parser.parse_args()

    # Read flags: cv.IMREAD_COLOR loads color and drops transparency
    # (the default); cv.IMREAD_GRAYSCALE loads in grayscale mode;
    # cv.IMREAD_UNCHANGED keeps the alpha channel.
    image = cv.imread(args.infile, cv.IMREAD_GRAYSCALE)
    cv.imwrite(args.outfile, image)


if __name__ == '__main__':
    main()
| mit | Python | |
fa0752a319ba7412c438a5501a54446aa6f61bc7 | Create xgboost.py | Diyago/Machine-Learning-scripts,Diyago/Machine-Learning-scripts | xgboost.py | xgboost.py | # Imports
# This Python 3 environment comes with many helpful analytics libraries installed
# It is defined by the kaggle/python docker image: https://github.com/kaggle/docker-python
# For example, here's several helpful packages to load in
from __future__ import division
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.cross_validation import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import roc_auc_score
from sklearn.pipeline import Pipeline
from sklearn.svm import OneClassSVM
# Input data files are available in the "../input/" directory.
# For example, running this (by clicking run or pressing Shift+Enter) will list the files in the input directory
from subprocess import check_output
print(check_output(["ls", "../input"]).decode("utf8"))
# Any results you write to the current directory are saved as output.
# # Load and preprocess data
# load data
df_train = pd.read_csv('../input/train.csv')
df_test = pd.read_csv('../input/test.csv')

ignored_columns = ['ID', 'TARGET']
C = df_train.columns

# remove constant columns
# (variance below eps means the feature carries no information)
eps = 1e-10
dropped_columns = set()
print('Identifing low-variance columns...', end=' ')
for c in C:
    if df_train[c].var() < eps:
        # print('.. %-30s: too low variance ... column ignored'%(c))
        dropped_columns.add(c)
print('done!')
C = list(set(C) - dropped_columns - set(ignored_columns))

# remove duplicate columns
# (pairwise O(n^2) comparison of raw values; later duplicate is dropped)
print('Identifying duplicate columns...', end=' ')
for i, c1 in enumerate(C):
    f1 = df_train[c1].values
    for j, c2 in enumerate(C[i+1:]):
        f2 = df_train[c2].values
        if np.all(f1 == f2):
            dropped_columns.add(c2)
print('done!')
C = list(set(C) - dropped_columns - set(ignored_columns))

print('# columns dropped: %d'%(len(dropped_columns)))
print('# columns retained: %d'%(len(C)))

# Dropped columns are removed from both frames so train/test stay aligned.
df_train.drop(dropped_columns, axis=1, inplace=True)
df_test.drop(dropped_columns, axis=1, inplace=True)

# # Split the Learning Set
y_learning = df_train['TARGET'].values
X_learning = df_train.drop(['ID','TARGET'], axis=1).values

id_test = df_test['ID']
X_test = df_test.drop(['ID'], axis=1).values

# Split the learning set into training, validation and local-test sets.
X_train_valid, X_local_test, y_train_valid, y_local_test = train_test_split(
    X_learning, y_learning, test_size=0.168, random_state=1
)
X_fit, X_eval, y_fit, y_eval= train_test_split(
    X_train_valid, y_train_valid, test_size=0.2, random_state=1
)

print('# train: %5d (0s: %5d, 1s: %4d)'%(len(y_fit), sum(y_fit==0), sum(y_fit==1)))
print('# valid: %5d (0s: %5d, 1s: %4d)'%(len(y_eval), sum(y_eval==0), sum(y_eval==1)))
print('# test:  %5d (0s: %5d, 1s: %4d)'%((len(y_local_test), sum(y_local_test==0), sum(y_local_test==1))))

# classifier
clf = xgb.XGBClassifier(missing=np.nan, max_depth=5,
                        n_estimators=1000, learning_rate=0.01,
                        subsample=0.5, colsample_bytree=0.9, seed=4242)

# fitting (early stopping monitors AUC on the held-out eval split)
clf.fit(X_fit, y_fit, early_stopping_rounds=50, eval_metric="auc", eval_set=[(X_eval, y_eval)])

# compute the AUC for the learnt model on training, validation, and local test data.
auc_train = roc_auc_score(y_fit, clf.predict_proba(X_fit)[:,1])
auc_valid = roc_auc_score(y_eval, clf.predict_proba(X_eval)[:,1])
auc_test = roc_auc_score(y_local_test, clf.predict_proba(X_local_test)[:,1])

print('\n-----------------------')
print(' AUC train: %.5f'%auc_train)
print(' AUC valid: %.5f'%auc_valid)
print(' AUC test : %.5f'%auc_test)
print('-----------------------')
print('\nModel parameters...')
print(clf.get_params())
print('\n-----------------------\n')

# predicting
y_pred= clf.predict_proba(X_test)[:,1]

submission = pd.DataFrame({"ID":id_test, "TARGET":y_pred})
submission.to_csv("submission.csv", index=False)

print('Completed!')
| apache-2.0 | Python | |
f49cc16d96b5a833293ebbc777b4268142d07bd1 | ADD example | Teekuningas/mne-python,mne-tools/mne-python,cjayb/mne-python,jaeilepp/mne-python,cmoutard/mne-python,nicproulx/mne-python,mne-tools/mne-python,teonlamont/mne-python,alexandrebarachant/mne-python,rkmaddox/mne-python,pravsripad/mne-python,mne-tools/mne-python,wmvanvliet/mne-python,rkmaddox/mne-python,bloyl/mne-python,pravsripad/mne-python,kingjr/mne-python,drammock/mne-python,jmontoyam/mne-python,wmvanvliet/mne-python,jmontoyam/mne-python,olafhauk/mne-python,wmvanvliet/mne-python,alexandrebarachant/mne-python,jniediek/mne-python,jaeilepp/mne-python,adykstra/mne-python,adykstra/mne-python,drammock/mne-python,kambysese/mne-python,kingjr/mne-python,Eric89GXL/mne-python,yousrabk/mne-python,yousrabk/mne-python,cmoutard/mne-python,teonlamont/mne-python,bloyl/mne-python,larsoner/mne-python,larsoner/mne-python,wronk/mne-python,ARudiuk/mne-python,nicproulx/mne-python,Teekuningas/mne-python,cjayb/mne-python,olafhauk/mne-python,larsoner/mne-python,olafhauk/mne-python,jniediek/mne-python,Teekuningas/mne-python,wronk/mne-python,kingjr/mne-python,pravsripad/mne-python,kambysese/mne-python,ARudiuk/mne-python,Eric89GXL/mne-python,drammock/mne-python | examples/visualization/plot_report_slider.py | examples/visualization/plot_report_slider.py | """
=======================================
Plot Time-Series Slider with MNE-Report
=======================================
In this example, MEG evoked data are plotted in an html slider.
"""
# Authors: Teon Brooks <teon.brooks@gmail.com
#
# License: BSD (3-clause)
from mne.report import Report
from mne.datasets import sample
from mne import read_evokeds
from mne.utils import _TempDir
report = Report()
path = sample.data_path()
fname = path + '/MEG/sample/sample_audvis-ave.fif'
tempdir = _TempDir()
report_path = tempdir + '/report.html'
# Load the evoked data
evoked = read_evokeds(fname, condition='Left Auditory',
baseline=(None, 0), verbose=False)
evoked.crop(0, .2)
times = evoked.times[::4]
# Create a list of figs for the slider
figs = list()
for time in times:
figs.append(evoked.plot_topomap(time, vmin=-300, vmax=300,
res=100, show=False))
plt.close()
report.add_slider_to_section(figs, times, 'Evoked Response')
report.save(report_path, open_browser=True)
| bsd-3-clause | Python | |
50c3fca5576237a1757574b6b24b259567c1bee1 | Fix committed real DBHOSTNAME. | joshk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sgml/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,valyala/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,denkab/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,testn/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,herloct/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zloster/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,leafo/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,khellang/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,khellang/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jebbstewart
/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,grob/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,methane/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,joshk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zloster/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Synchro/
FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,torhve/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,dmacd/FB-try1,grob/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,testn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,torhve/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,khellang/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zloster/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,denka
b/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,dmacd/FB-try1,Synchro/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,doom369/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,grob/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sgml/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,joshk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,s-ludwig/FrameworkBenchm
arks,torhve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,denkab/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sxend/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,denkab/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,denkab/FrameworkBenchmarks,
yunspace/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,leafo/FrameworkBenchmarks,methane/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,grob/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zloster/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,doom369/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,herloct/FrameworkBenchmarks,khellang/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zloster/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sxend/FrameworkBenchmarks,grob/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zloster/FrameworkBenchmarks,dmacd/FB-try1,denkab/FrameworkBenchmarks,sgml/FrameworkBen
chmarks,herloct/FrameworkBenchmarks,methane/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,actframework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,herloct/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jamming/FrameworkBenchmarks,dmacd/FB-try1,hperadin/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,actframework/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zhuochenKID
D/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,actframework/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,dmacd/FB-try1,MTDdk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,torhve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,herloct/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,methane/FrameworkBenchmarks,leafo/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,herloct/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nbrady-techempower/
FrameworkBenchmarks,methane/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,valyala/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,herloct/FrameworkBenchmarks,grob/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Verber/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,valyala/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zapov/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,actframework/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,doom369/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,joshk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,methane/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,g
reenlaw110/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,actframework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,testn/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,denkab/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,khellang/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,testn/FrameworkBenchmarks,dmacd/FB-try1,sgml/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,khellang/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Verber/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,grob/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zapov/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nathana1/FrameworkBenchma
rks,donovanmuller/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,methane/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,grob/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,testn/FrameworkBenchmarks,testn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zloster/FrameworkBenchmarks,s
xend/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,actframework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Verber/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,torhve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,joshk/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,methane/FrameworkBenchmarks,dmacd/FB-try1,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,dmacd/FB-try1,dmacd/FB-try1,saturday06/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greg-hellings
/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,leafo/FrameworkBenchmarks,actframework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,joshk/FrameworkBenchmarks,dmacd/FB-try1,julienschmidt/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,khellang/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,dmacd/FB-try1,nathana1/FrameworkBenchmarks,methane/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,khellang/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,doom369/FrameworkBenchmarks,
waiteb3/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,doom369/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sgml/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,testn/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Verber/FrameworkBenchmarks,leafo/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,actframework/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Verber/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,grob/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zapov/FrameworkBenchmarks,herloct/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jamming/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,ashawnbandy-
te-tfb/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,testn/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,dmacd/FB-try1,youprofit/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,denkab/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,leafo/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Verber/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,grob/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,torhve/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,leafo/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Verber/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,grob/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,doom369/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,methane/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,grob/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,leafo/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,gr
eenlaw110/FrameworkBenchmarks,leafo/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,valyala/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,joshk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zapov/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,doom369/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,methane/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sgml/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,khellang/FrameworkBenchmarks,testn/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zapov/Framew
orkBenchmarks,sgml/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,testn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Verber/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,methane/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,leafo/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,leafo/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zloster/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,joshk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Rayne/FrameworkBenchmarks | flask/app.py 
| flask/app.py | from flask import Flask, jsonify, request
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy import create_engine
from random import randint
try:
import MySQLdb
mysql_schema = "mysql:"
except ImportError:
mysql_schema = "mysql+pymysql:"
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = mysql_schema + '//benchmarkdbuser:benchmarkdbpass@DBHOSTNAME:3306/hello_world'
db = SQLAlchemy(app)
dbraw_engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
class World(db.Model):
__tablename__ = "World"
id = db.Column(db.Integer, primary_key=True)
randomNumber = db.Column(db.Integer)
# http://stackoverflow.com/questions/7102754/jsonify-a-sqlalchemy-result-set-in-flask
@property
def serialize(self):
"""Return object data in easily serializeable format"""
return {
'id' : self.id,
'randomNumber': self.randomNumber
}
@app.route("/json")
def hello():
resp = {"message": "Hello, World!"}
return jsonify(resp)
@app.route("/db")
def get_random_world():
num_queries = request.args.get("queries", 1)
worlds = []
for i in range(int(num_queries)):
wid = randint(1, 10000)
worlds.append(World.query.get(wid).serialize)
return jsonify(worlds=worlds)
@app.route("/dbs")
def get_random_world_single():
wid = randint(1, 10000)
worlds = [World.query.get(wid).serialize]
return jsonify(worlds=worlds)
@app.route("/dbraw")
def get_random_world_raw():
connection = dbraw_engine.connect()
num_queries = request.args.get("queries", 1)
worlds = []
for i in range(int(num_queries)):
wid = randint(1, 10000)
result = connection.execute("SELECT * FROM world WHERE id = " + str(wid)).fetchone()
worlds.append({'id': result[0], 'randomNumber': result[1]})
connection.close()
return jsonify(worlds=worlds)
@app.route("/dbsraw")
def get_random_world_single_raw():
connection = dbraw_engine.connect()
wid = randint(1, 10000)
result = connection.execute("SELECT * FROM world WHERE id = " + str(wid)).fetchone()
worlds = [{'id': result[0], 'randomNumber': result[1]}]
connection.close()
return jsonify(worlds=worlds)
if __name__ == "__main__":
app.run()
| from flask import Flask, jsonify, request
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy import create_engine
from random import randint
try:
import MySQLdb
mysql_schema = "mysql:"
except ImportError:
mysql_schema = "mysql+pymysql:"
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = mysql_schema + '//benchmarkdbuser:benchmarkdbpass@localhost:3306/hello_world'
db = SQLAlchemy(app)
dbraw_engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
class World(db.Model):
__tablename__ = "World"
id = db.Column(db.Integer, primary_key=True)
randomNumber = db.Column(db.Integer)
# http://stackoverflow.com/questions/7102754/jsonify-a-sqlalchemy-result-set-in-flask
@property
def serialize(self):
"""Return object data in easily serializeable format"""
return {
'id' : self.id,
'randomNumber': self.randomNumber
}
@app.route("/json")
def hello():
resp = {"message": "Hello, World!"}
return jsonify(resp)
@app.route("/db")
def get_random_world():
num_queries = request.args.get("queries", 1)
worlds = []
for i in range(int(num_queries)):
wid = randint(1, 10000)
worlds.append(World.query.get(wid).serialize)
return jsonify(worlds=worlds)
@app.route("/dbs")
def get_random_world_single():
wid = randint(1, 10000)
worlds = [World.query.get(wid).serialize]
return jsonify(worlds=worlds)
@app.route("/dbraw")
def get_random_world_raw():
connection = dbraw_engine.connect()
num_queries = request.args.get("queries", 1)
worlds = []
for i in range(int(num_queries)):
wid = randint(1, 10000)
result = connection.execute("SELECT * FROM world WHERE id = " + str(wid)).fetchone()
worlds.append({'id': result[0], 'randomNumber': result[1]})
connection.close()
return jsonify(worlds=worlds)
@app.route("/dbsraw")
def get_random_world_single_raw():
connection = dbraw_engine.connect()
wid = randint(1, 10000)
result = connection.execute("SELECT * FROM world WHERE id = " + str(wid)).fetchone()
worlds = [{'id': result[0], 'randomNumber': result[1]}]
connection.close()
return jsonify(worlds=worlds)
if __name__ == "__main__":
app.run()
| bsd-3-clause | Python |
dcf49377defe8ce3debadc8d0c09b683663f7b73 | Add migration for the new PrivateAPIKey model (Bravo Nils !) | UrLab/incubator,UrLab/incubator,UrLab/incubator,UrLab/incubator | space/migrations/0011_privateapikey.py | space/migrations/0011_privateapikey.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import uuid
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('space', '0010_auto_20151129_2322'),
]
operations = [
migrations.CreateModel(
name='PrivateAPIKey',
fields=[
('key', models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name='Clef', editable=False, serialize=False)),
('name', models.CharField(max_length=250, verbose_name='Utilisée pour')),
('active', models.BooleanField(default=False)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='Utilisateur')),
],
options={
'verbose_name_plural': "Clefs d'accès à l'API privée",
'verbose_name': "Clef d'accès à l'API privée",
},
),
]
| agpl-3.0 | Python | |
0460c3d8e25e6f060b42e19bdfdd8213e02d6c7b | Add Exercise 9.11. | skidzo/pydy,skidzo/pydy,Shekharrajak/pydy,skidzo/pydy,Shekharrajak/pydy,jcrist/pydy,oliverlee/pydy,Shekharrajak/pydy,skidzo/pydy,oliverlee/pydy,jcrist/pydy,oliverlee/pydy,Shekharrajak/pydy,jcrist/pydy,jcrist/pydy,jcrist/pydy,jcrist/pydy,jcrist/pydy | Kane1985/Chapter5/Ex9.11.py | Kane1985/Chapter5/Ex9.11.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.11 from Kane 1985."""
from __future__ import division
from sympy import Dummy
from sympy import collect, expand, sin, cos, symbols
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dynamicsymbols
from util import msprint, subs, partial_velocities
from util import generalized_active_forces, potential_energy
from util import generalized_active_forces_V
q = dynamicsymbols('q')
qd = dynamicsymbols('q', level=1)
u = dynamicsymbols('u')
L, L_prime, m, g = symbols('L L\' m g', real=True, positive=True)
# reference frames
N = ReferenceFrame('N')
# N.x points of the the plane of Figure P9.11, N.z points upward.
A = N.orientnew('A', 'axis', [q, N.x])
# define points
pO = Point('O') # point O is where the pendulum attaches to the ceiling
pP = pO.locatenew('P', -L * A.z) # mass center of the pendulum
pP.set_vel(N, pP.pos_from(pO).dt(N))
# kinematic differential equations
kde_map = {qd: u}
# forces
k = 5*m*g/L
r = (L_prime + L*sin(q))*N.y + (L - L*cos(q))*N.z
forces = [(pP, -m*g*N.z), (pP, -k*(r.magnitude() - L_prime)*r.normalize())]
partials = partial_velocities(zip(*forces)[0], [u], N, kde_map)
Fr, _ = generalized_active_forces(partials, forces)
# use a dummy symbol since series() does not work with dynamicsymbols
print('part a')
_q = Dummy('q')
terms = Fr[0].subs(q, _q).series(_q, n=4).removeO().subs(_q, q)
print('Using a series approximation of order 4:')
print('F1 ≈ {0}'.format(msprint(collect(terms, m*g*L))))
V = potential_energy([terms], [q], [u], kde_map)
print('V = {0}'.format(msprint(V)))
print('Setting C = 0, α1 = 0')
V = V.subs(dict(zip(symbols('C α1'), [0, 0])))
print('V = {0}'.format(msprint(collect(V, m*g*L))))
V_expected = m*g*L*(0*q + 3*q**2 + 0*q**3 + -7*q**4/8)
assert expand(V - V_expected) == 0
print('\npart b')
Fr_expected = m*g*L*(-6*q + 0*q**2 + 7*q**3/2)
print('Fr using V')
Fr_V = generalized_active_forces_V(V, [q], [u], kde_map)
print('F1_V = {0}'.format(msprint(collect(Fr_V[0], m*g*L))))
assert expand(Fr_V[0] - Fr_expected) == 0
print('Fr not using V, as calculated in part a')
print('F1 = {0}'.format(msprint(collect(terms, m*g*L))))
assert expand(terms - Fr_expected) == 0
| bsd-3-clause | Python | |
bd080d7cb36c5471ba1d13700f0a1a4debb07b82 | add tests for ext changes | TheTrain2000/async2rewrite | async2rewrite/tests/test_ext_changes.py | async2rewrite/tests/test_ext_changes.py | import async2rewrite
def test_remove_pass_context_true():
converted_code = async2rewrite.from_text("@bot.command(pass_context=True)\nasync def test(ctx):\n pass")
assert converted_code == "@bot.command()\nasync def test(ctx):\n pass"
def test_dont_remove_pass_context_false():
converted_code = async2rewrite.from_text("@bot.command(pass_context=False)\nasync def test():\n pass")
assert converted_code == "@bot.command()\nasync def test(ctx):\n pass"
def test_say_to_send():
converted_code = async2rewrite.from_text("bot.say('Test')")
assert converted_code == "ctx.send('Test')"
def test_shortcut_author():
converted_code = async2rewrite.from_text("ctx.message.author")
assert converted_code == "ctx.author"
def test_shortcut_guild():
converted_code = async2rewrite.from_text("ctx.message.server")
assert converted_code == "ctx.guild"
def test_shortcut_channel():
converted_code = async2rewrite.from_text("ctx.message.channel")
assert converted_code == "ctx.channel"
def test_shortcut_me():
converted_code = async2rewrite.from_text("ctx.message.server.me")
assert converted_code == "ctx.me"
| mit | Python | |
f4628678066c72309d3fd121af1aaf54d9905ca3 | Make sure xla_client is always imported before TPU client extension. | frreiss/tensorflow-fred,freedomtan/tensorflow,freedomtan/tensorflow,karllessard/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow,freedomtan/tensorflow,aldian/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,annarev/tensorflow,aam-at/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,sarvex/tensorflow,frreiss/tensorflow-fred,cxxgtxy/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,davidzchen/tensorflow,cxxgtxy/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,annarev/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow
-fred,frreiss/tensorflow-fred,frreiss/tensorflow-fred,davidzchen/tensorflow,cxxgtxy/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,aam-at/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,aldian/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,freedomtan/tensorflow,Intel-tensorflow/tensorflow,sarvex/tensorflow,petewarden/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,sarvex/tensorflow,petewarden/tensorflow,cxxgtxy/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,petewarden/tensorflow,cxxgtxy/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,tensorflow/tensorflow,annarev/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,davidzchen/tensorflow,cxxgtxy/tensorflow,aam-at/tensorflow,aam-at/tensorflow,aam-at/tensorflow,cxxgtxy/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,karllessard/tensorflow,annarev/tensorflow,annarev/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,aldian/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,sarvex/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,annarev/tensorflow,davidzchen/tensorflow,yongtang/tensorflow,annarev/
tensorflow,Intel-Corporation/tensorflow,annarev/tensorflow,sarvex/tensorflow,frreiss/tensorflow-fred,aldian/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aldian/tensorflow,paolodedios/tensorflow,aldian/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,aldian/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,davidzchen/tensorflow,annarev/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,petewarden/tensorflow,aam-at/tensorflow,sarvex/tensorflow,aldian/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow | tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.py | tensorflow/compiler/xla/python/tpu_driver/client/tpu_client.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""XLA LocalClient interface for interacting with TPUs via the TPU driver."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
# Import xla_client to load shared C++ extensions (just CompileOptions at the
# time of writing).
from tensorflow.compiler.xla.python import xla_client # pylint: disable=unused-import
from tensorflow.compiler.xla.python.tpu_driver.client import tpu_client_extension as _tpu_client
class TpuBackend(object):
"""XLA backend implemented using the Tpu driver API."""
# Cache the backends to prevent double driver initializations.
_local_backend = None
@staticmethod
def create(worker=None, force=False):
"""Constructs a Cloud TPU backend."""
# `force` == True will skip caching any backends (if applicable) and will
# always try to create a new client.
if worker is None:
raise ValueError(
'Failed to create TpuBackend. The `worker` parameter must not be '
'`None`. Use `local` to connect to a local TPU or '
'`grpc://host:port` to connect to a remote TPU.')
if worker == 'local' or 'local://' in worker:
# We usually want to cache for local backends to prevent double
# initialization, except where `force` == True.
if worker == 'local':
worker = 'local://'
if force:
return _tpu_client.TpuClient.Get(worker)
if TpuBackend._local_backend is None:
logging.info('Starting the local TPU driver.')
TpuBackend._local_backend = _tpu_client.TpuClient.Get(worker)
return TpuBackend._local_backend
else:
# We do not cache for non-local backends.
return _tpu_client.TpuClient.Get(worker)
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""XLA LocalClient interface for interacting with TPUs via the TPU driver."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
from tensorflow.compiler.xla.python.tpu_driver.client import tpu_client_extension as _tpu_client
class TpuBackend(object):
"""XLA backend implemented using the Tpu driver API."""
# Cache the backends to prevent double driver initializations.
_local_backend = None
@staticmethod
def create(worker=None, force=False):
"""Constructs a Cloud TPU backend."""
# `force` == True will skip caching any backends (if applicable) and will
# always try to create a new client.
if worker is None:
raise ValueError(
'Failed to create TpuBackend. The `worker` parameter must not be '
'`None`. Use `local` to connect to a local TPU or '
'`grpc://host:port` to connect to a remote TPU.')
if worker == 'local' or 'local://' in worker:
# We usually want to cache for local backends to prevent double
# initialization, except where `force` == True.
if worker == 'local':
worker = 'local://'
if force:
return _tpu_client.TpuClient.Get(worker)
if TpuBackend._local_backend is None:
logging.info('Starting the local TPU driver.')
TpuBackend._local_backend = _tpu_client.TpuClient.Get(worker)
return TpuBackend._local_backend
else:
# We do not cache for non-local backends.
return _tpu_client.TpuClient.Get(worker)
| apache-2.0 | Python |
e78e56e38dfa7bfac79bc3a699ca76236d700e2a | Add tests creation of grammar with terminals as parameters | PatrikValkovic/grammpy | tests/grammar_term-nonterm_test/TerminalAddWhenCreatingTest.py | tests/grammar_term-nonterm_test/TerminalAddWhenCreatingTest.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase
from grammpy import Grammar
from grammpy.Terminal import Terminal
class TerminalAddWhenCreatingTest(TestCase):
def test_addOneInArray(self):
gr = Grammar(terminals=['A'])
self.assertTrue(gr.have_term('A'))
def test_addTwoInArray(self):
gr = Grammar(terminals=['A', 0])
self.assertTrue(gr.have_term('A'))
self.assertTrue(gr.have_term(0))
self.assertTrue(gr.have_term([0, 'A']))
def test_addOneSeparate(self):
gr = Grammar(terminals='A')
self.assertTrue(gr.have_term('A'))
def test_addThreeInString(self):
gr = Grammar(terminals='ABC')
self.assertTrue(gr.have_term('A'))
self.assertTrue(gr.have_term('B'))
self.assertTrue(gr.have_term('C'))
self.assertTrue(gr.have_term(('A','B','C')))
self.assertFalse(gr.have_term('D'))
def test_addThreeInTuple(self):
gr = Grammar(terminals=('A', 'B', 'C'))
self.assertTrue(gr.have_term('A'))
self.assertTrue(gr.have_term('B'))
self.assertTrue(gr.have_term('C'))
self.assertTrue(gr.have_term(['A', 'B', 'C']))
self.assertFalse(gr.have_term('D'))
def test_addThreeOneDelete(self):
gr = Grammar(terminals=('A', 'B', 'C'))
self.assertTrue(gr.have_term('A'))
self.assertTrue(gr.have_term('B'))
self.assertTrue(gr.have_term('C'))
self.assertTrue(gr.have_term(['A', 'B', 'C']))
self.assertFalse(gr.have_term('D'))
gr.remove_term('B')
self.assertTrue(gr.have_term('A'))
self.assertFalse(gr.have_term('B'))
self.assertTrue(gr.have_term('C'))
self.assertTrue(gr.have_term(['A', 'C']))
self.assertFalse(gr.have_term('D'))
| mit | Python | |
7d604af457a17fdf6cb433ce46a3f691ad566658 | Implement client side component of b0wser | Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,hackerberry/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe | ooni/plugins/b0wser.py | ooni/plugins/b0wser.py | """
This is a self genrated test created by scaffolding.py.
you will need to fill it up with all your necessities.
Safe hacking :).
"""
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.internet import protocol, endpoints
from ooni.plugoo.tests import ITest, OONITest
from ooni.plugoo.assets import Asset
from ooni.protocols import b0wser
from ooni.utils import log
class B0wserClientProtocol(b0wser.B0wserProtocol):
def connectionMade(self):
self.next_state()
def connectionLost(self, reason):
print "LOST!"
class B0wserClientFactory(protocol.ClientFactory):
protocol = B0wserClientProtocol
mutator = None
steps = None
def buildProtocol(self, addr):
p = self.protocol()
p.factory = self
if self.steps:
p.steps = self.steps
if not self.mutator:
self.mutator = b0wser.Mutator(p.steps)
p.mutator = self.mutator
else:
print "Moving on to next mutation"
self.mutator.next_mutation()
return p
def clientConnectionFailed(self, reason):
print "We failed connecting the the OONIB"
print "Cannot perform test. Perhaps it got blocked?"
print "Please report this to tor-assistants@torproject.org"
def clientConnectionLost(self, reason):
print "Connection Lost."
class b0wserArgs(usage.Options):
optParameters = [['pcap', 'f', None, 'PCAP file to take as input'],
['host', 'h', None, 'Target Hostname'],
['port', 'p', None, 'Target port number'],
['resume', 'r', 0, 'Resume at this index']]
class b0wserTest(OONITest):
implements(IPlugin, ITest)
shortName = "b0wser"
description = "b0wser"
requirements = None
options = b0wserArgs
blocking = False
def initialize(self):
#pass
self.factory = B0wserClientFactory()
def experiment(self, args):
steps = b0wser.get_b0wser_dictionary_from_pcap(self.local_options['pcap'])
print steps
self.factory.steps = steps
host = self.local_options['host']
port = int(self.local_options['port'])
log.msg("Connecting to %s:%s" % (host, port))
endpoint = endpoints.TCP4ClientEndpoint(self.reactor, host, port)
return endpoint.connect(self.factory)
#return endpoint.connect(B0wserClientFactory)
def load_assets(self):
return {}
# We need to instantiate it otherwise getPlugins does not detect it
# XXX Find a way to load plugins without instantiating them.
b0wsertest = b0wserTest(None, None, None)
| bsd-2-clause | Python | |
0fc65182b269de4975eba9060a1afe7a5ecacd67 | add empty shell for tests | patochectp/navitia,patochectp/navitia,xlqian/navitia,pbougue/navitia,antoine-de/navitia,kadhikari/navitia,antoine-de/navitia,xlqian/navitia,xlqian/navitia,Tisseo/navitia,pbougue/navitia,CanalTP/navitia,CanalTP/navitia,Tisseo/navitia,Tisseo/navitia,kinnou02/navitia,kinnou02/navitia,pbougue/navitia,Tisseo/navitia,CanalTP/navitia,kadhikari/navitia,Tisseo/navitia,xlqian/navitia,kinnou02/navitia,kadhikari/navitia,patochectp/navitia,antoine-de/navitia,CanalTP/navitia,pbougue/navitia,xlqian/navitia,patochectp/navitia,kinnou02/navitia,CanalTP/navitia,antoine-de/navitia,kadhikari/navitia | source/jormungandr/tests/schema_tests.py | source/jormungandr/tests/schema_tests.py | # Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from tests.tests_mechanism import dataset, AbstractTestFixture
@dataset({"main_autocomplete_test": {}})
class TestAutocomplete(AbstractTestFixture):
"""
Test swagger schema
"""
def test_swagger(self):
"""
Test the global schema
"""
response = self.query("v1/schema")
assert response.get('info')
#TODO!
| agpl-3.0 | Python | |
a32b033b8c30fa038ce8845333c1560059475f39 | Add regression test for #4002 | spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy | spacy/tests/regression/test_issue4002.py | spacy/tests/regression/test_issue4002.py | # coding: utf8
from __future__ import unicode_literals
import pytest
from spacy.matcher import PhraseMatcher
from spacy.tokens import Doc
@pytest.mark.xfail
def test_issue4002(en_vocab):
"""Test that the PhraseMatcher can match on overwritten NORM attributes.
"""
matcher = PhraseMatcher(en_vocab, attr="NORM")
pattern1 = Doc(en_vocab, words=["c", "d"])
assert [t.norm_ for t in pattern1] == ["c", "d"]
matcher.add("TEST", None, pattern1)
doc = Doc(en_vocab, words=["a", "b", "c", "d"])
assert [t.norm_ for t in doc] == ["a", "b", "c", "d"]
matches = matcher(doc)
assert len(matches) == 1
matcher = PhraseMatcher(en_vocab, attr="NORM")
pattern2 = Doc(en_vocab, words=["1", "2"])
pattern2[0].norm_ = "c"
pattern2[1].norm_ = "d"
assert [t.norm_ for t in pattern2] == ["c", "d"]
matcher.add("TEST", None, pattern2)
matches = matcher(doc)
assert len(matches) == 1
| mit | Python | |
03f2440374a69aa9194891b0e8a9cbf0355b034f | Rename the former test settings file to be 'test_settings'. This settings file is used to run unit tests. | penzance/ab-testing-tool,penzance/ab-testing-tool,penzance/ab-testing-tool,penzance/ab-testing-tool | ab_testing_tool/settings/test_settings.py | ab_testing_tool/settings/test_settings.py | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
STATIC_ROOT = normpath(join(SITE_ROOT, 'http_static'))
LOGGING = {
'version': 1,
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
},
},
'loggers': {
'ab_tool': {
'handlers':['null'],
'propagate': True,
'level':'DEBUG',
},
'error_middleware': {
'handlers':['null'],
'propagate': True,
'level':'DEBUG',
},
}
}
| mit | Python | |
8f8a1b4d3613908ed7b26af04ec83f27f1f095fb | add solution for Summary Ranges | zhyu/leetcode,zhyu/leetcode | algorithms/summaryRanges/summaryRanges.py | algorithms/summaryRanges/summaryRanges.py | class Solution:
# @param {integer[]} nums
# @return {string[]}
def summaryRanges(self, nums):
res = []
for num in nums:
if not res or num-res[-1][-1] > 1:
res += [],
res[-1][1:] = num,
return ['->'.join(map(str, r)) for r in res]
| mit | Python | |
e97d98aba638cd58d9f0822cf486aad7f85c23c6 | Add 04-nodes.py | dustalov/watset,dustalov/watset | 04-nodes.py | 04-nodes.py | #!/usr/bin/env python
import csv
import sys
synsets, index = {}, {}
with open('03-cw.txt') as f:
reader = csv.reader(f, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
synsets[int(row[0])] = [word for word in row[2].split(', ') if word]
for word in row[2].split(', '):
if word:
index[word] = int(row[0])
with open('02-edges.txt') as fr, open('04-edges-pre.txt', 'w', newline='') as fw:
writer = csv.writer(fw, dialect='excel-tab', lineterminator='\n')
for line in fr:
word1, word2, weight = line.rstrip().split('\t', 2)
if word1 in index and word2 in index and index[word1] != index[word2]:
writer.writerow((index[word1], index[word2], weight))
with open('04-nodes.csv', 'w', newline='') as f:
writer = csv.writer(f, dialect='excel', lineterminator='\n')
writer.writerow(('id', 'label'))
for sid, words in synsets.items():
label = ', '.join(words[:3])
if len(words) > 3:
label += ', ...'
writer.writerow((sid, '%s: %s' % (sid, label)))
| mit | Python | |
f46bd0f572698689fdef6b777cecd8219c542ef5 | Create 20_lines.py | johnowhitaker/turtle-frac | 20_lines.py | 20_lines.py | #Same as fractastic.py but squeezed down to 20 lines
import turtle
max_iterations = 500
bob = turtle.Turtle()
bob.speed(0)
def mandel(zx, zy):
z = zx + zy * 1j
c = z
for i in xrange(max_iterations):
if abs(z) > 2.0: break
z = z * z + c
return i
def draw_plot(xstart, ystart, xsize, ysize, xa, xb, ya, yb):
for y in range(ystart, ystart+ysize):
#go to the beginning of a line
bob.penup()
bob.goto(xstart, y)
bob.pendown()
#go along, changing colour depending on mandel(x, y)
for x in range(xstart, xstart+xsize):
m = 1000*(1-mandel((x * (xb - xa) / xsize + xa),(y * (yb - ya) / ysize + ya))/float(max_iterations))
bob.color((m-(10*int(m/10)))*0.1, (m-(100*int(m/100)))*0.01, (m-int(m/1000))/1000)
bob.forward(1)
#The whole set:
draw_plot(0, 0, 100, 100, -2.0, 1.0, -1.5, 1.5)
#Zoomed in a bit:
#draw_plot(100, 0, 100, 100, -0.5, 0.25, -0.375, 0.375)
#Nice, really zoomed in:
#draw_plot(-300, -300, 500, 500, -0.2034, -0.1992, -0.8163, -0.8127)
| mit | Python | |
a2216c5a8626fb7d18e9c1f16870123aabbde389 | Fix so MindReader can player against itself | mojones/Axelrod,drvinceknight/Axelrod,risicle/Axelrod,uglyfruitcake/Axelrod,bootandy/Axelrod,uglyfruitcake/Axelrod,emmagordon/Axelrod,mojones/Axelrod,risicle/Axelrod,kathryncrouch/Axelrod,kathryncrouch/Axelrod,emmagordon/Axelrod,bootandy/Axelrod | axelrod/strategies/mindreader.py | axelrod/strategies/mindreader.py | from axelrod import Player, RoundRobin, Game
import copy
import inspect
class MindReader(Player):
    """A player that looks ahead at what the opponent will do and decides what to do."""

    name = 'Mind Reader'
    # upper bound on look-ahead depth (exposed as a class attribute;
    # not referenced by the methods below)
    max_look_ahead = 200

    def strategy(self, opponent):
        """Pretends to play the opponent 50 times before each match.
        The primary purpose is to look far enough ahead to see if a defect will be punished by the opponent.
        If the MindReader attempts to play itself (or another similar strategy), then it will cause a recursion loop, so this is also handled in this method, by defecting if the method is called by strategy
        """
        # Inspect the call stack: if we were invoked from another player's
        # strategy()/simulate_match(), we are inside a simulation of
        # ourselves -- defect immediately to break the mutual recursion.
        curframe = inspect.currentframe()
        calframe = inspect.getouterframes(curframe, 2)
        calname = calframe[1][3]
        if calname in ('strategy', 'simulate_match'):
            return 'D'
        best_strategy = self.look_ahead(opponent)
        return best_strategy

    def simulate_match(self, opponent, strategy, rounds = 10):
        """Simulates a number of matches."""
        # Play ``strategy`` unconditionally for ``rounds`` turns, letting the
        # opponent respond; both histories are mutated in place.
        for match in range(rounds):
            play_1, play_2 = strategy, opponent.strategy(self)
            self.history.append(play_1)
            opponent.history.append(play_2)

    def look_ahead(self, opponent, rounds = 10):
        """Plays a number of rounds to determine the best strategy."""
        results = []
        game = Game()
        round_robin = RoundRobin(players=[self, opponent], game=game, turns=rounds)
        strategies = ['C', 'D']
        # snapshot both histories so each simulated match can be rolled back
        dummy_history_self = copy.copy(self.history)
        dummy_history_opponent = copy.copy(opponent.history)
        for strategy in strategies:
            self.simulate_match(opponent, strategy, rounds)
            results.append(round_robin.calculate_scores(self, opponent)[0])
            self.history = copy.copy(dummy_history_self)
            opponent.history = copy.copy(dummy_history_opponent)
        # picks the strategy with the minimal score -- NOTE(review): confirm
        # that lower is better in RoundRobin.calculate_scores' scheme
        return strategies[results.index(min(results))]
class ProtectedMindReader(MindReader):
    """A player that looks ahead at what the opponent will do and decides what to do.
    It is also protected from mind control strategies"""

    name = 'Protected Mind Reader'

    def __setattr__(self, name, val):
        """Stops any other strategy altering the methods of this class """
        # Silently drop assignments to .strategy (the "mind control" vector);
        # everything else is stored normally in the instance dict.
        if name == 'strategy':
            pass
        else:
            self.__dict__[name] = val
| from axelrod import Player, RoundRobin, Game
import copy
import inspect
class MindReader(Player):
"""A player that looks ahead at what the opponent will do and decides what to do."""
name = 'Mind Reader'
def strategy(self, opponent):
"""Pretends to play the opponent 50 times before each match.
The primary purpose is to look far enough ahead to see if a defect will be punished by the opponent.
If the MindReader attempts to play itself (or another similar strategy), then it will cause a recursion loop, so this is also handeled in this method, by defecting if the method is called by strategy
"""
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
calname = calframe[1][3]
if calname == 'strategy':
return 'D'
best_strategy = self.look_ahead(opponent)
return best_strategy
def simulate_match(self, opponent, strategy, rounds = 10):
"""Simulates a number of matches."""
for match in range(rounds):
play_1, play_2 = strategy, opponent.strategy(self)
self.history.append(play_1)
opponent.history.append(play_2)
def look_ahead(self, opponent, rounds = 10):
"""Plays a number of rounds to determine the best strategy."""
results = []
game = Game()
round_robin = RoundRobin(players=[self, opponent], game=game, turns=rounds)
strategies = ['C', 'D']
dummy_history_self = copy.copy(self.history)
dummy_history_opponent = copy.copy(opponent.history)
for strategy in strategies:
self.simulate_match(opponent, strategy, rounds)
results.append(round_robin.calculate_scores(self, opponent)[0])
self.history = copy.copy(dummy_history_self)
opponent.history = copy.copy(dummy_history_opponent)
return strategies[results.index(min(results))]
class ProtectedMindReader(MindReader):
"""A player that looks ahead at what the opponent will do and decides what to do.
It is also protected from mind control strategies"""
name = 'Protected Mind Reader'
def __setattr__(self, name, val):
"""Stops any other strategy altering the methods of this class """
if name == 'strategy':
pass
else:
self.__dict__[name] = val
| mit | Python |
7f2a598f70ac45af00395faa64e91e51de0cab47 | Update storage tests | b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril | tests/laser/state/storage_test.py | tests/laser/state/storage_test.py | import pytest
from mythril.laser.smt import symbol_factory
from mythril.laser.ethereum.state.account import Storage
from mythril.laser.smt import Expression
BVV = symbol_factory.BitVecVal
storage_uninitialized_test_data = [({}, 1), ({1: 5}, 2), ({1: 5, 3: 10}, 2)]
@pytest.mark.parametrize("initial_storage,key", storage_uninitialized_test_data)
def test_concrete_storage_uninitialized_index(initial_storage, key):
    """A concrete Storage reads back 0 for any never-written slot."""
    # Arrange
    storage = Storage(concrete=True)
    for k, val in initial_storage.items():
        storage[BVV(k, 256)] = BVV(val, 256)
    # Act
    value = storage[BVV(key, 256)]
    # Assert
    assert value == 0
@pytest.mark.parametrize("initial_storage,key", storage_uninitialized_test_data)
def test_symbolic_storage_uninitialized_index(initial_storage, key):
    """A symbolic Storage yields an SMT Expression for never-written slots."""
    # Arrange
    storage = Storage(concrete=False)
    for k, val in initial_storage.items():
        storage[BVV(k, 256)] = BVV(val, 256)
    # Act
    value = storage[BVV(key, 256)]
    # Assert
    assert isinstance(value, Expression)
def test_storage_set_item():
    """Writing a slot makes the same value readable back."""
    # Arrange
    storage = Storage()
    # Act
    storage[BVV(1, 256)] = BVV(13, 256)
    # Assert
    assert storage[BVV(1, 256)] == BVV(13, 256)
def test_storage_change_item():
    """Overwriting an existing slot replaces the stored value."""
    # Arrange
    storage = Storage()
    storage[BVV(1, 256)] = BVV(12, 256)
    # Act
    storage[BVV(1, 256)] = BVV(14, 256)
    # Assert
    assert storage[BVV(1, 256)] == BVV(14, 256)
| mit | Python | |
629553ec992c59500ef64b04b8fc9fb0500bcaee | Add tests for cookie verification | pjotrp/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2 | wqflask/tests/wqflask/test_user_session.py | wqflask/tests/wqflask/test_user_session.py | """Test cases for some methods in user_session.py"""
import unittest
from wqflask.user_session import verify_cookie
class TestUserSession(unittest.TestCase):
    """Unit tests for helpers in wqflask.user_session."""

    def test_verify_cookie(self):
        """
        Test cookie verification
        """
        # Cookie format is "<uuid>:<signature>"; verify_cookie is expected
        # to return the uuid part once the signature checks out.
        self.assertEqual(
            "3f4c1dbf-5b56-4260-87d6-f35445bda37e",
            verify_cookie(("3f4c1dbf-5b56-4260-87d6-"
                           "f35445bda37e:af4fcf5eace9e7c864ce")))
| agpl-3.0 | Python | |
f4e2135f23f768da2923b48b4d3567071c386148 | Add Google backend | comandrei/django-template-shortcuts | template_shortcuts/providers/google.py | template_shortcuts/providers/google.py | from provider import CDNProvider
class Google(CDNProvider):
    """CDN provider for Google Hosted Libraries.

    Each method returns the protocol-relative URL for the named library at
    the requested version, via CDNProvider._build_js_url.
    """

    # placeholders: library name, version, file name
    base_url = "//ajax.googleapis.com/ajax/libs/%s/%s/%s"

    def angular(self, version):
        return self._build_js_url("angularjs", version, "angular")

    def chrome_frame(self, version):
        return self._build_js_url("chrome-frame", version, "CFInstall")

    def jquery(self, version):
        return self._build_js_url("jquery", version)

    def ext_core(self, version):
        return self._build_js_url("ext-core", version)

    def jquery_ui(self, version):
        return self._build_js_url("jqueryui", version, "jquery-ui")

    def mootools(self, version):
        return self._build_js_url("mootools", version)

    def prototype(self, version):
        return self._build_js_url("prototype", version)

    def scriptaculos(self, version):
        # NOTE(review): "scriptaculos" is likely a misspelling of
        # "scriptaculous" (both method name and CDN path) -- confirm
        # against the Google Hosted Libraries catalogue before renaming,
        # since the method name is public API.
        return self._build_js_url("scriptaculos", version)

    def swfobject(self, version):
        return self._build_js_url("swfobject", version)

    def webfont(self, version):
        return self._build_js_url("webfont", version)
| bsd-3-clause | Python | |
8957ad97b560251e02c857683a5b77301ee49293 | add performance test for a few client server queries | storborg/manhattan | manhattan/tests/perf/test_clientserver.py | manhattan/tests/perf/test_clientserver.py | import time
from manhattan.client import Client
client = Client()
def test(f, trials=500):
start = time.time()
for ii in range(trials):
f()
end = time.time()
elapsed = end - start
print ("Ran %d trials, %0.2f ms each" %
(trials, ((1000. * elapsed) / trials)))
def get_results():
    # One round trip to the manhattan server for a named test's results page.
    client.test_results(u'Discount Rate for Datrek')
def get_tests():
    # One round trip to the manhattan server for the full tests listing.
    client.tests()
# Benchmark both client/server query kinds (Python 2 print statements).
if __name__ == '__main__':
    print "Testing tests list."
    test(get_tests)
    print "Testing resuls page."
    test(get_results)
| mit | Python | |
d3712e9f567ba0924810768e5c6e1d208fd99816 | Add visualize_filters.py | aidiary/keras_examples,aidiary/keras_examples | cnn/cifar10/visualize_filters.py | cnn/cifar10/visualize_filters.py | import os
import json
import matplotlib.pyplot as plt
from keras.models import model_from_json
if __name__ == '__main__':
    model_file = os.path.join('result', 'model.json')
    weight_file = os.path.join('result', 'model.h5')
    # rebuild the architecture from JSON, then load the trained weights
    with open(model_file, 'r') as fp:
        model = model_from_json(fp.read())
    model.load_weights(weight_file)
    model.summary()
    # first layer's kernel, reordered to (filters, channels, rows, cols)
    W = model.layers[0].get_weights()[0]
    W = W.transpose(3, 2, 0, 1)
    print(W.shape)
    nb_filter, nb_channel, nb_row, nb_col = W.shape
    # one subplot per filter on a fixed 4x8 grid
    # NOTE(review): im is channels-first (nb_channel, nb_row, nb_col) while
    # imshow expects (rows, cols, channels) -- confirm this renders as intended
    plt.figure()
    for i in range(nb_filter):
        im = W[i]
        plt.subplot(4, 8, i + 1)
        plt.axis('off')
        plt.imshow(im)
    plt.show()
| mit | Python | |
4d30dd240e3b66cf3c35bbac24a2ae6043a7a081 | Add accuracy test | ttakamura/chainer,rezoo/chainer,benob/chainer,chainer/chainer,AlpacaDB/chainer,pfnet/chainer,wkentaro/chainer,chainer/chainer,benob/chainer,kikusu/chainer,woodshop/complex-chainer,kashif/chainer,okuta/chainer,ikasumi/chainer,ysekky/chainer,chainer/chainer,minhpqn/chainer,masia02/chainer,okuta/chainer,okuta/chainer,woodshop/chainer,t-abe/chainer,muupan/chainer,kuwa32/chainer,muupan/chainer,hvy/chainer,niboshi/chainer,jnishi/chainer,ronekko/chainer,ktnyt/chainer,sou81821/chainer,tkerola/chainer,hidenori-t/chainer,hvy/chainer,cupy/cupy,aonotas/chainer,chainer/chainer,nushio3/chainer,ytoyama/yans_chainer_hackathon,tigerneil/chainer,wavelets/chainer,hvy/chainer,sinhrks/chainer,cemoody/chainer,cupy/cupy,anaruse/chainer,truongdq/chainer,okuta/chainer,keisuke-umezawa/chainer,1986ks/chainer,kikusu/chainer,ttakamura/chainer,tscohen/chainer,truongdq/chainer,delta2323/chainer,tereka114/chainer,wkentaro/chainer,niboshi/chainer,kiyukuta/chainer,nushio3/chainer,jnishi/chainer,laysakura/chainer,bayerj/chainer,sinhrks/chainer,yanweifu/chainer,keisuke-umezawa/chainer,cupy/cupy,wkentaro/chainer,jnishi/chainer,hvy/chainer,Kaisuke5/chainer,keisuke-umezawa/chainer,jnishi/chainer,AlpacaDB/chainer,cupy/cupy,elviswf/chainer,ktnyt/chainer,umitanuki/chainer,t-abe/chainer,jfsantos/chainer,keisuke-umezawa/chainer,niboshi/chainer,wkentaro/chainer,ktnyt/chainer,ktnyt/chainer,niboshi/chainer | tests/functions_tests/test_accuracy.py | tests/functions_tests/test_accuracy.py | from unittest import TestCase
import numpy
from chainer import cuda, Variable
from chainer.cuda import to_cpu, to_gpu
from chainer.gradient_check import assert_allclose
from chainer.functions import accuracy
cuda.init()
class TestAccuracy(TestCase):
    """Checks chainer.functions.accuracy against a hand-computed reference."""

    def setUp(self):
        # 10 samples, 3 classes; targets are class indices in [0, 3)
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = Variable(x_data)
        t = Variable(t_data)
        y = accuracy(x, t)
        # reference: fraction of rows whose argmax matches the target label
        count = 0
        for i in xrange(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        assert_allclose(expected, to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    def test_forward_gpu(self):
        self.check_forward(to_gpu(self.x), to_gpu(self.t))
| mit | Python | |
932d6c10af549307547aeb5c920283ae68a0c114 | Create ShinyeiPPD42.py | nejohnson2/ShinyeiPPD42 | pigpio/ShinyeiPPD42.py | pigpio/ShinyeiPPD42.py | import pigpio
import time
class Shinyei(object):
    """Reads a Shinyei PPD42 dust sensor through pigpio edge callbacks.

    The sensor encodes particle density as the fraction of time its output
    line is held low; read() converts that low-pulse occupancy ratio into a
    particle concentration.
    """

    def __init__(self, pi, gpio):
        self.pi = pi
        self.gpio = gpio
        # tick bookkeeping for the current measurement window
        self._start_tick = None
        self._last_tick = None
        self._low_ticks = 0
        self._high_ticks = 0
        pi.set_mode(gpio, pigpio.INPUT)
        # fire _cbf on both rising and falling edges
        self._cb = pi.callback(gpio, pigpio.EITHER_EDGE, self._cbf)

    def read(self):
        """Return (gpio, low ratio %, concentration) and reset the counters."""
        duration = self._low_ticks + self._high_ticks
        if duration > 0:
            ratio = float(self._low_ticks) / float(duration) * 100.0
            # cubic fit of the ratio-to-concentration characteristic
            # (pcs per 0.01 cubic foot) -- presumably from the PPD42
            # datasheet curve; TODO confirm coefficients
            conc = 1.1 * pow(ratio,3) - 3.8 * pow(ratio, 2) + 520 * ratio + 0.62
        else:
            ratio = 0
            conc = 0
        # start a fresh measurement window
        self._start_tick = None
        self._last_tick = None
        self._low_ticks = 0
        self._high_ticks = 0
        return (self.gpio, ratio, conc)

    def _cbf(self, gpio, level, tick):
        # Edge callback: accumulate how long the line just spent in each state.
        if self._start_tick is not None:
            ticks = pigpio.tickDiff(self._last_tick, tick)
            self._last_tick = tick
            if level == 0: # falling edge
                # line was high until now
                self._high_ticks = self._high_ticks + ticks
            elif level == 1: # Rising edge
                # line was low until now
                self._low_ticks = self._low_ticks + ticks
            else: # timeout level, not used
                pass
        else:
            # first edge of the window: just record the reference ticks
            self._start_tick = tick
            self._last_tick = tick
# Demo loop: print a reading every few seconds (Python 2 print statement).
if __name__ == '__main__':
    pi = pigpio.pi() # connect to pi
    s = Shinyei(pi, 23)
    while True:
        time.sleep(5) # use 30 for properly calibrated reading
        g, r, c = s.read()
        print "GPIO=%s Ratio=%s Concenration=%s pcs per 0.01 cubic foot" %(g, r, int(c))
    # NOTE(review): unreachable -- the while True above never exits
    pi.stop() # Disconnect from pi
| mit | Python | |
7bf3337a1b632590cf22ecb99c1cb3d97cf35af6 | Implement test_build_pawn | LogicalDash/LiSE,LogicalDash/LiSE | ELiDE/ELiDE/tests/test_sprite_builder.py | ELiDE/ELiDE/tests/test_sprite_builder.py | from pprint import pprint
from ..pallet import Pallet
from .util import ELiDEAppTest, idle_until, window_with_widget
class TestSpriteBuilder(ELiDEAppTest):
    """GUI integration test for the pawn sprite-builder screen."""

    def test_build_pawn(self):
        """Drive the pawn-config dialog: select a body and a cloak swatch,
        confirm, and check the composed image paths propagate."""
        app = self.app
        win = window_with_widget(app.build())
        app.manager.current = 'pawncfg'
        # each idle_until spins the event loop until the given UI piece exists
        idle_until(lambda: 'dialog' in app.pawncfg.ids, 100, "Never made dialog for pawncfg")
        pawn_cfg_dialog = app.pawncfg.ids.dialog
        idle_until(lambda: 'builder' in pawn_cfg_dialog.ids, 100, "Never made pawn builder")
        builder = pawn_cfg_dialog.ids.builder
        idle_until(lambda: builder.labels, 100, "Never got any builder labels")
        idle_until(lambda: builder.pallets, 100, "Never got any builder pallets")
        idle_until(lambda: len(builder.labels) == len(builder.pallets), 100, "Never updated pawn builder")
        palbox = builder._palbox
        # press one swatch from each relevant pallet and wait for selection
        for child in palbox.children:
            if not isinstance(child, Pallet):
                continue
            idle_until(lambda: child.swatches, 100, "Never got swatches for " + child.filename)
            if 'draconian_m' in child.swatches:
                child.swatches['draconian_m'].state = 'down'
                idle_until(lambda: child.swatches['draconian_m'] in child.selection, 100, "Selection never updated")
            if 'robe_red' in child.swatches:
                child.swatches['robe_red'].state = 'down'
                idle_until(lambda: child.swatches['robe_red'] in child.selection, 100, "Selection never updated")
        idle_until(lambda: pawn_cfg_dialog.ids.selector.imgpaths, 100, "Never got imgpaths")
        # confirm the dialog and verify the composed pawn layer paths
        pawn_cfg_dialog.pressed()
        idle_until(lambda: pawn_cfg_dialog.imgpaths, 100, "Never propagated imgpaths")
        assert pawn_cfg_dialog.imgpaths == ['atlas://base.atlas/draconian_m', 'atlas://body.atlas/robe_red']
| agpl-3.0 | Python | |
06d4a93ecbce90e752712f69b8ffd435019337b2 | Create metric_table script for comparing metrics | kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve | app/scripts/metric_table.py | app/scripts/metric_table.py | from tabulate import tabulate
import csv
import yaml
import sys
import os.path as path
base_directory = path.dirname(path.dirname(path.abspath(__file__)))
sys.path.append(base_directory)
import parse
import deparse
import metrics
def main():
    """Parse two example transcriptions and print their phonetic-product
    metric as a table."""
    # the same word written with a literal length mark and its escape form
    target_words = ['bːɒtl', 'b\u02D0ɒtl']

    with open(path.join(base_directory, 'data', 'features.csv'), 'r') as f:
        segments = [segment for segment in csv.DictReader(f)]

    with open(path.join(base_directory, 'data', 'diacritics.yaml')) as f:
        diacritics = yaml.load(f)

    words = parse.parse_words(target_words, segments, diacritics)

    print('Metrics')
    print('===============')

    results = []
    for word, word_string in zip(words, target_words):
        results.append([word_string, metrics.phonetic_product(word)])

    print(tabulate(results, headers=['Word', 'Phonetic Product']))

if __name__ == '__main__':
    main()
| mit | Python | |
428d57040b35b3d644cda05c3c9cbdb630622146 | Create Udpclient.py | saifeldinhesham/ClientServerModel | Udpclient.py | Udpclient.py | import socket #for sockets
import sys #for exit
from random import randint
import time
from thread import *
import threading
from threading import Thread, current_thread
host ="172.20.10.3";
port = 6667;
def sentthread(num):
    # Worker thread: send two random ints to the UDP server as "a,b" and
    # print the echoed sum. ``num`` is the thread index (currently unused).
    # create dgram udp socket
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    except socket.error:
        print 'Failed to create socket'
        sys.exit()

    msg1=randint(0,100) #Inclusive
    msg2=randint(0,100) #Inclusive
    msg=str(msg1)+','+str(msg2)

    #Set the whole string
    s.sendto(msg, (host, port))

    # receive data from client (data, addr)
    d = s.recvfrom(1024)
    reply = d[0]

    print str(msg1)+'+' +str(msg2) +'=' +str(reply)+'\n'
# Fire off 7 concurrent request threads to exercise the server.
try:
    threads = []
    for i in range(7):
        t = threading.Thread(target=sentthread, args=(i,))
        threads.append(t)
        t.start()
except:
    print'Error: unable to start client thread'
| unlicense | Python | |
a4f86e11bbdcf2f478fd9cfb8df33a150f8a086e | Add poolstat api example to project root. | benjiboi214/mmpl-wagtail,benjiboi214/mmpl-wagtail,benjiboi214/mmpl-wagtail | poolstatapi-example.py | poolstatapi-example.py | import requests
import hmac
import hashlib
import base64
PUBLIC_KEY = '##'
PRIVATE_KEY = '##'
URL = 'https://www.poolstat.net.au/restapi/v1/ladders'
digest = hmac.new(PRIVATE_KEY, URL, digestmod=hashlib.sha256).hexdigest()
# signature = base64.b64encode(digest).decode()
print digest
#print signature
headers = {
'X-Public': PUBLIC_KEY,
'X-Hash': digest
};
def get_response():
    # Fetch the ladders endpoint with the HMAC auth headers.
    # NOTE(review): ``params`` is built but never passed to requests.get --
    # confirm whether the year filter was meant to be sent.
    params = {"year": "2017"}
    return requests.get(URL, headers=headers)
res = get_response()
print res
| mit | Python | |
da36f32893b3ba58bfecdaa819bb780845e94747 | add script to extract annotations from csv dumps | CKrawczyk/python-reducers-for-caesar | bin/extract_panoptes_csv.py | bin/extract_panoptes_csv.py | #!/usr/bin/env python
import argparse
from panoptes_aggregation import extractors
import json
import pandas
import progressbar
parser = argparse.ArgumentParser(description="extract data from panoptes classifications based on the workflow")
parser.add_argument("classification_csv", help="the classificaiton csv file containing the panoptes data dump", type=str)
parser.add_argument("workflow_csv", help="the csv file containing the workflow data", type=str)
parser.add_argument("workflow_id", help="the workflow ID you would like to extract", type=int)
parser.add_argument("-v", "--version", help="the workflow version to extract", type=int, default=1)
parser.add_argument("-o", "--output", help="the output csv file to store the annotation extractions", type=str, default="extractions.csv")
args = parser.parse_args()
workflows = pandas.read_csv(args.workflow_csv)
wdx = (workflows.workflow_id == args.workflow_id) & (workflows.version == args.version)
if wdx.sum() == 0:
raise IndexError('workflow ID and workflow version combination does not exist')
if wdx.sum() > 1:
raise IndexError('workflow ID and workflow version combination is not unique')
workflow = workflows[wdx].iloc[0]
workflow_tasks = json.loads(workflow.tasks)
extractor_config = {}
for task_key, task in workflow_tasks.items():
# only extracts drawing at the moment
# this config maps the tool number to the extractor type
if task['type'] == 'drawing':
tools_config = {}
for tdx, tool in enumerate(task['tools']):
tools_config.setdefault('{0}_extractor'.format(tool['type']), []).append(tdx)
extractor_config[task_key] = tools_config
def filter_annotations(annotations, config):
    # this is specific to drawing tasks at the moment
    # each tool can use a different extractor
    # this will split the annotations by extractor type
    by_extractor = {}
    for entry in annotations:
        task_name = entry['task']
        if task_name not in config:
            continue
        for extractor_name, wanted_tools in config[task_name].items():
            kept_values = [v for v in entry['value'] if v['tool'] in wanted_tools]
            by_extractor[extractor_name] = {'task': task_name, 'value': kept_values}
    return by_extractor
extracted_data = {
"classification_id": [],
"user_name": [],
"user_id": [],
"workflow_id": [],
"created_at": [],
"subject_id": [],
"extractor": [],
"data": []
}
classifications = pandas.read_csv(args.classification_csv)
widgets = [
'Extracting: ',
progressbar.Percentage(),
' ', progressbar.Bar(),
' ', progressbar.ETA()
]
pbar = progressbar.ProgressBar(widgets=widgets, max_value=len(classifications))
pbar.start()
for cdx, classification in classifications.iterrows():
annotations_by_extractor = filter_annotations(json.loads(classification.annotations), extractor_config)
for extractor_name, annotations in annotations_by_extractor.items():
extract = extractors.extractors_base[extractor_name]({'annotations': [annotations]})
extracted_data['classification_id'].append(classification.classification_id)
extracted_data['user_name'].append(classification.user_name)
extracted_data['user_id'].append(classification.user_id)
extracted_data['workflow_id'].append(classification.workflow_id)
extracted_data['created_at'].append(classification.created_at)
extracted_data['subject_id'].append(classification.subject_ids)
extracted_data['extractor'].append(extractor_name)
# This uses a json column for the extracts since multiple extractros
# can be in the same csv file
extracted_data['data'].append(json.dumps(extract))
pbar.update(cdx + 1)
pbar.finish()
pandas.DataFrame(extracted_data).to_csv(args.output, index=False)
| apache-2.0 | Python | |
dc386a432e23425667f05a8b2ff943d12e44dac8 | bump version to 0.8.19 | aatchison/mycroft-core,linuxipho/mycroft-core,forslund/mycroft-core,linuxipho/mycroft-core,Dark5ide/mycroft-core,MycroftAI/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core,Dark5ide/mycroft-core,aatchison/mycroft-core | mycroft/version/__init__.py | mycroft/version/__init__.py | # Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
import json
from genericpath import exists, isfile
from mycroft.util.log import getLogger
__author__ = 'augustnmonteiro'
# The following lines are replaced during the release process.
# START_VERSION_BLOCK
CORE_VERSION_MAJOR = 0
CORE_VERSION_MINOR = 8
CORE_VERSION_BUILD = 19
# END_VERSION_BLOCK
CORE_VERSION_STR = (str(CORE_VERSION_MAJOR) + "." +
str(CORE_VERSION_MINOR) + "." +
str(CORE_VERSION_BUILD))
LOG = getLogger(__name__)
class VersionManager(object):
    """Reads the installed core/enclosure version info from disk."""

    __location = "/opt/mycroft/version.json"

    @staticmethod
    def get():
        """Return the dict from version.json, or None-valued defaults when
        the file is missing or unparseable."""
        if (exists(VersionManager.__location) and
                isfile(VersionManager.__location)):
            try:
                with open(VersionManager.__location) as f:
                    return json.load(f)
            # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
            except:
                LOG.error("Failed to load version from '%s'"
                          % VersionManager.__location)
        return {"coreVersion": None, "enclosureVersion": None}
| # Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
import json
from genericpath import exists, isfile
from mycroft.util.log import getLogger
__author__ = 'augustnmonteiro'
# The following lines are replaced during the release process.
# START_VERSION_BLOCK
CORE_VERSION_MAJOR = 0
CORE_VERSION_MINOR = 8
CORE_VERSION_BUILD = 18
# END_VERSION_BLOCK
CORE_VERSION_STR = (str(CORE_VERSION_MAJOR) + "." +
str(CORE_VERSION_MINOR) + "." +
str(CORE_VERSION_BUILD))
LOG = getLogger(__name__)
class VersionManager(object):
__location = "/opt/mycroft/version.json"
@staticmethod
def get():
if (exists(VersionManager.__location) and
isfile(VersionManager.__location)):
try:
with open(VersionManager.__location) as f:
return json.load(f)
except:
LOG.error("Failed to load version from '%s'"
% VersionManager.__location)
return {"coreVersion": None, "enclosureVersion": None}
| apache-2.0 | Python |
3a48a8919b633a1244024d96445f880f9d03247b | add problem0002.py | Furisuke/ProjectEuler,Furisuke/ProjectEuler,Furisuke/ProjectEuler | python3/problem0002.py | python3/problem0002.py | from itertools import takewhile
def fibonacci(first=1, second=2):
    '''Yield the Fibonacci sequence seeded with *first* and *second*.'''
    current, following = first, second
    while True:
        yield current
        current, following = following, current + following
if __name__ == '__main__':
print(sum(n for n in takewhile(lambda n: n < 4000000, fibonacci()) if n % 2 == 0))
| mit | Python | |
64bfcf8e87ef8186a446bce0ec73502e48b3b9a4 | Create construct-binary-tree-from-string.py | kamyu104/LeetCode,jaredkoontz/leetcode,kamyu104/LeetCode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,kamyu104/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015 | Python/construct-binary-tree-from-string.py | Python/construct-binary-tree-from-string.py | # Time: O(n)
# Space: O(h)
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def str2tree(self, s):
        """
        Parse a "val(left)(right)" serialization into a binary tree.

        :type s: str
        :rtype: TreeNode
        """
        # Recursive-descent parser; returns (node, index of the next
        # unconsumed character) so the caller can continue after ')'.
        def str2treeHelper(s, i):
            # parse an (optionally negative) integer starting at i
            start = i
            if s[i] == '-': i += 1
            while i < len(s) and s[i].isdigit(): i += 1
            node = TreeNode(int(s[start:i]))
            # first parenthesised group, if present, is the left subtree
            if i < len(s) and s[i] == '(':
                i += 1
                node.left, i = str2treeHelper(s, i)
                i += 1
            # a second group is the right subtree
            if i < len(s) and s[i] == '(':
                i += 1
                node.right, i = str2treeHelper(s, i)
                i += 1
            return node, i
        return str2treeHelper(s, 0)[0] if s else None
| mit | Python | |
55ccaad47c2e3a1432b012218c72bca28fe06d73 | Create a GreetingsPlugin as a sample plugin | dnif/fnExchange-sample-plugin | greetings.py | greetings.py | from fnexchange.core.plugins import AbstractPlugin
class GreetingsPlugin(AbstractPlugin):
    """
    GreetingsPlugin provides an interface to generate greetings in different languages
    for given users (provided their names and locales).

    At this time, only the following locales are supported: "en-us", "hi-in"

    Request payload schema:
    payload = [
        {"name": "John", "locale": "en-us"},
        ...
        {"name": "Emma", "locale": "hi-in"},
    ]

    Response payload schema:
    payload = [
        {"name": "John", "locale": "en-us", greeting: "Hello, John..."},
        ...
        {"name": "Emma", "locale": "hi-in", greeting: "Namaste, Emma..."},
    ]
    """
    DEFAULT_LOCALE = "en-us"

    # per-locale templates; ``greeter`` comes from the plugin config
    hello_map = {
        'en-us': "Hello, {name}! My name is {greeter}",
        'hi-in': "Namaste, {name}! My name is {greeter}",
    }

    bye_map = {
        'en-us': "Goodbye, {name}!",
        'hi-in': "Phir Milenge, {name}!",
    }

    def __greet(self, greeting_map, element):
        """Format one greeting; fall back to a generic one for unknown locales."""
        name = element["name"]
        locale = element["locale"]
        try:
            greeting = greeting_map[locale].format(name=name, greeter=self.config.greeter)
        except KeyError:
            greeting = "Greetings!"
        return dict(name=name, locale=locale, greeting=greeting)

    def __hello(self, element):
        return self.__greet(self.hello_map, element)

    def __bye(self, element):
        # BUG FIX: previously passed hello_map here, so say_bye produced
        # hello greetings instead of goodbyes.
        return self.__greet(self.bye_map, element)

    def say_hello(self, payload):
        """Return a hello greeting for each {"name", "locale"} element."""
        return map(self.__hello, payload)

    def say_bye(self, payload):
        """Return a goodbye greeting for each {"name", "locale"} element."""
        return map(self.__bye, payload)
| apache-2.0 | Python | |
0b83d7baf6cd02fb673aaf9a9e7b4b7498f4240a | Create rsa_monitor.py | sh0wrun/RSA_status_check | rsa_monitor.py | rsa_monitor.py | #!/usr/bin/env python
import os
log=file('RSA_check.log','a')
def check_rsa_result(rsa_server):
''' use os.putenv to set shell env is very important used in cron'''
os.putenv('HOME','/root/')
os.putenv('USER','root')
os.putenv('USERNAME','root')
tocken_f=os.popen('/usr/local/stocken/bin/stoken')
tocken=tocken_f.readlines()
tocken_f.close()
log.write("\n\ntocken"+tocken[0])
radtest_command='/usr/bin/radtest username <pin>'+tocken[0].strip()+' '+rsa_server+' 0 <radius_pwd> <group> <source_ip>'
log.write(radtest_command+"\n")
radtest_f=os.popen(radtest_command)
radtest=radtest_f.readlines()
result1=''.join(radtest)
radtest_f.close()
return result1
# An accepted login shows 'Access-Accept packet' in the radtest output;
# anything else is treated as a failure and alerted via SMS (Python 2).
if __name__ == "__main__":
    result=check_rsa_result('192.168.1.1')
    log.write(result)
    if result.find('Access-Accept packet') == -1:
        print "check 192.168.1.1 failed"
        log.write("check 192.168.1.1 failed")
        # NOTE(review): send_SMS and mobile_list are not defined or imported
        # in this file -- confirm where they come from
        send_SMS.send_SMS(mobile_list, "RSA-master 192.168.1.1 failed")
    else:
        log.write("check 192.168.1.1 seccussful")
        print "check 192.168.1.1 seccussful"
    log.close()
| apache-2.0 | Python | |
1cccfd3eb0ea34450555fab444eb055ff56a7dd6 | Move decorators to special file | daurer/afnumpy,FilipeMaia/afnumpy | afnumpy/decorators.py | afnumpy/decorators.py | import afnumpy
def outufunc(func):
    """Decorator adding numpy-style ``out=`` keyword support.

    If the caller passes ``out=arr``, the wrapped function's result is
    copied into ``arr`` in place; the result is returned either way.
    """
    def wrapper(*args, **kws):
        out = kws.pop('out', None)
        ret = func(*args, **kws)
        if out is not None:
            out[:] = ret
        return ret
    return wrapper
def iufunc(func):
    """Decorator for in-place ufuncs: broadcasts all-ndarray argument lists.

    Raises ValueError when broadcasting would change the first (output)
    operand's shape, since an in-place op cannot grow its target.
    """
    def wrapper(*args, **kws):
        if all(isinstance(A, afnumpy.ndarray) for A in args):
            bcast_args = afnumpy.broadcast_arrays(*args)
            # NOTE(review): ``is not`` compares shape object identity, not
            # equality -- presumably relies on broadcast_arrays returning the
            # original array (same shape object) when nothing changed;
            # confirm, otherwise this should be ``!=``.
            if(bcast_args[0].shape is not args[0].shape):
                raise ValueError("non-broadcastable output operand with"
                                 " shape %s doesn't match the broadcast"
                                 " shape %s" % (args[0].shape, bcast_args[0].shape))
            args = bcast_args
        return func(*args, **kws)
    return wrapper
def ufunc(func):
    """Decorator that broadcasts all-ndarray argument lists before calling."""
    def wrapper(*args, **kws):
        # only broadcast when every positional argument is an afnumpy array
        if all(isinstance(A, afnumpy.ndarray) for A in args):
            args = afnumpy.broadcast_arrays(*args)
        return func(*args, **kws)
    return wrapper
| bsd-2-clause | Python | |
be8b7c27b25b60540c2e53504ce42543724577df | Add utilities for the clients | napalm-automation/napalm-logs,napalm-automation/napalm-logs | napalm_logs/utils/__init__.py | napalm_logs/utils/__init__.py | # -*- coding: utf-8 -*-
'''
napalm-logs utilities
'''
from __future__ import absolute_import
from __future__ import unicode_literals
# Import pythond stdlib
import ssl
import socket
# Import python stdlib
import umsgpack
import nacl.secret
import nacl.signing
import nacl.encoding
from nacl.exceptions import CryptoError
from nacl.exceptions import BadSignatureError
# Import napalm-logs pkgs
import napalm_logs.config as defaults
from napalm_logs.exceptions import CryptoException
from napalm_logs.exceptions import BadSignatureException
def authenticate(certificate,
                 address=defaults.AUTH_ADDRESS,
                 port=defaults.AUTH_PORT):
    '''
    Authenticate the client and return the private
    and signature keys.

    Establish a connection through a secured socket,
    then do the handshake using the napalm-logs
    auth algorithm.
    '''
    # IPv6 addresses contain ':'; pick the matching address family
    if ':' in address:
        skt_ver = socket.AF_INET6
    else:
        skt_ver = socket.AF_INET
    skt = socket.socket(skt_ver, socket.SOCK_STREAM)
    ssl_skt = ssl.wrap_socket(skt,
                              ca_certs=certificate,
                              cert_reqs=ssl.CERT_REQUIRED)
    # BUG FIX: the wrapped socket was bound as ``ssl_skt`` but every use
    # below said ``ssl_sock``, raising NameError; use one name throughout.
    ssl_skt.connect((address, port))
    # Explicit INIT
    ssl_skt.write(defaults.MAGIC_REQ)
    # Receive the private key
    private_key = ssl_skt.read()
    # Send back explicit ACK
    ssl_skt.write(defaults.MAGIC_ACK)
    # Read the hex of the verification key
    verify_key_hex = ssl_skt.read()
    # Send back explicit ACK
    ssl_skt.write(defaults.MAGIC_ACK)
    # Close the socket
    ssl_skt.close()
    private_key_obj = nacl.secret.SecretBox(private_key)
    verify_key_obj = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder)
    return private_key_obj, verify_key_obj
def decrypt(binary, verify_key_obj, private_key_obj):
    '''
    Decrypt and unpack the original OpenConfig object,
    serialized using MessagePack.

    Raise BadSignatureException when the signature
    was forged or corrupted.
    '''
    # BUG FIX: this module never defined ``log``, so both error paths
    # raised NameError instead of the intended exceptions; create a
    # logger locally (function-scope import keeps the module API unchanged).
    import logging
    log = logging.getLogger(__name__)
    try:
        # verify() returns the signed payload once the signature checks out
        encrypted = verify_key_obj.verify(binary)
    except BadSignatureError as bserr:
        log.error('Signature was forged or corrupt', exc_info=True)
        raise BadSignatureException('Signature was forged or corrupt')
    try:
        packed = private_key_obj.decrypt(encrypted)
    except CryptoError as cerr:
        log.error('Unable to decrypt', exc_info=True)
        raise CryptoException('Unable to decrypt')
    return umsgpack.unpackb(packed)
| apache-2.0 | Python | |
1ecb594b945bff57ad60c703740cdbb8d8509d9d | Add systemd utils module | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/utils/systemd.py | salt/utils/systemd.py | '''
Contains systemd related help files
'''
# import python libs
import os
def sd_booted(context):
    '''
    Return True if the system was booted with systemd, False otherwise.

    Pass in the loader context "__context__"; this function sets (and
    thereafter reuses) the "systemd.sd_booted" key in that dict.
    '''
    # We can cache this for as long as the minion runs: the boot manager
    # cannot change without a reboot.
    if "systemd.sd_booted" not in context:
        # This check does the same as sd_booted() from libsystemd-daemon:
        # http://www.freedesktop.org/software/systemd/man/sd_booted.html
        #
        # Fix: the original wrapped os.stat() in an "if" that relied on the
        # stat result being truthy; had it ever been falsy the cache key
        # would not have been set and the return below would raise KeyError.
        # Both outcomes are now assigned explicitly.
        try:
            os.stat('/run/systemd/system')
        except OSError:
            context['systemd.sd_booted'] = False
        else:
            context['systemd.sd_booted'] = True

    return context['systemd.sd_booted']
| apache-2.0 | Python | |
848b5f0984844eb2d1d32d0cf96038e597a634eb | add automobile csv_upload | paul-rs/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,amaas-fintech/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python,amaas-fintech/amaas-core-sdk-python | amaascore/csv_upload/assets/automobile.py | amaascore/csv_upload/assets/automobile.py | import logging.config
import csv
from amaascore.tools.csv_tools import csv_stream_to_objects
from amaascore.assets.automobile import Automobile
from amaascore.assets.interface import AssetsInterface
from amaasutils.logging_utils import DEFAULT_LOGGING
class AutomobileUploader(object):
    """CSV upload/download helper for Automobile assets."""

    def __init__(self):
        pass

    @staticmethod
    def json_handler(orderedDict, params):
        """Build an Automobile from one CSV row merged with *params*.

        Fix: removed a leftover debug print; it raised TypeError
        (str + int) whenever the CSV had no 'trans_type' column,
        because the pop default is the int 0.
        """
        row = dict(orderedDict)
        for key, var in params.items():
            row[key] = var
        asset_id = row.pop('asset_id', None)
        asset_status = row.pop('asset_status', 'Active')
        # 'trans_type' is not an Automobile constructor argument; discard it.
        row.pop('trans_type', 0)
        automobile = Automobile(asset_id=asset_id, asset_status=asset_status, **row)
        return automobile

    @staticmethod
    def upload(asset_manager_id, client_id, csvpath):
        """convert csv file rows to objects and insert;
        asset_manager_id and client_id from the UI (login)"""
        interface = AssetsInterface(environment='local')
        logging.config.dictConfig(DEFAULT_LOGGING)
        logger = logging.getLogger(__name__)
        params = {'asset_manager_id': asset_manager_id, 'client_id': client_id}
        with open(csvpath) as csvfile:
            automobiles = csv_stream_to_objects(stream=csvfile, json_handler=AutomobileUploader.json_handler, **params)
            # Iterate while the file is still open: csv_stream_to_objects is
            # stream-based, so consuming it after close could fail.
            for automobile in automobiles:
                interface.new(automobile)
                # Fix: the message wrongly said "equity".
                logger.info('Creating new automobile %s successfully', automobile.display_name)

    @staticmethod
    def download(asset_manager_id, asset_id_list):
        """retrieve the assets mainly for test purposes"""
        interface = AssetsInterface(environment='local')
        logging.config.dictConfig(DEFAULT_LOGGING)
        logger = logging.getLogger(__name__)
        automobiles = []
        for asset_id in asset_id_list:
            automobiles.append(interface.retrieve(asset_manager_id=asset_manager_id, asset_id=asset_id))
            # NOTE(review): download() also deactivates each retrieved asset —
            # presumably test clean-up; confirm before reusing elsewhere.
            interface.deactivate(asset_manager_id=asset_manager_id, asset_id=asset_id)
        return automobiles
52e087c20cacc4b664fbfc781eafecfebe0cf43c | Add views to display list of current projects accepted in the program. | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son | app/soc/modules/gsoc/views/projects_list.py | app/soc/modules/gsoc/views/projects_list.py | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the views for listing all the projects accepted
into a GSoC program.
"""
__authors__ = [
'"Madhusudan.C.S" <madhusudancs@gmail.com>',
]
from django.conf.urls.defaults import url
from soc.logic.exceptions import AccessViolation
from soc.views.template import Template
from soc.modules.gsoc.logic.models.student_project import logic as sp_logic
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.helper import lists
from soc.modules.gsoc.views.helper import url_patterns
class ProjectList(Template):
    """Template for listing the student projects accepted in the program.
    """

    def __init__(self, request, data):
        self.request = request
        self.data = data

        # Columns shown in the jqgrid: student user name, project title,
        # organization (scope) name and mentor user name.
        list_config = lists.ListConfiguration()
        list_config.addColumn('student', 'Student',
                              lambda entity, *args: entity.student.user.name)
        list_config.addSimpleColumn('title', 'Title')
        list_config.addColumn('org', 'Organization',
                              lambda entity, *args: entity.scope.name)
        list_config.addColumn('mentor', 'Mentor',
                              lambda entity, *args: entity.mentor.user.name)
        self._list_config = list_config

    def context(self):
        # idx=0 must match the index checked in getListData below.
        list = lists.ListConfigurationResponse(
            self._list_config, idx=0,
            description='List of projects accepted into %s' % (
                self.data.program.name))

        return {
            'lists': [list],
        }

    def getListData(self):
        """Returns the list data as requested by the current request.

        If the lists as requested is not supported by this component None is
        returned.
        """
        idx = lists.getListIndex(self.request)
        if idx == 0:
            # Only projects of this program with status 'accepted' are shown.
            fields = {'program': self.data.program,
                      'status': 'accepted'}

            response_builder = lists.QueryContentResponseBuilder(
                self.request, self._list_config, sp_logic,
                fields)
            return response_builder.build()
        else:
            return None

    def templatePath(self):
        # Partial template rendering just the list widget.
        return "v2/modules/gsoc/projects_list/_project_list.html"
class ListProjects(RequestHandler):
    """View methods for listing all the projects accepted into a program.
    """

    def templatePath(self):
        # Full-page template; it embeds ProjectList's partial template.
        return 'v2/modules/gsoc/projects_list/base.html'

    def djangoURLPatterns(self):
        """Returns the list of tuples for containing URL to view method mapping.
        """
        return [
            url(r'^gsoc/list_projects/%s$' % url_patterns.PROGRAM, self,
                name='gsoc_accepted_projects')
        ]

    def checkAccess(self):
        """Access checks for the view.
        """
        # No checks are performed: the accepted-projects list is public.
        pass

    def jsonContext(self):
        """Handler for JSON requests.
        """
        list_content = ProjectList(self.request, self.data).getListData()

        # getListData returns None for unsupported list indexes.
        if not list_content:
            raise AccessViolation(
                'You do not have access to this data')
        return list_content.content()

    def context(self):
        """Handler for GSoC Accepted Projects List page HTTP get request.
        """
        program = self.data.program

        return {
            'page_name': '%s - Accepted Projects' % program.short_name,
            'program_name': program.name,
            'project_list': ProjectList(self.request, self.data),
        }
| apache-2.0 | Python | |
b9d4ad23eb1bd08cae006350dc6c219f838ecdd7 | add tests | saklar13/Meowth,uaprom-summer-2015/Meowth,saklar13/Meowth,uaprom-summer-2015/Meowth,saklar13/Meowth,saklar13/Meowth,uaprom-summer-2015/Meowth,uaprom-summer-2015/Meowth | project/tests/test_mail.py | project/tests/test_mail.py | from sqlalchemy.exc import StatementError
from project.models import MailTemplate
from project.tests.utils import ProjectTestCase
class TestMailTemplateBl(ProjectTestCase):
    # Business-logic tests for MailTemplate.bl.

    def test_get(self):
        # bl.get must return the same row that a direct query finds.
        mail1 = MailTemplate.bl.get(MailTemplate.MAIL.CV)
        mail2 = MailTemplate.query.filter(
            MailTemplate.mail == MailTemplate.MAIL.CV
        ).one()
        self.assertEqual(mail1, mail2)
        # An invalid mail id must raise at the statement level.
        with self.assertRaises(StatementError):
            MailTemplate.bl.get(100)
| bsd-3-clause | Python | |
6d76193a9f6e8beee29d92ec3c79efb4f6218e86 | Add IslaAukate and IslaAukateColor | webcomics/dosage,webcomics/dosage,peterjanes/dosage,peterjanes/dosage | dosagelib/plugins/i.py | dosagelib/plugins/i.py | # -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2017 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from re import compile, escape
from ..scraper import _BasicScraper, _ParserScraper
from ..util import tagre
from .common import _WordPressScraper, _WPNavi
class IAmArg(_BasicScraper):
url = 'http://iamarg.com/'
rurl = escape(url)
stripUrl = url + '%s/'
firstStripUrl = stripUrl % '2011/05/08/05082011'
imageSearch = compile(tagre("img", "src", r'(//iamarg.com/comics/\d+-\d+-\d+[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(%s\d+/\d+/\d+/[^"]+)' % rurl, after="prev"))
help = 'Index format: yyyy/mm/dd/stripname'
class ICanBarelyDraw(_BasicScraper):
url = 'http://www.icanbarelydraw.com/comic/'
rurl = escape(url)
stripUrl = url + '%s'
firstStripUrl = stripUrl % '39'
imageSearch = compile(tagre("img", "src", r'(%scomics/\d+-\d+-\d+-[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%s\d+)' % rurl))
help = 'Index format: number'
class IDreamOfAJeanieBottle(_WordPressScraper):
url = 'http://jeaniebottle.com/'
class InternetWebcomic(_WPNavi):
url = 'http://www.internet-webcomic.com/'
stripUrl = url + '?p=%s'
firstStripUrl = stripUrl % '30'
help = 'Index format: n'
class IrregularWebcomic(_BasicScraper):
url = 'http://www.irregularwebcomic.net/'
stripUrl = url + '%s.html'
firstStripUrl = stripUrl % '1'
imageSearch = compile(r'<img .*src="(.*comics/.*(png|jpg|gif))".*>')
prevSearch = compile(r'<a href="(/\d+\.html|/cgi-bin/comic\.pl\?comic=\d+)">Previous ')
help = 'Index format: nnn'
class IslaAukate(_ParserScraper):
url = 'https://overlordcomic.com/archive/default/latest'
stripUrl = 'https://overlordcomic.com/archive/default/pages/%s'
firstStripUrl = stripUrl % '001'
imageSearch = '//div[@id="comicpage"]/img'
prevSearch = '//nav[@class="comicnav"]/a[text()="Prev"]'
class IslaAukateColor(_ParserScraper):
url = 'https://overlordcomic.com/archive/color/latest'
stripUrl = 'https://overlordcomic.com/archive/color/pages/%s'
firstStripUrl = stripUrl % '001'
imageSearch = '//div[@id="comicpage"]/img'
prevSearch = '//nav[@class="comicnav"]/a[text()="Prev"]'
def namer(self, imageUrl, pageUrl):
# Fix filenames of early comics
filename = imageUrl.rsplit('/', 1)[-1]
if filename[0].isdigit():
filename = 'Aukate' + filename
return filename
class ItsWalky(_WordPressScraper):
url = 'http://www.itswalky.com/'
| # -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2017 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from re import compile, escape
from ..scraper import _BasicScraper
from ..util import tagre
from .common import _WordPressScraper, _WPNavi
class IAmArg(_BasicScraper):
url = 'http://iamarg.com/'
rurl = escape(url)
stripUrl = url + '%s/'
firstStripUrl = stripUrl % '2011/05/08/05082011'
imageSearch = compile(tagre("img", "src", r'(//iamarg.com/comics/\d+-\d+-\d+[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(%s\d+/\d+/\d+/[^"]+)' % rurl, after="prev"))
help = 'Index format: yyyy/mm/dd/stripname'
class ICanBarelyDraw(_BasicScraper):
url = 'http://www.icanbarelydraw.com/comic/'
rurl = escape(url)
stripUrl = url + '%s'
firstStripUrl = stripUrl % '39'
imageSearch = compile(tagre("img", "src", r'(%scomics/\d+-\d+-\d+-[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%s\d+)' % rurl))
help = 'Index format: number'
class IDreamOfAJeanieBottle(_WordPressScraper):
url = 'http://jeaniebottle.com/'
class InternetWebcomic(_WPNavi):
url = 'http://www.internet-webcomic.com/'
stripUrl = url + '?p=%s'
firstStripUrl = stripUrl % '30'
help = 'Index format: n'
class IrregularWebcomic(_BasicScraper):
url = 'http://www.irregularwebcomic.net/'
stripUrl = url + '%s.html'
firstStripUrl = stripUrl % '1'
imageSearch = compile(r'<img .*src="(.*comics/.*(png|jpg|gif))".*>')
prevSearch = compile(r'<a href="(/\d+\.html|/cgi-bin/comic\.pl\?comic=\d+)">Previous ')
help = 'Index format: nnn'
class ItsWalky(_WordPressScraper):
url = 'http://www.itswalky.com/'
| mit | Python |
afaaef761ca394f1db4516252ffb89789c606890 | Add search.py | irqed/octokit.py | octokit/resources/search.py | octokit/resources/search.py | # encoding: utf-8
"""Methods for the Search API
http://developer.github.com/v3/search/
"""
| mit | Python | |
class WebUtils(object):
    """Static helpers to fetch pages and extract paragraph text with bs4."""

    @staticmethod
    def invoke_url(url, headers=None, raw=False):
        """GET *url*; return the body as text, or raw bytes when raw=True."""
        import requests
        if headers is not None:
            response = requests.get(url, headers=headers)
        else:
            response = requests.get(url)
        return response.content if raw else response.text

    @staticmethod
    def format_as_soup(url_response):
        """Wrap an HTML string in a BeautifulSoup document."""
        from bs4 import BeautifulSoup
        return BeautifulSoup(url_response)

    @staticmethod
    def parse_number(s):
        """Return *s* as an int or float when it parses, else unchanged."""
        try:
            value = float(s)
        except ValueError:
            return s
        if value - int(value) == 0:
            return int(value)
        return value

    @staticmethod
    def parse_html_p_text(url):
        """Fetch *url* and return the text of every <p> in its body."""
        soup = WebUtils.format_as_soup(WebUtils.invoke_url(url))
        return [paragraph.text for paragraph in soup.body.findAll('p')]

    @staticmethod
    def format_noodle(url):
        """Paragraph text of a noodls.com page, '|'-joined; None otherwise."""
        if 'www.noodls.com' not in url:
            return None
        return "|".join(WebUtils.parse_html_p_text(url))

    @staticmethod
    def format_yahoo_finance(url):
        """Paragraph text of a finance.yahoo.com page; None otherwise."""
        if 'finance.yahoo.com' not in url:
            return None
        return "|".join(WebUtils.parse_html_p_text(url))

    @staticmethod
    def format_biz_yahoo(url):
        """Paragraph text of a biz.yahoo.com page; None otherwise."""
        if 'biz.yahoo.com' not in url:
            return None
        return "|".join(WebUtils.parse_html_p_text(url))

    @staticmethod
    def format_investopedia(url):
        """Paragraph text of an investopedia.com page; None otherwise."""
        if 'www.investopedia.com' not in url:
            return None
        return "|".join(WebUtils.parse_html_p_text(url))

    @staticmethod
    def format_generic(url):
        """Fetch *url* and return its prettified <body> markup."""
        soup = WebUtils.format_as_soup(WebUtils.invoke_url(url))
        return soup.body.prettify()
class YahooParse(object):
    """Scrapers for Yahoo Finance data tables (bs4 soup in, values out)."""

    @staticmethod
    def finance_1(soup):
        """Yield (cell_text, scale_factor) for each data cell of the
        yfnc_tabledata1 table.

        factor is 1000 when the page states "All numbers in thousands".
        Consecutive duplicate cells are skipped.
        """
        factor = 1
        thousands = soup.body.findAll(text= "All numbers in thousands")
        if thousands:
            factor = 1000
        table = soup.find("table", { "class" : "yfnc_tabledata1" })
        prev = ''
        for cell in table.findAll(YahooParse.validtag):
            text = cell.find(text=True)
            if not text:
                continue
            # Replace non-breaking spaces before trimming.
            text = text.replace(u'\xa0', u' ')
            text = text.strip()
            if len(text) == 0:
                continue
            if text == prev:
                continue
            prev = text
            yield text, factor

    @staticmethod
    def finance_2(text):
        """Normalise a cell: collapse whitespace, strip commas, convert
        accountant-style "(123)" into -123 and parse to a number."""
        text = " ".join(text.split()).replace(',', "")
        if len(text) == 0:
            return ''
        if text[0] == "(":
            text_list = list(text)
            text_list[0] = "-"
            text_list[-1] = ""
            text = "".join(text_list)
        # Bug fix: parse_number is a staticmethod of WebUtils; the original
        # bare call raised NameError at runtime.
        return WebUtils.parse_number(text)

    @staticmethod
    def validtag(tag):
        # Only <td> and <strong> cells carry data in these tables.
        if tag.name not in ['td', 'strong']:
            return False
        return True

    @staticmethod
    def finance(soup):
        """Yield every table value as a string, scaled by the page factor."""
        for text, factor in YahooParse.finance_1(soup):
            text = YahooParse.finance_2(text)
            if isinstance(text, str):
                yield text
            else:
                yield str(text * factor)
| lgpl-2.1 | Python | |
26f153877ad141032bc78e2ca89026cf125c71d7 | Create PedidoCadastrar.py | AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb | backend/Models/Matricula/PedidoCadastrar.py | backend/Models/Matricula/PedidoCadastrar.py | from Framework.Pedido import Pedido
from Framework.ErroNoHTTP import ErroNoHTTP
class PedidoCadastrar(Pedido):
    """Request object carrying the fields needed to register a matricula."""

    def __init__(self, variaveis_do_ambiente):
        super(PedidoCadastrar, self).__init__(variaveis_do_ambiente)
        try:
            corpo = self.corpo
            self.ano = corpo['ano']
            self.nome = corpo['nome']
            self.periodo = corpo['periodo']
            self.id_disciplina = corpo['id_disciplina']
            self.id_usuario = corpo['id_usuario']
        except:
            # Any missing or malformed field maps to HTTP 400 (bad request).
            raise ErroNoHTTP(400)

    def getNome(self):
        return self.nome

    def getAno(self):
        return self.ano

    def getId_disciplina(self):
        return self.id_disciplina

    def getId_usuario(self):
        return self.id_usuario

    def getPeriodo(self):
        return self.periodo
| mit | Python | |
80261391bf75c4904825d97ae7b3168e4652463a | Create most_digits.py | Kunalpod/codewars,Kunalpod/codewars | most_digits.py | most_digits.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Most digits
#Problem level: 7 kyu
def find_longest(arr):
return arr[[len(str(x)) for x in arr].index(max([len(str(x)) for x in arr]))]
| mit | Python | |
986ee0ffae416cf1dc833f581a89d718dc2ff2fe | Add a first draft of the evaluation module. | johnmartinsson/bird-species-classification,johnmartinsson/bird-species-classification | bird/evaluate.py | bird/evaluate.py | from models.cuberun import CubeRun
import numpy as np
import utils
import loader
# Number of bird species, i.e. output classes of the model.
nb_classes = 19
# Spectrogram input fed to the network: (rows, cols, channels).
input_shape = (257, 624, 1)
(cols, rows, chs) = input_shape
# 2-D spectrogram shape without the channel axis, used by the loader.
image_shape = (cols, rows)
batch_size=32
def evaluate(model, data_filepath, file2labels_filepath):
    """Run *model* on the dataset at *data_filepath* and print predicted
    vs. ground-truth class ids for every sample.

    file2labels_filepath maps data file names to their label sets
    (multi-label, hence the binary_crossentropy setup).
    """
    model.compile(loss='binary_crossentropy', optimizer='adam',
                  metrics=['binary_accuracy', 'fbeta_score'])

    (X_test, Y_test, filenames) = loader.load_all_data(data_filepath, file2labels_filepath,
                                                       nb_classes=nb_classes,
                                                       image_shape=image_shape)
    print("Predicting ...")
    Y = model.predict(X_test, batch_size=batch_size, verbose=1)
    # Threshold the per-class probabilities at 0.5 to get binary vectors.
    Y = np.round(Y)
    for (y, gt) in zip(Y, Y_test):
        print("predicted: ", binary_to_id(y), "\t ground truth: ", binary_to_id(gt))

    #print("Evaluating ...")
    #scores = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1)
    #print("%s: %.2f%%" % (model.metrics_names[0], scores[0]))
    #print("%s: %.2f%%" % (model.metrics_names[1], scores[1]))
    #print("%s: %.2f%%" % (model.metrics_names[2], scores[2]))
def binary_to_id(Y):
    """Convert a binary indicator vector to the list of indices whose
    value equals 1 (i.e. the active class ids)."""
    return [i for i, y in enumerate(Y) if y == 1]
| mit | Python | |
c59a08d60e42f56be628d36e4572772fb5707c98 | Create pyblitzle.py | filinger/blitzle,filinger/blitzle,filinger/blitzle | pyblitzle.py | pyblitzle.py | import argparse
import cv2
class FilterValue(object):
    """A mutable (x, y, z) triple, e.g. one HSV colour bound."""

    def __init__(self, x, y, z):
        super(FilterValue, self).__init__()
        self.x = x
        self.y = y
        self.z = z

    def set_x(self, x):
        self.x = x

    def set_y(self, y):
        self.y = y

    def set_z(self, z):
        self.z = z

    def scalar(self):
        """Return the components as an (x, y, z) tuple."""
        return (self.x, self.y, self.z)
class FilterRange(object):
    """A pair of bounds (lower, higher) delimiting one filter range."""

    def __init__(self, lower, higher):
        super(FilterRange, self).__init__()
        self.lower = lower
        self.higher = higher
# Parse the single required argument: the image to tune the filter on.
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="path to the input image")
args = vars(ap.parse_args())

src_image = cv2.imread(args["image"])

# Two HSV ranges are combined; each FilterRange holds (lower, higher) bounds.
lower = FilterRange(FilterValue(0, 190, 205), FilterValue(2, 205, 225))
higher = FilterRange(FilterValue(175, 180, 170), FilterValue(180, 200, 240))
# Morphological-close kernel size, adjustable via the "Post Filter" window.
other = {'dilateX': 1, 'dilateY': 1}

resultWindow = 'Result'
cv2.namedWindow(resultWindow)


def recompute(update_func=None):
    # The trackbar lambdas call the setter *before* passing its None result
    # here, so update_func is always None in practice and the guard skips it.
    if update_func is not None:
        update_func()
    # Threshold both HSV ranges and merge the two masks.
    src_hsv = cv2.cvtColor(src_image, cv2.COLOR_BGR2HSV)
    lower_hue = cv2.inRange(src_hsv, lower.lower.scalar(), lower.higher.scalar())
    higher_hue = cv2.inRange(src_hsv, higher.lower.scalar(), higher.higher.scalar())
    filtered = cv2.addWeighted(lower_hue, 1.0, higher_hue, 1.0, 0.0)
    # Close small holes in the mask with the configured kernel.
    d_x = other['dilateX']
    d_y = other['dilateY']
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (d_x, d_y))
    result = cv2.morphologyEx(filtered, cv2.MORPH_CLOSE, kernel)
    # Display at half size.
    result = cv2.resize(result, (0, 0), None, 0.5, 0.5, cv2.INTER_AREA)
    cv2.imshow(resultWindow, result)


# Trackbars for the lower HSV range (H max 180, S/V max 255 in OpenCV).
controlWindow1 = 'Lower HSV'
cv2.namedWindow(controlWindow1)
cv2.createTrackbar('H1', controlWindow1, lower.lower.x, 180, lambda x: recompute(lower.lower.set_x(x)))
cv2.createTrackbar('S1', controlWindow1, lower.lower.y, 255, lambda x: recompute(lower.lower.set_y(x)))
cv2.createTrackbar('V1', controlWindow1, lower.lower.z, 255, lambda x: recompute(lower.lower.set_z(x)))
cv2.createTrackbar('H2', controlWindow1, lower.higher.x, 180, lambda x: recompute(lower.higher.set_x(x)))
cv2.createTrackbar('S2', controlWindow1, lower.higher.y, 255, lambda x: recompute(lower.higher.set_y(x)))
cv2.createTrackbar('V2', controlWindow1, lower.higher.z, 255, lambda x: recompute(lower.higher.set_z(x)))

# Trackbars for the higher HSV range.
controlWindow2 = 'Higher HSV'
cv2.namedWindow(controlWindow2)
cv2.createTrackbar('H1', controlWindow2, higher.lower.x, 180, lambda x: recompute(higher.lower.set_x(x)))
cv2.createTrackbar('S1', controlWindow2, higher.lower.y, 255, lambda x: recompute(higher.lower.set_y(x)))
cv2.createTrackbar('V1', controlWindow2, higher.lower.z, 255, lambda x: recompute(higher.lower.set_z(x)))
cv2.createTrackbar('H2', controlWindow2, higher.higher.x, 180, lambda x: recompute(higher.higher.set_x(x)))
cv2.createTrackbar('S2', controlWindow2, higher.higher.y, 255, lambda x: recompute(higher.higher.set_y(x)))
cv2.createTrackbar('V2', controlWindow2, higher.higher.z, 255, lambda x: recompute(higher.higher.set_z(x)))

# Trackbars for the post-filter (morphological close) kernel size.
controlWindow3 = 'Post Filter'
cv2.namedWindow(controlWindow3)
cv2.createTrackbar('Dilate X', controlWindow3, other.get('dilateX'), 100,
                   lambda x: recompute(other.__setitem__('dilateX', x)))
cv2.createTrackbar('Dilate Y', controlWindow3, other.get('dilateY'), 100,
                   lambda x: recompute(other.__setitem__('dilateY', x)))

# Initial render, then block until a key is pressed.
recompute()
cv2.waitKey(0)
| mit | Python | |
ff4b6b7ac283f8828c6c268c9b4072ff72171776 | add Digital Filter class | pylayers/pylayers,dialounke/pylayers,dialounke/pylayers,pylayers/pylayers | pylayers/signal/DF.py | pylayers/signal/DF.py | #!/usr/bin/python
# -*- coding: latin1 -*-
from numpy import *
from scipy import io
from scipy.signal import *
from pylab import *
from EnergyDetector import *
class DF(object):
    """ Digital Filter Class

    An IIR/FIR digital filter described by its transfer-function
    coefficients: ``b`` (numerator) and ``a`` (denominator).

    Methods
    -------
    filter   : filter a signal
    order    : return the filter order
    freqz    : plot the frequency response (modulus and phase)
    ellip_bp : design an elliptic band-pass filter
    remez    : design an FIR filter via the Remez exchange algorithm
    zplane   : plot poles and zeros in the complex plane
    ir       : plot the impulse response
    """
    def __init__(self, b=array([1]), a=array([1, -0.5])):
        # NOTE: the array defaults are shared between calls but are never
        # mutated in place, so this is safe.
        self.b = b
        self.a = a

    def filter(self, x):
        """ Filter the signal x and return the filtered signal. """
        y = lfilter(self.b, self.a, x)
        return(y)

    def order(self):
        """ Return the filter order: max(len(a), len(b)) - 1.

        Bug fix: the previous version stored the result in ``self.order``,
        clobbering this very method so that any second call raised
        TypeError. The value is now simply returned.
        """
        return max(len(self.a), len(self.b)) - 1

    def freqz(self):
        """
        freqz : display the filter transfer function (modulus and phase).

        The result (w, h) is kept on the instance as self.w / self.h.
        """
        # Resolves to scipy.signal.freqz (the method name is a class
        # attribute and does not shadow the global).
        (w, h) = freqz(self.b, self.a)
        self.w = w
        self.h = h
        subplot(211)
        plot(w / pi, 20 * log10(abs(h) + 1e-15))
        ylabel('dB')
        title('Modulus')
        grid()
        subplot(212)
        plot(w / pi, angle(h) * 180. / pi)
        ylabel('deg')
        xlabel('Relative frequency')
        title('Phase')
        grid()
        #show()

    def ellip_bp(self, wp, ws, gpass=0.5, gstop=20):
        """ Elliptic Bandpath filter

        wp    : pass-band edge frequencies (normalised)
        ws    : stop-band edge frequencies (normalised)
        gpass : maximum loss in the pass band (dB)
        gstop : minimum attenuation in the stop band (dB)

        See Also
        --------
        iirdesign
        """
        (b, a) = iirdesign(wp, ws, gpass, gstop, analog=0, ftype='ellip', output='ba')
        self.b = b
        self.a = a

    def remez(self, numtaps=401, bands=(0, 0.1, 0.11, 0.2, 0.21, 0.5), desired=(0.0001, 1, 0.0001)):
        """
        FIR design with the Remez exchange algorithm.

        numtaps : number of filter taps
        bands   : band edges (normalised frequencies)
        desired : desired gain in each band

        Example: flt.remez(numtaps=401, bands=(0, 0.1, 0.11, 0.2, 0.21, 0.5),
                           desired=(0.0001, 1, 0.0001))
        """
        # FIR: the denominator reduces to 1.
        self.a = array(1)
        self.b = remez(numtaps, bands, desired, weight=None, Hz=1, type='bandpass', maxiter=25, grid_density=16)

    def zplane(self):
        """
        Display the filter poles (x) and zeros (o) in the complex plane.
        """
        A = poly1d(self.a)
        B = poly1d(self.b)
        # Roots of the denominator are the poles, of the numerator the zeros.
        ra = A.r
        rb = B.r
        t = arange(0, 2 * pi + 0.1, 0.1)
        plot(cos(t), sin(t), 'k')
        plot(real(ra), imag(ra), 'xr')
        plot(real(rb), imag(rb), 'ob')
        axis('equal')
        show()

    def ir(self, N):
        """ Plot the first N samples of the impulse response. """
        ip = zeros(N)
        ip[0] = 1
        rip = self.filter(ip)
        stem(arange(N), rip)
        show()
if __name__ == "__main__":
    # Demo: design and display an elliptic band-pass filter.
    fe = 10           # sampling frequency
    fN = fe/2.0       # Nyquist frequency
    wt = 0.01         # transition width (normalised)
    # Stop-band edges normalised to Nyquist; pass band sits wt inside them.
    ws = array([3.168,3.696])/fN
    wp = [ws[0]+wt,ws[1]-wt]
    flt = DF()
    gpass = 0.5       # max pass-band ripple (dB)
    gstop = 40        # min stop-band attenuation (dB)
    flt.ellip_bp(wp,ws,gpass,gstop)
    flt.zplane()
    flt.freqz()
| mit | Python | |
020f78b6aa265f925db6a21435a074f6ca1a3cbf | update ipython notebook runner to accomodate specific usages of matplotlib (i.e. import matplotlib.pyplot as plt) and ipython magic functions (ignore them) within the demo testing framework. | YzPaul3/h2o-3,kyoren/https-github.com-h2oai-h2o-3,mathemage/h2o-3,madmax983/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,datachand/h2o-3,pchmieli/h2o-3,mathemage/h2o-3,spennihana/h2o-3,tarasane/h2o-3,datachand/h2o-3,kyoren/https-github.com-h2oai-h2o-3,datachand/h2o-3,datachand/h2o-3,h2oai/h2o-dev,junwucs/h2o-3,junwucs/h2o-3,junwucs/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,brightchen/h2o-3,brightchen/h2o-3,michalkurka/h2o-3,kyoren/https-github.com-h2oai-h2o-3,datachand/h2o-3,mathemage/h2o-3,madmax983/h2o-3,pchmieli/h2o-3,printedheart/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,brightchen/h2o-3,brightchen/h2o-3,brightchen/h2o-3,michalkurka/h2o-3,madmax983/h2o-3,madmax983/h2o-3,pchmieli/h2o-3,datachand/h2o-3,spennihana/h2o-3,YzPaul3/h2o-3,madmax983/h2o-3,YzPaul3/h2o-3,junwucs/h2o-3,kyoren/https-github.com-h2oai-h2o-3,brightchen/h2o-3,printedheart/h2o-3,tarasane/h2o-3,YzPaul3/h2o-3,tarasane/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,tarasane/h2o-3,pchmieli/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,pchmieli/h2o-3,junwucs/h2o-3,h2oai/h2o-3,h2oai/h2o-3,datachand/h2o-3,printedheart/h2o-3,pchmieli/h2o-3,printedheart/h2o-3,kyoren/https-github.com-h2oai-h2o-3,YzPaul3/h2o-3,michalkurka/h2o-3,jangorecki/h2o-3,mathemage/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,printedheart/h2o-3,h2oai/h2o-dev,tarasane/h2o-3,kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-3,junwucs/h2o-3,spennihana/h2o-3,printedheart/h2o-3,madmax983/h2o-3,madmax983/h2o-3,jangorecki/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,spennihana/h2o-3,YzPaul3/h2o-3,printedheart/h2o-3,tarasane/h2o-3,mathemage/h2o-3,h2oai/h2o-3,junwucs/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,h2oai/h2o-3,kyoren/https-github.com-h2oai-h2o-3,tarasan
e/h2o-3,pchmieli/h2o-3,brightchen/h2o-3 | h2o-py/tests/utils/ipynb_demo_runner.py | h2o-py/tests/utils/ipynb_demo_runner.py | import json
import os
def ipy_notebook_exec(path,save_and_norun=False):
    # Load the notebook JSON and stitch its code cells into one program.
    # (Python 2 module: note the exec statement below.)
    notebook = json.load(open(path))
    program = ''
    for block in ipy_code_blocks(notebook):
        for line in ipy_valid_lines(block):
            # Skip h2o.init calls: the test harness manages its own cluster.
            if "h2o.init" not in line:
                program += line if '\n' in line else line + '\n'
    if save_and_norun:
        # Save the stitched program as a .py next to the notebook name
        # instead of executing it.
        with open(os.path.basename(path).split('ipynb')[0]+'py',"w") as f:
            f.write(program)
    else:
        # Execute in an isolated namespace dict.
        d={}
        exec program in d # safe, but horrible (exec is horrible)
def ipy_blocks(notebook):
    # Notebook format v3 nests cells under 'worksheets'; v4 uses 'cells'.
    if 'worksheets' in notebook.keys():
        return notebook['worksheets'][0]['cells'] # just take the first worksheet
    elif 'cells' in notebook.keys():
        return notebook['cells']
    else:
        raise NotImplementedError, "ipython notebook cell/block json format not handled"
def ipy_code_blocks(notebook):
    """Return only the cells whose cell_type is 'code'."""
    code_cells = []
    for cell in ipy_blocks(notebook):
        if cell['cell_type'] == 'code':
            code_cells.append(cell)
    return code_cells
def ipy_lines(block):
    # Cell source lives under 'source' (format v4) or 'input' (v3).
    if 'source' in block.keys():
        return block['source']
    elif 'input' in block.keys():
        return block['input']
    else:
        raise NotImplementedError, "ipython notebook source/line json format not handled"
def ipy_valid_lines(block):
    # remove ipython magic functions (lines starting with '%')
    lines = [line for line in ipy_lines(block) if not line.startswith('%')]

    # (clunky) matplotlib handling: if the cell imports pyplot, force the
    # headless Agg backend so the demo can run without a display, and drop
    # the plt.show() calls that would otherwise block.
    for line in lines:
        if line == "import matplotlib.pyplot as plt":
            import matplotlib
            matplotlib.use('Agg', warn=False)

    return [line for line in lines if not "plt.show()" in line]
| import json
import os
def ipy_notebook_exec(path,save_and_norun=False):
    # Earlier revision: stitches *all* cells' lines (no code-cell or magic
    # filtering yet) into one program. Python 2 (exec statement below).
    notebook = json.load(open(path))
    program = ''
    for block in ipy_blocks(notebook):
        for line in ipy_lines(block):
            # Skip h2o.init calls: the test harness manages its own cluster.
            if "h2o.init" not in line:
                program += line if '\n' in line else line + '\n'
    if save_and_norun:
        # Save the stitched program as a .py instead of executing it.
        with open(os.path.basename(path).split('ipynb')[0]+'py',"w") as f:
            f.write(program)
    else:
        d={}
        exec program in d # safe, but horrible (exec is horrible)
def ipy_blocks(notebook):
    # Notebook format v3 nests cells under 'worksheets'; v4 uses 'cells'.
    if 'worksheets' in notebook.keys():
        return notebook['worksheets'][0]['cells'] # just take the first worksheet
    elif 'cells' in notebook.keys():
        return notebook['cells']
    else:
        raise NotImplementedError, "ipython notebook cell/block json format not handled"
def ipy_lines(block):
    # Cell source lives under 'source' (format v4) or 'input' (v3).
    if 'source' in block.keys():
        return block['source']
    elif 'input' in block.keys():
        return block['input']
    else:
        raise NotImplementedError, "ipython notebook source/line json format not handled"
| apache-2.0 | Python |
944d0ff3995916d80e871e47d73d9b9df7848e6e | Add wsgi | ML42/facemash,ML42/facemash,ML42/facemash | server/wsgi.py | server/wsgi.py | from main import app as application
if __name__ == "__main__":
    # Convenience entry point: run the app directly; WSGI servers import
    # `application` from this module instead.
    application.run()
| bsd-3-clause | Python | |
5c2461ca9dced8099eb67d80fdd118d1ec012b2d | add amendment content type | ecreall/nova-ideo,ecreall/nova-ideo,ecreall/nova-ideo,ecreall/nova-ideo,ecreall/nova-ideo | novaideo/content/amendment.py | novaideo/content/amendment.py | import colander
import deform
from zope.interface import implementer
from persistent.list import PersistentList
from pyramid.threadlocal import get_current_request
from substanced.interfaces import IUserLocator
from substanced.principal import DefaultUserLocator
from substanced.content import content
from substanced.schema import NameSchemaNode
from substanced.util import renamer
from dace.util import getSite
from dace.objectofcollaboration.principal.util import get_current
from dace.descriptors import (
CompositeMultipleProperty,
SharedUniqueProperty,
SharedMultipleProperty
)
from pontus.widget import RichTextWidget,Select2Widget
from pontus.core import VisualisableElementSchema
from pontus.schema import Schema
from pontus.file import Object as ObjectType
from .interface import IAmendment
from novaideo.core import (
SearchableEntity,
SearchableEntitySchema,
CorrelableEntity,
Commentable,
can_access)
from novaideo import _
from novaideo.views.widget import ConfirmationWidget
@colander.deferred
def intention_choice(node, kw):
    """Deferred widget listing the site's idea intentions, sorted."""
    site_root = getSite()
    choices = [('', '- Select -')]
    choices.extend([(intention, intention)
                    for intention in sorted(site_root.idea_intentions)])
    return Select2Widget(values=choices)
def context_is_a_amendment(context, request):
    # True when the traversal context already is an 'amendment'; used as the
    # `editing` predicate of the name node in AmendmentSchema.
    return request.registry.content.istype(context, 'amendment')
@colander.deferred
def replaced_ideas_choice(node, kw):
    """Deferred widget listing the ideas related to the bound context that
    the current user can access.

    Fix: removed an unused ``root = getSite()`` lookup.
    """
    context = node.bindings['context']
    user = get_current()
    ideas = [i for i in context.related_ideas if can_access(user, i)]
    values = [(i, i.title) for i in ideas]
    values.insert(0, ('', '- Select -'))
    return Select2Widget(values=values)
class ReplacedIdeaSchema(Schema):
    # Sub-form identifying which existing idea the amendment replaces.

    replaced_idea = colander.SchemaNode(
        ObjectType(),
        widget=replaced_ideas_choice,
        title=_('Replaced ideas'),
        missing=None,
        description=_('Choose the replaced idea')
        )

    # Checkbox for the case where the replaced idea cannot be identified.
    not_identified = colander.SchemaNode(
        colander.Boolean(),
        widget=deform.widget.CheckboxWidget(),
        label=_('Idea not identified'),
        title =_(''),
        missing=False
        )
@colander.deferred
def ideas_replacement_choice(node, kw):
    # Offer every idea of the site that the current user can access as a
    # candidate "idea of replacement".
    root = getSite()
    user = get_current()
    ideas = [i for i in root.ideas if can_access(user, i)]
    values = [(i, i.title) for i in ideas]
    values.insert(0, ('', '- Select -'))
    return Select2Widget(values=values)
class IdeaOfReplacementSchema(Schema):
    # Sub-form selecting (or creating) the idea that replaces another.

    idea_of_replacement = colander.SchemaNode(
        ObjectType(),
        widget=ideas_replacement_choice,
        title=_('Idea of replacement'),
        missing=None,
        description=_('Choose the idea of replacement')
        )

    new_idea = colander.SchemaNode(
        colander.Boolean(),
        widget=deform.widget.CheckboxWidget(),
        # Fix: typo in the user-facing label ("Creat" -> "Create").
        label=_('Create a new idea'),
        title=_(''),
        missing=False
        )
class AmendmentConfirmationSchema(Schema):
    # Confirmation step of the amendment: the author states the intention,
    # comments, and maps replaced/replacement ideas.

    intention = colander.SchemaNode(
        colander.String(),
        widget=intention_choice,
        title=_('Intention'),
        )

    # Free-text explanation, limited to 500 characters.
    comment = colander.SchemaNode(
        colander.String(),
        validator=colander.Length(max=500),
        widget=deform.widget.TextAreaWidget(rows=4, cols=60),
        )

    replaced_idea = ReplacedIdeaSchema(widget=deform.widget.MappingWidget())

    idea_of_replacement = IdeaOfReplacementSchema(widget=deform.widget.MappingWidget())
class AmendmentSchema(VisualisableElementSchema, SearchableEntitySchema):
    # Main add/edit form for an amendment.

    name = NameSchemaNode(
        # Name is only editable when the context already is an amendment.
        editing=context_is_a_amendment,
        )

    text = colander.SchemaNode(
        colander.String(),
        widget=RichTextWidget()
        )

    confirmation = AmendmentConfirmationSchema(widget=ConfirmationWidget(css_class='confirmation'))
@content(
    'amendment',
    icon='glyphicon glyphicon-align-left',
    )
@implementer(IAmendment)
class Amendment(Commentable, CorrelableEntity, SearchableEntity):
    # Persistent 'amendment' content type.
    name = renamer()
    # Relations (dace descriptors): author, owning proposal, and the idea
    # links established during the confirmation step.
    author = SharedUniqueProperty('author')
    proposal = SharedUniqueProperty('proposal', 'amendments')
    replaced_idea = SharedUniqueProperty('replaced_idea')
    idea_of_replacement = SharedUniqueProperty('idea_of_replacement')

    def __init__(self, **kwargs):
        super(Amendment, self).__init__(**kwargs)
        # Apply the keyword arguments as entity data.
        self.set_data(kwargs)
| agpl-3.0 | Python | |
34979240ae3cd9ed617a358a0097376badfdc229 | add announcement view | Scifabric/pybossa,Scifabric/pybossa,PyBossa/pybossa,PyBossa/pybossa | pybossa/view/announcements.py | pybossa/view/announcements.py | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2017 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
"""Announcements view for PYBOSSA."""
from flask import Blueprint, current_app
from flask import render_template
from flask.ext.login import current_user
from pybossa.cache import users as cached_users
from pybossa.util import handle_content_type
from pybossa.core import announcement_repo
blueprint = Blueprint('announcements', __name__)


@blueprint.route('/')
def show_announcements():
    """Return every announcement, rendered per the requested content type."""
    payload = dict(
        template="",
        announcements=announcement_repo.get_all_announcements())
    return handle_content_type(payload)
| agpl-3.0 | Python | |
546c38310ff49af4f9490ba32ce3ab2cfc4d5e8d | Create sherpa_test.py | kelceydamage/sherpa,kelceydamage/sherpa | sherpa_test.py | sherpa_test.py | #!/usr/bin/env python
import optparse
from sherpa import Sherpa
from sherpa_helpers import debug_results, distribution, quartermaster
from perf import *
# Command-line interface: -p/-r/-c are all required by the code below.
parser = optparse.OptionParser()
parser.add_option('-p', '--packages', dest='packages', help='specify the minimum number of packages to ship')
parser.add_option('-r', '--routes', dest='routes', help='specify the amount of routes to ship to')
parser.add_option('-c', '--parcels', dest='parcels', help='specify the amount of parcels to package')
parser.add_option('-s', '--shape', dest='shape', help='specify the shape of the parcels [use a 2 value tupple]')
(options, args) = parser.parse_args()

# Exit when ANY of the required options is missing. The previous guard used
# `and`, so it only exited when all three were absent and then crashed on
# int(None) below whenever just one was missing.
if not (options.packages and options.routes and options.parcels):
    parser.error('options -p/--packages, -r/--routes and -c/--parcels are all required')

min_packages = int(options.packages)
routes = int(options.routes)
# Split the requested parcel count into parcels sized between 4 and 16 units.
parcels = quartermaster(int(options.parcels), [4, 16])

# Pack every parcel over the available routes. A single Sherpa instance is
# reused so any internal state it keeps accumulates across parcels.
sherpa = Sherpa(min_packages)
order = [sherpa.packer(parcel, routes) for parcel in parcels]

routes, packages = distribution(order)

debug_results(
    sherpa,
    packages,
    routes,
    parcels
)
perf_results()
| apache-2.0 | Python | |
21d73b5afc04a718c0db2bff899e7d1f4020deb3 | set blank=True on editoritem.licence | numbas/editor,numbas/editor,numbas/editor | editor/migrations/0019_auto_20160601_1528.py | editor/migrations/0019_auto_20160601_1528.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('editor', '0018_site_broadcasts'),
]
operations = [
migrations.AlterField(
model_name='editoritem',
name='licence',
field=models.ForeignKey(blank=True, to='editor.Licence', null=True),
),
]
| apache-2.0 | Python | |
5e401d2f2a2f9c874667a4aa372fd07755ab9c31 | add Brain.save_movie() example | diego0020/PySurfer,bpinsard/PySurfer,haribharadwaj/PySurfer,mwaskom/PySurfer,effigies/PySurfer,nipy/PySurfer,Eric89GXL/PySurfer | examples/save_movie.py | examples/save_movie.py | """
Create movie from MEG inverse solution
=======================================
Data were computed using mne-python (http://martinos.org/mne)
"""
print __doc__
import os
import numpy as np
from surfer import Brain
from surfer.io import read_stc
"""
create Brain object for visualization
"""
# Split layout: left/right hemispheres shown side by side in one window.
brain = Brain('fsaverage', 'split', 'inflated',
              config_opts=dict(width=800, height=400))

"""
read MNE dSPM inverse solution
"""
# One .stc file per hemisphere; each is loaded and attached to its side.
for hemi in ['lh', 'rh']:
    stc_fname = os.path.join('example_data',
                             'meg_source_estimate-' + hemi + '.stc')
    stc = read_stc(stc_fname)
    data = stc['data']

    """
    time points in milliseconds
    """
    # tmin/tstep are in seconds in the stc; scale to ms for the time label.
    time = 1e3 * np.linspace(stc['tmin'],
                             stc['tmin'] + data.shape[1] * stc['tstep'],
                             data.shape[1])
    brain.add_data(data, colormap='hot', vertices=stc['vertices'],
                   smoothing_steps=10, time=time, time_label='time=%0.2f ms',
                   hemi=hemi)

"""
scale colormap
"""
# fmin/fmid/fmax define the color scale; transparent fades values below fmin.
brain.scale_data_colormap(fmin=13, fmid=18, fmax=22, transparent=True)

"""
Save movies with different combinations of views
"""
brain.save_movie('example_current.mov')
brain.save_movie('example_single.mov', montage='single')
brain.save_movie('example_h.mov', montage=['lat', 'med'], orientation='h')
brain.save_movie('example_v.mov', montage=[['lat'], ['med']])
brain.close()
| bsd-3-clause | Python | |
a482ac3b96e4c8b72d1055b7f29a474c85101f67 | Create solution.py | lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges | leetcode/easy/power_of_two/py/solution.py | leetcode/easy/power_of_two/py/solution.py | class Solution(object):
def isPowerOfTwo(self, n):
    """
    :type n: int
    :rtype: bool
    """
    # A power of two is a positive integer with exactly one set bit;
    # clearing the lowest set bit (n & (n - 1)) must then leave zero.
    if n <= 0:
        return False
    return n & (n - 1) == 0
| mit | Python | |
4b0221ca503be9450919e4ed4e6a75ce92cd2d63 | Create new module for continuous design variables | csdms/dakota,csdms/dakota | csdms/dakota/variables/continuous_design.py | csdms/dakota/variables/continuous_design.py | """Implementation of a Dakota continous design variable."""
from .base import VariableBase
classname = 'ContinuousDesign'
class ContinuousDesign(VariableBase):
    """Define attributes for Dakota continuous design variables."""

    def __init__(self,
                 variables=('x1', 'x2'),
                 initial_point=None,
                 lower_bounds=None,
                 upper_bounds=None,
                 scale_types=None,
                 scales=None,
                 **kwargs):
        """Create a set of continuous design variables.

        Parameters
        ----------
        variables : tuple or list of str
            Descriptors (names) of the study variables.
        initial_point : tuple or list of float, optional
            Start points used by the study variables.
        lower_bounds, upper_bounds : tuple or list of float, optional
            Minimum/maximum values of the study variables.
        scale_types, scales : optional
            Accepted for interface compatibility; currently unused here.
        """
        VariableBase.__init__(self, **kwargs)
        self.variables = variables
        self._initial_point = initial_point
        self._lower_bounds = lower_bounds
        self._upper_bounds = upper_bounds
        # If neither a start point nor any bounds were given, fall back to a
        # default start point so the generated variables block is never empty.
        # (The previous version broke this condition across two lines without
        # a continuation, which was a syntax error.)
        if (initial_point is None and lower_bounds is None
                and upper_bounds is None):
            self._initial_point = (0.0, 0.0)

    @property
    def initial_point(self):
        """Start points used by study variables."""
        return self._initial_point

    @initial_point.setter
    def initial_point(self, value):
        """Set start points used by study variables.

        Parameters
        ----------
        value : list or tuple of numbers
            The new initial points.
        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Initial points must be a tuple or a list")
        self._initial_point = value

    @property
    def lower_bounds(self):
        """Minimum values of study variables."""
        return self._lower_bounds

    @lower_bounds.setter
    def lower_bounds(self, value):
        """Set minimum values of study variables.

        Parameters
        ----------
        value : list or tuple of numbers
            The minimum values.
        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Lower bounds must be a tuple or a list")
        self._lower_bounds = value

    @property
    def upper_bounds(self):
        """Maximum values of study variables."""
        return self._upper_bounds

    @upper_bounds.setter
    def upper_bounds(self, value):
        """Set maximum values of study variables.

        Parameters
        ----------
        value : list or tuple of numbers
            The maximum values.
        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Upper bounds must be a tuple or a list")
        self._upper_bounds = value

    def variables_block(self):
        """Define the variables block for continuous design variables.

        Returns
        -------
        str
            The Dakota input-file text for this variables block.
        """
        # self.variable_type is presumably provided by VariableBase — confirm.
        s = 'variables\n'
        s += '  {0} = {1}'.format(self.variable_type,
                                  len(self.variables))
        if self.initial_point is not None:
            s += '\n' \
                 + '    initial_point ='
            for pt in self.initial_point:
                s += ' {}'.format(pt)
        if self.lower_bounds is not None:
            s += '\n' \
                 + '    lower_bounds ='
            for b in self.lower_bounds:
                s += ' {}'.format(b)
        if self.upper_bounds is not None:
            s += '\n' \
                 + '    upper_bounds ='
            for b in self.upper_bounds:
                s += ' {}'.format(b)
        s += '\n' \
             + '    descriptors ='
        for vd in self.variables:
            s += ' {!r}'.format(vd)
        s += '\n\n'
        return s
| mit | Python | |
9f8f5e731d0cd9096e230c78b40a60be7eceef56 | update : minor changes | black-perl/ptop | ptop/plugins/disk_sensor.py | ptop/plugins/disk_sensor.py | '''
Disk Sensor Plugin
'''
from ptop.core import Plugin
import psutil
class DiskSensor(Plugin):
    """Sensor plugin publishing disk-usage figures for the root partition."""

    def __init__(self, **kwargs):
        super(DiskSensor, self).__init__(**kwargs)

    def update(self):
        """Refresh the text section with '/' usage (no graph section)."""
        usage = psutil.disk_usage('/')
        to_mb = 1024 * 1024
        # The displayed value is a list of (label, value) pairs per mount key.
        self.currentValue['text'] = {
            '/': [
                ('Total', float(usage.total) / to_mb),
                ('Used', float(usage.used) / to_mb),
                ('Percent', int(usage.percent)),
            ]
        }


disk_sensor = DiskSensor(name='Disk', sensorType='text', interval=1)
| mit | Python | |
6064f3897699903b85a469a9ee77e44116f7df4c | add a network utility to check for ipv6 | ceph/radosgw-agent,ceph/radosgw-agent | radosgw_agent/util/network.py | radosgw_agent/util/network.py | import socket
def is_ipv6(address):
    """
    Check if an address is an IPV6 one, but trim commonly used brackets as the
    ``socket`` module complains about them.
    """
    if not isinstance(address, str):
        return False
    # A leading '[' suggests a bracketed address, possibly with a ':port'
    # suffix — drop the suffix first, then the brackets themselves, so
    # inet_pton receives a bare address.
    if address.startswith('['):
        address = address.split(']:')[0]
    address = address.strip('[]')
    try:
        socket.inet_pton(socket.AF_INET6, address)
        return True
    except socket.error:  # not a valid IPv6 address
        return False
| mit | Python | |
0a4984fa49ec53ae5b2908e0518b4e72c031fc1b | add python3 encoder for bytes and sets | Caleydo/caleydo_server,phovea/phovea_server,phovea/phovea_server,Caleydo/caleydo_server,phovea/phovea_server,phovea/phovea_server | phovea_server/python3_encoder.py | phovea_server/python3_encoder.py | """
This encoder is required to handle changes of data types in Python 3.7 by decoding bytes objects to strings and adding list() to set().
"""
class Python3Encoder(object):
    """Encoder for Python 3 types: bytes -> str (UTF-8) and set -> list."""

    def __contains__(self, obj):
        # The encoder "contains" an object when it knows how to convert it.
        return isinstance(obj, (bytes, set))

    def __call__(self, obj, base_encoder):
        """Convert *obj* if supported, otherwise return None."""
        if isinstance(obj, bytes):
            return obj.decode('utf-8')
        if isinstance(obj, set):
            return list(obj)
        return None


encoder = Python3Encoder()


def create():
    """Return the shared module-level encoder instance."""
    return encoder
| bsd-3-clause | Python | |
9b1b66f72b3e9ca383d2396a368f8ce5e85d9e7c | Add a snippet (python/matplotlib). | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/matplotlib/plot2d.py | python/matplotlib/plot2d.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import math
import numpy as np
import matplotlib.pyplot as plt
def main():
    """Plot a sine-modulated Gaussian and save the figure to ``test.pdf``."""
    xs = np.arange(-10, 10, 0.01)
    # 2 Hz sine carrier multiplied by the standard normal density.
    ys = np.sin(2 * 2 * np.pi * xs) * 1 / np.sqrt(2 * np.pi) * np.exp(-(xs ** 2) / 2)

    fig, ax = plt.subplots(figsize=(16.0, 10.0))
    ax.plot(xs, ys, "-", label="Test")

    # Title and axis labels (LaTeX-rendered).
    ax.set_title(r"Test", fontsize=20)
    ax.set_xlabel(r"$x$", fontsize=32)
    ax.set_ylabel(r"$f(x)$", fontsize=32)

    ax.legend(loc='lower right', fontsize=20)

    # Persist the figure, then display it interactively.
    plt.savefig("test.pdf")
    plt.show()


if __name__ == "__main__":
    main()
| mit | Python | |
dd53b54fbf64a51231a35f0f87ed6b543a8666fe | add genia cache | text-machine-lab/CliNER,text-machine-lab/CliNER | clicon/features_dir/genia/genia_cache.py | clicon/features_dir/genia/genia_cache.py | import cPickle as pickle
import os
class GeniaCache:
    """Persistent key/value cache stored as a pickle next to this module.

    The cache file (``genia_cache``) is loaded on construction and written
    back when the object is destroyed. Keys are normalized to strings.
    """

    def __init__(self):
        prefix = os.path.dirname(__file__)
        self.filename = os.path.join(prefix, 'genia_cache')
        try:
            # Use a context manager so the handle is closed promptly
            # (the old code left the file object to the garbage collector).
            with open(self.filename, 'rb') as cache_file:
                self.cache = pickle.load(cache_file)
        except IOError:
            # First run or unreadable cache file: start empty.
            self.cache = {}

    def has_key(self, key):
        """Return True if a value is cached for `key`."""
        # `in` replaces dict.has_key(), which Python 3 removed.
        return str(key) in self.cache

    def add_map(self, key, value):
        """Store `value` under `key`."""
        self.cache[str(key)] = value

    def get_map(self, key):
        """Return the cached value for `key` (raises KeyError if absent)."""
        return self.cache[str(key)]

    def __del__(self):
        # Persist on teardown. NOTE(review): __del__ is not guaranteed to
        # run at interpreter exit; an explicit save() or atexit hook would
        # be more reliable — kept for interface compatibility.
        with open(self.filename, 'wb') as cache_file:
            pickle.dump(self.cache, cache_file)
| apache-2.0 | Python | |
2fbec137ba9efcbfc657a469856507ea8da079e3 | Create draw_shapes_gui.py | CSavvy/python | draw_shapes_gui.py | draw_shapes_gui.py | # Import Myro library and connect to the robot
# Import Myro library and connect to the robot over its serial device.
from Myro import *
init("/dev/tty.Fluke2-0216-Fluke2")

# We also need to import the graphics library!
from Graphics import *

# Print the current battery level
print("Battery level:", getBattery())

# Make a graphics window with the title below and dimensions 500x500
win = Window('Draw Shapes with Scribbler', 500, 500)

# Now set the window's background with a picture.
# Remember to have this picture in the same folder as this program!!
background = makePicture("shape_gui_background.png")
# Draw the background picture on the graphics Window
background.draw(win)

# Run a loop while the window is open to keep getting where the user clicks
# to draw shapes.
while win.isVisible():
    # Wait for them to click, and store the x and y coordinates of their
    # click in x and y.
    x, y = getMouse()
    # The 500x500 window acts as four 250x250 buttons: which quadrant was
    # clicked decides which shape the robot drives.
    if x < 250 and y < 250:
        print('Drawing a Square')
        # Four equal sides with 90-degree turns between them.
        forward(1, 1)
        turnBy(90)
        forward(1, 1)
        turnBy(90)
        forward(1, 1)
        turnBy(90)
        forward(1, 1)
        turnBy(90)
    elif x < 250 and y >= 250:
        print('Drawing a Triangle')
        # Three sides with 120-degree exterior turns.
        forward(1, .9)
        turnBy(120)
        forward(1, .9)
        turnBy(120)
        forward(1, .9)
    elif x >= 250 and y < 250:
        print('Drawing a Circle')
        # Unequal wheel speeds for 9 seconds trace an arc back to the start.
        motors(.1, .8)
        wait(9)
        stop()
    elif x >= 250 and y >= 250:
        print('Drawing a Heart')
        # Two arcs joined by straight segments approximate a heart outline.
        motors(0, 1)
        wait(4)
        forward(1, 1)
        turnLeft(1, .65)
        forward(1, 1)
        motors(0, 1)
        wait(4)
        stop()
    # Wait a small amount before checking again
    wait(0.05)
| mit | Python | |
17146ea739f714ffe2ad2a4c6f6b65f866f3f401 | Create heuristic1.py | ahadmushir/whatsCooking | heuristic1.py | heuristic1.py | import csv
import pandas
import json
import operator
def preprocess():
    """Build a deduplicated list of the most frequent ingredients per cuisine.

    Reads ``train.json`` (a list of ``{"cuisine": ..., "ingredients": [...]}``
    records), counts ingredient occurrences within each cuisine, keeps each
    cuisine's 200 most common ingredients, and returns their union as a list
    preserving first-appearance order.

    Returns
    -------
    list of str
        Unique top ingredients across all cuisines.
    """
    with open('train.json') as handle:
        recipes = json.load(handle)

    # Count ingredient frequencies per cuisine in a single pass. The previous
    # version re-scanned the whole dataset once per cuisine inside
    # IndexError-terminated `while True` loops with bare excepts that also
    # swallowed real errors.
    counts = {}
    for recipe in recipes:
        cuisine_counter = counts.setdefault(recipe['cuisine'], {})
        for ingredient in recipe['ingredients']:
            cuisine_counter[ingredient] = cuisine_counter.get(ingredient, 0) + 1

    # Take the top 200 ingredients of every cuisine (ties keep insertion
    # order, as sorted() is stable), deduplicated across cuisines while
    # preserving first-seen order.
    final_list = []
    seen = set()
    for cuisine_counter in counts.values():
        ranked = sorted(cuisine_counter.items(),
                        key=operator.itemgetter(1), reverse=True)
        for ingredient, _count in ranked[:200]:
            if ingredient not in seen:
                seen.add(ingredient)
                final_list.append(ingredient)
    return final_list
| apache-2.0 | Python | |
ed03e44ec58b6f757b0e62d25ea8f2bf822c0498 | Create LongComPrefix_002.py | cc13ny/algo,cc13ny/Allin,Chasego/cod,Chasego/cod,Chasego/codi,Chasego/codi,cc13ny/Allin,Chasego/codi,cc13ny/Allin,Chasego/codirit,Chasego/cod,cc13ny/Allin,cc13ny/algo,Chasego/codi,cc13ny/algo,Chasego/cod,cc13ny/algo,cc13ny/Allin,Chasego/cod,Chasego/codirit,Chasego/codi,Chasego/codirit,Chasego/codirit,Chasego/codirit,cc13ny/algo | leetcode/014-Longest-Common-Prefix/LongComPrefix_002.py | leetcode/014-Longest-Common-Prefix/LongComPrefix_002.py | #author: cchen
class Solution:
    # @param {string[]} strs
    # @return {string}
    def longestCommonPrefix(self, strs):
        """Return the longest prefix shared by every string in `strs`."""
        prefix = ''
        if not strs:
            return prefix
        # zip(*strs) yields tuples of the i-th character of every string
        # and stops at the shortest string, so no explicit length check
        # is needed.
        for column in zip(*strs):
            if len(set(column)) != 1:
                break
            prefix += column[0]
        return prefix
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.