commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
4fbba7e581b9ba4f98a3be4d09fbf929b32a6a87 | add test_load.py | imaginal/openprocurement.search,openprocurement/openprocurement.search | openprocurement/search/test_load.py | openprocurement/search/test_load.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import logging
import simplejson as json
import urllib, urllib2
from random import choice
from multiprocessing import Process
from time import time
FORMAT='%(asctime)-15s %(levelname)s %(processName)s %(message)s'
g_args=None
g_dict={}
def worker():
    """One load-generator process.

    Issues g_args.n randomized /search requests against the target host,
    drawing query parameters from the dictionaries loaded into g_dict.
    Python 2 code (urllib/urllib2).  g_args and g_dict are module globals
    set by prepare(); assumes fork-style multiprocessing so the child
    inherits them -- TODO confirm on non-fork platforms.
    """
    logging.debug('Starting process')
    requests = g_args.n
    while requests > 0:
        requests -= 1
        # Pick a random facet from the loaded dictionaries.
        key = choice(g_dict.keys())
        args = list()
        # full text query
        if key == 'query':
            # Use one random word of a random entry's name as the search
            # text; strip double quotes so the query string stays clean.
            code = choice(g_dict[key].keys())
            name = g_dict[key][code]
            word = choice(name.split(' '))
            word = word.replace('"', '').encode('utf-8')
            args.append((key, word))
        elif key == 'cpv' or key == 'dkpp':
            # Codes longer than 8 characters are truncated to 8.
            code = choice(g_dict[key].keys())
            if len(code) > 8:
                code = code[:8]
            args.append((key, code))
        else:
            # edrpou / region / status: use the raw key as-is.
            code = choice(g_dict[key].keys())
            args.append((key, code))
        qs = urllib.urlencode(args, True)
        url = g_args.host[0] + '/search?' + qs
        try:
            req = urllib2.urlopen(url, timeout=g_args.t)
            code = req.getcode()
            resp = req.read()
            if code == 200:
                data = json.loads(resp)
                logging.debug("%d %d %s total %d", code, len(resp), url, data['total'])
            else:
                logging.error("%d %d %s", code, len(resp), url)
        except Exception as e:
            # Network/HTTP failures are logged but do not abort the run.
            logging.error('Exception %s on %s', str(e), url)
    logging.debug('Leaving process')
def load_json(filename):
    """Read and return a JSON document from *filename*.

    A falsy filename (empty string or None) yields an empty dict so that
    callers can treat a missing dictionary file as "no entries".
    """
    if not filename:
        return {}
    with open(filename) as handle:
        return json.load(handle)
def prepare():
    """Parse the command line into g_args and load term dictionaries into g_dict.

    Raises ValueError when no dictionary option was supplied at all.
    """
    global g_args, g_dict
    parser = argparse.ArgumentParser(description='openprocurement.search.test_load')
    parser.add_argument('-c', metavar='concurrency', type=int, default=10)
    parser.add_argument('-n', metavar='requests', type=int, default=100)
    parser.add_argument('-t', metavar='timeout', type=int, default=10)
    parser.add_argument('-v', metavar='verbosity', type=int, default=logging.INFO)
    parser.add_argument('--cpv', metavar='cpv.json', nargs=1)
    parser.add_argument('--dkpp', metavar='dkpp.json', nargs=1)
    parser.add_argument('--edrpou', metavar='edrpou.json', nargs=1)
    parser.add_argument('--region', metavar='region.json', nargs=1)
    parser.add_argument('--status', metavar='status.json', nargs=1)
    parser.add_argument('--query', metavar='query.json', nargs=1)
    parser.add_argument('host', metavar='http://search-api.host[:port]', nargs=1)
    g_args = parser.parse_args()
    logging.basicConfig(level=g_args.v, format=FORMAT)
    # Each dictionary option is declared with nargs=1, so args_list is a
    # one-element list when the option was given; the loaded data replaces
    # any previous value for the same key.
    for key in ['cpv', 'dkpp', 'edrpou', 'region', 'status', 'query']:
        args_list = getattr(g_args, key, None)
        if isinstance(args_list, list):
            for filename in args_list:
                logging.debug('Load %s from %s', key, filename)
                g_dict[key] = load_json(filename)
    if not g_dict:
        # NOTE(review): region/status dictionaries also satisfy this check,
        # although the message below does not mention them.
        raise ValueError('At least one of cpv, dkpp, edrpou or query is required')
def main():
    """Entry point: spawn worker processes and report the aggregate rate."""
    start_time = time()
    prepare()
    logging.debug('Starting workers...')
    process_list = list()
    # Spawn g_args.c worker processes; each issues g_args.n requests, so the
    # total load is c * n queries.
    for i in range(g_args.c):
        p = Process(target=worker)
        process_list.append(p)
        p.start()
    logging.debug('Waiting for workers')
    for p in process_list:
        p.join()
    total_time = time() - start_time
    # 1.0 * forces float division (Python 2 semantics).
    query_rate = 1.0 * g_args.n * g_args.c / total_time
    logging.info('Total %d x %d queries in %1.3f sec. %1.1f r/s',
        g_args.n, g_args.c, total_time, query_rate)

if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
90e613063495dee9af8fefc7e682b15344eabc0d | Add an initial find missing translations script | quisquous/cactbot,quisquous/cactbot,quisquous/cactbot,quisquous/cactbot,quisquous/cactbot,quisquous/cactbot | util/find_missing_translations.py | util/find_missing_translations.py | import argparse
import os
import re
# Directory names to ignore when looking for JavaScript files.
ignore_dirs = [
".git",
"publish",
"ThirdParty",
]
# All valid two letter locale names.
all_locales = set(["en", "cn", "de", "fr", "ja", "ko"])
# Locales that are in zoneRegex object blocks.
zoneregex_locales = set(["en", "cn", "ko"])
# Locales that are not in zoneRegex object blocks.
non_zoneregex_locales = all_locales - zoneregex_locales
# Where to start looking for files.
def base_path():
    """Return the repository root: the parent of this script's directory.

    Uses os.pardir instead of the original literal "..\\", which only names
    the parent directory on Windows; on POSIX it is treated as a file named
    "..\\" and the walk would start from a nonexistent path.
    """
    return os.path.relpath(os.path.join(os.path.dirname(__file__), os.pardir))
# Return a list of all javascript filenames found under base_path()
def find_all_javascript_files():
    """Walk base_path() and collect every *.js file, skipping ignore_dirs.

    Renamed the accumulator from the misleading ``python_files`` (it holds
    JavaScript paths) and avoided shadowing the ``file`` builtin.
    """
    js_files = []
    for root, dirs, files in os.walk(base_path()):
        # Prune ignored directories in place so os.walk never descends into them.
        dirs[:] = [d for d in dirs if d not in ignore_dirs]
        for filename in files:
            if filename.endswith(".js"):
                js_files.append(os.path.join(root, filename))
    return js_files
# Print missing translations in |file| for |locales|
def parse_javascript_file(file, locales):
    """Scan one JavaScript file for translation-like objects and print any
    that lack entries for the requested locales.

    A "translation block" is any object literal containing an ``en:`` key.
    NOTE: the parameter name ``file`` shadows the builtin; kept for interface
    compatibility.
    """
    locales = set(locales)
    with open(file, encoding="utf-8") as fp:
        keys = []            # two-letter keys seen inside the current object
        open_match = None    # regex match for the current object's opening line
        open_obj_re = re.compile(r"(\s*)(.*{)\s*")
        key_re = re.compile(r"\s*(\w\w):")
        for idx, line in enumerate(fp):
            # Any time we encounter what looks like a new object, start over.
            # FIXME: this deliberately simplifies and will ignore nested objects.
            # That's what we get for parsing javascript with regex.
            m = open_obj_re.fullmatch(line)
            if m:
                open_match = m
                # idx is zero-based, but line numbers are not.
                line_number = idx + 1
                keys = []
                continue
            # If we're not inside an object, keep looking for the start of one.
            if not open_match:
                continue
            # If this object is ended with the same indentation,
            # then we've probably maybe found the end of this object.
            if re.match(open_match.group(1) + "}", line):
                # Check if these keys look like a translation block.
                if "en" in keys:
                    missing_keys = locales - set(keys)
                    open_str = open_match.group(2)
                    # Only some locales care about zoneRegex, so special case.
                    if open_str == "zoneRegex: {":
                        missing_keys -= non_zoneregex_locales
                    if missing_keys:
                        err = file + ":" + str(line_number)
                        err += ' "' + open_str + '"'
                        # With a single requested locale the list is redundant.
                        if len(locales) > 1:
                            err += " " + str(list(missing_keys))
                        print(err)
                open_match = None
                continue
            # If we're inside an object, find anything that looks like a key.
            key_match = key_re.match(line)
            if key_match:
                keys.append(key_match.group(1))
if __name__ == "__main__":
    example_usage = ""
    parser = argparse.ArgumentParser(
        description="Prints out a list of missing translations",
        epilog=example_usage,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "-l", "--locale", help="The locale to find missing translations for, e.g. de"
    )
    args = parser.parse_args()

    locales = all_locales
    if args.locale:
        # Idiomatic membership test; also drop the original `raise` --
        # parser.error() raises SystemExit itself and never returns, so
        # re-raising its (None) return value was dead, incorrect code.
        if args.locale not in all_locales:
            parser.error("Invalid locale: " + args.locale)
        locales = [args.locale]

    for file in find_all_javascript_files():
        parse_javascript_file(file, locales)
| apache-2.0 | Python | |
38bfc1a536f43ece367a49a62501b57c89f689a1 | Add script to delete tables. | pixyj/feel,pixyj/feel,pixyj/feel,pixyj/feel,pixyj/feel | django-server/feel/core/db/reset.py | django-server/feel/core/db/reset.py | from django.db.models.base import ModelBase
from quiz.models import Quiz, ShortAnswer, Choice, QuizAttempt
from codequiz.models import CodeQuiz, CodeQuizAttempt
from concept.models import Concept, ConceptSection
from course.models import Course, CourseSlug, CourseConcept, ConceptDependency
def reset():
    """Delete every row of every Django model imported into this module.

    Scans the module globals for model classes (instances of ModelBase,
    excluding ModelBase itself) and truncates each via .delete().
    Destructive -- intended only for development databases.
    """
    for key, item in globals().items():
        if type(item) == ModelBase and item != ModelBase:
            Model = item
            Model.objects.all().delete()
if __name__ == '__main__':
reset() | mit | Python | |
4b3c3fb315c0f7450dd87a98e3d7f928408a8ab4 | add documentation for do_layout() method | aron-bordin/kivy,janssen/kivy,edubrunaldi/kivy,janssen/kivy,Cheaterman/kivy,jehutting/kivy,jffernandez/kivy,janssen/kivy,autosportlabs/kivy,denys-duchier/kivy,akshayaurora/kivy,andnovar/kivy,thezawad/kivy,ernstp/kivy,bhargav2408/kivy,MiyamotoAkira/kivy,yoelk/kivy,Ramalus/kivy,LogicalDash/kivy,vitorio/kivy,ehealthafrica-ci/kivy,bionoid/kivy,youprofit/kivy,zennobjects/kivy,adamkh/kivy,Ramalus/kivy,janssen/kivy,aron-bordin/kivy,bliz937/kivy,JohnHowland/kivy,bliz937/kivy,inclement/kivy,angryrancor/kivy,Farkal/kivy,ernstp/kivy,iamutkarshtiwari/kivy,mSenyor/kivy,vipulroxx/kivy,dirkjot/kivy,inclement/kivy,niavlys/kivy,andnovar/kivy,habibmasuro/kivy,aron-bordin/kivy,bionoid/kivy,bhargav2408/kivy,bionoid/kivy,Farkal/kivy,kivatu/kivy-bak,JohnHowland/kivy,jehutting/kivy,matham/kivy,akshayaurora/kivy,viralpandey/kivy,tony/kivy,rafalo1333/kivy,vipulroxx/kivy,wangjun/kivy,xpndlabs/kivy,jffernandez/kivy,kivatu/kivy-bak,yoelk/kivy,ernstp/kivy,manashmndl/kivy,manthansharma/kivy,arcticshores/kivy,eHealthAfrica/kivy,jkankiewicz/kivy,jkankiewicz/kivy,akshayaurora/kivy,xiaoyanit/kivy,xiaoyanit/kivy,dirkjot/kivy,adamkh/kivy,inclement/kivy,habibmasuro/kivy,ehealthafrica-ci/kivy,CuriousLearner/kivy,Ramalus/kivy,matham/kivy,manthansharma/kivy,LogicalDash/kivy,Davideddu/kivy-forkedtouch,dirkjot/kivy,bob-the-hamster/kivy,yoelk/kivy,xpndlabs/kivy,MiyamotoAkira/kivy,KeyWeeUsr/kivy,darkopevec/kivy,VinGarcia/kivy,bob-the-hamster/kivy,ernstp/kivy,Farkal/kivy,bob-the-hamster/kivy,zennobjects/kivy,el-ethan/kivy,iamutkarshtiwari/kivy,eHealthAfrica/kivy,arcticshores/kivy,habibmasuro/kivy,rafalo1333/kivy,gonzafirewall/kivy,cbenhagen/kivy,vitorio/kivy,Cheaterman/kivy,autosportlabs/kivy,iamutkarshtiwari/kivy,Shyam10/kivy,darkopevec/kivy,dirkjot/kivy,xpndlabs/kivy,denys-duchier/kivy,rnixx/kivy,Davideddu/kivy-forkedtouch,darkopevec/kivy,hansent/kivy,bob-the-hamster/kivy,LogicalDash/kivy,Shyam10/kivy,wangjun/kivy,bliz937/k
ivy,Shyam10/kivy,wangjun/kivy,hansent/kivy,zennobjects/kivy,cbenhagen/kivy,eHealthAfrica/kivy,vipulroxx/kivy,angryrancor/kivy,ehealthafrica-ci/kivy,el-ethan/kivy,jegger/kivy,jkankiewicz/kivy,andnovar/kivy,yoelk/kivy,eHealthAfrica/kivy,viralpandey/kivy,gonzafirewall/kivy,jkankiewicz/kivy,matham/kivy,viralpandey/kivy,ehealthafrica-ci/kivy,manashmndl/kivy,jehutting/kivy,denys-duchier/kivy,kivy/kivy,rafalo1333/kivy,KeyWeeUsr/kivy,niavlys/kivy,jegger/kivy,kived/kivy,mSenyor/kivy,jffernandez/kivy,VinGarcia/kivy,VinGarcia/kivy,edubrunaldi/kivy,Davideddu/kivy-forkedtouch,bhargav2408/kivy,aron-bordin/kivy,jffernandez/kivy,thezawad/kivy,kived/kivy,cbenhagen/kivy,tony/kivy,niavlys/kivy,Cheaterman/kivy,manthansharma/kivy,KeyWeeUsr/kivy,vipulroxx/kivy,kivy/kivy,adamkh/kivy,MiyamotoAkira/kivy,kivy/kivy,thezawad/kivy,manthansharma/kivy,arcticshores/kivy,Shyam10/kivy,arlowhite/kivy,rnixx/kivy,bionoid/kivy,Davideddu/kivy-forkedtouch,jegger/kivy,youprofit/kivy,tony/kivy,vitorio/kivy,xiaoyanit/kivy,wangjun/kivy,kivatu/kivy-bak,gonzafirewall/kivy,autosportlabs/kivy,JohnHowland/kivy,angryrancor/kivy,niavlys/kivy,MiyamotoAkira/kivy,youprofit/kivy,arlowhite/kivy,el-ethan/kivy,matham/kivy,rnixx/kivy,KeyWeeUsr/kivy,hansent/kivy,Cheaterman/kivy,mSenyor/kivy,denys-duchier/kivy,arcticshores/kivy,jegger/kivy,adamkh/kivy,Farkal/kivy,zennobjects/kivy,arlowhite/kivy,CuriousLearner/kivy,JohnHowland/kivy,angryrancor/kivy,kivatu/kivy-bak,LogicalDash/kivy,manashmndl/kivy,hansent/kivy,CuriousLearner/kivy,kived/kivy,darkopevec/kivy,gonzafirewall/kivy,edubrunaldi/kivy | kivy/uix/layout.py | kivy/uix/layout.py | '''
Layout
======
Layouts are used to calculate and assign widget positions.
The :class:`Layout` class itself cannot be used directly. You must use one of:
- Anchor layout : :class:`kivy.uix.anchorlayout.AnchorLayout`
- Box layout : :class:`kivy.uix.boxlayout.BoxLayout`
- Float layout : :class:`kivy.uix.floatlayout.FloatLayout`
- Grid layout : :class:`kivy.uix.gridlayout.GridLayout`
- Stack layout : :class:`kivy.uix.stacklayout.StackLayout`
Understanding `size_hint` property in `Widget`
----------------------------------------------
The :data:`~kivy.uix.Widget.size_hint` is mostly used in Layout. This is the
size in percent, not in pixels. The format is::
widget.size_hint = (width_percent, height_percent)
The percent is specified as a floating point number in the range 0-1, ie 0.5
is 50%, 1 is 100%.
If you want a widget's width to be half of the parent's and their heights to
be identical, you can do::
widget.size_hint = (0.5, 1.0)
If you don't want to use size_hint for one of width or height, set the value to
None. For example, to make a widget that is 250px wide and 30% of the parent's
height, you can write::
widget.size_hint = (None, 0.3)
widget.width = 250
'''
__all__ = ('Layout', )
from kivy.clock import Clock
from kivy.uix.widget import Widget
class Layout(Widget):
    '''Layout interface class, used to implement every layout. Check module
    documentation for more information.
    '''

    def __init__(self, **kwargs):
        # Layout is abstract: only subclasses may be instantiated.
        if self.__class__ == Layout:
            raise Exception('The Layout class cannot be used.')
        kwargs.setdefault('size', (1, 1))
        # Trigger with priority -1 so do_layout runs once per frame, before
        # the next frame is drawn, no matter how many times it is requested.
        self._trigger_layout = Clock.create_trigger(self.do_layout, -1)
        super(Layout, self).__init__(**kwargs)

    def reposition_child(self, child, **kwargs):
        '''Force the child to be repositioned on the screen. This method is used
        internally in boxlayout.
        '''
        for prop in kwargs:
            child.__setattr__(prop, kwargs[prop])

    def do_layout(self, *largs):
        '''This function is called when a layout is needed, triggered by
        :prop:`_trigger_layout`.
        If you are doing a new Layout subclass, don't call this function
        directly, use :prop:`_trigger_layout` instead.

        .. versionadded:: 1.0.8
        '''
        pass

    def add_widget(self, widget, index=0):
        # Re-run the layout whenever a child's size or size_hint changes.
        widget.bind(
            size = self._trigger_layout,
            size_hint = self._trigger_layout)
        return super(Layout, self).add_widget(widget, index)

    def remove_widget(self, widget):
        # Unbind so a removed child can no longer trigger this layout.
        widget.unbind(
            size = self._trigger_layout,
            size_hint = self._trigger_layout)
        return super(Layout, self).remove_widget(widget)
| '''
Layout
======
Layouts are used to calculate and assign widget positions.
The :class:`Layout` class itself cannot be used directly. You must use one of:
- Anchor layout : :class:`kivy.uix.anchorlayout.AnchorLayout`
- Box layout : :class:`kivy.uix.boxlayout.BoxLayout`
- Float layout : :class:`kivy.uix.floatlayout.FloatLayout`
- Grid layout : :class:`kivy.uix.gridlayout.GridLayout`
- Stack layout : :class:`kivy.uix.stacklayout.StackLayout`
Understanding `size_hint` property in `Widget`
----------------------------------------------
The :data:`~kivy.uix.Widget.size_hint` is mostly used in Layout. This is the
size in percent, not in pixels. The format is::
widget.size_hint = (width_percent, height_percent)
The percent is specified as a floating point number in the range 0-1, ie 0.5
is 50%, 1 is 100%.
If you want a widget's width to be half of the parent's and their heights to
be identical, you can do::
widget.size_hint = (0.5, 1.0)
If you don't want to use size_hint for one of width or height, set the value to
None. For example, to make a widget that is 250px wide and 30% of the parent's
height, you can write::
widget.size_hint = (None, 0.3)
widget.width = 250
'''
__all__ = ('Layout', )
from kivy.clock import Clock
from kivy.uix.widget import Widget
class Layout(Widget):
'''Layout interface class, used to implement every layout. Check module
documentation for more information.
'''
def __init__(self, **kwargs):
if self.__class__ == Layout:
raise Exception('The Layout class cannot be used.')
kwargs.setdefault('size', (1, 1))
self._trigger_layout = Clock.create_trigger(self.do_layout, -1)
super(Layout, self).__init__(**kwargs)
def reposition_child(self, child, **kwargs):
'''Force the child to be repositioned on the screen. This method is used
internally in boxlayout.
'''
for prop in kwargs:
child.__setattr__(prop, kwargs[prop])
def do_layout(self, *largs):
pass
def add_widget(self, widget, index=0):
widget.bind(
size = self._trigger_layout,
size_hint = self._trigger_layout)
return super(Layout, self).add_widget(widget, index)
def remove_widget(self, widget):
widget.unbind(
size = self._trigger_layout,
size_hint = self._trigger_layout)
return super(Layout, self).remove_widget(widget)
| mit | Python |
586c047cebd679f6a736c2dfec9b6df762938b12 | Add command line tool. | jpbarela/panini_complete | simulate_packs.py | simulate_packs.py | #!/usr/local/bin/python3
import argparse
import Panini
from Panini import StickerCollection
from Accumulator import Accumulator

parser = argparse.ArgumentParser('Simulate creating a Panini sticker collection')
parser.add_argument('runs', metavar='N', type=int)
runs = parser.parse_args().runs

results = Accumulator.Accumulator()
# BUG FIX: the original looped `for i in (1, runs+1)`, iterating over a
# two-element tuple, so exactly two simulations ran no matter what N was
# requested.  range() performs the intended number of runs.
for i in range(runs):
    # Simulate one complete collection and record how many packs it took.
    collection = StickerCollection.StickerCollection(Panini.NUMBER_OF_STICKERS, Panini.STICKERS_TO_REQUEST, Panini.STICKERS_PER_PACK)
    results.add_value(collection.packs_till_complete())

print("Number of runs: {0}".format(runs))
print("Average number of packs needed: {0}".format(results.average()))
print("Standard deviation of packs: {0}".format(results.standard_deviation())) | mit | Python | |
7cb5e7b7ea65841e3eea11e337fe64e578cec2ce | Create buzzer.py | Dasl0ki/PyroBuzzer,Dasl0ki/PyroBuzzer | buzzer.py | buzzer.py | #!/usr/bin/env python
import RPi.GPIO as GPIO
import os
import smbus
from time import sleep

# I2C bus 1, port expander at address 0x20.  Register names/addresses match
# an MCP23017 (0x12/0x13 = GPIOA/GPIOB input ports, 0x0C/0x0D = GPPUA/GPPUB
# pull-up enables) -- presumably that chip; confirm against the hardware.
b = smbus.SMBus(1)
address = 0x20
GPIOA = 0x12
GPIOB = 0x13
# Enable pull-ups on all 16 pins so an idle button reads 1 and a pressed
# button pulls its line to 0 (active low).
b.write_byte_data(address,0x0C,0xFF)
b.write_byte_data(address,0x0D,0xFF)

# Start each round with a fresh ranking file; 'stop-script' is the sentinel
# file whose presence tells the main loop below to exit, so remove any
# leftover one to re-arm the loop.
if os.path.exists('ranking.txt'):
    os.remove('ranking.txt')
if os.path.exists('stop-script'):
    os.remove('stop-script')

text_file = open('ranking.txt', 'w')
#text_file.write('Ranking:\n')
text_file.close()

# Next ranking position to hand out (1 = first buzzer pressed).
i = 1

# Team names indexed by seat number - 1.
team = [
    '01',
    '02',
    '03',
    '04',
    '05',
    '06',
    '07',
    '08',
    '09',
    '10',
    '11',
    '12',
    '13',
    '14',
    '15',
    '16',
    ]
def checkseat(seat, i):
    """Record a button press for *seat* at rank *i*, unless already ranked.

    *seat* is a zero-padded two-character seat number ("01".."16").
    Returns the next free rank: i + 1 when a new entry was appended to
    ranking.txt, or i unchanged when the seat already appears there.
    """
    # A seat only gets ranked once: bail out if it is already in the file.
    with open('ranking.txt', 'r') as file_read:
        for line in file_read:
            if line[:2] == seat:
                return i
    with open('ranking.txt', 'a') as file_w:
        # int() accepts a leading zero ("07" -> 7), so the original special
        # case for seats below 10 was redundant; also avoid shadowing the
        # `id` builtin.
        team_index = int(seat) - 1
        file_w.write("%s %s: Team %s - Button pressed \n" % (seat, i, team[team_index]))
    return i + 1
# Poll both input ports until the 'stop-script' sentinel file appears.
# The original repeated sixteen near-identical `if` blocks (one per seat);
# they collapse into a single loop with the same bit mapping and output.
while True:
    if os.path.exists('stop-script'):
        break
    # Read the two 8-bit ports and render them as bit strings; a '0' bit
    # means the corresponding button is pressed (active-low inputs).
    buzzer_a = b.read_byte_data(address, GPIOA)
    buzzer_b = b.read_byte_data(address, GPIOB)
    result_a = '{0:08b}'.format(buzzer_a)
    result_b = '{0:08b}'.format(buzzer_b)
    # Seats 1-8 live on port B, seats 9-16 on port A; in both cases seat
    # numbering starts at the least significant bit, i.e. the rightmost
    # character of the bit string (seat 1 -> result_b[7], seat 8 ->
    # result_b[0], seat 9 -> result_a[7], seat 16 -> result_a[0]).
    for seat in range(1, 17):
        if seat <= 8:
            bit = result_b[8 - seat]
        else:
            bit = result_a[16 - seat]
        if bit == "0":
            i = checkseat("%02d" % seat, i)
            print("Button %d pressed" % seat)
| mit | Python | |
bad6bc988f09bf1f135d81eb654c5fc6c1de9a28 | add standalone gene report script. It will be used in the NDEx server as the GSEA exporter. | ndexbio/ndex-enrich | cx2grp.py | cx2grp.py | #!/usr/bin/python
'''
This script takes a CX network from stdin and print out a set of gene symbols found in node names,
represents, alias and function terms.
Gene Symbols are normallized to human genes using mygene.info services.
'''
import sys,json
import requests
def terms_from_function_term(function_term, term_set):
    """Recursively collect every string argument of a CX function term into term_set."""
    # if it is a function term, process all genes mentioned
    for parameter in function_term['args']:
        # isinstance() instead of a type() tuple comparison: equivalent here
        # and also accepts str/unicode subclasses.  (Python 2 code base,
        # hence the `unicode` check.)
        if isinstance(parameter, (str, unicode)):
            add_term(term_set, parameter)
        else:
            # Non-string arguments are nested function terms.
            terms_from_function_term(parameter, term_set)
def query_to_gene_all(q, tax_id='9606'):
    """Resolve *q* to an official gene symbol via the mygene.info web service.

    tax_id defaults to '9606' (human).  Returns the 'symbol' of the first
    hit (presumably relevance-sorted by the service -- TODO confirm), or
    None when there is no Entrez-backed hit.  Performs a network request.
    """
    r = requests.get('http://mygene.info/v2/query?q='+q+'&fields=symbol%2Centrezgene%2Censemblgene%2Cuniprot%2Calias&species='+tax_id+'&entrezonly=true')
    result = r.json()
    hits = result.get("hits")
    if hits and len(hits) > 0:
        hit = hits[0]
        gene = hit.get('symbol')
        return gene
    return None
def add_term(term_set, term):
    """Add *term* to *term_set*; if it carries a namespace prefix
    ("ns:rest"), also add the term with that first segment stripped."""
    term_set.add(term)
    _prefix, sep, remainder = term.partition(":")
    if sep:
        term_set.add(remainder)
def main():
    """Read a CX network from stdin, harvest candidate gene terms, and print
    the normalized gene symbols (via mygene.info) to stdout, one per line."""
    data = json.load(sys.stdin)
    # f = open("/Users/abc/Downloads/S1P5 pathway.cx","r")
    # data = json.load(f)
    namespaces = {}
    terms = set()
    for aspect in data:
        if '@context' in aspect:
            elements = aspect['@context']
            if len(elements) > 0:
                # A second element (or a second @context aspect) is invalid.
                if len(elements) > 1 or namespaces:
                    raise RuntimeError('@context aspect can only have one element')
                else:
                    namespaces = elements[0]
        elif 'nodes' in aspect:
            # Node names ('n') and represents ('r') are both candidate terms.
            for node in aspect.get('nodes'):
                if 'n' in node and node['n']:
                    add_term(terms, node['n'])
                if 'r' in node and node['r']:
                    add_term(terms, node['r'])
        elif "nodeAttributes" in aspect:
            # 'name' is a single value; 'alias' is a list of values.
            for attr in aspect ["nodeAttributes"]:
                if attr["n"] == "name" :
                    add_term(terms, attr["v"])
                elif attr["n"] == "alias":
                    for alias in attr['v']:
                        add_term (terms, alias)
        elif "functionTerms" in aspect:
            for functionTerm in aspect['functionTerms']:
                terms_from_function_term(functionTerm, terms)
    # Normalize each harvested term to a gene symbol; one web request per
    # term, duplicates collapsed by the set.
    genes = set()
    for term in terms :
        gene = query_to_gene_all(term)
        if gene :
            genes.add(gene)
    for gene in genes:
        sys.stdout.write(gene + "\n")
    sys.stdout.flush()
if __name__ == '__main__':
main() | bsd-2-clause | Python | |
9008d6e3d14a5a582f0ddbd6b4a113386b639f26 | Add Pyramid parser module | sloria/webargs,stas/webargs,Basis/webargs,hyunchel/webargs,yufeiminds/webargs,nealrs/webargs | webargs/pyramidparser.py | webargs/pyramidparser.py | # -*- coding: utf-8 -*-
"""Pyramid request argument parsing.
Example usage: ::
from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from pyramid.response import Response
from webargs import Arg
from webargs.pyramidparser import use_args
hello_args = {
'name': Arg(str, default='World')
}
@use_args(hello_args)
def hello_world(request, args):
return Response('Hello ' + args['name'])
if __name__ == '__main__':
config = Configurator()
config.add_route('hello', '/')
config.add_view(hello_world, route_name='hello')
app = config.make_wsgi_app()
server = make_server('0.0.0.0', 6543, app)
server.serve_forever()
"""
import functools
import logging
from webargs import core
logger = logging.getLogger(__name__)
class PyramidParser(core.Parser):
    """Pyramid request argument parser."""

    def parse_querystring(self, req, name, arg):
        """Pull a querystring value from the request."""
        return core.get_value(req.GET, name, arg.multiple)

    def parse_form(self, req, name, arg):
        """Pull a form value from the request."""
        return core.get_value(req.POST, name, arg.multiple)

    def parse_json(self, req, name, arg):
        """Pull a json value from the request."""
        try:
            json_data = req.json_body
        except ValueError:
            # Body is not valid JSON: report the argument as missing rather
            # than propagating a parse error.
            return core.Missing
        return core.get_value(json_data, name, arg.multiple)

    def parse_cookies(self, req, name, arg):
        """Pull the value from the cookiejar."""
        return core.get_value(req.cookies, name, arg.multiple)

    def parse_headers(self, req, name, arg):
        """Pull a value from the header data."""
        return core.get_value(req.headers, name, arg.multiple)

    def parse_files(self, req, name, arg):
        raise NotImplementedError('Files parsing not supported by {0}'
            .format(self.__class__.__name__))

    def use_args(self, argmap, req=None, targets=core.Parser.DEFAULT_TARGETS,
                 validate=None):
        """Decorator that injects parsed arguments into a view function or method.

        :param dict argmap: Dictionary of argument_name:Arg object pairs.
        :param req: The request object to parse
        :param tuple targets: Where on the request to search for values.
        :param callable validate:
            Validation function that receives the dictionary of parsed arguments.
            If the function returns ``False``, the parser will raise a
            :exc:`ValidationError`.
        """
        def decorator(func):
            @functools.wraps(func)
            def wrapper(obj, *args, **kwargs):
                # The first argument is either `self` or `request`
                try:  # get self.request
                    request = obj.request
                except AttributeError:  # first arg is request
                    request = obj
                # BUG FIX: forward the caller-supplied validate callable.
                # The original hard-coded validate=None here, silently
                # disabling validation for every decorated view.
                parsed_args = self.parse(argmap, req=request, targets=targets,
                                         validate=validate)
                return func(obj, parsed_args, *args, **kwargs)
            return wrapper
        return decorator
parser = PyramidParser()
use_args = parser.use_args
use_kwargs = parser.use_kwargs
| mit | Python | |
5149d86c7e787eff46f21669d448158ba0905a41 | Add dbck.py: a database check tool | daedalus/pynode,petertodd/stealth-addresses-ref-implementation,petertodd/dust-b-gone,daedalus/python-bitcoin_r,jgarzik/pynode,jgarzik/python-bitcoinlib,petertodd/tx-flood-attack,robertsdotpm/python-bitcoinlib | dbck.py | dbck.py | #!/usr/bin/python
#
# dbck.py
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import sys
import Log
import MemPool
import ChainDb
import cStringIO
from bitcoin.coredefs import NETWORKS
from bitcoin.core import CBlock
from bitcoin.scripteval import *
# Per-network log and chain database locations.
NET_SETTINGS = {
    'mainnet' : {
        'log' : '/tmp/dbck.log',
        'db' : '/tmp/chaindb'
    },
    'testnet3' : {
        'log' : '/tmp/dbcktest.log',
        'db' : '/tmp/chaintest'
    }
}

# Which of the networks above to check.
MY_NETWORK = 'mainnet'

SETTINGS = NET_SETTINGS[MY_NETWORK]

log = Log.Log(SETTINGS['log'])
mempool = MemPool.MemPool(log)
chaindb = ChainDb.ChainDb(SETTINGS['db'], log, mempool, NETWORKS[MY_NETWORK])

scanned = 0
failures = 0

# Walk every height in the chain database, deserialize the first block
# recorded at that height and run its self-validation check.
# (Python 2 code: xrange, cStringIO.  ser_uint256 presumably comes from the
# `bitcoin.scripteval` star import -- confirm.)
for height in xrange(chaindb.getheight()):
    heightidx = ChainDb.HeightIdx()
    heightidx.deserialize(chaindb.height[str(height)])
    # Only the first block hash at this height is checked.
    blkhash = heightidx.blocks[0]
    ser_hash = ser_uint256(blkhash)
    f = cStringIO.StringIO(chaindb.blocks[ser_hash])
    block = CBlock()
    block.deserialize(f)
    if not block.is_valid():
        log.write("block %064x failed" % (blkhash,))
        failures += 1
    scanned += 1
    # Progress line every 1000 blocks.
    if (scanned % 1000) == 0:
        log.write("Scanned height %d (%d failures)" % (
            height, failures))

log.write("Scanned %d blocks (%d failures)" % (scanned, failures))
| mit | Python | |
48bc3bfa4ab6648d3599af15cfe7a2dd69abdb40 | make gsid ctable schedule run hourly | puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,gmimano/commcaretest,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,qedsoftware/commcare-hq | custom/apps/gsid/ctable_mappings.py | custom/apps/gsid/ctable_mappings.py | from ctable.fixtures import CtableMappingFixture
from ctable.models import ColumnDef, KeyMatcher
class PatientSummaryMapping(CtableMappingFixture):
    """ctable fixture describing how the 'gsid/patient_summary' couch view
    is flattened into a SQL table for the gsid domain."""

    name = 'patient_summary'
    domains = ['gsid']
    couch_view = 'gsid/patient_summary'
    schedule_active = True

    @property
    def columns(self):
        """Column definitions: positions 0-15 of the view's emitted key map
        to dimension columns; the reduce value's 'sum' becomes the case
        count."""
        columns = [
            ColumnDef(name="domain", data_type="string", value_source="key", value_index=0),
            ColumnDef(name="disease_name", data_type="string", value_source="key", value_index=1),
            ColumnDef(name="test_version", data_type="string", value_source="key", value_index=2),
            ColumnDef(name="country", data_type="string", value_source="key", value_index=3),
            ColumnDef(name="province", data_type="string", value_source="key", value_index=4),
            ColumnDef(name="district", data_type="string", value_source="key", value_index=5),
            ColumnDef(name="clinic", data_type="string", value_source="key", value_index=6),
            ColumnDef(name="gender", data_type="string", value_source="key", value_index=7),
            ColumnDef(name="date", data_type="date", value_source="key", value_index=8, date_format="%Y-%m-%d"),
            ColumnDef(name="diagnosis", data_type="string", value_source="key", value_index=9),
            ColumnDef(name="lot_number", data_type="string", value_source="key", value_index=10),
            ColumnDef(name="gps", data_type="string", value_source="key", value_index=11),
            ColumnDef(name="gps_country", data_type="string", value_source="key", value_index=12),
            ColumnDef(name="gps_province", data_type="string", value_source="key", value_index=13),
            ColumnDef(name="gps_district", data_type="string", value_source="key", value_index=14),
            ColumnDef(name="age", data_type="integer", value_source="key", value_index=15),
            ColumnDef(name="cases", data_type="integer", value_source="value", value_attribute="sum"),
        ]
        return columns

    def customize(self, mapping):
        # Run the extract hourly; -1 presumably means "any" for the
        # hour/day fields -- confirm against the ctable scheduler.
        mapping.schedule_type = 'hourly'
        mapping.schedule_hour = -1
        mapping.schedule_day = -1
| from ctable.fixtures import CtableMappingFixture
from ctable.models import ColumnDef, KeyMatcher
class PatientSummaryMapping(CtableMappingFixture):
name = 'patient_summary'
domains = ['gsid']
couch_view = 'gsid/patient_summary'
schedule_active = True
@property
def columns(self):
columns = [
ColumnDef(name="domain", data_type="string", value_source="key", value_index=0),
ColumnDef(name="disease_name", data_type="string", value_source="key", value_index=1),
ColumnDef(name="test_version", data_type="string", value_source="key", value_index=2),
ColumnDef(name="country", data_type="string", value_source="key", value_index=3),
ColumnDef(name="province", data_type="string", value_source="key", value_index=4),
ColumnDef(name="district", data_type="string", value_source="key", value_index=5),
ColumnDef(name="clinic", data_type="string", value_source="key", value_index=6),
ColumnDef(name="gender", data_type="string", value_source="key", value_index=7),
ColumnDef(name="date", data_type="date", value_source="key", value_index=8, date_format="%Y-%m-%d"),
ColumnDef(name="diagnosis", data_type="string", value_source="key", value_index=9),
ColumnDef(name="lot_number", data_type="string", value_source="key", value_index=10),
ColumnDef(name="gps", data_type="string", value_source="key", value_index=11),
ColumnDef(name="gps_country", data_type="string", value_source="key", value_index=12),
ColumnDef(name="gps_province", data_type="string", value_source="key", value_index=13),
ColumnDef(name="gps_district", data_type="string", value_source="key", value_index=14),
ColumnDef(name="age", data_type="integer", value_source="key", value_index=15),
ColumnDef(name="cases", data_type="integer", value_source="value", value_attribute="sum"),
]
return columns
| bsd-3-clause | Python |
94ebfd057eb5a7c7190d981b26c027573578606d | validate using validator module | chrisseto/modular-odm,CenterForOpenScience/modular-odm,icereval/modular-odm,sloria/modular-odm | modularodm/fields/StringField.py | modularodm/fields/StringField.py | from ..fields import Field
from ..validators import StringValidator
class StringField(Field):
default = ''
validate = StringValidator()
def __init__(self, *args, **kwargs):
super(StringField, self).__init__(*args, **kwargs) | from ..fields import Field
import weakref
class StringField(Field):
default = ''
def __init__(self, *args, **kwargs):
super(StringField, self).__init__(*args, **kwargs)
def validate(self, value):
if isinstance(value, unicode):
return True
else:
try:
value.decode('utf-8')
return True
except:
return False
| apache-2.0 | Python |
5748666a1f2c6cd307be79c33117252e10d6df01 | Add matchup script | ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola,Hutspace/odekro,Hutspace/odekro,ken-muturi/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,Hutspace/odekro,hzj123/56th,hzj123/56th,geoffkilpin/pombola,Hutspace/odekro,ken-muturi/pombola,Hutspace/odekro,mysociety/pombola,hzj123/56th | mzalendo/kenya/management/commands/kenya_matchup_coords_to_place.py | mzalendo/kenya/management/commands/kenya_matchup_coords_to_place.py | import re
import csv
import sys
from optparse import make_option
from django.core.management.base import LabelCommand
from django.contrib.gis.geos import Point
from mapit.models import Area, Generation, Type, NameType, Country
class Command(LabelCommand):
    """Read a file in, extract coordinates and lookup the constituency.
    Outputs the coordinates, constituency slug and name as CSV to STDOUT
    input is one pair of coords per line, eg:
        (1.23, 4.56)
    """
    help = 'Import KML data'
    args = '<KML files>'
    # Shared CSV writer; all output rows go to stdout.
    writer = csv.writer(sys.stdout)
    def handle_label(self, input_coords, **options):
        # Process the input file one coordinate pair per line.
        with open(input_coords) as input_file:
            for line in input_file.readlines():
                self.process_line( line.strip() )
    def process_line(self, raw_line):
        """Extract coords from line and output constituency found"""
        # print raw_line
        # Strip everything except digits, dots, commas and minus signs,
        # leaving e.g. "1.23,4.56" from "(1.23, 4.56)".
        line = re.sub(r'[^\d.,\-]+', '', raw_line)
        lng, lat = map( lambda x: float(x), re.split(',', line) )
        # NOTE(review): Point(x, y) is (lon, lat) in GeoDjango, but (lat,
        # lng) is passed here — confirm the input files list lat first.
        point = Point( lat, lng, srid=4326)
        # print point
        areas = Area.objects.by_location( point )
        # Always echo the raw input; append place info only when found.
        output = [ raw_line ]
        if areas:
            places = areas[0].place_set.all()
            if places:
                place = places[0]
                output.append( place.name )
                output.append( place.slug )
        self.writer.writerow( output )
| agpl-3.0 | Python | |
9386236d41298ed8888a6774f40a15d44b7e53fe | Create command for Data Log Report fixtures | porksmash/swarfarm,porksmash/swarfarm,porksmash/swarfarm,porksmash/swarfarm | data_log/management/commands/generate_report_fixture.py | data_log/management/commands/generate_report_fixture.py | from django.core.management.base import BaseCommand
from django.core import serializers
from data_log import models
import json
class Command(BaseCommand):
    """Management command that dumps recent data-log reports as a JSON
    fixture file (``fixture_reports.json``) in the working directory."""
    help = 'Create Data Log Report fixtures'
    def handle(self, *args, **kwargs):
        self.stdout.write('Creating fixtures for Data Log Reports...')
        JSONSerializer = serializers.get_serializer("json")
        j = JSONSerializer()
        data = []
        # Report models to export; the 100 newest rows of each are taken.
        models_to_serialize = [
            models.LevelReport,
            models.SummonReport,
            models.MagicShopRefreshReport,
            models.MagicBoxCraftingReport,
            models.WishReport,
            models.RuneCraftingReport
        ]
        for model in models_to_serialize:
            # serialize() yields a JSON string; round-trip through loads
            # so all models end up in one flat list.
            data += json.loads(j.serialize(model.objects.order_by('-generated_on')[:100]))
        # Base Report rows get a larger window (newest 1000).
        data += json.loads(j.serialize(models.Report.objects.order_by('-generated_on')[:1000]))
        with open("fixture_reports.json", "w+") as f:
            json.dump(data, f)
        self.stdout.write(self.style.SUCCESS('Done!'))
| apache-2.0 | Python | |
ebf790c6c94131b79cb5da4de6cb665f97e54799 | Add viewset permission class for checking image permissions | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | app/grandchallenge/cases/permissions.py | app/grandchallenge/cases/permissions.py | from rest_framework import permissions
from grandchallenge.serving.permissions import user_can_download_image
class ImagePermission(permissions.BasePermission):
    """
    Object-level DRF permission for image objects.

    Grants access only when ``user_can_download_image`` allows the
    requesting user to download the image being accessed.
    """
    def has_object_permission(self, request, view, obj):
        # Delegate entirely to the serving app's download check.
        return user_can_download_image(user=request.user, image=obj)
| apache-2.0 | Python | |
496754c54005cf7e1b49ada8e612207f5e2846ff | Add dead letter SQS queue example | awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples | python/example_code/sqs/dead_letter_queue.py | python/example_code/sqs/dead_letter_queue.py | # Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import json
import boto3
# Create SQS client
sqs = boto3.client('sqs')

# Placeholders: replace with the real source queue URL and the ARN of an
# already-created queue that will receive the dead letters.
queue_url = 'SOURCE_QUEUE_URL'
dead_letter_queue_arn = 'DEAD_LETTER_QUEUE_ARN'

# After maxReceiveCount failed receives, SQS moves the message to the
# dead-letter queue.  The count is a string, as the API requires.
redrive_policy = {
    'deadLetterTargetArn': dead_letter_queue_arn,
    'maxReceiveCount': '10'
}

# Configure queue to send messages to dead letter queue.
# RedrivePolicy must be passed as a JSON-encoded string.
sqs.set_queue_attributes(
    QueueUrl=queue_url,
    Attributes={
        'RedrivePolicy': json.dumps(redrive_policy)
    }
)
| apache-2.0 | Python | |
085a9aa05dfda6348d0e7e2aa6ac7f0c6ce6d63b | add some basic client-server tests | ychaim/opentxs-tests-1,Open-Transactions/opentxs-tests,ychaim/opentxs-tests-1,Open-Transactions/opentxs-tests | client_server_tests.py | client_server_tests.py | import pyopentxs
# this is defined by the sample data
SERVER_ID = "r1fUoHwJOWCuK3WBAAySjmKYqsG6G2TYIxdqY6YNuuG"
def test_check_server_id():
    """A fresh pseudonym can verify the sample server's ID."""
    nym_id = pyopentxs.create_pseudonym()
    assert pyopentxs.check_server_id(SERVER_ID, nym_id)
def test_register_nym():
    """A fresh pseudonym can register with the sample server."""
    nym_id = pyopentxs.create_pseudonym()
    # register_nym only needs to succeed without raising.
    pyopentxs.register_nym(SERVER_ID, nym_id)
    # returns server "contract"
    # TODO: maybe perform checks on the returned contract
| mpl-2.0 | Python | |
55f11f5952ad7c53267ec31c60196dd329eb09c0 | add one-off service | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | htdocs/json/vtec_events_bywfo.py | htdocs/json/vtec_events_bywfo.py | """Pidgin-holed service for some WFO data... """
import json
from io import BytesIO, StringIO
import datetime
from paste.request import parse_formvars
from pyiem.util import get_sqlalchemy_conn, html_escape
import pandas as pd
from sqlalchemy import text
EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
def make_url(row):
    """Return the IEM VTEC browser URL for one event *row*.

    *row* must provide ``issued`` (ISO string, year taken from the first
    four chars), ``wfo``, ``phenomena``, ``significance`` and a numeric
    ``eventid`` (zero-padded to four digits).
    """
    year = row["issued"][:4]
    vtec = "O-NEW-K{}-{}-{}-{:04.0f}".format(
        row["wfo"], row["phenomena"], row["significance"], row["eventid"]
    )
    return "https://mesonet.agron.iastate.edu/vtec/#{}-{}".format(year, vtec)
def get_df(wfo, start, end, phenomena, significance):
    """Query the warnings table for one WFO's VTEC events in a window.

    phenomena/significance are optional filters (None disables each).
    Returns a DataFrame with an added ``url`` column, or an empty frame
    when nothing matched.
    """
    params = {
        "wfo": wfo,
        "start": start,
        "end": end,
    }
    # Optional filters are appended to the WHERE clause; the values go
    # through bound parameters, only the static clause text is inlined.
    plimiter = ""
    if phenomena is not None:
        params["phenomena"] = phenomena
        plimiter = " and phenomena = :phenomena "
    if significance is not None:
        params["significance"] = significance
        plimiter += " and significance = :significance "
    with get_sqlalchemy_conn("postgis") as conn:
        df = pd.read_sql(
            text(
                f"""
            SELECT
            to_char(issue at time zone 'UTC',
                'YYYY-MM-DDThh24:MI:SSZ') as issued,
            to_char(expire at time zone 'UTC',
                'YYYY-MM-DDThh24:MI:SSZ') as expired,
            eventid, phenomena, significance, hvtec_nwsli, wfo, ugc
            from warnings WHERE wfo = :wfo and issue < :end
            and (expire > :start or init_expire > :start)
            {plimiter} ORDER by issue ASC
            """
            ),
            conn,
            params=params,
        )
    if df.empty:
        return df
    # Construct a URL
    df["url"] = df.apply(make_url, axis=1)
    return df
def as_json(df):
    """Serialize the events DataFrame into the JSON payload string."""
    # Output field -> DataFrame column; issue/expire are renamed from
    # the issued/expired columns produced by the SQL query.
    field_map = [
        ("url", "url"),
        ("issue", "issued"),
        ("expire", "expired"),
        ("eventid", "eventid"),
        ("phenomena", "phenomena"),
        ("hvtec_nwsli", "hvtec_nwsli"),
        ("significance", "significance"),
        ("wfo", "wfo"),
        ("ugc", "ugc"),
    ]
    events = [
        {field: row[col] for field, col in field_map}
        for _, row in df.iterrows()
    ]
    return json.dumps({"events": events})
def application(environ, start_response):
    """WSGI entry point: return VTEC events as JSON, CSV or XLSX.

    Query params: wfo (3 chars), start/end (YYYY-MM-DDTHH:MM, UTC
    assumed), optional phenomena/significance filters, fmt
    (json|csv|xlsx) and an optional JSONP callback.
    """
    fields = parse_formvars(environ)
    wfo = fields.get("wfo", "DMX")[:3].upper()
    # Timestamps are truncated to 16 chars so trailing seconds/zone
    # suffixes in the query string are ignored.
    start = datetime.datetime.strptime(
        fields.get("start", "2022-05-01T12:00")[:16], "%Y-%m-%dT%H:%M"
    )
    end = datetime.datetime.strptime(
        fields.get("end", "2022-05-02T12:00")[:16], "%Y-%m-%dT%H:%M"
    )
    phenomena = fields.get("phenomena")
    significance = fields.get("significance")
    cb = fields.get("callback", None)
    fmt = fields.get("fmt", "json")
    df = get_df(wfo, start, end, phenomena, significance)
    if fmt == "xlsx":
        fn = f"vtec_{wfo}_{start:%Y%m%d%H%M}_{end:%Y%m%d%H%M}.xlsx"
        headers = [
            ("Content-type", EXL),
            ("Content-disposition", "attachment; Filename=" + fn),
        ]
        start_response("200 OK", headers)
        bio = BytesIO()
        df.to_excel(bio, index=False)
        return [bio.getvalue()]
    if fmt == "csv":
        fn = f"vtec_{wfo}_{start:%Y%m%d%H%M}_{end:%Y%m%d%H%M}.csv"
        headers = [
            ("Content-type", "application/octet-stream"),
            ("Content-disposition", "attachment; Filename=" + fn),
        ]
        start_response("200 OK", headers)
        bio = StringIO()
        df.to_csv(bio, index=False)
        return [bio.getvalue().encode("utf-8")]
    # Default: JSON, optionally wrapped in an escaped JSONP callback.
    res = as_json(df)
    if cb is not None:
        res = f"{html_escape(cb)}({res})"
    headers = [("Content-type", "application/json")]
    start_response("200 OK", headers)
    return [res.encode("ascii")]
| mit | Python | |
7fc64847ed45229220e9bdfe20c25f3c83f10a80 | Add isup.py | fenhl/isup | isup.py | isup.py | #!/usr/bin/env python
import re
import sys
from urllib.request import urlopen
def isup(domain):
    """Query isup.me and return '<domain> UP' or '<domain> DOWN'."""
    body = urlopen("http://www.isup.me/" + domain).read()
    # urlopen returns bytes on Python 3; decode before searching.
    if not isinstance(body, str):
        body = body.decode('utf-8')
    status = "UP" if "It's just you" in body else "DOWN"
    return domain + " " + status
def main(cmd, args):
    """CLI entry point: check each given domain, or print usage."""
    if not args:
        print("usage: " + cmd + " domain1 [domain2 .. domainN]")
        return
    for domain in args:
        print(isup(domain))
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
| mit | Python | |
9c7935ebbd4d995c44526c91fdb3b647a15eb877 | Create API tasks.py with update_char_data | evewspace/eve-wspace,acdervis/eve-wspace,Maarten28/eve-wspace,marbindrakon/eve-wspace,acdervis/eve-wspace,evewspace/eve-wspace,Zumochi/eve-wspace,Unsettled/eve-wspace,nyrocron/eve-wspace,nyrocron/eve-wspace,gpapaz/eve-wspace,gpapaz/eve-wspace,Unsettled/eve-wspace,Zumochi/eve-wspace,gpapaz/eve-wspace,Unsettled/eve-wspace,proycon/eve-wspace,Zumochi/eve-wspace,Unsettled/eve-wspace,proycon/eve-wspace,mmalyska/eve-wspace,evewspace/eve-wspace,hybrid1969/eve-wspace,Zumochi/eve-wspace,proycon/eve-wspace,hybrid1969/eve-wspace,gpapaz/eve-wspace,evewspace/eve-wspace,Maarten28/eve-wspace,Maarten28/eve-wspace,Maarten28/eve-wspace,marbindrakon/eve-wspace,mmalyska/eve-wspace,acdervis/eve-wspace,hybrid1969/eve-wspace,mmalyska/eve-wspace,hybrid1969/eve-wspace,proycon/eve-wspace,marbindrakon/eve-wspace,acdervis/eve-wspace,marbindrakon/eve-wspace,mmalyska/eve-wspace,nyrocron/eve-wspace,nyrocron/eve-wspace | evewspace/API/tasks.py | evewspace/API/tasks.py | # Eve W-Space
# Copyright 2014 Andrew Austin and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from celery import task
from API.models import APIKey, MemberAPIKey
from django.core.cache import cache
from django.contrib.auth import get_user_model
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
User = get_user_model()
@task()
def update_char_data():
    """Celery task: re-validate every user's EVE API keys."""
    #Get all users
    user_list = User.objects.all()
    for user in user_list:
        #Get all API keys of a user
        for key in user.api_keys.all():
            #Grab key and validate
            # NOTE(review): this constructs a new, unsaved MemberAPIKey
            # from the stored key's credentials rather than validating
            # the existing row — confirm validate() persists/updates as
            # intended.
            current_key = MemberAPIKey(user=user,
                    keyid=key.keyid,
                    vcode=key.vcode)
            current_key.validate()
| apache-2.0 | Python | |
1d1a37ad6f0aedbf18a72b551fdee4d96c92ea11 | Update RICA example. | chrinide/theanets,devdoer/theanets,lmjohns3/theanets | examples/mnist-rica.py | examples/mnist-rica.py | #!/usr/bin/env python
import climate
import matplotlib.pyplot as plt
import numpy as np
import theanets
from utils import load_mnist, plot_layers, plot_images
logging = climate.get_logger('mnist-rica')
climate.enable_default_logging()
class RICA(theanets.Autoencoder):
    """Autoencoder with an RICA-style reconstruction-independence penalty."""
    def J(self, weight_inverse=0, **kwargs):
        """Augment the base cost with a weight-inverse penalty.

        weight_inverse scales a term that penalizes small column norms
        of every layer's weight matrices; 0 (default) disables it.
        """
        cost = super(RICA, self).J(**kwargs)
        if weight_inverse > 0:
            cost += sum((weight_inverse / (w * w).sum(axis=0)).sum()
                        for l in self.layers for w in l.weights)
        return cost
train, valid, _ = load_mnist()
# mean-center the digits and compute a pca whitening transform.
train -= 0.5
valid -= 0.5
logging.info('computing whitening transform')
# Eigendecomposition of the (scaled) covariance of the training digits;
# eigh returns eigenvalues ascending, so both are reversed to descending.
vals, vecs = np.linalg.eigh(np.dot(train.T, train) / len(train))
vals = vals[::-1]
vecs = vecs[:, ::-1]
K = 197  # this retains 99% of the variance in the digit data.
# Keep the top-K components; sqrt turns variances into standard deviations.
vals = np.sqrt(vals[:K])
vecs = vecs[:, :K]
def whiten(x):
    """Project mean-centered digits into the decorrelated PCA space."""
    transform = np.dot(vecs, np.diag(1. / vals))
    return np.dot(x, transform)
def color(z):
    """Map whitened vectors back into the original pixel space."""
    transform = np.dot(np.diag(vals), vecs.T)
    return np.dot(z, transform)
# now train our model on the whitened dataset.
N = 16  # the hidden layer is an N x N grid of features.
e = theanets.Experiment(
    RICA,
    layers=(K, N * N, K),
    activation='linear',
    hidden_l1=0.2,
    no_learn_biases=True,
    tied_weights=True,
    train_batches=100,
    weight_inverse=0.01,
)
e.train(whiten(train), whiten(valid))
# color the network weights so they are viewable as digits.
plot_layers(
    [color(e.network.get_weights('hid1').T).T],
    tied_weights=True)
plt.tight_layout()
plt.show()
# Show original digits next to their round-trip reconstructions.
plot_images(valid[:N*N], 121, 'Sample data')
plot_images(
    color(e.network.predict(whiten(valid[:N*N]))),
    122, 'Reconstructed data')
plt.tight_layout()
plt.show()
| #!/usr/bin/env python
import climate
import matplotlib.pyplot as plt
import numpy as np
import theanets
from utils import load_mnist, plot_layers, plot_images
logging = climate.get_logger('mnist-rica')
climate.enable_default_logging()
class RICA(theanets.Autoencoder):
    """Autoencoder with an RICA-style weight-inverse penalty (older
    theanets API: weights live directly on the network)."""
    def J(self, weight_inverse=0, **kwargs):
        cost = super(RICA, self).J(**kwargs)
        if weight_inverse > 0:
            # Penalize small column norms of each weight matrix.
            cost += sum((weight_inverse / (w * w).sum(axis=0)).sum() for w in self.weights)
        return cost
train, valid, _ = load_mnist()
# mean-center the digits and compute a pca whitening transform.
train -= 0.5
valid -= 0.5
logging.info('computing whitening transform')
vals, vecs = np.linalg.eigh(np.dot(train.T, train) / len(train))
vals = vals[::-1]
vecs = vecs[:, ::-1]
K = 197 # this retains 99% of the variance in the digit data.
vals = np.sqrt(vals[:K])
vecs = vecs[:, :K]
def whiten(x):
return np.dot(x, np.dot(vecs, np.diag(1. / vals)))
def color(z):
return np.dot(z, np.dot(np.diag(vals), vecs.T))
# now train our model on the whitened dataset.
N = 16
e = theanets.Experiment(
RICA,
layers=(K, N * N, K),
activation='linear',
hidden_l1=0.2,
no_learn_biases=True,
tied_weights=True,
train_batches=100,
weight_inverse=0.01,
)
e.train(whiten(train), whiten(valid))
# color the network weights so they are viewable as digits.
plot_layers(
[color(e.network.weights[0].get_value().T).T],
tied_weights=True)
plt.tight_layout()
plt.show()
plot_images(valid[:N*N], 121, 'Sample data')
plot_images(
color(e.network.predict(whiten(valid[:N*N]))),
122, 'Reconstructed data')
plt.tight_layout()
plt.show()
| mit | Python |
57cf0e1d153c2d06e722329ac35f2093a1d1c17c | use .py file to make for setup.py | UWSEDS-aut17/uwseds-group-city-fynders | Docs/city_fynder.py | Docs/city_fynder.py | # Which city would like to live?
# Created by City Fynders - University of Washington
import pandas as pd
import numpy as np
# NOTE(review): pandas/numpy/geopy are imported but not referenced here;
# presumably used by data_processing or kept for interactive work.
import geopy as gy
from geopy.geocoders import Nominatim
import data_processing as dp
# import data
(natural, human, economy, tertiary) = dp.read_data()
# Add ranks in the DataFrame
(natural, human, economy, tertiary) = dp.data_rank(natural, human, economy, tertiary)
# Get location information
(Lat, Lon) = dp.find_loc(human)
# Create a rank DataFrame and save as csv file
rank = dp.create_rank(natural, human, economy, tertiary, Lat, Lon)
| mit | Python | |
64b321f1815c17562e4e8c3123b5b7fbbe23ce0b | Add logging test | pubres/pubres | pubres/tests/logging_test.py | pubres/tests/logging_test.py | import logging
import logging.handlers
import multiprocessing
import pubres
from pubres.pubres_logging import setup_logging
from .base import *
class MultiprocessingQueueStreamHandler(logging.handlers.BufferingHandler):
    """A logging handler that pushes the getMessage() of every
    LogRecord into a multiprocessing.Queue.
    Used to test log messages of a server started in its own process.
    """
    def __init__(self, *args, **kwargs):
        super(MultiprocessingQueueStreamHandler, self).__init__(*args,
                                                                **kwargs)
        # Cross-process channel carrying formatted messages (strings).
        self.mp_logrecord_queue = multiprocessing.Queue()
    # emit() still delegates to BufferingHandler (which appends the
    # record to self.buffer) before mirroring the message to the queue.
    def emit(self, record):
        super(MultiprocessingQueueStreamHandler, self).emit(record)
        self.mp_logrecord_queue.put(record.getMessage())
    def getLogRecordBuffer(self):
        """Drain and return all messages currently in the queue.

        NOTE(review): Queue.empty() is unreliable right after a put from
        another process; callers should tolerate a briefly-incomplete
        drain.
        """
        ret = []
        while not self.mp_logrecord_queue.empty():
            log = self.mp_logrecord_queue.get()
            ret.append(log)
        return ret
def test_logging():
    """Server actions performed over UDP must appear in the log."""
    # Set up log capturing
    handler = MultiprocessingQueueStreamHandler(10)
    setup_logging(handler=handler)
    # Do some server actions
    with pubres.BackgroundServer():
        with pub('key1', 'val1'):
            pass
    # Make sure actions appear in log
    log_buffer = handler.getLogRecordBuffer()
    assert "pub {'key1': 'val1'}" in log_buffer
| mit | Python | |
b52bad82bafed23d3db5a0e73c22a056d1753174 | add card parsers | PeerAssets/pypeerassets | pypeerassets/card_parsers.py | pypeerassets/card_parsers.py | '''parse cards according to deck issue mode'''
def none_parser(cards):
    '''parser for NONE [0] issue mode'''
    # A NONE deck never issues cards, so no card is ever valid.
    return None
def custom_parser(cards, parser=None):
    '''parser for CUSTOM [1] issue mode,
    please provide your custom parser as argument'''
    # Without a parser callable the cards pass through untouched.
    return parser(cards) if parser else cards
def once_parser(cards):
    '''parser for ONCE [2] issue mode'''
    # Only the first CardIssue is valid.  Supplying a default to next()
    # avoids the StopIteration the bare call raised when no CardIssue
    # exists; an empty list is returned instead.
    card = next((i for i in cards if i.type == "CardIssue"), None)
    return [card] if card is not None else []
def multi_parser(cards):
    '''parser for MULTI [4] issue mode'''
    # MULTI allows unlimited issuance; every card is taken as-is.
    return cards
def mono_parser(cards):
    '''parser for MONO [8] issue mode'''
    # MONO permits exactly one card issue of a single unit.  Supplying a
    # default to next() avoids the StopIteration the bare call raised
    # when no such card exists; an empty list is returned instead.
    card = next((i for i in cards
                 if i.type == "CardIssue" and i.amount[0] == 1), None)
    return [card] if card is not None else []
| bsd-3-clause | Python | |
a5b28834bb5e52857720139a1fbe6dfd1d1ea266 | create a new string helper that concatenates arguments | ceph/radosgw-agent,ceph/radosgw-agent | radosgw_agent/util/string.py | radosgw_agent/util/string.py |
def concatenate(*a, **kw):
    """
    helper function to concatenate all arguments with added (optional)
    newlines

    Pass ``newline=True`` to append a newline after every argument
    (including the last); by default arguments are joined back-to-back.
    """
    newline = kw.get('newline', False)
    # str.join is linear; the previous ``+=`` loop was quadratic.
    if newline:
        return ''.join(item + '\n' for item in a)
    return ''.join(a)
| mit | Python | |
68c66c397f11637f650131ef69f4f16ebe6f43e4 | Create luhn.py | ilismal/luhnCompliance | luhn.py | luhn.py | # Luhn algorithm check
# From https://en.wikipedia.org/wiki/Luhn_algorithm
def luhn_checksum(card_number):
    """Return the Luhn checksum of *card_number*; 0 means it is valid.

    See https://en.wikipedia.org/wiki/Luhn_algorithm
    """
    digits = [int(ch) for ch in str(card_number)]
    # Digits at odd positions (from the right) are summed directly.
    total = sum(digits[-1::-2])
    # Every second digit from the right is doubled and its digits summed.
    for d in digits[-2::-2]:
        total += sum(int(ch) for ch in str(d * 2))
    return total % 10
def is_luhn_valid(card_number):
    """Return True when *card_number* passes the Luhn check."""
    return not luhn_checksum(card_number)
# Interactive driver (Python 2: print statements and raw_input).
# There's no do-while in python, lazy workaround
while True:
    # Get the first value
    # Check that's a number with 16 digits
    try:
        firstValue=int(raw_input('First PAN in range: '))
        if (len(str(firstValue)) != 16):
            print "PAN must be 16 chars long"
        else:
            break
    except ValueError:
        print("Not a number")
# Same process for the second number
# TODO: This should be a function
while True:
    try:
        lastValue = int(raw_input(('Last PAN in range: ')))
        if (len(str(lastValue)) != 16):
            print "PAN must be 16 chars long"
        else:
            break
    except ValueError:
        print("Not a number")
# Swap variables if the first value is higher than the last
if (firstValue > lastValue):
    firstValue,lastValue = lastValue,firstValue
print "Valid card numbers in range {0}/{1}".format(firstValue,lastValue)
totalValid = 0
# Check if the values in the range are luhn compliant
# NOTE(review): range() excludes lastValue itself — confirm the upper
# bound is meant to be exclusive.
for ccc in range(firstValue,lastValue):
    if is_luhn_valid(ccc):
        print "\t" + str(ccc)
        totalValid += 1
print "Total: {0} valid cards in range".format(totalValid)
| unlicense | Python | |
1718926c99692fefb90627c55589990cd0e0225b | Make migrations in project_template home app reversible | rsalmaso/wagtail,FlipperPA/wagtail,chrxr/wagtail,nutztherookie/wagtail,mikedingjan/wagtail,nimasmi/wagtail,rsalmaso/wagtail,Toshakins/wagtail,mikedingjan/wagtail,thenewguy/wagtail,mikedingjan/wagtail,iansprice/wagtail,nilnvoid/wagtail,nimasmi/wagtail,mikedingjan/wagtail,nutztherookie/wagtail,zerolab/wagtail,FlipperPA/wagtail,mixxorz/wagtail,nimasmi/wagtail,torchbox/wagtail,torchbox/wagtail,torchbox/wagtail,gasman/wagtail,jnns/wagtail,gasman/wagtail,Toshakins/wagtail,jnns/wagtail,jnns/wagtail,timorieber/wagtail,nealtodd/wagtail,FlipperPA/wagtail,wagtail/wagtail,gasman/wagtail,Toshakins/wagtail,nilnvoid/wagtail,takeflight/wagtail,nilnvoid/wagtail,kaedroho/wagtail,nutztherookie/wagtail,zerolab/wagtail,thenewguy/wagtail,nealtodd/wagtail,gasman/wagtail,kaedroho/wagtail,mixxorz/wagtail,kaedroho/wagtail,rsalmaso/wagtail,iansprice/wagtail,rsalmaso/wagtail,nutztherookie/wagtail,takeflight/wagtail,thenewguy/wagtail,wagtail/wagtail,zerolab/wagtail,kaedroho/wagtail,zerolab/wagtail,chrxr/wagtail,takeflight/wagtail,takeflight/wagtail,mixxorz/wagtail,torchbox/wagtail,timorieber/wagtail,Toshakins/wagtail,mixxorz/wagtail,nilnvoid/wagtail,thenewguy/wagtail,zerolab/wagtail,nimasmi/wagtail,FlipperPA/wagtail,nealtodd/wagtail,gasman/wagtail,wagtail/wagtail,iansprice/wagtail,timorieber/wagtail,chrxr/wagtail,nealtodd/wagtail,mixxorz/wagtail,kaedroho/wagtail,wagtail/wagtail,thenewguy/wagtail,rsalmaso/wagtail,timorieber/wagtail,chrxr/wagtail,wagtail/wagtail,jnns/wagtail,iansprice/wagtail | wagtail/project_template/home/migrations/0002_create_homepage.py | wagtail/project_template/home/migrations/0002_create_homepage.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
    """Forward migration: replace Wagtail's default homepage with ours."""
    # Get models
    ContentType = apps.get_model('contenttypes.ContentType')
    Page = apps.get_model('wagtailcore.Page')
    Site = apps.get_model('wagtailcore.Site')
    HomePage = apps.get_model('home.HomePage')
    # Delete the default homepage
    # If migration is run multiple times, it may have already been deleted
    Page.objects.filter(id=2).delete()
    # Create content type for homepage model
    homepage_content_type, __ = ContentType.objects.get_or_create(
        model='homepage', app_label='home')
    # Create a new homepage
    # path/depth follow treebeard's materialized-path scheme: a child
    # of the root page at position 1.
    homepage = HomePage.objects.create(
        title="Homepage",
        slug='home',
        content_type=homepage_content_type,
        path='00010001',
        depth=2,
        numchild=0,
        url_path='/home/',
    )
    # Create a site with the new homepage set as the root
    Site.objects.create(
        hostname='localhost', root_page=homepage, is_default_site=True)
def remove_homepage(apps, schema_editor):
    """Reverse migration: undo create_homepage."""
    # Get models
    ContentType = apps.get_model('contenttypes.ContentType')
    HomePage = apps.get_model('home.HomePage')
    # Delete the default homepage
    # Page and Site objects CASCADE
    HomePage.objects.filter(slug='home', depth=2).delete()
    # Delete content type for homepage model
    ContentType.objects.filter(model='homepage', app_label='home').delete()
class Migration(migrations.Migration):
    """Data migration: seed the site with a HomePage (reversible)."""
    dependencies = [
        ('home', '0001_initial'),
    ]
    operations = [
        # Forward and reverse callables make this migration reversible.
        migrations.RunPython(create_homepage, remove_homepage),
    ]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
| bsd-3-clause | Python |
889b322261384c90ac165ddd1e8bf2944b3e7785 | Add machine types people use as host for TF builds. | annarev/tensorflow,davidzchen/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,karllessard/tensorflow,aam-at/tensorflow,sarvex/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,yongtang/tensorflow,yongtang/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,davidzchen/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,annarev/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,aldian/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,annarev/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,tensorflow/tensorflow,aam-at/tensorflow,aam-at/tensorflow,annarev/tensorflow,Intel-Corporation/tensorflow,freedomtan/tensorflow,Intel-Corporation/tensorflow,cxxgtxy/tensorflow,aldian/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,aldian/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,karllessard/tensorflow,annarev/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,cxxgtxy/tensorflow,tensorflow/te
nsorflow,petewarden/tensorflow,sarvex/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,aldian/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,freedomtan/tensorflow,karllessard/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,annarev/tensorflow,aam-at/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,aldian/tensorflow,frreiss/tensorflow-fred,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,aam-at/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,annarev/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,aam-at/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,davidzchen/tensorflow,cxxgtxy/tensorflow,frreiss/tensorflow-fred,aldian/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,petewarden/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,yongtang/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,frreiss/tensorflow-fred,aam-at/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,sarvex/tensorflow,sarvex/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aldian/tensorflow,freedomtan/tenso
rflow,tensorflow/tensorflow,davidzchen/tensorflow,annarev/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,sarvex/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,davidzchen/tensorflow,sarvex/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,aam-at/tensorflow,petewarden/tensorflow,cxxgtxy/tensorflow,davidzchen/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,annarev/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred | third_party/remote_config/remote_platform_configure.bzl | third_party/remote_config/remote_platform_configure.bzl | """Repository rule to create a platform for a docker image to be used with RBE."""
def _remote_platform_configure_impl(repository_ctx):
platform = repository_ctx.attr.platform
if platform == "local":
os = repository_ctx.os.name.lower()
if os.startswith("windows"):
platform = "windows"
elif os.startswith("mac os"):
platform = "osx"
else:
platform = "linux"
cpu = "x86_64"
if "MACHTYPE" in repository_ctx.os.environ:
machine_type = repository_ctx.os.environ["MACHTYPE"]
if (machine_type.startswith("ppc") or
machine_type.startswith("powerpc")):
cpu = "ppc"
elif machine_type.startswith("s390x"):
cpu = "s390x"
exec_properties = repository_ctx.attr.platform_exec_properties
serialized_exec_properties = "{"
for k, v in exec_properties.items():
serialized_exec_properties += "\"%s\" : \"%s\"," % (k, v)
serialized_exec_properties += "}"
repository_ctx.template(
"BUILD",
Label("@org_tensorflow//third_party/remote_config:BUILD.tpl"),
{
"%{platform}": platform,
"%{exec_properties}": serialized_exec_properties,
"%{cpu}": cpu,
},
)
remote_platform_configure = repository_rule(
implementation = _remote_platform_configure_impl,
attrs = {
"platform_exec_properties": attr.string_dict(mandatory = True),
"platform": attr.string(default = "linux", values = ["linux", "windows", "local"]),
},
)
| """Repository rule to create a platform for a docker image to be used with RBE."""
def _remote_platform_configure_impl(repository_ctx):
platform = repository_ctx.attr.platform
if platform == "local":
os = repository_ctx.os.name.lower()
if os.startswith("windows"):
platform = "windows"
elif os.startswith("mac os"):
platform = "osx"
else:
platform = "linux"
cpu = "x86_64"
if "MACHTYPE" in repository_ctx.os.environ:
machine_type = repository_ctx.os.environ["MACHTYPE"]
if machine_type.startswith("ppc"):
cpu = "ppc"
exec_properties = repository_ctx.attr.platform_exec_properties
serialized_exec_properties = "{"
for k, v in exec_properties.items():
serialized_exec_properties += "\"%s\" : \"%s\"," % (k, v)
serialized_exec_properties += "}"
repository_ctx.template(
"BUILD",
Label("@org_tensorflow//third_party/remote_config:BUILD.tpl"),
{
"%{platform}": platform,
"%{exec_properties}": serialized_exec_properties,
"%{cpu}": cpu,
},
)
remote_platform_configure = repository_rule(
implementation = _remote_platform_configure_impl,
attrs = {
"platform_exec_properties": attr.string_dict(mandatory = True),
"platform": attr.string(default = "linux", values = ["linux", "windows", "local"]),
},
)
| apache-2.0 | Python |
c06ed61909cc9320b42c60fb435e4381f60e8b2e | Create Input-OutputNeuronGroup.py | ricardodeazambuja/BrianConnectUDP | examples/Input-OutputNeuronGroup.py | examples/Input-OutputNeuronGroup.py | '''
Example of a spike bridge (receives and sends spikes)
In this example spikes are received, processed and sent by UDP creating a raster plot at the end of the simulation.
'''
from brian import *
import numpy
from brian_multiprocess_udp import BrianConnectUDP
# The main function with the NeuronGroup(s) and Synapse(s) must be named "main_NeuronGroup".
# It will receive two objects: input_Neuron_Group and the simulation_clock. The input_Neuron_Group
# will supply the input spikes to the network. The size of the spike train received equals NumOfNeuronsInput.
# The size of the output spike train equals NumOfNeuronsOutput and must be the same size of the NeuronGroup who is
# going to interface with the rest of the system to send spikes.
# The function must return all the NeuronGroup objects and all the Synapse objects this way:
# ([list of all NeuronGroups],[list of all Synapses])
# and the FIRST (index 0) NeuronGroup of the list MUST be the one where the OUTPUT spikes will be taken by the simulation.
#
# Here is also possible to use "dummy" NeuronGroups only to receive and/or send spikes.
def main_NeuronGroup(input_Neuron_Group, simulation_clock):
print "main_NeuronGroup!" #DEBUG!
simclock = simulation_clock
Nr=NeuronGroup(45, model='v:1', reset=0, threshold=0.5, clock=simclock)
Nr.v=0
# SYNAPSES BETWEEN REAL NEURON NETWORK AND THE INPUT
Syn_iNG_Nr=Synapses(input_Neuron_Group, Nr, model='w:1', pre='v+=w', clock=simclock)
Syn_iNG_Nr[:,:]='i==j'
print "Total Number of Synapses:", len(Syn_iNG_Nr) #DEBUG!
Syn_iNG_Nr.w=1
MExt=SpikeMonitor(Nr) # Spikes sent by UDP
Mdummy=SpikeMonitor(input_Neuron_Group) # Spikes received by UDP
return ([Nr],[Syn_iNG_Nr],[MExt,Mdummy])
def post_simulation_function(input_NG, simulation_NG, simulation_SYN, simulation_MN):
    """
    input_NG: the neuron group that receives the input spikes
    simulation_NG: the neuron groups list passed to the system by the user function (main_NeuronGroup)
    simulation_SYN: the synapses list passed to the system by the user function (main_NeuronGroup)
    simulation_MN: the monitors list passed to the system by the user function (main_NeuronGroup)

    This way it is possible to plot, save or do whatever you want with these objects after the end of the simulation!
    """
    # Fix: removed a stray ``pass`` placeholder that was left above the live
    # code (dead statement, confusing because it suggested a no-op body).
    # Raster plot: received spikes (monitor index 1) vs sent spikes (index 0).
    figure()
    raster_plot(simulation_MN[1], simulation_MN[0])
    title("Spikes Received / Sent by UDP")
    show(block=True)
    # savefig('output.pdf')
if __name__=="__main__":
    # Receive-only variant kept for reference (note: the commented call uses
    # the misspelled name "BrainConnectUDP"):
    # my_simulation = BrainConnectUDP(main_NeuronGroup, NumOfNeuronsInput=45, post_simulation_function=post_simulation_function,
    #                    UDP_IPI="192.168.1.123", UDP_PORTI=20202, simclock_dt=5, inputclock_dt=5, TotalSimulationTime=5000, sim_repetitions=0)
    # Bridge 45 neurons in (port 20202) and 45 neurons out (port 10101) on
    # localhost; 5 ms clocks, 10 s total simulation, no repetitions.
    my_simulation = BrianConnectUDP(main_NeuronGroup, NumOfNeuronsInput=45, NumOfNeuronsOutput=45, post_simulation_function=post_simulation_function,
        UDP_IPI="127.0.0.1", UDP_PORTI=20202, UDP_IPO="127.0.0.1", UDP_PORTO=10101,simclock_dt=5, inputclock_dt=5, TotalSimulationTime=10000, sim_repetitions=0)
| cc0-1.0 | Python | |
2dea3ee1e50d5365ca190ee894536faea3148c7d | Add ChromiumTestShell activity and socket to constants. | Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,ltilve/chromium,axinging/chromium-crosswalk,dednal/chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,Chilledheart/chromium,hujiajie/pa-chromium,M4sse/chromium.src,jaruba/chromium.src,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,axinging/chromium-crosswalk,ltilve/chromium,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,dednal/chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,anirudhSK/chromium,Chilledheart/chromium,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,littlstar/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,M4sse/chromium.src,ltilve/chromium,ltilve/chromium,ltilve/chromium,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,Fireblend/chromium-crosswalk,timopulkkinen/BubbleFish,mogoweb/chromium-crosswalk,timopulkkinen/Bubbl
eFish,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,dednal/chromium.src,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,nacl-webkit/chrome_deps,Just-D/chromium-1,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,ondra-novak/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,markYoungH/chromium.src,dednal/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,M4sse/chromium.src,Jonekee/chromium.src,anirudhSK/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,hujiajie/pa-chromium,M4sse/chromium.src,nacl-webkit/chrome_deps,anirudhSK/chromium,M4sse/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,markYoungH/chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,M4sse/chromium.src,zcbenz/cefode-chromium,Jonekee/chromium.src,hujiajie/pa-chromium,Joneke
e/chromium.src,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,ondra-novak/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,jaruba/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,patrickm/chromium.src,nacl-webkit/chrome_deps,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,littlstar/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,dushu1203/chromium.src,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,anirudhSK/chromium,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,zcbenz/cefode-chromium,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-cross
walk,hgl888/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,littlstar/chromium.src,zcbenz/cefode-chromium,jaruba/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,anirudhSK/chromium,littlstar/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,zcbenz/cefode-chromium,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,zcbenz/cefode-chromium,ondra-novak/chromium.src,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,ltilve/chromium,Just-D/chromium-1,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,Just-D/chromium-1,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,anirudhSK/chromium,zcbenz/cefode-chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,M4sse/chromium.src,hujiajie/pa-chromium,pozdnyakov/chromium-crosswalk,timopulkkine
n/BubbleFish,anirudhSK/chromium,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Chilledheart/chromium,ChromiumWebApps/chromium,Just-D/chromium-1,markYoungH/chromium.src | build/android/pylib/constants.py | build/android/pylib/constants.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Defines a set of constants shared by test runners and other scripts."""
import os
# Package/activity identifiers for full Chrome on Android.
CHROME_PACKAGE = 'com.google.android.apps.chrome'
CHROME_ACTIVITY = 'com.google.android.apps.chrome.Main'
# Abstract UNIX socket name exposed for DevTools remote debugging.
CHROME_DEVTOOLS_SOCKET = 'chrome_devtools_remote'
CHROME_TESTS_PACKAGE = 'com.google.android.apps.chrome.tests'
# The stock AOSP browser (pre-Chrome).
LEGACY_BROWSER_PACKAGE = 'com.google.android.browser'
LEGACY_BROWSER_ACTIVITY = 'com.android.browser.BrowserActivity'
# Minimal content_shell test harness.
CONTENT_SHELL_PACKAGE = 'org.chromium.content_shell_apk'
CONTENT_SHELL_ACTIVITY = 'org.chromium.content_shell_apk.ContentShellActivity'
CHROME_SHELL_PACKAGE = 'org.chromium.chrome.browser.test'
# Chromium test shell identifiers (including its DevTools socket).
CHROMIUM_TEST_SHELL_PACKAGE = 'org.chromium.chrome.testshell'
CHROMIUM_TEST_SHELL_ACTIVITY = (
    'org.chromium.chrome.testshell.ChromiumTestShellActivity')
CHROMIUM_TEST_SHELL_DEVTOOLS_SOCKET = 'chromium_testshell_devtools_remote'
# Absolute path of the Chromium checkout (three directories above this file).
CHROME_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                          '..', '..', '..'))
# Ports arrangement for various test servers used in Chrome for Android.
# Lighttpd server will attempt to use 9000 as default port, if unavailable it
# will find a free port from 8001 - 8999.
LIGHTTPD_DEFAULT_PORT = 9000
LIGHTTPD_RANDOM_PORT_FIRST = 8001
LIGHTTPD_RANDOM_PORT_LAST = 8999
TEST_SYNC_SERVER_PORT = 9031
# The net test server is started from 10000. Reserve 20000 ports for the all
# test-server based tests should be enough for allocating different port for
# individual test-server based test.
TEST_SERVER_PORT_FIRST = 10000
TEST_SERVER_PORT_LAST = 30000
# A file to record next valid port of test server.
TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
# Scratch directory on the device where test binaries are pushed.
TEST_EXECUTABLE_DIR = '/data/local/tmp'
# Directories for common java libraries for SDK build.
# These constants are defined in build/android/ant/common.xml
SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
SDK_BUILD_APKS_DIR = 'apks'
# The directory on the device where perf test output gets saved to.
DEVICE_PERF_OUTPUT_DIR = '/data/data/' + CHROME_PACKAGE + '/files'
SCREENSHOTS_DIR = os.path.join(CHROME_DIR, 'out_screenshots')
ANDROID_SDK_VERSION = 17
ANDROID_SDK_ROOT = os.path.join(CHROME_DIR, 'third_party/android_tools/sdk')
ANDROID_NDK_ROOT = os.path.join(CHROME_DIR, 'third_party/android_tools/ndk')
# Server collecting flakiness dashboard results.
UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Defines a set of constants shared by test runners and other scripts."""
import os
# Package/activity identifiers for full Chrome on Android.
CHROME_PACKAGE = 'com.google.android.apps.chrome'
CHROME_ACTIVITY = 'com.google.android.apps.chrome.Main'
CHROME_TESTS_PACKAGE = 'com.google.android.apps.chrome.tests'
# The stock AOSP browser (pre-Chrome).
LEGACY_BROWSER_PACKAGE = 'com.google.android.browser'
LEGACY_BROWSER_ACTIVITY = 'com.android.browser.BrowserActivity'
# Minimal content_shell test harness.  Quote style normalized to the single
# quotes used by every other constant in this module.
CONTENT_SHELL_PACKAGE = 'org.chromium.content_shell_apk'
CONTENT_SHELL_ACTIVITY = 'org.chromium.content_shell_apk.ContentShellActivity'
CHROME_SHELL_PACKAGE = 'org.chromium.chrome.browser.test'
CHROMIUM_TEST_SHELL_PACKAGE = 'org.chromium.chrome.testshell'
# Absolute path of the Chromium checkout (three directories above this file).
CHROME_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                          '..', '..', '..'))
# Ports arrangement for various test servers used in Chrome for Android.
# Lighttpd server will attempt to use 9000 as default port, if unavailable it
# will find a free port from 8001 - 8999.
LIGHTTPD_DEFAULT_PORT = 9000
LIGHTTPD_RANDOM_PORT_FIRST = 8001
LIGHTTPD_RANDOM_PORT_LAST = 8999
TEST_SYNC_SERVER_PORT = 9031
# The net test server is started from 10000. Reserve 20000 ports for the all
# test-server based tests should be enough for allocating different port for
# individual test-server based test.
TEST_SERVER_PORT_FIRST = 10000
TEST_SERVER_PORT_LAST = 30000
# A file to record next valid port of test server.
TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
# Scratch directory on the device where test binaries are pushed.
TEST_EXECUTABLE_DIR = '/data/local/tmp'
# Directories for common java libraries for SDK build.
# These constants are defined in build/android/ant/common.xml
SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
SDK_BUILD_APKS_DIR = 'apks'
# The directory on the device where perf test output gets saved to.
DEVICE_PERF_OUTPUT_DIR = '/data/data/' + CHROME_PACKAGE + '/files'
SCREENSHOTS_DIR = os.path.join(CHROME_DIR, 'out_screenshots')
ANDROID_SDK_VERSION = 17
ANDROID_SDK_ROOT = os.path.join(CHROME_DIR, 'third_party/android_tools/sdk')
ANDROID_NDK_ROOT = os.path.join(CHROME_DIR, 'third_party/android_tools/ndk')
# Server collecting flakiness dashboard results.
UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
| bsd-3-clause | Python |
4ab784d9526b2a4555e288038df0490269b17683 | 完成1题 | karchi/codewars_kata | 已完成/ToLeetSpeak.py | 已完成/ToLeetSpeak.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
# ToLeetSpeak题目地址:https://www.codewars.com/kata/57c1ab3949324c321600013f/train/python
'''
import unittest
class TestCases(unittest.TestCase):
    """Unit tests for to_leet_speak (codewars kata 57c1ab3949324c321600013f)."""

    def setUp(self):
        pass

    def test1(self):
        self.assertEqual(to_leet_speak("LEET"), "1337")

    def test2(self):
        self.assertEqual(to_leet_speak("CODEWARS"), "(0D3W@R$")

    def test3(self):
        self.assertEqual(to_leet_speak("HELLO WORLD"), "#3110 W0R1D")

    def test4(self):
        self.assertEqual(to_leet_speak("LOREM IPSUM DOLOR SIT AMET"), "10R3M !P$UM D010R $!7 @M37")

    def test5(self):
        self.assertEqual(to_leet_speak("THE QUICK BROWN FOX JUMPS OVER THE LAZY DOG"), "7#3 QU!(K 8R0WN F0X JUMP$ 0V3R 7#3 1@2Y D06")
# Translation table for letters that have a leet substitute; characters
# without an entry (spaces, punctuation, D/F/J/K/M/N/P/Q/R/U/V/W/X/Y)
# pass through str.translate unchanged, so the original dict's fourteen
# identity mappings are unnecessary.
_LEET_TABLE = str.maketrans("ABCEGHILOSTZ", "@8(36#!10$72")


def to_leet_speak(str):
    """Return *str* with uppercase letters replaced by their leet equivalents.

    The parameter keeps its original name ``str`` (shadowing the builtin)
    for backward compatibility with the kata's published signature.
    """
    return str.translate(_LEET_TABLE)
if __name__ == '__main__':
    # Run the unittest suite defined above when executed as a script.
    unittest.main()
'''
参考解法:
def to_leet_speak(str):
leet = {'A' : '@','B' : '8','C' : '(','D' : 'D','E' : '3','F' : 'F','G' : '6','H' : '#','I' : '!','J' : 'J','K' : 'K','L' : '1','M' : 'M','N' : 'N','O' : '0','P' : 'P','Q' : 'Q','R' : 'R','S' : '$','T' : '7','U' : 'U','V' : 'V','W' : 'W','X' : 'X','Y' : 'Y','Z' : '2'}
for i in leet:
str = str.replace(i, leet[i])
return str
''' | mit | Python | |
988b56b4348ec8be3127cfd6576779de4367d488 | Add pywikibot user-config file | wikimedia/labs-tools-lists,wikimedia/labs-tools-lists,wikimedia/labs-tools-lists | .pywikibot/user-config.py | .pywikibot/user-config.py | family = 'wikipedia'
# Default language code within the wiki family.
mylang = 'en'
# Bot account name per (family, language).  NOTE(review): ``usernames`` is not
# defined here — pywikibot injects it before exec'ing this config file.
usernames['wikipedia']['en'] = u'ExampleBot'
# Encoding used for terminal input/output.
console_encoding = 'utf-8'
# Encoding assumed when reading local text files.
textfile_encoding = 'unicode_escape'
| agpl-3.0 | Python | |
d808d55b5ca9ae2e45418aca718ee21a9beb84f9 | Create a custom reverse() function (not implemented yet) | ticosax/django-rest-framework,krinart/django-rest-framework,elim/django-rest-framework,delinhabit/django-rest-framework,qsorix/django-rest-framework,jerryhebert/django-rest-framework,mgaitan/django-rest-framework,MJafarMashhadi/django-rest-framework,leeahoward/django-rest-framework,wzbozon/django-rest-framework,ambivalentno/django-rest-framework,ebsaral/django-rest-framework,leeahoward/django-rest-framework,kennydude/django-rest-framework,callorico/django-rest-framework,andriy-s/django-rest-framework,fishky/django-rest-framework,werthen/django-rest-framework,aericson/django-rest-framework,edx/django-rest-framework,maryokhin/django-rest-framework,jerryhebert/django-rest-framework,arpheno/django-rest-framework,buptlsl/django-rest-framework,potpath/django-rest-framework,qsorix/django-rest-framework,antonyc/django-rest-framework,raphaelmerx/django-rest-framework,ajaali/django-rest-framework,jtiai/django-rest-framework,hunter007/django-rest-framework,cyberj/django-rest-framework,jtiai/django-rest-framework,rubendura/django-rest-framework,cheif/django-rest-framework,abdulhaq-e/django-rest-framework,nhorelik/django-rest-framework,nryoung/django-rest-framework,simudream/django-rest-framework,kylefox/django-rest-framework,agconti/django-rest-framework,MJafarMashhadi/django-rest-framework,xiaotangyuan/django-rest-framework,ambivalentno/django-rest-framework,tigeraniya/django-rest-framework,uploadcare/django-rest-framework,wangpanjun/django-rest-framework,thedrow/django-rest-framework-1,ticosax/django-rest-framework,elim/django-rest-framework,wedaly/django-rest-framework,paolopaolopaolo/django-rest-framework,johnraz/django-rest-framework,brandoncazander/django-rest-framework,wedaly/django-rest-framework,ticosax/django-rest-framework,uruz/django-rest-framework,kezabelle/django-rest-framework,VishvajitP/django-rest-framework,sehmaschine/django-rest-framework,vstoykov/django-rest
-framework,cheif/django-rest-framework,gregmuellegger/django-rest-framework,yiyocx/django-rest-framework,d0ugal/django-rest-framework,James1345/django-rest-framework,kgeorgy/django-rest-framework,hnarayanan/django-rest-framework,akalipetis/django-rest-framework,callorico/django-rest-framework,jness/django-rest-framework,douwevandermeij/django-rest-framework,VishvajitP/django-rest-framework,canassa/django-rest-framework,rafaelcaricio/django-rest-framework,rhblind/django-rest-framework,tomchristie/django-rest-framework,ossanna16/django-rest-framework,johnraz/django-rest-framework,andriy-s/django-rest-framework,alacritythief/django-rest-framework,jness/django-rest-framework,leeahoward/django-rest-framework,cheif/django-rest-framework,AlexandreProenca/django-rest-framework,kezabelle/django-rest-framework,raphaelmerx/django-rest-framework,hnakamur/django-rest-framework,raphaelmerx/django-rest-framework,pombredanne/django-rest-framework,ebsaral/django-rest-framework,jpadilla/django-rest-framework,rafaelang/django-rest-framework,dmwyatt/django-rest-framework,jness/django-rest-framework,d0ugal/django-rest-framework,canassa/django-rest-framework,wwj718/django-rest-framework,xiaotangyuan/django-rest-framework,adambain-vokal/django-rest-framework,uploadcare/django-rest-framework,ossanna16/django-rest-framework,canassa/django-rest-framework,damycra/django-rest-framework,sheppard/django-rest-framework,pombredanne/django-rest-framework,tcroiset/django-rest-framework,justanr/django-rest-framework,akalipetis/django-rest-framework,paolopaolopaolo/django-rest-framework,kylefox/django-rest-framework,nhorelik/django-rest-framework,sheppard/django-rest-framework,brandoncazander/django-rest-framework,kennydude/django-rest-framework,buptlsl/django-rest-framework,hnarayanan/django-rest-framework,uruz/django-rest-framework,hunter007/django-rest-framework,HireAnEsquire/django-rest-framework,ambivalentno/django-rest-framework,tcroiset/django-rest-framework,waytai/django-rest-framework,tigeran
iya/django-rest-framework,ossanna16/django-rest-framework,antonyc/django-rest-framework,andriy-s/django-rest-framework,elim/django-rest-framework,qsorix/django-rest-framework,James1345/django-rest-framework,agconti/django-rest-framework,lubomir/django-rest-framework,jpadilla/django-rest-framework,johnraz/django-rest-framework,rubendura/django-rest-framework,kezabelle/django-rest-framework,gregmuellegger/django-rest-framework,tigeraniya/django-rest-framework,abdulhaq-e/django-rest-framework,atombrella/django-rest-framework,hunter007/django-rest-framework,rafaelcaricio/django-rest-framework,callorico/django-rest-framework,abdulhaq-e/django-rest-framework,adambain-vokal/django-rest-framework,delinhabit/django-rest-framework,HireAnEsquire/django-rest-framework,douwevandermeij/django-rest-framework,rubendura/django-rest-framework,waytai/django-rest-framework,krinart/django-rest-framework,vstoykov/django-rest-framework,iheitlager/django-rest-framework,dmwyatt/django-rest-framework,yiyocx/django-rest-framework,rhblind/django-rest-framework,atombrella/django-rest-framework,jpulec/django-rest-framework,mgaitan/django-rest-framework,jtiai/django-rest-framework,nryoung/django-rest-framework,davesque/django-rest-framework,xiaotangyuan/django-rest-framework,douwevandermeij/django-rest-framework,hnakamur/django-rest-framework,jpulec/django-rest-framework,pombredanne/django-rest-framework,potpath/django-rest-framework,d0ugal/django-rest-framework,YBJAY00000/django-rest-framework,werthen/django-rest-framework,wwj718/django-rest-framework,kylefox/django-rest-framework,zeldalink0515/django-rest-framework,davesque/django-rest-framework,wangpanjun/django-rest-framework,bluedazzle/django-rest-framework,maryokhin/django-rest-framework,krinart/django-rest-framework,paolopaolopaolo/django-rest-framework,wzbozon/django-rest-framework,hnarayanan/django-rest-framework,edx/django-rest-framework,thedrow/django-rest-framework-1,bluedazzle/django-rest-framework,AlexandreProenca/django-rest-framew
ork,YBJAY00000/django-rest-framework,jpulec/django-rest-framework,maryokhin/django-rest-framework,ezheidtmann/django-rest-framework,jpadilla/django-rest-framework,linovia/django-rest-framework,sheppard/django-rest-framework,dmwyatt/django-rest-framework,sehmaschine/django-rest-framework,linovia/django-rest-framework,rafaelcaricio/django-rest-framework,adambain-vokal/django-rest-framework,aericson/django-rest-framework,kgeorgy/django-rest-framework,gregmuellegger/django-rest-framework,ezheidtmann/django-rest-framework,tomchristie/django-rest-framework,edx/django-rest-framework,lubomir/django-rest-framework,nhorelik/django-rest-framework,ebsaral/django-rest-framework,ashishfinoit/django-rest-framework,zeldalink0515/django-rest-framework,arpheno/django-rest-framework,sbellem/django-rest-framework,sbellem/django-rest-framework,nryoung/django-rest-framework,wangpanjun/django-rest-framework,atombrella/django-rest-framework,damycra/django-rest-framework,buptlsl/django-rest-framework,rafaelang/django-rest-framework,wzbozon/django-rest-framework,vstoykov/django-rest-framework,akalipetis/django-rest-framework,potpath/django-rest-framework,yiyocx/django-rest-framework,simudream/django-rest-framework,James1345/django-rest-framework,AlexandreProenca/django-rest-framework,waytai/django-rest-framework,alacritythief/django-rest-framework,sehmaschine/django-rest-framework,cyberj/django-rest-framework,simudream/django-rest-framework,ezheidtmann/django-rest-framework,werthen/django-rest-framework,ajaali/django-rest-framework,ashishfinoit/django-rest-framework,arpheno/django-rest-framework,agconti/django-rest-framework,sbellem/django-rest-framework,lubomir/django-rest-framework,alacritythief/django-rest-framework,bluedazzle/django-rest-framework,wwj718/django-rest-framework,aericson/django-rest-framework,HireAnEsquire/django-rest-framework,uploadcare/django-rest-framework,fishky/django-rest-framework,uruz/django-rest-framework,linovia/django-rest-framework,wedaly/django-rest-framework,
kgeorgy/django-rest-framework,YBJAY00000/django-rest-framework,hnakamur/django-rest-framework,antonyc/django-rest-framework,mgaitan/django-rest-framework,kennydude/django-rest-framework,tomchristie/django-rest-framework,brandoncazander/django-rest-framework,fishky/django-rest-framework,rhblind/django-rest-framework,VishvajitP/django-rest-framework,iheitlager/django-rest-framework,iheitlager/django-rest-framework,zeldalink0515/django-rest-framework,thedrow/django-rest-framework-1,MJafarMashhadi/django-rest-framework,davesque/django-rest-framework,justanr/django-rest-framework,ashishfinoit/django-rest-framework,rafaelang/django-rest-framework,jerryhebert/django-rest-framework,damycra/django-rest-framework,ajaali/django-rest-framework,tcroiset/django-rest-framework,delinhabit/django-rest-framework,cyberj/django-rest-framework,justanr/django-rest-framework | djangorestframework/urlresolvers.py | djangorestframework/urlresolvers.py | from django.core.urlresolvers import reverse
def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None, current_app=None):
    # Placeholder for a framework-specific reverse(); deliberately shadows the
    # django.core.urlresolvers.reverse imported above.  Mirrors Django's
    # signature so it can be swapped in transparently once implemented.
    raise NotImplementedError
| bsd-2-clause | Python | |
631faacaf077c2b4d0d446e42076fd4e4f27ed37 | Add tests for template tags | lotrekagency/djlotrek,lotrekagency/djlotrek | djlotrek/tests/test_templatetags.py | djlotrek/tests/test_templatetags.py | import os
import mock
from django.test import TestCase
from djlotrek.templatetags.djlotrek_tags import absolute_url
from django.test import RequestFactory
class TemplateTagsTestCase(TestCase):

    def setUp(self):
        pass

    def test_absolute_url(self):
        """absolute_url should prefix paths with the request's host URL."""
        factory = RequestFactory()
        request = factory.get('/path')
        request.META['HTTP_HOST'] = 'localhost'
        context = {'request': request}
        expectations = [
            ('/ciao/', 'http://localhost/ciao/'),
            ('ciao/', 'http://localhost/ciao/'),
            ('ciao', 'http://localhost/ciao'),
            ('ciao/a/tutti', 'http://localhost/ciao/a/tutti'),
            ('ciao/a/tutti?language=it', 'http://localhost/ciao/a/tutti?language=it'),
        ]
        for path, expected in expectations:
            self.assertEqual(absolute_url(context, path), expected)

    def test_absolute_url_without_request(self):
        """Without a request in the context the path is returned unchanged."""
        self.assertEqual(absolute_url({}, '/ciao/'), '/ciao/')
e812029c03cb6a7a6e474546fb686342e6d2c064 | Add test for `wsgiref.simple_server` | github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql | python/ql/test/library-tests/frameworks/stdlib/wsgiref_simple_server_test.py | python/ql/test/library-tests/frameworks/stdlib/wsgiref_simple_server_test.py | # This test file demonstrates how to use an application with a wsgiref.simple_server
# see https://docs.python.org/3/library/wsgiref.html#wsgiref.simple_server.WSGIServer
import sys
import wsgiref.simple_server
def ignore(*args, **kwargs):
    """Accept any arguments and do nothing (stub sink for taint annotations)."""
    return None


# Both taint-assertion helpers are aliases of the same no-op.
ensure_tainted = ensure_not_tainted = ignore
# Host/port the demo servers below bind to.
ADDRESS = ("localhost", 8000)

# I wanted to showcase that we handle both functions and bound-methods, so it's possible
# to run this test-file in 2 different ways.
# NOTE: the trailing ``# $ ...`` comments are CodeQL inline test expectations
# and must stay attached to their statements.
def func(environ, start_response): # $ MISSING: requestHandler
    # ``environ`` is the WSGI request dict and is attacker-controlled.
    ensure_tainted(
        environ, # $ MISSING: tainted
        environ["PATH_INFO"], # $ MISSING: tainted
    )
    # start_response returns the legacy ``write`` callable (PEP 3333); data
    # passed to it goes straight into the response body.
    write = start_response("200 OK", [("Content-Type", "text/plain")])
    write(b"hello") # $ MISSING: HttpResponse responseBody=b"hello"
    write(data=b" ") # $ MISSING: HttpResponse responseBody=b" "
    # function return value should be an iterable that will also be written to the
    # response.
    return [b"world", b"!"] # $ MISSING: HttpResponse responseBody=List
class MyServer(wsgiref.simple_server.WSGIServer):
    """WSGI server that registers one of its own bound methods as the app."""

    def __init__(self):
        super().__init__(ADDRESS, wsgiref.simple_server.WSGIRequestHandler)
        # Bound method used as the WSGI application callable.
        self.set_app(self.my_method)

    def my_method(self, _env, start_response): # $ MISSING: requestHandler
        start_response("200 OK", [])
        return [b"my_method"] # $ MISSING: HttpResponse responseBody=List
# Select one of three wiring styles from the command line (see module header).
case = sys.argv[1]
if case == "1":
    # Plain function handler registered on a stock WSGIServer.
    server = wsgiref.simple_server.WSGIServer(ADDRESS, wsgiref.simple_server.WSGIRequestHandler)
    server.set_app(func)
elif case == "2":
    # Bound-method handler; MyServer wires itself up in __init__.
    server = MyServer()
elif case == "3":
    # set_app() after construction replaces the method handler with func3.
    server = MyServer()
    def func3(_env, start_response): # $ MISSING: requestHandler
        start_response("200 OK", [])
        return [b"foo"] # $ MISSING: HttpResponse responseBody=List
    server.set_app(func3)
else:
    sys.exit("wrong case")

print(f"Running on http://{ADDRESS[0]}:{ADDRESS[1]}")
server.serve_forever()
| mit | Python | |
f2028ab194fe7c1c1497ee9320ddddbbece6406a | Add eventlet backdoor to facilitate troubleshooting. | n0ano/ganttclient | nova/common/eventlet_backdoor.py | nova/common/eventlet_backdoor.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 Openstack, LLC.
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gc
import traceback
import eventlet
import eventlet.backdoor
import greenlet
from nova import flags
from nova.openstack.common import cfg
eventlet_backdoor_opts = [
cfg.IntOpt('backdoor_port',
default=None,
help='port for eventlet backdoor to listen')
]
FLAGS = flags.FLAGS
FLAGS.register_opts(eventlet_backdoor_opts)
def dont_use_this():
print "Don't use this, just disconnect instead"
def find_objects(t):
return filter(lambda o: isinstance(o, t), gc.get_objects())
def print_greenthreads():
for i, gt in enumerate(find_objects(greenlet.greenlet)):
print i, gt
traceback.print_stack(gt.gr_frame)
print
backdoor_locals = {
'_': None, # So it doesn't interfere with the global
'exit': dont_use_this, # So we don't exit the entire process
'quit': dont_use_this, # So we don't exit the entire process
'fo': find_objects,
'pgt': print_greenthreads,
}
def initialize_if_enabled():
if FLAGS.backdoor_port is None:
return
eventlet.spawn(eventlet.backdoor.backdoor_server,
eventlet.listen(('localhost', FLAGS.backdoor_port)),
locals=backdoor_locals)
| apache-2.0 | Python | |
d81ba7d656f11e817eb610b1c65a4880fddc9004 | Fix getting money from arcade games. | saylua/SayluaV2,saylua/SayluaV2,LikeMyBread/Saylua,saylua/SayluaV2,LikeMyBread/Saylua,LikeMyBread/Saylua,LikeMyBread/Saylua | saylua/modules/arcade/api.py | saylua/modules/arcade/api.py | from saylua import db
from saylua.wrappers import api_login_required
from flask import g, request
from models.db import Game, GameLog
from saylua.utils import int_or_none
import json
# Send a score to the API.
@api_login_required()
def api_send_score(game_id):
try:
gameName = Game(game_id)
except IndexError:
return json.dumps(dict(error='Invalid game!')), 400
finally:
if gameName == "blocks":
# TODO sanity check the game log and other variables sent to catch
# low hanging fruit attempts at cheating.
data = request.get_json()
score = int_or_none(data.get('score')) or 0
GameLog.record_score(g.user.id, game_id, score)
g.user.cloud_coins += score
db.session.commit()
return json.dumps(dict(cloud_coins=g.user.cloud_coins, star_shards=g.user.star_shards))
return json.dumps(dict(error='Bad request.')), 400
| from saylua.wrappers import api_login_required
from flask import g, request
from models.db import Game, GameLog
from saylua.utils import int_or_none
import json
# Send a score to the API.
@api_login_required()
def api_send_score(game_id):
try:
gameName = Game(game_id)
except IndexError:
return json.dumps(dict(error='Invalid game!')), 400
finally:
if gameName == "blocks":
# TODO sanity check the game log and other variables sent to catch
# low hanging fruit attempts at cheating.
data = request.get_json()
score = int_or_none(data.get('score')) or 0
GameLog.record_score(g.user.id, game_id, score)
g.user.cloud_coins += score
return json.dumps(dict(cloud_coins=g.user.cloud_coins, star_shards=g.user.star_shards))
return json.dumps(dict(error='Bad request.')), 400
| agpl-3.0 | Python |
43d7160272511107528a33d7dff932ed274d9b58 | add sitemaps | edoburu/django-fluent-faq,edoburu/django-fluent-faq | fluent_faq/sitemaps.py | fluent_faq/sitemaps.py | from django.contrib.sitemaps import Sitemap
from fluent_faq.models import FaqCategory, FaqQuestion
from fluent_faq.urlresolvers import faq_reverse
class FaqQuestionSitemap(Sitemap):
"""
Sitemap for FAQ questions
"""
def items(self):
return FaqQuestion.objects.published()
def lastmod(self, category):
"""Return the last modification of the object."""
return category.modification_date
def location(self, category):
"""Return url of an question."""
return faq_reverse('faqcategory_detail', kwargs={'slug': category.slug}, ignore_multiple=True)
class FaqCategorySitemap(Sitemap):
"""
Sitemap for FAQ categories.
"""
def items(self):
return FaqCategory.objects.published()
def lastmod(self, category):
"""Return the last modification of the object."""
return category.modification_date
def location(self, category):
"""Return url of an category."""
return faq_reverse('faqcategory_detail', kwargs={'slug': category.slug}, ignore_multiple=True)
| apache-2.0 | Python | |
23f4e54ea84a23af55e29ead27a38af12672aa43 | Create multi_currency_prices.py | toloco/pyoanda | examples/multi_currency_prices.py | examples/multi_currency_prices.py |
from pyoanda import Client, PRACTICE
client = Client(environment=PRACTICE,account_id="Your Oanda account ID",access_token="Your Oanda access token")
# Get prices for a list of instruments
pair_list = ['AUD_JPY','EUR_JPY','GBP_JPY','AUD_USD']
dataset = client.get_prices(instruments=','.join(pair_list),stream=False)
#json response::
{u'prices': [{u'ask': 81.551,
u'bid': 81.53,
u'instrument': u'AUD_JPY',
u'time': u'2016-01-26T07:39:56.525788Z'},
{u'ask': 127.975,
u'bid': 127.957,
u'instrument': u'EUR_JPY',
u'time': u'2016-01-26T07:39:55.712253Z'},
{u'ask': 167.269,
u'bid': 167.239,
u'instrument': u'GBP_JPY',
u'time': u'2016-01-26T07:39:58.333404Z'},
{u'ask': 0.69277,
u'bid': 0.6926,
u'instrument': u'AUD_USD',
u'time': u'2016-01-26T07:39:50.358020Z'}]}
# simplistic way of extracting data from the json response::
aud_jpy = [d for d in dataset['prices'] if d['instrument']=='AUD_JPY']
bid = [d['bid'] for d in aud_jpy][-1]
ask = [d['ask'] for d in aud_jpy][-1]
time = [d['time'] for d in aud_jpy][-1]
| mit | Python | |
22b04a8a6a014ee4e077f2dc03338bdc9479cc5c | package module for handling wavelength calib | adrn/GaiaPairsFollowup | comoving_rv/longslit/wavelength.py | comoving_rv/longslit/wavelength.py | # Third-party
import numpy as np
from scipy.optimize import minimize, leastsq
from scipy.stats import scoreatpercentile
# Project
from .models import voigt_polynomial
__all__ = ['fit_emission_line']
def errfunc(p, pix, flux, flux_ivar):
amp, x_0, std_G, fwhm_L, *bg_coef = p
return (voigt_polynomial(pix, amp, x_0, std_G, fwhm_L, bg_coef) - flux) * np.sqrt(flux_ivar)
def fit_emission_line(pix, flux, flux_ivar=None,
amp0=None, x0=None, std_G0=None, fwhm_L0=None, n_bg_coef=1):
"""
TODO:
Parameters
----------
pix : array_like
Must be the same shape as ``flux``.
flux : array_like
Must be the same shape as ``pix_grid``.
amp0 : numeric (optional)
Initial guess for line amplitude.
x0 : numeric (optional)
Initial guess for line centroid.
n_bg_coef : int
Number of terms in the background polynomial fit.
"""
if x0 is None: # then estimate the initial guess for the centroid
x0 = pix[np.argmax(flux)]
int_ctrd0 = int(round(x0-pix.min()))
if amp0 is None: # then estimate the initial guess for amplitude
amp0 = flux[int_ctrd0] # flux at initial guess
bg0 = np.array([0.] * n_bg_coef)
bg0[0] = scoreatpercentile(flux[flux>0], 5.)
if std_G0 is None:
std_G0 = 2. # MAGIC NUMBER
if fwhm_L0 is None:
fwhm_L0 = 0.5 # MAGIC NUMBER
if flux_ivar is None:
flux_ivar = 1.
p0 = [amp0, x0, std_G0, fwhm_L0] + bg0.tolist()
print(p0)
p_opt,p_cov,*_,mesg,ier = leastsq(errfunc, p0, args=(pix, flux, flux_ivar),
full_output=True)
print(p_opt)
# res = minimize(_errfunc, x0=p0, args=(pix_grid, flux, flux_ivar))
# p = res.x
fit_amp, fit_x0, fit_std_G, fit_fwhm_L, *fit_bg = p_opt
fail_msg = "Fitting spectral line in comp lamp spectrum failed. {msg}"
if ier < 1 or ier > 4:
raise RuntimeError(fail_msg.format(msg=mesg))
if fit_x0 < min(pix) or fit_x0 > max(pix):
raise ValueError(fail_msg.format(msg="Unphysical peak centroid: {:.3f}".format(fit_x0)))
return dict(amp=fit_amp, x_0=fit_x0,
std_G=fit_std_G, fwhm_L=fit_fwhm_L,
bg_coef=fit_bg)
| mit | Python | |
21e411171e811e1b68ad3674567ecb05f6f7a7ad | add migrations | arteria/cmsplugin-contact-plus,arteria/cmsplugin-contact-plus | cmsplugin_contact_plus/migrations/0004_auto_20170410_1553.py | cmsplugin_contact_plus/migrations/0004_auto_20170410_1553.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cmsplugin_contact_plus', '0003_auto_20161102_1927'),
]
operations = [
migrations.AddField(
model_name='extrafield',
name='placeholder',
field=models.CharField(max_length=250, null=True, verbose_name='Placeholder Value', blank=True),
),
migrations.AlterField(
model_name='contactplus',
name='recipient_email',
field=models.EmailField(default=b'', max_length=254, verbose_name='Email of recipients'),
),
]
| bsd-3-clause | Python | |
fe18d3387f7f8072b4f23990e5108f646729a860 | Create pKaKs2.7.py | CharlesSanfiorenzo/Bioinformatics,CharlesSanfiorenzo/Bioinformatics,CharlesSanfiorenzo/Bioinformatics | Modules/pKaKs2.7.py | Modules/pKaKs2.7.py | #This short script uses the output values of KaKs.pl & SnpEff to calculate mutational load using Nei-Gojobori: pKa/Ks = [-3/4ln(1-4pn/3)] / [-3/4ln(1-4ps/3)], where ps = syn SNPs / syn sites and pn = nonsyn SNPs / nonsyn sites
from math import log #If for some reason you need to calculate the logarithm of a negative number, import cmath instead.
import ConfigParser
config = ConfigParser.RawConfigParser()
config.read("config.ini")
nonSyn_site = float(config.get("myvars", "non-synonymous_number"))
Syn_site = float(config.get("myvars", "synonymous_number"))
nonSyn_SNP = float(config.get("myvars", "non-synonymous_snp"))
Syn_SNP = float(config.get("myvars", "synonymous_snp"))
pn = nonSyn_SNP/nonSyn_site
ps = Syn_SNP/Syn_site
print "The pKs/Ks ratio for this organism is:", (-3/4*log(1-(4*pn)/3))/(-3/4*log(1-(4*ps)/3))
| mit | Python | |
61e56ad3feecef6fe422db8fb5d7b9b26dc03d6a | Add day 3 part 2. | SayWhat1/adventofcode2016 | day3-2.py | day3-2.py | """This module checks how many valid triangles are in the input data."""
def main():
"""Run main function."""
with open('data/day3data.txt', 'r') as f:
input = f.readlines()
dataList = [map(int, i.strip('\n').split()) for i in input]
# Transpose the data.
dataList = [list(i) for i in zip(*dataList)]
# Flatten the list.
triList = [item for sublist in dataList for item in sublist]
triangles = 0
for i in range(0, len(triList), 3):
print([triList[i], triList[i + 1], triList[i + 2]])
if isTriangle([triList[i], triList[i + 1], triList[i + 2]]):
triangles += 1
print('There are {} valid triagles.').format(triangles)
def isTriangle(input):
"""Check if list of three sides is a triangle."""
if 2 * max(input) < sum(input):
return True
return False
if __name__ == '__main__':
main()
| mit | Python | |
047541a111e9da5d59b47d40a528bc990bae6927 | add scope expression | ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study | compiler/eLisp/eLisp/expr/scope.py | compiler/eLisp/eLisp/expr/scope.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list ofconditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materialsprovided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from eLisp.expr.procedure import (
primitive_procedure_names, primitive_procedure_values)
from eLisp.expr.util import pair_to_list
class Environment(object):
def __init__(self, parent=None, bindings=None):
self.parent = parent
self.bindings = dict() if bindings is None else bindings
def define_variable(self, symbol, val):
self.bindings[symbol.name] = val
def set_variable_value(self, symbol, val):
name = symbol.name
if name in self.bindings:
self.bindings[name] = val
elif self.parent is not None:
self.parent.set_variable_value(symbol, val)
else:
raise Number('Unbound variable - SET! %s' % name)
def load(self, symbol):
name = symbol.name
if name in self.bindings:
return self.bindings[name]
if self.parent is not None:
return self.parent.load(symbol)
return None
def setup_environment():
bindings = dict(zip(
primitive_procedure_names(), primitive_procedure_values()))
return Environment(bindings=bindings)
def define_variable(var, val, env):
env.define_variable(var, val)
def lookup_variable_value(var, env):
val = env.load(var)
if val is None:
raise NameError('Unbound variable: %s' % var)
return val
def extend_environment(variables, values, env):
bindings = dict(zip(
[var.name for var in pair_to_list(variables)],
pair_to_list(values)))
env = Environment(parent=env, bindings=bindings)
return env
| bsd-2-clause | Python | |
2d05a12a9b9534ad1925e7d543e6f66d8a79d3f8 | Initialize P02_deleteBigFiles | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter09/PracticeProjects/P02_deleteBigFiles.py | books/AutomateTheBoringStuffWithPython/Chapter09/PracticeProjects/P02_deleteBigFiles.py | # t’s not uncommon for a few unneeded but humongous files or folders to take up the
# bulk of the space on your hard drive. If you’re trying to free up room on your
# computer, you’ll get the most bang for your buck by deleting the most massive of
# the unwanted files. But first you have to find them.
#
# Write a program that walks through a folder tree and searches for exceptionally
# large files or folders—say, ones that have a file size of more than 100MB.
# (Remember, to get a file’s size, you can use os.path.getsize() from the os module.)
# Print these files with their absolute path to the screen.
| mit | Python | |
cee924604f070bd1bbca33dda53c5783e2678c5d | Add tests for sagepay | artursmet/django-payments,imakin/pysar-payments,derenio/django-payments,artursmet/django-payments,imakin/pysar-payments,artursmet/django-payments,dashmug/django-payments,derenio/django-payments,polonat/django-payments,derenio/django-payments,polonat/django-payments,polonat/django-payments,dashmug/django-payments,dashmug/django-payments,imakin/pysar-payments | payments/sagepay/test_sagepay.py | payments/sagepay/test_sagepay.py | from __future__ import unicode_literals
from unittest import TestCase
from mock import patch, MagicMock
from . import SagepayProvider
VENDOR = 'abcd1234'
ENCRYPTION_KEY = '1234abdd1234abcd'
class Payment(MagicMock):
id = 1
variant = 'sagepay'
currency = 'USD'
total = 100
status = 'waiting'
transaction_id = None
captured_amount = 0
billing_first_name = 'John'
def get_process_url(self):
return 'http://example.com'
def get_failure_url(self):
return 'http://cancel.com'
def get_success_url(self):
return 'http://success.com'
def change_status(self, status):
self.status = status
class TestSagepayProvider(TestCase):
def setUp(self):
self.payment = Payment()
self.provider = SagepayProvider(
vendor=VENDOR, encryption_key=ENCRYPTION_KEY)
@patch('payments.sagepay.redirect')
def test_provider_raises_redirect_needed_on_success(self, mocked_redirect):
data = {'Status': 'OK'}
data = "&".join(u"%s=%s" % kv for kv in data.items())
with patch.object(SagepayProvider, 'aes_dec', return_value=data):
self.provider.process_data(self.payment, MagicMock())
self.assertEqual(self.payment.status, 'confirmed')
self.assertEqual(self.payment.captured_amount, self.payment.total)
@patch('payments.sagepay.redirect')
def test_provider_raises_redirect_needed_on_failure(self, mocked_redirect):
data = {'Status': ''}
data = "&".join(u"%s=%s" % kv for kv in data.items())
with patch.object(SagepayProvider, 'aes_dec', return_value=data):
self.provider.process_data(self.payment, MagicMock())
self.assertEqual(self.payment.status, 'rejected')
self.assertEqual(self.payment.captured_amount, 0)
def test_provider_encrypts_data(self):
data = self.provider.get_hidden_fields(self.payment)
decrypted_data = self.provider.aes_dec(data['Crypt'])
self.assertIn(self.payment.billing_first_name, decrypted_data) | bsd-3-clause | Python | |
2528be1179355a9fcce40f283be18e87d682fede | add rpn creat tools | likelyzhao/mxnet,likelyzhao/mxnet,likelyzhao/mxnet,likelyzhao/mxnet,likelyzhao/mxnet,likelyzhao/mxnet,likelyzhao/mxnet,likelyzhao/mxnet,likelyzhao/mxnet | example/rcnn/rcnn/tools/proposal.py | example/rcnn/rcnn/tools/proposal.py | import argparse
import pprint
import mxnet as mx
from ..config import config, default, generate_config
from ..symbol import *
from ..dataset import *
from ..core.loader import TestLoader
from ..core.tester import Predictor, generate_proposals
from ..utils.load_model import load_param
def test_rpn(network, dataset, image_set, root_path, dataset_path,
ctx, prefix, epoch,
vis, shuffle, thresh):
# rpn generate proposal config
config.TEST.HAS_RPN = True
# print config
pprint.pprint(config)
# load symbol
sym = eval('get_' + network + '_rpn_test')(num_anchors=config.NUM_ANCHORS)
# load dataset and prepare imdb for training
imdb = eval(dataset)(image_set, root_path, dataset_path)
roidb = imdb.gt_roidb()
test_data = TestLoader(roidb, batch_size=1, shuffle=shuffle, has_rpn=True)
# load model
arg_params, aux_params = load_param(prefix, epoch, convert=True, ctx=ctx)
# infer shape
data_shape_dict = dict(test_data.provide_data)
arg_shape, _, aux_shape = sym.infer_shape(**data_shape_dict)
arg_shape_dict = dict(zip(sym.list_arguments(), arg_shape))
aux_shape_dict = dict(zip(sym.list_auxiliary_states(), aux_shape))
# check parameters
for k in sym.list_arguments():
if k in data_shape_dict or 'label' in k:
continue
assert k in arg_params, k + ' not initialized'
assert arg_params[k].shape == arg_shape_dict[k], \
'shape inconsistent for ' + k + ' inferred ' + str(arg_shape_dict[k]) + ' provided ' + str(arg_params[k].shape)
for k in sym.list_auxiliary_states():
assert k in aux_params, k + ' not initialized'
assert aux_params[k].shape == aux_shape_dict[k], \
'shape inconsistent for ' + k + ' inferred ' + str(aux_shape_dict[k]) + ' provided ' + str(aux_params[k].shape)
# decide maximum shape
data_names = [k[0] for k in test_data.provide_data]
label_names = None if test_data.provide_label is None else [k[0] for k in test_data.provide_label]
max_data_shape = [('data', (1, 3, max([v[0] for v in config.SCALES]), max([v[1] for v in config.SCALES])))]
# create predictor
predictor = Predictor(sym, data_names, label_names,
context=ctx, max_data_shapes=max_data_shape,
provide_data=test_data.provide_data, provide_label=test_data.provide_label,
arg_params=arg_params, aux_params=aux_params)
# start testing
imdb_boxes = generate_proposals(predictor, test_data, imdb, vis=vis, thresh=thresh)
#imdb.evaluate_recall(roidb, candidate_boxes=imdb_boxes)
def parse_args():
parser = argparse.ArgumentParser(description='Test a Region Proposal Network')
# general
parser.add_argument('--network', help='network name', default=default.network, type=str)
parser.add_argument('--dataset', help='dataset name', default=default.dataset, type=str)
args, rest = parser.parse_known_args()
generate_config(args.network, args.dataset)
parser.add_argument('--image_set', help='image_set name', default=default.test_image_set, type=str)
parser.add_argument('--root_path', help='output data folder', default=default.root_path, type=str)
parser.add_argument('--dataset_path', help='dataset path', default=default.dataset_path, type=str)
# testing
parser.add_argument('--prefix', help='model to test with', default=default.rpn_prefix, type=str)
parser.add_argument('--epoch', help='model to test with', default=default.rpn_epoch, type=int)
# rpn
parser.add_argument('--gpu', help='GPU device to test with', default=0, type=int)
parser.add_argument('--vis', help='turn on visualization', action='store_true')
parser.add_argument('--thresh', help='rpn proposal threshold', default=0, type=float)
parser.add_argument('--shuffle', help='shuffle data on visualization', action='store_true')
args = parser.parse_args()
return args
def main():
args = parse_args()
print 'Called with argument:', args
ctx = mx.gpu(args.gpu)
test_rpn(args.network, args.dataset, args.image_set, args.root_path, args.dataset_path,
ctx, args.prefix, args.epoch,
args.vis, args.shuffle, args.thresh)
if __name__ == '__main__':
main()
| apache-2.0 | Python | |
a07e4d08b475e0d921265f9da104f109943901bc | Add lammps wrapper tests with cuds | simphony/simphony-lammps-md,simphony/simphony-lammps-md | simlammps/tests/cuds_test.py | simlammps/tests/cuds_test.py | """Tests for running lammps using CUDS and Simulation classes."""
import unittest
from simphony.core.cuba import CUBA
from simphony import CUDS, Simulation
from simphony.engine import EngineInterface
from simphony.testing.utils import create_particles_with_id
from simphony.cuds.particles import Particle, Particles
class LAMMPSCUDSTestCase(unittest.TestCase):
def setUp(self):
self.cuds = self.generate_cuds()
def generate_cuds(self):
pset1 = create_particles_with_id(restrict=[CUBA.VELOCITY])
pset2 = create_particles_with_id(restrict=[CUBA.VELOCITY])
ps1 = Particles('ps1')
ps2 = Particles('ps2')
ps1.add_particles(pset1)
ps2.add_particles(pset2)
c = CUDS()
c.add(ps1)
c.add(ps2)
return c
def test_create_lammps_internal_simulation(self):
self.assertRaisesRegexp(RuntimeError,
'CUBAExtension.BOX_VECTORS',
Simulation,
self.cuds,
'LAMMPS',
EngineInterface.Internal)
def test_create_lammps_fileio_simulation(self):
sim = Simulation(self.cuds, 'LAMMPS', EngineInterface.FileIO)
def test_create_liggghts_internal_simulation(self):
self.assertRaisesRegexp(RuntimeError,
'DEM using the INTERNAL interface is not yet supported',
Simulation,
self.cuds,
'LIGGGHTS',
EngineInterface.Internal)
def test_create_liggghts_fileio_simulation(self):
sim = Simulation(self.cuds, 'LIGGGHTS', EngineInterface.FileIO) | bsd-2-clause | Python | |
257c5bffe1804d694510f5a4638de8e6ae6a1470 | Create lstm_gan_mnist.py | amitadate/S-LSTM-GAN-MNIST | lstm_gan_mnist.py | lstm_gan_mnist.py | import tensorflow as tf
| mit | Python | |
7a813d21043c394ab10e1ddb687d7827a8b7e761 | add slideshare plugin | figshare/Total-Impact,figshare/Total-Impact,figshare/Total-Impact,figshare/Total-Impact | plugins/slideshare/slideshare.py | plugins/slideshare/slideshare.py | #!/usr/bin/env python
import urllib2
import re
import urllib
import time
import sha
import BeautifulSoup
from BeautifulSoup import BeautifulStoneSoup
from optparse import OptionParser
TOTALIMPACT_SLIDESHARE_KEY = "nyHCUoNM"
TOTALIMPACT_SLIDESHARE_SECRET = "z7sRiGCG"
MENDELEY_DOI_URL = "http://www.slideshare.net/api/2/get_slideshow?api_key=nyHCUoNM&detailed=1&ts=%s&hash=%s&slideshow_url=%s"
def get_page(id):
if not id:
return(None)
ts = time.time()
hash_combo = sha.new(TOTALIMPACT_SLIDESHARE_SECRET + str(ts)).hexdigest()
url = MENDELEY_DOI_URL %(ts, hash_combo, id)
print url
try:
page = urllib2.urlopen(url).read()
except urllib2.HTTPError, err:
if err.code == 404:
page = None
else:
raise
return(page)
def get_stats(page):
if not page:
return(None)
soup = BeautifulStoneSoup(page)
downloads = soup.numdownloads.text
views = soup.numviews.text
comments = soup.numcomments.text
favorites = soup.numfavorites.text
response = {"downloads":downloads, "views":views, "comments":comments, "favorites":favorites}
return(response)
from optparse import OptionParser
def main():
parser = OptionParser(usage="usage: %prog [options] filename",
version="%prog 1.0")
#parser.add_option("-x", "--xhtml",
# action="store_true",
# dest="xhtml_flag",
# default=False,
# help="create a XHTML template instead of HTML")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("wrong number of arguments")
print options
print args
id = args[0]
page = get_page(id)
response = get_stats(page)
print response
if __name__ == '__main__':
main()
#example = "http://www.slideshare.net/hpiwowar/7-data-citation-challenges-illustrated-with-data-includes-elephants"
mendeley_test_id = "http://www.slideshare.net/hpiwowar/7-data-citation-challenges-illustrated-with-data-includes-elephants"
#mendeley_test_doi = "10.1371/journal.pcbi.1000361"
#mendeley_test_doi = "10.1371/journal.pmed.0040215"
#mendeley_test_doi = "10.1371/journal.pone.0000308"
#page = get_mendeley_page(mendeley_test_doi)
#response = get_stats(page)
#print response
| mit | Python | |
de2d21316ca47d1839584a7cccbe8026489ace7d | Change Schema.schema to a property | IATI/iati.core,IATI/iati.core | iati/core/schemas.py | iati/core/schemas.py | """A module containing a core representation of IATI Schemas."""
from lxml import etree
import iati.core.exceptions
import iati.core.resources
import iati.core.utilities
class Schema(object):
"""Represenation of a Schema as defined within the IATI SSOT.
Attributes:
name (str): The name of the Schema.
schema (etree.XMLSchema): An actual Schema that can be used for validation.
codelists (set): The Codelists asspciated with this Schema. This is a read-only attribute.
Todo:
Create a custom dictionary type that prevents overwriting values and only allows the correct types to be added.
"""
def __init__(self, name=None):
"""Initialise a Schema.
Args:
name (str): The name of the schema being initialised.
This name refers to a file contained within the core IATI resources folder.
Raises:
iati.core.exceptions.SchemaError: An error occurred during the creation of the Schema.
Todo:
Allow for generation of schemas outside the IATI SSOT.
Better use the try-except pattern.
"""
self.name = name
self._schema_base = None
self.codelists = set()
if name:
path = iati.core.resources.path_schema(self.name)
try:
loaded_tree = iati.core.resources.load_as_tree(path)
except (IOError, OSError):
msg = "Failed to load tree at '{0}' when creating Schema.".format(path)
iati.core.utilities.log_error(msg)
raise iati.core.exceptions.SchemaError
else:
generated_schema = iati.core.utilities.convert_tree_to_schema(loaded_tree)
if isinstance(generated_schema, etree.XMLSchema):
self._schema_base = generated_schema
@property
def schema(self):
"""A Schema that can be used for validation.
Takes the base Schema that was loaded and dynamically injects elements for content checking.
Raises:
TypeError: If a value being assigned is not an XMLSchema.
Note:
Setting this property will set the base schema, ontop of which content checking is added through the associated Codelists.
Todo:
Implement Codelist content checking.
Implement Ruleset content checking.
"""
return self._schema_base
@schema.setter
def schema(self, value):
if isinstance(value, etree.XMLSchema):
self._schema_base = value
else:
msg = "Schemas must be of type XMLSchemas. Actual type: {0}".format(type(value))
iati.core.utilities.log_error(msg)
raise TypeError(msg)
| """A module containing a core representation of IATI Schemas."""
from lxml import etree
import iati.core.exceptions
import iati.core.resources
import iati.core.utilities
class Schema(object):
"""Represenation of a Schema as defined within the IATI SSOT.
Attributes:
name (str): The name of the Schema.
schema (etree.XMLSchema): An actual Schema that can be used for validation.
codelists (set): The Codelists asspciated with this Schema. This is a read-only attribute.
Todo:
Create a custom dictionary type that prevents overwriting values and only allows the correct types to be added.
"""
def __init__(self, name=None):
"""Initialise a Schema.
Args:
name (str): The name of the schema being initialised.
This name refers to a file contained within the core IATI resources folder.
Raises:
iati.core.exceptions.SchemaError: An error occurred during the creation of the Schema.
Todo:
Allow for generation of schemas outside the IATI SSOT.
Better use the try-except pattern.
"""
self.name = name
self.schema = None
self.codelists = set()
if name:
path = iati.core.resources.path_schema(self.name)
try:
loaded_tree = iati.core.resources.load_as_tree(path)
except (IOError, OSError):
msg = "Failed to load tree at '{0}' when creating Schema.".format(path)
iati.core.utilities.log_error(msg)
raise iati.core.exceptions.SchemaError
else:
generated_schema = iati.core.utilities.convert_tree_to_schema(loaded_tree)
if isinstance(generated_schema, etree.XMLSchema):
self.schema = generated_schema
| mit | Python |
f015f04bf05e6e58efd5fd0f90bbe72745eb60b2 | add experimental blas/lapack waf tool. | cournape/Bento,cournape/Bento,cournape/Bento,cournape/Bento | bento/backends/waf_tools/blas_lapack.py | bento/backends/waf_tools/blas_lapack.py | """Experimental ! This will very likely change"""
import collections
import sys
from bento.commands.options \
import \
Option
from bento.backends.waf_backend \
import \
WAF_TOOLDIR
import waflib
from waflib import Options
_PLATFORM_TO_DEFAULT = collections.defaultdict(lambda: "atlas")
_PLATFORM_TO_DEFAULT.update({
"win32": "mkl",
"darwin": "accelerate",
})
_OPTIMIZED_CBLAS_TO_KWARGS = {
"mkl": {"lib": "mkl_intel_c,mkl_intel_thread,mkl_core,libiomp5md".split(",")},
"atlas": {"lib": ["cblas", "atlas"]},
"accelerate": {"framework": ["Accelerate"]},
"openblas": {"lib": ["openblas"]},
}
_OPTIMIZED_LAPACK_TO_KWARGS = {
"mkl": {"lib": "mkl_lapack95,mkl_blas95,mkl_intel_c,mkl_intel_thread,mkl_core,libiomp5md".split(",")},
"atlas": {"lib": ["lapack", "f77blas", "cblas", "atlas"]},
"accelerate": {"framework": ["Accelerate"]},
"openblas": {"lib": ["openblas"]},
}
def get_optimized_name(context):
o, a = context.options_context.parser.parse_args(context.command_argv)
if o.blas_lapack_type == "default" or o.blas_lapack_type is None:
optimized = _PLATFORM_TO_DEFAULT[sys.platform]
else:
optimized = o.blas_lapack_type
return optimized
def check_cblas(context, optimized):
conf = context.waf_context
msg = "Checking for %s (CBLAS)" % optimized.upper()
kwargs = _OPTIMIZED_CBLAS_TO_KWARGS[optimized]
kwargs.update({"msg": msg, "uselib_store": "CBLAS"})
try:
conf.check_cc(**kwargs)
conf.env.HAS_CBLAS = True
except waflib.Errors.ConfigurationError:
conf.env.HAS_CBLAS = False
def check_lapack(context, optimized):
conf = context.waf_context
msg = "Checking for %s (LAPACK)" % optimized.upper()
if optimized in ["openblas", "atlas"]:
check_fortran(context)
kwargs = _OPTIMIZED_LAPACK_TO_KWARGS[optimized]
kwargs.update({"msg": msg, "uselib_store": "LAPACK"})
try:
conf.check_cc(**kwargs)
conf.env.HAS_LAPACK = True
except waflib.Errors.ConfigurationError:
conf.env.HAS_LAPACK = False
def check_blas_lapack(context):
optimized = get_optimized_name(context)
o, a = context.options_context.parser.parse_args(context.command_argv)
if o.blas_lapack_libdir:
context.waf_context.env.append_value("LIBPATH", o.blas_lapack_libdir)
check_cblas(context, optimized)
check_lapack(context, optimized)
# You can manually set up blas/lapack as follows:
#conf.env.HAS_CBLAS = True
#conf.env.LIB_CBLAS = ["cblas", "atlas"]
#conf.env.HAS_LAPACK = True
#conf.env.LIB_LAPACK = ["lapack", "f77blas", "cblas", "atlas"]
def check_fortran(context):
opts = context.waf_options_context
conf = context.waf_context
opts.load("compiler_fc")
Options.options.check_fc = "gfortran"
conf.load("compiler_fc")
conf.load("ordered_c", tooldir=[WAF_TOOLDIR])
conf.check_fortran_verbose_flag()
conf.check_fortran_clib()
def add_options(global_context):
global_context.add_option_group("configure", "blas_lapack", "blas/lapack")
available_optimized = ",".join(_OPTIMIZED_LAPACK_TO_KWARGS.keys())
global_context.add_option("configure",
Option("--blas-lapack-type", help="Which blas lapack to use (%s)" % available_optimized),
"blas_lapack")
global_context.add_option("configure",
Option("--with-blas-lapack-libdir", dest="blas_lapack_libdir",
help="Where to look for BLAS/LAPACK dir"),
"blas_lapack")
| bsd-3-clause | Python | |
c388e6a4143b3646df5947cb5f596ec137488513 | Add minimal skeleton for plotting script | cphyc/n-body,cphyc/n-body | plot.py | plot.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Minimal skeleton for plotting output of the n-body simulation."""
import argparse

import matplotlib.pyplot as plt
import pandas as pd

# Command-line interface.  Parsed at import time, so importing this module
# consumes sys.argv -- acceptable for a standalone script.
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
    help='The output file (default %(default)s)')
args = parser.parse_args()

if __name__ == '__main__':
    # The original skeleton ended with a bare `if` header, which is a
    # SyntaxError; keep the entry point valid until plotting code lands.
    pass
| mit | Python | |
a7ad8f2075e7661ad9ed539083a8785f7a628b95 | test 1 | FungalPanic/dashsniffer | dashsniffer/sniff.py | dashsniffer/sniff.py | def greet(name):
print 'Hello', name
greet('Jack')
greet('Jill')
greet('Bob')
| mit | Python | |
faf13ff99fd47424c4fb93f1c2a6b3b80c81e0d1 | replace bin<->text converters for ipv6 | fkakuma/ryu,darjus-amzn/ryu,diogommartins/ryu,yamt/ryu,lagopus/ryu-lagopus-ext,yamt/ryu,Tejas-Subramanya/RYU_MEC,openvapour/ryu,elahejalalpour/ELRyu,habibiefaried/ryu,iwaseyusuke/ryu,TakeshiTseng/ryu,elahejalalpour/ELRyu,osrg/ryu,Tejas-Subramanya/RYU_MEC,sivaramakrishnansr/ryu,muzixing/ryu,Tesi-Luca-Davide/ryu,osrg/ryu,darjus-amzn/ryu,ynkjm/ryu,alyosha1879/ryu,lagopus/ryu-lagopus-ext,torufuru/OFPatchPanel,OpenState-SDN/ryu,takahashiminoru/ryu,zyq001/ryu,alyosha1879/ryu,haniehrajabi/ryu,Tejas-Subramanya/RYU_MEC,diogommartins/ryu,jalilm/ryu,jazzmes/ryu,zyq001/ryu,ysywh/ryu,ttsubo/ryu,lzppp/mylearning,elahejalalpour/ELRyu,osrg/ryu,zangree/ryu,castroflavio/ryu,alanquillin/ryu,Tejas-Subramanya/RYU_MEC,shinpeimuraoka/ryu,umkcdcrg01/ryu_openflow,lzppp/mylearning,hisaharu/ryu,citrix-openstack-build/ryu,citrix-openstack/build-ryu,John-Lin/ryu,jkoelker/ryu,muzixing/ryu,haniehrajabi/ryu,mikhaelharswanto/ryu,mikhaelharswanto/ryu,pichuang/ryu,StephenKing/summerschool-2015-ryu,ysywh/ryu,gareging/SDN_Framework,pichuang/ryu,lagopus/ryu-lagopus-ext,haniehrajabi/ryu,jalilm/ryu,lsqtongxin/ryu,gareging/SDN_Framework,jkoelker/ryu,darjus-amzn/ryu,fkakuma/ryu,lsqtongxin/ryu,ynkjm/ryu,Zouyiran/ryu,Tesi-Luca-Davide/ryu,elahejalalpour/ELRyu,lagopus/ryu-lagopus-ext,ntts-clo/ryu,ynkjm/ryu,sivaramakrishnansr/ryu,ttsubo/ryu,alyosha1879/ryu,alanquillin/ryu,torufuru/oolhackathon,OpenState-SDN/ryu,fkakuma/ryu,TakeshiTseng/ryu,zangree/ryu,umkcdcrg01/ryu_openflow,muzixing/ryu,evanscottgray/ryu,OpenState-SDN/ryu,lzppp/mylearning,takahashiminoru/ryu,alanquillin/ryu,diogommartins/ryu,Zouyiran/ryu,gareging/SDN_Framework,ynkjm/ryu,openvapour/ryu,evanscottgray/ryu,fkakuma/ryu,iwaseyusuke/ryu,fujita/ryu,ysywh/ryu,fujita/ryu,ntts-clo/mld-ryu,hisaharu/ryu,jazzmes/ryu,muzixing/ryu,alanquillin/ryu,darjus-amzn/ryu,TakeshiTseng/ryu,zyq001/ryu,yamt/ryu,John-Lin/ryu,takahashiminoru/ryu,sivaramakrishnansr/ryu,habibiefaried/ryu,ynkjm/
ryu,torufuru/oolhackathon,habibiefaried/ryu,Tejas-Subramanya/RYU_MEC,StephenKing/ryu,John-Lin/ryu,ysywh/ryu,Tesi-Luca-Davide/ryu,yamt/ryu,diogommartins/ryu,citrix-openstack/build-ryu,habibiefaried/ryu,gopchandani/ryu,lsqtongxin/ryu,zangree/ryu,jalilm/ryu,muzixing/ryu,hisaharu/ryu,iwaseyusuke/ryu,osrg/ryu,gareging/SDN_Framework,Zouyiran/ryu,openvapour/ryu,haniehrajabi/ryu,o3project/ryu-oe,OpenState-SDN/ryu,umkcdcrg01/ryu_openflow,fujita/ryu,zangree/ryu,iwaseyusuke/ryu,ttsubo/ryu,zyq001/ryu,castroflavio/ryu,shinpeimuraoka/ryu,ttsubo/ryu,shinpeimuraoka/ryu,zyq001/ryu,evanscottgray/ryu,gopchandani/ryu,pichuang/ryu,lzppp/mylearning,o3project/ryu-oe,fujita/ryu,StephenKing/summerschool-2015-ryu,TakeshiTseng/ryu,haniehrajabi/ryu,pichuang/ryu,jkoelker/ryu,jalilm/ryu,openvapour/ryu,alyosha1879/ryu,gopchandani/ryu,umkcdcrg01/ryu_openflow,sivaramakrishnansr/ryu,darjus-amzn/ryu,Tesi-Luca-Davide/ryu,lagopus/ryu-lagopus-ext,yamada-h/ryu,StephenKing/ryu,gareging/SDN_Framework,yamt/ryu,shinpeimuraoka/ryu,StephenKing/ryu,citrix-openstack-build/ryu,castroflavio/ryu,yamada-h/ryu,Zouyiran/ryu,alanquillin/ryu,Tesi-Luca-Davide/ryu,lzppp/mylearning,jalilm/ryu,ysywh/ryu,StephenKing/summerschool-2015-ryu,takahashiminoru/ryu,torufuru/oolhackathon,StephenKing/summerschool-2015-ryu,gopchandani/ryu,osrg/ryu,gopchandani/ryu,hisaharu/ryu,ntts-clo/mld-ryu,zangree/ryu,John-Lin/ryu,hisaharu/ryu,diogommartins/ryu,elahejalalpour/ELRyu,sivaramakrishnansr/ryu,unifycore/ryu,unifycore/ryu,TakeshiTseng/ryu,John-Lin/ryu,StephenKing/ryu,lsqtongxin/ryu,jazzmes/ryu,lsqtongxin/ryu,fkakuma/ryu,torufuru/OFPatchPanel,umkcdcrg01/ryu_openflow,StephenKing/ryu,Zouyiran/ryu,iwaseyusuke/ryu,shinpeimuraoka/ryu,fujita/ryu,StephenKing/summerschool-2015-ryu,habibiefaried/ryu,openvapour/ryu,takahashiminoru/ryu,ntts-clo/ryu,OpenState-SDN/ryu,pichuang/ryu,ttsubo/ryu | ryu/lib/ip.py | ryu/lib/ip.py | from ryu.lib import addrconv
def ipv4_arg_to_bin(w, x, y, z):
    """Pack four IPv4 octets into one unsigned 32-bit integer.

    *w* is the most significant octet: result = w << 24 | x << 16 | y << 8 | z.
    """
    packed = w << 24
    packed |= x << 16
    packed |= y << 8
    packed |= z
    return packed
def ipv4_to_bin(ip):
    """Parse a dotted-decimal IPv4 address string into an unsigned int.

    Each of the four fields is converted with int() and delegated to
    ipv4_arg_to_bin for the packing.
    """
    return ipv4_arg_to_bin(*map(int, ip.split('.')))
def ipv4_to_str(ip):
    """Render an unsigned 32-bit integer as a dotted-decimal IPv4 string.

    ip: unsigned int of form w << 24 | x << 16 | y << 8 | z.
    Returns the string "w.x.y.z".
    """
    octets = [(ip >> shift) & 0xff for shift in (24, 16, 8, 0)]
    return "%i.%i.%i.%i" % tuple(octets)
def ipv6_to_bin(ipv6):
    '''
    Convert an IPv6 address string to its packed binary representation.
    '''
    # Delegates parsing and packing to ryu's addrconv helper.
    return addrconv.ipv6.text_to_bin(ipv6)
def ipv6_to_str(bin_addr):
    '''
    Convert a packed binary IPv6 address to a human readable string.
    '''
    # Delegates formatting to ryu's addrconv helper.
    return addrconv.ipv6.bin_to_text(bin_addr)
| import struct
def ipv4_arg_to_bin(w, x, y, z):
    """Generate unsigned int from components of IP address
    returns: w << 24 | x << 16 | y << 8 | z"""
    # w is the most significant octet of the dotted-decimal address.
    return (w << 24) | (x << 16) | (y << 8) | z
def ipv4_to_bin(ip):
    '''
    Parse an IP address and return an unsigned int.
    The IP address is in dotted decimal notation.
    '''
    # Non-numeric fields raise ValueError; a field count other than four
    # makes the ipv4_arg_to_bin(*args) call raise TypeError.
    args = [int(arg) for arg in ip.split('.')]
    return ipv4_arg_to_bin(*args)
def ipv4_to_str(ip):
    """Generate IP address string from an unsigned int.
    ip: unsigned int of form w << 24 | x << 16 | y << 8 | z
    returns: ip address string w.x.y.z"""
    # Mask each shifted value to a single octet before formatting.
    w = (ip >> 24) & 0xff
    x = (ip >> 16) & 0xff
    y = (ip >> 8) & 0xff
    z = ip & 0xff
    return "%i.%i.%i.%i" % (w, x, y, z)
# struct format: network byte order ('!'), eight unsigned 16-bit groups
# (8 * 16 = 128 bits, one IPv6 address).
IPV6_PACK_STR = '!8H'
def ipv6_to_arg_list(ipv6):
    '''
    Convert an IPv6 address string to a list of its 8 group values.

    Handles the '::' zero-compression form, including addresses where
    '::' sits at either end ('::1', 'ff02::') or stands alone ('::').
    The original version fed the empty string produced by those cases
    into int(..., 16), which raised ValueError.
    '''
    if '::' in ipv6:
        head, tail = ipv6.split('::')
        # Drop the empty fields that splitting a leading/trailing '::'
        # produces, then pad the gap with zero groups.
        head_groups = [int(part, 16) for part in head.split(':') if part]
        tail_groups = [int(part, 16) for part in tail.split(':') if part]
        pad = [0] * (8 - len(head_groups) - len(tail_groups))
        args = head_groups + pad + tail_groups
    else:
        args = [int(part, 16) for part in ipv6.split(':')]
    return args
def ipv6_to_bin(ipv6):
    '''
    convert ipv6 string to binary representation
    '''
    # Split into eight 16-bit group values, then pack them big-endian
    # per IPV6_PACK_STR ('!8H').
    args = ipv6_to_arg_list(ipv6)
    return struct.pack(IPV6_PACK_STR, *args)
def ipv6_to_str(bin_addr):
    """Convert a packed 16-byte IPv6 address into its colon-separated,
    lowercase-hex string form (no '::' compression)."""
    groups = struct.unpack_from(IPV6_PACK_STR, bin_addr)
    hex_groups = ['%x' % group for group in groups]
    return ':'.join(hex_groups)
| apache-2.0 | Python |
f189137d52b9f44db0e82723b0e7a16a602c6523 | Create duplicate_encoder.py | Kunalpod/codewars,Kunalpod/codewars | duplicate_encoder.py | duplicate_encoder.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Duplicate Encoder
#Problem level: 6 kyu
def duplicate_encode(word, st=""):
    """Encode *word* character by character (case-insensitively): '(' for
    characters that occur exactly once, ')' for characters that occur more
    than once.  *st* is an optional prefix for the result.

    Fixes the original signature, which used '.' instead of ',' between the
    parameters (a SyntaxError), and hoists word.lower() out of the loop so
    the string is not re-lowered on every character.
    """
    lowered = word.lower()
    for char in lowered:
        if lowered.count(char) > 1:
            st = st + ')'
        else:
            st = st + '('
    return st
| mit | Python | |
488717ab6c84c771737a3b2ccfe8cbf4d270c9b7 | Implement dragon class | reinikai/mugloar | mugloar/dragon.py | mugloar/dragon.py | import json
class Dragon:
    """A dragon whose combat stats are chosen from the day's weather code."""

    # Class-level defaults: by default the dragon stays home.
    scaleThickness = 0
    clawSharpness = 0
    wingStrength = 0
    fireBreath = 0

    def __init__(self, weather_code):
        # (scaleThickness, clawSharpness, wingStrength, fireBreath) per
        # weather code; None means keep the stay-at-home defaults.
        presets = {
            'T E': (5, 5, 5, 5),          # draught needs a 'balanced' dragon
            'FUNDEFINEDG': (8, 8, 0, 4),  # fog: unseen, no need to fly
            'NMR': (6, 6, 4, 4),
            'SRO': None,                  # storm: stay at home
        }
        # Any other code is treated as rain: fire is useless, so extra
        # claw-sharpening is needed against the umbrellaboats.
        stats = presets.get(weather_code, (5, 10, 5, 0))
        if stats is not None:
            (self.scaleThickness, self.clawSharpness,
             self.wingStrength, self.fireBreath) = stats

    def get_json(self):
        """Return the dragon's stats as a dict shaped for the API payload."""
        stats = {
            "scaleThickness": self.scaleThickness,
            "clawSharpness": self.clawSharpness,
            "wingStrength": self.wingStrength,
            "fireBreath": self.fireBreath,
        }
        return {"dragon": stats}
| mit | Python | |
5f092edf48828f61042c78878474b8c85b62fbdd | Bump version to turn on SET_MAX_FPS. | jaruba/chromium.src,robclark/chromium,hgl888/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,rogerwang/chromium,Jonekee/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,hujiajie/pa-chromium,ondra-novak/chromium.src,littlstar/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,M4sse/chromium.src,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,rogerwang/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,dednal/chromium.src,keishi/chromium,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,Fireblend/chromium-crosswalk,dushu1203/chromium.src,rogerwang/chromium,crosswalk-project/chromium-crosswalk-efl,keishi/chromium,M4sse/chromium.src,robclark/chromium,ltilve/chromium,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,ondra-novak/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,mogoweb/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,rogerwang/chromium,anirudhSK/chromium,littlstar/chromium.src,robclark/chromium,rob
clark/chromium,nacl-webkit/chrome_deps,robclark/chromium,ltilve/chromium,littlstar/chromium.src,Just-D/chromium-1,littlstar/chromium.src,dushu1203/chromium.src,littlstar/chromium.src,M4sse/chromium.src,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,dednal/chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,patrickm/chromium.src,timopulkkinen/BubbleFish,M4sse/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,keishi/chromium,markYoungH/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,patrickm/chromium.src,ChromiumWebApps/chromium,keishi/chromium,timopulkkinen/BubbleFish,Just-D/chromium-1,keishi/chromium,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,markYoungH/chromium.src,nacl-webkit/chrome_deps,markYoungH/chromium.src,Just-D/chromium-1,mogoweb/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,zcbenz/cefode-chromium,jaruba/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,jaruba/chromium.src,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,robclark/chromium,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,Fireblend/ch
romium-crosswalk,littlstar/chromium.src,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,Just-D/chromium-1,dednal/chromium.src,hujiajie/pa-chromium,nacl-webkit/chrome_deps,fujunwei/chromium-crosswalk,anirudhSK/chromium,patrickm/chromium.src,junmin-zhu/chromium-rivertrail,krieger-od/nwjs_chromium.src,keishi/chromium,Jonekee/chromium.src,M4sse/chromium.src,ltilve/chromium,robclark/chromium,pozdnyakov/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,ltilve/chromium,junmin-zhu/chromium-rivertrail,mogoweb/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,rogerwang/chromium,bright-sparks/chromium-spacewalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,markYoungH/chromium.src,keishi/chromium,bright-sparks/chromium-spacewalk,littlstar/chromium.src,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,dednal/chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,hujiajie/pa-chromium,dushu1203/chromi
um.src,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,Chilledheart/chromium,anirudhSK/chromium,chuan9/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,hujiajie/pa-chromium,markYoungH/chromium.src,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,rogerwang/chromium,M4sse/chromium.src,patrickm/chromium.src,robclark/chromium,keishi/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,anirudhSK/chromium,anirudhSK/chromium,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,bright-sparks/chromium-spacewalk,anirudhSK/chromium,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,keishi/chromium,zcbenz/cefode-chromium,robclark/chromium,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk,keishi/chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,robclark/chromium,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,jaruba/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,Chilledheart/chromium,jaruba/chromium.src,zcbenz/cefode-chromium,hujiajie/pa-chromium,axinging/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,keishi/chromium,ondra-novak/chromium.
src,rogerwang/chromium,hujiajie/pa-chromium,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,rogerwang/chromium,Just-D/chromium-1,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,nacl-webkit/chrome_deps,Jonekee/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,zcbenz/cefode-chromium,jaruba/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,rogerwang/chromium,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src | o3d/installer/win/o3d_version.py | o3d/installer/win/o3d_version.py | #!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 2)
# The SDK version aliases the plugin version (same tuple object).
sdk_version = plugin_version
| #!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 1)
# The SDK version aliases the plugin version (same tuple object).
sdk_version = plugin_version
| bsd-3-clause | Python |
beeb3065e2d366dd68021eb5f55c94e2c61684e4 | add experiment script | JasonWayne/avazu-essay,JasonWayne/avazu-essay | ftrl/single_feature_experiment.py | ftrl/single_feature_experiment.py | import subprocess
for i in range(3, 23):
print "\n\n\nrun field " + str(i) + "\n"
subprocess.call("python ftrl/ftrl.py train.raw.csv test.raw.csv submission.csv {0}".format(i).split(" "), shell=True) | mit | Python | |
e5f130c1f006d2b96ca81be5a9f66c15b97b8793 | Create sol2.py | TheAlgorithms/Python | project_euler/problem_12/sol2.py | project_euler/problem_12/sol2.py | def triangle_number_generator():
for n in range(1,1000000):
yield n*(n+1)//2
def count_divisors(n):
    """Return the number of positive divisors of n.

    Divisors are counted in pairs (i, n // i) for i up to sqrt(n); when n
    is a perfect square, its square root pairs with itself and counts once.
    The original version skipped the square-root divisor entirely,
    undercounting perfect squares (e.g. reporting 2 for n = 4 instead of 3).
    """
    total = 0
    for i in range(1, int(n**0.5) + 1):
        if n % i == 0:
            total += 1 if i * i == n else 2
    return total
print(next(i for i in triangle_number_generator() if count_divisors(i) > 500))
| mit | Python | |
5938a5d751bcac40eac2bf7f7090e1970f097ebc | Add py-rq (#19175) | iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/py-rq/package.py | var/spack/repos/builtin/packages/py-rq/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyRq(PythonPackage):
"""RQ (Redis Queue) is a simple Python library for queueing
jobs and processing them in the background with workers."""
homepage = "https://github.com/rq/rq"
url = "https://github.com/rq/rq/archive/v1.5.2.tar.gz"
version('1.5.2', sha256='e8e7b6ffc4a962837eaff8eb0137514346e629633bf08550a1649771cdc4ace6')
version('1.5.1', sha256='36ca5cd2762b5b15bb176943f77da933fac6c2f4e5b5d47a0475f918c167fd4c')
version('1.5.0', sha256='97443acd8aab1c273710f74db197838f68a0678f9cabb64c3598dfb816d35e13')
version('1.4.3', sha256='a971aa16d346d1c145442af3bfb171ea620f375d240fbade3c42c2246d3d698a')
version('1.4.2', sha256='478bd19ac4f66d3066459f5e8253cf5f477bfe128f69ed952f7565cb530ac6a4')
version('1.4.1', sha256='fe158e3d9d4efe533f5698738f14e975656e396cd280c6acfd45952dc5ddfc66')
version('1.4.0', sha256='03cd39392d31d00205bd1d84930e9b7aefc5d3ac9770c59092bdd8a94fc8a47d')
version('1.3.0', sha256='ce94d07125b96313e8c4512b30c62da290ae6f5eeff60b8c3e2a0a08055f5608')
version('1.2.2', sha256='ea71f805d4e3b972b4df5545529044df4bc0fbae30814a48bc28d8d0a39c0068')
version('1.2.1', sha256='0b38344cda68710e572df9c70b733e95f1cdf13ce727a970f68307cedc98376a')
depends_on('python@3.5:3.8', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-redis@3.5.0:', type=('build', 'run'))
depends_on('py-click@5.0.0:', type=('build', 'run'))
| lgpl-2.1 | Python | |
be3428c9fe6de7741cec7f3899bcc71049b113ca | Create HR_IntroToConditionalStatements.py | bluewitch/Code-Blue-Python | HR_IntroToConditionalStatements.py | HR_IntroToConditionalStatements.py | #!/bin/python3
import math
import os
import random
import re
import sys
if __name__ == '__main__':
    # Read an integer N from stdin and classify it:
    # odd -> "Weird", even -> "Not Weird".
    number = int(input())
    verdict = 'Weird' if number % 2 else 'Not Weird'
    print(verdict)
| mit | Python | |
9f2fc78155dc725842ebbc82203994e26d1c7333 | Add marv_ros skeleton for ROS specific code | ternaris/marv-robotics,ternaris/marv-robotics | code/marv-robotics/marv_ros/__init__.py | code/marv-robotics/marv_ros/__init__.py | # Copyright 2019 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
| agpl-3.0 | Python | |
cb29ce461eb143dc44b244576b153a0b7a3b1a7d | Create missing_element.py | HeyIamJames/CodingInterviewPractice,HeyIamJames/CodingInterviewPractice | missing_element.py | missing_element.py | """
There is an array of non-negative integers.
A second array is formed by shuffling the elements of the first array and deleting a random element.
Given these two arrays, find which element is missing in the second array.
http://www.ardendertat.com/2012/01/09/programming-interview-questions/
"""
| mit | Python | |
527288828306c3620442e611fc9fb23180ee09fe | Add remove-nth-node-from-end-of-list | ibigbug/leetcode | remove-nth-node-from-end-of-list.py | remove-nth-node-from-end-of-list.py | # Link: https://leetcode.com/problems/remove-nth-node-from-end-of-list/
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    # @param {ListNode} head
    # @param {integer} n
    # @return {ListNode}
    def removeNthFromEnd(self, head, n):
        """Remove the n-th node from the end of a singly linked list and
        return the (possibly new) head.

        Two-pointer technique: advance a lead pointer n steps, then walk a
        trail pointer alongside it until the lead reaches the final node;
        the trail pointer then sits just before the node to delete.
        """
        # Empty list, or a single node (whose removal empties the list).
        if head is None or head.next is None:
            return None
        lead = head
        for _ in range(n):
            lead = lead.next
        # The lead walked off the list: the head itself is n-th from the end.
        if lead is None:
            return head.next
        trail = head
        while lead.next is not None:
            lead = lead.next
            trail = trail.next
        trail.next = trail.next.next
        return head
| mit | Python | |
90218ad99cf9d9f4599f065790ac4d388adc3521 | Add markup template filter. | ubernostrum/blog | blog/templatetags/markup.py | blog/templatetags/markup.py | from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from blog.models import markup
register = template.Library()
@register.filter
@stringfilter
def markup(value):
    """Template filter rendering *value* through blog.models.markup and
    marking the result safe for template output.

    This filter deliberately carries the same name as the helper imported
    at module level, so the function definition shadows that import.  The
    original body called ``markup(value)``, which therefore recursed into
    itself forever instead of rendering anything.  Resolve the helper
    through its module to sidestep the shadowing.
    """
    from blog import models  # local import: the global name is shadowed
    result = models.markup(value)
    return mark_safe(result)
| bsd-3-clause | Python | |
28ed25a30ed495cce2d5ace3ac12c00f35f9dbcd | Add AmazonDriver | cloudcomputinghust/CAL | calplus/v1/object_storage/drivers/amazon.py | calplus/v1/object_storage/drivers/amazon.py | import boto3
from calplus.v1.object_storage.drivers.base import BaseDriver, BaseQuota
PROVIDER = 'AMAZON'
class AmazonDriver(BaseDriver):
    """Object-storage driver backed by Amazon S3 via boto3.

    Maps the CAL container/object vocabulary onto S3 buckets and keys.
    """

    def __init__(self, cloud_config):
        # Required credentials and endpoint come straight from the config
        # dict (KeyError if absent); region/driver name/limit are optional.
        super(AmazonDriver, self).__init__()
        self.aws_access_key_id = cloud_config['aws_access_key_id']
        self.aws_secret_access_key = cloud_config['aws_secret_access_key']
        self.endpoint_url = cloud_config['endpoint_url']
        self.region_name = cloud_config.get('region_name', None)
        self.driver_name = \
            cloud_config.get('driver_name', 'default')
        self.limit = cloud_config.get('limit', None)
        self._setup()

    def _setup(self):
        # Build the boto3 S3 client and the quota helper once per driver.
        parameters = {
            'aws_access_key_id': self.aws_access_key_id,
            'aws_secret_access_key': self.aws_secret_access_key,
            'region_name': self.region_name,
            'endpoint_url': self.endpoint_url
        }
        self.client = boto3.client('s3', **parameters)
        self.quota = AmazonQuota(self.client, self.limit)

    def create_container(self, container, **kwargs):
        # S3 buckets map one-to-one onto containers.
        return self.client.create_bucket(Bucket=container, **kwargs)

    def delete_container(self, container):
        return self.client.delete_bucket(Bucket=container)

    def list_containers(self):
        return self.client.list_buckets()

    def stat_container(self, container):
        # HEAD request: metadata only, no listing.
        return self.client.head_bucket(Bucket=container)

    def update_container(self, container, headers, **kwargs):
        # Not implemented for S3 yet.
        pass

    def upload_object(self, container, obj, contents,
                      content_length=None, **kwargs):
        # NOTE(review): ContentLength is forwarded even when it is None --
        # confirm boto3 accepts that for the intended callers.
        return self.client.put_object(Bucket=container, Key=obj,
                                      ContentLength=content_length,
                                      Body=contents)

    def download_object(self, container, obj, **kwargs):
        return self.client.get_object(Bucket=container, Key=obj)

    def stat_object(self, container, obj):
        return self.client.head_object(Bucket=container, Key=obj)

    def delete_object(self, container, obj, **kwargs):
        return self.client.delete_object(Bucket=container, Key=obj,
                                         **kwargs)

    def list_container_objects(self, container):
        return self.client.list_objects(Bucket=container)

    def update_object(self, container, obj, headers, **kwargs):
        # Not implemented for S3 yet.
        pass

    def copy_object(self, container, obj, destination=None, **kwargs):
        # Server-side copy within the same bucket, writing to *destination*.
        copysource = {
            'Bucket': container,
            'Key': obj
        }
        return self.client.copy_object(Bucket=container, Key=destination,
                                       CopySource=copysource)
class AmazonQuota(BaseQuota):
    """Quota helper for the Amazon object-storage driver.

    Holds the shared boto3 client and an optional quota limit.
    """

    def __init__(self, client, limit=None):
        super(AmazonQuota, self).__init__()
        self.client = client
        self.limit = limit
        self._setup()

    def _setup(self):
        # No extra setup performed yet; presumably kept for interface
        # parity with other quota classes -- confirm before removing.
        pass
| apache-2.0 | Python | |
b78518df363fb1cb398c70920f219ca9be78f816 | Test another implementation of scipy's _spectral | pombredanne/pythran,pombredanne/pythran,serge-sans-paille/pythran,serge-sans-paille/pythran,pombredanne/pythran | pythran/tests/scipy/_spectral.py | pythran/tests/scipy/_spectral.py | # Author: Pim Schellart
# 2010 - 2011
"""Tools for spectral analysis of unequally sampled signals."""
import numpy as np
#pythran export _lombscargle(float64[], float64[], float64[])
##runas import numpy; x = numpy.arange(2., 12.); y = numpy.arange(1., 11.); z = numpy.arange(3., 13.); _lombscargle(x, y, z)
def _lombscargle(x, y, freqs):
"""
_lombscargle(x, y, freqs)
Computes the Lomb-Scargle periodogram.
Parameters
----------
x : array_like
Sample times.
y : array_like
Measurement values (must be registered so the mean is zero).
freqs : array_like
Angular frequencies for output periodogram.
Returns
-------
pgram : array_like
Lomb-Scargle periodogram.
Raises
------
ValueError
If the input arrays `x` and `y` do not have the same shape.
See also
--------
lombscargle
"""
# Check input sizes
if x.shape != y.shape:
raise ValueError("Input arrays do not have the same size.")
# Create empty array for output periodogram
pgram = np.empty_like(freqs)
c = np.empty_like(x)
s = np.empty_like(x)
for i in range(freqs.shape[0]):
xc = 0.
xs = 0.
cc = 0.
ss = 0.
cs = 0.
c[:] = np.cos(freqs[i] * x)
s[:] = np.sin(freqs[i] * x)
for j in range(x.shape[0]):
xc += y[j] * c[j]
xs += y[j] * s[j]
cc += c[j] * c[j]
ss += s[j] * s[j]
cs += c[j] * s[j]
if freqs[i] == 0:
raise ZeroDivisionError()
tau = np.arctan2(2 * cs, cc - ss) / (2 * freqs[i])
c_tau = np.cos(freqs[i] * tau)
s_tau = np.sin(freqs[i] * tau)
c_tau2 = c_tau * c_tau
s_tau2 = s_tau * s_tau
cs_tau = 2 * c_tau * s_tau
pgram[i] = 0.5 * (((c_tau * xc + s_tau * xs)**2 / \
(c_tau2 * cc + cs_tau * cs + s_tau2 * ss)) + \
((c_tau * xs - s_tau * xc)**2 / \
(c_tau2 * ss - cs_tau * cs + s_tau2 * cc)))
return pgram
| bsd-3-clause | Python | |
a1e679b4b0802f1c40d08f1f7cba212b13de61a4 | Create testing2.py | aldmbmtl/toolbox | myPack/testing2.py | myPack/testing2.py | import aldmbmtl
aldmbmtl.toolbox.myPack.testing.test()
| mit | Python | |
0ae07ef204806ab45b746df16371c3925ea894e9 | Create problem6.py | CptDemocracy/Python | Project-Euler/Problem6/problem6.py | Project-Euler/Problem6/problem6.py | """
[ref.href] https://projecteuler.net/problem=6
Sum square difference.
The sum of the squares of the first ten natural numbers is:
1^2 + 2^2 + ... + 10^2 = 385
The square of the sum of the first ten natural numbers is:
(1 + 2 + ... + 10)^2 = 55^2 = 3025
Hence the difference between the sum of the squares of the
first ten natural numbers and the square of the sum is:
3025 - 385 = 2640.
Find the difference between the sum of the squares of the
first one hundred natural numbers and the square of the sum.
"""
# -- Python 2 script: uses the print statement and a list-returning map(). --
smallestNaturalNumber = 1
naturalNumberCount = 100
# The end of the range is shifted by the start offset so that exactly
# naturalNumberCount values are produced.
nums = range(smallestNaturalNumber,
             naturalNumberCount + smallestNaturalNumber)
squares = map(lambda x : x ** 2, nums)
sumOfSquares = sum(squares)
squaredSumOfNums = sum(nums) ** 2
# Project Euler 6: (1 + ... + 100)^2 minus (1^2 + ... + 100^2).
diff = squaredSumOfNums - sumOfSquares
print "The difference between the sum of squares of the first "\
      + str(naturalNumberCount) + " is " + str(diff) + "."
| mit | Python | |
a8b07a61b56f87509f33cd3f79e7800837ef4f29 | Add lc0189_rotate_array.py | bowen0701/algorithms_data_structures | lc0189_rotate_array.py | lc0189_rotate_array.py | """Leetcode 189. Rotate Array
Easy
URL: https://leetcode.com/problems/rotate-array/
Given an array, rotate the array to the right by k steps, where k is non-negative.
Example 1:
Input: [1,2,3,4,5,6,7] and k = 3
Output: [5,6,7,1,2,3,4]
Explanation:
rotate 1 steps to the right: [7,1,2,3,4,5,6]
rotate 2 steps to the right: [6,7,1,2,3,4,5]
rotate 3 steps to the right: [5,6,7,1,2,3,4]
Example 2:
Input: [-1,-100,3,99] and k = 2
Output: [3,99,-1,-100]
Explanation:
rotate 1 steps to the right: [99,-1,-100,3]
rotate 2 steps to the right: [3,99,-1,-100]
Note:
- Try to come up as many solutions as you can,
there are at least 3 different ways to solve this problem.
- Could you do it in-place with O(1) extra space?
"""
class Solution(object):
    def rotate(self, nums, k):
        """
        :type nums: List[int]
        :type k: int
        :rtype: None Do not return anything, modify nums in-place instead.
        """
        # The original body was an unimplemented stub (`pass`); implement
        # the right-rotation in place via slice reassignment.
        if not nums:
            return
        k %= len(nums)  # rotating by a multiple of len(nums) is a no-op
        if k:
            nums[:] = nums[-k:] + nums[:-k]
def main():
    # Placeholder driver -- no example wired up yet.
    pass


if __name__ == '__main__':
    main()
| bsd-2-clause | Python | |
7bae6e3f490f4986f07ce45bf333a5982b505bd4 | add 255 | ufjfeng/leetcode-jf-soln,ufjfeng/leetcode-jf-soln | python/255_verify_preorder_sequence_in_binary_search_tree.py | python/255_verify_preorder_sequence_in_binary_search_tree.py | """
Given an array of numbers, verify whether it is the correct preorder traversal
sequence of a binary search tree.
You may assume each number in the sequence is unique.
Follow up:
Could you do it using only constant space complexity?
"""
class Solution(object):
    def verifyPreorder(self, preorder):
        """Check whether *preorder* could be the preorder traversal of a
        binary search tree (values assumed unique).

        Monotonic-stack scan in O(n) time: the stack holds ancestors whose
        right subtree has not been entered yet.  Popping a value means we
        stepped into its right subtree, so that value becomes a lower bound
        for every later element.

        Generalized: the lower bound starts at float('-inf') instead of the
        original -2**31 sentinel, so inputs containing values below -2**31
        are handled correctly.

        :type preorder: List[int]
        :rtype: bool
        """
        if preorder is None or preorder == []:
            return True
        lower_bound = float('-inf')
        stack = []
        for value in preorder:
            # Once inside a right subtree, nothing may undercut its root.
            if value < lower_bound:
                return False
            while stack and value > stack[-1]:
                lower_bound = stack.pop()
            stack.append(value)
        return True
from binarySearchTree import *
a = Solution()
t = BST([10,5,12,2,6])
print(t.toList(order=-1))
print(a.verifyPreorder(t.toList(order=-1)) == True)
| mit | Python | |
86686926809bfef55b71618888eec6667faaeec9 | complete 26 reciprocal cycles | dawran6/project-euler | 26-reciprocal-cycles.py | 26-reciprocal-cycles.py | """Based on chillee's answer at Fri, 6 Jan 2017, 05:06:
There's so many convoluted substring solutions.
1/3 = 3/9 = 0.(3)
1/7 = 148257/999999 = 0.(148257)
Therefore, the length of the repeating portion is length of the numerator when
you set the denominator equal to some string of 9s.
There's one other thing to keep in mind, which is that 1/5 and 1/2 have
terminating decimals, as 5 and 2 are the only divisors of 10. So, for example,
1/6 = 1/3 * 1/2, and it's clear that the length of the repeating decimal is the
length of the repeating decimal of 1/3. And if the prime factors of the
denominator are only composed of 2's and 5's, the denominator is terminating.
"""
def cycle_length(x):
    """Return the length of the repeating block in the decimal of 1/x.

    Factors of 2 and 5 only shift the decimal point, so strip them first;
    if nothing else remains the expansion terminates (length 0).  Otherwise
    the answer is the digit count of the smallest 99...9 divisible by x,
    found by tracking remainders instead of building huge repunit integers.
    """
    for shift_factor in (2, 5):
        while x % shift_factor == 0:
            x //= shift_factor
    if x == 1:
        return 0
    length = 1
    remainder = 9 % x
    while remainder != 0:
        # Appending another 9 digit: t -> 10*t + 9, tracked modulo x.
        remainder = (remainder * 10 + 9) % x
        length += 1
    return length
if __name__ == '__main__':
    # Pair every denominator 1..1000 with its cycle length and report the
    # one with the longest recurring cycle (Project Euler problem 26).
    pairs = [(d, cycle_length(d)) for d in range(1, 1001)]
    print(max(pairs, key=lambda pair: pair[1]))
| mit | Python | |
4c6442382adcb716ea817fbc781a402dec36aac9 | set app.debug = True. | total-impact/software,Impactstory/impactstory-tng,total-impact/depsy,total-impact/depsy,total-impact/software,total-impact/depsy,Impactstory/biomed,total-impact/software,Impactstory/depsy,Impactstory/impactstory-tng,Impactstory/impactstory-tng,total-impact/biomed,Impactstory/oadoi,Impactstory/oadoi,Impactstory/biomed,Impactstory/citeas-api,total-impact/biomed,Impactstory/depsy,total-impact/depsy,Impactstory/sherlockoa,total-impact/software,total-impact/biomed,Impactstory/depsy,Impactstory/depsy,Impactstory/citeas-api,Impactstory/biomed,Impactstory/citeas-api,Impactstory/oadoi,Impactstory/sherlockoa,Impactstory/biomed,total-impact/biomed | app.py | app.py | from flask import Flask
import redis
import os
from rq import Queue
app = Flask(__name__)
# NOTE(review): debug mode enables the interactive traceback page -- fine
# for development, but make sure it is disabled before any public deploy.
app.debug = True
# General-purpose Redis connection on db 10; REDIS_URL overrides localhost.
my_redis = redis.from_url(
    os.getenv("REDIS_URL", "redis://127.0.0.1:6379"),
    db=10
)
# Separate Redis database (db 14) reserved for the RQ job queues below.
redis_rq_conn = redis.from_url(
    os.getenv("REDIS_URL", "redis://127.0.0.1:6379"),
    db=14
)
# Named work queues consumed by background RQ workers.
scopus_queue = Queue("scopus", connection=redis_rq_conn)
refset_queue = Queue("refset", connection=redis_rq_conn)
| from flask import Flask
import redis
import os
from rq import Queue
app = Flask(__name__)
# Shared Redis handle on database 10 (REDIS_URL env var overrides default).
my_redis = redis.from_url(
    os.getenv("REDIS_URL", "redis://127.0.0.1:6379"),
    db=10
)
# Dedicated connection (db 14) backing the RQ queues.
redis_rq_conn = redis.from_url(
    os.getenv("REDIS_URL", "redis://127.0.0.1:6379"),
    db=14
)
# Background job queues processed by RQ workers.
scopus_queue = Queue("scopus", connection=redis_rq_conn)
refset_queue = Queue("refset", connection=redis_rq_conn)
| mit | Python |
3cd4a151f9f03ecf2674348e9377e00346bbd849 | add first revision of the script | falsovsky/RTPapd | rtp.py | rtp.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
import urllib2
import re
import unicodedata
import os
import string
validFilenameChars = "-_. %s%s" % (string.ascii_letters, string.digits)
def removeDisallowedFilenameChars(filename):
    # Python 2 code: transliterate the unicode name to its closest ASCII
    # form, then keep only characters in validFilenameChars (dash,
    # underscore, dot, space, ASCII letters and digits).  Under Python 3
    # iterating the encoded bytes would yield ints, so this is py2-only.
    cleanedFilename = unicodedata.normalize('NFKD', filename).encode('ASCII', 'ignore')
    return ''.join(c for c in cleanedFilename if c in validFilenameChars)
def parseRTMP(url,dt):
    # Fetch the episode page and pull the RTMP stream parameters
    # ("file", "application", "streamer") out of the embedded player config.
    url = 'http://www.rtp.pt' + url
    page = urllib2.urlopen(url)
    match = re.search('"file": "(.*?)","application": "(.*?)","streamer": "(.*?)"', page.read(), re.MULTILINE)
    if match:
        # NOTE(review): fn is computed but never used afterwards.
        fn = match.group(1).split('/')[5].replace('.mp3', '.flv')
        # Build the rtmpdump command line.  NOTE(review): dt comes from
        # scraped page text and is interpolated into a shell command --
        # potential shell-injection risk; consider subprocess with a list.
        cmd = 'rtmpdump -r "rtmp://' + match.group(3) + '/' + match.group(2) + '" -y "mp3:' + match.group(1) + '" -o "'+ dt + '.flv"'
        #print cmd
        # Skip episodes that were already downloaded and converted.
        if os.path.isfile(dt+'.mp3'):
            print "- Ja downloadada... a ignorar"
            return
        print "- A sacar..."
        os.system(cmd + "> /dev/null 2>&1")
        print "- A extrair mp3 do flv..."
        # Copy the MP3 audio track out of the FLV container, then drop the FLV.
        os.system('ffmpeg -i "' + dt + '.flv" -acodec copy "'+dt+'.mp3" > /dev/null 2>&1')
        os.remove(dt + '.flv')
        print "- Done"
# Programme id to scrape (hard-coded; note this shadows the builtin id()).
id = "1085"
# Grab the total number of result pages from the "Fim" (last page) link.
url = "http://www.rtp.pt/play/browseprog/"+id+"/1/true"
page = urllib2.urlopen(url)
match = re.search(r'<a title="Fim.*?,page:(\d+)\}', page.read(), re.MULTILINE)
if match:
    totalpages = match.group(1)
else:
    # The original had a bare "exit" here, which only references the builtin
    # without calling it (a no-op), after which the script crashed later on
    # an undefined totalpages.  Abort explicitly instead (works on py2/py3).
    raise SystemExit("could not determine the total number of pages")
# NOTE(review): range(1, int(totalpages)) never visits page totalpages
# itself -- confirm whether the last page should be included.
for c in range(1,int(totalpages)):
    print "--- Pagina " + str(c)
    url = "http://www.rtp.pt/play/browseprog/"+id+"/"+str(c)+"/"
    page = urllib2.urlopen(url)
    soup = BeautifulSoup(page.read())
    # Collect every episode entry on this listing page.
    items = soup.findAll('div',{'class': 'Elemento'})
    for item in items:
        # Episode link.
        link = item.find('a')
        # Date label; spaces replaced so it can be used in a file name.
        dt = item.find('b').contents[0].strip()
        dt = dt.replace(' ', '_')
        # Part / episode label.
        pt = item.find('p').contents[0].strip()
        pt = pt.replace(' ', '_')
        print "-- " + dt, pt
        title = removeDisallowedFilenameChars(dt + "-" + pt)
        parseRTMP(link['href'],title)
| bsd-2-clause | Python | |
8947167b0442b8d03cfd328fd77961a864f54638 | Create double.py | NendoTaka/CodeForReference,NendoTaka/CodeForReference,NendoTaka/CodeForReference | CodeWars/8kyu/double.py | CodeWars/8kyu/double.py | def doubleInteger(i):
    # Return the value added to itself (doubles numbers; concatenates sequences).
    return i + i
| mit | Python | |
3f2a1aa0ce76dc50662e11da50149d0de231c848 | add keys | daya0576/matelook_mini-facebook,daya0576/matelook_mini-facebook,daya0576/matelook_mini-facebook | keys.py | keys.py | G_EMAIL_KEY = ""
| mit | Python | |
cec1cc8082854a0fd61ea83bb69ba1e9d013b089 | Create libs.py | ink-ru/sublime-triks,ink-ru/sublime-triks | libs.py | libs.py | # coding: utf-8
'''Библиотеки SEO модуля'''
import sublime, sublime_plugin, re, urllib
class xenuTools:
    # Helpers that turn a site's robots.txt into one exclusion regex for a
    # Xenu-style link checker.  Methods are written without "self" and are
    # called on the class itself (e.g. xenuTools.getrobots(url)).
    def download_url_to_string(url):
        """Fetch *url* and return the raw response body (bytes)."""
        request = urllib.request.Request(url)
        response = urllib.request.urlopen(request)
        html = response.read()
        return html
    def getrobots(url):
        """Download robots.txt at *url* and compile its Disallow rules into
        a single alternation-based regular expression string."""
        #TODO: split single line files
        robots_rules = ''
        robots = xenuTools.download_url_to_string(url)
        # remove leading and trailing white space
        robots = robots.strip()
        # put each line into a list
        robots_list = robots.decode("utf-8").strip().splitlines()
        for item in robots_list:
            # Only "Disallow: <path>" lines are of interest.
            mach = re.search('^Disallow: +([^\s]+)$', item, flags=re.IGNORECASE)
            if item == "" or mach == None:
                continue
            item = mach.group(1)
            if item.find('#') > 0:
                # comment removing
                item = re.sub(r"([^#]*)#.*", r"\1", item)
            # Translate robots.txt wildcards into regex syntax.
            item = re.sub(r"\*$", "", item)
            item = item.replace("*", ".*").replace("?", "\?").replace("$", "\n").strip()
            robots_rules = robots_rules + item + '|'
        # Drop the trailing "|" separator.
        robots_rules = robots_rules[:-1]
        # TODO: cut images
        # Anchor the combined rules to absolute http(s) URLs.
        # NOTE(review): with no Disallow matches this becomes an empty
        # alternation "()" that matches everything -- confirm intended.
        robots_rules = r'(?s)^https?:\S+('+robots_rules+')(.*?)\n\n'
        return robots_rules
| cc0-1.0 | Python | |
25d90937ecce5e18e32a9f7e14b5744d21c14cfb | add simple box zoom example | huongttlan/mpld3,giserh/mpld3,danielballan/mpld3,jakevdp/mpld3,litaotao/mpld3,jayhetee/mpld3,mlovci/mpld3,e-koch/mpld3,aflaxman/mpld3,void32/mpld3,jakevdp/mpld3,CrazyGuo/mpld3,fdeheeger/mpld3,void32/mpld3,ahnitz/mpld3,jakirkham/mpld3,jrkerns/mpld3,fdeheeger/mpld3,linearregression/mpld3,aflaxman/mpld3,CrazyGuo/mpld3,kdheepak89/mpld3,keflavich/mpld3,mpld3/mpld3,mlovci/mpld3,etgalloway/mpld3,e-koch/mpld3,keflavich/mpld3,kdheepak89/mpld3,linearregression/mpld3,jayhetee/mpld3,jrkerns/mpld3,mpld3/mpld3,ahnitz/mpld3,etgalloway/mpld3,danielballan/mpld3,huongttlan/mpld3,Jiangshangmin/mpld3,jakirkham/mpld3,Jiangshangmin/mpld3,litaotao/mpld3,giserh/mpld3 | examples/box_zoom.py | examples/box_zoom.py | import mpld3
from mpld3.plugins import PluginBase
class BoxZoomPlugin(PluginBase):
"""Box Zoom"""
JAVASCRIPT = r"""
mpld3.BoxZoomPlugin = function(fig, prop){
this.fig = fig;
this.prop = mpld3.process_props(this, prop, {}, []);
// add a button to enable/disable box zoom
mpld3.ButtonFactory({
toolbarKey: "boxzoom",
icon: function(){return mpld3.icons["zoom"];},
onClick: this.onClick.bind(this),
activate: this.activate.bind(this),
deactivate: this.deactivate.bind(this),
post_draw: this.post_draw.bind(this),
});
this.fig.prop.toolbar.push("boxzoom");
};
mpld3.BoxZoomPlugin.prototype.onClick = function(){this.toggle()};
mpld3.BoxZoomPlugin.prototype.activate = function(){this.enable()};
mpld3.BoxZoomPlugin.prototype.deactivate = function(){this.disable()};
mpld3.BoxZoomPlugin.prototype.post_draw = function(){this.disable()};
mpld3.BoxZoomPlugin.prototype.draw = function(){
mpld3.insert_css("#" + this.fig.figid + " rect.extent",
{"fill": "#fff",
"fill-opacity": 0,
"stroke": "#999"});
var brush = d3.svg.brush()
.x(this.fig.axes[0].x)
.y(this.fig.axes[0].y)
.on("brushend", brushend.bind(this));
this.fig.root.selectAll(".mpld3-axes")
.data(this.fig.axes)
.call(brush)
this.enable = function(){
brush.on("brushstart", brushstart);
this.fig.canvas.selectAll("rect.background")
.style("cursor", "crosshair");
this.fig.canvas.selectAll("rect.extent, rect.resize")
.style("display", null);
this.fig.canvas.selectAll(".mpld3-boxzoombutton")
.classed({pressed: true});
this.fig.disable_zoom();
this.enabled = true;
}
this.disable = function(){
brush.on("brushstart", null).clear();
this.fig.canvas.selectAll("rect.background")
.style("cursor", null);
this.fig.canvas.selectAll("rect.extent, rect.resize")
.style("display", "none");
this.fig.canvas.selectAll(".mpld3-boxzoombutton")
.classed({pressed: false});
this.enabled = false;
}
this.toggle = function(){
this.enabled ? this.disable() : this.enable();
}
function brushstart(d, i){
brush.x(d.x).y(d.y);
}
function brushend(d, i){
if(this.enabled){
var extent = brush.extent();
if(extent[0][0] != extent[1][0] &&
extent[0][1] != extent[1][1]){
console.log(extent);
d.set_axlim([extent[0][0], extent[1][0]],
[extent[0][1], extent[1][1]]);
}
}
d.axes.call(brush.clear());
}
}
mpld3.register_plugin("boxzoom", mpld3.BoxZoomPlugin);
mpld3.icons['zoom'] = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABmJLR0QA/wD/AP+gvaeTAAAACXBI\nWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH3gMPDiIRPL/2oQAAANBJREFUOMvF0b9KgzEcheHHVnCT\nKoI4uXbtLXgB3oJDJxevw1VwkoJ/NjepQ2/BrZRCx0ILFURQKV2kyOeSQpAmn7WDB0Lg955zEhLy\n2scdXlBggits+4WOQqjAJ3qYR7NGLrwXGU9+sGbEtlIF18FwmuBngZ+nCt6CIacC3Rx8LSl4xzgF\nn0tusBn4UyVhuA/7ZYIv5g+pE3ail25hN/qdmzCfpsJVjKKCZesDBwtzrAqGOMQj6vhCDRsY4ALH\nmOVObltR/xeG/jph6OD2r+Fv5lZBWEhMx58AAAAASUVORK5CYII=\n"
"""
def __init__(self):
self.dict_ = {"type":"boxzoom"}
if __name__ == '__main__':
import matplotlib.pyplot as plt
import numpy as np
fig, ax = plt.subplots()
ax.plot(np.random.normal(0, 1, 1000),
np.random.normal(0, 1, 1000), 'ok', alpha=0.3)
mpld3.plugins.connect(fig, BoxZoomPlugin())
mpld3.show()
| bsd-3-clause | Python | |
c04610422ffd6e0fe87c62d7a8039116f804467c | Add jupyterhub config | everware/everware,betatim/everware,astiunov/everware,everware/everware,astiunov/everware,astiunov/everware,astiunov/everware,ibab/everware,everware/everware,sashabaranov/everware,ibab/everware,betatim/everware,sashabaranov/everware,ibab/everware,betatim/everware,everware/everware,sashabaranov/everware,sashabaranov/everware,betatim/everware,ibab/everware | jupyterhub_config.py | jupyterhub_config.py | import os
import everware
import jupyterhub.handlers.pages
jupyterhub.handlers.pages.HomeHandler.get = everware.HomeHandler.get
jupyterhub.handlers.pages.HomeHandler.post = everware.HomeHandler.post
c = get_config()
# spawn with custom docker containers
c.JupyterHub.spawner_class = 'everware.CustomDockerSpawner'
# The docker instances need access to the Hub, so the default loopback port doesn't work:
from IPython.utils.localinterfaces import public_ips
c.JupyterHub.hub_ip = public_ips()[0]
c.JupyterHub.hub_api_ip = public_ips()[0]
c.JupyterHub.authenticator_class = 'everware.GitHubOAuthenticator'
c.Authenticator.whitelist = set()
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
c.GitHubOAuthenticator.client_id = os.environ['GITHUB_CLIENT_ID']
c.GitHubOAuthenticator.client_secret = os.environ['GITHUB_CLIENT_SECRET']
c.Spawner.tls = True
c.Spawner.debug = True
c.Spawner.http_timeout = 32
c.JupyterHub.data_files_path = 'share'
c.JupyterHub.template_paths = ['share/static/html']
# change this to the ip that `boot2docker ip` tells you if
# you use boot2docker, otherwise remove the line
#c.Spawner.container_ip = '192.168.59.103'
| bsd-3-clause | Python | |
010f19ab2f9c0f3305d7f2eabcbbd33952a58fdd | Add a dir | karnikamit/python,llluiop/python-1,Mark24Code/python,whix/python,ZSeaPeng/python,EricSekyere/python,JiYouMCC/python,DanielShangHai/python,merfii/PythonExercises,merfii/PythonExercises,wangjun/python,tzq668766/python,JiYouMCC/python,Show-Me-the-Code/python,snailwalker/python,hooting/show-me-the-code-python,renzongxian/Show-Me-the-Code,wangjun/python,ionutcipriananescu/python,karnikamit/python,hooting/show-me-the-code-python,dominjune/python,YGIronMan/python,llluiop/python-1,hooting/show-me-the-code-python,merfii/PythonExercises,llluiop/python-1,yangzilong1986/python,fairyzoro/python,yangzilong1986/python,Jaccorot/python,Show-Me-the-Code/python,Yrthgze/prueba-sourcetree2,lz199144/python,Friday21/python_show_me_the_code,xchaoinfo/python,whix/python,Yrthgze/prueba-sourcetree2,12wang3/python,tzq668766/python,ZSeaPeng/python,keysona/python,Pritesh242/python,snailwalker/python,ionutcipriananescu/python,Pritesh242/python,JiYouMCC/python,keysona/python,agogear/python-1,zhakui/python,zhakui/python,starlightme/python,Mark24Code/python,Ph0enixxx/python,karnikamit/python,ZSeaPeng/python,Show-Me-the-Code/python,lz199144/python,dominjune/python,12wang3/python,Jaccorot/python,Jaccorot/python,EricSekyere/python,wangjun/python,fairyzoro/python,Pritesh242/python,xiaoixa/python,Yrthgze/prueba-sourcetree2,zhenglaizhang/python,keysona/python,Ph0enixxx/python,Pritesh242/python,Ph0enixxx/python,luoxufeiyan/python,ZuoGuocai/python,sravaniaitha/python,merfii/PythonExercises,Mark24Code/python,yangzilong1986/python,Show-Me-the-Code/python,Yrthgze/prueba-sourcetree2,Friday21/python_show_me_the_code,Yrthgze/prueba-sourcetree2,Jaccorot/python,ionutcipriananescu/python,renzongxian/Show-Me-the-Code,xchaoinfo/python,zhakui/python,haiyangd/python-show-me-the-code-,Ph0enixxx/python,YGIronMan/python,Friday21/python_show_me_the_code,xchaoinfo/python,zhenglaizhang/python,hooting/show-me-the-code-python,wangjun/python,fairyzoro/python,sravaniaitha/pyt
hon,zhakui/python,agogear/python-1,zhenglaizhang/python,whix/python,snailwalker/python,tzq668766/python,zhakui/python,xiaoixa/python,snailwalker/python,merfii/PythonExercises,lz199144/python,starlightme/python,luoxufeiyan/python,starlightme/python,karnikamit/python,llluiop/python-1,xchaoinfo/python,yangzilong1986/python,EricSekyere/python,Show-Me-the-Code/python,YGIronMan/python,keysona/python,Yrthgze/prueba-sourcetree2,luoxufeiyan/python,snailwalker/python,ionutcipriananescu/python,EricSekyere/python,DanielShangHai/python,wangjun/python,haiyangd/python-show-me-the-code-,llluiop/python-1,fairyzoro/python,renzongxian/Show-Me-the-Code,ZuoGuocai/python,12wang3/python,ZSeaPeng/python,luoxufeiyan/python,xiaoixa/python,DanielShangHai/python,12wang3/python,haiyangd/python-show-me-the-code-,agogear/python-1,lz199144/python,yangzilong1986/python,JiYouMCC/python,whix/python,ZSeaPeng/python,sravaniaitha/python,Mark24Code/python,starlightme/python,Friday21/python_show_me_the_code,agogear/python-1,DanielShangHai/python,sravaniaitha/python,xiaoixa/python,DanielShangHai/python,EricSekyere/python,tzq668766/python,xchaoinfo/python,Ph0enixxx/python,agogear/python-1,fairyzoro/python,YGIronMan/python,Show-Me-the-Code/python,luoxufeiyan/python,karnikamit/python,renzongxian/Show-Me-the-Code,ZuoGuocai/python,lz199144/python,Pritesh242/python,haiyangd/python-show-me-the-code-,ionutcipriananescu/python,Jaccorot/python,12wang3/python,tzq668766/python,YGIronMan/python,xiaoixa/python,Mark24Code/python,hooting/show-me-the-code-python,haiyangd/python-show-me-the-code-,zhenglaizhang/python,sravaniaitha/python,renzongxian/Show-Me-the-Code,dominjune/python,ZuoGuocai/python,JiYouMCC/python,dominjune/python,dominjune/python,starlightme/python,Friday21/python_show_me_the_code,keysona/python,ZuoGuocai/python,zhenglaizhang/python,whix/python | stingroc/0002/0002.py | stingroc/0002/0002.py | print "0002"
| mit | Python | |
1c6aebcf02d698c6a1722476978fb88fbf6c218d | Add CartPole TF HighLevel | AlwaysLearningDeeper/OpenAI_Challenges,AlwaysLearningDeeper/Project | src/CartPole-v0/TF_High_Level_NN.py | src/CartPole-v0/TF_High_Level_NN.py | import gym
import time
import random
import numpy as np
import tensorflow as tf
from statistics import median, mean
from collections import Counter
import os
tf.logging.set_verbosity(tf.logging.FATAL)
LR = 1e-3
env = gym.make("CartPole-v0")
env.reset()
goal_steps = 500
score_requirement = 50
initial_games = 10000
def initial_population():
"""
Extracts good runs from random games. Code from sentdex
:return training_data:
"""
# [OBS, MOVES]
training_data = []
# all scores:
scores = []
# just the scores that met our threshold:
accepted_scores = []
# iterate through however many games we want:
for _ in range(initial_games):
score = 0
# moves specifically from this environment:
game_memory = []
# previous observation that we saw
prev_observation = []
# for each frame in 200
for _ in range(goal_steps):
# choose random action (0 or 1)
action = random.randrange(0, 2)
# do it!
observation, reward, done, info = env.step(action)
# notice that the observation is returned FROM the action
# so we'll store the previous observation here, pairing
# the prev observation to the action we'll take.
if len(prev_observation) > 0:
game_memory.append([prev_observation, action])
prev_observation = observation
score += reward
if done: break
# IF our score is higher than our threshold, we'd like to save
# every move we made
# NOTE the reinforcement methodology here.
# all we're doing is reinforcing the score, we're not trying
# to influence the machine in any way as to HOW that score is
# reached.
if score >= score_requirement:
accepted_scores.append(score)
for data in game_memory:
# convert to one-hot (this is the output layer for our neural network)
if data[1] == 1:
output = [0, 1]
elif data[1] == 0:
output = [1, 0]
# saving our training data
training_data.append([data[0], output])
# reset env to play again
env.reset()
# save overall scores
scores.append(score)
# some stats here, to further illustrate the neural network magic!
print('Average accepted score:', mean(accepted_scores))
print('Median score for accepted scores:', median(accepted_scores))
print(Counter(accepted_scores))
return training_data
def model_fn(features, targets, mode, params):
"""Model function for Estimator."""
network = tf.contrib.layers.relu(features, 128)
network = tf.contrib.layers.relu(network, 256)
network = tf.contrib.layers.relu(network, 512)
network = tf.contrib.layers.relu(network, 256)
network = tf.contrib.layers.relu(network, 128)
predictions = tf.contrib.layers.fully_connected(network, 2,activation_fn = tf.nn.softmax)
# Reshape output layer to 1-dim Tensor to return predictions
predictions_dict = {"actions": predictions}
# Calculate loss using softmax
loss =tf.losses.softmax_cross_entropy(targets, predictions)
train_op = tf.contrib.layers.optimize_loss(
loss=loss,
global_step=tf.contrib.framework.get_global_step(),
learning_rate=params["learning_rate"],
optimizer="SGD")
return predictions_dict, loss, train_op
def train_model(training_data):
model_params = {"learning_rate": LR}
X = np.array([i[0] for i in training_data]).reshape(-1, len(training_data[0][0]))
y = [i[1] for i in training_data]
nn = tf.contrib.learn.SKCompat(tf.contrib.learn.Estimator(
model_fn=model_fn, params=model_params))
nn.fit(x=X, y=y, batch_size=None, max_steps=5)
return nn
model = train_model(training_data=initial_population())
scores = []
choices = []
for each_game in range(10):
score = 0
game_memory = []
prev_obs = []
env.reset()
for _ in range(goal_steps):
#env.render()
if len(prev_obs) == 0:
action = random.randrange(0, 2)
else:
#t0 = time.time()
action = np.argmax(model.predict(prev_obs.reshape(-1, len(prev_obs)))['actions'][0])
#t1 = time.time()
#print("Took: ", t1 - t0)
choices.append(action)
new_observation, reward, done, info = env.step(action)
prev_obs = new_observation
game_memory.append([new_observation, action])
score += reward
if done: break
scores.append(score) | mit | Python | |
287a89307af6ad720978682f49c01e39259303ec | Create censys_monitor.py | nremynse/Automation-Scripts | censys_monitor.py | censys_monitor.py | import censys.certificates
import json
import requests
import os
import random
#UID = ""
#SECRET = ""
#api for remynseit and remynse
# Placeholder credential pools; replace with real Censys API ID/secret pairs.
UIDS = ["UID1", "UID2", "UID3"]
SECRETS = {"secret": "value", "secret2": "value2"}
'''
Search
(utah.edu.*) AND NOT parsed.subject_dn.raw:/.*utah.edu/
'''
# Destination webhook for alert() notifications (left blank in VCS).
alert_webhook = ''
# Module-level default; main() rebinds its own local list via knownCerts().
known_certs = []
def getCerts():
    """Query the Censys certificates API for certs mentioning utah.edu
    whose subject DN is not utah.edu -- presumably to spot look-alike
    certificates (confirm intent with the search string above)."""
    # Pick one credential pair at random to spread API usage across keys.
    UID = random.choice(UIDS)
    SECRET = SECRETS[UID]
    c = censys.certificates.CensysCertificates(UID, SECRET)
    certs = c.search("(utah.edu.* OR utahedu.*) AND NOT parsed.subject_dn.raw:/.*utah.edu/")
    return certs
def knownCerts():
    """Return the list of certificate fingerprints seen on previous runs.

    Reads one fingerprint per line from /opt/censys/certs.txt.  Returns an
    empty list when the state file does not exist yet (first run) -- the
    original fell off the end and returned an implicit None there, which is
    backward compatible (both are falsy for main()'s check) but surprising.
    """
    path = '/opt/censys/certs.txt'
    if not os.path.isfile(path):
        return []
    with open(path) as f:
        # rstrip() drops the trailing "\r\n" written by main().
        return [line.rstrip() for line in f]
def alert(cert):
    """Post a new-certificate notification to the configured webhook.

    Raises ValueError when the webhook endpoint does not answer 200.
    """
    text = f"New SSL Cert Detected:\n Sha256: {cert['parsed.fingerprint_sha256']}\n SubjectCN: {cert['parsed.subject_dn']}"
    message = {"text": text}
    resp = requests.post(alert_webhook, data=json.dumps(message), headers={'Content-Type': 'application/json'})
    if resp.status_code != 200:
        raise ValueError(f'Status Code: {resp.status_code}, Error: {resp.text}')
def main():
    """Diff the live Censys results against the local fingerprint cache,
    alerting on (and recording) any certificate not seen before."""
    known_certs = knownCerts()
    new_certs = getCerts()
    for i in new_certs:
        if known_certs:
            if i['parsed.fingerprint_sha256'] in known_certs:
                pass
            else:
                alert(i)
                print("Found New Cert")
                # Append the fingerprint so it is not re-alerted next run.
                with open('/opt/censys/certs.txt', 'a+') as f:
                    f.write(i['parsed.fingerprint_sha256'] + "\r\n")
        else:
            # Empty cache: treat everything as new and seed the state file.
            print("First Run, adding certs and alerting")
            alert(i)
            with open('/opt/censys/certs.txt', 'a+') as f:
                f.write(i['parsed.fingerprint_sha256'] + "\r\n")
if __name__ == "__main__":
    main()
| mit | Python | |
731118d82aa41689f12adb32ea37be55be89a757 | Add gpu_buffer.py | Kupoman/Fafnir | fafnir/gpu_buffer.py | fafnir/gpu_buffer.py | import panda3d.core as p3d
class GpuBuffer:
    """Thin wrapper around a Panda3D buffer texture used as a raw GPU data
    buffer.  Relies on the global ShowBase instance ``base`` being present
    (Panda3D convention) -- TODO confirm it is created before use."""
    def __init__(self, name, count, data_type, data_format):
        # Backing texture; its RAM image is the CPU-side mirror of the data.
        self.buffer = p3d.Texture(name)
        self.data_type = data_type
        self.data_format = data_format
        self.resize(count)
    def resize(self, count):
        """(Re)allocate the buffer texture for *count* elements and zero it."""
        self.buffer.setup_buffer_texture(
            count,
            self.data_type,
            self.data_format,
            p3d.GeomEnums.UH_dynamic
        )
        # Register the texture with the GPU's prepared-objects cache up front.
        self.buffer.prepare(base.win.get_gsg().get_prepared_objects())
        # Zero-fill the CPU-side image byte by byte.
        ram_image = self.buffer.modify_ram_image()
        for i in range(len(ram_image)):
            ram_image[i] = 0
    def get_buffer_id(self):
        """Return the native (driver-level) buffer id, preparing it now."""
        gsg = base.win.get_gsg()
        pgo = gsg.get_prepared_objects()
        context = self.buffer.prepare_now(0, pgo, gsg)
        return context.get_native_buffer_id()
    def get_texture(self):
        """Expose the underlying panda3d Texture object."""
        return self.buffer
    def print_buffer(self, count):
        """Debug helper: pull the texture back from the GPU and print its
        first *count* values interpreted as 32-bit floats."""
        base.graphics_engine.extract_texture_data(self.buffer, base.win.get_gsg())
        view = memoryview(self.buffer.get_ram_image()).cast('f')
        for i in range(count):
            print(view[i], end=' ')
        print()
| apache-2.0 | Python | |
3cb7c1cd73dfb73d96af15a183d4e7ef6a9369e8 | create src | HeavenH/teleGit | src/GitApi.py | src/GitApi.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from requests import get
from json import loads
from argparse import ArgumentParser
class GitHub():
    """Small CLI client for the (unauthenticated) GitHub REST API."""
    def GetRepos(self, user):
        """Return a formatted multi-line summary of *user*'s public repos.
        Note: "Repositorys" below is a typo preserved from the original
        output text."""
        self.msg = ""
        req = loads(get('https://api.github.com/users/' +
                        user + '/repos').text)
        self.msg += '\nRepositorys of user.'
        for i in range(len(req)):
            self.msg += '\n\nName repository: ' + str(req[i]['name'])
            self.msg += '\nDescription repository: ' + \
                        str(req[i]['description'])
            self.msg += '\nURL repository: ' + str(req[i]['html_url'])
            self.msg += '\nStars: total: ' + \
                        str(req[i]['stargazers_count'])
            self.msg += '\nForks total: ' + \
                        str(req[i]['forks_count'])
        return self.msg
    def GetInfo(self, user):
        """Return a formatted multi-line profile summary for *user*."""
        self.msg = ""
        req = loads(get('https://api.github.com/users/' + user).text)
        self.msg += '\nInformation of user:\n'
        self.msg += '\nName: ' + str(req['name'])
        self.msg += '\nEmail: ' + str(req['email'])
        self.msg += '\nCompany: ' + str(req['company'])
        self.msg += '\nBlog: ' + str(req['blog'])
        self.msg += '\nBio: ' + str(req['bio'])
        self.msg += '\nLocation: ' + str(req['location'])
        self.msg += '\nPublic repository: ' + str(req['public_repos'])
        self.msg += '\nFollowers: ' + str(req['followers']) + '\n'
        return self.msg
    def Arguments(self):
        """Parse command-line flags and dispatch to GetInfo/GetRepos.
        NOTE(review): creates a second GitHub instance rather than using
        self -- harmless, but redundant."""
        self.user = GitHub()
        self.parser = ArgumentParser()
        self.parser.add_argument('--repos', dest='repos', action='store_true',
                                 help='List all repository.')
        self.parser.add_argument('--user', dest='user', action='store',
                                 required=True, help='Parameter for set user.')
        self.parser.add_argument('--info', dest='info', action='store_true',
                                 help='Parameter for to get info of user')
        self.parser.add_argument('--all', dest='all', action='store_true',
                                 help='Parameter for to define all options')
        self.args = self.parser.parse_args()
        if self.args.user and self.args.info:
            print(self.user.GetInfo(self.args.user))
        elif self.args.user and self.args.repos:
            print(self.user.GetRepos(self.args.user))
        elif self.args.user and self.args.all:
            print(self.user.GetRepos(self.args.user))
            print(self.user.GetInfo(self.args.user))
        else:
            print('Use --info, --repos or --all.')
| mit | Python | |
4f751298176bf2118d4a638e106d5e9572725178 | Add utility class | fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur | konstrukteur/Util.py | konstrukteur/Util.py | #
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
import re
import unidecode
def fixCoreTemplating(content):
    """Translate Core-JS template tags into standard Mustache tags.

    Core uses ``{{=name}}`` for unescaped output and ``{{?name}}`` for
    sections; Mustache spells these ``{{&name}}`` and ``{{#name}}``.

    :param content: template source text
    :return: the text with both tag forms rewritten
    """
    # Replace {{=tagname}} with {{&tagname}}.  The replacement strings are
    # raw now: the original's non-raw "\g<tag>" relied on "\g" not being a
    # recognized escape, which warns (and will eventually error) on modern
    # Python.
    content = re.sub(r"{{=(?P<tag>.+?)}}", r"{{&\g<tag>}}", content)
    # Replace {{?tagname}} with {{#tagname}}.
    content = re.sub(r"{{\?(?P<tag>.+?)}}", r"{{#\g<tag>}}", content)
    return content
def fixSlug(slug):
    """ Replaces unicode character with something equal from ascii ( e.g. ü -> u ) """
    # Transliterate via the third-party "unidecode" package, lower-case, and
    # collapse every run of dots/whitespace into a single hyphen separator.
    pattern = r'[.\s]+'
    return re.sub(pattern, "-", unidecode.unidecode(slug).lower()) | mit | Python |
fea74aa88af88ea352b72525ecbf22a0fbd4e3db | Make a histogram and visualize it | naoyak/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2 | ch02/histogram.py | ch02/histogram.py | # Load the parquet file containing flight delay records
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# Register the data for Spark SQL
on_time_dataframe.registerTempTable("on_time_performance")
# Compute a histogram of departure delays
on_time_dataframe\
.select("DepDelay")\
.rdd\
.flatMap(lambda x: x)\
.histogram(10)
import numpy as np
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
# Function to plot a histogram using pyplot
def create_hist(rdd_histogram_data):
"""Given an RDD.histogram, plot a pyplot histogram"""
heights = np.array(rdd_histogram_data[1])
full_bins = rdd_histogram_data[0]
mid_point_bins = full_bins[:-1]
widths = [abs(i - j) for i, j in zip(full_bins[:-1], full_bins[1:])]
bar = plt.bar(mid_point_bins, heights, width=widths, color='b')
return bar
# Compute a histogram of departure delays
departure_delay_histogram = on_time_dataframe\
.select("DepDelay")\
.rdd\
.flatMap(lambda x: x)\
.histogram(10, [-60,-30,-15,-10,-5,0,5,10,15,30,60,90,120,180])
create_hist(departure_delay_histogram)
| mit | Python | |
168f6a9d557d1813649fd060dbfa1217355443df | Implement main for entry | chhsiao90/cheat-ext | cheat_ext/main.py | cheat_ext/main.py | from __future__ import print_function
import argparse
from cheat_ext.installer import (
install, upgrade, remove
)
from cheat_ext.linker import link
def _install(args):
install(args.repository)
link(args.repository)
def _upgrade(args):
upgrade(args.repository)
def _remove(args):
remove(args.repository)
parser = argparse.ArgumentParser(description="cheat extension")
subparsers = parser.add_subparsers()
install_parser = subparsers.add_parser("install")
install_parser.add_argument("repository", type=str)
install_parser.set_defaults(func=_install)
upgrade_parser = subparsers.add_parser("upgrade")
upgrade_parser.add_argument("repository", type=str)
upgrade_parser.set_defaults(func=_upgrade)
remove_parser = subparsers.add_parser("remove")
remove_parser.add_argument("repository", type=str)
remove_parser.set_defaults(func=_remove)
def main():
options = parser.parse_args()
options.func(options)
| mit | Python | |
fadae1b5213f9c2b08e061376fcb3a0c519c0e93 | sort of working | bhpayne/domino_tile_floor,bhpayne/domino_tile_floor,bhpayne/domino_tile_floor | domino_tiles_transition_graph.py | domino_tiles_transition_graph.py |
def create_transition_dic(width,height):
transition_dic={}
transition_dic[1]=[2,4]
transition_dic[2]=[1,5,3]
transition_dic[3]=[2,6]
transition_dic[4]=[1,5]
transition_dic[5]=[4,2,6]
transition_dic[6]=[3,5]
return transition_dic
def print_list_of_transitions(list_of_transitions):
for this_list in list_of_transitions:
print(this_list)
return
for next_val in transition_dic[5]:
new_list=[]
new_list.append(starting_value)
new_list.append(next_val)
list_of_transitions.append(new_list)
return list_of_transitions
def append_next_value(transition_dic,list_of_transitions):
for this_list in list_of_transitions:
if (len(this_list)<(number_of_tiles_to_fill)): # if this list isn't "done"
last_value=this_list[len(this_list)-1]
new_transition_list=[]
for next_value in transition_dic[last_value]:
if next_value not in this_list:
new_list=list(this_list) # https://stackoverflow.com/questions/2612802/how-to-clone-or-copy-a-list
new_list.append(next_value)
new_transition_list.append(new_list)
# print(new_list)
list_of_transitions=new_transition_list
return list_of_transitions
width=3
height=2
number_of_tiles_to_fill=width*height
transition_dic = create_transition_dic(width,height)
starting_value=5
list_of_transitions=[]
print("seed")
this_transition=[5]
list_of_transitions.append(this_transition)
print_list_of_transitions(list_of_transitions)
print("step 1")
list_of_transitions = append_next_value(transition_dic,list_of_transitions)
print_list_of_transitions(list_of_transitions)
print("step 2")
list_of_transitions = append_next_value(transition_dic,list_of_transitions)
print_list_of_transitions(list_of_transitions)
| apache-2.0 | Python | |
91bb20158513e5ba2a8fbaccb0c7b80ffabdb36b | Add demo for PyCUDA IAF trig-poly decoder. | bionet/ted.python | demos/iaf_trig_cuda_demo.py | demos/iaf_trig_cuda_demo.py | #!/usr/bin/env python
"""
Demos for basic time encoding and decoding algorithms that use
IAF neurons. The decoding algorithms assume a trigonometric polynomial
approximation of the input signals.
"""
import sys
import numpy as np
# Set matplotlib backend so that plots can be generated without a
# display:
import matplotlib
matplotlib.use('AGG')
from bionet.utils.misc import func_timer
import bionet.utils.gen_test_signal as g
import bionet.utils.plotting as pl
import bionet.ted.iaf as iaf
import bionet.ted.iaf_trig_cuda as iaf_trig_cuda
import pycuda.autoinit
import scikits.cuda.autoinit
# For determining output plot file names:
output_name = 'iaf_trig_cuda_demo_'
output_count = 0
output_ext = '.png'
# Define algorithm parameters and input signal:
dur = 0.1
dt = 1e-6
f = 32
bw = 2*np.pi*f
t = np.arange(0, dur, dt)
np.random.seed(0)
noise_power = None
if noise_power == None:
fig_title = 'IAF Input Signal with no Noise';
else:
fig_title = 'IAF Input Signal with %d dB of Noise' % noise_power;
print fig_title
u = func_timer(g.gen_test_signal)(dur, dt, f, noise_power)
pl.plot_signal(t, u, fig_title,
output_name + str(output_count) + output_ext)
b = 3.5 # bias
d = 0.7 # threshold
R = 10.0 # resistance
C = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b, d, R, C)
except ValueError('reconstruction condition not satisfied'):
sys.exit()
M = 32 # 2*M+1 trigonometric polynomials are used in the reconstruction
# Test leaky algorithms:
output_count += 1
fig_title = 'Signal Encoded Using Leaky IAF Encoder'
print fig_title
s = func_timer(iaf_trig_cuda.iaf_encode)(u, dt, b, d, R, C)
pl.plot_encoded(t, u, s, fig_title,
output_name + str(output_count) + output_ext)
output_count += 1
fig_title = 'Signal Decoded Using Leaky Trigonometric IAF Decoder'
print fig_title
u_rec = func_timer(iaf_trig_cuda.iaf_decode_trig)(s, dur, dt, bw, b, d, R,
C, M)
pl.plot_compare(t, u, u_rec, fig_title,
output_name + str(output_count) + output_ext)
# Test ideal algorithms:
R = np.inf
output_count += 1
fig_title = 'Signal Encoded Using Ideal IAF Encoder'
print fig_title
s = func_timer(iaf_trig_cuda.iaf_encode)(u, dt, b, d, R, C)
pl.plot_encoded(t, u, s, fig_title,
output_name + str(output_count) + output_ext)
output_count += 1
fig_title = 'Signal Decoded Using Ideal Trigonometric IAF Decoder'
print fig_title
u_rec = func_timer(iaf_trig_cuda.iaf_decode_trig)(s, dur, dt, bw, b,
d, R, C, M)
pl.plot_compare(t, u, u_rec, fig_title,
output_name + str(output_count) + output_ext)
| bsd-3-clause | Python | |
6420dc0127f0f33036fe0f9258d5350da5faef6d | Create filtering.py | krzjoa/sciquence,krzjoa/sciquence | sciquence/sequences/filtering.py | sciquence/sequences/filtering.py | def parallel_filter(condition, *lists):
'''
Parallelly filter multiple lists.
Parameters
----------
condition: callable
A function, which has as many arguments as the number of lists
lists: list of list
Returns
-------
filtered_lists:
Filtered accordingly some criterion
'''
# TODO: check length
output = [[] for _ in xrange(len(lists))]
for d in zip(*lists):
if condition(*list(d)):
multi_append(output, *list(d))
print output
return output
| mit | Python | |
0bd93c02ab7917d570a74cf151dfb5789c3bf174 | Add a brutal script for removing concepts in bulk while testing | fzadow/CATMAID,htem/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,fzadow/CATMAID | scripts/remove_concepts_after.py | scripts/remove_concepts_after.py | # An entirely untested script to delete all the concepts in the
# CATMAID database for a particular project.
# Mark Longair 2010
import os
from jarray import array
from java.sql import DriverManager, Connection, SQLException, Types

# FIXME: Just hardcode the user_id and project_id for the moment
user_id = 3
project_id = 4

# Set up the JDBC connection:
# NOTE(review): this is a Jython script -- `Class` (java.lang.Class) and
# `IJ` (ImageJ's logger) are assumed to be provided by the Fiji/ImageJ
# runtime, since neither is imported here; confirm before running
# standalone.
try:
    Class.forName("org.postgresql.Driver")
except:
    IJ.log("Failed to find the postgresql driver...")
    raise

# Credentials are read from ~/.catmaid-db: first line is the username,
# second line is the password.
catmaid_db_user = None
catmaid_db_password = None
db_login_filename = os.path.join(os.environ['HOME'],'.catmaid-db')
# NOTE(review): fp is never closed; harmless for a one-shot script.
fp = open(db_login_filename)
for i, line in enumerate(fp):
    if i == 0:
        catmaid_db_user = line.strip()
    elif i == 1:
        catmaid_db_password = line.strip()

# Module-level JDBC connection used by run() below.
c = DriverManager.getConnection("jdbc:postgresql://localhost/catmaid",
                                catmaid_db_user,
                                catmaid_db_password)
def run():
    """Delete all CATMAID concept rows with id greater than `first_id`.

    Temporarily drops the treenode self-referencing foreign key so the
    treenodes can be deleted without parent-ordering problems, then
    restores it.  Uses the module-level JDBC connection `c`.
    """
    # FIXME: ask in a dialog for the ID instead
    first_id = 3859376
    # BUG FIX: the original line had an unbalanced trailing ')', which
    # was a SyntaxError.
    where = ' where id > %d' % (first_id,)
    statements = [
        'delete from treenode_class_instance' + where,
        'delete from connector_class_instance' + where,
        'delete from class_instance' + where,
        # Drop the parent FK so treenodes can be removed in any order:
        'alter table treenode drop constraint treenode_parent_id_fkey',
        'delete from treenode' + where,
        'alter table only treenode add constraint treenode_parent_id_fkey foreign key (parent_id) REFERENCES treenode(id)',
        'delete from relation' + where,
        'delete from connector' + where,
        'delete from class_instance_class_instance' + where,
    ]
    # BUG FIX: Connection.createStatement() takes no arguments -- the SQL
    # goes to execute().  executeQuery() would also throw for statements
    # that return no ResultSet (DELETE/ALTER), so use execute() instead.
    s = c.createStatement()
    try:
        for sql in statements:
            s.execute(sql)
    finally:
        s.close()
| agpl-3.0 | Python | |
39b156cb3e208c3d06ced6fb086ab171209ac346 | add ctable fixture | SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | psi/ctable_mappings.py | psi/ctable_mappings.py | from ctable.fixtures import CtableMappingFixture
from ctable.models import ColumnDef, KeyMatcher
class EventsMapping(CtableMappingFixture):
    """Ctable mapping fixture for the PSI 'events' couch view."""

    name = 'events'
    domains = ['psi-unicef', 'psi']
    couch_view = 'psi/events'
    schedule_active = True

    @property
    def columns(self):
        # Dimension columns are pulled straight out of the couch key.
        key_columns = [
            ColumnDef(name="domain", data_type="string", value_source="key", value_index=2),
            ColumnDef(name="state", data_type="string", value_source="key", value_index=3),
            ColumnDef(name="district", data_type="string", value_source="key", value_index=4),
            ColumnDef(name="date", data_type="date", value_source="key", value_index=1),
        ]
        # One summed integer column per tracked metric; each metric name is
        # matched against position 5 of the couch key.
        metric_columns = [
            ColumnDef(name=metric, data_type="integer", value_source="value",
                      value_attribute='sum',
                      match_keys=[KeyMatcher(index=5, value=metric)])
            for metric in ('events', 'males', 'females', 'attendees', 'leaflets', 'gifts')
        ]
        return key_columns + metric_columns

    def customize(self, mapping):
        # Restrict the emitted couch keys to the 'ctable' prefix.
        mapping.couch_key_prefix = ['ctable']
| bsd-3-clause | Python | |
f3a02b3570724964f60d10a8112e0d8eb32dddc7 | Add 4chan download script | codepony/useful-stuff,codepony/useful-stuff,codepony/useful-stuff | 4chan.py | 4chan.py | #!/usr/bin/python
# Protip: want to monitor a thread and download all new images every 5 seconds?
# while true ; do 4c [-nf] url; sleep 5; done
import re, urllib, urllib2, argparse, os

# Command-line interface: one or more thread URLs, plus flags controlling
# where files land and whether existing files are overwritten.
parser = argparse.ArgumentParser(description='Downloads all full-size images in one or more arbitrary 4chan threads.')
parser.add_argument('urllist', metavar='url', type=str, nargs='+',
                    help='the URLs of the threads')
parser.add_argument('-n', '--newdir', dest='newdir', action='store_true',
                    help='create a new directory for each thread in the current directory')
parser.add_argument('-f', '--force', dest='force_redownload', action='store_true',
                    help='force redownloading every image, overwriting it if it already exists')
args = parser.parse_args()
options = vars(args)

# Matches protocol-relative links to full-size images in the thread HTML.
regex = 'href="(\/\/images\.4chan\.org\/[a-z]+\/src\/[0-9]+\.[a-z]+)"'

for url in options['urllist']:
    print "Thread URL: %s" % url
    try:
        page = urllib2.urlopen(url).read()
    except ValueError:
        print "That does not look like a valid URL."
        continue
    except urllib2.HTTPError:
        # NOTE(review): any HTTP error status lands here, not only 404 --
        # the message below is only accurate for dead threads.
        print "The given URL returns a HTTP 404 status code - the thread may have died."
        continue
    if options['newdir'] == True:
        # Use the last URL path component (the thread id) as the directory name.
        thread_id = url.split('/')[-1]
        target_dir = "%s/" % thread_id
        if not os.path.exists(thread_id):
            os.makedirs(thread_id)
    else:
        target_dir = ""
    search = re.compile(regex)
    matches = search.finditer(page)
    # Collect image URLs, dropping duplicates while preserving order.
    urls = []
    for match in matches:
        if match.group(1) not in urls:
            urls.append(match.group(1))
    current = 1
    total = len(urls)
    print " Parsed thread. Total images: %d" % total
    for downloadurl in urls:
        # The scraped links are protocol-relative ("//..."); prefix a scheme.
        downloadurl = "http:%s" % downloadurl
        filename = downloadurl.split('/')[-1]
        path = target_dir + filename
        if os.path.exists(path) and options['force_redownload'] == False:
            print "Skipped existing file %s (%d/%d)." % (filename, current, total)
        else:
            urllib.urlretrieve(downloadurl, path)
            print "Downloaded %s (%d/%d)." % (filename, current, total)
        current += 1
print "Done." | mit | Python | |
b166caa9fb0efa4aceab315fd6a945d2fe6922e4 | Patch fixed | geekroot/erpnext,gsnbng/erpnext,indictranstech/erpnext,geekroot/erpnext,njmube/erpnext,njmube/erpnext,geekroot/erpnext,gsnbng/erpnext,indictranstech/erpnext,geekroot/erpnext,indictranstech/erpnext,Aptitudetech/ERPNext,indictranstech/erpnext,gsnbng/erpnext,njmube/erpnext,gsnbng/erpnext,njmube/erpnext | erpnext/patches/v7_2/update_salary_slips.py | erpnext/patches/v7_2/update_salary_slips.py | import frappe
from erpnext.hr.doctype.process_payroll.process_payroll import get_month_details
def execute():
    """Patch: backfill `start_date`/`end_date` on Salary Slips that only
    have the legacy `fiscal_year`/`month` fields populated."""
    # Select slips with a fiscal year and month but no date range yet;
    # cancelled documents (docstatus 2) are left untouched.
    salary_slips = frappe.db.sql("""select fiscal_year, month, name from `tabSalary Slip`
where (month is not null and month != '')
and (fiscal_year is not null and fiscal_year != '') and
(start_date is null or start_date = '') and
(end_date is null or end_date = '') and docstatus != 2""", as_dict=1)
    for salary_slip in salary_slips:
        # Derive the calendar-month boundaries from the legacy fields.
        get_start_end_date = get_month_details(salary_slip.fiscal_year, salary_slip.month)
        start_date = get_start_end_date['month_start_date']
        end_date = get_start_end_date['month_end_date']
        frappe.db.sql("""update `tabSalary Slip` set start_date = %s, end_date = %s where name = %s""",
            (start_date, end_date, salary_slip.name))
from erpnext.hr.doctype.process_payroll.process_payroll import get_month_details
def execute():
salary_slips = frappe.db.sql("""select fiscal_year, month, name from `tabSalary Slip`
where (month is not null and month != '')
and (fiscal_year is not null and fiscal_year != '') and
(start_date is null or start_date = '') and
(end_date is null or end_date = '') and docstatus != 2""")
for salary_slip in salary_slips:
get_start_end_date = get_month_details(salary_slip.fiscal_year, salary_slip.month)
start_date = get_start_end_date['month_start_date']
end_date = get_start_end_date['month_end_date']
frappe.db.sql("""update `tabSalary Slip` set start_date = %s, end_date = %s where name = %s""",
(start_date, end_date, salary_slip.name)) | agpl-3.0 | Python |
c2151ae33c44f29d15d494d4862645beb33671cb | Add comments tests | bcroq/kansha,Net-ng/kansha,Net-ng/kansha,Net-ng/kansha,bcroq/kansha,bcroq/kansha,Net-ng/kansha,bcroq/kansha | kansha/card_addons/comment/tests.py | kansha/card_addons/comment/tests.py | # -*- coding:utf-8 -*-
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
from nagare import security
from kansha.cardextension.tests import CardExtensionTestCase
from .comp import Comments
class CommentsTest(CardExtensionTestCase):
    """Unit tests for the Comments card extension."""

    def create_instance(self, card, action_log):
        # Factory hook used by CardExtensionTestCase to build the
        # extension under test.
        return Comments(card, action_log)

    def test_add_delete(self):
        # Adding a comment stores it; editing changes the stored text.
        self.assertEqual(len(self.extension.comments), 0)
        self.extension.add(u'test')
        self.assertEqual(len(self.extension.comments), 1)
        # Comments are stored as components; call one to get the inner object.
        comment = self.extension.comments[0]()
        self.assertEqual(comment.data.comment, u'test')
        comment.set_comment(u'test2')
        self.assertEqual(comment.data.comment, u'test2')
        self.extension.delete_comment(self.extension.comments[0])

    def test_comment_label(self):
        # The label component mirrors the comment text and tracks authorship.
        self.extension.add(u'test')
        label = self.extension.comments[0]().comment_label()
        self.assertEqual(label.text, u'test')
        label.change_text(u'test2')
        self.assertEqual(label.text, u'test2')
        # The logged-in test user created the comment, so they are its author.
        self.assertTrue(label.is_author(security.get_user()))
| bsd-3-clause | Python | |
29a05075e500635b10a25931045702888b12618f | add main file | Agiroq/VW | main.py | main.py | import gas
# BUG FIX: `machine` was used below without being imported, and the ADC
# object was bound to the name `gas`, shadowing the imported `gas` module
# so the `gas.CheckGas(...)` call would fail.
import machine

# Pin assignments.
GASANALOG = 0   # ADC channel wired to the analog gas sensor
ALARMLED = 13   # digital pin driving the alarm LED

# Use a distinct name for the ADC so the `gas` module stays accessible.
gas_sensor = machine.ADC(GASANALOG)
gasLED = machine.Pin(ALARMLED, machine.Pin.OUT)

# Poll the sensor every 1000 ms, driving the alarm LED.
g = gas.CheckGas(led=gasLED, sensor=gas_sensor, time=1000)
| cc0-1.0 | Python | |
6b96008b3e89e3ff6a5616a68e49af3e41b2bc0b | Create main.py | Myselfminer/nCuberPlug | main.py | main.py | #!/usr/bin/python
"""
__version__ = "$Revision: 1.3 $"
__date__ = "$Date: 2004/04/14 02:38:47 $"
"""
import nID
import plugin
import os
repository_nid="all_url:https://www.dropbox.com/s/tvyxx5iidodidz2/nid_sample_list.txt?dl=1@name:Sample Repo Name@owner:myselfminer"
from PythonCard import model
class MyBackground(model.Background):
    """Main PythonCard window for the nCuberPlug plugin browser."""

    def on_initialize(self, event):
        # if you have any initialization
        # including sizer setup, do it here
        pass

    def on_allPlugins_mouseClick(self, event):
        # Fetch every plugin listed in the configured repository and
        # show the names in List1.
        allp=plugin.listall(repository_nid)# get a list of all plugins available in the repo
        self.components.List1.items=allp#update List1 with the list

    def on_load_mouseClick(self,event):
        # NOTE(review): leftover debug print; consider removing.
        print("w")
        # NOTE(review): the file handle is never closed, and `a` is
        # immediately reused for the nID lookup result below.
        a=open("nidlist.tmp","r")# open saved nids (presaved result of plugin.listall())
        b=a.readlines()
        a=plugin.get_nid_by_name(b,self.components.List1.stringSelection) # get nid by using the selection in List1
        # ---Set Component values---
        self.components.NameF.text=nID.parse(a, "name")
        self.components.VersionF.text=nID.parse(a, "version")
        self.components.OwnerF.text=nID.parse(a, "Author")
        self.components.DescriptionF.text=nID.parse(a, "Description")

    def on_Installed_mouseClick(self, event):
        # Show the names of locally installed plugins in List1.
        nids=plugin.get_local_nids()
        final=[]
        for i in nids:#create a nidlist of installed plugins
            now=nID.parse(i, "name")
            final.append(now)
        self.components.List1.items=final
if __name__ == '__main__':
app = model.Application(MyBackground)
app.MainLoop()
| apache-2.0 | Python | |
ec0ac308420a6cfd24b4093ef279deeb1f8728ec | Add a huge start-to-finish integration test. | cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO | tests/integration/test_full_integration.py | tests/integration/test_full_integration.py | #!/usr/bin/env python2.7
'''Test the full client access sequence.
It is pretty much the anti-pattern of testing.
'''
import unittest
from tornado.web import Application
from tornado.testing import AsyncHTTPTestCase
import sys
sys.path.append(".")
import json
from urllib import urlencode
from authserver import PingHandler, AuthHandler, LogoutHandler, CharacterHandler
from charserver import CharacterZoneHandler
from zoneserver import ObjectsHandler, CharStatusHandler, MovementHandler
import settings
from elixir import session
from elixir_models import metadata, setup_all, create_all
from elixir_models import User
def set_up_db():
    '''Connects to an in-memory SQLite database,
    with the purpose of emptying it and recreating it.

    Binds the Elixir metadata to sqlite:///:memory: and (re)creates all
    mapped tables, so every test run starts from an empty schema.
    '''
    metadata.bind = "sqlite:///:memory:"
    # metadata.bind.echo = True
    setup_all()
    create_all()

# Call it the first time for tests that don't care if they have clean data.
set_up_db()
class TestFullIntegration(AsyncHTTPTestCase):
    """End-to-end walk through the login / character / zone / object flow."""

    def get_app(self):
        # Mount every handler from the auth, character and zone servers
        # on one test application.
        handlers = []
        handlers.append((r"/ping", PingHandler))
        handlers.append((r"/login", AuthHandler))
        handlers.append((r"/logout", LogoutHandler))
        handlers.append((r"/characters", CharacterHandler))
        handlers.append((r"/zone/(.*)/zone", CharacterZoneHandler))
        # NOTE(review): "/characters" is registered twice; the duplicate
        # below looks unintentional.
        handlers.append((r"/characters", CharacterHandler))
        handlers.append((r"/objects", ObjectsHandler))
        handlers.append((r"/setstatus", CharStatusHandler))
        handlers.append((r"/movement", MovementHandler))
        return Application(handlers, cookie_secret=settings.COOKIE_SECRET)

    def test_everything(self):
        '''This is a test version of the 'idealclient.py' file for
        ease of automation.'''
        # Make sure the server is up.
        response = self.fetch('/ping').body
        expected = 'pong'
        self.assertEqual(expected, response)
        # Insert our user (only if a previous run has not already done so).
        user = User.query.filter_by(username=settings.DEFAULT_USERNAME, password=settings.DEFAULT_PASSWORD).first()
        if not user:
            User(username=settings.DEFAULT_USERNAME, password=settings.DEFAULT_PASSWORD)
            session.commit()
        # Log in.
        data = {'username':settings.DEFAULT_USERNAME, 'password':settings.DEFAULT_PASSWORD}
        response = self.fetch('/login', body=urlencode(data), method="POST")
        result = response.body
        expected = "Login successful."
        self.assertEqual(expected, result)
        # The session cookie must be echoed back on every later request.
        cookie = response.headers['Set-Cookie']
        self.assertTrue(cookie)
        headers = {'Cookie': cookie}
        # Get our characters.
        response = self.fetch('/characters', headers=headers)
        result = json.loads(response.body)
        self.assertTrue(len(result) > 0)
        character = result[0]
        # Get the zone our character is in:
        response = self.fetch('/zone/%s/zone' % character, headers=headers)
        result = json.loads(response.body)
        expected = {'zone': 'playerinstance-GhibliHills-%s' % (character,)}
        self.assertEqual(result, expected)
        zone = result['zone']
        # Normally, the client would ask the masterzoneserver for the
        # url of the zone. This is not necessary for this test
        # since we already know where it is.
        # Get the zone's objects.
        response = self.fetch('/objects', headers=headers)
        self.fetch('/logout', headers=headers)
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | Python | |
9e388ad5b78967f87a0b3b55235bd1e19183c152 | Test for the PaladinSpellSchema values | Enether/python_wow | tests/models/spells/test_paladin_spells.py | tests/models/spells/test_paladin_spells.py | import unittest
from tests.delete_test_db import delete_test_db # module that deletes the DB :)
import database.main
from tests.create_test_db import engine, session, Base
database.main.engine = engine
database.main.session = session
database.main.Base = Base
import models.main
from models.spells.paladin_spells_template import PaladinSpellsSchema
from spells import PaladinSpell
from models.items.item_template import ItemTemplateSchema
from models.spells.spell_dots import DotSchema
from buffs import BeneficialBuff, DoT
class PaladinSpellsSchemaTests(unittest.TestCase):
def setUp(self):
"""
Test that the values in the Schema are as expected
And that the convert_to_paladin_spell_object function works
"""
self.spell_entry = 4
self.spell_name = 'Melting Strike'
self.expected_spell = PaladinSpell(name=self.spell_name, rank=1, damage1=3, damage2=0, damage3=0,
heal1=0, heal2=0, heal3=0, mana_cost=6, cooldown=3,
beneficial_effect=None, harmful_effect=None)
def test_schema_values(self):
""" Load a schema object and assert that every value is as expected"""
loaded_schema: PaladinSpellsSchema = session.query(PaladinSpellsSchema).get(self.spell_entry)
self.assertTrue(isinstance(loaded_schema.id, int))
self.assertTrue(isinstance(loaded_schema.name, str))
self.assertTrue(isinstance(loaded_schema.rank, int))
self.assertTrue(isinstance(loaded_schema.level_required, int))
self.assertTrue(isinstance(loaded_schema.damage1, int))
self.assertTrue(isinstance(loaded_schema.damage2, int))
self.assertTrue(isinstance(loaded_schema.damage3, int))
self.assertTrue(isinstance(loaded_schema.heal1, int))
self.assertTrue(isinstance(loaded_schema.heal2, int))
self.assertTrue(isinstance(loaded_schema.heal3, int))
self.assertTrue(isinstance(loaded_schema.mana_cost, int))
self.assertIsNone(loaded_schema.beneficial_effect)
self.assertTrue(isinstance(loaded_schema.harmful_effect, int))
self.assertTrue(isinstance(loaded_schema.cooldown, int))
self.assertTrue(isinstance(loaded_schema.comment, str))
self.assertIsNone(loaded_schema.buff)
self.assertTrue(isinstance(loaded_schema.dot, DotSchema))
def tearDownModule():
    # Drop the test database once every test in this module has run.
    delete_test_db()

if __name__ == '__main__':
    unittest.main()
| mit | Python | |
8303188f2378bace2974c5eac65fda8433629935 | Add exercise 3 checking code | Kaggle/learntools,Kaggle/learntools | learntools/deep_learning_new/ex3.py | learntools/deep_learning_new/ex3.py | from learntools.core import *
_inputs = 50
# Data Preparation
class Q1(CodingProblem):
    # TODO: data-preparation check not implemented yet.
    _hint = ""
    _solution = ""
    def check(self):
        pass
class Q2(CodingProblem):
    """Check the student's `input_shape` for the fuel-economy network."""
    _var = "input_shape"
    _hints = [
        "Think about whether you should look at the processed data `X_train` or the original data `fuel`.",
        "You should look at the processed data `X_train`, since that is the data actually going into the network. Since the target was already removed, you can just look at the second entry in `X_train.shape` (the columns) to find the number of features."
    ]
    _solution = CS("""
input_shape = [{inputs}]
# or,
input_shape = [X_train.shape[1]]
""".format(inputs=_inputs))

    def check(self, input_shape):
        # BUG FIX: the membership operator `in` was missing here, so the
        # original expression subscripted the type object instead of
        # testing it against list/tuple.
        assert (type(input_shape) in [list, tuple]), \
            ("""The input shape should be a list (or tuple) with a single integer, like `[__]`.""")
        # Check the length before indexing so an empty list produces a
        # helpful message instead of an IndexError.
        assert (len(input_shape) == 1), \
            ("""You should use a list of length 1 here. Each entry in the `input_shape` list says how many input values you have in that dimension. The inputs here are numbers (one dimensional) and so your answer should look something like:
```python
input_shape = [____]
```
""")
        # 13/14 are presumably the raw `fuel` column counts -- a common
        # wrong answer that deserves a targeted hint.
        assert (input_shape[0] not in [13, 14]), \
            ("Look at the columns of `X_train` for the number of input features, since `X_train` (the processed data) is what is actually being used as input.")
        assert (input_shape[0] == _inputs), \
            ("The number of inputs should be {good_inputs}, but you gave {bad_inputs}".format(good_inputs=_inputs, bad_inputs=input_shape[0]))
# Fuel Economy Prediction
class Q3(CodingProblem):
    """Check the student's fuel-economy regression model architecture."""
    _hint = """Your answer should look something like:
```python
model = keras.Sequential([
# Hidden layers
layers.Dense(____),
layers.Dense(____),
layers.Dense(____),
# Output layer
layers.Dense(1),
])
```
"""
    _solution = CS("""
from tensorflow import keras
from tensorflow.keras import layers
model = keras.Sequential([
layers.Dense(64, activation='relu', input_shape=input_shape),
layers.Dense(64, activation='relu'),
layers.Dense(64, activation='relu'),
layers.Dense(1),
])
""".format(_inputs))
    _var = "model"

    def check(self, model):
        # The model must be exactly three hidden Dense layers plus a
        # Dense(1) output layer.
        # BUG FIX: message read "should four layers".
        assert (len(model.layers) == 4), \
            ("Your model should have four layers in all. The first three are the hidden layers and the last is the output layer. The output layer looks like `layers.Dense(1)`.")
        layer_classes = [layer.__class__.__name__ for layer in model.layers]
        true_classes = ['Dense', 'Dense', 'Dense', 'Dense']
        # BUG FIX: the old message claimed five layers including
        # Activation layers, contradicting the four-Dense check above.
        assert (layer_classes == true_classes), \
            ("Your model doesn't have the correct kinds of layers. You should have four layers, all of class Dense.")
        # BUG FIX: Keras `layer.input_shape` includes the batch dimension
        # first (it is `(None, num_features)`), so the original
        # `input_shape[0]` compared `None` against the feature count.
        your_inputs = model.layers[0].input_shape[-1]
        assert (your_inputs == _inputs), \
            ("Your model should have {} inputs, but you gave {}.".format(_inputs, your_inputs))
        dense_activations = [layer.activation.__name__ for layer in model.layers]
        true_activations = ['relu', 'relu', 'relu', 'linear']
        # BUG FIX: message read "should be have".
        assert (dense_activations == true_activations), \
            ("Your model doesn't have the correct activations. The hidden `Dense` layers should have `'relu'` activation, while the output layer should be linear (no activation).")
        # Removed: a dead trailing `try: input_shape = dense_layer.input_shape`
        # block whose result was never used.
class Q4(CodingProblem):
    # TODO: checking logic not implemented yet.
    _hint = ""
    _solution = ""
    _var = "model"
    def check(self, model):
        pass

class Q5(CodingProblem):
    # TODO: not implemented yet.
    pass

class Q6(ThoughtExperiment):
    pass

# Learning Rate and Batch Size
class Q7(ThoughtExperiment):
    pass

# Bind the exercises to module-level names q_1 .. q_7 and export them.
qvars = bind_exercises(globals(), [
    Q1, Q2, Q3, Q4, Q5, Q6, Q7,
    ],
    var_format='q_{n}',
)
__all__ = list(qvars)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.