commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
3fb15a0e2fd4b1c9d6fb90ea5db92e99fda578c7 | Create topKFrequent.py | jose-raul-barreras/letscode | topKFrequent.py | topKFrequent.py | #
# Given a non-empty array of integers, return the k most frequent elements.
#
# For example,
# Given [1,1,1,2,2,3] and k = 2, return [1,2].
#
# Note:
# You may assume k is always valid, 1 ≤ k ≤ number of unique elements.
# Your algorithm's time complexity must be better than O(n log n), where n is the array's size.
#
class Solution(object):
def topKFrequent(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[int]
"""
res = {}
for n in nums:
if n not in res:
res[n] = 0
res[n] = res[n]+1
return [elem[0] for pos, elem in enumerate(sorted([(key, res[key]) for key in res], key=lambda x: x[1], reverse=True)) if pos < k]
| mit | Python | |
3190b1e90c4f5de71e766fc97acb6c03b5c6888b | Create tweet-ip.py | hackerspace-ntnu/tweet_ip,hackerspace-ntnu/tweet_ip | tweet-ip.py | tweet-ip.py | from twitter import *
import subprocess
from random import randint
import time
import urllib2
def internet_on():
try:
response=urllib2.urlopen('http://twitter.com',timeout=1)
return True
except urllib2.URLError as err: pass
return False
def getserial():
# Extract serial from cpuinfo file
cpuserial = "0000000000000000"
try:
f = open('/proc/cpuinfo','r')
for line in f:
if line[0:6]=='Serial':
cpuserial = line[10:26]
f.close()
except:
cpuserial = "ERROR000000000"
return cpuserial
while not internet_on():
print "no internetz"
time.sleep(5)
print "internetz!"
rng = randint(1, 999)
complete = None
while not complete:
try:
time.sleep(2)
twitter = Twitter(auth=OAuth('2387200483-QN8mAtIweLdF70TfsEtBuR47zzw86p06MXCY0er', '6nqftquJnaqfqqM8IEiId98iU3i8GzMSDrYDWwQ3o9Z90', 'qtM32rShCUvdwEnO0FwRCw', 'VquAJnz7WXWi190gtnfYla4hGSZ0SsSqylJxvv2Y7o'))
print "Authed with twitter!"
arg='ip route list'
p=subprocess.Popen(arg,shell=True,stdout=subprocess.PIPE)
data = p.communicate()
split_data = data[0].split()
ipaddr = split_data[split_data.index('src')+1]
my_ip = '<%d>(%s) piip: %s' % (rng,getserial(), ipaddr)
print my_ip
twitter.statuses.update(status=my_ip)
print "tweeted!"
complete = True
except TwitterError:
print "TwitterError!! Trying again"
continue
| mit | Python | |
d7a5743bf92627280c2067be7dc496cd81b8353c | add unit tests file | nricklin/leafpy | unit_tests.py | unit_tests.py | import pytest
r = pytest.main(["-s", "tests/unit"])
if r:
raise Exception("There were test failures or errors.") | mit | Python | |
8dc6afa76f2dcfdba4d80c28e9fdfbc278bd8374 | add XFCC-related config tests for invalid values | datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador | python/tests/test_ambassador_module_validation.py | python/tests/test_ambassador_module_validation.py | from typing import List, Tuple
import logging
import pytest
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s test %(levelname)s: %(message)s",
datefmt='%Y-%m-%d %H:%M:%S'
)
logger = logging.getLogger("ambassador")
from ambassador import Cache, IR
from ambassador.compile import Compile
def require_no_errors(ir: IR):
assert ir.aconf.errors == {}
def require_errors(ir: IR, errors: List[Tuple[str, str]]):
flattened_ir_errors: List[str] = []
for key in ir.aconf.errors.keys():
for error in ir.aconf.errors[key]:
flattened_ir_errors.append(f"{key}: {error['error']}")
flattened_wanted_errors: List[str] = [
f"{key}: {error}" for key, error in errors
]
assert sorted(flattened_ir_errors) == sorted(flattened_wanted_errors)
def test_valid_forward_client_cert_details():
yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
name: ambassador
namespace: default
spec:
config:
forward_client_cert_details: SANITIZE_SET
"""
cache = Cache(logger)
r1 = Compile(logger, yaml, k8s=True)
r2 = Compile(logger, yaml, k8s=True, cache=cache)
require_no_errors(r1["ir"])
require_no_errors(r2["ir"])
def test_invalid_forward_client_cert_details():
yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
name: ambassador
namespace: default
spec:
config:
forward_client_cert_details: SANITIZE_INVALID
"""
cache = Cache(logger)
r1 = Compile(logger, yaml, k8s=True)
r2 = Compile(logger, yaml, k8s=True, cache=cache)
require_errors(r1["ir"], [
( "ambassador.default.1", "'forward_client_cert_details' may not be set to 'SANITIZE_INVALID'; it may only be set to one of: SANITIZE, FORWARD_ONLY, APPEND_FORWARD, SANITIZE_SET, ALWAYS_FORWARD_ONLY")
])
require_errors(r2["ir"], [
( "ambassador.default.1", "'forward_client_cert_details' may not be set to 'SANITIZE_INVALID'; it may only be set to one of: SANITIZE, FORWARD_ONLY, APPEND_FORWARD, SANITIZE_SET, ALWAYS_FORWARD_ONLY")
])
def test_valid_set_current_client_cert_details():
yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
name: ambassador
namespace: default
spec:
config:
set_current_client_cert_details:
subject: true
dns: true
"""
cache = Cache(logger)
r1 = Compile(logger, yaml, k8s=True)
r2 = Compile(logger, yaml, k8s=True, cache=cache)
require_no_errors(r1["ir"])
require_no_errors(r2["ir"])
def test_invalid_set_current_client_cert_details_key():
yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
name: ambassador
namespace: default
spec:
config:
set_current_client_cert_details:
invalid: true
"""
cache = Cache(logger)
r1 = Compile(logger, yaml, k8s=True)
r2 = Compile(logger, yaml, k8s=True, cache=cache)
logger.info("R1 IR: %s", r1["ir"].as_json())
require_errors(r1["ir"], [
( "ambassador.default.1", "'set_current_client_cert_details' may not contain key 'invalid'; it may only contain keys: subject, cert, chain, dns, uri")
])
require_errors(r2["ir"], [
( "ambassador.default.1", "'set_current_client_cert_details' may not contain key 'invalid'; it may only contain keys: subject, cert, chain, dns, uri")
])
def test_invalid_set_current_client_cert_details_value():
yaml = """
---
apiVersion: getambassador.io/v2
kind: Module
metadata:
name: ambassador
namespace: default
spec:
config:
set_current_client_cert_details:
subject: invalid
"""
cache = Cache(logger)
r1 = Compile(logger, yaml, k8s=True)
r2 = Compile(logger, yaml, k8s=True, cache=cache)
require_errors(r1["ir"], [
( "ambassador.default.1", "'set_current_client_cert_details' value for key 'subject' may only be 'true' or 'false', not 'invalid'")
])
require_errors(r2["ir"], [
( "ambassador.default.1", "'set_current_client_cert_details' value for key 'subject' may only be 'true' or 'false', not 'invalid'")
])
| apache-2.0 | Python | |
5a2042ebd62cefdda82b6e288b4b6d5b0f527fcd | Add script to add uplaoders to a repo | sorenh/python-django-repomgmt,sorenh/python-django-repomgmt | repomgmt/management/commands/repo-add-uploader.py | repomgmt/management/commands/repo-add-uploader.py | #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from repomgmt.models import Repository
from django.contrib.auth.models import User
class Command(BaseCommand):
args = '<repository> <username>'
help = 'Poll all upstream archives and update local db accordingly'
def handle(self, reponame, username, **options):
repo = Repository.objects.get(name=reponame)
user = User.objects.get(username=username)
repo.uploaders.add(user)
repo.save()
| apache-2.0 | Python | |
09c8399092c3c97be068051306fda057170cf290 | Add LPC residual computation. | cournape/talkbox,cournape/talkbox | scikits/talkbox/linpred/common.py | scikits/talkbox/linpred/common.py | from scipy.signal import lfilter
from scikits.talkbox.linpred import lpc
def lpcres(signal, order, axis = -1):
"""Compute the LPC residual of a signal.
The LPC residual is the 'error' signal from LPC analysis, and is defined
as:
res[n] = x[n] - xe[n] = 1 + a[1] x[n-1] + ... + a[p] x[n-p]
Where x is the input signal and xe the linear prediction of x.
Parameters
----------
signal : array-like
input signal
order : int
LPC order
axis : int
axis along which to compute the LPC residual
Returns
-------
res : array-like
LPC residual
Note
----
The LPC residual can also be seen as the input of the LPC analysis filter.
As the LPC filter is a whitening filter, it is a whitened version of the
signal.
In AR modelling, the residual is simply the estimated excitation of the AR
filter.
"""
return lfilter(lpc(signal, order)[0], 1., signal, axis)
| mit | Python | |
1afe54b237724ce8f06379ef461e5d849ddeec74 | Add Persian Badwords | aetilley/revscoring,eranroz/revscoring,ToAruShiroiNeko/revscoring,he7d3r/revscoring,wiki-ai/revscoring | revscoring/languages/persian.py | revscoring/languages/persian.py | import warnings
import enchant
from .language import Language, LanguageUtility
DICTIONARY = enchant.Dict("fa")
BADWORDS = set([
"کیرم", "ایتالیک", "کونی", "کیر", "فرمود", "آله", "فرموده", "فرمودند",
"جنده", "برووتو", "لعنت", "کون", "السلام", "جمهورمحترم", "کونی",
"کاکاسیاه", "آشغال", "گائیدم", "گوزیده", "مشنگ", "ننتو", "بخواب"
])
def is_misspelled_process():
def is_misspelled(word):
return not DICTIONARY.check(word)
return is_misspelled
def is_badword_process():
def is_badword(word):
return word.lower() in BADWORDS
return is_badword
is_badword = LanguageUtility("is_badword", is_badword_process, depends_on=[])
is_misspelled = LanguageUtility("is_misspelled", is_misspelled_process,
depends_on=[])
persian = Language("revscoring.languages.persian", [is_badword, is_misspelled])
| import warnings
import enchant
from .language import Language, LanguageUtility
DICTIONARY = enchant.Dict("fa")
def is_misspelled_process():
def is_misspelled(word):
return not DICTIONARY.check(word)
return is_misspelled
is_misspelled = LanguageUtility("is_misspelled", is_misspelled_process,
depends_on=[])
persian = Language("revscoring.languages.persian", [is_misspelled])
| mit | Python |
4fe8df5d09c554b45d5097ca0574b47703c9b581 | Add another simpler test for %f | buchuki/pyjaco,mattpap/py2js,chrivers/pyjaco,mattpap/py2js,qsnake/py2js,chrivers/pyjaco,chrivers/pyjaco,buchuki/pyjaco,buchuki/pyjaco,qsnake/py2js | tests/strings/string_format_f_simple.py | tests/strings/string_format_f_simple.py | a = 1.123456
b = 10
c = -30
d = 34
e = 123.456789
f = 892122.129899
# form 0
s = "b=%f" % a
print s
# form 1
s = "b,c,d=%f+%f+%f" % (a, e, f)
print s
| mit | Python | |
215822f6edb48f156a15548ff40d21d76e14d692 | Add markdown as submodule | plotly/dash-core-components | dash_core_components/markdown/__init__.py | dash_core_components/markdown/__init__.py | from .Markdown import Markdown
from .. import _js_dist
from .. import _css_dist
_js_dist.append(
{
'relative_package_path': 'highlight.pack.js',
'namespace': 'dash_core_components'
}
)
_css_dist.append(
{
'relative_package_path': 'highlight.css',
'namespace': 'dash_core_components'
}
)
| mit | Python | |
e6ff67fc67e3c3f1a1513534088743a243e1257a | Add tests to the role logic | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son | tests/app/soc/logic/models/test_role.py | tests/app/soc/logic/models/test_role.py | #!/usr/bin/env python2.5
#
# Copyright 2010 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__authors__ = [
'"Leo (Chong Liu)" <HiddenPython@gmail.com>',
]
import unittest
from google.appengine.api import users
from google.appengine.ext import db
from soc.models import user
from soc.logic import accounts
from soc.models import role
from soc.logic.models.user import logic as user_logic
from soc.logic.models.role import logic as role_logic
class RoleTest(unittest.TestCase):
"""Tests related to role logic.
"""
def setUp(self):
"""Set up required for the role tests.
"""
# Create a user to experiment on
email = "a_user@example.com"
account = users.User(email=email)
link_id = 'a_user'
name = 'A User'
properties = {
'account': account,
'link_id': link_id,
'name': name,
'user_id': link_id,
}
self.user = user_logic.updateOrCreateFromFields(properties)
def createRole(self, user):
"""Create a role for user.
"""
# Create a role for a_user
given_name = 'A'
surname = 'User'
res_street = 'A Street'
res_city = 'A City'
res_country = 'United Kingdom'
res_postalcode = 'A Postalcode'
phone = '01234567'
birth_date = db.DateProperty.now()
properties = {
'link_id': user.link_id,
'scope_path': 'google',
'user': user,
'given_name': given_name,
'surname': surname,
'email': user.account.email(),
'res_street': res_street,
'res_city': res_city,
'res_country': res_country,
'res_postalcode': res_postalcode,
'phone': phone,
'birth_date': birth_date,
}
role = role_logic.updateOrCreateFromFields(properties)
return role, properties
def testCreateRole(self):
"""Test that role can be created for a user.
"""
role, properties = self.createRole(self.user)
for key, value in properties.iteritems():
self.assertEqual(value, getattr(role, key))
def testUpdateRole(self):
"""Test that role can be updated for a user.
"""
role, properties = self.createRole(self.user)
old_given_name = properties['given_name']
new_given_name = 'New'
properties = {
'link_id': self.user.link_id,
'scope_path': 'google',
'given_name': new_given_name,
}
role = role_logic.updateOrCreateFromFields(properties)
updated_given_name = getattr(role, 'given_name')
self.assertEqual(updated_given_name , new_given_name)
self.assertNotEqual(updated_given_name, old_given_name)
def testDeleteRole(self):
"""Test that role can be deleted for a user.
"""
role, properties = self.createRole(self.user)
role_logic.delete(role)
actual = role_logic.getFromKeyFields(properties)
expected = None
self.assertEqual(actual, expected)
def testGetSuggestedInitialPropertiesWithoutAnyRoles(self):
"""Test that an empty dict is returned when the user has no roles.
"""
properties = role_logic.getSuggestedInitialProperties(self.user)
self.assertEqual(properties, {})
def testGetSuggestedInitialProperties(self):
"""Test that correct properties are retrieved.
"""
role, properties = self.createRole(self.user)
initial_properties = role_logic.getSuggestedInitialProperties(self.user)
for key, value in properties.iteritems():
if key in initial_properties:
self.assertEqual(value, initial_properties[key])
| apache-2.0 | Python | |
3657eed1c0f0cf29be85bce03983e5b2c2581b9b | test showing bug in cyl mesh face inner product | simpeg/discretize,simpeg/simpeg,simpeg/discretize,simpeg/discretize | tests/mesh/test_cylMeshInnerProducts.py | tests/mesh/test_cylMeshInnerProducts.py | from SimPEG import Mesh
import numpy as np
import sympy
from sympy.abc import r, t, z
import unittest
TOL = 1e-1
class CylInnerProducts_Test(unittest.TestCase):
def test_FaceInnerProduct(self):
# Here we will make up some j vectors that vary in space
# j = [j_r, j_z] - to test face inner products
j = sympy.Matrix([
r**2 * z,
r * z**2
])
# Create an isotropic sigma vector
Sig = sympy.Matrix([
[540/sympy.pi*(r*z)**2, 0 ],
[ 0 , 540/sympy.pi*(r*z)**4],
])
# Do the inner product! - we are in cyl coordinates!
jTSj = j.T*Sig*j
ans = sympy.integrate(
sympy.integrate(
sympy.integrate(r * jTSj, (r,0,1)), # we are in cyl coordinates
(t,0,2.*sympy.pi)),
(z,0,1))[0] # The `[0]` is to make it an int.
def get_vectors(mesh):
""" Get Vectors sig, sr. jx from sympy"""
f_jr = sympy.lambdify((r,z), j[0], 'numpy')
f_jz = sympy.lambdify((r,z), j[1], 'numpy')
f_sigr = sympy.lambdify((r,z), Sig[0], 'numpy')
f_sigz = sympy.lambdify((r,z), Sig[1], 'numpy')
jr = f_jr(mesh.gridFx[:,0], mesh.gridFx[:,2])
jz = f_jz(mesh.gridFz[:,0], mesh.gridFz[:,2])
sigr = f_sigr(mesh.gridCC[:,0], mesh.gridCC[:,2])
return sigr, np.r_[jr, jz]
n = 100.
mesh = Mesh.CylMesh([n, 1, n])
sig, jv = get_vectors(mesh)
MfSig = mesh.getFaceInnerProduct(sig)
numeric_ans = jv.T.dot(MfSig.dot(jv))
print('------ Testing Face Inner Product-----------')
print(' Analytic: {analytic}, Numeric: {numeric}'.format(analytic=ans, numeric=numeric_ans))
assert(np.abs(ans-numeric_ans) < TOL)
if __name__ == '__main__':
unittest.main()
| mit | Python | |
8ad86651a9d07984c0b1afb0ec7e400288ac6d2e | add pyRpc2 | ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study | python/proto/pyRpc2/__init__.py | python/proto/pyRpc2/__init__.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list ofconditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materialsprovided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
| bsd-2-clause | Python | |
47c2a98d28c8e592035761b4ecfcd1026038fd14 | Add an option to not automatically record interaction for gesture actions. | chuan9/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,jaruba/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,axinging/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,ltilve/chromium,M4sse/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,ondra-novak/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,dednal/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,Fireblend/chromium
-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,bright-sparks/chromium-spacewalk,dednal/chromium.src,Fireblend/chromium-crosswalk,littlstar/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,littlstar/chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,M4sse/chromium.src,Chilledheart/chromium,ltilve/chromium,dushu1203/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,dednal/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,mohamed
--abdel-maksoud/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ltilve/chromium,M4sse/chromium.src,ltilve/chromium,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,Fireblend/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,M4sse/chromium.src,littlstar/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,littlstar/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,ond
ra-novak/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,jaruba/chromium.src | tools/telemetry/telemetry/page/actions/gesture_action.py | tools/telemetry/telemetry/page/actions/gesture_action.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
from telemetry.page.actions import wait
from telemetry import decorators
from telemetry.page.actions import action_runner
from telemetry.web_perf import timeline_interaction_record as tir_module
class GestureAction(page_action.PageAction):
def __init__(self, attributes=None):
super(GestureAction, self).__init__(attributes)
if not hasattr(self, 'automatically_record_interaction'):
self.automatically_record_interaction = True
if hasattr(self, 'wait_after'):
self.wait_action = wait.WaitAction(self.wait_after)
else:
self.wait_action = None
assert self.wait_until is None or self.wait_action is None, (
'gesture cannot have wait_after and wait_until at the same time.')
def RunAction(self, page, tab):
runner = action_runner.ActionRunner(None, tab)
if self.wait_action:
interaction_name = 'Action_%s' % self.__class__.__name__
else:
interaction_name = 'Gesture_%s' % self.__class__.__name__
if self.automatically_record_interaction:
runner.BeginInteraction(interaction_name, [tir_module.IS_SMOOTH])
self.RunGesture(page, tab)
if self.wait_action:
self.wait_action.RunAction(page, tab)
if self.automatically_record_interaction:
runner.EndInteraction(interaction_name, [tir_module.IS_SMOOTH])
def RunGesture(self, page, tab):
raise NotImplementedError()
@staticmethod
def GetGestureSourceTypeFromOptions(tab):
gesture_source_type = tab.browser.synthetic_gesture_source_type
return 'chrome.gpuBenchmarking.' + gesture_source_type.upper() + '_INPUT'
@staticmethod
@decorators.Cache
def IsGestureSourceTypeSupported(tab, gesture_source_type):
# TODO(dominikg): remove once support for
# 'chrome.gpuBenchmarking.gestureSourceTypeSupported' has
# been rolled into reference build.
if tab.EvaluateJavaScript("""
typeof chrome.gpuBenchmarking.gestureSourceTypeSupported ===
'undefined'"""):
return True
return tab.EvaluateJavaScript("""
chrome.gpuBenchmarking.gestureSourceTypeSupported(
chrome.gpuBenchmarking.%s_INPUT)"""
% (gesture_source_type.upper()))
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
from telemetry.page.actions import wait
from telemetry import decorators
from telemetry.page.actions import action_runner
from telemetry.web_perf import timeline_interaction_record as tir_module
class GestureAction(page_action.PageAction):
def __init__(self, attributes=None):
super(GestureAction, self).__init__(attributes)
if hasattr(self, 'wait_after'):
self.wait_action = wait.WaitAction(self.wait_after)
else:
self.wait_action = None
assert self.wait_until is None or self.wait_action is None, (
'gesture cannot have wait_after and wait_until at the same time.')
def RunAction(self, page, tab):
runner = action_runner.ActionRunner(None, tab)
if self.wait_action:
interaction_name = 'Action_%s' % self.__class__.__name__
else:
interaction_name = 'Gesture_%s' % self.__class__.__name__
runner.BeginInteraction(interaction_name, [tir_module.IS_SMOOTH])
self.RunGesture(page, tab)
if self.wait_action:
self.wait_action.RunAction(page, tab)
runner.EndInteraction(interaction_name, [tir_module.IS_SMOOTH])
def RunGesture(self, page, tab):
raise NotImplementedError()
@staticmethod
def GetGestureSourceTypeFromOptions(tab):
gesture_source_type = tab.browser.synthetic_gesture_source_type
return 'chrome.gpuBenchmarking.' + gesture_source_type.upper() + '_INPUT'
@staticmethod
@decorators.Cache
def IsGestureSourceTypeSupported(tab, gesture_source_type):
# TODO(dominikg): remove once support for
# 'chrome.gpuBenchmarking.gestureSourceTypeSupported' has
# been rolled into reference build.
if tab.EvaluateJavaScript("""
typeof chrome.gpuBenchmarking.gestureSourceTypeSupported ===
'undefined'"""):
return True
return tab.EvaluateJavaScript("""
chrome.gpuBenchmarking.gestureSourceTypeSupported(
chrome.gpuBenchmarking.%s_INPUT)"""
% (gesture_source_type.upper()))
| bsd-3-clause | Python |
20a191ad9325909434a6ca806ef69c515cbce6a8 | add new package (#24749) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-neurokit2/package.py | var/spack/repos/builtin/packages/py-neurokit2/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNeurokit2(PythonPackage):
"""The Python Toolbox for Neurophysiological Signal Processing.
This package is the continuation of NeuroKit 1. It's a user-friendly
package providing easy access to advanced biosignal processing routines.
Researchers and clinicians without extensive knowledge of programming or
biomedical signal processing can analyze physiological data with only two
lines of code.
"""
homepage = "https://github.com/neuropsychology/NeuroKit"
pypi = "neurokit2/neurokit2-0.1.2.tar.gz"
version('0.1.2', sha256='5ef40037c2d7078ecb713ab0b77b850267babf133856b59595de9613f29787bc')
depends_on('py-setuptools@040.6.0:', type='build')
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-pandas', type=('build', 'run'))
depends_on('py-scipy', type=('build', 'run'))
depends_on('py-scikit-learn', type=('build', 'run'))
depends_on('py-matplotlib', type=('build', 'run'))
| lgpl-2.1 | Python | |
2322b349aac06395382d26a95b5d965ab0f0b326 | Test save, load functionality in Statespace | josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,bashtage/statsmodels,jseabold/statsmodels,jseabold/statsmodels,bashtage/statsmodels,yl565/statsmodels,jseabold/statsmodels,bert9bert/statsmodels,jseabold/statsmodels,yl565/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,bert9bert/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,yl565/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,ChadFulton/statsmodels,yl565/statsmodels | statsmodels/tsa/statespace/tests/test_save.py | statsmodels/tsa/statespace/tests/test_save.py | """
Tests of save / load / remove_data state space functionality.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import pandas as pd
import os
from statsmodels import datasets
from statsmodels.tsa.statespace import (sarimax, structural, varmax,
dynamic_factor)
from numpy.testing import assert_allclose
macrodata = datasets.macrodata.load_pandas().data
def test_sarimax():
    # Fit a SARIMAX model, round-trip the results through save/load and
    # check that the key statistics survive pickling unchanged.
    mod = sarimax.SARIMAX(macrodata['realgdp'].values, order=(4, 1, 0))
    res = mod.smooth(mod.start_params)
    res.summary()

    res.save('test_save_sarimax.p')
    try:
        res2 = sarimax.SARIMAXResults.load('test_save_sarimax.p')
        assert_allclose(res.params, res2.params)
        assert_allclose(res.bse, res2.bse)
        assert_allclose(res.llf, res2.llf)
    finally:
        # Clean up even when an assertion fails so repeated runs do not
        # leave a stale pickle in the working directory.
        os.unlink('test_save_sarimax.p')
def test_structural():
    # Save/load round-trip for UnobservedComponents results.
    mod = structural.UnobservedComponents(
        macrodata['realgdp'].values, 'llevel')
    res = mod.smooth(mod.start_params)
    res.summary()

    res.save('test_save_structural.p')
    try:
        res2 = structural.UnobservedComponentsResults.load(
            'test_save_structural.p')
        assert_allclose(res.params, res2.params)
        assert_allclose(res.bse, res2.bse)
        assert_allclose(res.llf, res2.llf)
    finally:
        # Remove the pickle even if an assertion fails.
        os.unlink('test_save_structural.p')
def test_dynamic_factor():
    # Save/load round-trip for DynamicFactor results.
    mod = dynamic_factor.DynamicFactor(
        macrodata[['realgdp', 'realcons']].diff().iloc[1:].values, k_factors=1,
        factor_order=1)
    res = mod.smooth(mod.start_params)
    res.summary()

    res.save('test_save_dynamic_factor.p')
    try:
        res2 = dynamic_factor.DynamicFactorResults.load(
            'test_save_dynamic_factor.p')
        assert_allclose(res.params, res2.params)
        assert_allclose(res.bse, res2.bse)
        assert_allclose(res.llf, res2.llf)
    finally:
        # Remove the pickle even if an assertion fails.
        os.unlink('test_save_dynamic_factor.p')
def test_varmax():
    # Save/load round-trip for VARMAX results.
    mod = varmax.VARMAX(
        macrodata[['realgdp', 'realcons']].diff().iloc[1:].values,
        order=(1, 0))
    res = mod.smooth(mod.start_params)
    res.summary()

    res.save('test_save_varmax.p')
    try:
        res2 = varmax.VARMAXResults.load(
            'test_save_varmax.p')
        assert_allclose(res.params, res2.params)
        assert_allclose(res.bse, res2.bse)
        assert_allclose(res.llf, res2.llf)
    finally:
        # Remove the pickle even if an assertion fails.
        os.unlink('test_save_varmax.p')
| bsd-3-clause | Python | |
58624ba3b267fdc0e1ae6d8509c0a1315f22c22f | Initialize P4_autoDownloadTorrent | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter16/PracticeProjects/P4_autoDownloadTorrent.py | books/AutomateTheBoringStuffWithPython/Chapter16/PracticeProjects/P4_autoDownloadTorrent.py | # Write a program that checks an email account every 15 minutes for any instructions
# you email it and executes those instructions automatically.
#
# For example, BitTorrent is a peer-to-peer downloading system. Using free BitTorrent
# software such as qBittorrent, you can download large media files on your home computer.
# If you email the program a (completely legal, not at all piratical) BitTorrent link,
# the program will eventually check its email, find this message, extract the link, and
# then launch qBittorrent to start downloading the file. This way, you can have your
# home computer begin downloads while you’re away, and the (completely legal, not at
# all piratical) download can be finished by the time you return home.
| mit | Python | |
f41585c0bccf63ad1d5d451c0eeb4bb091264416 | test factories stub | sndrtj/pyrefflat | test/test_factories.py | test/test_factories.py | from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
__author__ = 'ahbbollen'
import pytest
from pyrefflat import Reader
from pyrefflat.factories import *
@pytest.fixture(scope="module")
def factory():
factory = RecordFactory()
return factory
@pytest.fixture(scope="module")
def record():
r = Reader(filename="test/data/mini.refFlat")
return next(r)
@pytest.fixture(scope="module")
def from_record(record):
factory = RecordFactory(record)
return factory
class Test_RecordFactory():
def test_creation_empty(self):
new_factory = RecordFactory()
for k, v in new_factory.items.iteritems():
assert v is not None
def test_too_many_args_creation(self):
with pytest.raises(TypeError):
_ = RecordFactory("one", "two")
def test_creation_from_record_basic(self, record):
new_factory = RecordFactory(record)
for k, v in new_factory.items.iteritems():
assert v is not None
def test_from_record_gene(self, from_record):
nrecord = from_record.make()
assert nrecord.gene == "MLH1"
def test_from_record_name(self, from_record):
nrecord = from_record.make()
assert nrecord.transcript == "NM_000249"
def test_from_record_chr(self, from_record):
nrecord = from_record.make()
assert nrecord.chromosome == "chr3"
def test_from_record_strand(self, from_record):
assert from_record.make().strand == "+"
def test_from_record_txstart(self, from_record):
assert from_record.make().txStart == 37034840
def test_from_record_texend(self, from_record):
assert from_record.make().txEnd == 37092337
def test_from_record_cdsstart(self, from_record):
assert from_record.make().cdsStart == 37035038
def test_from_record_cdsend(self, from_record):
assert from_record.make().cdsEnd == 37092144
def test_from_record_nexons(self, from_record):
assert from_record.make().n_exons == 19
def test_from_record_exonstarts(self, from_record):
assert from_record.make().exonStarts == [37034840, 37038109, 37042445, 37045891,
37048481, 37050304, 37053310, 37053501,
37055922, 37058996, 37061800, 37067127,
37070274, 37081676, 37083758, 37089009,
37090007, 37090394, 37091976]
def test_from_record_exonends(self, from_record):
assert from_record.make().exonEnds == [37035154, 37038200, 37042544, 37045965,
37048554, 37050396, 37053353, 37053590,
37056035, 37059090, 37061954, 37067498,
37070423, 37081785, 37083822, 37089174,
37090100, 37090508, 37092337]
def test_empty(self, factory):
for k, v in factory.items.iteritems():
assert v is not None
def test_set_attribute(self, factory):
factory.setattribute("geneName", "MLH1")
assert factory.items["geneName"] == "MLH1"
def test_set_attribute_make_record(self, factory):
factory.setattribute("geneName", "MLH1")
record = factory.make()
assert record.gene == "MLH1"
| mit | Python | |
a2f20be78ad54a6fe118b197cc416dcfdfb6dddf | add tf test file | iViolinSolo/DeepLearning-GetStarted,iViolinSolo/DeepLearning-GetStarted | TF-Demo/AlexNetDemo/test_tf.py | TF-Demo/AlexNetDemo/test_tf.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: violinsolo
# Created on 28/12/2017
import tensorflow as tf
import numpy as np
# Build the sample data 0..19 as a plain Python list of ints.
x = []
for i in range(0, 20):
    x += [i]
print x
# trans to float32
x1 = np.asarray(x, dtype=np.float32)
print 'new x:'
print x1
# Reshape each variant to 4x5 and print tf.reduce_mean over the whole
# matrix and along both axes, first for the int input and then for the
# float32 input -- presumably to contrast integer vs. floating-point
# averaging in TensorFlow (confirm against the printed output).
with tf.Session() as sess:
    m = np.reshape(x, [-1, 5])
    print 'int m: [%s]' % (str(m.shape))
    print m
    print sess.run(tf.reduce_mean(m))
    print sess.run(tf.reduce_mean(m, axis=0))
    print sess.run(tf.reduce_mean(m, axis=1))
    m = np.reshape(x1, [-1, 5])
    print 'float m: [%s]' % (str(m.shape))
    print m
    print sess.run(tf.reduce_mean(m))
    print sess.run(tf.reduce_mean(m, axis=0))
    print sess.run(tf.reduce_mean(m, axis=1))
8026b091b1bae1a3b241b6b23b515ce8b5ec084e | Add openshift inventory plugin | thaim/ansible,thaim/ansible | plugins/inventory/openshift.py | plugins/inventory/openshift.py | #!/bin/python
# (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
inventory: openshift
short_description: Openshift gears external inventory script
description:
- Generates inventory of Openshift gears using the REST interface
- this permit to reuse playbook to setup a Openshift gear
version_added: None
author: Michael Scherer
'''
import urllib2
try:
import json
except ImportError:
import simplejson as json
import os
import os.path
import sys
import ConfigParser
import StringIO
# Lazily-initialised module-level cache for the parsed express.conf.
configparser = None


def get_from_rhc_config(variable):
    """Look up *variable* in the user's ~/.openshift/express.conf.

    Returns the value, or None when the option is absent.  When the file
    does not exist the function implicitly returns None as well.
    """
    global configparser
    CONF_FILE = os.path.expanduser('~/.openshift/express.conf')
    if os.path.exists(CONF_FILE):
        if not configparser:
            # express.conf has no section headers, so a dummy [root]
            # section is prepended to make it parseable by ConfigParser.
            ini_str = '[root]\n' + open(CONF_FILE, 'r').read()
            configparser = ConfigParser.SafeConfigParser()
            configparser.readfp(StringIO.StringIO(ini_str))
        try:
            return configparser.get('root', variable)
        except ConfigParser.NoOptionError:
            return None
def get_config(env_var, config_var):
    """Read a setting from the environment, falling back to express.conf.

    Exits the process with an Ansible-style failure message when the
    setting is found in neither place.
    """
    result = os.getenv(env_var)
    if not result:
        result = get_from_rhc_config(config_var)
    if not result:
        print "failed=True msg='missing %s'" % env_var
        sys.exit(1)
    return result
def get_json_from_api(url):
    """GET *url* from the broker REST API (v1.5) and return the decoded
    'data' field of the JSON response."""
    req = urllib2.Request(url, None, {'Accept': 'application/json; version=1.5'})
    response = urllib2.urlopen(req)
    return json.loads(response.read())['data']
def passwd_setup(top_level_url, username, password):
    """Install a global urllib2 opener that does HTTP basic auth against
    *top_level_url* with the given credentials."""
    # create a password manager
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, top_level_url, username, password)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)
# Resolve credentials and the broker hostname from the environment or
# ~/.openshift/express.conf, then query the broker REST API.
username = get_config('ANSIBLE_OPENSHIFT_USERNAME', 'default_rhlogin')
password = get_config('ANSIBLE_OPENSHIFT_PASSWORD', 'password')
broker_url = 'https://%s/broker/rest/' % get_config('ANSIBLE_OPENSHIFT_BROKER', 'libra_server')

passwd_setup(broker_url, username, password)

# List domains, then the applications of the first domain only.
response = get_json_from_api(broker_url + '/domains')

response = get_json_from_api("%s/domains/%s/applications" %
                             (broker_url, response[0]['id']))

# Build the inventory: one group per application, named after the first
# dash-separated component of the gear's hostname.
result = {}
for app in response:

    # ssh://520311404832ce3e570000ff@blog-johndoe.example.org
    (user, host) = app['ssh_url'][6:].split('@')  # [6:] strips "ssh://"
    app_name = host.split('-')[0]

    result[app_name] = {}
    result[app_name]['hosts'] = []
    result[app_name]['hosts'].append(host)
    result[app_name]['vars'] = {}
    result[app_name]['vars']['ansible_ssh_user'] = user

# External-inventory protocol: --list dumps all groups; --host returns
# per-host variables (none here, so an empty dict).
if len(sys.argv) == 2 and sys.argv[1] == '--list':
    print json.dumps(result)
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
    print json.dumps({})
else:
    print "Need a argument, either --list or --host <host>"
5856ceb23cf639ee1cc3ea45d81a1917c0ef031d | Make a pnacl-finalize tool, that runs the final steps for pnacl ABI stability. | Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client | pnacl/driver/pnacl-finalize.py | pnacl/driver/pnacl-finalize.py | #!/usr/bin/python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# IMPORTANT NOTE: If you make local mods to this file, you must run:
# % pnacl/build.sh driver
# in order for them to take effect in the scons build. This command
# updates the copy in the toolchain/ tree.
#
import driver_tools
import pathtools
from driver_env import env
from driver_log import Log
# Driver-environment defaults for this tool (merged into driver_env.env
# by main()).
EXTRA_ENV = {
  'INPUTS' : '',
  'OUTPUT' : '',
  # Run LLVM opt with optimizations disabled, only stripping.
  'OPT_FLAGS' : '-disable-opt -strip',
  'RUN_OPT' : '${LLVM_OPT} ${OPT_FLAGS} ${input} -o ${output}',
}

# Command-line patterns: "-o <file>" sets OUTPUT, any other dash-option
# is rejected, and bare arguments are collected as inputs.
PrepPatterns = [
  ( ('-o','(.*)'), "env.set('OUTPUT', pathtools.normalize($0))"),
  ( '(-.*)', driver_tools.UnrecognizedOption),
  ( '(.*)', "env.append('INPUTS', pathtools.normalize($0))"),
]
def main(argv):
  """Run the finalization: strip the single input with opt, then re-wrap
  the resulting bitcode.  Returns 0 on success."""
  env.update(EXTRA_ENV)
  driver_tools.ParseArgs(argv, PrepPatterns)

  inputs = env.get('INPUTS')
  output = env.getone('OUTPUT')

  # Exactly one input file is supported.
  if len(inputs) != 1:
    Log.Fatal('Can only have one input')

  # Allow in-place file changes if output isn't specified..
  if output != '':
    f_output = output
  else:
    f_output = inputs[0]

  # Transform the file, and re-wrap the result.
  driver_tools.RunWithEnv('${RUN_OPT}', input=inputs[0], output=f_output)
  driver_tools.WrapBitcode(f_output)
  return 0
def get_help(unused_argv):
  """Return the command-line usage text for this driver tool."""
  script = env.getone('SCRIPT_NAME')
  return """Usage: %s <options> in-file
This tool prepares a PNaCl bitcode application for ABI stability.
The options are:
  -h --help                 Display this output
  -o <file>                 Place the output into <file>. Otherwise, the
                            input file is modified in-place.
""" % script
| bsd-3-clause | Python | |
73027d04a416f24dbaaf685da6eb1893c6c433ab | Add hanabank adapter | ssut/PushBank,ssut/PushBank | adapters/hana.py | adapters/hana.py | #-*- coding: utf-8 -*-
from datetime import datetime, timedelta
import json
from bs4 import BeautifulSoup
import urllib, urllib2
en_name = 'hana'
name = u'하나은행'
def query(account, password, resident):
    """
    Quick balance inquiry for a Hana Bank account.  The account must be
    registered for the bank's quick-inquiry ("빠른조회") service.

    Quick-inquiry service: https://open.hanabank.com/flex/quick/quickService.do?subMenu=1&Ctype=B&cid=OpenB_main_Left&oid=quickservice

    account  -- account number (digits only, no '-')
    password -- account password (4 digits)
    resident -- first 6 digits of the resident registration number
    """
    # Validate inputs before hitting the network.
    if len(password) != 4 or not password.isdigit():
        raise ValueError("password: 비밀번호는 숫자 4자리여야 합니다.")
    if len(resident) != 6 or not resident.isdigit():
        raise ValueError("resident: 주민등록번호 앞 6자리를 입력해주세요.")

    url = 'https://open.hanabank.com/quick_service/inquiryAcct02_01.do'
    # Form fields expected by the quick-inquiry endpoint; the date range
    # requested is the last 7 days up to today.
    params = {
        'ajax': 'true',
        'acctNo': account,
        'acctPw': password,
        'bkfgResRegNo': resident,
        'curCd': '',
        'inqStrDt': (datetime.now() - timedelta(days=7)).strftime('%Y%m%d'),
        'inqEndDt': datetime.now().strftime('%Y%m%d'),
        'rvSeqInqYn': 'Y',
        'rcvWdrwDvCd': '',
        'rqstNcnt': '30',
        'maxRowCount': '700',
        'rqstPage': '1',
        'acctType': '01',
        'language': 'KOR'
    }

    try:
        # POST the form and transcode the EUC-KR response to UTF-8.
        data = urllib.urlencode(params)
        req = urllib2.Request(url, data)
        response = urllib2.urlopen(req)
        data = response.read()
        data = data.decode('euc-kr').encode('utf-8')
        success = True
    except Exception, e:
        # NOTE(review): only HTTP 500 is handled here; any other failure
        # leaves `success` unbound and will raise NameError below.
        # Also, e.getcode()/e.read() assume an HTTPError -- confirm.
        if e.getcode() == 500:
            success = False
            data = e.read()

    d = {
        'success': success,
        'account': account,
    }

    if success:
        # Strip whitespace-like filler and parse the HTML tables.
        data = data.replace(' ', '')
        data = BeautifulSoup(data)
        # First table, second row, first cell holds the current balance.
        balance = data.select('table.tbl_col01 tr:nth-of-type(2) td')[0].text.strip()
        balance = int(balance.replace(',', ''))
        # Second table body: one row per transaction.
        history = [
            [y.text.strip() for y in x.select('td')]
            for x in data.select('table.tbl_col01')[1].select('tbody tr')
        ]
        '''
        Column order:
        date, type, description, deposit amount, withdrawal amount,
        balance, transaction time, branch
        '''
        d['balance'] = balance
        # NOTE(review): per the column order above, x[3] is the deposit
        # column and x[4] the withdrawal column, yet they are stored
        # under 'withdraw' and 'pay' respectively -- verify the mapping.
        d['history'] = [{
            'date': datetime.strptime('{0},{1}'.format(x[0], x[6]),
                                      '%Y-%m-%d,%H:%M').date(),
            'type': x[1],
            'depositor': x[2],
            'withdraw': int(x[3].replace(',', '') if x[3] else '0'),
            'pay': int(x[4].replace(',', '') if x[4] else '0'),
            'balance': int(x[5].replace(',', '')),
            'distributor': x[7],
        } for x in history]
    return d
| mit | Python | |
7f65c70b786024e8213c56448f8d715bda8c0197 | add jsonrpc | hansroh/skitai,hansroh/skitai,hansroh/skitai | skitai/saddle/jsonrpc_executor.py | skitai/saddle/jsonrpc_executor.py | from . import wsgi_executor
try:
import jsonrpclib
except ImportError:
pass
from aquests.protocols.http import respcodes
class Executor (wsgi_executor.Executor):
    """WSGI executor that decodes a JSON-RPC request body, dispatches each
    call to an app method (method name mapped to a URL path by replacing
    '.' with '/'), and returns the serialized JSON-RPC response(s)."""

    def __call__ (self):
        request = self.env ["skitai.was"].request
        data = self.env ["wsgi.input"].read ()
        args = jsonrpclib.loads (data)
        is_multicall = False
        jsonrpc = "2.0"
        path = ""
        # A JSON array means a batch (multicall) request; normalize both
        # shapes into a list of (method, params, id, jsonrpc) thunks.
        if type (args) == type ([]):
            is_multicall = True
            thunks = []
            for each in args:
                thunks.append ((each ["method"], each.get ("params", []), each ['id'], each ['jsonrpc']))
        else:
            thunks = [(args ["method"], args.get ("params", []), args ["id"], args ["jsonrpc"])]

        self.build_was ()
        results = []
        for _method, _args, _rpcid, _jsonrpc in thunks:
            # Map "a.b.c" onto the URL path "/a/b/c" for method lookup.
            path_info = self.env ["PATH_INFO"] = "/" + _method.replace (".", "/")
            current_app, thing, param, respcode = self.find_method (request, path_info, is_multicall is False)
            if respcode:
                # Method lookup failed: record a Fault for this call.
                # NOTE(review): execution still falls through to
                # chained_exec below -- a `continue` here looks intended;
                # confirm against find_method's contract.
                results.append (jsonrpclib.dumps (jsonrpclib.Fault (1, respcodes.get (respcode, "Undefined Error")), rpcid = _rpcid, version = _jsonrpc))
            self.was.subapp = current_app
            try:
                result = self.chained_exec (thing, _args, {})
            except:
                # Any execution error becomes a Fault; the traceback is
                # exposed only when the app runs in debug mode.
                results.append (jsonrpclib.dumps (jsonrpclib.Fault (1, self.was.app.debug and wsgi_executor.traceback () or "Error Occured")))
            else:
                result = jsonrpclib.dumps (
                    result, methodresponse = True,
                    rpcid = _rpcid, version = _jsonrpc
                )
                results.append (result)
            del self.was.subapp

        self.commit ()
        self.was.response ["Content-Type"] = "application/json-rpc"
        del self.was.env
        # Single call -> bare response; batch -> JSON array of responses.
        if len (results) == 1:
            results = results [0]
        else:
            results = "[" + ",".join (results) + "]"
        return results
| mit | Python | |
26ff3cbfcd9aee35da3645573c01717518467e8d | Create main.py | rmotr-curriculum-testing/learn-testing-repo | unit-3-mixed-reading-and-assignment-lessons/lesson-4-assignment-multiple-code-blocks/main.py | unit-3-mixed-reading-and-assignment-lessons/lesson-4-assignment-multiple-code-blocks/main.py | class Operation(object):
def __init__(self, *args):
# Do something here
pass
def operate(self):
raise NotImplementedError()
class AddOperation(Operation):
    """Stub for the addition operation (implementation left to the
    assignment)."""

    # The only method present in this class
    def operate(self):
        pass
class SubtractOperation(Operation):
    """Stub for the subtraction operation (implementation left to the
    assignment)."""

    def operate(self):
        pass
class Calculator(object):
    """Stub for the calculator itself (implementation left to the
    assignment)."""
    pass
| mit | Python | |
b589ab584cc1fdade736d0c166aae73978018dc5 | add channel out example | jagill/treeano,diogo149/treeano,nsauder/treeano,nsauder/treeano,jagill/treeano,diogo149/treeano,jagill/treeano,nsauder/treeano,diogo149/treeano | examples/channel_out/mnist_cnn.py | examples/channel_out/mnist_cnn.py | from __future__ import division, absolute_import
from __future__ import print_function, unicode_literals
import itertools
import numpy as np
import sklearn.datasets
import sklearn.cross_validation
import sklearn.metrics
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
import treeano.lasagne.nodes as tl
import canopy
fX = theano.config.floatX
# ############################### prepare data ###############################
mnist = sklearn.datasets.fetch_mldata('MNIST original')
# theano has a constant float type that it uses (float32 for GPU)
# also rescaling to [0, 1] instead of [0, 255]
X = mnist['data'].reshape(-1, 1, 28, 28).astype(fX) / 255.0
y = mnist['target'].astype("int32")
X_train, X_valid, y_train, y_valid = sklearn.cross_validation.train_test_split(
X, y, random_state=42)
in_train = {"x": X_train, "y": y_train}
in_valid = {"x": X_valid, "y": y_valid}
# ############################## prepare model ##############################
model = tn.HyperparameterNode(
"model",
tn.SequentialNode(
"seq",
[tn.InputNode("x", shape=(None, 1, 28, 28)),
tl.Conv2DNode("conv1"),
tn.ChannelOutNode("co1"),
tl.MaxPool2DNode("mp1"),
tl.Conv2DNode("conv2"),
tn.ChannelOutNode("co2"),
tl.MaxPool2DNode("mp2"),
tn.DenseNode("fc1"),
tn.ChannelOutNode("mo3"),
tn.DropoutNode("do1"),
tn.DenseNode("fc2", num_units=10),
tn.SoftmaxNode("pred"),
]),
num_filters=32,
filter_size=(5, 5),
pool_size=(2, 2),
num_units=256,
num_pieces=2,
dropout_probability=0.5,
inits=[treeano.inits.XavierNormalInit()],
)
with_updates = tn.HyperparameterNode(
"with_updates",
tn.AdamNode(
"adam",
{"subtree": model,
"cost": tn.TotalCostNode("cost", {
"pred": tn.ReferenceNode("pred_ref", reference="model"),
"target": tn.InputNode("y", shape=(None,), dtype="int32")},
)}),
cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build() # build eagerly to share weights
BATCH_SIZE = 500
valid_fn = canopy.handled_fn(
network,
[canopy.handlers.time_call(key="valid_time"),
canopy.handlers.override_hyperparameters(dropout_probability=0),
canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
variables=["x", "y"])],
{"x": "x", "y": "y"},
{"cost": "cost", "pred": "pred"})
def validate(in_map):
valid_out = valid_fn(in_valid)
probabilities = valid_out["pred"]
predicted_classes = np.argmax(probabilities, axis=1)
in_map["valid_cost"] = valid_out["cost"]
in_map["valid_time"] = valid_out["valid_time"]
in_map["valid_accuracy"] = sklearn.metrics.accuracy_score(
y_valid, predicted_classes)
train_fn = canopy.handled_fn(
network,
[canopy.handlers.time_call(key="total_time"),
canopy.handlers.call_after_every(1, validate),
canopy.handlers.time_call(key="train_time"),
canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
variables=["x", "y"])],
{"x": "x", "y": "y"},
{"train_cost": "cost"},
include_updates=True)
# ################################# training #################################
print("Starting training...")
canopy.evaluate_until(fn=train_fn,
gen=itertools.repeat(in_train),
max_iters=25)
| apache-2.0 | Python | |
c95772e8b3119f464dba4b8fd864812a525a4379 | add tests | glasslion/salt-mill | tests/test_core.py | tests/test_core.py | # -*- coding: utf-8 -*-
from saltmill import Mill
from pepper import PepperException
import pytest
def test_login():
mill = Mill()
mill.login()
def test_auto_login():
mill = Mill()
MSG = 'This is a test.'
ret = mill.local('*', 'test.echo',MSG)
assert len(ret['return'][0]) > 0
for salt_id, msg in ret['return'][0].iteritems():
assert msg == MSG
def test_renew_auth_token():
mill = Mill()
mill.login()
mill.auth['token'] = 'invalid'
MSG = 'This is a test.'
ret = mill.local('*', 'test.echo',MSG)
assert len(ret['return'][0]) > 0
| bsd-3-clause | Python | |
83cfb4d135b5eb3eaa4efb3f74ce13d44afb4c5a | Add a test for __main__ | marcelm/cutadapt | tests/test_main.py | tests/test_main.py | import pytest
from cutadapt.__main__ import main
def test_help():
with pytest.raises(SystemExit) as e:
main(["--help"])
assert e.value.args[0] == 0
| mit | Python | |
0275556bcb29f4468c4a7e5b0771686c031e3c94 | Add context test. | MostAwesomeDude/pyfluidsynth | demos/context.py | demos/context.py | #!/usr/bin/env python
import fluidsynth
settings = fluidsynth.FluidSettings()
settings["synth.chorus.active"] = "off"
settings["synth.reverb.active"] = "off"
settings["synth.sample-rate"] = 22050
synth = fluidsynth.FluidSynth(settings)
driver = fluidsynth.FluidAudioDriver(settings, synth)
player = fluidsynth.FluidPlayer(synth)
| mit | Python | |
eb15e17e99212f2d779ef33a1a9dfa7293ad96ad | Add `ProtectedFieldsMixin` for use with `ChangeProtected`s | shawnadelic/shuup,suutari-ai/shoop,akx/shoop,shawnadelic/shuup,jorge-marques/shoop,akx/shoop,shoopio/shoop,shoopio/shoop,jorge-marques/shoop,taedori81/shoop,hrayr-artunyan/shuup,suutari/shoop,suutari-ai/shoop,suutari/shoop,suutari-ai/shoop,shoopio/shoop,taedori81/shoop,taedori81/shoop,jorge-marques/shoop,akx/shoop,hrayr-artunyan/shuup,shawnadelic/shuup,suutari/shoop,hrayr-artunyan/shuup | shoop/core/utils/form_mixins.py | shoop/core/utils/form_mixins.py | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.utils.translation import ugettext_lazy as _
class ProtectedFieldsMixin(object):
    """Form mixin that disables and preserves fields protected by a model
    implementing the ``ChangeProtected`` protocol.

    On construction, each protected field's widget is disabled; on clean,
    the protected fields' submitted values are replaced with the current
    instance values so they cannot be changed through the form.
    """

    change_protect_field_text = _("This field cannot be changed since it is protected.")

    def _get_protected_fields(self):
        """
        Get a tuple of protected fields if set.
        The fields are set in model level when model has `ChangeProtected`
        """
        if self.instance and self.instance.pk:
            are_changes_protected = getattr(self.instance, "_are_changes_protected", None)
            if are_changes_protected:  # Supports the `_are_changes_protected` protocol?
                if not are_changes_protected():  # Not protected though?
                    return ()  # Nothing protected, then.
            return getattr(self.instance, "protected_fields", ())
        return ()

    def disable_protected_fields(self):
        """Disable each protected field's widget, explain why in its help
        text, and drop its required flag (disabled fields submit nothing)."""
        for field in self._get_protected_fields():
            self.fields[field].widget.attrs["disabled"] = True
            self.fields[field].help_text = self.change_protect_field_text
            self.fields[field].required = False

    def clean_protected_fields(self, cleaned_data):
        """
        Ignore protected fields (they are set to `disabled`,
        so they will not be in the form data).

        As a side effect, this removes the fields from `changed_data` too.

        :param cleaned_data: Cleaned data
        :type cleaned_data: dict
        :return: Cleaned data without protected field data
        :rtype: dict
        """
        for field in self._get_protected_fields():
            if field in self.changed_data:
                self.changed_data.remove(field)
            # Always take the value from the instance, never from the form.
            cleaned_data[field] = getattr(self.instance, field)
        return cleaned_data

    def clean(self):
        """Run the normal clean, then overwrite protected field values."""
        return self.clean_protected_fields(super(ProtectedFieldsMixin, self).clean())

    def __init__(self, **kwargs):
        super(ProtectedFieldsMixin, self).__init__(**kwargs)
        # Disable after the parent __init__ so self.fields exists.
        self.disable_protected_fields()
| agpl-3.0 | Python | |
9a82eb7fe4f587b00cca155b84a36c6d590e0e16 | Add tests to patterns | rougeth/bottery | tests/test_patterns.py | tests/test_patterns.py | from bottery import patterns
def test_message_handler_check_positive_match():
    """A message whose text equals the pattern is matched."""
    msg = type('Message', (), {'text': 'ping'})
    assert patterns.MessageHandler(pattern='ping').check(msg)
def test_message_handler_check_negative_match():
    """Matching is case-sensitive by default, so 'Ping' != 'ping'."""
    msg = type('Message', (), {'text': 'Ping'})
    assert not patterns.MessageHandler(pattern='ping').check(msg)
def test_message_handler_check_positive_match_with_sensitive():
    """With sensitive=False, case differences are ignored."""
    msg = type('Message', (), {'text': 'Ping'})
    assert patterns.MessageHandler(pattern='ping', sensitive=False).check(msg)
def test_message_handler_check_negative_match_with_sensitive():
    """Even case-insensitively, different text does not match."""
    msg = type('Message', (), {'text': 'pong'})
    assert not patterns.MessageHandler(pattern='ping', sensitive=False).check(msg)
def test_startswith_handler_check_positive_match():
    """A message beginning with the pattern is matched."""
    msg = type('Message', (), {'text': 'hello my friend'})
    assert patterns.StartswithHandler(pattern='hello').check(msg)
def test_startswith_handler_check_negative_match():
    """A message that does not begin with the pattern is rejected."""
    msg = type('Message', (), {'text': 'Ping'})
    assert not patterns.StartswithHandler(pattern='hello my friend').check(msg)
def test_startswith_handler_check_positive_match_with_sensitive():
    """With sensitive=False the prefix match ignores case."""
    msg = type('Message', (), {'text': 'Hello my friend'})
    assert patterns.StartswithHandler(pattern='hello', sensitive=False).check(msg)
def test_startswith_handler_check_negative_match_with_sensitive():
    """Case-insensitive matching still requires the prefix itself."""
    msg = type('Message', (), {'text': 'pong'})
    assert not patterns.StartswithHandler(pattern='hello', sensitive=False).check(msg)
def test_default_handler():
    """DefaultHandler accepts any message."""
    msg = type('Message', (), {'text': 'pong'})
    assert patterns.DefaultHandler().check(msg)
def test_patterns_handlers():
    """A freshly created PatternsHandler has no registered views."""
    assert not patterns.PatternsHandler().registered
def test_patterns_handler_message():
    """message() yields a decorator that registers the view and returns it."""
    handler = patterns.PatternsHandler()
    decorate = handler.message('ping')

    def view():
        'pong'

    assert callable(decorate)
    assert decorate(view) == view
    assert handler.registered
def test_patterns_handler_startswith():
    """startswith() yields a decorator that registers the view and returns it."""
    handler = patterns.PatternsHandler()
    decorate = handler.startswith('ping')

    def view():
        'pong'

    assert callable(decorate)
    assert decorate(view) == view
    assert handler.registered
| mit | Python | |
2087394a69b3d4ca47e441b2561a0645c9a99e68 | Add test_recharge | gwtsa/gwtsa,pastas/pastas,pastas/pasta | tests/test_recharge.py | tests/test_recharge.py | import pastas as ps
import pandas as pd
def test_linear():
    """Smoke test: a RechargeModel with the 'Linear' recharge model can be
    constructed from 10 days of synthetic precipitation/evaporation."""
    index = pd.date_range("2000-01-01", "2000-01-10")
    prec = pd.Series([1, 2] * 5, index=index)
    evap = prec / 2
    rm = ps.RechargeModel(prec=prec, evap=evap, rfunc=ps.Exponential,
                          recharge="Linear", name="recharge")
    # NOTE(review): pytest ignores test return values; an assertion on rm
    # would make this a stronger test.
    return rm
170373e6f0a1a416a50e16a3fbfb6a2da2b2e700 | Add Site traversal object | usingnamespace/usingnamespace | usingnamespace/api/traversal/v1/site.py | usingnamespace/api/traversal/v1/site.py | import logging
log = logging.getLogger(__name__)
from pyramid.compat import string_types
from .... import models as m
class Site(object):
    """Site

    Pyramid traversal object for a single site ID.
    """

    __name__ = None
    __parent__ = None

    def __init__(self, site_id):
        """Store the site ID.

        :site_id: Site identifier, either an int or a string holding an
                  integer; a non-integer string raises ValueError.
        """
        log.debug("Creating new Site: {}".format(site_id))
        if isinstance(site_id, int):
            self.__name__ = '{}'.format(site_id)
            self.id = site_id
        if isinstance(site_id, string_types):
            self.__name__ = site_id
            try:
                self.id = int(site_id)
            except ValueError:
                raise ValueError('Site ID is not an valid integer value')

    def __getitem__(self, key):
        """Check to see if we can traverse this ..."""
        next_ctx = None

        # No child contexts are defined yet, so traversal below a Site
        # always fails with KeyError.
        if next_ctx is None:
            raise KeyError
        else:
            next_ctx.__parent__ = self
            return next_ctx

    def finalise(self, last=True):
        """Attempts to find out if the site ID is valid

        :last: If this is the last context in the tree.
        :returns: None
        :raises ValueError: when there is no parent context, or (if
            ``last``) when no site with this ID exists.
        """
        if self.__parent__ is not None:
            # Finalise the parent first
            self.__parent__.finalise(last=False)
            # Narrow the parent's site query down to this site's ID
            self.site = self.__parent__.sites
            self.site = self.site.filter(m.Site.id == self.id)
        else:
            # We need a parent ...
            raise ValueError

        if last:
            # Attempt to get a single entry, if we get nothing back we return
            # ValueError
            first = self.site.first()
            if first is None:
                raise ValueError
            else:
                self.entries = first.entries
| isc | Python | |
97fe3384b0e614e17010623af5bccf515ce21845 | Migrate jupyter_{notebook => server}_config.py | verdimrc/linuxcfg,verdimrc/linuxcfg,verdimrc/linuxcfg | .jupyter/jupyter_server_config.py | .jupyter/jupyter_server_config.py | # https://jupyter-server.readthedocs.io/en/stable/operators/migrate-from-nbserver.html
#c.ServerApp.browser = 'chromium-browser'
#c.ServerApp.terminado_settings = { "shell_command": ["/usr/bin/env", "bash"] }
c.ServerApp.open_browser = False
c.ServerApp.port_retries = 0
c.KernelSpecManager.ensure_native_kernel = False
# Needs: pip install environment_kernels
c.ServerApp.kernel_spec_manager_class = 'environment_kernels.EnvironmentKernelSpecManager'
c.EnvironmentKernelSpecManager.find_conda_envs = False
c.EnvironmentKernelSpecManager.virtualenv_env_dirs = ['/home/verdi/.pyenv/versions']
c.FileCheckpoints.checkpoint_dir = '/tmp/.ipynb_checkpoints'
c.FileContentsManager.delete_to_trash = False
| apache-2.0 | Python | |
d4541113581433b63f19f23a9bde249acf8324a8 | Add a vizualization tool | keithw/SSIM,vasilvv/SSIM,keithw/SSIM,vasilvv/SSIM | tools/visualize.py | tools/visualize.py | #!/usr/bin/python
import matplotlib.pyplot as plt
import sys
# NOTE(review): neither branch below exits; the script continues after
# printing the usage/error message.
if len(sys.argv) < 2:
    print "Usage: vizualize.py file1[:label1] file2[:label2] ..."

# One plot color per input file; frame-type markers: I=star, P=square,
# B=circle.
colors = ['g', 'b', 'r', '#F800F0', '#00E8CC', '#E8E800']
markers = { 'I' : '*', 'P' : 's', 'B' : 'o' }

if len(sys.argv) - 1 > len(colors):
    print "Too many files specified"
def parse_arg(s):
    """Split a "file[:label]" argument into a (filename, label) tuple.

    A bare filename labels itself.  Only the first ':' separates the two
    parts, so labels may contain further colons.
    """
    if ':' in s:
        # BUG FIX: the original called s.split(s, 1) -- splitting the
        # string on itself -- which always produced ('', '').
        return tuple(s.split(':', 1))
    else:
        return (s, s)
sources = map(parse_arg, sys.argv[1:])

# Plot one SSIM-per-frame curve per input file, plus per-frame-type
# markers on top of the curve.
for (filename, label), color in zip(sources, colors):
    f = open(filename, 'r')
    x, y = [], []
    per_type = { 'I' : [], 'P' : [], 'B' : [] }
    for line in f:
        # Each line: "<frame-number> <ssim> <frame-type> <rest...>"
        num, ssim, frametype, _ = line.strip().split(' ', 3)
        num = int(num)
        ssim = float(ssim)
        x.append(num)
        y.append(ssim)
        per_type[frametype].append(num)
    plt.plot(x, y, '-', color = color)
    for frametype, marker in markers.items():
        mx = per_type[frametype]
        # NOTE(review): y[x] indexes by frame number, which assumes frames
        # are numbered 0..n-1 consecutively; `x` also shadows the outer
        # x list here, and `label` is never used -- confirm intent.
        plt.plot(mx, [y[x] for x in mx], marker, color = color)

plt.show()
| mit | Python | |
5fc7fa839616213d07ad85e164f6639ff1225065 | Add override for createsuperuser | fotinakis/sentry,daevaorn/sentry,BuildingLink/sentry,mvaled/sentry,JamesMura/sentry,BuildingLink/sentry,ifduyue/sentry,jean/sentry,BuildingLink/sentry,JackDanger/sentry,JamesMura/sentry,beeftornado/sentry,alexm92/sentry,JackDanger/sentry,gencer/sentry,zenefits/sentry,JamesMura/sentry,mvaled/sentry,alexm92/sentry,ifduyue/sentry,BuildingLink/sentry,zenefits/sentry,mvaled/sentry,alexm92/sentry,ifduyue/sentry,fotinakis/sentry,JackDanger/sentry,JamesMura/sentry,mvaled/sentry,fotinakis/sentry,nicholasserra/sentry,mvaled/sentry,mitsuhiko/sentry,nicholasserra/sentry,mvaled/sentry,looker/sentry,gencer/sentry,looker/sentry,zenefits/sentry,zenefits/sentry,fotinakis/sentry,jean/sentry,nicholasserra/sentry,ifduyue/sentry,looker/sentry,jean/sentry,zenefits/sentry,BuildingLink/sentry,gencer/sentry,gencer/sentry,looker/sentry,daevaorn/sentry,daevaorn/sentry,daevaorn/sentry,beeftornado/sentry,JamesMura/sentry,gencer/sentry,jean/sentry,ifduyue/sentry,mitsuhiko/sentry,looker/sentry,jean/sentry,beeftornado/sentry | src/sentry/management/commands/createsuperuser.py | src/sentry/management/commands/createsuperuser.py | from __future__ import absolute_import, print_function
from django.core.management import call_command
from django.contrib.auth.management.commands.createsuperuser import Command
class Command(Command):
help = 'Performs any pending database migrations and upgrades'
def handle(self, **options):
call_command(
'createuser',
is_superuser=True,
**options
)
| bsd-3-clause | Python | |
7f860b23975150642bd6f8d244bce96d401603b0 | Improve the help text for the rdp options | gooddata/openstack-nova,hanlind/nova,jianghuaw/nova,cloudbase/nova,rajalokan/nova,vmturbo/nova,klmitch/nova,klmitch/nova,openstack/nova,cloudbase/nova,sebrandon1/nova,Juniper/nova,mikalstill/nova,sebrandon1/nova,mikalstill/nova,openstack/nova,jianghuaw/nova,alaski/nova,gooddata/openstack-nova,mahak/nova,Juniper/nova,Juniper/nova,phenoxim/nova,mahak/nova,alaski/nova,rajalokan/nova,vmturbo/nova,bigswitch/nova,phenoxim/nova,sebrandon1/nova,mikalstill/nova,rahulunair/nova,klmitch/nova,Juniper/nova,cloudbase/nova,hanlind/nova,rajalokan/nova,jianghuaw/nova,vmturbo/nova,klmitch/nova,rahulunair/nova,bigswitch/nova,gooddata/openstack-nova,mahak/nova,openstack/nova,jianghuaw/nova,hanlind/nova,vmturbo/nova,rajalokan/nova,gooddata/openstack-nova,rahulunair/nova | nova/conf/rdp.py | nova/conf/rdp.py | # Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
enabled = cfg.BoolOpt('enabled',
default=False,
help="""
Enables RDP related features
Hyper-V, unlike the majority of the hypervisors employed on
Nova compute nodes, uses RDP instead of VNC and SPICE as a
desktop sharing protocol to provide instance console access.
This option enables RDP for graphical console access
for virtual machines created by Hyper-V.
* Possible values:
True or False(default).
* Services that use this:
``nova-compute``
* Related options:
None
""")
html5_proxy_base_url = cfg.StrOpt('html5_proxy_base_url',
default='http://127.0.0.1:6083/',
help="""
Location of RDP html5 console proxy
In order to use the web based console access, FreeRDP HTML5
proxy should be configured and installed.
* Possible values:
Must be a valid URL of the form:``http://host:port/"
where host and port should be configured in the node
running FreeRDP HTML5 proxy.
* Services that use this:
``nova-compute``
* Related options:
[rdp]enabled = True
""")
ALL_OPTS = [enabled,
html5_proxy_base_url]
def register_opts(conf):
conf.register_opts(ALL_OPTS, group="rdp")
def list_opts():
return {"rdp": ALL_OPTS}
| # Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
enabled = cfg.BoolOpt('enabled',
default=False,
help='Enable RDP related features')
html5_proxy_base_url = cfg.StrOpt('html5_proxy_base_url',
default='http://127.0.0.1:6083/',
help='Location of RDP html5 console proxy, in the form '
'"http://127.0.0.1:6083/"')
ALL_OPTS = [enabled,
html5_proxy_base_url]
def register_opts(conf):
conf.register_opts(ALL_OPTS, group="rdp")
def list_opts():
return {"rdp": ALL_OPTS}
| apache-2.0 | Python |
e7d86c77471d3b0890287e0ca32ecfb94b80abda | add util method for Leave One Out crossvalidation | kaichogami/scikit-learn,simon-pepin/scikit-learn,iismd17/scikit-learn,RomainBrault/scikit-learn,liangz0707/scikit-learn,siutanwong/scikit-learn,rsivapr/scikit-learn,mikebenfield/scikit-learn,massmutual/scikit-learn,arjoly/scikit-learn,lenovor/scikit-learn,lesteve/scikit-learn,mattgiguere/scikit-learn,mattgiguere/scikit-learn,Garrett-R/scikit-learn,hitszxp/scikit-learn,arahuja/scikit-learn,amueller/scikit-learn,mrshu/scikit-learn,potash/scikit-learn,mlyundin/scikit-learn,chrsrds/scikit-learn,harshaneelhg/scikit-learn,michigraber/scikit-learn,ominux/scikit-learn,poryfly/scikit-learn,cauchycui/scikit-learn,IndraVikas/scikit-learn,nrhine1/scikit-learn,mjgrav2001/scikit-learn,Adai0808/scikit-learn,nesterione/scikit-learn,cybernet14/scikit-learn,Clyde-fare/scikit-learn,ycaihua/scikit-learn,btabibian/scikit-learn,rahul-c1/scikit-learn,abimannans/scikit-learn,robin-lai/scikit-learn,dsquareindia/scikit-learn,yask123/scikit-learn,mattgiguere/scikit-learn,zihua/scikit-learn,vybstat/scikit-learn,lesteve/scikit-learn,bigdataelephants/scikit-learn,liyu1990/sklearn,trungnt13/scikit-learn,sinhrks/scikit-learn,vibhorag/scikit-learn,hsuantien/scikit-learn,eg-zhang/scikit-learn,PatrickOReilly/scikit-learn,zuku1985/scikit-learn,jseabold/scikit-learn,xuewei4d/scikit-learn,0x0all/scikit-learn,justincassidy/scikit-learn,vshtanko/scikit-learn,Windy-Ground/scikit-learn,JeanKossaifi/scikit-learn,robin-lai/scikit-learn,Sentient07/scikit-learn,ashhher3/scikit-learn,kashif/scikit-learn,fredhusser/scikit-learn,henrykironde/scikit-learn,roxyboy/scikit-learn,bikong2/scikit-learn,Vimos/scikit-learn,RachitKansal/scikit-learn,shikhardb/scikit-learn,andrewnc/scikit-learn,liberatorqjw/scikit-learn,ssaeger/scikit-learn,mwv/scikit-learn,mikebenfield/scikit-learn,nrhine1/scikit-learn,tawsifkhan/scikit-learn,ltiao/scikit-learn,beepee14/scikit-learn,tmhm/scikit-learn,nvoron23/scikit-learn,arjoly/scikit-learn,shyam
alschandra/scikit-learn,jorge2703/scikit-learn,MechCoder/scikit-learn,xzh86/scikit-learn,TomDLT/scikit-learn,nhejazi/scikit-learn,khkaminska/scikit-learn,ndingwall/scikit-learn,murali-munna/scikit-learn,pythonvietnam/scikit-learn,elkingtonmcb/scikit-learn,jlegendary/scikit-learn,jmschrei/scikit-learn,nesterione/scikit-learn,rsivapr/scikit-learn,jlegendary/scikit-learn,giorgiop/scikit-learn,ahoyosid/scikit-learn,saiwing-yeung/scikit-learn,stylianos-kampakis/scikit-learn,altairpearl/scikit-learn,bikong2/scikit-learn,olologin/scikit-learn,tdhopper/scikit-learn,appapantula/scikit-learn,jpautom/scikit-learn,sumspr/scikit-learn,raghavrv/scikit-learn,wzbozon/scikit-learn,nikitasingh981/scikit-learn,kevin-intel/scikit-learn,sergeyf/scikit-learn,CVML/scikit-learn,UNR-AERIAL/scikit-learn,ClimbsRocks/scikit-learn,pythonvietnam/scikit-learn,evgchz/scikit-learn,Akshay0724/scikit-learn,espg/scikit-learn,adamgreenhall/scikit-learn,thilbern/scikit-learn,tosolveit/scikit-learn,jmetzen/scikit-learn,IndraVikas/scikit-learn,Aasmi/scikit-learn,bigdataelephants/scikit-learn,jorge2703/scikit-learn,ogrisel/scikit-learn,sgenoud/scikit-learn,PatrickChrist/scikit-learn,mrshu/scikit-learn,mwv/scikit-learn,YinongLong/scikit-learn,wanggang3333/scikit-learn,AlexanderFabisch/scikit-learn,zaxtax/scikit-learn,jayflo/scikit-learn,mfjb/scikit-learn,OshynSong/scikit-learn,zhenv5/scikit-learn,akionakamura/scikit-learn,fyffyt/scikit-learn,mugizico/scikit-learn,DSLituiev/scikit-learn,vibhorag/scikit-learn,idlead/scikit-learn,manashmndl/scikit-learn,xwolf12/scikit-learn,btabibian/scikit-learn,jorge2703/scikit-learn,vortex-ape/scikit-learn,manhhomienbienthuy/scikit-learn,JPFrancoia/scikit-learn,ChanChiChoi/scikit-learn,liyu1990/sklearn,B3AU/waveTree,robbymeals/scikit-learn,JPFrancoia/scikit-learn,poryfly/scikit-learn,treycausey/scikit-learn,liyu1990/sklearn,deepesch/scikit-learn,YinongLong/scikit-learn,mayblue9/scikit-learn,bhargav/scikit-learn,arabenjamin/scikit-learn,phdowling/scikit-learn,michigraber/sci
kit-learn,AlexRobson/scikit-learn,larsmans/scikit-learn,pkruskal/scikit-learn,toastedcornflakes/scikit-learn,davidgbe/scikit-learn,siutanwong/scikit-learn,potash/scikit-learn,ephes/scikit-learn,glennq/scikit-learn,davidgbe/scikit-learn,rsivapr/scikit-learn,xuewei4d/scikit-learn,alvarofierroclavero/scikit-learn,ldirer/scikit-learn,MechCoder/scikit-learn,jjx02230808/project0223,khkaminska/scikit-learn,joshloyal/scikit-learn,wazeerzulfikar/scikit-learn,mugizico/scikit-learn,NelisVerhoef/scikit-learn,pypot/scikit-learn,shangwuhencc/scikit-learn,HolgerPeters/scikit-learn,shusenl/scikit-learn,idlead/scikit-learn,glouppe/scikit-learn,clemkoa/scikit-learn,Sentient07/scikit-learn,lesteve/scikit-learn,PatrickOReilly/scikit-learn,JosmanPS/scikit-learn,belltailjp/scikit-learn,macks22/scikit-learn,jorge2703/scikit-learn,mehdidc/scikit-learn,zuku1985/scikit-learn,petosegan/scikit-learn,nmayorov/scikit-learn,RachitKansal/scikit-learn,shusenl/scikit-learn,clemkoa/scikit-learn,IndraVikas/scikit-learn,Djabbz/scikit-learn,sarahgrogan/scikit-learn,saiwing-yeung/scikit-learn,akionakamura/scikit-learn,andrewnc/scikit-learn,JsNoNo/scikit-learn,AlexRobson/scikit-learn,Lawrence-Liu/scikit-learn,abhishekkrthakur/scikit-learn,liangz0707/scikit-learn,LohithBlaze/scikit-learn,Srisai85/scikit-learn,PrashntS/scikit-learn,thientu/scikit-learn,wazeerzulfikar/scikit-learn,devanshdalal/scikit-learn,samuel1208/scikit-learn,Obus/scikit-learn,pratapvardhan/scikit-learn,themrmax/scikit-learn,cwu2011/scikit-learn,ningchi/scikit-learn,herilalaina/scikit-learn,cainiaocome/scikit-learn,mattilyra/scikit-learn,AlexandreAbraham/scikit-learn,akionakamura/scikit-learn,mblondel/scikit-learn,samuel1208/scikit-learn,cdegroc/scikit-learn,themrmax/scikit-learn,anntzer/scikit-learn,anntzer/scikit-learn,3manuek/scikit-learn,eickenberg/scikit-learn,Nyker510/scikit-learn,victorbergelin/scikit-learn,ngoix/OCRF,wlamond/scikit-learn,betatim/scikit-learn,abimannans/scikit-learn,pv/scikit-learn,zorojean/scikit-learn,billy-inn/
scikit-learn,herilalaina/scikit-learn,kjung/scikit-learn,krez13/scikit-learn,amueller/scikit-learn,eickenberg/scikit-learn,terkkila/scikit-learn,RomainBrault/scikit-learn,depet/scikit-learn,hugobowne/scikit-learn,henridwyer/scikit-learn,fredhusser/scikit-learn,chrsrds/scikit-learn,aewhatley/scikit-learn,ilyes14/scikit-learn,pv/scikit-learn,xyguo/scikit-learn,abhishekkrthakur/scikit-learn,michigraber/scikit-learn,eg-zhang/scikit-learn,rsivapr/scikit-learn,joernhees/scikit-learn,cwu2011/scikit-learn,rahuldhote/scikit-learn,Akshay0724/scikit-learn,aminert/scikit-learn,nesterione/scikit-learn,fzalkow/scikit-learn,dhruv13J/scikit-learn,aetilley/scikit-learn,sinhrks/scikit-learn,rrohan/scikit-learn,mfjb/scikit-learn,espg/scikit-learn,nelson-liu/scikit-learn,madjelan/scikit-learn,rexshihaoren/scikit-learn,icdishb/scikit-learn,roxyboy/scikit-learn,mikebenfield/scikit-learn,pompiduskus/scikit-learn,lin-credible/scikit-learn,ishanic/scikit-learn,ltiao/scikit-learn,kagayakidan/scikit-learn,herilalaina/scikit-learn,thientu/scikit-learn,dingocuster/scikit-learn,Obus/scikit-learn,depet/scikit-learn,BiaDarkia/scikit-learn,kevin-intel/scikit-learn,stylianos-kampakis/scikit-learn,Vimos/scikit-learn,yyjiang/scikit-learn,madjelan/scikit-learn,shenzebang/scikit-learn,olologin/scikit-learn,IshankGulati/scikit-learn,djgagne/scikit-learn,scikit-learn/scikit-learn,Akshay0724/scikit-learn,ndingwall/scikit-learn,Titan-C/scikit-learn,simon-pepin/scikit-learn,chrisburr/scikit-learn,shenzebang/scikit-learn,macks22/scikit-learn,xyguo/scikit-learn,yyjiang/scikit-learn,fzalkow/scikit-learn,tawsifkhan/scikit-learn,samuel1208/scikit-learn,tmhm/scikit-learn,sanketloke/scikit-learn,petosegan/scikit-learn,nomadcube/scikit-learn,loli/sklearn-ensembletrees,robbymeals/scikit-learn,luo66/scikit-learn,AnasGhrab/scikit-learn,fzalkow/scikit-learn,belltailjp/scikit-learn,pythonvietnam/scikit-learn,xyguo/scikit-learn,mxjl620/scikit-learn,cauchycui/scikit-learn,PatrickChrist/scikit-learn,toastedcornflakes/scikit
-learn,ldirer/scikit-learn,rohanp/scikit-learn,hainm/scikit-learn,siutanwong/scikit-learn,mhdella/scikit-learn,dsquareindia/scikit-learn,lesteve/scikit-learn,simon-pepin/scikit-learn,mlyundin/scikit-learn,ilo10/scikit-learn,siutanwong/scikit-learn,depet/scikit-learn,billy-inn/scikit-learn,betatim/scikit-learn,wzbozon/scikit-learn,mhdella/scikit-learn,thilbern/scikit-learn,wazeerzulfikar/scikit-learn,anirudhjayaraman/scikit-learn,MartinDelzant/scikit-learn,billy-inn/scikit-learn,Myasuka/scikit-learn,davidgbe/scikit-learn,maheshakya/scikit-learn,MatthieuBizien/scikit-learn,aewhatley/scikit-learn,kmike/scikit-learn,rvraghav93/scikit-learn,iismd17/scikit-learn,RachitKansal/scikit-learn,Obus/scikit-learn,Barmaley-exe/scikit-learn,jkarnows/scikit-learn,liberatorqjw/scikit-learn,yask123/scikit-learn,fabianp/scikit-learn,eickenberg/scikit-learn,altairpearl/scikit-learn,rvraghav93/scikit-learn,glennq/scikit-learn,jayflo/scikit-learn,rahul-c1/scikit-learn,dhruv13J/scikit-learn,r-mart/scikit-learn,xuewei4d/scikit-learn,hugobowne/scikit-learn,AlexanderFabisch/scikit-learn,kmike/scikit-learn,eickenberg/scikit-learn,heli522/scikit-learn,3manuek/scikit-learn,joernhees/scikit-learn,vermouthmjl/scikit-learn,cybernet14/scikit-learn,hrjn/scikit-learn,alexsavio/scikit-learn,vybstat/scikit-learn,joshloyal/scikit-learn,henridwyer/scikit-learn,mlyundin/scikit-learn,NunoEdgarGub1/scikit-learn,shusenl/scikit-learn,jorik041/scikit-learn,DonBeo/scikit-learn,jjx02230808/project0223,robbymeals/scikit-learn,toastedcornflakes/scikit-learn,mattilyra/scikit-learn,Titan-C/scikit-learn,Fireblend/scikit-learn,OshynSong/scikit-learn,iismd17/scikit-learn,bthirion/scikit-learn,ngoix/OCRF,aabadie/scikit-learn,jereze/scikit-learn,dsullivan7/scikit-learn,Lawrence-Liu/scikit-learn,nvoron23/scikit-learn,alexsavio/scikit-learn,ephes/scikit-learn,PrashntS/scikit-learn,russel1237/scikit-learn,xyguo/scikit-learn,cainiaocome/scikit-learn,fabianp/scikit-learn,0asa/scikit-learn,mehdidc/scikit-learn,jakobworldpeace/s
cikit-learn,ChanChiChoi/scikit-learn,nvoron23/scikit-learn,procoder317/scikit-learn,hainm/scikit-learn,Lawrence-Liu/scikit-learn,adamgreenhall/scikit-learn,andaag/scikit-learn,krez13/scikit-learn,gotomypc/scikit-learn,sinhrks/scikit-learn,hsiaoyi0504/scikit-learn,fyffyt/scikit-learn,mojoboss/scikit-learn,jzt5132/scikit-learn,mhue/scikit-learn,frank-tancf/scikit-learn,ashhher3/scikit-learn,kaichogami/scikit-learn,Obus/scikit-learn,mojoboss/scikit-learn,bhargav/scikit-learn,khkaminska/scikit-learn,MohammedWasim/scikit-learn,etkirsch/scikit-learn,jaidevd/scikit-learn,cl4rke/scikit-learn,Akshay0724/scikit-learn,glennq/scikit-learn,zaxtax/scikit-learn,mjudsp/Tsallis,ndingwall/scikit-learn,cdegroc/scikit-learn,dhruv13J/scikit-learn,ZENGXH/scikit-learn,0x0all/scikit-learn,waterponey/scikit-learn,harshaneelhg/scikit-learn,ningchi/scikit-learn,lazywei/scikit-learn,nvoron23/scikit-learn,rrohan/scikit-learn,shahankhatch/scikit-learn,0asa/scikit-learn,luo66/scikit-learn,cwu2011/scikit-learn,aewhatley/scikit-learn,massmutual/scikit-learn,Djabbz/scikit-learn,yunfeilu/scikit-learn,arahuja/scikit-learn,mhue/scikit-learn,JosmanPS/scikit-learn,NelisVerhoef/scikit-learn,Garrett-R/scikit-learn,tomlof/scikit-learn,alexeyum/scikit-learn,moutai/scikit-learn,pypot/scikit-learn,rajat1994/scikit-learn,AnasGhrab/scikit-learn,maheshakya/scikit-learn,fzalkow/scikit-learn,Adai0808/scikit-learn,LiaoPan/scikit-learn,sergeyf/scikit-learn,aminert/scikit-learn,rohanp/scikit-learn,PatrickOReilly/scikit-learn,Barmaley-exe/scikit-learn,yonglehou/scikit-learn,devanshdalal/scikit-learn,xavierwu/scikit-learn,devanshdalal/scikit-learn,Achuth17/scikit-learn,kylerbrown/scikit-learn,fengzhyuan/scikit-learn,NunoEdgarGub1/scikit-learn,meduz/scikit-learn,imaculate/scikit-learn,ssaeger/scikit-learn,imaculate/scikit-learn,henrykironde/scikit-learn,NelisVerhoef/scikit-learn,ankurankan/scikit-learn,rexshihaoren/scikit-learn,LiaoPan/scikit-learn,jakirkham/scikit-learn,justincassidy/scikit-learn,etkirsch/scikit-learn,z
axtax/scikit-learn,jkarnows/scikit-learn,voxlol/scikit-learn,Nyker510/scikit-learn,nrhine1/scikit-learn,loli/sklearn-ensembletrees,vybstat/scikit-learn,alexeyum/scikit-learn,xzh86/scikit-learn,Barmaley-exe/scikit-learn,AlexanderFabisch/scikit-learn,murali-munna/scikit-learn,lin-credible/scikit-learn,ishanic/scikit-learn,Achuth17/scikit-learn,zhenv5/scikit-learn,hlin117/scikit-learn,joernhees/scikit-learn,ivannz/scikit-learn,B3AU/waveTree,florian-f/sklearn,jm-begon/scikit-learn,pythonvietnam/scikit-learn,kmike/scikit-learn,treycausey/scikit-learn,loli/sklearn-ensembletrees,smartscheduling/scikit-learn-categorical-tree,carrillo/scikit-learn,mjgrav2001/scikit-learn,mjgrav2001/scikit-learn,russel1237/scikit-learn,AIML/scikit-learn,samzhang111/scikit-learn,beepee14/scikit-learn,Lawrence-Liu/scikit-learn,Aasmi/scikit-learn,bnaul/scikit-learn,jseabold/scikit-learn,yanlend/scikit-learn,walterreade/scikit-learn,marcocaccin/scikit-learn,idlead/scikit-learn,shenzebang/scikit-learn,pypot/scikit-learn,shenzebang/scikit-learn,theoryno3/scikit-learn,466152112/scikit-learn,zhenv5/scikit-learn,HolgerPeters/scikit-learn,quheng/scikit-learn,lucidfrontier45/scikit-learn,bigdataelephants/scikit-learn,r-mart/scikit-learn,huobaowangxi/scikit-learn,wlamond/scikit-learn,samzhang111/scikit-learn,RomainBrault/scikit-learn,fabioticconi/scikit-learn,costypetrisor/scikit-learn,idlead/scikit-learn,liyu1990/sklearn,trankmichael/scikit-learn,spallavolu/scikit-learn,BiaDarkia/scikit-learn,terkkila/scikit-learn,ilyes14/scikit-learn,appapantula/scikit-learn,fbagirov/scikit-learn,Myasuka/scikit-learn,meduz/scikit-learn,procoder317/scikit-learn,shahankhatch/scikit-learn,DonBeo/scikit-learn,equialgo/scikit-learn,aewhatley/scikit-learn,jakirkham/scikit-learn,stylianos-kampakis/scikit-learn,mxjl620/scikit-learn,tosolveit/scikit-learn,fabianp/scikit-learn,cainiaocome/scikit-learn,schets/scikit-learn,vshtanko/scikit-learn,pnedunuri/scikit-learn,IndraVikas/scikit-learn,TomDLT/scikit-learn,mblondel/scikit-lear
n,sgenoud/scikit-learn,cdegroc/scikit-learn,potash/scikit-learn,AlexanderFabisch/scikit-learn,RayMick/scikit-learn,macks22/scikit-learn,DSLituiev/scikit-learn,pianomania/scikit-learn,hlin117/scikit-learn,B3AU/waveTree,alexsavio/scikit-learn,pianomania/scikit-learn,tawsifkhan/scikit-learn,xwolf12/scikit-learn,MartinDelzant/scikit-learn,rvraghav93/scikit-learn,tosolveit/scikit-learn,RPGOne/scikit-learn,huobaowangxi/scikit-learn,yanlend/scikit-learn,shangwuhencc/scikit-learn,lbishal/scikit-learn,sergeyf/scikit-learn,loli/sklearn-ensembletrees,Achuth17/scikit-learn,murali-munna/scikit-learn,frank-tancf/scikit-learn,Jimmy-Morzaria/scikit-learn,vigilv/scikit-learn,Sentient07/scikit-learn,manashmndl/scikit-learn,qifeigit/scikit-learn,mfjb/scikit-learn,q1ang/scikit-learn,aflaxman/scikit-learn,plissonf/scikit-learn,Achuth17/scikit-learn,qifeigit/scikit-learn,fabioticconi/scikit-learn,abimannans/scikit-learn,466152112/scikit-learn,huobaowangxi/scikit-learn,vivekmishra1991/scikit-learn,procoder317/scikit-learn,PrashntS/scikit-learn,arahuja/scikit-learn,hitszxp/scikit-learn,vigilv/scikit-learn,mjgrav2001/scikit-learn,vibhorag/scikit-learn,gclenaghan/scikit-learn,zuku1985/scikit-learn,themrmax/scikit-learn,poryfly/scikit-learn,tomlof/scikit-learn,ilyes14/scikit-learn,jjx02230808/project0223,MartinDelzant/scikit-learn,ahoyosid/scikit-learn,scikit-learn/scikit-learn,IshankGulati/scikit-learn,fabioticconi/scikit-learn,Jimmy-Morzaria/scikit-learn,ishanic/scikit-learn,nrhine1/scikit-learn,mrshu/scikit-learn,vivekmishra1991/scikit-learn,ChanderG/scikit-learn,giorgiop/scikit-learn,raghavrv/scikit-learn,eg-zhang/scikit-learn,lazywei/scikit-learn,vermouthmjl/scikit-learn,RomainBrault/scikit-learn,theoryno3/scikit-learn,depet/scikit-learn,samuel1208/scikit-learn,abhishekgahlot/scikit-learn,shahankhatch/scikit-learn,466152112/scikit-learn,jseabold/scikit-learn,victorbergelin/scikit-learn,MartinSavc/scikit-learn,ChanChiChoi/scikit-learn,nomadcube/scikit-learn,Fireblend/scikit-learn,henrykir
onde/scikit-learn,MatthieuBizien/scikit-learn,btabibian/scikit-learn,lucidfrontier45/scikit-learn,russel1237/scikit-learn,marcocaccin/scikit-learn,rsivapr/scikit-learn,xzh86/scikit-learn,ClimbsRocks/scikit-learn,icdishb/scikit-learn,belltailjp/scikit-learn,Srisai85/scikit-learn,ZenDevelopmentSystems/scikit-learn,waterponey/scikit-learn,zorroblue/scikit-learn,chrsrds/scikit-learn,evgchz/scikit-learn,roxyboy/scikit-learn,zihua/scikit-learn,tawsifkhan/scikit-learn,pompiduskus/scikit-learn,espg/scikit-learn,xiaoxiamii/scikit-learn,zorojean/scikit-learn,CVML/scikit-learn,jaidevd/scikit-learn,huzq/scikit-learn,ClimbsRocks/scikit-learn,kmike/scikit-learn,0x0all/scikit-learn,bhargav/scikit-learn,quheng/scikit-learn,hsuantien/scikit-learn,sarahgrogan/scikit-learn,rishikksh20/scikit-learn,ElDeveloper/scikit-learn,OshynSong/scikit-learn,mayblue9/scikit-learn,anurag313/scikit-learn,zorojean/scikit-learn,equialgo/scikit-learn,jakobworldpeace/scikit-learn,ssaeger/scikit-learn,466152112/scikit-learn,macks22/scikit-learn,JPFrancoia/scikit-learn,abhishekkrthakur/scikit-learn,TomDLT/scikit-learn,smartscheduling/scikit-learn-categorical-tree,lazywei/scikit-learn,UNR-AERIAL/scikit-learn,untom/scikit-learn,btabibian/scikit-learn,xubenben/scikit-learn,nikitasingh981/scikit-learn,victorbergelin/scikit-learn,yanlend/scikit-learn,joshloyal/scikit-learn,Sentient07/scikit-learn,Djabbz/scikit-learn,chrisburr/scikit-learn,aminert/scikit-learn,DSLituiev/scikit-learn,LiaoPan/scikit-learn,gclenaghan/scikit-learn,liangz0707/scikit-learn,aabadie/scikit-learn,jblackburne/scikit-learn,loli/semisupervisedforests,mattilyra/scikit-learn,lin-credible/scikit-learn,pnedunuri/scikit-learn,mhdella/scikit-learn,kylerbrown/scikit-learn,spallavolu/scikit-learn,JosmanPS/scikit-learn,pv/scikit-learn,tomlof/scikit-learn,RayMick/scikit-learn,rishikksh20/scikit-learn,gotomypc/scikit-learn,AlexRobson/scikit-learn,yunfeilu/scikit-learn,ankurankan/scikit-learn,RayMick/scikit-learn,Adai0808/scikit-learn,loli/sklearn-ense
mbletrees,f3r/scikit-learn,shusenl/scikit-learn,larsmans/scikit-learn,ZenDevelopmentSystems/scikit-learn,glemaitre/scikit-learn,aetilley/scikit-learn,0x0all/scikit-learn,joernhees/scikit-learn,ky822/scikit-learn,rahuldhote/scikit-learn,walterreade/scikit-learn,rahuldhote/scikit-learn,fyffyt/scikit-learn,untom/scikit-learn,abhishekgahlot/scikit-learn,ashhher3/scikit-learn,xubenben/scikit-learn,mjudsp/Tsallis,vybstat/scikit-learn,liberatorqjw/scikit-learn,jereze/scikit-learn,ycaihua/scikit-learn,vinayak-mehta/scikit-learn,kaichogami/scikit-learn,jzt5132/scikit-learn,ElDeveloper/scikit-learn,schets/scikit-learn,AlexRobson/scikit-learn,ogrisel/scikit-learn,hrjn/scikit-learn,q1ang/scikit-learn,alvarofierroclavero/scikit-learn,clemkoa/scikit-learn,NunoEdgarGub1/scikit-learn,manhhomienbienthuy/scikit-learn,JeanKossaifi/scikit-learn,arabenjamin/scikit-learn,dingocuster/scikit-learn,pkruskal/scikit-learn,vermouthmjl/scikit-learn,Barmaley-exe/scikit-learn,phdowling/scikit-learn,tdhopper/scikit-learn,vivekmishra1991/scikit-learn,manashmndl/scikit-learn,ivannz/scikit-learn,zaxtax/scikit-learn,manhhomienbienthuy/scikit-learn,jm-begon/scikit-learn,wazeerzulfikar/scikit-learn,treycausey/scikit-learn,hitszxp/scikit-learn,xubenben/scikit-learn,sumspr/scikit-learn,moutai/scikit-learn,anurag313/scikit-learn,kylerbrown/scikit-learn,imaculate/scikit-learn,ningchi/scikit-learn,toastedcornflakes/scikit-learn,yonglehou/scikit-learn,ycaihua/scikit-learn,liberatorqjw/scikit-learn,jlegendary/scikit-learn,eickenberg/scikit-learn,jpautom/scikit-learn,xiaoxiamii/scikit-learn,Aasmi/scikit-learn,giorgiop/scikit-learn,B3AU/waveTree,kjung/scikit-learn,gclenaghan/scikit-learn,andrewnc/scikit-learn,amueller/scikit-learn,zorojean/scikit-learn,Garrett-R/scikit-learn,sarahgrogan/scikit-learn,CforED/Machine-Learning,arjoly/scikit-learn,andaag/scikit-learn,ElDeveloper/scikit-learn,hrjn/scikit-learn,florian-f/sklearn,sanketloke/scikit-learn,xubenben/scikit-learn,yanlend/scikit-learn,gclenaghan/scikit-learn,
jblackburne/scikit-learn,maheshakya/scikit-learn,mjudsp/Tsallis,MohammedWasim/scikit-learn,Windy-Ground/scikit-learn,hsuantien/scikit-learn,hitszxp/scikit-learn,Jimmy-Morzaria/scikit-learn,jorik041/scikit-learn,shyamalschandra/scikit-learn,rishikksh20/scikit-learn,florian-f/sklearn,elkingtonmcb/scikit-learn,harshaneelhg/scikit-learn,mwv/scikit-learn,IshankGulati/scikit-learn,aetilley/scikit-learn,hsiaoyi0504/scikit-learn,MohammedWasim/scikit-learn,cwu2011/scikit-learn,maheshakya/scikit-learn,heli522/scikit-learn,f3r/scikit-learn,LohithBlaze/scikit-learn,chrisburr/scikit-learn,shikhardb/scikit-learn,r-mart/scikit-learn,TomDLT/scikit-learn,MatthieuBizien/scikit-learn,Srisai85/scikit-learn,hsiaoyi0504/scikit-learn,Myasuka/scikit-learn,ChanderG/scikit-learn,massmutual/scikit-learn,jm-begon/scikit-learn,evgchz/scikit-learn,wanggang3333/scikit-learn,amueller/scikit-learn,maheshakya/scikit-learn,glouppe/scikit-learn,cl4rke/scikit-learn,jjx02230808/project0223,Windy-Ground/scikit-learn,CforED/Machine-Learning,IssamLaradji/scikit-learn,manashmndl/scikit-learn,ngoix/OCRF,moutai/scikit-learn,kaichogami/scikit-learn,AIML/scikit-learn,djgagne/scikit-learn,kylerbrown/scikit-learn,carrillo/scikit-learn,shangwuhencc/scikit-learn,qifeigit/scikit-learn,trungnt13/scikit-learn,herilalaina/scikit-learn,dsullivan7/scikit-learn,huobaowangxi/scikit-learn,jayflo/scikit-learn,RayMick/scikit-learn,elkingtonmcb/scikit-learn,ivannz/scikit-learn,nesterione/scikit-learn,fbagirov/scikit-learn,ankurankan/scikit-learn,hlin117/scikit-learn,ilo10/scikit-learn,adamgreenhall/scikit-learn,akionakamura/scikit-learn,wzbozon/scikit-learn,fengzhyuan/scikit-learn,hdmetor/scikit-learn,waterponey/scikit-learn,anurag313/scikit-learn,yyjiang/scikit-learn,PatrickChrist/scikit-learn,h2educ/scikit-learn,glemaitre/scikit-learn,victorbergelin/scikit-learn,cdegroc/scikit-learn,ogrisel/scikit-learn,rahuldhote/scikit-learn,pnedunuri/scikit-learn,billy-inn/scikit-learn,nmayorov/scikit-learn,ldirer/scikit-learn,robin-lai/s
cikit-learn,pratapvardhan/scikit-learn,jakobworldpeace/scikit-learn,carrillo/scikit-learn,mehdidc/scikit-learn,hdmetor/scikit-learn,fengzhyuan/scikit-learn,Fireblend/scikit-learn,AIML/scikit-learn,icdishb/scikit-learn,etkirsch/scikit-learn,mhue/scikit-learn,loli/semisupervisedforests,jmschrei/scikit-learn,ilo10/scikit-learn,andaag/scikit-learn,saiwing-yeung/scikit-learn,tmhm/scikit-learn,pianomania/scikit-learn,sonnyhu/scikit-learn,dingocuster/scikit-learn,ChanderG/scikit-learn,jseabold/scikit-learn,hainm/scikit-learn,MartinSavc/scikit-learn,h2educ/scikit-learn,abhishekgahlot/scikit-learn,nhejazi/scikit-learn,bthirion/scikit-learn,aflaxman/scikit-learn,UNR-AERIAL/scikit-learn,chrisburr/scikit-learn,ZenDevelopmentSystems/scikit-learn,hugobowne/scikit-learn,HolgerPeters/scikit-learn,JPFrancoia/scikit-learn,tmhm/scikit-learn,RachitKansal/scikit-learn,larsmans/scikit-learn,lbishal/scikit-learn,tdhopper/scikit-learn,evgchz/scikit-learn,waterponey/scikit-learn,smartscheduling/scikit-learn-categorical-tree,henrykironde/scikit-learn,fabianp/scikit-learn,madjelan/scikit-learn,rrohan/scikit-learn,ldirer/scikit-learn,AlexandreAbraham/scikit-learn,Fireblend/scikit-learn,Clyde-fare/scikit-learn,thilbern/scikit-learn,dsullivan7/scikit-learn,henridwyer/scikit-learn,aabadie/scikit-learn,MatthieuBizien/scikit-learn,hsuantien/scikit-learn,marcocaccin/scikit-learn,MartinSavc/scikit-learn,vibhorag/scikit-learn,shyamalschandra/scikit-learn,jereze/scikit-learn,terkkila/scikit-learn,nelson-liu/scikit-learn,anntzer/scikit-learn,LiaoPan/scikit-learn,samzhang111/scikit-learn,rexshihaoren/scikit-learn,sergeyf/scikit-learn,alvarofierroclavero/scikit-learn,AlexandreAbraham/scikit-learn,RPGOne/scikit-learn,hdmetor/scikit-learn,nomadcube/scikit-learn,kevin-intel/scikit-learn,betatim/scikit-learn,ogrisel/scikit-learn,frank-tancf/scikit-learn,sgenoud/scikit-learn,shikhardb/scikit-learn,nikitasingh981/scikit-learn,yonglehou/scikit-learn,arahuja/scikit-learn,YinongLong/scikit-learn,xwolf12/scikit-lea
rn,pypot/scikit-learn,roxyboy/scikit-learn,scikit-learn/scikit-learn,elkingtonmcb/scikit-learn,plissonf/scikit-learn,pv/scikit-learn,cauchycui/scikit-learn,arabenjamin/scikit-learn,rohanp/scikit-learn,dsquareindia/scikit-learn,jkarnows/scikit-learn,3manuek/scikit-learn,icdishb/scikit-learn,thilbern/scikit-learn,Adai0808/scikit-learn,vshtanko/scikit-learn,mojoboss/scikit-learn,meduz/scikit-learn,mrshu/scikit-learn,mattgiguere/scikit-learn,ashhher3/scikit-learn,trankmichael/scikit-learn,HolgerPeters/scikit-learn,MechCoder/scikit-learn,jmetzen/scikit-learn,huzq/scikit-learn,CforED/Machine-Learning,alexsavio/scikit-learn,alvarofierroclavero/scikit-learn,petosegan/scikit-learn,AnasGhrab/scikit-learn,ngoix/OCRF,3manuek/scikit-learn,spallavolu/scikit-learn,Nyker510/scikit-learn,ZENGXH/scikit-learn,ltiao/scikit-learn,ssaeger/scikit-learn,ankurankan/scikit-learn,vinayak-mehta/scikit-learn,hugobowne/scikit-learn,quheng/scikit-learn,adamgreenhall/scikit-learn,eg-zhang/scikit-learn,carrillo/scikit-learn,untom/scikit-learn,frank-tancf/scikit-learn,jereze/scikit-learn,f3r/scikit-learn,krez13/scikit-learn,0x0all/scikit-learn,bthirion/scikit-learn,anirudhjayaraman/scikit-learn,mrshu/scikit-learn,loli/semisupervisedforests,gotomypc/scikit-learn,mugizico/scikit-learn,Clyde-fare/scikit-learn,ilo10/scikit-learn,davidgbe/scikit-learn,huzq/scikit-learn,walterreade/scikit-learn,raghavrv/scikit-learn,JsNoNo/scikit-learn,rahul-c1/scikit-learn,jblackburne/scikit-learn,xavierwu/scikit-learn,procoder317/scikit-learn,nelson-liu/scikit-learn,ishanic/scikit-learn,Jimmy-Morzaria/scikit-learn,lucidfrontier45/scikit-learn,dsullivan7/scikit-learn,anurag313/scikit-learn,theoryno3/scikit-learn,iismd17/scikit-learn,fredhusser/scikit-learn,jzt5132/scikit-learn,JsNoNo/scikit-learn,madjelan/scikit-learn,sonnyhu/scikit-learn,0asa/scikit-learn,mwv/scikit-learn,sonnyhu/scikit-learn,fengzhyuan/scikit-learn,lbishal/scikit-learn,hlin117/scikit-learn,mhue/scikit-learn,xavierwu/scikit-learn,NunoEdgarGub1/scikit-le
arn,djgagne/scikit-learn,lin-credible/scikit-learn,trungnt13/scikit-learn,YinongLong/scikit-learn,rajat1994/scikit-learn,zihua/scikit-learn,DonBeo/scikit-learn,PatrickOReilly/scikit-learn,jakobworldpeace/scikit-learn,rajat1994/scikit-learn,moutai/scikit-learn,ephes/scikit-learn,mugizico/scikit-learn,jmetzen/scikit-learn,justincassidy/scikit-learn,cauchycui/scikit-learn,wanggang3333/scikit-learn,wanggang3333/scikit-learn,xuewei4d/scikit-learn,vinayak-mehta/scikit-learn,IshankGulati/scikit-learn,zorroblue/scikit-learn,aetilley/scikit-learn,DonBeo/scikit-learn,ahoyosid/scikit-learn,zuku1985/scikit-learn,yask123/scikit-learn,cl4rke/scikit-learn,bthirion/scikit-learn,jm-begon/scikit-learn,dingocuster/scikit-learn,thientu/scikit-learn,vortex-ape/scikit-learn,ZENGXH/scikit-learn,bigdataelephants/scikit-learn,xzh86/scikit-learn,abhishekkrthakur/scikit-learn,heli522/scikit-learn,Windy-Ground/scikit-learn,espg/scikit-learn,schets/scikit-learn,tosolveit/scikit-learn,scikit-learn/scikit-learn,ZENGXH/scikit-learn,mattilyra/scikit-learn,Titan-C/scikit-learn,pianomania/scikit-learn,Garrett-R/scikit-learn,yonglehou/scikit-learn,vortex-ape/scikit-learn,marcocaccin/scikit-learn,rajat1994/scikit-learn,kagayakidan/scikit-learn,jakirkham/scikit-learn,harshaneelhg/scikit-learn,lenovor/scikit-learn,NelisVerhoef/scikit-learn,cainiaocome/scikit-learn,alexeyum/scikit-learn,loli/semisupervisedforests,sanketloke/scikit-learn,ycaihua/scikit-learn,sgenoud/scikit-learn,JosmanPS/scikit-learn,beepee14/scikit-learn,abhishekgahlot/scikit-learn,tdhopper/scikit-learn,potash/scikit-learn,sumspr/scikit-learn,meduz/scikit-learn,plissonf/scikit-learn,PrashntS/scikit-learn,Djabbz/scikit-learn,jzt5132/scikit-learn,heli522/scikit-learn,anirudhjayaraman/scikit-learn,sanketloke/scikit-learn,rishikksh20/scikit-learn,zorroblue/scikit-learn,wlamond/scikit-learn,mjudsp/Tsallis,qifeigit/scikit-learn,fbagirov/scikit-learn,f3r/scikit-learn,ilyes14/scikit-learn,xiaoxiamii/scikit-learn,kmike/scikit-learn,vigilv/scikit-l
earn,nhejazi/scikit-learn,yunfeilu/scikit-learn,bnaul/scikit-learn,q1ang/scikit-learn,jblackburne/scikit-learn,ivannz/scikit-learn,PatrickChrist/scikit-learn,phdowling/scikit-learn,walterreade/scikit-learn,vortex-ape/scikit-learn,nmayorov/scikit-learn,stylianos-kampakis/scikit-learn,abimannans/scikit-learn,arabenjamin/scikit-learn,jkarnows/scikit-learn,florian-f/sklearn,lenovor/scikit-learn,quheng/scikit-learn,hdmetor/scikit-learn,raghavrv/scikit-learn,jaidevd/scikit-learn,fredhusser/scikit-learn,altairpearl/scikit-learn,themrmax/scikit-learn,anirudhjayaraman/scikit-learn,glouppe/scikit-learn,andrewnc/scikit-learn,ycaihua/scikit-learn,ndingwall/scikit-learn,manhhomienbienthuy/scikit-learn,jpautom/scikit-learn,ChanderG/scikit-learn,voxlol/scikit-learn,JeanKossaifi/scikit-learn,DSLituiev/scikit-learn,Myasuka/scikit-learn,vermouthmjl/scikit-learn,jayflo/scikit-learn,betatim/scikit-learn,mjudsp/Tsallis,robin-lai/scikit-learn,tomlof/scikit-learn,Vimos/scikit-learn,Garrett-R/scikit-learn,pratapvardhan/scikit-learn,treycausey/scikit-learn,glemaitre/scikit-learn,RPGOne/scikit-learn,jorik041/scikit-learn,ky822/scikit-learn,anntzer/scikit-learn,wzbozon/scikit-learn,lbishal/scikit-learn,kjung/scikit-learn,zorroblue/scikit-learn,dhruv13J/scikit-learn,Clyde-fare/scikit-learn,fabioticconi/scikit-learn,mayblue9/scikit-learn,wlamond/scikit-learn,IssamLaradji/scikit-learn,MechCoder/scikit-learn,rrohan/scikit-learn,ningchi/scikit-learn,trankmichael/scikit-learn,pompiduskus/scikit-learn,depet/scikit-learn,ominux/scikit-learn,yyjiang/scikit-learn,kashif/scikit-learn,cl4rke/scikit-learn,olologin/scikit-learn,hainm/scikit-learn,michigraber/scikit-learn,arjoly/scikit-learn,RPGOne/scikit-learn,xiaoxiamii/scikit-learn,MohammedWasim/scikit-learn,simon-pepin/scikit-learn,trankmichael/scikit-learn,mfjb/scikit-learn,bhargav/scikit-learn,B3AU/waveTree,r-mart/scikit-learn,zihua/scikit-learn,bikong2/scikit-learn,mblondel/scikit-learn,aminert/scikit-learn,UNR-AERIAL/scikit-learn,jakirkham/scikit-le
arn,jmschrei/scikit-learn,equialgo/scikit-learn,pkruskal/scikit-learn,JsNoNo/scikit-learn,spallavolu/scikit-learn,djgagne/scikit-learn,khkaminska/scikit-learn,mhdella/scikit-learn,deepesch/scikit-learn,phdowling/scikit-learn,IssamLaradji/scikit-learn,yask123/scikit-learn,pratapvardhan/scikit-learn,justincassidy/scikit-learn,costypetrisor/scikit-learn,rexshihaoren/scikit-learn,untom/scikit-learn,h2educ/scikit-learn,hitszxp/scikit-learn,trungnt13/scikit-learn,mattilyra/scikit-learn,ngoix/OCRF,ltiao/scikit-learn,0asa/scikit-learn,Vimos/scikit-learn,MartinDelzant/scikit-learn,huzq/scikit-learn,theoryno3/scikit-learn,henridwyer/scikit-learn,smartscheduling/scikit-learn-categorical-tree,aabadie/scikit-learn,sonnyhu/scikit-learn,cybernet14/scikit-learn,cybernet14/scikit-learn,robbymeals/scikit-learn,kagayakidan/scikit-learn,glouppe/scikit-learn,larsmans/scikit-learn,lucidfrontier45/scikit-learn,giorgiop/scikit-learn,LohithBlaze/scikit-learn,AnasGhrab/scikit-learn,kagayakidan/scikit-learn,evgchz/scikit-learn,ky822/scikit-learn,etkirsch/scikit-learn,russel1237/scikit-learn,ClimbsRocks/scikit-learn,xavierwu/scikit-learn,lucidfrontier45/scikit-learn,voxlol/scikit-learn,plissonf/scikit-learn,BiaDarkia/scikit-learn,xwolf12/scikit-learn,ElDeveloper/scikit-learn,alexeyum/scikit-learn,equialgo/scikit-learn,zhenv5/scikit-learn,aflaxman/scikit-learn,fyffyt/scikit-learn,CVML/scikit-learn,mehdidc/scikit-learn,0asa/scikit-learn,abhishekgahlot/scikit-learn,luo66/scikit-learn,jpautom/scikit-learn,mxjl620/scikit-learn,luo66/scikit-learn,bikong2/scikit-learn,OshynSong/scikit-learn,ky822/scikit-learn,aflaxman/scikit-learn,Srisai85/scikit-learn,shangwuhencc/scikit-learn,shyamalschandra/scikit-learn,deepesch/scikit-learn,vivekmishra1991/scikit-learn,costypetrisor/scikit-learn,jmschrei/scikit-learn,kashif/scikit-learn,sarahgrogan/scikit-learn,voxlol/scikit-learn,CforED/Machine-Learning,mayblue9/scikit-learn,treycausey/scikit-learn,joshloyal/scikit-learn,florian-f/sklearn,ominux/scikit-learn,nhe
jazi/scikit-learn,altairpearl/scikit-learn,samzhang111/scikit-learn,shikhardb/scikit-learn,saiwing-yeung/scikit-learn,mlyundin/scikit-learn,CVML/scikit-learn,mblondel/scikit-learn,sinhrks/scikit-learn,olologin/scikit-learn,LohithBlaze/scikit-learn,ZenDevelopmentSystems/scikit-learn,kjung/scikit-learn,ankurankan/scikit-learn,vshtanko/scikit-learn,petosegan/scikit-learn,deepesch/scikit-learn,terkkila/scikit-learn,larsmans/scikit-learn,jlegendary/scikit-learn,chrsrds/scikit-learn,rohanp/scikit-learn,yunfeilu/scikit-learn,glemaitre/scikit-learn,kashif/scikit-learn,AlexandreAbraham/scikit-learn,jaidevd/scikit-learn,IssamLaradji/scikit-learn,sgenoud/scikit-learn,JeanKossaifi/scikit-learn,Titan-C/scikit-learn,vinayak-mehta/scikit-learn,kevin-intel/scikit-learn,appapantula/scikit-learn,Nyker510/scikit-learn,krez13/scikit-learn,nmayorov/scikit-learn,lenovor/scikit-learn,BiaDarkia/scikit-learn,lazywei/scikit-learn,Aasmi/scikit-learn,glennq/scikit-learn,nikitasingh981/scikit-learn,costypetrisor/scikit-learn,ominux/scikit-learn,belltailjp/scikit-learn,ahoyosid/scikit-learn,hsiaoyi0504/scikit-learn,imaculate/scikit-learn,rvraghav93/scikit-learn,fbagirov/scikit-learn,bnaul/scikit-learn,andaag/scikit-learn,poryfly/scikit-learn,h2educ/scikit-learn,ChanChiChoi/scikit-learn,q1ang/scikit-learn,sumspr/scikit-learn,hrjn/scikit-learn,gotomypc/scikit-learn,dsquareindia/scikit-learn,murali-munna/scikit-learn,shahankhatch/scikit-learn,vigilv/scikit-learn,pnedunuri/scikit-learn,nomadcube/scikit-learn,pompiduskus/scikit-learn,ephes/scikit-learn,beepee14/scikit-learn,MartinSavc/scikit-learn,ngoix/OCRF,jmetzen/scikit-learn,thientu/scikit-learn,AIML/scikit-learn,bnaul/scikit-learn,appapantula/scikit-learn,pkruskal/scikit-learn,liangz0707/scikit-learn,schets/scikit-learn,rahul-c1/scikit-learn,mojoboss/scikit-learn,mxjl620/scikit-learn,mikebenfield/scikit-learn,massmutual/scikit-learn,devanshdalal/scikit-learn,jorik041/scikit-learn,nelson-liu/scikit-learn,clemkoa/scikit-learn | 
scikits/learn/utils/crossval.py | scikits/learn/utils/crossval.py | # Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD Style.
# $Id: cd.py 473 2010-03-03 16:27:38Z twigster $
import numpy as np
import exceptions
class LOO:
"""
Leave-One-Out cross validation:
Provides train/test indexes to split data in train test sets
Examples:
import scikits.learn.utils.crossval
import numpy as np
n_samples, n_features = 5, 10
X = np.random.randn(n_samples, n_features)
print X
loo = crossval.LOO(n_samples)
print loo[1]
for train_index, test_index in loo:
print "TRAIN:", train_index, "TEST:", test_index
"""
def __init__(self,n):
"""
n : is the size of the dataset to split
"""
self.n_folds = n
self.iter = 0
def __getitem__(self,item):
test_index = np.zeros(self.n_folds,dtype=np.bool)
test_index[item] = True
train_index = np.logical_not(test_index)
return train_index, test_index
def next(self):
if self.iter < self.n_folds:
self.iter += 1
return self.__getitem__(self.iter-1)
raise StopIteration
def __iter__(self):
return self
def crossval_split(train_indexes, test_indexes, *args):
"""
For each arg return a train and test subsets defined by indexes provided
in train_indexes and test_indexes
"""
ret = []
for arg in args:
arg_train = arg[trainIndexes,:]
arg_test = arg[testIndexes,:]
ret.append(arg_train)
ret.append(arg_test)
return ret
if __name__ == "__main__":
print "Leave One Out crossvalidation"
n_samples, n_features = 5, 10
X = np.random.randn(n_samples, n_features)
print X
loo = LOO(n_samples)
print loo[1]
for train_index, test_index in loo:
print "TRAIN:", train_index, "TEST:", test_index
| bsd-3-clause | Python | |
e42fcd8a7dfd213c3de8ccc925410ab3dfe68a3c | Test Lemniscate of Bernoulli trajectory | bit0001/trajectory_tracking,bit0001/trajectory_tracking | src/test/trajectory/test_lemniscate_trajectory.py | src/test/trajectory/test_lemniscate_trajectory.py | #!/usr/bin/env python
import unittest
from geometry_msgs.msg import Point
from trajectory.lemniscate_trajectory import LemniscateTrajectory
class LemniscateTrajectoryTest(unittest.TestCase):
def setUp(self):
self.trajectory = LemniscateTrajectory(5, 4)
self.expected_position = Point()
def test_given_lemniscate_trajectory_when_getting_position_after_0s_then_position_is_returned(self):
self.expected_position.x = 2.8284
self.expected_position.y = 0.0
self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(0), 0.01)
def test_given_lemniscate_trajectory_when_getting_position_after_1s_then_position_is_returned(self):
self.expected_position.x = 0
self.expected_position.y = 0
self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(1), 0.01)
def test_given_lemniscate_trajectory_when_getting_position_after_2s_then_position_is_returned(self):
self.expected_position.x = -2.8284
self.expected_position.y = 0
self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(2), 0.01)
def test_given_lemniscate_trajectory_when_getting_position_after_3s_then_position_is_returned(self):
self.expected_position.x = 0
self.expected_position.y = 0
self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(3), 0.01)
def test_given_lemniscate_trajectory_when_getting_position_after_4s_then_position_is_returned(self):
self.expected_position.x = 2.8284
self.expected_position.y = 0
self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(4), 0.01)
def assertPositionAlmostEqual(self, expected, actual, delta):
self.assertAlmostEqual(expected.x, actual.x, delta=delta)
self.assertAlmostEqual(expected.y, actual.y, delta=delta)
| mit | Python | |
4322e8f487af673191cb042bd8f34d6c1526bb42 | Add a command for _accessibilityTraitsInspectorHumanReadable. | mrhappyasthma/happydebugging | scripts/accessibility_traits.py | scripts/accessibility_traits.py | import lldb
import shlex
from helpers.environment_checks import EnvironmentChecks
from subprocess import call
def accessibility_traits(debugger, command, result, internal_dict):
"""Prints human readable strings of the a11y traits for a given object..
Note: This command can only be run while VoiceOver is running.
Warning: This command uses private framework APIs. Later OS versions may
cause this command to malfunction.
Usage:
accesibilityTraits <NSObject_instance>
"""
args = shlex.split(command)
if len(args) != 1:
result.Println('ERROR: Please enter the command as "accessibilityTraits <NSObject instance>".')
return
target = debugger.GetSelectedTarget()
if target is None:
result.Println('ERROR: Could not get selected target.')
return
if EnvironmentChecks.isSimulatorTarget(target):
result.Println('ERROR: This command is only supported for device builds, and current debugger target is a simulator.')
return
# Check if private UIAccessibility.framework is already loaded, if not try to load it.
is_framework_loaded_cmd = """
@import UIKit;
BOOL frameworkAlreadyLoaded = NO;
for (NSBundle *bundle in NSBundle.allFrameworks) {
NSString *frameworkPath = @"/System/Library/PrivateFrameworks/UIAccessibility.framework";
if ([bundle.resourcePath isEqualToString:frameworkPath]) {
frameworkAlreadyLoaded = YES;
break;
}
}
frameworkAlreadyLoaded ? @"YES" : @"NO"; // Return the BOOL
"""
ret_value = target.EvaluateExpression(is_framework_loaded_cmd)
# If something went wrong, try to load the command.
if not ret_value.GetError().Success() or ret_value.GetObjectDescription().strip() == 'NO':
load_framework_cmd = """
NSString *frameworkPath = @"/System/Library/PrivateFrameworks/UIAccessibility.framework";
NSBundle *bundleForFramework = [NSBundle bundleWithPath:frameworkPath];
BOOL success = [bundleForFramework load];
success ? @"YES" : @"NO"; // Return the BOOL
"""
ret_value = target.EvaluateExpression(load_framework_cmd)
result.Println(ret_value.GetError().GetCString())
if not ret_value.GetError().Success() or ret_value.GetObjectDescription().strip() == 'NO':
result.Println('ERROR: Could not load the private UIAccessibility.framework.')
return
# Ensure we have an NSObject instance. If not, return and print an error message.
nsobject_check_cmd = 'po (BOOL)[' + args[0] + ' isKindOfClass:[NSObject class]]'
temp_result = lldb.SBCommandReturnObject()
debugger.GetCommandInterpreter().HandleCommand(nsobject_check_cmd, temp_result)
if temp_result.GetError() or temp_result.GetOutput().strip() == 'NO':
result.Println('ERROR: This command only works for NSObjects. Enter the command as "accessibilitytree <NSObject instance>".')
return
# Ensure the object responds to the selector.
responds_to_selector_cmd = 'po (BOOL)[' + args[0] + ' respondsToSelector:NSSelectorFromString(@"_accessibilityTraitsInspectorHumanReadable")]'
temp_result = lldb.SBCommandReturnObject()
debugger.GetCommandInterpreter().HandleCommand(responds_to_selector_cmd, temp_result)
if temp_result.GetError() or temp_result.GetOutput().strip() == 'NO':
result.Println('ERROR: Object does not respond to `_accessibilityTraitsInspectorHumanReadable`. Apple might have changed their API. Please file a bug at http://www.github.com/mrhappyasthma/HappyDebugging')
return
cmd = 'po (NSString *)[' + args[0] + ' _accessibilityTraitsInspectorHumanReadable]'
debugger.GetCommandInterpreter().HandleCommand(cmd, result)
def __lldb_init_module(debugger, internal_dict):
cmd = ('command script add '
'-f accessibility_traits.accessibility_traits accessibilityTraits '
'-h "Prints the a11y traits of the given object as human readable strings."')
debugger.HandleCommand(cmd)
| mit | Python | |
7b4107cfb465faf70110b72da9b655758d62d9b3 | add extraction tool as per request from Renee | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/mec/extract_rshowers.py | scripts/mec/extract_rshowers.py | import pytz
import datetime
import psycopg2
pgconn = psycopg2.connect(host='127.0.0.1', port=5555, user='mesonet', database='mec')
cursor = pgconn.cursor()
dates = """06-02-2008 00z - 06-07-2008 06z
06-09-2008 00z - 06-14-2008 06z
06-23-2008 00z - 06-25-2008 06z
07-04-2008 00z - 07-06-2008 06z
08-15-2008 00z - 08-23-2008 06z
02-19-2009 00z - 02-25-2009 06z
03-02-2009 00z - 03-07-2009 06z
03-28-2009 00z - 04-03-2009 06z"""
def c(val):
if val is None:
return 'M'
return val
for line in dates.split("\n"):
tokens = line.split(" - ")
sts = datetime.datetime.strptime(tokens[0][:12], '%m-%d-%Y %H')
sts = sts.replace(tzinfo=pytz.timezone("UTC"))
ets = datetime.datetime.strptime(tokens[1][:12], '%m-%d-%Y %H')
ets = ets.replace(tzinfo=pytz.timezone("UTC"))
output = open('extract/%s-%s.txt' % (sts.strftime("%Y%m%d%H%M"),
ets.strftime("%Y%m%d%H%M")), 'w')
output.write("utcvalid,avg_power,avg_windspeed,stddev_windspeed,count\n")
cursor.execute("""
select valid, avg(power), avg(windspeed), stddev(windspeed),
count(*) from sampled_data
WHERE valid >= %s and valid < %s
and extract(minute from valid)::int %% 10 = 0 and power is not null
and windspeed is not null GROUP by valid ORDER by valid ASC
""", (sts, ets))
print sts, ets, cursor.rowcount
for row in cursor:
ts = row[0].astimezone(pytz.timezone("UTC"))
output.write("%s,%s,%s,%s,%s\n" % (
row[0].strftime("%Y-%m-%d %H:%M:%S"),
c(row[1]), c(row[2]), c(row[3]), row[4] ))
output.close()
| mit | Python | |
74bfc85ef4533e93a4edf4c16e5a7a6bb175f36b | Simplify the view as the validation logic has already moved to the model | ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website | onetime/views.py | onetime/views.py | from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key, redirect_invalid_to=None):
user = auth.authenticate(key=key)
if user is None:
if redirect_invalid_to is not None:
return HttpResponseRedirect(redirect_invalid_to)
else:
return HttpResponseGone()
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
next = request.GET.get('next', None)
if data.next is not None:
next = data.next
if next is None:
next = settings.LOGIN_REDIRECT_URL
return HttpResponseRedirect(next)
| from datetime import datetime
from django.http import HttpResponseRedirect, HttpResponseGone
from django.contrib.auth import login
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
def login(request, key, redirect_invalid_to=None, redirect_expired_to=None):
data = Key.objects.get(key=key)
if data is None:
if redirect_invalid_to is not None:
return HttpResponseRedirect(redirect_invalid_to)
else:
return HttpResponseGone()
expired = False
if data.usage_left is not None and data.usage_left <= 0:
expired = True
if data.expires is not None and data.expires < datetime.now():
expired = True
if expired:
if redirect_expired_to is not None:
return HttpResponseRedirect(redirect_expired_to)
else:
return HttpResponseGone()
if data.usage_left is not None:
data.usage_left -= 1
data.save()
login(request, data.user)
next = request.GET.get('next', None)
if data.next is not None:
next = data.next
if next is None:
next = settings.LOGIN_REDIRECT_URL
return HttpResponseRedirect(next)
| agpl-3.0 | Python |
159b971ae95501f9093dedb881ed030eed74241e | Create __init__.py | claus022015/Nutrition-Database-Modernization,claus022015/Nutrition-Database-Modernization,claus022015/Nutrition-Database-Modernization,claus022015/Nutrition-Database-Modernization,claus022015/Nutrition-Database-Modernization,claus022015/Nutrition-Database-Modernization | docs/__init__.py | docs/__init__.py | # -*- coding: utf-8 -*-
"""
sphinxcontrib
~~~~~~~~~~~~~
This package is a namespace package that contains all extensions
distributed in the ``sphinx-contrib`` distribution.
:copyright: Copyright 2007-2009 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
__import__('pkg_resources').declare_namespace(__name__)
| mit | Python | |
b9be58f46fe40f471696dd153253781b7a873eda | Create secret_lottery_winning_no3.py | james-jz-zheng/OtherStuff | secret_lottery_winning_no3.py | secret_lottery_winning_no3.py | ### QUESTION:
'''
Mr. X is approached in the subway by a guy who claims to be an alien stranded on Earth and to possess time machine
that allows him to know the future. He needs funds to fix his flying saucer but filling in wiMAX_NOing numbers for next week's lottery
would create a time paradox. Therefore, he's willing to sell next week's wiMAX_NOing numbers to Mr. X at a favorable price. Mr. X,
as gullible as he is, feels that this may be a scam and asks for a proof. Alien gives him the next week's wiMAX_NOing numbers in
encrypted form so that Mr. X can't use them and then decide not to pay for them. After the lottery draw he'll give Mr. X the key
to unlock the file and Mr. X can verify that the prediction was correct.
After the draw, Mr. X gets the key and lo and behold, the numbers are correct! To rule out the possibility that it happened by chance,
they do the experiment twice. Then thrice. Finally, Mr. X is persuaded. He pays the alien and gets the set of numbers for the next
week's draw. But the numbers drawn are completely different.
And now the question: How did the scam work?
Assuming the lottery is a set of 5 numbers in range [1..32]
'''
### SOLUTION:
import string, random
UNIT, MAX_NO, NDIGIT, NCODE = 6, 32, 3, 18
asciid = dict(enumerate(string.ascii_letters + string.digits + '-_'))
asciidr = dict(map(reversed, asciid.items()))
keydr = dict(enumerate([(i+1,j+1,k+1,l+1,m+1) for i in range(MAX_NO-4) for j in range(i+1, MAX_NO-3) for k in range(j+1, MAX_NO-2) for l in range(k+1, MAX_NO-1) for m in range(l+1, MAX_NO)]))
keyd = dict(map(reversed, keydr.items()))
# encoder, decoder
encode = lambda d: ('0'*NCODE+bin(keyd[d])[2:])[-NCODE:]
decode = lambda p: eval('0b'+''.join([('0'*UNIT+bin(asciidr[i])[2:])[-UNIT:] for i in p]))
# main logic
gen_encrypted_wiMAX_NOing_no = lambda : ''.join(random.sample(string.ascii_letters*64,64))
gen_decrypt_key = lambda _, data : ''.join([asciid[eval('0b'+encode(data)[i*UNIT:(i+1)*UNIT])] for i in range(NDIGIT)])
decrypt_wiMAX_NOing_no = lambda _, passwd: keydr[decode(passwd)]
### USE CASE:
# give fake encrypted no. to mr. X
locked_wiMAX_NOing_no_to_MrX = gen_encrypted_wiMAX_NOing_no()
print 'locked_wiMAX_NOing_no_to_MrX :\n\t', locked_wiMAX_NOing_no_to_MrX
# wait for actual wiMAX_NOing no. is out and generate fake key
WINNING_NO = (3,11,12,27,31)
key_to_unlock = gen_decrypt_key(locked_wiMAX_NOing_no_to_MrX, WINNING_NO)
print 'key_to_unlock :\n\t', key_to_unlock
# mr. X uses the fake key to decrypt the fake encrpted no.
unlocked_wiMAX_NOing_no_to_MrX = decrypt_wiMAX_NOing_no(locked_wiMAX_NOing_no_to_MrX, key_to_unlock)
print 'unlocked_wiMAX_NOing_no_to_MrX :\n\t', unlocked_wiMAX_NOing_no_to_MrX
### OUTPUT:
'''
locked_wiMAX_NOing_no_to_MrX :
KCpmAZDzCtXExsVIcAkvMsItwbUMkcUxeQQkjYZtVHPGjzeDlEsveioNVtLeSfHJ
key_to_unlock :
sBC
unlocked_wiMAX_NOing_no_to_MrX :
(3, 11, 12, 27, 31)
'''
| apache-2.0 | Python | |
88d4139fdfdcb11be7cbe42fe1223cfde5752950 | debug path | ethermarket/pyethereum,ethereum/pyethereum,vaporry/pyethereum,pipermerriam/pyethereum,harlantwood/pyethereum,inzem77/pyethereum,shahankhatch/pyethereum,shahankhatch/pyethereum,pipermerriam/pyethereum,karlfloersch/pyethereum,holiman/pyethereum,ddworken/pyethereum,karlfloersch/pyethereum,ethereum/pyethereum,ckeenan/pyethereum | pyethereum/config.py | pyethereum/config.py |
import os
import uuid
import StringIO
import ConfigParser
from pyethereum.utils import default_data_dir
from pyethereum.packeter import Packeter
from pyethereum.utils import sha3
def default_config_path():
return os.path.join(default_data_dir, 'config.txt')
def default_client_version():
return Packeter.CLIENT_VERSION # FIXME
def default_node_id():
x = (sha3(str(uuid.uuid1())) * 2).encode('hex')
assert len(x) == 128
return x
config_template = \
"""
# NETWORK OPTIONS ###########
[network]
# Connect to remote host/port
# poc-7.ethdev.com:30300
remote_host = 207.12.89.180
remote_port = 30300
# Listen on the given host/port for incoming connections
listen_host = 0.0.0.0
listen_port = 30303
# Number of peer to connections to establish
num_peers = 10
# unique id of this node
node_id = {0}
# API OPTIONS ###########
[api]
# Serve the restful json api on the given host/port
listen_host = 0.0.0.0
listen_port = 30203
# path to server the api at
api_path = /api/v02a
# MISC OIPTIONS #########
[misc]
# Load database from path
data_dir = {1}
# percent cpu devoted to mining 0=off
mining = 30
# see help for available log groups
logging = :INFO
# log as json output
log_json = 0
# WALLET OPTIONS ##################
[wallet]
# Set the coinbase (mining payout) address
coinbase = 6c386a4b26f73c802f34673f7248bb118f97424a
""".format(default_node_id(), default_data_dir)
def get_default_config():
f = StringIO.StringIO()
f.write(config_template)
f.seek(0)
config = ConfigParser.ConfigParser()
config.readfp(f)
config.set('network', 'client_version', default_client_version())
return config
def read_config(cfg_path=default_config_path()):
# create default if not existent
if not os.path.exists(cfg_path):
open(cfg_path, 'w').write(config_template)
# extend on the default config
config = get_default_config()
config.read(cfg_path)
return config
def validate_config(config):
assert len(config.get('network', 'node_id')) == 128 # 512bit hex encoded
assert len(config.get('wallet', 'coinbase')) == 40 # 160bit hex encoded
|
import os
import uuid
import StringIO
import ConfigParser
from pyethereum.utils import default_data_dir
from pyethereum.packeter import Packeter
from pyethereum.utils import sha3
def default_config_path():
return os.path.join(default_data_dir, 'config.txt')
def default_client_version():
return Packeter.CLIENT_VERSION # FIXME
def default_node_id():
x = (sha3(str(uuid.uuid1())) * 2).encode('hex')
assert len(x) == 128
return x
config_template = \
"""
# NETWORK OPTIONS ###########
[network]
# Connect to remote host/port
# poc-7.ethdev.com:30300
remote_host = 207.12.89.180
remote_port = 30300
# Listen on the given host/port for incoming connections
listen_host = 0.0.0.0
listen_port = 30303
# Number of peer to connections to establish
num_peers = 10
# unique id of this node
node_id = {0}
# API OPTIONS ###########
[api]
# Serve the restful json api on the given host/port
listen_host = 0.0.0.0
listen_port = 30203
# path to server the api at
api_path = /api/v02a
# MISC OIPTIONS #########
[misc]
# Load database from path
data_dir = {1}
# percent cpu devoted to mining 0=off
mining = 30
# see help for available log groups
logging = :INFO
# log as json output
log_json = 0
# WALLET OPTIONS ##################
[wallet]
# Set the coinbase (mining payout) address
coinbase = 6c386a4b26f73c802f34673f7248bb118f97424a
""".format(default_node_id(), default_data_dir)
def get_default_config():
f = StringIO.StringIO()
f.write(config_template)
f.seek(0)
config = ConfigParser.ConfigParser()
config.readfp(f)
config.set('network', 'client_version', default_client_version())
return config
def read_config(cfg_path=default_config_path()):
print cfg_path
# create default if not existent
if not os.path.exists(cfg_path):
open(cfg_path, 'w').write(config_template)
# extend on the default config
config = get_default_config()
config.read(cfg_path)
return config
def validate_config(config):
assert len(config.get('network', 'node_id')) == 128 # 512bit hex encoded
assert len(config.get('wallet', 'coinbase')) == 40 # 160bit hex encoded
| mit | Python |
7f0658ee700174bae100a12b8c8c22377e829d6f | Create BlepiInit.py | HammerDuJour/blepisensor,HammerDuJour/blepisensor | BlepiInit.py | BlepiInit.py | import sqlite3
connection = sqlite3.connect('/home/pi/blepimesh/data/client.db')
cursor = connection.cursor()
print "Adding Data To DB"
cursor.execute("INSERT INTO log(tagDate) values(date('now'))")
cursor.execute("INSERT INTO log values('5',date('now'),time('now'),'34','43','TagAddr','')")
connection.commit()
print "Entire Database Contents"
for row in cursor.execute("SELECT * FROM log"):
print row
connection.close()
| mit | Python | |
b5fda5ff78f97c7bdd23f3ca4ed2b2d2ab33d101 | Create _init_.py | FinancialSentimentAnalysis-team/Finanical-annual-reports-analysis-code,FinancialSentimentAnalysis-team/Finanical-annual-reports-analysis-code,FinancialSentimentAnalysis-team/Finanical-annual-reports-analysis-code | luowang/tools/tree-tagger-windows-3.2/TreeTagger/bin/_init_.py | luowang/tools/tree-tagger-windows-3.2/TreeTagger/bin/_init_.py | apache-2.0 | Python | ||
f3fbb6ca517314ab7ac1330e766da1de89970e13 | Add debug plugin | Cyanogenoid/smartbot,tomleese/smartbot,Muzer/smartbot,thomasleese/smartbot-old | plugins/debug.py | plugins/debug.py | import time
class Plugin:
def __call__(self, bot):
bot.on_respond(r"ping$", lambda bot, msg, reply: reply("PONG"))
bot.on_respond(r"echo (.*)$", lambda bot, msg, reply: reply(msg["match"].group(1)))
bot.on_respond(r"time$", lambda bot, msg, reply: reply(time.time()))
bot.on_help("debug", self.on_help)
def on_help(self, bot, msg, reply):
reply("Syntax: ping | echo <msg> | time")
| mit | Python | |
1a7acfd59f48522f0dda984b2f33d20d843ee8ba | set up role.py | ucfopen/canvasapi,ucfopen/canvasapi,ucfopen/canvasapi | pycanvas/role.py | pycanvas/role.py | from canvas_object import CanvasObject
from util import combine_kwargs
class Role(CanvasObject):
def __str__(self):
return ""
| mit | Python | |
0d85832a82c0973c89f3f321e1f2e2486a197882 | Add script to perform partial upload | fedora-infra/fedimg,fedora-infra/fedimg | bin/partial_upload.py | bin/partial_upload.py | #!/bin/env python
# -*- coding: utf8 -*-
""" Triggers a partial upload process with the specified raw.xz URL. """
import argparse
from fedimg.config import AWS_ACCESS_ID
from fedimg.config import AWS_SECRET_KEY
from fedimg.config import AWS_BASE_REGION, AWS_REGIONS
from fedimg.services.ec2.ec2copy import main as ec2copy
from fedimg.services.ec2.ec2initiate import main as ec2main
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a partial upload based on the arguments")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
parser.add_argument(
"-v", "--volume", help="volume type for the image", required=False)
args = parser.parse_args()
return (
args.url,
args.compose_id,
args.push_notifications,
args.volume
)
def main():
url, compose_id, push_notifications, volume = get_args()
if volume is not None:
volume = [volume]
images_metadata = ec2main(
image_urls=url,
access_id=AWS_ACCESS_ID,
secret_key=AWS_SECRET_KEY,
regions=None,
volume_types=volume,
push_notifications=push_notifications,
compose_id=compose_id
)
for image_metadata in images_metadata:
image_id = image_metadata['image_id']
aws_regions = list(set(AWS_REGIONS) - set([AWS_BASE_REGION]))
ec2copy(
aws_regions,
AWS_ACCESS_ID,
AWS_SECRET_KEY,
image_ids=[image_id],
push_notifications=push_notifications,
compose_id=compose_id
)
if __name__ == '__main__':
main()
| agpl-3.0 | Python | |
1a49426497819c13ccf858d51e5fa333d95f1f7d | Add basic unit test for parseCommand | atkvo/masters-bot,atkvo/masters-bot,atkvo/masters-bot,atkvo/masters-bot,atkvo/masters-bot | src/autobot/src/udpRemote_test.py | src/autobot/src/udpRemote_test.py | #!/usr/bin/env python
import unittest
from udpRemote import parseCommand
class MockDriveParam:
velocity = 0.0
angle = 0.0
class UdpRemoteTest(unittest.TestCase):
def testValidParse(self):
p = MockDriveParam()
p = parseCommand("V44.4", p)
self.assertEqual(p.velocity, 44.4)
self.assertEqual(p.angle, 0.0)
p = parseCommand("A81.3", p)
self.assertEqual(p.velocity, 44.4)
self.assertEqual(p.angle, 81.3)
def testInvalidParse(self):
p = MockDriveParam()
p = parseCommand("X44.4", p)
self.assertEqual(p.velocity, 0.0)
self.assertEqual(p.angle, 0.0)
p = MockDriveParam()
p = parseCommand("V0F.4", p)
self.assertEqual(p.velocity, 0.0)
self.assertEqual(p.angle, 0.0)
| mit | Python | |
5b899181f14c65778f23312ddd31078fac46cd9c | Fix template filter. | jaddison/django-assets,logston/django-assets,ridfrustum/django-assets,logston/django-assets,Eksmo/django-assets,adamchainz/django-assets,mcfletch/django-assets | django_assets/filter.py | django_assets/filter.py | """Django specific filters.
For those to be registered automatically, make sure the main
django_assets namespace imports this file.
"""
from django.template import Template, Context
from webassets import six
from webassets.filter import Filter, register_filter
class TemplateFilter(Filter):
"""
Will compile all source files as Django templates.
"""
name = 'template'
max_debug_level = None
def __init__(self, context=None):
super(TemplateFilter, self).__init__()
self.context = context
def input(self, _in, out, source_path, output_path, **kw):
t = Template(_in.read(), origin='django-assets', name=source_path)
rendered = t.render(Context(self.context if self.context else {}))
out.write(rendered)
register_filter(TemplateFilter)
| """Django specific filters.
For those to be registered automatically, make sure the main
django_assets namespace imports this file.
"""
from django.template import Template, Context
from webassets import six
from webassets.filter import Filter, register_filter
class TemplateFilter(Filter):
"""
Will compile all source files as Django templates.
"""
name = 'template'
max_debug_level = None
def __init__(self, context=None):
super(TemplateFilter, self).__init__()
self.context = context
def input(self, _in, out, source_path, output_path, **kw):
t = Template(_in.read(), origin='django-assets', name=source_path)
rendered = t.render(Context(self.context if self.context else {}))
if not six.PY3:
rendered = rendered.encode('utf-8')
out.write(rendered)
register_filter(TemplateFilter)
| bsd-2-clause | Python |
d843a2198b87a41d73ab19e09ac8d0c78a6e0ef9 | Create IC74139.py | rajathkumarmp/BinPy,BinPy/BinPy,daj0ker/BinPy,BinPy/BinPy,daj0ker/BinPy,rajathkumarmp/BinPy,yashu-seth/BinPy,yashu-seth/BinPy | BinPy/examples/ic/Series_7400/IC74139.py | BinPy/examples/ic/Series_7400/IC74139.py | from __future__ import print_function
from BinPy import *
print ('Usage of IC 74139:\n')
ic = IC_74139()
print ("""This is a dial 1:4 demultiplexer(2:4 decoder) with output being inverted input"""")
print ('\nThe Pin configuration is:\n')
p = {1:0,2:0,3:0,14:0,13:1,15:0}
print (p)
print ('\nPin initialization -using -- ic.setIC(p) --\n')
ic.setIC(p)
print ('\nPowering up the IC - using -- ic.setIC({16:1, 8:0}) -- \n')
ic.setIC({16: 1, 8: 0})
print ('\nDraw the IC with the current configuration\n')
ic.drawIC()
print (
'\nRun the IC with the current configuration using -- print ic.run() -- \n')
print (
'Note that the ic.run() returns a dict of pin configuration similar to :')
print (ic.run())
print (
'\nSeting the outputs to the current IC configuration using -- ic.setIC(ic.run()) --\n')
ic.setIC(ic.run())
print ('\nDraw the final configuration\n')
ic.drawIC()
| bsd-3-clause | Python | |
dcd19e7982024f4f196f24b71fc2d73bef6723eb | add new package (#25505) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/cupla/package.py | var/spack/repos/builtin/packages/cupla/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cupla(Package):
"""C++ User interface for the Platform independent Library Alpaka"""
homepage = "https://github.com/alpaka-group/cupla"
git = "https://github.com/alpaka-group/cupla.git"
url = "https://github.com/alpaka-group/cupla/archive/refs/tags/0.3.0.tar.gz"
maintainers = ['vvolkl']
version('develop', branch='dev')
version('master', branch='master')
version('0.3.0', sha256='035512517167967697e73544c788453de5e3f0bc4e8d4864b41b2e287365cbaf')
depends_on('alpaka@0.6.0:')
def install(self, spec, prefix):
install_tree('include', self.prefix.include)
install_tree('src', self.prefix.src)
install_tree('doc', self.prefix.share.cupla.doc)
install_tree('example', self.prefix.example)
install_tree('cmake', self.prefix.cmake)
install('Findcupla.cmake', self.prefix)
install('cuplaConfig.cmake', self.prefix)
def setup_run_environment(self, env):
env.set("CUPLA_ROOT", self.prefix)
env.prepend_path("CMAKE_PREFIX_PATH", self.prefix)
env.set("CUPLA", self.prefix.share.cupla)
def setup_dependent_build_environment(self, env, dependent_spec):
env.set("CUPLA_ROOT", self.prefix)
env.prepend_path("CMAKE_PREFIX_PATH", self.prefix)
env.set("CUPLA", self.prefix.share.cupla)
| lgpl-2.1 | Python | |
7e5b4e178a5d36ca89034287168560a73bd9e63d | Create drivers.py | ariegg/webiopi-drivers,ariegg/webiopi-drivers | chips/sensor/lis3dh/drivers.py | chips/sensor/lis3dh/drivers.py | # This code has to be added to the corresponding __init__.py
DRIVERS["lis3dh"] = ["LIS3DH"]
| apache-2.0 | Python | |
f342dbf8d9455db91286823ec5d6ef64e2ace68c | Create MCP3202.py | userdw/RaspberryPi_3_Starter_Kit | Other_Applications/Ultrasonic/MCP3202.py | Other_Applications/Ultrasonic/MCP3202.py | #!/usr/bin/python
import RPi.GPIO as GPIO
import time
import datetime
import os
from time import strftime
CS = 4
CS2 = 7
CLK = 11
MOSI = 10
MISO = 9
LDAC = 8
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(CS, GPIO.OUT)
GPIO.setup(CLK, GPIO.OUT)
GPIO.setup(MOSI, GPIO.OUT)
GPIO.setup(CS2, GPIO.OUT)
GPIO.setup(LDAC, GPIO.OUT)
GPIO.setup(MISO, GPIO.IN,pull_up_down = GPIO.PUD_UP)
GPIO.output(CS, True)
GPIO.output(CLK, False)
GPIO.output(MOSI, True)
def myspi(rdata):
dataX = 0
mask = 0x80
for i in range(8):
if(rdata & mask):
GPIO.output(MOSI, True)
else:
GPIO.output(MOSI, False)
GPIO.output(CLK, True)
if(GPIO.input(MISO) == 1):
dataX = dataX + mask
GPIO.output(CLK, False)
mask = mask >> 1
return dataX;
def readADC(ch):
cmd = 0
if ch == 0: cmd = 0x80
elif ch == 1: cmd = 0xc0
#elif ch == 2: cmd = 0x00
#elif ch == 4: cmd = 0x04
GPIO.output(CS, False)
a = myspi(0x01)
#print "a: ",a
b = myspi(cmd)
#print "b: ",b
c = myspi(0x00)
#print "c: ",c
v = ((b & 0x0f) << 8) + c
#print "v: ",v
GPIO.output(CS, True)
v = round(float(v), 2)
#v=round(float(v)/4095*3.3,2)
return v;
def setDAC(data, channel):
cmd = 0
if channel == 1: cmd = 0xF0
else: cmd = 0x70
GPIO.output(LDAC,False)
GPIO.output(CS2,False)
data = int(float(data * 4095 / 255))
a = myspi((data >> 8) + cmd)
b = myspi(data & 0xFF)
GPIO.output(CS2, True)
return;
| mit | Python | |
bfc8d1052ba6f1011fcdb882a825694acf98dd39 | Add regression test for bug 1797580 | mikalstill/nova,gooddata/openstack-nova,klmitch/nova,klmitch/nova,openstack/nova,gooddata/openstack-nova,rahulunair/nova,klmitch/nova,mahak/nova,mikalstill/nova,rahulunair/nova,mahak/nova,openstack/nova,gooddata/openstack-nova,rahulunair/nova,mahak/nova,mikalstill/nova,gooddata/openstack-nova,klmitch/nova,openstack/nova | nova/tests/functional/regressions/test_bug_1797580.py | nova/tests/functional/regressions/test_bug_1797580.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as image_fake
from nova.tests.unit import policy_fixture
from nova.virt import fake
class ColdMigrateTargetHostThenLiveMigrateTest(
test.TestCase, integrated_helpers.InstanceHelperMixin):
"""Regression test for bug 1797580 introduced in Queens.
Microversion 2.56 allows cold migrating to a specified target host. The
compute API sets the requested destination on the request spec with the
specified target host and then conductor sends that request spec to the
scheduler to validate the host. Conductor later persists the changes to
the request spec because it's the resize flow and the flavor could change
(even though in this case it won't since it's a cold migrate). After
confirming the resize, if the server is live migrated it will fail during
scheduling because of the persisted RequestSpec.requested_destination
from the cold migration, and you can't live migrate to the same host on
which the instance is currently running.
This test reproduces the regression and will validate the fix.
"""
def setUp(self):
super(ColdMigrateTargetHostThenLiveMigrateTest, self).setUp()
self.useFixture(policy_fixture.RealPolicyFixture())
self.useFixture(nova_fixtures.NeutronFixture(self))
self.useFixture(nova_fixtures.PlacementFixture())
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
# The admin API is used to get the server details to verify the
# host on which the server was built and cold/live migrate it.
self.admin_api = api_fixture.admin_api
self.api = api_fixture.api
# Use the latest microversion available to make sure something does
# not regress in new microversions; cap as necessary.
self.admin_api.microversion = 'latest'
self.api.microversion = 'latest'
image_fake.stub_out_image_service(self)
self.addCleanup(image_fake.FakeImageService_reset)
self.start_service('conductor')
self.start_service('scheduler')
for host in ('host1', 'host2'):
fake.set_nodes([host])
self.addCleanup(fake.restore_nodes)
self.start_service('compute', host=host)
def test_cold_migrate_target_host_then_live_migrate(self):
# Create a server, it doesn't matter on which host it builds.
server = self._build_minimal_create_server_request(
self.api, 'test_cold_migrate_target_host_then_live_migrate',
image_uuid=image_fake.AUTO_DISK_CONFIG_ENABLED_IMAGE_UUID,
networks='none')
server = self.api.post_server({'server': server})
server = self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
original_host = server['OS-EXT-SRV-ATTR:host']
target_host = 'host1' if original_host == 'host2' else 'host2'
# Cold migrate the server to the specific target host.
migrate_req = {'migrate': {'host': target_host}}
self.admin_api.post_server_action(server['id'], migrate_req)
server = self._wait_for_state_change(
self.admin_api, server, 'VERIFY_RESIZE')
# Confirm the resize so the server stays on the target host.
confim_req = {'confirmResize': None}
self.admin_api.post_server_action(server['id'], confim_req)
server = self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
# Attempt to live migrate the server but don't specify a host so the
# scheduler has to pick one.
live_migrate_req = {
'os-migrateLive': {'host': None, 'block_migration': 'auto'}}
self.admin_api.post_server_action(server['id'], live_migrate_req)
server = self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
# FIXME(mriedem): Until bug 1797580 is resolved the migration will
# fail during scheduling.
migration = self._wait_for_migration_status(server, ['error'])
self.assertEqual('live-migration', migration['migration_type'])
| apache-2.0 | Python | |
64b842d0af6c4e07971a733d8ed6e70109e26979 | Add sample logging | yxiong/xy_python_utils | samples/sample_logging.py | samples/sample_logging.py | #!/usr/bin/env python
#
# Author: Ying Xiong.
# Created: Dec 04, 2015.
import logging
import sys
class DebugOrInfoFilter(logging.Filter):
"""Keep the record only if the level is debug or info."""
def filter(self, record):
return record.levelno in (logging.DEBUG, logging.INFO)
def config_logger(logger, formatter):
"""Configure the logger such that debug and info messages are directed to stdout,
while more critical warnings and errors to stderr.
"""
stdoutHandler = logging.StreamHandler(sys.stdout)
stdoutHandler.setLevel(logging.DEBUG)
stdoutHandler.setFormatter(formatter)
stdoutHandler.addFilter(DebugOrInfoFilter())
logger.addHandler(stdoutHandler)
stderrHandler = logging.StreamHandler(sys.stderr)
stderrHandler.setLevel(logging.WARNING)
stderrHandler.setFormatter(formatter)
logger.addHandler(stderrHandler)
if __name__ == "__main__":
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
log_format = '%(asctime)s.%(msecs)03d\t%(process)d\t%(filename)s:%(lineno)s\t%(funcName)s\t%(message)s'
formatter = logging.Formatter(log_format, datefmt="%Y-%m-%d %H:%M:%S")
config_logger(logging.getLogger(), formatter)
# To stdout.
logging.debug("debug")
logging.info("info")
# To stderr.
logging.warning("warning")
logging.error("error")
| mit | Python | |
cc02339717f392b0750be3b9a74a6406d8c2122f | Expand Event's attributes and add a __doc__ field to each class in events/models.py | matus-stehlik/glowing-batman,rtrembecky/roots,tbabej/roots,matus-stehlik/roots,tbabej/roots,tbabej/roots,rtrembecky/roots,matus-stehlik/roots,matus-stehlik/roots,matus-stehlik/glowing-batman,rtrembecky/roots | events/models.py | events/models.py | from django.db import models
from django.contrib import admin
# Event-related models
class Event(models.Model):
"""
Event represents a simple event, that is opened to public. This can be
either a public presentation, or a public game.
Users are not invited, but can notify the organizer that they want to
participate. This relation is represented using EventUserRegistration.
"""
name = models.CharField(max_length=100)
location = models.CharField(max_length=100)
start_time = models.DateTimeField()
end_time = models.DateTimeField()
galerry = models.ForeignKey('posts.Gallery')
registered_user = models.ManyToManyField('users.User',
through='EventUserRegistration')
registered_org = models.ManyToManyField('users.Organizer',
through='EventOrgRegistration',
related_name='organizers')
def __unicode__(self):
return self.name
class EventUserRegistration(models.Model):
"""
Represents a user's registration to the event.'
"""
event = models.ForeignKey('events.Event')
user = models.ForeignKey('users.User')
timestamp = models.DateTimeField()
def __unicode__(self):
return (self.user.__unicode__() + u" goes to " +
self.event.__unicode__())
class EventOrgRegistration(models.Model):
"""
Represents a organizer's registration to the event. This is merely for them
to let everybody know that they will be available (if help with organization
is needed).
"""
event = models.ForeignKey('events.Event')
organizer = models.ForeignKey('users.Organizer')
timestamp = models.DateTimeField()
def __unicode__(self):
return (self.user.__unicode__() + u" organizes " +
self.event.__unicode__())
class Camp(Event):
"""
This class is called Camp from historical reasons. It is supposed to model
an workshop that is open only to the best of the competitors in the given
competition's season.
User's have to be invited to be able to register. Usually more users than
the capacity allows are invited, some are substitutes.'
"""
location = models.CharField(max_length=100) # temporary placeholder
def __unicode__(self):
return self.location
# Register to the admin site
admin.site.register(Event)
admin.site.register(EventUserRegistration)
admin.site.register(EventOrgRegistration)
admin.site.register(Camp) | from django.db import models
from django.contrib import admin
# Event-related models
class Event(models.Model):
name = models.CharField(max_length=100)
galerry = models.ForeignKey('posts.Gallery')
registered_user = models.ManyToManyField('users.User',
through='EventUserRegistration')
registered_org = models.ManyToManyField('users.Organizer',
through='EventOrgRegistration',
related_name='organizers')
def __unicode__(self):
return self.name
class EventUserRegistration(models.Model):
event = models.ForeignKey('events.Event')
user = models.ForeignKey('users.User')
timestamp = models.DateTimeField()
def __unicode__(self):
return (self.user.__unicode__() + u" goes to " +
self.event.__unicode__())
class EventOrgRegistration(models.Model):
event = models.ForeignKey('events.Event')
organizer = models.ForeignKey('users.Organizer')
timestamp = models.DateTimeField()
def __unicode__(self):
return (self.user.__unicode__() + u" organizes " +
self.event.__unicode__())
class Camp(Event):
location = models.CharField(max_length=100) # temporary placeholder
def __unicode__(self):
return self.location
# Register to the admin site
admin.site.register(Event)
admin.site.register(EventUserRegistration)
admin.site.register(EventOrgRegistration)
admin.site.register(Camp) | mit | Python |
4f0b6a6eefd6848a702fe4b808f137ef0b2ee2f8 | rename as "config.py" after adding keys | mwweinberg/NYC-MTA-Next-Train,mwweinberg/NYC-MTA-Next-Train | exampleconfig.py | exampleconfig.py | URL_F = 'http://datamine.mta.info/mta_esi.php?key='KEY'&feed_id=21'
URL_AC = 'http://datamine.mta.info/mta_esi.php?key='KEY'&feed_id=26'
| mit | Python | |
85daad5401267b613e546896bb2abd1658f730b1 | Create 1_triple_step.py | zmarvel/cracking,zmarvel/cracking | ch09/1_triple_step.py | ch09/1_triple_step.py | # 0 - (1) [0]
# 1 - (1) [1]
# 2 - (2) [1, 1], [2]
# 3 - (4) [1, 1, 1], [1, 2], [2, 1], [3]
# 4 -
#subtract 1
#subtract 2
#subtract 3
ways = {0: 0, 1:1, 2: 2, 3: 4}
def calculate_ways(steps):
if steps < 4:
return ways[steps]
for i in range(4, steps + 1):
ways[i] = ways[i-1] + ways[i-2] + ways[i-3]
return ways[steps]
ways = {0: 0, 1:1, 2: 2, 3: 4}
def ways_recurse(steps):
if steps < 4:
return ways[steps]
return ways_recurse(steps-1) + ways_recurse(steps-2) + ways_recurse(steps-3)
def ways_recurse(steps):
if steps not in ways:
ways[steps] = ways_recurse(steps-1) + ways_recurse(steps-2) + ways_recurse(steps-3)
return ways[steps]
for i in range(29):
print str(i) + ' ' + str(ways_recurse(i))
| mit | Python | |
3e51c57a8611a8ebfb4f2eb045510c50587bd781 | Test password tokens not in response | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | api/radar_api/tests/test_users.py | api/radar_api/tests/test_users.py | import json
from radar_api.tests.fixtures import get_user
def test_serialization(app):
admin = get_user('admin')
client = app.test_client()
client.login(admin)
response = client.get('/users')
assert response.status_code == 200
data = json.loads(response.data)
for user in data['data']:
assert 'username' in user
assert 'password_hash' not in user
assert 'reset_password_token' not in user
| agpl-3.0 | Python | |
3660767a92750eae3c3ede69ef6778a23d3074a7 | Add the Action enum | chrisseto/Still | wdim/client/actions.py | wdim/client/actions.py | import enum
class Action(enum.Enum):
create = 0
delete = 1
update = 2
| mit | Python | |
71bab0603cbf52d6b443cfff85ef19a04f882a36 | Add the SQL statements because I forgot | codeforsanjose/inventory-control,worldcomputerxchange/inventory-control | inventory_control/database/sql.py | inventory_control/database/sql.py | """
So this is where all the SQL commands live
"""
CREATE_SQL = """
CREATE TABLE component_type (
id INT PRIMARY KEY AUTO_INCREMENT,
type VARCHAR(255) UNIQUE
);
CREATE TABLE components (
id INT PRIMARY KEY AUTO_INCREMENT,
sku TEXT,
type INT,
status INT,
FOREIGN KEY (type) REFERENCES component_type(id)
);
CREATE TABLE projects (
id INT PRIMARY KEY AUTO_INCREMENT,
motherboard INT,
power_supply INT,
cpu INT,
hard_drive INT,
proj_case INT,
memory INT,
FOREIGN KEY (motherboard) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (cpu) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (power_supply) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (hard_drive) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (proj_case) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (memory) REFERENCES components(id) ON DELETE CASCADE
);
"""
ADD_COMPONENT_TYPE = """INSERT IGNORE INTO component_type (type) VALUES ('{text}')
"""
GET_COMPONENT_TYPE="""SELECT * FROM component_type WHERE type='{text}'"""
DELETE_COMPONENT_TYPE = """DELETE FROM component_type WHERE type='{text}'
"""
SELECT_ALL_COMPONENTS = """
SELECT * FROM components INNER JOIN component_type
ON components.type = component_type.id;
"""
DROP_SQL = """
DROP TABLE projects;
DROP TABLE components;
DROP TABLE component_type;
"""
| mit | Python | |
52076834e04fd735d4bba88472163c31347bc201 | Create scarp_diffusion_no_component.py | landlab/drivers | scripts/diffusion/scarp_diffusion_no_component.py | scripts/diffusion/scarp_diffusion_no_component.py | #Import statements so that you will have access to the necessary methods
import numpy
from landlab import RasterModelGrid
from landlab.plot.imshow import imshow_node_grid, imshow_core_node_grid
from pylab import show, figure
#Create a raster grid with 25 rows, 40 columns, and cell spacing of 10 m
mg = RasterModelGrid(25, 40, 10.0)
#Create a field of node data (an array) on the grid called elevation.
#Initially populate this array with zero values.
z = mg.add_zeros('node', 'elevation')
#Check the size of the array
len(z)
#Create a diagonal fault across the grid
fault_y = 50.0 + 0.25*mg.node_x
upthrown_nodes = numpy.where(mg.node_y>fault_y)
z[upthrown_nodes] += 10.0 + 0.01*mg.node_x[upthrown_nodes]
#Illustrate the grid
imshow_node_grid(mg, z, cmap='jet', grid_units=['m','m'])
show()
#Define paramaters
kd = 0.01 # 0.01 m2 per year
dt = 0.2*mg.dx*mg.dx/kd # CFL condition
#Set boundary conditions
mg.set_closed_boundaries_at_grid_edges(False, True, False, True)
#Get id values of the cord nodes on which you will operate
interior_nodes = mg.get_core_nodes()
#Evolve landscape
for i in range(25):
g = mg.calculate_gradients_at_active_links(z)
qs = -kd*g
dqsdx = mg.calculate_flux_divergence_at_nodes(qs)
dzdt = -dqsdx
z[interior_nodes] += dzdt[interior_nodes]*dt
#Plot new landscape
figure()
imshow_node_grid(mg, z, cmap='jet', grid_units=['m','m'])
show()
| mit | Python | |
9d8278e98e505ffb68c2dcf870e61c0239721e5b | Add the gpio proxy for the Intel Edison | fjacob21/pycon2015 | elpiwear/Edison/gpio.py | elpiwear/Edison/gpio.py | # The MIT License (MIT)
#
# Copyright (c) 2015 Frederic Jacob
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#
# Simple proxy class for access of the GPIO bus on the Raspberry Pi.
#
import mraa
IN = mraa.DIR_IN
OUT = mraa.DIR_OUT
class gpio:
def __init__(self, pin, direction):
self.gpio = mraa.Gpio(pin)
self.gpio.dir(direction)
def input(self):
return self.gpio.read()
def output(self, value):
self.gpio.write(value)
def on(self):
self.gpio.write(1)
def off(self):
self.gpio.write(0)
| mit | Python | |
a49d28c552600ee2a0fe24ee83ed5cc7bbe36417 | Add wrist tracker class | Isaac-W/cpr-vision-measurement,Isaac-W/cpr-vision-measurement,Isaac-W/cpr-vision-measurement | wristtracker.py | wristtracker.py | import math
from markerutils import *
class TrackedMarker(object):
def __init__(self, marker, size, distance, position):
self.marker = marker
self.size = size
self.distance = distance
self.position = position
class WristTracker(object):
def __init__(self, marker_finder, marker_size, focal_length, origin_y=0):
self.marker_finder = marker_finder
self.S = marker_size
self.F = focal_length
self.origin_y = origin_y
self.cur_track = None
def set_origin(self, origin_y):
self.origin_y = origin_y
def get_origin(self):
return self.origin_y
def get_marker(self, img, output=None):
h, w, _ = img.shape
center = (w / 2, h / 2)
# Find tracker (closest to last location or center line)
track_matches = self.marker_finder.find_markers(img, output)
if self.cur_track:
# Ellipse: ((x, y), (MA, ma), angle)
track_matches.sort(key=lambda x: math.sqrt(
math.pow(x[1][0][0] - self.cur_track[1][0][0], 2) + math.pow(x[1][0][1] - self.cur_track[1][0][1], 2))) # Get closest to center
else:
track_matches.sort(key=lambda x: x[1][0][0] - center[0]) # Get closest to center
# Get best ellipse
if track_matches:
self.cur_track = track_matches[0]
# Operate on prev_track (retain last known position if no new marker found)
if self.cur_track:
# Calculate distance
px = get_pixel_size(self.cur_track)
D = self.F * self.S / px
# Calculate position
y = self.cur_track[1][0][1]
pos = (y - self.origin_y) * (self.S / px)
return TrackedMarker(self.cur_track, px, D, pos)
return None
| mit | Python | |
49716ea37b36785faeb4a8b1cb43e6225e6b1d82 | add revised jobfile script for excalibur | stu314159/pyNFC,stu314159/pyNFC,stu314159/pyNFC,stu314159/pyNFC | genJobfile_ex.py | genJobfile_ex.py | #genJobfile.py
"""
more-or-less automated generation of PBS jobfile
"""
import argparse
parser = argparse.ArgumentParser(prog="genJobfile.py",
description="PBS Jobfile generation script.")
parser.add_argument('jobfileName',type=str)
parser.add_argument('jobName',type=str)
parser.add_argument('nnodes',type=int)
parser.add_argument('ppn',type=int)
parser.add_argument('mpi_procs_per_node',type=int)
parser.add_argument('runtimeNumHours',type=int)
parser.add_argument('queue',type=str)
parser.add_argument('latticeType',type=str)
parser.add_argument('partitionType',type=str)
# parse input arguments
args = parser.parse_args()
# assign to the variables
jobfileName = args.jobfileName
jobName = args.jobName
nnodes = args.nnodes
ppn = args.ppn
mpi_procs_per_node = args.mpi_procs_per_node
runtimeNumHours = args.runtimeNumHours
queue = args.queue
latticeType = args.latticeType
partitionType = args.partitionType
executableName = 'pyNFC_test.py'
filesToCopy = ['FluidChannel.py', 'pyLattice.py', 'pyNFC.py', 'pyNFC_test.py',
'pyNFC_Util.py', 'validate.py', 'vtkHelper.py', 'test_script.sh',
'inl.lbm', 'onl.lbm', 'snl.lbm', 'params.lbm', 'parts.lbm',
'pyPartition.py','pyNFC_preprocess.py','pyNFC_postprocess.py',
'partition_suggestion.py','partition_compare.py',
'LBM_Interface.so','processNFC.py','hdf5Helper.py']
if runtimeNumHours < 10:
walltime = "0%d:00:00"%runtimeNumHours
else:
walltime = "%d:00:00"%runtimeNumHours # may be a problem if runtime > 99 hours
mpi_procs = mpi_procs_per_node*nnodes
jobfileName = "%s.pbs"%(jobfileName)
#--------- more-or-less fixed code below -----------------
proj_id = 'DTRAA02280EA8'
# open the file
jf = open(jobfileName,'w')
# essential PBS directives
jf.write('#!/bin/bash \n') # the shell
jf.write('#PBS -A %s \n'%proj_id) # project identifier
jf.write('#PBS -q %s \n'%queue) # specify queue
jf.write('#PBS -l select=%d:ncpus=%d:mpiprocs=%d \n'% \
(nnodes,ppn,mpi_procs_per_node))
jf.write('#PBS -l walltime=%s \n'%walltime)
jf.write('#PBS -l ccm=1 \n') # specify cluster compatibility mode. Why wouldn't you?
#optional PBS directives
jf.write('#PBS -N %s \n'%jobName)
jf.write('#PBS -j oe \n')
#jf.write('#PBS -V \n')
jf.write('#PBS -S /bin/bash \n')
# Execution block
jf.write('cd $WORKDIR\n')
jf.write("JOBID=`echo $PBS_JOBID | cut -d '.' -f 1` \n")
jf.write('if [ ! -d $JOBID ]; then \n')
jf.write(' mkdir -p $JOBID \n')
jf.write('fi \n')
jf.write('cd $JOBID \n')
# copy files
for s in filesToCopy:
jf.write('cp $PBS_O_WORKDIR/%s . \n'% s)
## move to the $JOBDIR
#jf.write('cd $JOBDIR \n') #<--- this was an error
# invoke execution
#jf.write('module swap PrgEnv-cray PrgEnv-intel\n')
jf.write('module load costinit\n')
#jf.write('module load python\n')
#jf.write('module load numpy\n')
#jf.write('module load scipy\n')
#jf.write('module load mpi4py\n')
jf.write('module load boost\n')
jf.write('./test_script.sh %s %s %d\n'%(latticeType,partitionType,mpi_procs))
jf.close()
| mit | Python | |
cbc1609758762c7db4d3477248e87ecf29fdd288 | add dep | kiaderouiche/hilbmetrics | hilbert/common/__accessdata__.py | hilbert/common/__accessdata__.py | from sys import platform
from platform import architecture
def install_data_files():
""" """
if sys.platform.startswith('netbsd'):
""" """
pass
elif sys.platform.startswith('freebsd'):
""" """
pass
elif sys.platform.startswith('linux'):
if PY3:
data_files = [('share/applications', ['script/hilbmetric.desktop']),
('share/pixmaps', ['data/hilbmetric.png'])]
else:
data_files = [('share/applications', ['script/hilmetric.desktop'] ),
('share/pixmaps', ['data/hilbmetric.png'])]
elif os.name =='nt':
data_files = [('script', ['data/hilbmetric.ico'])]
else:
data_files = []
return data_files | apache-2.0 | Python | |
dfe3f7fd7775ce13a670e1d27beddba5c1254a4a | Define the HPACK reference structure. | irvind/hyper,fredthomsen/hyper,lawnmowerlatte/hyper,masaori335/hyper,fredthomsen/hyper,masaori335/hyper,plucury/hyper,Lukasa/hyper,irvind/hyper,jdecuyper/hyper,Lukasa/hyper,jdecuyper/hyper,lawnmowerlatte/hyper,plucury/hyper | hyper/http20/hpack_structures.py | hyper/http20/hpack_structures.py | # -*- coding: utf-8 -*-
"""
hyper/http20/hpack_structures
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Contains data structures used in hyper's HPACK implementation.
"""
class Reference(object):
"""
The reference object is essentially an object that 'points to' another
object, not unlike a pointer in C or similar languages. This object is
distinct from the normal Python name because we can tell the difference
between a reference and the 'actual' object.
It behaves in the following ways:
- Two references to the same object evaluate equal.
- Two references to different objects evaluate not equal, even if those
objects themselves evaluate equal.
- Two references to the same object hash to the same value.
- Two references to different objects hash to different values.
The reference is distinct from and does not use weak references. A
reference may never point at an object that has been garbage collected.
This means that, to ensure objects get GC'd, any reference to them must
also go out of scope.
This object is _conceptually_ immutable, but the implementation doesn't
attempt to enforce that to avoid the overhead. Be warned that changing
the object being referenced after creation could lead to all sorts of weird
nonsense.
:param obj: The object being referenced.
"""
def __init__(self, obj):
self.obj = obj
def __eq__(self, other):
return (isinstance(other, Reference) and self.obj is other.obj)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return id(self.obj)
| mit | Python | |
5326519e69b1280ae53c02fa6e62ed6a9aa2db03 | Create Ethiopia.py | dr-prodigy/python-holidays | holidays/countries/Ethiopia.py | holidays/countries/Ethiopia.py | # -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Author: ryanss <ryanssdev@icloud.com> (c) 2014-2017
# dr-prodigy <maurizio.montel@gmail.com> (c) 2017-2021
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from datetime import date
import calander
from dateutil.easter import easter
from dateutil.relativedelta import relativedelta as rd
from holidays.constants import SAT,SUN
from holidays.constants import JAN,MAR, MAY,SEP
from holidays.holiday_base import HolidayBase
from holidays.utils import islamic_to_gre
WEEKEND = (SAT,SUN)
class Ethiopia(HolidayBase):
# Holidays here are estimates, it is common for the day to be pushed
# if falls in a weekend, although not a rule that can be implemented.
# Holidays after 2020: the following four moving date holidays whose exact
# date is announced yearly are estimated (and so denoted):
# - Eid El Fetr*
# - Eid El Adha*
# - Arafat Day*
# - Moulad El Naby*
# *only if hijri-converter library is installed, otherwise a warning is
# raised that this holiday is missing. hijri-converter requires
# Python >= 3.6
# is_weekend function is there, however not activated for accuracy.
def __init__(self, **kwargs):
self.country = "ET"
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):
# New Year's Day
if calendar.isleap(year):
self[date(year, SEP, 12)] = "አዲስ ዓመት እንቁጣጣሽ"
else:
self[date(year, SEP, 11)] = "አዲስ ዓመት እንቁጣጣሽ"
# Finding of true cross
if calendar.isleap(year):
self[date(year, SEP, 29)] = "መስቀል"
else:
self[date(year, SEP, 28)] = "መስቀል"
# Ethiopian Christmas
self[date(year, JAN, 7)] = "ገና"
# Ethiopian Ephiphany
self[date(year, JAN, 19)] = "ጥምቀት"
# Ethiopian Good Friday
self[easter(year, 2) - rd(days=2)] = "ስቅለት"
# Ethiopian Easter - Orthodox Easter
self[easter(year, 2)] = "ፋሲካ"
# Adwa Victory Day
if year > 1896:
self[date(year, MAR, 2)] = "አድዋ"
# Labour Day
self[date(year, MAY, 1)] = "የሰራተኞች ቀን"
# Patriots Day
if year > 1941
self[date(year, MAY, 5)] = "የአርበኞች ቀን"
# Derg Downfall Day
if year > 1991:
self[date(year, MAY, 28)] = "ደርግ የወደቀበት ቀን"
# Downfall of King. Hailesilassie
if year < 1991 and year > 1974 :
if calendar.isleap(year):
self[date(year, SEP, 13)] = "ደርግ የመጣበት ቀን"
else:
self[date(year, SEP, 12)] = "ደርግ የመጣበት ቀን"
# Eid al-Fitr - Feast Festive
# date of observance is announced yearly, This is an estimate since
# having the Holiday on Weekend does change the number of days,
# deceided to leave it since marking a Weekend as a holiday
# wouldn't do much harm.
for date_obs in islamic_to_gre(year, 10, 1):
hol_date = date_obs
self[hol_date + rd(days=1)] = "ኢድ አልፈጥር"
# Eid al-Adha - Scarfice Festive
# date of observance is announced yearly
for date_obs in islamic_to_gre(year, 12, 9):
hol_date = date_obs
self[hol_date + rd(days=1)] = "አረፋ"
# Prophet Muhammad's Birthday - (hijari_year, 3, 12)
for date_obs in islamic_to_gre(year, 3, 12):
hol_date = date_obs
self[hol_date] = "መውሊድ"
class ET(Ethiopia):
pass
class ETH(Ethiopia):
pass
| mit | Python | |
4df070d5b39898ca67127ef17aa8d80f47e2c992 | Add files via upload | zaqari/WordEmbeddingsIntoDNN | Embeddings_2_DNNClass_General.py | Embeddings_2_DNNClass_General.py | #All the imports.
import gensim
import codecs
from gensim import corpora, models, similarities
import nltk
import csv
import pandas as pd
import tempfile
import codecs
import csv
model = models.Word2Vec.load(input('Where are your word embeddings coming from, shitbags? '))
word = model.wv.vocab
#Just some notes when the program is run/
print('Make sure to (1) run build_DNNarray(\'the lexeme you\'re interested in\'), and then (2) run_rabbit_run()')
#Pulls data from the corpus and formats it
def build_DNNarray(lexeme, WORD=word, MODEL=model):
inty=input('What example number is this? (note: 0 is anything that isn\'t being classified) ')
array = []
with codecs.open(input('Where is your training data going, fuck-face?? '), 'a', 'utf-8') as csvfile:
databuilder = csv.writer(csvfile, delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL)
for item in word:
if lexeme in item:
array.append(list(model[item]))
array.append(inty)
databuilder.writerow(array)
csvfile.close()
#Components for the DNN. Since we're playing with vectors,
#I ended up de-activating sections relating to categorical columns--they weren't necessary.
# 100 integer-named feature columns plus the label column.
COLUMNS = list(range(100)) + ['label']
LABEL_COLUMN = 'label'
# NOTE(review): this aliases COLUMNS (same list object), so 'label' is also
# treated as a continuous feature column — confirm this is intended.
CONTINUOUS_COLUMNS = COLUMNS
import tensorflow as tf
def input_fn(df):
    """Convert a pandas DataFrame into the (feature_cols, label) pair expected
    by tf.contrib.learn estimators.

    Bug fixed: the original also built a `categorical_cols` dict driven by the
    undefined name CATEGORICAL_COLUMNS, which raised NameError on every call;
    its result was never merged into `feature_cols` anyway (only the
    continuous columns were), so that dead block is removed.

    Returns:
        (feature_cols, label): dict of column name -> constant Tensor, and a
        constant Tensor holding the label column.
    """
    # Creates a dictionary mapping from each continuous feature column name (k)
    # to the values of that column stored in a constant Tensor.
    continuous_cols = {k: tf.constant(df[k].values)
                       for k in CONTINUOUS_COLUMNS}
    feature_cols = dict(continuous_cols.items())
    # Converts the label column into a constant Tensor.
    label = tf.constant(df[LABEL_COLUMN].values)
    return feature_cols, label
def train_input_fn():
    # Input-builder handed to m.fit().
    # NOTE(review): df_train is only ever assigned as a *local* inside
    # run_rabbit_run(), so this global lookup fails with NameError unless a
    # module-level df_train is created first — verify.
    return input_fn(df_train)
def eval_input_fn():
    # NOTE(review): df_test is never defined anywhere in this file (its load
    # is commented out in run_rabbit_run), so calling this raises NameError.
    return input_fn(df_test)
# Temporary directory for estimator checkpoints and summaries.
model_dir = tempfile.mkdtemp()
# Accumulator filled by make_features() with TF feature columns.
features = []
#transforms the inputs into real_value_columns that TF can manipulate.
def make_features(columns=CONTINUOUS_COLUMNS):
    """Populate the module-level `features` list with one
    tf.contrib.layers.real_valued_column per name in *columns*.

    Bugs fixed:
    * the original nested a second loop over the column list, appending
      len(columns) duplicate entries for every column name (quadratic
      duplication of identical feature columns);
    * it iterated CONTINUOUS_COLUMNS directly, silently ignoring its own
      `columns` parameter. The default keeps the old behaviour.
    """
    for k in columns:
        features.append(tf.contrib.layers.real_valued_column(k))
#The following two lists are place-holders prior to running actual model in run_rabbit_run()
wide_columns=[0]
deep_columns=[]
#The actual model.
# NOTE(review): `m` is constructed at import time with the placeholder column
# lists above; rebinding wide_columns later does NOT update the estimator.
m = tf.contrib.learn.DNNLinearCombinedClassifier(
    model_dir=model_dir,
    linear_feature_columns=wide_columns,
    dnn_feature_columns=deep_columns,
    dnn_hidden_units=[100, 50],
    n_classes=2)
#run this to put everything together after you've built some training data.
def run_rabbit_run():
    """Interactive driver: build the feature columns, load the training CSV
    (path asked for on stdin) and fit the module-level estimator `m`.

    Bug fixes vs. the original:
    * `names=` / `skipinitialspace=` were passed to input() instead of
      pd.read_csv() (misplaced closing parenthesis), raising TypeError;
    * df_train was a function local, so train_input_fn() could never see it;
      it is now published as a module global;
    * the stray `wide_collumns = make_features()` line (misspelled name bound
      to make_features' None return) was removed.
    """
    global df_train  # read by train_input_fn() during m.fit()
    make_features()
    wide_columns = features
    deep_columns = []
    df_train = pd.read_csv(input('Can I have your training data? Can I has it? '),
                           names=COLUMNS, skipinitialspace=True)
    #df_test = pd.read_csv('/Users/ZaqRosen/Desktop/ARAPAHO_test_data.csv', names=COLUMNS, skipinitialspace=True, skiprows=1)
    m.fit(input_fn=train_input_fn, steps=2000)
    #results = m.evaluate(input_fn=eval_input_fn, steps=20)
    #print(results)
| mit | Python | |
37b1250e213b78262075664e4291707ff369e981 | Create clase-3.py | AnhellO/DAS_Sistemas,AnhellO/DAS_Sistemas,AnhellO/DAS_Sistemas | Ene-Jun-2019/Ejemplos/clase-3.py | Ene-Jun-2019/Ejemplos/clase-3.py | diccionario = {
    # Example words grouped by their initial letter.
    'a': ['accion', 'arte', 'arquitectura', 'agrego', 'actual'],
    'b': ['bueno', 'bien', 'bonito'],
    'c': ['casa', 'clase', 'coctel']
}
# New keys can be added after construction by plain assignment.
diccionario['d'] = ['dado', 'diccionario', 'duda']
# print(diccionario)
# print(diccionario['a'])
# Iterate key/value pairs, keys only, and values only; the prints are
# intentionally disabled, the loops remain as teaching examples.
for llave, valor in diccionario.items():
    pass #print("sho soy la llavesita -> {}, y el valor es: {}".format(llave, valor))
for llave in diccionario.keys():
    pass #print("sho soy la puro llavesita: {}".format(llave))
for valor in diccionario.values():
    pass #print("sho soy el puro valor: {}".format(valor))
# print(sorted(diccionario.items()))
# Tuples are immutable; lists support in-place item assignment.
tupla = (1, 2, 3)
lista = [4, 2, 3]
lista[0] = 0
# print(lista[0])
# print(len(lista))
# print(len(diccionario))
# print(len(diccionario['a']))
class Automovil(object):
    """Modelo sencillo de un automovil, configured via keyword arguments.

    Unspecified options default to None, except the airbag count which
    defaults to 0.  All setters return ``self`` so calls can be chained.
    """

    # Class-level default; __init__ always shadows it on each instance.
    bolsas_de_aire = 0

    def __init__(self, **argumentos):
        leer = argumentos.get
        self.llantas = leer('llantas')
        self.motor = leer('motor')
        self.transmision = leer('transmision')
        self.bolsas_de_aire = leer('bolsas_de_aire', 0)
        self.marca = leer('marca')

    # --- fluent setters -------------------------------------------------
    def set_llantas(self, llantas):
        self.llantas = llantas
        return self

    def set_motor(self, motor):
        self.motor = motor
        return self

    def set_transmision(self, transmision):
        self.transmision = transmision
        return self

    # --- accessors ------------------------------------------------------
    def get_llantas(self):
        return self.llantas

    def get_motor(self):
        return self.motor

    def get_transmision(self):
        return self.transmision

    def __str__(self):
        # Spec sheet, stripped of surrounding whitespace and lower-cased.
        ficha = """
        Llantas: {}\nMotor: {}\nTransmision: {}\n# de Bolsas de Aire: {}\nMarca: {}
        """
        campos = (self.llantas, self.motor, self.transmision,
                  self.bolsas_de_aire, self.marca)
        return ficha.format(*campos).strip().lower()
#auto = Automovil(motor='v8', transmision='estándar')
#auto2 = Automovil(motor='v4')
# Build a fully-specified car; unspecified options default inside __init__.
auto3 = Automovil(motor='v6', llantas='euzkadi', transmision='cvt', marca='honda')
#auto3.set_motor('v6').set_llantas('euzkadi').set_transmision('cvt')
#print(auto.get_motor())
#print(auto.get_transmision())
#print(auto.get_llantas())
#print(auto)
#print(auto2)
# __str__ renders the lower-cased spec sheet.
print(auto3)
| mit | Python | |
48f2be780f6aa569bb1d8b8c0623e54cac49f613 | add instance action model | CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend | core/models/instance_action.py | core/models/instance_action.py | from django.db import models
class InstanceAction(models.Model):
    """Simple lookup model: an action name plus its free-form description."""
    # Short identifier of the action.
    name = models.CharField(max_length=50)
    # Longer human-readable explanation.
    description = models.TextField()

    class Meta:
        # Explicit database table name.
        db_table = 'instance_action'
app_label = 'core' | apache-2.0 | Python | |
78aaccb71fc64e52497abf0d0c768f3767a3d932 | Update expenses status on database | softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat | fellowms/migrations/0020_auto_20160602_1607.py | fellowms/migrations/0020_auto_20160602_1607.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 16:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine Expense.status: single-char CharField with the choice
    labels below and default 'P' (Processing).  Schema metadata only."""

    dependencies = [
        ('fellowms', '0019_auto_20160601_1512'),
    ]

    operations = [
        migrations.AlterField(
            model_name='expense',
            name='status',
            field=models.CharField(choices=[('W', 'Not submitted yet'), ('S', 'Submitted (but not processed yet)'), ('P', 'Processing'), ('A', 'Approved (waiting reply from finances)'), ('F', 'Finished')], default='P', max_length=1),
        ),
    ]
| bsd-3-clause | Python | |
1c6b74129d6e6a815d73e2a935fc86755ffb4f8a | Improve sourcecode (issue #11 and #17). | gersolar/solar_radiation_model,ahMarrone/solar_radiation_model,scottlittle/solar_radiation_model | imagedownloader/requester/api.py | imagedownloader/requester/api.py | from requester.models import AutomaticDownload
from tastypie.authentication import SessionAuthentication
from tastypie.resources import ModelResource
class AutomaticDownloadResource(ModelResource):
    """Tastypie REST resource exposing all AutomaticDownload rows under
    the 'automatic_download' endpoint, filterable by timestamp."""
    class Meta(object):
        queryset = AutomaticDownload.objects.all()
        resource_name = 'automatic_download'
        # Allow exact and range (lt/lte/gt/gte) filters on timestamps.
        filtering = {
            'created': ['exact', 'lt', 'lte', 'gte', 'gt'],
            'modified': ['exact', 'lt', 'lte', 'gte', 'gt'],
        }
authentication = SessionAuthentication() | from requester.models import AutomaticDownload
from tastypie import fields
from tastypie.authentication import SessionAuthentication
from tastypie.resources import ModelResource
from libs.tastypie_polymorphic import ModelResource
class AutomaticDownloadResource(ModelResource):
    """Tastypie REST resource exposing all AutomaticDownload rows under
    the 'automatic_download' endpoint, filterable by timestamp.

    NOTE(review): ModelResource is imported twice above (tastypie.resources,
    then libs.tastypie_polymorphic); the second import is the one in effect.
    """
    class Meta(object):
        queryset = AutomaticDownload.objects.all()
        resource_name = 'automatic_download'
        # Allow exact and range (lt/lte/gt/gte) filters on timestamps.
        filtering = {
            'created': ['exact', 'lt', 'lte', 'gte', 'gt'],
            'modified': ['exact', 'lt', 'lte', 'gte', 'gt'],
        }
authentication = SessionAuthentication() | mit | Python |
65830295d30507e632a1a71c15083c0e58977c9c | add badchans.py, for honeypot purposes... | PyLink/pylink-contrib-modules | 2.0/plugins/badchans.py | 2.0/plugins/badchans.py | """
badchans.py - Kills unopered users when they join specified channels.
"""
from pylinkirc import utils, conf, world
from pylinkirc.log import log
# Kill reason shown to matched users (presumably split so the phrase does
# not appear verbatim in the source — verify). XXX: make this configurable.
REASON = "You have si" + "nned..." # XXX: config option
def handle_join(irc, source, command, args):
    """
    JOIN listener enforcing the per-network 'badchans' list: any unopered
    user joining a matching channel is killed; opers only get a warning.
    """
    # Never act on privileged services or on clients we created ourselves.
    if irc.is_privileged_service(source) or irc.is_internal_client(source):
        return

    patterns = irc.serverdata.get('badchans')
    if not patterns:
        return  # feature not configured on this network

    channel = args['channel']
    for pattern in patterns:
        if not irc.match_text(pattern, channel):
            continue

        # Prefer acting as the antispam service client when it is loaded.
        killer_uid = None
        if 'antispam' in world.services:
            killer_uid = world.services['antispam'].uids.get(irc.name)

        for uid in args['users']:
            if irc.is_oper(uid):
                irc.msg(uid, "Warning: %s kills unopered users!" % channel,
                        notice=True,
                        source=killer_uid or irc.pseudoclient.uid)
            else:
                log.info('(%s) badchans: killing user %s for joining channel %s',
                         irc.name, irc.get_hostmask(uid), channel)
                irc.kill(killer_uid or irc.sid, uid, REASON)

utils.add_hook(handle_join, 'JOIN')
| mpl-2.0 | Python | |
e3ad95017bced8dac5474d6de5958decf4f58279 | add migration file | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/auditcare/migrations/0005_auditcaremigrationmeta.py | corehq/apps/auditcare/migrations/0005_auditcaremigrationmeta.py | # Generated by Django 2.2.24 on 2021-06-20 14:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the AuditcareMigrationMeta model: one row per migration key,
    recording its state (Started/Finished/Errored), record count and
    creation/modification timestamps."""

    dependencies = [
        ('auditcare', '0004_add_couch_id'),
    ]

    operations = [
        migrations.CreateModel(
            name='AuditcareMigrationMeta',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Indexed lookup key for the migration batch.
                ('key', models.CharField(db_index=True, max_length=50)),
                ('state', models.CharField(choices=[('s', 'Started'), ('f', 'Finished'), ('e', 'Errored')], max_length=1)),
                ('record_count', models.PositiveIntegerField(null=True)),
                # Set once on insert / refreshed on every save, respectively.
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
| bsd-3-clause | Python | |
77a6100cbb45342d471d3d258b73c346bebacbbb | Add weather warnings | ElizabethSEden/cycling-weather-bot | weather_warning.py | weather_warning.py | from bs4 import BeautifulSoup
from urllib.request import Request, urlopen
from settings import REGION_CODE, REGION_NAME
from datetime import date
class NoWarningsException(Exception):
    """Raised by WarningDetails when no applicable warning is in effect."""
    pass
def get_weather_warning():
    """Return a WarningDetails for the configured region (settings
    REGION_CODE / REGION_NAME), or None when no warning is in effect."""
    try:
        details = WarningDetails(REGION_CODE, REGION_NAME)
    except NoWarningsException:
        return None
    return details
class WarningDetails:
    """Scrapes the Met Office severe-weather-warnings page and extracts the
    first warning that applies to the given region today.

    Raises NoWarningsException when the region is unaffected, no warning
    matrix is present, or the warning does not start today / starts after
    22:00 local time.
    """

    def __init__(self, region_code, region_name):
        self.region_name = region_name
        soup = self.get_soup(region_code)
        if self.is_region_affected(soup):
            warningMatrixTable = soup.find("table", class_="warningsMatrixInner")
            if warningMatrixTable is None:
                raise NoWarningsException
            self.colour = self.get_colour(warningMatrixTable)
            self.event = self.get_event(soup.find("span", class_="wxDetails"))
            # First <time> element = start of the warning period (ISO 8601).
            self.time_from = self.get_times(soup.find("span", class_="wxDetails"))[0]
            # Only warnings that begin today are of interest...
            if (self.time_from.split('T')[0] != date.today().isoformat()):
                raise NoWarningsException
            # ...and only if they start no later than 22:xx.
            elif (int(self.time_from.split('T')[1].split(':')[0]) > 22):
                raise NoWarningsException
            self.time_to = self.get_times(soup.find("span", class_="wxDetails"))[1]
            # Walk the matrix rows top-to-bottom; the row holding the
            # selected cell gives likelihood, its column gives impact.
            likelihood = 1
            for tr in warningMatrixTable.find_all("tr"):
                if tr.find_all("td", class_="selected"):
                    self.impact = self.get_impact(tr)
                    self.likelihood = self.get_likelihood(likelihood)
                    break
                likelihood += 1
        else:
            raise NoWarningsException

    def get_soup(self, region_code):
        # Fetch the warnings page; a browser-like User-Agent header is sent
        # with the request.
        req = Request(
            'http://www.metoffice.gov.uk/public/weather/warnings#?region={}'.format(region_code),
            headers={'User-Agent': 'Mozilla/5.0'})
        data = urlopen(req).read()
        return BeautifulSoup(data, "html.parser")

    def is_region_affected(self, soup):
        # True when any affected-area title matches our region name exactly.
        regions = soup.find_all("div", class_="localAuthoritiesTitle")
        if any([tag.contents[0] for tag in regions if tag.contents[0]==self.region_name]):
            return True
        return False

    def __str__(self):
        return "{} warning of {} - {} a {} impact.".format(self.colour.capitalize(), self.event, self.likelihood, self.impact)

    def get_colour(self, warningMatrixTable):
        # Severity is encoded as a CSS class on the selected cell.
        # Falls through (returns None) if no known colour class is present.
        selected_td = warningMatrixTable.find("td", class_="selected")
        for attribute in selected_td["class"]:
            if attribute == "yellow":
                return "yellow"
            elif attribute == "amber":
                return "amber"
            elif attribute == "red":
                return "red"

    def get_event(self, warning):
        # Weather type (first text node of the wxType span), lower-cased.
        return warning.find("span", class_="wxType").contents[0].lower()

    def get_times(self, warning):
        # All <time datetime="..."> attribute values, in document order.
        return [x["datetime"] for x in warning.find_all("time")]

    def get_likelihood(self, likelihood):
        # Matrix row number (1-4, top to bottom) -> likelihood wording.
        # NOTE: `dict` shadows the builtin of the same name (local scope only).
        dict = {
            1:"there will be",
            2:"there is likely to be",
            3:"there might be",
            4:"there's a very small chance of",
        }
        return dict[likelihood]

    def get_impact(self, tr):
        # Column position of the selected cell in this row -> impact wording.
        # NOTE(review): `impact` starts at 0 but the mapping starts at 1 —
        # presumably the row's first <td> is a label cell that is never
        # 'selected'; if it ever were, dict[0] would raise KeyError. Verify.
        impact = 0
        dict = {
            1:"low",
            2:"medium",
            3:"high",
        }
        for td in tr.find_all("td"):
            if "selected" in td['class']:
                return dict[impact]
            impact += 1
| unlicense | Python | |
9c70a5d65b1c06f62751dfb4fcdd4d6a60a5eb71 | Add unit tests for testing the widget tree iterators. | eHealthAfrica/kivy,xpndlabs/kivy,vipulroxx/kivy,rafalo1333/kivy,CuriousLearner/kivy,Farkal/kivy,rafalo1333/kivy,Cheaterman/kivy,bionoid/kivy,bliz937/kivy,vipulroxx/kivy,habibmasuro/kivy,xiaoyanit/kivy,dirkjot/kivy,Farkal/kivy,rafalo1333/kivy,manashmndl/kivy,KeyWeeUsr/kivy,darkopevec/kivy,viralpandey/kivy,matham/kivy,cbenhagen/kivy,jegger/kivy,matham/kivy,JohnHowland/kivy,jehutting/kivy,edubrunaldi/kivy,aron-bordin/kivy,LogicalDash/kivy,matham/kivy,LogicalDash/kivy,andnovar/kivy,Farkal/kivy,kivy/kivy,iamutkarshtiwari/kivy,manashmndl/kivy,akshayaurora/kivy,xiaoyanit/kivy,arcticshores/kivy,viralpandey/kivy,autosportlabs/kivy,bhargav2408/kivy,jegger/kivy,bionoid/kivy,adamkh/kivy,adamkh/kivy,Shyam10/kivy,viralpandey/kivy,inclement/kivy,darkopevec/kivy,KeyWeeUsr/kivy,gonzafirewall/kivy,denys-duchier/kivy,rnixx/kivy,aron-bordin/kivy,Ramalus/kivy,bob-the-hamster/kivy,yoelk/kivy,edubrunaldi/kivy,thezawad/kivy,akshayaurora/kivy,vipulroxx/kivy,gonzafirewall/kivy,JohnHowland/kivy,ehealthafrica-ci/kivy,LogicalDash/kivy,bob-the-hamster/kivy,iamutkarshtiwari/kivy,manthansharma/kivy,vitorio/kivy,xiaoyanit/kivy,Cheaterman/kivy,ehealthafrica-ci/kivy,ehealthafrica-ci/kivy,ehealthafrica-ci/kivy,jffernandez/kivy,thezawad/kivy,Farkal/kivy,VinGarcia/kivy,youprofit/kivy,xpndlabs/kivy,matham/kivy,jkankiewicz/kivy,niavlys/kivy,Cheaterman/kivy,bob-the-hamster/kivy,vitorio/kivy,ernstp/kivy,niavlys/kivy,Ramalus/kivy,cbenhagen/kivy,kived/kivy,bhargav2408/kivy,ernstp/kivy,Shyam10/kivy,JohnHowland/kivy,autosportlabs/kivy,habibmasuro/kivy,rnixx/kivy,MiyamotoAkira/kivy,bionoid/kivy,ernstp/kivy,bliz937/kivy,janssen/kivy,Shyam10/kivy,yoelk/kivy,adamkh/kivy,inclement/kivy,bhargav2408/kivy,mSenyor/kivy,Ramalus/kivy,darkopevec/kivy,gonzafirewall/kivy,jffernandez/kivy,dirkjot/kivy,CuriousLearner/kivy,jegger/kivy,MiyamotoAkira/kivy,yoelk/kivy,Shyam10/kivy,autosportlabs/kivy,kived/kivy,jkankiewicz/kivy,arctics
hores/kivy,tony/kivy,VinGarcia/kivy,denys-duchier/kivy,inclement/kivy,angryrancor/kivy,angryrancor/kivy,el-ethan/kivy,bionoid/kivy,CuriousLearner/kivy,youprofit/kivy,youprofit/kivy,janssen/kivy,angryrancor/kivy,ernstp/kivy,el-ethan/kivy,mSenyor/kivy,kived/kivy,vipulroxx/kivy,eHealthAfrica/kivy,JohnHowland/kivy,bob-the-hamster/kivy,dirkjot/kivy,eHealthAfrica/kivy,yoelk/kivy,tony/kivy,arlowhite/kivy,denys-duchier/kivy,KeyWeeUsr/kivy,arlowhite/kivy,niavlys/kivy,VinGarcia/kivy,KeyWeeUsr/kivy,el-ethan/kivy,andnovar/kivy,iamutkarshtiwari/kivy,jegger/kivy,vitorio/kivy,akshayaurora/kivy,jehutting/kivy,MiyamotoAkira/kivy,manthansharma/kivy,rnixx/kivy,andnovar/kivy,cbenhagen/kivy,xpndlabs/kivy,mSenyor/kivy,darkopevec/kivy,manthansharma/kivy,eHealthAfrica/kivy,arcticshores/kivy,tony/kivy,jehutting/kivy,aron-bordin/kivy,LogicalDash/kivy,edubrunaldi/kivy,janssen/kivy,manashmndl/kivy,habibmasuro/kivy,MiyamotoAkira/kivy,janssen/kivy,bliz937/kivy,thezawad/kivy,niavlys/kivy,Cheaterman/kivy,kivy/kivy,jkankiewicz/kivy,angryrancor/kivy,arcticshores/kivy,jffernandez/kivy,denys-duchier/kivy,gonzafirewall/kivy,kivy/kivy,aron-bordin/kivy,jffernandez/kivy,arlowhite/kivy,manthansharma/kivy,dirkjot/kivy,adamkh/kivy,jkankiewicz/kivy | kivy/tests/test_widget_walk.py | kivy/tests/test_widget_walk.py | import unittest
class FileWidgetWalk(unittest.TestCase):
    """Tests for the widget-tree iterators kivy.uix.widget.walk and
    walk_reverse: walking from any node must visit the whole tree in the
    construction (depth-first) order rotated to start at that node, and
    the reverse walk must be the exact mirror of the forward walk.
    """

    def test_walk_large_tree(self):
        # kivy imports are kept inside the test so collecting the module
        # does not pull in the framework.
        from kivy.uix.boxlayout import BoxLayout
        from kivy.uix.label import Label
        from kivy.uix.widget import walk, walk_reverse
        ''' the tree
        BoxLayout
            BoxLayout
            Label
                10 labels
            BoxLayout
                10 labels
            BoxLayout
                Label
            Label
        '''
        # `tree` records every widget in creation order, which is the
        # order the forward walk is expected to produce from the root.
        root = BoxLayout()
        tree = [root]
        box = BoxLayout()
        tree.append(box)
        root.add_widget(box)
        label = Label()
        tree.append(label)
        root.add_widget(label)
        for i in range(10):
            tree.append(Label())
            label.add_widget(tree[-1])
        box = BoxLayout()
        tree.append(box)
        root.add_widget(box)
        for i in range(10):
            tree.append(Label())
            box.add_widget(tree[-1])
        box = BoxLayout()
        tree.append(box)
        root.add_widget(box)
        tree.append(Label())
        box.add_widget(tree[-1])
        label = Label()
        tree.append(label)
        root.add_widget(label)
        def rotate(l, n):
            # Left-rotate list l by n positions.
            return l[n:] + l[:n]
        # Starting the walk at node i should yield the full order rotated
        # to begin at i; walk_reverse must mirror the forward walk.
        for i in range(len(tree)):
            rotated = rotate(tree, i) # shift list to start at i
            walked = [n for n in walk(tree[i])] # walk starting with i
            walked_reversed = [n for n in walk_reverse(tree[i])]
            self.assertListEqual(rotated, walked)
            self.assertListEqual(walked, list(reversed(walked_reversed)))

    def test_walk_single(self):
        # A widget with no children walks to just itself, both directions.
        from kivy.uix.label import Label
        from kivy.uix.widget import walk, walk_reverse
        label = Label()
        self.assertListEqual([n for n in walk(label)], [label])
        self.assertListEqual([n for n in walk_reverse(label)], [label])
| mit | Python | |
989320c3f2bdf65eb8c22822f34052047e0d1a2b | Reorder array | prathamtandon/g4gproblems | Arrays/reorder_array.py | Arrays/reorder_array.py | """
Given two integer arrays of same size, arr[] and index[], reorder elements in arr[] according to given index array.
Input:
arr: 50 40 70 60 90
index: 3 0 4 1 2
Output:
arr: 40 60 90 50 70
index: 0 1 2 3 4

(Each arr[i] is moved to position index[i]; the expected output above is
what reorder() below actually produces — the previously stated output
used the opposite convention and did not match the implementation.)
"""
"""
Approach:
1. Do the following for every element arr[i]
2. While index[i] != i, store array and index values for the target position where arr[i] has to be placed.
The correct position for arr[i] is index[i].
3. Place arr[i] at its correct position. Also update index value of correct position.
4. Copy old values of correct position to arr[i] and index[i] as the while loop continues for i.
"""
def reorder(list_of_numbers, indices):
    """Rearrange *list_of_numbers* in place so the element at position i
    ends up at position indices[i]; *indices* is normalised to
    [0, 1, 2, ...] as a side effect.  O(n) time, O(1) extra space.
    """
    for pos in range(len(list_of_numbers)):
        # Keep swapping until the slot's recorded destination is itself,
        # i.e. the element that belongs at `pos` has arrived.
        while indices[pos] != pos:
            dest = indices[pos]
            # Send the current value (with its destination marker) to
            # `dest`, pulling back whatever used to live there.
            list_of_numbers[pos], list_of_numbers[dest] = (
                list_of_numbers[dest], list_of_numbers[pos])
            indices[pos], indices[dest] = indices[dest], indices[pos]
| mit | Python | |
3a4de870ebefd0e3e32b8c1b9facee6c98ce8b7f | Convert python 2 version to python 3 | Lingotek/filesystem-connector,Lingotek/translation-utility,Lingotek/client,Lingotek/client,Lingotek/translation-utility,Lingotek/filesystem-connector | ltk2to3.py | ltk2to3.py | import os
import shutil
import fnmatch
def get_files(patterns):
    """Expand *patterns* into a list of absolute file paths.

    Args:
        patterns: a path string or a list of path strings. A path naming a
            directory is walked recursively and every ``*.py`` file below it
            is collected; a path naming a plain file is taken as-is.

    Returns:
        A list of absolute paths, or ``None`` when nothing matched.

    Bug fixed: the missing-path branch referenced an undefined name
    ``logger`` and therefore raised NameError for any nonexistent input;
    it now just reports the path and keeps going. Dead locals (``cwd``,
    ``split_path``) were removed.
    """
    if isinstance(patterns, str):
        patterns = [patterns]
    matched_files = []
    for pattern in patterns:
        path = os.path.abspath(pattern)
        if not os.path.exists(path):
            print("File not found: " + pattern)
            continue
        if os.path.isdir(path):
            # Collect every Python source file under the directory.
            for root, _subdirs, files in os.walk(path):
                for filename in files:
                    if fnmatch.fnmatch(filename, '*.py'):
                        matched_files.append(os.path.join(root, filename))
        else:
            matched_files.append(path)
    return matched_files or None
# Source tree holding the Python 2 implementation.
dir2 = 'python2/ltk'
files2 = get_files(dir2)
# Copy files from 2 to 3
# NOTE(review): assumes a parallel python3/ltk directory tree already
# exists — shutil.copyfile does not create intermediate directories.
for fpath2 in files2:
    fpath3 = fpath2.replace('python2','python3')
    shutil.copyfile(fpath2, fpath3)
# Comment and uncomment specified lines in Python 3 version
# Rewrite each copied file in place: lines between '# Python 3' and
# '# End Python 3' markers are uncommented, lines between '# Python 2'
# and '# End Python 2' markers are commented out.  The 'End' marker
# lines themselves are dropped from the output (`continue` skips the
# write); the opening marker lines are written through unchanged.
for fpath in files2:
    fpath = fpath.replace('python2','python3')
    with open(fpath, 'r+') as f:
        lines = f.readlines()
        f.seek(0)
        f.truncate()
        is_python3 = False
        is_python2 = False
        for line in lines:
            if '# Python 3' in line:
                is_python3 = True
            elif is_python3:
                if '# End Python 3' in line:
                    is_python3 = False
                    continue
                # Uncomment: removes EVERY '# ' occurrence in the line,
                # not just a leading one.
                line = line.replace('# ','')
            elif '# Python 2' in line:
                is_python2 = True
            elif is_python2:
                if '# End Python 2' in line:
                    is_python2 = False
                    continue
                line = '# '+str(line)
f.write(line) | mit | Python | |
0babd53317322cea1a56cc8cacd6ffc417145c80 | Add migration file. | AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django | django_project/realtime/migrations/0033_auto_20180202_0723.py | django_project/realtime/migrations/0033_auto_20180202_0723.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Redefine Ash.forecast_duration as an IntegerField with default 1
    and the verbose name shown below.  Schema metadata only."""

    dependencies = [
        ('realtime', '0032_auto_20180201_0947'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ash',
            name='forecast_duration',
            field=models.IntegerField(default=1, verbose_name='Duration of forecast for Ash Hazard in days'),
            preserve_default=True,
        ),
    ]
| bsd-2-clause | Python | |
844049b0d4aecb25fc480fae37111e8aebac6438 | Add mimeformats.py to support drag and drop | vorburger/mcedit2,vorburger/mcedit2 | src/mcedit2/util/mimeformats.py | src/mcedit2/util/mimeformats.py | """
mimeformats
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
class MimeFormats(object):
MapItem = "application/x-mcedit-mapitem" | bsd-3-clause | Python | |
d2c99675bce99da0c0b77829081a805c0aa817be | add init evtxinfo.py | williballenthin/python-evtx,ohio813/python-evtx | Evtx/evtxinfo.py | Evtx/evtxinfo.py | #!/bin/python
# This file is part of python-evtx.
#
# Copyright 2012, 2013 Willi Ballenthin <william.ballenthin@mandiant.com>
# while at Mandiant <http://www.mandiant.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Version v.0.1
import sys
import binascii
import mmap
import contextlib
from Evtx import *
def main():
    """Print a human-readable summary of a Windows EVTX log file.

    Usage: evtxinfo.py <path-to-evtx>.  The file named by sys.argv[1] is
    memory-mapped read-only and parsed with the Evtx FileHeader class.
    NOTE: this is Python 2 code (print statements).
    """
    with open(sys.argv[1], 'r') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0,
                                          access=mmap.ACCESS_READ)) as buf:
            # Parse the file header located at offset 0.
            fh = FileHeader(buf, 0x0)
            print "Information from file header:"
            print "Format version : %d.%d" % (fh.major_version(),
                                              fh.minor_version())
            print "Flags : 0x%08x" % (fh.flags())
            dirty_string = "clean"
            if fh.is_dirty():
                dirty_string = "dirty"
            print "File is : %s" % (dirty_string)
            full_string = "no"
            if fh.is_full():
                full_string = "yes"
            print "Log is full : %s" % (full_string)
            print "Current chunk : %d of %d" % (fh.current_chunk_number(),
                                                fh.chunk_count())
            print "Oldest chunk : TODO"
            print "Next record# : %d" % (fh.next_record_number())
            # Verify the header's embedded checksum.
            checksum_string = "fail"
            if fh.calculate_checksum() == fh.checksum():
                checksum_string = "pass"
            print "Check sum : %s" % (checksum_string)
            print ""
            print "Information from chunks:"
            print " Chunk file (first/last) log (first/last) Header Data"
            print "- ----- --------------------- --------------------- ------ ------"
            # Chunks are numbered from 1 in the listing below.
            for (i, chunk) in enumerate(fh.chunks(), 1):
                # '*' marks the chunk the header reports as current.
                note_string = " "
                if i == fh.current_chunk_number() + 1:
                    note_string = "*"
                # TODO(wb): handle symbol '>'
                if not chunk.check_magic():
                    # An all-zero magic means an unused (empty) chunk slot.
                    if chunk.magic() == "\x00\x00\x00\x00\x00\x00\x00\x00":
                        print "%s %4d [EMPTY]" % (note_string, i)
                    else:
                        print "%s %4d [INVALID]" % (note_string, i)
                    continue
                # Validate both per-chunk checksums.
                header_checksum_string = "fail"
                if chunk.calculate_header_checksum() == chunk.header_checksum():
                    header_checksum_string = "pass"
                data_checksum_string = "fail"
                if chunk.calculate_data_checksum() == chunk.data_checksum():
                    data_checksum_string = "pass"
                print "%s %4d %8d %8d %8d %8d %s %s" % \
                    (note_string,
                     i,
                     chunk.file_first_record_number(), # TODO(wb): confirm this field
                     chunk.file_last_record_number(),
                     chunk.log_first_record_number(),
                     chunk.log_last_record_number(),
                     header_checksum_string,
                     data_checksum_string)
if __name__ == "__main__":
main()
| apache-2.0 | Python | |
fa1223c661d60033b7d7aba2a27151d6ee18a299 | Add tests for circle ci checks | relekang/python-semantic-release,wlonk/python-semantic-release,relekang/python-semantic-release,riddlesio/python-semantic-release | tests/ci_checks/test_circle.py | tests/ci_checks/test_circle.py | import pytest
from semantic_release import ci_checks
from semantic_release.errors import CiVerificationError
def test_circle_should_pass_if_branch_is_master_and_no_pr(monkeypatch):
    """circle() passes when CIRCLE_BRANCH matches and no PR is active."""
    # CircleCI exposes the branch via CIRCLE_BRANCH; an empty
    # CI_PULL_REQUEST means this build is not a pull-request build.
    monkeypatch.setenv('CIRCLE_BRANCH', 'master')
    monkeypatch.setenv('CI_PULL_REQUEST', '')
    assert ci_checks.circle('master')
def test_circle_should_pass_if_branch_is_correct_and_no_pr(monkeypatch):
    """circle() passes for any branch name as long as it matches the target."""
    monkeypatch.setenv('CIRCLE_BRANCH', 'other-branch')
    monkeypatch.setenv('CI_PULL_REQUEST', '')
    assert ci_checks.circle('other-branch')
def test_circle_should_raise_ci_verification_error_for_wrong_branch(monkeypatch):
    """circle() rejects a build running on a branch other than the target."""
    monkeypatch.setenv('CIRCLE_BRANCH', 'other-branch')
    monkeypatch.setenv('CI_PULL_REQUEST', '')
    with pytest.raises(CiVerificationError):
        ci_checks.circle('master')
def test_circle_should_raise_ci_verification_error_for_pr(monkeypatch):
    """circle() rejects pull-request builds even when the branch matches."""
    monkeypatch.setenv('CIRCLE_BRANCH', 'other-branch')
    # A non-empty CI_PULL_REQUEST marks this build as a PR build.
    monkeypatch.setenv('CI_PULL_REQUEST', 'http://the-url-of-the-pr')
    with pytest.raises(CiVerificationError):
        ci_checks.circle('master')
| mit | Python | |
6a50f602ebc2334d45352cd2ff13c1f91db7e0bd | Integrate LLVM at llvm/llvm-project@8e22539067d9 | karllessard/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_t
f_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,frreiss/tensorflow-fred,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,frreiss/tensorflow-fred,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
    """Imports LLVM at the pinned commit below as repository *name*."""

    # Upstream llvm-project commit this build is pinned to; the SHA256
    # guards the downloaded archive against changes.
    LLVM_COMMIT = "8e22539067d9376c4f808b25f543feba728d40c9"
    LLVM_SHA256 = "db0a7099e6e1eacbb51338f0b18c237be7354c25e8126c523390bef965a9b6f6"

    tf_http_archive(
        name = name,
        sha256 = LLVM_SHA256,
        strip_prefix = "llvm-project-" + LLVM_COMMIT,
        # TensorFlow's GCS mirror is tried first, then GitHub.
        urls = [
            "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
            "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
        ],
        # Overlay locally-maintained BUILD files onto the LLVM/MLIR tree.
        link_files = {
            "//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
            "//third_party/mlir:BUILD": "mlir/BUILD",
            "//third_party/mlir:test.BUILD": "mlir/test/BUILD",
        },
    )
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
    """Imports LLVM at the pinned commit below as repository *name*."""

    # Upstream llvm-project commit this build is pinned to; the SHA256
    # guards the downloaded archive against changes.
    LLVM_COMMIT = "223261cbaa6b4c74cf9eebca3452ec0d15ea018e"
    LLVM_SHA256 = "8425d6458484c6e7502b4e393cd8d98b533826a3b040261d67261f1364936518"

    tf_http_archive(
        name = name,
        sha256 = LLVM_SHA256,
        strip_prefix = "llvm-project-" + LLVM_COMMIT,
        # TensorFlow's GCS mirror is tried first, then GitHub.
        urls = [
            "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
            "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
        ],
        # Overlay locally-maintained BUILD files onto the LLVM/MLIR tree.
        link_files = {
            "//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
            "//third_party/mlir:BUILD": "mlir/BUILD",
            "//third_party/mlir:test.BUILD": "mlir/test/BUILD",
        },
    )
| apache-2.0 | Python |
bd729068b1683954ab190f187e59d8a5fc0741f1 | Integrate LLVM at llvm/llvm-project@7ed7d4ccb899 | gautam1858/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,sarvex/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensor
flow-fred,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,karllessard/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,sarvex/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,tensorflow/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,sarvex/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to 
import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
    """Imports LLVM.

    Args:
        name: repository name to register in the workspace.

    Fetches a pinned llvm-project commit (preferring the TensorFlow
    mirror, falling back to GitHub) and overlays the locally maintained
    BUILD files onto the extracted tree.
    """
    LLVM_COMMIT = "7ed7d4ccb8991e2b5b95334b508f8cec2faee737"
    LLVM_SHA256 = "6584ccaffd5debc9fc1bb275a36af9bad319a7865abecf36f97cbe3c2da028d0"
    tf_http_archive(
        name = name,
        sha256 = LLVM_SHA256,
        # Strip the GitHub-generated top-level directory from the archive.
        strip_prefix = "llvm-project-" + LLVM_COMMIT,
        urls = [
            "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
            "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
        ],
        # Map local BUILD files into the fetched source tree.
        link_files = {
            "//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
            "//third_party/mlir:BUILD": "mlir/BUILD",
            "//third_party/mlir:test.BUILD": "mlir/test/BUILD",
        },
    )
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "b109172d993edacd9853a8bbb8128a94da014399"
LLVM_SHA256 = "36ee6bf7d89b43034c1c58c57aa63d0703d1688807480969dfd1f4d7ccaa3787"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| apache-2.0 | Python |
e1fc8b6774c6283a8c4f81235f1a1d9dc10c5fc6 | Add tSNE-script | tokee/juxta,tokee/juxta,tokee/juxta | tSNE-images.py | tSNE-images.py | # Copied with permission from https://github.com/ml4a/ml4a-ofx.git
import argparse
import sys
import numpy as np
import json
import os
from os.path import isfile, join
import keras
from keras.preprocessing import image
from keras.applications.imagenet_utils import decode_predictions, preprocess_input
from keras.models import Model
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
from scipy.spatial import distance
def process_arguments(args):
    """Parse command-line options for the image t-SNE pipeline.

    Args:
        args: list of argument strings, typically sys.argv[1:].

    Returns:
        dict mapping option names ('images_path', 'output_path',
        'num_dimensions', 'perplexity', 'learning_rate') to their values.
        Numeric defaults are ints; values given on the command line are
        strings and are coerced by the caller.
    """
    # Fixed copy-paste bug: this script operates on images, not audio.
    parser = argparse.ArgumentParser(description='tSNE on images')
    parser.add_argument('--images_path', action='store', help='path to directory of images')
    parser.add_argument('--output_path', action='store', help='path to where to put output json file')
    parser.add_argument('--num_dimensions', action='store', default=2, help='dimensionality of t-SNE points (default 2)')
    parser.add_argument('--perplexity', action='store', default=30, help='perplexity of t-SNE (default 30)')
    parser.add_argument('--learning_rate', action='store', default=150, help='learning rate of t-SNE (default 150)')
    params = vars(parser.parse_args(args))
    return params
def get_image(path, input_shape):
    """Load the image at *path*, resize it to *input_shape*, and return a
    network-ready batch-of-one array (after Keras imagenet preprocessing)."""
    loaded = image.load_img(path, target_size=input_shape)
    pixels = image.img_to_array(loaded)
    batch = np.expand_dims(pixels, axis=0)
    return preprocess_input(batch)
def find_candidate_images(images_path):
    """
    Finds all candidate images in the given folder and its sub-folders.

    Returns:
        images: a list of absolute paths to the discovered images.
    """
    wanted_extensions = {'.jpg', '.png', '.jpeg'}
    found = []
    for dirpath, _, filenames in os.walk(images_path):
        for filename in filenames:
            # Extension check is case-insensitive (e.g. '.JPG' matches).
            extension = os.path.splitext(filename)[1].lower()
            if extension in wanted_extensions:
                found.append(os.path.abspath(os.path.join(dirpath, filename)))
    return found
def analyze_images(images_path):
    """Run every image under *images_path* through VGG16 and reduce with PCA.

    Returns:
        (images, pca_features): the list of analyzed image paths and a
        matrix of 300-dimensional PCA-reduced activations, one row per image.
    """
    # make feature_extractor: use the fc2 layer's activations as the descriptor
    model = keras.applications.VGG16(weights='imagenet', include_top=True)
    feat_extractor = Model(inputs=model.input, outputs=model.get_layer("fc2").output)
    input_shape = model.input_shape[1:3]
    # get images
    candidate_images = find_candidate_images(images_path)
    # analyze images and grab activations
    activations = []
    images = []
    for idx,image_path in enumerate(candidate_images):
        # image_path is already absolute, so join() returns it unchanged
        file_path = join(images_path,image_path)
        img = get_image(file_path, input_shape);
        if img is not None:
            print("getting activations for %s %d/%d" % (image_path,idx,len(candidate_images)))
            acts = feat_extractor.predict(img)[0]
            activations.append(acts)
            images.append(image_path)
    # run PCA first
    # NOTE(review): PCA(n_components=300) will raise when fewer than 300
    # images are available -- confirm the intended minimum dataset size.
    print("Running PCA on %d images..." % len(activations))
    features = np.array(activations)
    pca = PCA(n_components=300)
    pca.fit(features)
    pca_features = pca.transform(features)
    return images, pca_features
def run_tsne(images_path, output_path, tsne_dimensions, tsne_perplexity, tsne_learning_rate):
    """Embed the images under *images_path* with t-SNE and write the
    per-axis 0..1-normalized coordinates to *output_path* as JSON."""
    images, pca_features = analyze_images(images_path)
    print("Running t-SNE on %d images..." % len(images))
    feature_matrix = np.array(pca_features)
    model = TSNE(n_components=tsne_dimensions, learning_rate=tsne_learning_rate,
                 perplexity=tsne_perplexity, verbose=2)
    embedding = model.fit_transform(feature_matrix)
    # Rescale each t-SNE axis independently into [0, 1] before saving.
    data = []
    for row, image_path in enumerate(images):
        point = []
        for axis in range(tsne_dimensions):
            lo = np.min(embedding[:, axis])
            hi = np.max(embedding[:, axis])
            point.append(float((embedding[row, axis] - lo) / (hi - lo)))
        data.append({"path": os.path.abspath(join(images_path, image_path)), "point": point})
    with open(output_path, 'w') as outfile:
        json.dump(data, outfile)
# Command-line entry point: parse options, run the full pipeline, and
# report where the JSON output was written.
if __name__ == '__main__':
    params = process_arguments(sys.argv[1:])
    images_path = params['images_path']
    output_path = params['output_path']
    # Values arriving from the command line are strings, so coerce the
    # numeric options explicitly (defaults are already ints).
    tsne_dimensions = int(params['num_dimensions'])
    tsne_perplexity = int(params['perplexity'])
    tsne_learning_rate = int(params['learning_rate'])
    run_tsne(images_path, output_path, tsne_dimensions, tsne_perplexity, tsne_learning_rate)
    print("finished saving %s" % output_path)
| apache-2.0 | Python | |
f29dab9a82b44fac483d71c432a40a0bb2ca51b1 | Add the beginnings of an example client. | rvykydal/blivet,AdamWill/blivet,jkonecny12/blivet,vpodzime/blivet,vojtechtrefny/blivet,vpodzime/blivet,vojtechtrefny/blivet,jkonecny12/blivet,AdamWill/blivet,rvykydal/blivet | examples/dbus_client.py | examples/dbus_client.py |
# Minimal example client for the Blivet1 D-Bus service: connect, reset the
# service's device state, then print a summary of every listed device.
import dbus
bus = dbus.SystemBus()
# This adds a signal match so that the client gets signals sent by Blivet1's
# ObjectManager. These signals are used to notify clients of changes to the
# managed objects (for blivet, this will be devices, formats, and actions).
bus.add_match_string("type='signal',sender='com.redhat.Blivet1',path_namespace='/com/redhat/Blivet1'")
blivet = bus.get_object('com.redhat.Blivet1', '/com/redhat/Blivet1/Blivet')
# Reset the service's view of storage (presumably triggers a rescan --
# confirm against the Blivet1 API documentation).
blivet.Reset()
# The ObjectManager maps object paths to their interface property dicts.
object_manager = bus.get_object('com.redhat.Blivet1', '/com/redhat/Blivet1')
objects = object_manager.GetManagedObjects()
for object_path in blivet.ListDevices():
    device = objects[object_path]['com.redhat.Blivet1.Device']
    print(device['Name'], device['Type'], device['Size'], device['FormatType'])
| lgpl-2.1 | Python | |
ebf4d87390307dcf735c53f18a18f3466a4ee5e4 | Add standalone wave trigger tool. | lordjabez/light-maestro,lordjabez/light-maestro,lordjabez/light-maestro,lordjabez/light-maestro | tools/standalonewavetrigger.py | tools/standalonewavetrigger.py | #!/usr/bin/env python
# Standard library imports
import argparse
import collections
import logging
import os
import time
# Additional library imports
import requests
# Named logger for this module
_logger = logging.getLogger(__name__)
# Parse the command line arguments
_parser = argparse.ArgumentParser('')
_parser.add_argument('-t', '--triggers', default='triggers', help='Folder containing trigger files')
_parser.add_argument('-r', '--rate', default=4.0, help='Poll rate in polls per second')
_parser.add_argument('-d', '--debug', action='store_true', help='Enables debug logging')
_args = _parser.parse_args()
# Configure the logging module
_logformat = '%(asctime)s : %(levelname)s : %(name)s : %(message)s'
_loglevel = logging.DEBUG if _args.debug else logging.INFO
logging.basicConfig(format=_logformat, level=_loglevel)
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
# We use a session variable so that HTTP keep-alive is utilized, and
# also so we'll always remember to set the content type appropriately.
_session = requests.session()
_session.headers['Content-Type'] = 'application/json'
# Stores previous last access times for each file
# so they can be compared each time files are polled.
_atimes = collections.defaultdict(time.time)
# Poll the list of files forever
while True:
# Delay the appropriate amount of time between polls
time.sleep(1.0 / _args.rate)
# Grab a list of all fully-qualified wave file names in the trigger folder
files = (os.path.join(_args.triggers, f) for f in os.listdir(_args.triggers) if os.path.splitext(f)[1] == '.wav')
# Iterate over the list of files
for filename in files:
# If the last access time is newer than what was previous recorded then take
# action on that file. A small threshold is used to prevent "double bouncing".
if os.stat(filename).st_atime - _atimes[filename] > 1.0:
# Open the file and pull out the data
with open(filename, 'rb') as f:
req = f.read()
# Immediately store off the last accessed time
_atimes[filename] = os.stat(filename).st_atime
# Separate the components of the request
method, url, data = req[52:].splitlines(False)
# Attempt to send the request and log the results
_logger.debug('Sending {0} request to {1}'.format(method, url))
try:
response = _session.request(method, url, data=data)
_logger.debug('Received response with status code {0}'.format(response.status_code))
except requests.RequestException:
_logger.warning('Unable to contact {0}'.format(url))
| apache-2.0 | Python | |
d76c7f73701edeb263ebffc94ccc3f4893f7ef0d | add leetcode Reorder List | Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code | leetcode/ReorderList/solution.py | leetcode/ReorderList/solution.py | # Definition for singly-linked list.
class ListNode:
    # Singly-linked list node holding one integer value.
    # NOTE: this file uses Python 2 print statements.
    def __init__(self, x):
        self.val = x
        self.next = None
    def printList(self):
        # Walk from this node to the tail, printing values on one line.
        head = self
        while head:
            print head,
            head = head.next
        print ''
    def __str__(self):
        return str(self.val)
class Solution:
    # @param head, a ListNode
    # @return nothing
    def reorderList(self, head):
        """Rearrange L0->L1->...->Ln in place into L0->Ln->L1->Ln-1->...

        O(n) time, O(1) extra space: split the list at its midpoint,
        reverse the back half, then weave the halves together.
        Returns the (unchanged) head node.
        """
        if head is None or head.next is None:
            return head
        # Advance a runner twice as fast as a walker to find the split point.
        walker = runner = head
        while runner.next and runner.next.next:
            walker = walker.next
            runner = runner.next.next
        # Detach the back half, which begins just after the walker.
        reversed_head = walker.next
        walker.next = None
        # Reverse the back half in place.
        node = reversed_head.next
        reversed_head.next = None
        while node is not None:
            following = node.next
            node.next = reversed_head
            reversed_head = node
            node = following
        # Weave the reversed back half into the front half.
        cursor = head
        while reversed_head is not None:
            following = reversed_head.next
            reversed_head.next = cursor.next
            cursor.next = reversed_head
            cursor = reversed_head.next
            reversed_head = following
        return head
def main():
    """Build a random 10-node list, then print it before and after reorderList."""
    import random
    root = ListNode(random.randint(1, 1000))
    head = root
    # Append nine more random nodes (xrange: Python 2).
    for x in xrange(9):
        head.next = ListNode(random.randint(1, 1000))
        head = head.next
    root.printList()
    s = Solution()
    root = s.reorderList(root)
    root.printList()
if __name__ == '__main__':
    main()
| mit | Python | |
27a177a9c03ca5e98f1997eae18d046875a17c3b | Create alias.py | TingPing/plugins,TingPing/plugins | HexChat/alias.py | HexChat/alias.py | import hexchat
__module_name__ = "Alias"
__module_author__ = "TingPing"
__module_version__ = "0"
__module_description__ = "Create aliases for commands"
# Maps alias name -> hexchat hook object for every live alias.
alias_hooks = {}
# Commands this plugin answers /HELP for itself.
help_cmds = ['alias', 'unalias', 'aliases']
# Usage text printed by /ALIAS with missing arguments and by /HELP.
help_msg = 'Alias: Valid commands are:\n \
ALIAS name command\n \
UNALIAS name\n \
ALIASES\n \
HELP name'
def load_aliases():
    """Re-register a command hook for every alias persisted in plugin prefs."""
    prefix = 'alias_'
    for pref in hexchat.list_pluginpref():
        if not pref.startswith(prefix):
            continue
        alias_name = pref[len(prefix):]
        alias_hooks[alias_name] = hexchat.hook_command(alias_name, alias_cmd_cb)
def get_alias(name):
    """Look up the command text persisted for alias *name*."""
    return hexchat.get_pluginpref('alias_' + name)
def remove_alias(name, quiet=False):
    """Delete the stored pref for *name* and unhook its command.

    Returns True when a hook was registered for the alias, False otherwise.
    The *quiet* flag is accepted for callers but is currently unused here.
    """
    hexchat.del_pluginpref('alias_' + name)
    try:
        hook = alias_hooks.pop(name)
    except KeyError:
        return False
    hexchat.unhook(hook)
    return True
def add_alias(name, cmd):
    """Persist *cmd* under *name* and hook the new command.

    Returns True when the hook was created, False otherwise.
    """
    hexchat.set_pluginpref('alias_' + name, cmd)
    hook = hexchat.hook_command(name, alias_cmd_cb)
    alias_hooks[name] = hook
    return bool(hook)
def alias_cmd_cb(word, word_eol, userdata):
    """Expand an aliased command, forwarding any trailing arguments.

    word[0] is the alias name the user typed; word_eol[1], when present,
    is everything after it.
    """
    # Bug fix: invoking an alias with no arguments used to raise
    # IndexError on word_eol[1]; fall back to an empty argument string.
    args = word_eol[1] if len(word_eol) > 1 else ''
    hexchat.command('%s %s' % (get_alias(word[0]), args))
    return hexchat.EAT_HEXCHAT
def unalias_cb(word, word_eol, userdata):
    """Handle /UNALIAS <name>: drop the alias and report the outcome."""
    if len(word) <= 1:
        hexchat.prnt('Alias: Not enough arguements')
        return hexchat.EAT_ALL
    if remove_alias(word[1]):
        print('Alias: %s removed' % word[1])
    else:
        print('Alias: %s not found' % word[1])
    return hexchat.EAT_ALL
def alias_cb(word, word_eol, userdata):
    """Handle /ALIAS <name> <command>: create or overwrite an alias."""
    # Bug fix: the check was len(word) > 3, which rejected a one-word
    # command such as "/alias w whois" (word = ['alias', 'w', 'whois']).
    # A name plus at least one command word means len(word) > 2.
    if len(word) <= 2:
        print(help_msg)
        return hexchat.EAT_ALL
    # Dropping any existing hook first lets us tell "edited" from "added".
    edited = remove_alias(word[1], True)
    if add_alias(word[1], word_eol[2]):
        if edited:
            print('Alias: %s edited' % word[1])
        else:
            print('Alias: %s added' % word[1])
    else:
        print('Alias: %s failed to hook' % word[1])
    return hexchat.EAT_ALL
# FIXME: Formatting
def aliases_cb(word, word_eol, userdata):
    """Handle /ALIASES: print every stored alias and its command."""
    print('\026NAME\t\t\t\tCMD ')
    for pref in hexchat.list_pluginpref():
        # Bug fix: only alias prefs belong in this table; without the
        # filter every other plugin pref was mangled by the blind
        # pref[6:] slice and listed as a bogus alias.
        if pref[:6] == 'alias_':
            print('%s\t\t\t%s' % (pref[6:], get_alias(pref[6:])))
def help_cb(word, word_eol, userdata):
    """Intercept /HELP for this plugin's commands and defined aliases.

    NOTE(review): with no topic given this still eats the event, which
    suppresses HexChat's built-in help output -- confirm that is intended.
    """
    if len(word) > 1:
        topic = word[1]
        if topic in help_cmds:
            print(help_msg)
        elif topic in alias_hooks:
            print('Alias: "%s" is an alias for "%s"' % (topic, get_alias(topic)))
        else:
            return None
    return hexchat.EAT_HEXCHAT
def unload_callback(userdata):
    """Announce plugin unload; invoked by HexChat via hook_unload below."""
    print(__module_name__ + ' version ' + __module_version__ + ' unloaded.')
# Register the user-facing commands, the unload handler, and restore any
# aliases persisted from previous sessions.
hexchat.hook_command("alias", alias_cb)
hexchat.hook_command("aliases", aliases_cb)
hexchat.hook_command("unalias", unalias_cb)
hexchat.hook_command("help", help_cb)
hexchat.hook_unload(unload_callback)
load_aliases()
print(__module_name__ + ' version ' + __module_version__ + ' loaded.')
| mit | Python | |
a86852fe908bb0a44ef267a75b9446ddcaf03f6e | Add basic support for LimitlessLED | Duoxilian/home-assistant,betrisey/home-assistant,CCOSTAN/home-assistant,open-homeautomation/home-assistant,Smart-Torvy/torvy-home-assistant,pschmitt/home-assistant,hexxter/home-assistant,srcLurker/home-assistant,postlund/home-assistant,alexmogavero/home-assistant,MungoRae/home-assistant,srcLurker/home-assistant,turbokongen/home-assistant,shaftoe/home-assistant,keerts/home-assistant,tchellomello/home-assistant,jamespcole/home-assistant,ewandor/home-assistant,jabesq/home-assistant,qedi-r/home-assistant,w1ll1am23/home-assistant,bdfoster/blumate,luxus/home-assistant,ma314smith/home-assistant,miniconfig/home-assistant,bencmbrook/home-assistant,alexmogavero/home-assistant,Duoxilian/home-assistant,FreekingDean/home-assistant,jamespcole/home-assistant,rohitranjan1991/home-assistant,jaharkes/home-assistant,alexmogavero/home-assistant,ewandor/home-assistant,Julian/home-assistant,JshWright/home-assistant,leppa/home-assistant,stefan-jonasson/home-assistant,tomduijf/home-assistant,justyns/home-assistant,sdague/home-assistant,sanmiguel/home-assistant,tmm1/home-assistant,sffjunkie/home-assistant,nugget/home-assistant,devdelay/home-assistant,molobrakos/home-assistant,sanmiguel/home-assistant,open-homeautomation/home-assistant,betrisey/home-assistant,oandrew/home-assistant,kyvinh/home-assistant,instantchow/home-assistant,mezz64/home-assistant,Duoxilian/home-assistant,Zac-HD/home-assistant,soldag/home-assistant,kyvinh/home-assistant,varunr047/homefile,sfam/home-assistant,open-homeautomation/home-assistant,eagleamon/home-assistant,hmronline/home-assistant,morphis/home-assistant,tboyce1/home-assistant,ct-23/home-assistant,EricRho/home-assistant,Nzaga/home-assistant,g12mcgov/home-assistant,aronsky/home-assistant,Julian/home-assistant,LinuxChristian/home-assistant,partofthething/home-assistant,Theb-1/home-assistant,keerts/home-assistant,aoakeson/home-assistant,happyleavesaoc/home-assistant,nnic/home-assistant
,teodoc/home-assistant,emilhetty/home-assistant,mKeRix/home-assistant,PetePriority/home-assistant,Danielhiversen/home-assistant,MartinHjelmare/home-assistant,theolind/home-assistant,hmronline/home-assistant,Theb-1/home-assistant,emilhetty/home-assistant,jawilson/home-assistant,alanbowman/home-assistant,turbokongen/home-assistant,dorant/home-assistant,tmm1/home-assistant,nugget/home-assistant,nnic/home-assistant,pschmitt/home-assistant,jaharkes/home-assistant,varunr047/homefile,xifle/home-assistant,tboyce1/home-assistant,robbiet480/home-assistant,keerts/home-assistant,Danielhiversen/home-assistant,Cinntax/home-assistant,coteyr/home-assistant,qedi-r/home-assistant,alexkolar/home-assistant,ma314smith/home-assistant,varunr047/homefile,partofthething/home-assistant,sander76/home-assistant,leoc/home-assistant,mahendra-r/home-assistant,Zyell/home-assistant,devdelay/home-assistant,pottzer/home-assistant,oandrew/home-assistant,MartinHjelmare/home-assistant,PetePriority/home-assistant,MungoRae/home-assistant,JshWright/home-assistant,miniconfig/home-assistant,molobrakos/home-assistant,SEJeff/home-assistant,LinuxChristian/home-assistant,titilambert/home-assistant,stefan-jonasson/home-assistant,shaftoe/home-assistant,badele/home-assistant,sfam/home-assistant,luxus/home-assistant,aronsky/home-assistant,balloob/home-assistant,ma314smith/home-assistant,bencmbrook/home-assistant,deisi/home-assistant,open-homeautomation/home-assistant,tboyce1/home-assistant,kennedyshead/home-assistant,DavidLP/home-assistant,ErykB2000/home-assistant,mikaelboman/home-assistant,mikaelboman/home-assistant,vitorespindola/home-assistant,rohitranjan1991/home-assistant,caiuspb/home-assistant,ewandor/home-assistant,jawilson/home-assistant,GenericStudent/home-assistant,srcLurker/home-assistant,luxus/home-assistant,HydrelioxGitHub/home-assistant,happyleavesaoc/home-assistant,ct-23/home-assistant,stefan-jonasson/home-assistant,xifle/home-assistant,keerts/home-assistant,miniconfig/home-assistant,badele/home-assis
tant,Smart-Torvy/torvy-home-assistant,coteyr/home-assistant,deisi/home-assistant,Zyell/home-assistant,Julian/home-assistant,tomduijf/home-assistant,miniconfig/home-assistant,tboyce021/home-assistant,hmronline/home-assistant,adrienbrault/home-assistant,aequitas/home-assistant,hexxter/home-assistant,Nzaga/home-assistant,oandrew/home-assistant,happyleavesaoc/home-assistant,theolind/home-assistant,mikaelboman/home-assistant,titilambert/home-assistant,bencmbrook/home-assistant,DavidLP/home-assistant,EricRho/home-assistant,Zyell/home-assistant,auduny/home-assistant,DavidLP/home-assistant,fbradyirl/home-assistant,aoakeson/home-assistant,adrienbrault/home-assistant,shaftoe/home-assistant,joopert/home-assistant,robjohnson189/home-assistant,xifle/home-assistant,shaftoe/home-assistant,soldag/home-assistant,lukas-hetzenecker/home-assistant,hmronline/home-assistant,HydrelioxGitHub/home-assistant,joopert/home-assistant,eagleamon/home-assistant,betrisey/home-assistant,rohitranjan1991/home-assistant,mahendra-r/home-assistant,deisi/home-assistant,jabesq/home-assistant,hexxter/home-assistant,jnewland/home-assistant,mKeRix/home-assistant,mahendra-r/home-assistant,emilhetty/home-assistant,Julian/home-assistant,instantchow/home-assistant,hmronline/home-assistant,bdfoster/blumate,alanbowman/home-assistant,alanbowman/home-assistant,robjohnson189/home-assistant,dorant/home-assistant,sanmiguel/home-assistant,balloob/home-assistant,sffjunkie/home-assistant,dmeulen/home-assistant,sdague/home-assistant,persandstrom/home-assistant,tinloaf/home-assistant,nkgilley/home-assistant,emilhetty/home-assistant,robbiet480/home-assistant,LinuxChristian/home-assistant,FreekingDean/home-assistant,sander76/home-assistant,xifle/home-assistant,emilhetty/home-assistant,MartinHjelmare/home-assistant,GenericStudent/home-assistant,balloob/home-assistant,leoc/home-assistant,w1ll1am23/home-assistant,caiuspb/home-assistant,maddox/home-assistant,MungoRae/home-assistant,philipbl/home-assistant,persandstrom/home-assista
nt,LinuxChristian/home-assistant,postlund/home-assistant,CCOSTAN/home-assistant,ErykB2000/home-assistant,ErykB2000/home-assistant,mezz64/home-assistant,dmeulen/home-assistant,kennedyshead/home-assistant,HydrelioxGitHub/home-assistant,nevercast/home-assistant,CCOSTAN/home-assistant,nkgilley/home-assistant,ct-23/home-assistant,alexkolar/home-assistant,justyns/home-assistant,aequitas/home-assistant,ct-23/home-assistant,florianholzapfel/home-assistant,toddeye/home-assistant,LinuxChristian/home-assistant,varunr047/homefile,bdfoster/blumate,alexkolar/home-assistant,pottzer/home-assistant,oandrew/home-assistant,mKeRix/home-assistant,Smart-Torvy/torvy-home-assistant,maddox/home-assistant,ct-23/home-assistant,EricRho/home-assistant,aoakeson/home-assistant,jaharkes/home-assistant,Zac-HD/home-assistant,Teagan42/home-assistant,dmeulen/home-assistant,sffjunkie/home-assistant,aequitas/home-assistant,bdfoster/blumate,betrisey/home-assistant,eagleamon/home-assistant,Nzaga/home-assistant,instantchow/home-assistant,PetePriority/home-assistant,vitorespindola/home-assistant,fbradyirl/home-assistant,fbradyirl/home-assistant,deisi/home-assistant,leoc/home-assistant,nugget/home-assistant,morphis/home-assistant,sffjunkie/home-assistant,bdfoster/blumate,MungoRae/home-assistant,jabesq/home-assistant,JshWright/home-assistant,g12mcgov/home-assistant,michaelarnauts/home-assistant,Zac-HD/home-assistant,stefan-jonasson/home-assistant,morphis/home-assistant,SEJeff/home-assistant,robjohnson189/home-assistant,home-assistant/home-assistant,mikaelboman/home-assistant,SEJeff/home-assistant,badele/home-assistant,tinloaf/home-assistant,caiuspb/home-assistant,auduny/home-assistant,MungoRae/home-assistant,Smart-Torvy/torvy-home-assistant,kyvinh/home-assistant,jnewland/home-assistant,mikaelboman/home-assistant,lukas-hetzenecker/home-assistant,Teagan42/home-assistant,eagleamon/home-assistant,jamespcole/home-assistant,tomduijf/home-assistant,srcLurker/home-assistant,happyleavesaoc/home-assistant,michaelarnaut
s/home-assistant,devdelay/home-assistant,florianholzapfel/home-assistant,pottzer/home-assistant,leoc/home-assistant,Theb-1/home-assistant,alexmogavero/home-assistant,sffjunkie/home-assistant,leppa/home-assistant,vitorespindola/home-assistant,hexxter/home-assistant,tinloaf/home-assistant,florianholzapfel/home-assistant,tboyce021/home-assistant,Cinntax/home-assistant,deisi/home-assistant,kyvinh/home-assistant,teodoc/home-assistant,sfam/home-assistant,philipbl/home-assistant,tmm1/home-assistant,dorant/home-assistant,philipbl/home-assistant,philipbl/home-assistant,tchellomello/home-assistant,morphis/home-assistant,coteyr/home-assistant,justyns/home-assistant,home-assistant/home-assistant,Duoxilian/home-assistant,nevercast/home-assistant,nevercast/home-assistant,molobrakos/home-assistant,JshWright/home-assistant,g12mcgov/home-assistant,mKeRix/home-assistant,jaharkes/home-assistant,tboyce1/home-assistant,auduny/home-assistant,florianholzapfel/home-assistant,michaelarnauts/home-assistant,dmeulen/home-assistant,persandstrom/home-assistant,teodoc/home-assistant,robjohnson189/home-assistant,Zac-HD/home-assistant,varunr047/homefile,theolind/home-assistant,ma314smith/home-assistant,nnic/home-assistant,maddox/home-assistant,jnewland/home-assistant,toddeye/home-assistant,devdelay/home-assistant | homeassistant/components/light/limitlessled.py | homeassistant/components/light/limitlessled.py | """
homeassistant.components.light.limitlessled
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support for LimitlessLED bulbs, also known as...
EasyBulb
AppLight
AppLamp
MiLight
LEDme
dekolight
iLight
"""
import random
import logging
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import STATE_ON, STATE_OFF, DEVICE_DEFAULT_NAME
from homeassistant.components.light import ATTR_BRIGHTNESS
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Register one LimitlessLED light per configured group name (1-4)."""
    try:
        import ledcontroller
    except ImportError:
        _LOGGER.exception("Error while importing dependency ledcontroller.")
        return

    bridge = ledcontroller.LedController(config['host'])

    lights = []
    for group in range(1, 5):
        key = 'group_%d_name' % group
        if key in config:
            lights.append(LimitlessLED(bridge, group, config[key], STATE_OFF))

    add_devices_callback(lights)
class LimitlessLED(ToggleEntity):
    """ A single LimitlessLED group controlled through a wifi bridge. """
    def __init__(self, led, group, name, state, brightness=180):
        # ledcontroller.LedController instance talking to the bridge.
        self.led = led
        # Bridge group number (setup_platform registers groups 1-4).
        self.group = group

        # LimitlessLEDs don't report state, we have track it ourselves.
        self.led.off(self.group)

        self._name = name or DEVICE_DEFAULT_NAME
        self._state = state
        self._brightness = brightness

    @property
    def should_poll(self):
        """ No polling needed; the bulbs cannot report their state. """
        return False

    @property
    def name(self):
        """ Returns the name of the device if any. """
        return self._name

    @property
    def state(self):
        """ Returns the locally tracked on/off state. """
        return self._state

    @property
    def state_attributes(self):
        """ Returns optional state attributes (brightness while on). """
        if self.is_on:
            return {
                ATTR_BRIGHTNESS: self._brightness,
            }

    @property
    def is_on(self):
        """ True if device is on. """
        return self._state == STATE_ON

    def turn_on(self, **kwargs):
        """ Turn the device on, optionally updating the brightness. """
        self._state = STATE_ON

        if ATTR_BRIGHTNESS in kwargs:
            self._brightness = kwargs[ATTR_BRIGHTNESS]

        # NOTE(review): relies on set_brightness also powering the group
        # on -- confirm against the ledcontroller documentation.
        self.led.set_brightness(self._brightness, self.group)

    def turn_off(self, **kwargs):
        """ Turn the device off. """
        self._state = STATE_OFF
        self.led.off(self.group)
| mit | Python | |
8d1917785f4cf8cc17ec1b3898dcb90f7402cfe9 | Revert of Attempt to add tracing dir into path, so that tracing_project can be imported. (patchset #1 id:1 of https://codereview.chromium.org/1300373002/ ) | sahiljain/catapult,0x90sled/catapult,zeptonaut/catapult,SummerLW/Perf-Insight-Report,SummerLW/Perf-Insight-Report,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,SummerLW/Perf-Insight-Report,modulexcite/catapult,scottmcmaster/catapult,0x90sled/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult-csm,sahiljain/catapult,scottmcmaster/catapult,sahiljain/catapult,scottmcmaster/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,zeptonaut/catapult,modulexcite/catapult,0x90sled/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,benschmaus/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,zeptonaut/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,modulexcite/catapult,benschmaus/catapult | tracing/tracing_build/__init__.py | tracing/tracing_build/__init__.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
import os
import sys
import tracing_project
tracing_project.UpdateSysPathIfNeeded()
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
import os
import sys
def _AddTracingProjectPath():
    """Prepend this package's parent directory to sys.path (idempotent)."""
    tracing_path = os.path.normpath(
        os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
    if tracing_path not in sys.path:
        sys.path.insert(0, tracing_path)
_AddTracingProjectPath()
# With sys.path prepared, import the project helper and let it add any
# further dependency paths it needs.
import tracing_project
tracing_project.UpdateSysPathIfNeeded()
b09a6fdd14e2e65bddd03bd11d14a20133f36f57 | Create nad2wgs.py | Kevo89/UTM2LatLong | nad2wgs.py | nad2wgs.py | #----------------------------
# NAD83 to WGS84 Converter #
# Python Version #
#----------------------------
# Adapted from Node-coordinator Project (https://github.com/beatgammit/node-coordinator)
#
# Original and this version released under MIT License (Provided below as per licensing)
#
# Copyright (c) 2011 Larry Moore, larmoor@gmail.com
# Released under the MIT License; see
# http://www.opensource.org/licenses/mit-license.php
# or http://en.wikipedia.org/wiki/MIT_License
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
#conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# Import math module
import math
def utmToLatLong(utmNorthing, utmEasting, utmZone):
    """Convert NAD83/WGS84 UTM coordinates to geographic coordinates.

    Args:
        utmNorthing: northing in metres (northern hemisphere assumed;
            the southern-hemisphere offset below is never applied).
        utmEasting: easting in metres, including the 500,000 m false easting.
        utmZone: UTM zone number (1-60).

    Returns:
        (lat, lon) tuple in decimal degrees.
    """
    eastingOffset = 500000.0
    northingOffset = 10000000.0  # southern-hemisphere offset; unused here
    k0 = 0.9996  # UTM central-meridian scale factor
    equatorialRadius = 6378137.0  # GRS80/WGS84 semi-major axis, metres
    eccSquared = 0.006694380023  # first eccentricity squared
    eccPrimeSquared = eccSquared / (1 - eccSquared)
    e1 = (1 - math.sqrt(1 - eccSquared)) / (1 + math.sqrt(1 - eccSquared))
    rad2deg = 180.0/math.pi

    # Casts input from string to floats or ints
    # Removes 500,000 metre offset for longitude
    xUTM = float(utmEasting) - eastingOffset
    yUTM = float(utmNorthing)
    zoneNumber = int(utmZone)

    # This line below is for debug purposes only, remove for batch processes.
    # (print() call form works under both Python 2 and 3.)
    print('The input is: ' + str(utmEasting) + 'm E, ' + str(utmNorthing) + 'm N in NAD83 UTM Zone ' + str(utmZone) + '\n')

    # Finds the origin longitude for the zone
    lonOrigin = (zoneNumber - 1) * 6 - 180 + 3  # +3 puts in zone centre

    M = yUTM / k0  # This finds the meridional arc

    mu = M / (equatorialRadius * (1- eccSquared / 4 - 3 * eccSquared * eccSquared / 64 -5 * eccSquared * eccSquared * eccSquared /256))

    # Calculates the footprint latitude
    phi1Rad = mu + (3 * e1 / 2 - 27 * e1 * e1 * e1 /32) * math.sin(2*mu) + ( 21 * e1 * e1 / 16 - 55 * e1 * e1 * e1 * e1 / 32) * math.sin( 4 * mu) + (151 * e1 * e1 * e1 / 96) * math.sin(6 * mu)
    phi1 = phi1Rad * rad2deg

    # Variables for conversion equations
    N1 = equatorialRadius / math.sqrt( 1 - eccSquared * math.sin(phi1Rad) * math.sin(phi1Rad))
    T1 = math.tan(phi1Rad) * math.tan(phi1Rad)
    C1 = eccPrimeSquared * math.cos(phi1Rad) * math.cos(phi1Rad)
    R1 = equatorialRadius * (1 - eccSquared) / math.pow(1 - eccSquared * math.sin(phi1Rad) * math.sin(phi1Rad), 1.5)
    D = xUTM / (N1 * k0)

    # Calculate latitude, in decimal degrees
    lat = phi1Rad - ( N1 * math.tan(phi1Rad) / R1) * (D * D / 2 - (5 + 3 * T1 + 10 * C1 - 4 * C1 * C1 - 9 * eccPrimeSquared) * D * D * D * D / 24 + (61 + 90 * T1 + 298 * C1 + 45 * T1 * T1 - 252 * eccPrimeSquared - 3 * C1 * C1) * D * D * D * D * D * D / 720)
    lat = lat * rad2deg

    # Calculate longitude, in decimal degrees
    lon = (D - (1 + 2 * T1 + C1) * D * D * D / 6 + (5 - 2 * C1 + 28 * T1 - 3 * C1 * C1 + 8 * eccPrimeSquared + 24 * T1 * T1) * D * D * D * D * D / 120) / math.cos(phi1Rad)
    lon = lonOrigin + lon * rad2deg

    # Print function below is for debug purposes
    print("Lat: " + str(round(lat, 5)) + ", Long: " + str(round(lon,5)))

    # Bug fix: the original ended with "return lat" followed by an
    # unreachable "return lon", so the longitude was never returned.
    # Return both values; the only caller in this script discards the
    # return value, so this is safe.
    return lat, lon
# For manual input.
# NOTE(review): this file uses Python 2 syntax elsewhere, where input()
# evaluates the typed text (numbers become ints/floats) -- confirm the
# intended Python version before reuse.
northing = input("Northing: ")
easting = input("Easting: ")
zone = input("UTM Zone: ")
utmToLatLong(northing, easting, zone)
| mit | Python | |
37f286812bea7429bea67172a40d26ad435d6f67 | Add test for 'holes' argument in add_polygon | nschloe/python4gmsh | test/examples/hole_in_square.py | test/examples/hole_in_square.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import pygmsh as pg
import numpy as np
def generate():
# Characteristic length
lcar = 1e-1
# Coordinates of lower-left and upper-right vertices of a square domain
xmin = 0.0
xmax = 5.0
ymin = 0.0
ymax = 5.0
# Vertices of a square hole
squareHoleCoordinates = np.array([[1, 1, 0],
[4, 1, 0],
[4, 4, 0],
[1, 4, 0]])
# Create geometric object
geom = pg.Geometry()
# Create square hole
squareHole = [geom.add_polygon_loop(squareHoleCoordinates, lcar)]
# Create square domain with square hole
geom.add_rectangle(xmin, xmax, ymin, ymax, 0.0, lcar, holes=squareHole)
# Return geo-file code
return geom.get_code()
if __name__ == '__main__':
print(generate())
| bsd-3-clause | Python | |
db4bc200f9a48edf9e160c2134293df0313183a7 | Add conditional command prefix plugin | Aaron1011/CloudBotPlugins | conditional_prefix.py | conditional_prefix.py | from cloudbot import hook
import re
@hook.sieve
def conditional_prefix(bot, event, plugin):
    """Sieve that swallows prefixed commands in configured channels.

    In channels listed under the connection's ``prefix_blocked_channels``
    config key, a message starting with the command prefix is dropped
    (``None`` is returned) so the command never fires.  Everything else --
    non-command plugins, unlisted channels, and private messages (where
    the channel name equals the nick) -- passes through unchanged.
    """
    if plugin.type != 'command':
        return event
    if event.chan not in event.conn.config['prefix_blocked_channels']:
        return event
    # Private messages keep their commands working.
    if event.chan.lower() == event.nick.lower():
        return event
    command_prefix = event.conn.config['command_prefix']
    # The template has a single {} placeholder; the original code also passed
    # event.conn.nick as a second, silently ignored .format() argument.
    command_re = r'(?i)^(?:[{}])(\w+)(?:$|\s+)(.*)'.format(command_prefix)
    if re.match(command_re, event.content):
        return None
    return event
| mit | Python | |
46977f4d36e09cccd5485352b27d1bac4d5b702a | Add unit tests for cmus module | tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status | tests/modules/test_cmus.py | tests/modules/test_cmus.py | # pylint: disable=C0103,C0111
import mock
import unittest
import tests.mocks as mocks
from bumblebee.config import Config
from bumblebee.input import I3BarInput, LEFT_MOUSE
from bumblebee.modules.cmus import Module
class TestCmusModule(unittest.TestCase):
    """Unit tests for the cmus module.

    Verifies how the module parses ``cmus-remote -Q`` output, the states it
    exposes on its widgets, and the cmus-remote commands triggered by mouse
    interaction.

    Uses ``assertEqual``/``assertIn`` throughout: the deprecated
    ``assertEquals`` alias was removed in Python 3.12.
    """

    def setUp(self):
        # Patch epoll/stdin so the input thread can be driven from the test.
        self._stdin, self._select, self.stdin, self.select = mocks.epoll_mock("bumblebee.input")
        self.popen = mocks.MockPopen()
        self.config = Config()
        self.input = I3BarInput()
        self.engine = mock.Mock()
        self.engine.input = self.input
        self.input.need_event = True
        self.module = Module(engine=self.engine, config={ "config": self.config })
        for widget in self.module.widgets():
            widget.link_module(self.module)
            self.anyWidget = widget
        # Mirrors the line-oriented output of `cmus-remote -Q`.
        self.songTemplate = """
status {status}
file /path/to/file
duration {duration}
position {position}
tag title {title}
tag artist {artist}
tag album {album}
tag tracknumber 1
tag date 1984
tag comment comment
"""

    def tearDown(self):
        self._stdin.stop()
        self._select.stop()
        self.popen.cleanup()

    def test_read_song(self):
        # Updating the module must query cmus for its current status.
        self.popen.mock.communicate.return_value = ("song", None)
        self.module.update_all()
        self.popen.assert_call("cmus-remote -Q")

    def test_handle_runtimeerror(self):
        # A failing cmus-remote call must not crash; description degrades.
        self.popen.mock.communicate.side_effect = RuntimeError("error loading song")
        self.module.update_all()
        self.assertEqual(self.module.description(self.anyWidget), " - /")

    def test_format(self):
        self.popen.mock.communicate.return_value = (self.songTemplate.format(
            artist="an artist", title="a title", duration="100", position="20",
            album="an album", status="irrelevant"
        ), None)
        self.module.update_all()
        self.assertEqual(self.module.description(self.anyWidget),
            "an artist - a title 00:20/01:40"
        )

    def test_repeat(self):
        self.popen.mock.communicate.return_value = ("set repeat false", None)
        self.module.update_all()
        self.assertIn("repeat-off", self.module.state(self.module.widget("cmus.repeat")))

        self.popen.mock.communicate.return_value = ("set repeat true", None)
        self.module.update_all()
        self.assertIn("repeat-on", self.module.state(self.module.widget("cmus.repeat")))

    def test_shuffle(self):
        self.popen.mock.communicate.return_value = ("set shuffle false", None)
        self.module.update_all()
        self.assertIn("shuffle-off", self.module.state(self.module.widget("cmus.shuffle")))

        self.popen.mock.communicate.return_value = ("set shuffle true", None)
        self.module.update_all()
        self.assertIn("shuffle-on", self.module.state(self.module.widget("cmus.shuffle")))

    def test_prevnext(self):
        self.assertIn("prev", self.module.state(self.module.widget("cmus.prev")))
        self.assertIn("next", self.module.state(self.module.widget("cmus.next")))

    def test_main(self):
        # The main widget mirrors whatever playback status cmus reports.
        for status in ["paused", "playing", "stopped"]:
            self.popen.mock.communicate.return_value = ("status {}".format(status), None)
            self.module.update_all()
            self.assertIn(status, self.module.state(self.module.widget("cmus.main")))

    def test_widget(self):
        self.assertEqual(len(self.module.widgets()), 5)
        for idx, val in enumerate(["prev", "main", "next", "shuffle", "repeat"]):
            self.assertEqual(self.module.widgets()[idx].name, "cmus.{}".format(val))

    def test_interaction(self):
        # Left-clicking each widget must trigger its cmus-remote command.
        events = [
            {"widget": "cmus.shuffle", "action": "cmus-remote -S"},
            {"widget": "cmus.repeat", "action": "cmus-remote -R"},
            {"widget": "cmus.next", "action": "cmus-remote -n"},
            {"widget": "cmus.prev", "action": "cmus-remote -r"},
            {"widget": "cmus.main", "action": "cmus-remote -u"},
        ]

        for event in events:
            mocks.mouseEvent(stdin=self.stdin, inp=self.input, module=self.module, instance=self.module.widget(event["widget"]).id, button=LEFT_MOUSE)
            self.popen.assert_call(event["action"])
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | Python | |
450eb8aee6d3638d6a5211e6c5ae1fa8ff8d1b9b | Add unittests for SelectTask, ProcessStreamHandler | fmoo/sparts,pshuff/sparts,fmoo/sparts,pshuff/sparts,bboozzoo/sparts,djipko/sparts,djipko/sparts,facebook/sparts,bboozzoo/sparts,facebook/sparts | tests/tasks/test_select.py | tests/tasks/test_select.py | # Copyright (c) 2014, Facebook, Inc. All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from sparts.fileutils import set_nonblocking
from sparts.tests.base import SingleTaskTestCase
from sparts.tasks.select import SelectTask, ProcessStreamHandler
import os
import subprocess
import threading
class TestSelectTask(SingleTaskTestCase):
    """Tests fd registration and event dispatch on SelectTask.

    Each test creates a pipe, registers one end for read/write/except
    events, verifies the callback fires (where it can be triggered), and
    checks that unregistering returns the originally registered callback.
    """
    TASK = SelectTask

    def test_read_event(self):
        r, w = os.pipe()
        set_nonblocking(r)
        try:
            fired = threading.Event()
            def on_event(fd):
                self.logger.info('on_event(%s)', fd)
                self.assertEqual(fd, r)
                fired.set()

            self.task.register_read(r, on_event)
            # os.write() requires bytes on Python 3 (b'1' also works on 2.6+);
            # one byte makes the read end readable and should fire the callback.
            os.write(w, b'1')
            fired.wait(3.0)
            self.assertTrue(fired.is_set())
            cb = self.task.unregister_read(r)
            self.assertEqual(cb, on_event)
        finally:
            os.close(r)
            os.close(w)

    def test_write_event(self):
        r, w = os.pipe()
        try:
            fired = threading.Event()
            def on_event(fd):
                self.logger.info('on_event(%s)', fd)
                self.assertEqual(fd, w)
                fired.set()

            # An empty pipe is immediately writable, so this fires right away.
            self.task.register_write(w, on_event)
            fired.wait(3.0)
            self.assertTrue(fired.is_set())
            cb = self.task.unregister_write(w)
            self.assertEqual(cb, on_event)
        finally:
            os.close(r)
            os.close(w)

    def test_except_event(self):
        # Create the pipe *before* the try so the finally block cannot hit a
        # NameError on r/w if os.pipe() itself fails (consistent with the
        # other tests in this class).
        r, w = os.pipe()
        try:
            fired = threading.Event()
            def on_event(fd):
                self.logger.info('on_event(%s)', fd)
                self.assertEqual(fd, w)
                fired.set()

            self.task.register_except(w, on_event)
            # TODO: Actually write a test case that can trigger the
            # exceptional circumstances that causes this event to fire.
            cb = self.task.unregister_except(w)
            self.assertEqual(cb, on_event)
        finally:
            os.close(r)
            os.close(w)
class TestSelectCommands(SingleTaskTestCase):
    """Integration test: ProcessStreamHandler delivers a child process's
    stdout, stderr and exit code to the registered callbacks via SelectTask."""
    TASK = SelectTask

    def test_basic_popen(self):
        # Child prints "123\n" on stdout and "456\n" on stderr, then exits 0.
        p = subprocess.Popen('echo 123; echo 456 1>&2', shell=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        # Locally defined callback to verify stdout callback
        def verify_out(msg):
            self.logger.debug('verify_out(%s)', msg)
            self.assertEqual(msg, "123\n")
        # Wrapped in a Mock so call counts/arguments can be asserted below.
        verify_out = self.mock.Mock(wraps=verify_out)

        # Locally defined callback to verify stderr callback
        def verify_err(msg):
            self.logger.debug('verify_err(%s)', msg)
            self.assertEqual(msg, "456\n")
        verify_err = self.mock.Mock(wraps=verify_err)

        # Locally defined callback to verify exit callback
        exited = threading.Event()
        def verify_exit(code):
            self.logger.debug('verify_exit(%s)', code)
            self.assertEqual(code, 0)
            exited.set()
        verify_exit = self.mock.Mock(wraps=verify_exit)

        # Create the handler
        ProcessStreamHandler(p, self.task, on_stdout=verify_out,
                             on_stderr=verify_err, on_exit=verify_exit)

        # Wait for the process to exit
        self.logger.debug('waiting for exit...')
        exited.wait(3.0)

        # Check return code, etc.
        # NOTE(review): assertNotNone is presumably provided by the sparts
        # test base class (it is not stock unittest) -- confirm before porting.
        self.assertNotNone(p.poll())
        self.assertEqual(p.poll(), 0)
        self.assertTrue(exited.is_set())

        # Assert mocks called correctly
        verify_out.assert_called_once_with("123\n")
        verify_err.assert_called_once_with("456\n")
        verify_exit.assert_called_once_with(0)
| bsd-3-clause | Python | |
a2ea7c7d4d6b680f180b9916eb2a814713887154 | Test empty record. | Byhiras/pyavroc,Byhiras/pyavroc,Byhiras/pyavroc,Byhiras/pyavroc | tests/test_empty_record.py | tests/test_empty_record.py | #!/usr/bin/env python
# Copyright 2016 Ben Walsh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import pytest
import shutil
import tempfile
import avro.schema
import avro.datafile
import avro.io
import pyavroc
import _testhelper
# Number of (empty) records written into each test file.
NRECORDS = 100

# Schema of a record with no fields at all -- the "empty record" under test.
json_schema = '''{"namespace": "example.avro",
"type": "record",
"name": "User",
"fields": [
]
}'''
def _python_create_file(filename):
    """Write NRECORDS empty records to *filename* with the reference avro library."""
    # The schema-parsing entry point was renamed to Parse for Python 3.
    parse = avro.schema.Parse if sys.version_info >= (3,) else avro.schema.parse
    schema = parse(json_schema)

    with open(filename, 'wb') as fp:
        writer = avro.datafile.DataFileWriter(fp, avro.io.DatumWriter(), schema)
        for _ in range(NRECORDS):
            writer.append({})
        writer.close()
def _pyavroc_create_file(filename):
    """Write NRECORDS empty records to *filename* with pyavroc."""
    avtypes = pyavroc.create_types(json_schema)
    record_cls = avtypes.User
    with open(filename, 'w') as fp:
        writer = pyavroc.AvroFileWriter(fp, json_schema)
        for _ in range(NRECORDS):
            writer.write(record_cls())
        writer.close()
def _create_files():
    """Create one avro-written and one pyavroc-written file in a fresh temp dir.

    Returns (dirname, python_filename, pyavroc_filename).
    """
    dirname = tempfile.mkdtemp()
    python_filename, pyavroc_filename = (
        os.path.join(dirname, name)
        for name in ("test_python.avro", "test_pyavroc.avro")
    )
    _python_create_file(python_filename)
    _pyavroc_create_file(pyavroc_filename)
    return (dirname, python_filename, pyavroc_filename)
def _delete_files(dirname):
    # Remove the temporary directory (and the .avro files inside it).
    shutil.rmtree(dirname)
def _python_read(filename):
    """Return all records from *filename* using the reference avro reader."""
    reader = avro.datafile.DataFileReader(open(filename, 'rb'), avro.io.DatumReader())
    return [record for record in reader]
def _pyavroc_read(filename, types):
    """Return all records from *filename* using pyavroc.

    *types* is forwarded to AvroFileReader (False -> plain dicts).
    """
    reader = pyavroc.AvroFileReader(open(filename), types=types)
    return [record for record in reader]
def test_load_same():
    """Cross-check: each implementation must read the other's file identically."""
    dirname, python_filename, pyavroc_filename = _create_files()
    try:
        # pyavroc reads the avro-written file; avro reads the pyavroc-written
        # one. Both contain NRECORDS empty records, so the lists must match.
        assert _pyavroc_read(python_filename, False) == _python_read(pyavroc_filename)
    finally:
        # Clean up even when the assertion fails (the original leaked the
        # temp directory on failure).
        _delete_files(dirname)
| apache-2.0 | Python | |
cba5a8058e96bd6c5ee639df223c77f56d8296fa | Add ladot package (#10905) | LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/ladot/package.py | var/spack/repos/builtin/packages/ladot/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Ladot(Package):
    """Ladot is a script that makes using LaTeX in graphs generated by dot
    (graphviz) relatively easy."""

    homepage = "https://brighten.bigw.org/projects/ladot/"
    url      = "https://brighten.bigw.org/projects/ladot/ladot-1.2.tar.gz"

    version('1.2', sha256='f829eeca829b82c0315cd87bffe410bccab96309b86b1c883b3ddaa93170f25e')

    # ladot is a script that is run through perl and drives graphviz at
    # run time; both are also exercised by the test build below.
    depends_on('perl', type=('run', 'test'))
    depends_on('graphviz', type=('run', 'test'))
    # Building the bundled example (used as the install-time test) needs LaTeX.
    depends_on('texlive', type='test')

    def install(self, spec, prefix):
        # No build step: optionally build the bundled example as a smoke
        # test, then copy the single 'ladot' script into the prefix.
        if self.run_tests:
            with working_dir('example'):
                make()
        mkdir(prefix.bin)
        install('ladot', prefix.bin)
| lgpl-2.1 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.