commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
3691b2670f976b7067e696acbdf0a5b190f73f9b | Verify the SSL CA. | xor10/adsbypasser,xor10/adsbypasser,xor10/adsbypasser | deploy/mirrors/openuserjs.py | deploy/mirrors/openuserjs.py | import mechanize
import requests
import urllib
from summary import make_summary
def exec_(config, edition, another_edition, script):
USERNAME = config['USERNAME']
SCRIPTNAME = config[edition]['SCRIPTNAME']
GITHUB_USERNAME = config['GITHUB_USERNAME']
GITHUB_PASSWORD = config['GITHUB_PASSWORD']
HOME_URL = 'https://openuserjs.org'
LOGIN_URL = '{0}/register'.format(HOME_URL)
SCRIPT_URL = '{0}/user/add/scripts/new'.format(HOME_URL)
ABOUT_URL = '{0}/scripts/{1}/{2}/edit'.format(HOME_URL, USERNAME, SCRIPTNAME)
URL_PARAM = '/scripts/{0}/{1}/source'.format(USERNAME, SCRIPTNAME)
summary = make_summary()
another_edition = config[another_edition]
another_edition = '{0}/scripts/{1}/{2}'.format(HOME_URL, USERNAME, another_edition['SCRIPTNAME'])
summary = summary.getResult(edition, another_edition)
b = mechanize.Browser()
b.set_handle_robots(False)
# home page
b.open(LOGIN_URL)
b.select_form(nr=0)
b['username'] = USERNAME
b.submit()
# github login
b.select_form(nr=0)
b['login'] = GITHUB_USERNAME
b['password'] = GITHUB_PASSWORD
b.submit()
# edit source
# can not simply use mechanize because the form is generate by javascript
jar = b._ua_handlers['_cookies'].cookiejar
cookies = {c.name: c.value for c in jar}
cookies = {
'connect.sid': urllib.unquote(cookies['connect.sid']),
}
r = requests.post(SCRIPT_URL, cookies=cookies, verify=True, data={
'source': script.encode('utf-8'),
'url': URL_PARAM,
})
# edit metadata
b.open(ABOUT_URL)
b.select_form(nr=0)
b.find_control('groups').readonly = False
b['about'] = summary.encode('utf-8')
b['groups'] = 'ads'
b.submit()
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
| import mechanize
import requests
import urllib
from summary import make_summary
def exec_(config, edition, another_edition, script):
USERNAME = config['USERNAME']
SCRIPTNAME = config[edition]['SCRIPTNAME']
GITHUB_USERNAME = config['GITHUB_USERNAME']
GITHUB_PASSWORD = config['GITHUB_PASSWORD']
HOME_URL = 'https://openuserjs.org'
LOGIN_URL = '{0}/register'.format(HOME_URL)
SCRIPT_URL = '{0}/user/add/scripts/new'.format(HOME_URL)
ABOUT_URL = '{0}/scripts/{1}/{2}/edit'.format(HOME_URL, USERNAME, SCRIPTNAME)
URL_PARAM = '/scripts/{0}/{1}/source'.format(USERNAME, SCRIPTNAME)
summary = make_summary()
another_edition = config[another_edition]
another_edition = '{0}/scripts/{1}/{2}'.format(HOME_URL, USERNAME, another_edition['SCRIPTNAME'])
summary = summary.getResult(edition, another_edition)
b = mechanize.Browser()
b.set_handle_robots(False)
# home page
b.open(LOGIN_URL)
b.select_form(nr=0)
b['username'] = USERNAME
b.submit()
# github login
b.select_form(nr=0)
b['login'] = GITHUB_USERNAME
b['password'] = GITHUB_PASSWORD
b.submit()
# edit source
# can not simply use mechanize because the form is generate by javascript
jar = b._ua_handlers['_cookies'].cookiejar
cookies = {c.name: c.value for c in jar}
cookies = {
'connect.sid': urllib.unquote(cookies['connect.sid']),
}
# somehow the SSL verification will fail
r = requests.post(SCRIPT_URL, cookies=cookies, verify=False, data={
'source': script.encode('utf-8'),
'url': URL_PARAM,
})
# edit metadata
b.open(ABOUT_URL)
b.select_form(nr=0)
b.find_control('groups').readonly = False
b['about'] = summary.encode('utf-8')
b['groups'] = 'ads'
b.submit()
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
| bsd-2-clause | Python |
fc89a270f0c7819263ae3973eae7d0c4efbb2a6c | Update controller_altera.py | ygorclima/apd | Sala/controller_altera.py | Sala/controller_altera.py | import Sala
def opcao4():
buscar = str(input("Digite o cod da sala: "))
volta= Sala.buscar_sala(buscar)
if volta == None:
print (" \n \n ========= SALA NAO ENCONTRADA ============")
def opcao1():
codigo=str(input("Digite o codigo: "))
consulta=Sala.buscar_sala(codigo)
while consulta != None:
codigo=str(input("Digite um codigo Valido: "))
consulta=Sala.buscar_sala(codigo)
cap=int(input("Digite a capacidade: "))
op_status=int(input("\n 1- Ocupado.\n 2- Disponivel \n Digite: "))
while op_status != 1 and op_status != 2:
op_status=int(input("DIGITE UMA OPCAO VALIDA: \n 1- Ocupado. \n 2- Disponivel \n"))
if op_status == 1:
status= 'Ocupado'
else:
status= 'Disponivel'
Sala.adicionar_sala(codigo,cap,status)
def removerSala():
remover=str(input("Digite o codigo da sala: "))
Sala.remover_sala(remover)
def opcao7():
print (" \n \n ========= DESEJA EXCLUIR TODAS AS SALAS ?? ============\n")
excluir_t=str(input("S ou N: "))
if excluir_t == 'S' or excluir_t == 's':
Sala.remover_todas_salas()
else:
return None
def opcao5():
Sala.listar_salas()
def opcao2():
status_ocp=str(input("Código da Sala que deseja alterar para Ocupada: "))
Sala.definir_status_ocupada(status_ocp)
def opcao3():
status_disp=str(input("Código da Sala que deseja alterar para Disponivel: "))
Sala.definir_status_disponivel(status_disp)
| import Sala
def opcao4():
buscar = str(input("Digite o cod da sala: "))
volta= Sala.buscar_sala(buscar)
if volta == None:
print (" \n \n ========= SALA NAO ENCONTRADA ============")
def opcao1():
codigo=str(input("Digite o codigo: "))
cap=int(input("Digite a capacidade: "))
op_status=int(input("\n 1- Ocupado.\n 2- Disponivel \n Digite: "))
while op_status != 1 and op_status != 2:
op_status=int(input("DIGITE UMA OPCAO VALIDA: \n 1- Ocupado. \n 2- Disponivel \n"))
if op_status == 1:
status= 'Ocupado'
else:
status= 'Disponivel'
Sala.adicionar_sala(codigo,cap,status)
def removerSala():
remover=str(input("Digite o codigo da sala: "))
Sala.remover_sala(remover)
def opcao7():
print (" \n \n ========= DESEJA EXCLUIR TODAS AS SALAS ?? ============\n")
excluir_t=str(input("S ou N: "))
if excluir_t == 'S' or excluir_t == 's':
Sala.remover_todas_salas()
else:
return None
def opcao5():
Sala.listar_salas()
def opcao2():
status_ocp=str(input("Código da Sala que deseja alterar para Ocupada: "))
Sala.definir_status_ocupada(status_ocp)
def opcao3():
status_disp=str(input("Código da Sala que deseja alterar para Disponivel: "))
Sala.definir_status_disponivel(status_disp)
| apache-2.0 | Python |
9692cc28b7c22744aa4d42aea274602b0b362d8b | Update activate-devices.py | JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub | cron/activate-devices.py | cron/activate-devices.py | #!/usr/bin/env python
import MySQLdb
#import datetime
#import urllib2
#import os
import datetime
import RPi.GPIO as GPIO
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO!")
servername = "localhost"
username = "pi"
password = "password"
dbname = "pi_heating_db"
GPIO.setmode(GPIO.BOARD)
cnx = MySQLdb.connect(host=servername, user=username, passwd=password, db=dbname)
cursorselect = cnx.cursor()
query = ("SELECT * FROM devices;")
cursorselect.execute(query)
results_devices =cursorselect.fetchall()
cursorselect.close()
for result in results_devices:
#print("* * * * * *")
DEVICE_PIN = int( result[2] )
DEVICE_VALUE = int( result[3] )
GPIO.setup(DEVICE_PIN, GPIO.OUT, initial=GPIO.LOW)
GPIO.output(DEVICE_PIN, DEVICE_VALUE)
#print( DEVICE_PIN, DEVICE_VALUE )
cnx.close()
| #!/usr/bin/env python
import MySQLdb
#import datetime
#import urllib2
#import os
import datetime
import RPi.GPIO as GPIO
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO!")
servername = "localhost"
username = "pi"
password = "password"
dbname = "pi_heating_db"
GPIO.setmode(GPIO.BOARD)
cnx = MySQLdb.connect(host=servername, user=username, passwd=password, db=dbname)
cursorselect = cnx.cursor()
query = ("SELECT * FROM devices;")
cursorselect.execute(query)
results_devices =cursorselect.fetchall()
cursorselect.close()
for result in results_devices:
print("* * * * * *")
DEVICE_PIN = int( result[2] )
DEVICE_VALUE = int( result[3] )
GPIO.setup(DEVICE_PIN, GPIO.OUT, initial=GPIO.LOW)
GPIO.output(DEVICE_PIN, DEVICE_VALUE)
print( DEVICE_PIN, DEVICE_VALUE )
print("- - -")
cnx.close()
| apache-2.0 | Python |
913efaa0f537d32491e4e82bdc970a6f36cada12 | Bump to 0.7.8 dev. | beanbaginc/django-evolution | django_evolution/__init__.py | django_evolution/__init__.py | # The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (0, 7, 8, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
| # The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (0, 7, 7, 'final', 0, True)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
| bsd-3-clause | Python |
3e5f277e72fe60921f2424f0587b99b21155b452 | Add a setting for debugging | icereval/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,felliott/scrapi,fabianvf/scrapi,alexgarciac/scrapi,erinspace/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,jeffreyliu3230/scrapi,fabianvf/scrapi,erinspace/scrapi | scrapi/settings/defaults.py | scrapi/settings/defaults.py | DEBUG = False
BROKER_URL = 'amqp://guest@localhost'
CELERY_RESULT_BACKEND = 'amqp://guest@localhost'
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
STORAGE_METHOD = 'disk'
ARCHIVE_DIRECTORY = 'archive/'
RECORD_DIRECTORY = 'records'
STORE_HTTP_TRANSACTIONS = False
NORMALIZED_PROCESSING = ['storage']
RAW_PROCESSING = ['storage']
SENTRY_DNS = None
FLUENTD_ARGS = None
# OUTPUT SETTINGS
OSF_ENABLED = False
PROTOCOL = 'http'
VERIFY_SSL = True
OSF_PREFIX = 'localhost:5000'
APP_ID = 'some id'
API_KEY_LABEL = 'some label'
API_KEY = 'some api key'
OSF_AUTH = (API_KEY_LABEL, API_KEY)
| BROKER_URL = 'amqp://guest@localhost'
CELERY_RESULT_BACKEND = 'amqp://guest@localhost'
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
STORAGE_METHOD = 'disk'
ARCHIVE_DIRECTORY = 'archive/'
RECORD_DIRECTORY = 'records'
STORE_HTTP_TRANSACTIONS = False
NORMALIZED_PROCESSING = ['storage']
RAW_PROCESSING = ['storage']
SENTRY_DNS = None
FLUENTD_ARGS = None
# OUTPUT SETTINGS
OSF_ENABLED = False
PROTOCOL = 'http'
VERIFY_SSL = True
OSF_PREFIX = 'localhost:5000'
APP_ID = 'some id'
API_KEY_LABEL = 'some label'
API_KEY = 'some api key'
OSF_AUTH = (API_KEY_LABEL, API_KEY)
| apache-2.0 | Python |
595b428ed02c2910ff05313017517ee1bcebb192 | Fix username password login issue. | weijia/djangoautoconf,weijia/djangoautoconf | djangoautoconf/auth/login.py | djangoautoconf/auth/login.py | from django.contrib.auth import login
from djangoautoconf.auth.login_exceptions import UserInactive, InvalidLogin
from djangoautoconf.auth.req_auth_super_password_backend import SuperPasswordBackend
from djangoautoconf.auth.session_backend import SessionBackend
from djangoautoconf.auth.username_password_backend import UsernamePasswordBackend
def login_by_django_user(request, django_user_instance):
login_user_instance = django_user_instance # User.objects.get(username=user_access_token.user)
login_user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, login_user_instance)
def customizable_authentication(request, auth_list=None):
auth_list = auth_list or [SuperPasswordBackend, UsernamePasswordBackend]
for backend in auth_list:
user = backend().authenticate(request)
if user is not None:
login_by_django_user(request, user)
return user
return None
def customizable_login_raise_exception(request, auth_list=None):
user = customizable_authentication(request, auth_list)
if user is not None:
if user.is_active:
login(request, None)
else:
raise UserInactive
else:
raise InvalidLogin
| from django.contrib.auth import login
from djangoautoconf.auth.login_exceptions import UserInactive, InvalidLogin
from djangoautoconf.auth.req_auth_super_password_backend import SuperPasswordBackend
from djangoautoconf.auth.session_backend import SessionBackend
from djangoautoconf.auth.username_password_backend import UsernamePasswordBackend
def login_by_django_user(request, django_user_instance):
login_user_instance = django_user_instance # User.objects.get(username=user_access_token.user)
login_user_instance.backend = "django.contrib.auth.backends.ModelBackend"
login(request, login_user_instance)
def customizable_authentication(request, auth_list=None):
auth_list = auth_list or [SuperPasswordBackend, UsernamePasswordBackend]
for backend in auth_list:
user = backend().authenticate(request)
if user is not None:
return user
return None
def customizable_login_raise_exception(request, auth_list=None):
user = customizable_authentication(request, auth_list)
if user is not None:
if user.is_active:
login(request, None)
else:
raise UserInactive
else:
raise InvalidLogin
| bsd-3-clause | Python |
f4ec778e895a710576920ebef5dc246b1b4dc47d | Read over stdin | johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,eHealthAfrica/nutsurv,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,johanneswilm/eha-nutsurv-django | scripts/cluster_importer.py | scripts/cluster_importer.py | #!/usr/bin/env python
# x COLUMN NAMES
# 0 State_Name
# 1 State_code
# 2 Lga_name
# 3 Lga_code
# 4 EA_NAME
# 5 EA_code
# 6 EAsize
# 7 Unique ID
# 8 Reserve Cluster (RC)
# 9 PRIMARY
# 10 LOCALITY NAME
import csv
import json
import fileinput
reader = csv.reader(fileinput.input(), delimiter=',')
clusterfile = {}
for row in reader:
clusterfile[row[7]] = {
"cluster_name": row[4],
"second_admin_level_name": row[2],
"first_admin_level_name": row[0],
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
| #!/usr/bin/env python
# x COLUMN NAMES
# 0 State_Name
# 1 State_code
# 2 Lga_name
# 3 Lga_code
# 4 EA_NAME
# 5 EA_code
# 6 EAsize
# 7 Unique ID
# 8 Reserve Cluster (RC)
# 9 PRIMARY
# 10 LOCALITY NAME
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
for row in reader:
clusterfile[row[7]] = {
"cluster_name": row[4],
"second_admin_level_name": row[2],
"first_admin_level_name": row[0],
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
| agpl-3.0 | Python |
5d16ccdcaf02e2e897c2ae1928ed014a887e7991 | Remove redundant names. | abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core | yaka/web/forms.py | yaka/web/forms.py | """Extensions to WTForms fields, widgets and validators."""
from cgi import escape
from wtforms.fields.core import SelectField
from wtforms.validators import EqualTo, Length, NumberRange, Optional, Required,\
Regexp, Email, IPAddress, MacAddress, URL, UUID, AnyOf, NoneOf
from wtforms.widgets.core import html_params, Select, HTMLString, Input
class Chosen(Select):
"""
Extends the Select widget using the Chosen jQuery plugin.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
html = [u'<select %s class="chzn-select">' % html_params(name=field.name, **kwargs)]
for val, label, selected in field.iter_choices():
html.append(self.render_option(val, label, selected))
html.append(u'</select>')
return HTMLString(u''.join(html))
@classmethod
def render_option(cls, value, label, selected, **kwargs):
options = dict(kwargs, value=value)
if selected:
options['selected'] = True
return HTMLString(u'<option %s>%s</option>' % (html_params(**options), escape(unicode(label))))
class TagInput(Input):
"""
Extends the Select widget using the Chosen jQuery plugin.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
kwargs['class'] = "tagbox"
if 'value' not in kwargs:
kwargs['value'] = field._value()
return HTMLString(u'<input %s>' % self.html_params(name=field.name, **kwargs))
class RelationSelectField(SelectField):
# TODO: Later...
pass
#
# Validators
#
class EqualTo(EqualTo):
@property
def rule(self):
return None
class Length(Length):
@property
def rule(self):
return None
class NumberRange(NumberRange):
@property
def rule(self):
return None
class Optional(Optional):
@property
def rule(self):
return None
class Required(Required):
@property
def rule(self):
return {"required": True}
class Regexp(Regexp):
@property
def rule(self):
return None
class Email(Email):
def __call__(self, form, field):
if self.message is None:
self.message = field.gettext(u'Invalid email address.')
if field.data:
super(Email, self).__call__(form, field)
@property
def rule(self):
return {"email": True}
class IPAddress(IPAddress):
@property
def rule(self):
return None
class MacAddress(MacAddress):
@property
def rule(self):
return None
class URL(URL):
@property
def rule(self):
return {"url": True}
class UUID(UUID):
@property
def rule(self):
return None
class AnyOf(AnyOf):
@property
def rule(self):
return None
class NoneOf(NoneOf):
@property
def rule(self):
return None
| """Extensions to WTForms fields, widgets and validators."""
from cgi import escape
from wtforms.fields.core import SelectField
from wtforms.validators import EqualTo, Length, NumberRange, Optional, Required,\
Regexp, Email, IPAddress, MacAddress, URL, UUID, AnyOf, NoneOf
from wtforms.widgets.core import html_params, Select, HTMLString, Input
class Chosen(Select):
"""
Extends the Select widget using the Chosen jQuery plugin.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
html = [u'<select %s class="chzn-select">' % html_params(name=field.name, **kwargs)]
for val, label, selected in field.iter_choices():
html.append(self.render_option(val, label, selected))
html.append(u'</select>')
return HTMLString(u''.join(html))
@classmethod
def render_option(cls, value, label, selected, **kwargs):
options = dict(kwargs, value=value)
if selected:
options['selected'] = True
return HTMLString(u'<option %s>%s</option>' % (html_params(**options), escape(unicode(label))))
class TagInput(Input):
"""
Extends the Select widget using the Chosen jQuery plugin.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
kwargs['class'] = "tagbox"
if 'value' not in kwargs:
kwargs['value'] = field._value()
return HTMLString(u'<input %s>' % self.html_params(name=field.name, **kwargs))
class RelationSelectField(SelectField):
# TODO: Later...
pass
#
# Validators
#
class EqualTo(EqualTo):
@property
def rule(self):
return None
class Length(Length):
@property
def rule(self):
return None
class NumberRange(NumberRange):
@property
def rule(self):
return None
class Optional(Optional):
@property
def rule(self):
return None
class Required(Required):
@property
def rule(self):
return {"required": True}
class Regexp(Regexp):
@property
def rule(self):
return None
class Email(Email):
def __call__(self, form, field):
if self.message is None:
self.message = field.gettext(u'Invalid email address.')
if field.data:
super(Email, self).__call__(form, field)
@property
def rule(self):
return {"email": True}
class IPAddress(IPAddress):
@property
def rule(self):
return None
class MacAddress(MacAddress):
@property
def rule(self):
return None
class URL(URL):
@property
def rule(self):
return {"url": True}
class UUID(UUID):
@property
def rule(self):
return None
class AnyOf(AnyOf):
@property
def rule(self):
return None
class NoneOf(NoneOf):
@property
def rule(self):
return None
email = Email
equal_to = EqualTo
ip_address = IPAddress
mac_address = MacAddress
length = Length
number_range = NumberRange
optional = Optional
required = Required
regexp = Regexp
url = URL
any_of = AnyOf
none_of = NoneOf
| lgpl-2.1 | Python |
77a6c55d1beb984d1a2d939507aeeecfd2775c0f | sort groups by name | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/groups/fields.py | corehq/apps/groups/fields.py | from django import forms
from corehq.apps.groups.models import Group
class GroupField(forms.ChoiceField):
def __init__(self, domain, *args, **kwargs):
self.domain = domain
groups = sorted(Group.by_domain(self.domain), key=lambda g: g.name)
super(GroupField, self).__init__(
choices=[(group._id, group.display_name) for group in groups],
*args, **kwargs
)
| from django import forms
from corehq.apps.groups.models import Group
class GroupField(forms.ChoiceField):
def __init__(self, domain, *args, **kwargs):
self.domain = domain
groups = Group.by_domain(self.domain)
super(GroupField, self).__init__(
choices=[(group._id, group.display_name) for group in groups],
*args, **kwargs
)
| bsd-3-clause | Python |
ffab98b03588cef69ab11a10a440d02952661edf | Replace @ with . in soa form clean | OSU-Net/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder | cyder/cydns/soa/forms.py | cyder/cydns/soa/forms.py | from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
def clean(self, *args, **kwargs):
contact = self.cleaned_data['contact']
self.cleaned_data['contact'] = contact.replace('@', '.')
return super(SOAForm, self).clean(*args, **kwargs)
SOAAVForm = get_eav_form(SOAAV, SOA)
| from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
SOAAVForm = get_eav_form(SOAAV, SOA)
| bsd-3-clause | Python |
da7ff9ff9a294cc87aace24be9fa04dae94d17ca | Use standalone js | litchfield/django-autocomplete-light-selectize,litchfield/django-autocomplete-light-selectize | dal_selectize/widgets.py | dal_selectize/widgets.py | """Selectize widget implementation module."""
from dal.widgets import (
QuerySetSelectMixin,
Select,
SelectMultiple,
WidgetMixin
)
from django import forms
from django.utils import six
class SelectizeWidgetMixin(object):
"""Mixin for Selectize widgets."""
class Media:
"""Automatically include static files for the admin."""
css = {
'all': (
'autocomplete_light/vendor/selectize/dist/css/selectize.css',
'autocomplete_light/selectize.css',
)
}
js = (
'autocomplete_light/jquery.init.js',
'autocomplete_light/autocomplete.init.js',
'autocomplete_light/vendor/selectize/dist/js/standalone/selectize.min.js',
'autocomplete_light/selectize.js',
)
autocomplete_function = 'selectize'
class Selectize(SelectizeWidgetMixin, Select):
"""Selectize widget for regular choices."""
class SelectizeMultiple(SelectizeWidgetMixin, SelectMultiple):
"""SelectizeMultiple widget for regular choices."""
class ModelSelectize(QuerySetSelectMixin,
SelectizeWidgetMixin,
forms.Select):
"""Select widget for QuerySet choices and Selectize."""
class ModelSelectizeMultiple(QuerySetSelectMixin,
SelectizeWidgetMixin,
forms.SelectMultiple):
"""SelectMultiple widget for QuerySet choices and Selectize."""
class TagSelectize(WidgetMixin,
SelectizeWidgetMixin,
forms.SelectMultiple):
"""Selectize in tag mode."""
def build_attrs(self, *args, **kwargs):
"""Automatically set data-tags=1."""
attrs = super(TagSelectize, self).build_attrs(*args, **kwargs)
attrs.setdefault('data-tags', 1)
return attrs
def value_from_datadict(self, data, files, name):
"""Return a comma-separated list of options.
This is needed because Selectize uses a multiple select even in tag mode,
and the model field expects a comma-separated list of tags.
"""
values = super(TagSelectize, self).value_from_datadict(data, files, name)
return six.text_type(',').join(values)
| """Selectize widget implementation module."""
from dal.widgets import (
QuerySetSelectMixin,
Select,
SelectMultiple,
WidgetMixin
)
from django import forms
from django.utils import six
class SelectizeWidgetMixin(object):
"""Mixin for Selectize widgets."""
class Media:
"""Automatically include static files for the admin."""
css = {
'all': (
'autocomplete_light/vendor/selectize/dist/css/selectize.css',
'autocomplete_light/selectize.css',
)
}
js = (
'autocomplete_light/jquery.init.js',
'autocomplete_light/autocomplete.init.js',
'autocomplete_light/vendor/selectize/dist/js/selectize.min.js',
'autocomplete_light/selectize.js',
)
autocomplete_function = 'selectize'
class Selectize(SelectizeWidgetMixin, Select):
"""Selectize widget for regular choices."""
class SelectizeMultiple(SelectizeWidgetMixin, SelectMultiple):
"""SelectizeMultiple widget for regular choices."""
class ModelSelectize(QuerySetSelectMixin,
SelectizeWidgetMixin,
forms.Select):
"""Select widget for QuerySet choices and Selectize."""
class ModelSelectizeMultiple(QuerySetSelectMixin,
SelectizeWidgetMixin,
forms.SelectMultiple):
"""SelectMultiple widget for QuerySet choices and Selectize."""
class TagSelectize(WidgetMixin,
SelectizeWidgetMixin,
forms.SelectMultiple):
"""Selectize in tag mode."""
def build_attrs(self, *args, **kwargs):
"""Automatically set data-tags=1."""
attrs = super(TagSelectize, self).build_attrs(*args, **kwargs)
attrs.setdefault('data-tags', 1)
return attrs
def value_from_datadict(self, data, files, name):
"""Return a comma-separated list of options.
This is needed because Selectize uses a multiple select even in tag mode,
and the model field expects a comma-separated list of tags.
"""
values = super(TagSelectize, self).value_from_datadict(data, files, name)
return six.text_type(',').join(values)
| mit | Python |
bccdd482e838a29fe20e1f672e1aa9d288028177 | add extra column creation to create_table | loreguerra/bbt-chart | create_table.py | create_table.py | import psycopg2
from connect import connect_to_db
# create table for data
conn = connect_to_db()
cur = conn.cursor()
cur.execute("""CREATE TABLE BBT_CHART
(DATE DATE NOT NULL,
TEMP REAL NOT NULL,
CYCLE_DAY VARCHAR(4),
LH_TEST CHAR(1));""")
print 'BBT chart table created successfully'
conn.commit()
conn.close()
| import psycopg2
from connect import connect_to_db
# create table for data
conn = connect_to_db()
cur = conn.cursor()
cur.execute("""CREATE TABLE BBT_CHART
(DATE DATE NOT NULL,
TEMP REAL NOT NULL);""")
print 'BBT chart table created successfully'
conn.commit()
conn.close()
# add in values for cycle_begin, pos_lh
| mit | Python |
75c675b542726ea7be4ba39ef3ef1984a08f4349 | Fix imports | jpaalasm/zephyr-bt | scripts/read_from_device.py | scripts/read_from_device.py |
import serial
import platform
import zephyr
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.delayed_stream import DelayedRealTimeStream
from zephyr.message import MessagePayloadParser
from zephyr.protocol import BioHarnessProtocol
def callback(value_name, value):
if value_name == "acceleration":
print ["%010s" % ("%1.3f" % v) for v in value]
def main():
zephyr.configure_root_logger()
serial_port_dict = {"Darwin": "/dev/cu.BHBHT001931-iSerialPort1",
"Windows": 25}
serial_port = serial_port_dict[platform.system()]
ser = serial.Serial(serial_port)
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser(signal_packet_handler.handle_packet)
delayed_stream_thread = DelayedRealTimeStream(collector, [callback], 1.2)
protocol = BioHarnessProtocol(ser, payload_parser.handle_message)
protocol.enable_periodic_packets()
delayed_stream_thread.start()
protocol.run()
delayed_stream_thread.terminate()
delayed_stream_thread.join()
if __name__ == "__main__":
main()
|
import serial
import platform
import threading
import zephyr.message
import zephyr.protocol
import zephyr.signal
import zephyr.events
import zephyr.delayed_stream
import zephyr.testing
def callback(value_name, value):
if value_name == "acceleration":
print ["%010s" % ("%1.3f" % v) for v in value]
def main():
zephyr.configure_root_logger()
serial_port_dict = {"Darwin": "/dev/cu.BHBHT001931-iSerialPort1",
"Windows": 25}
serial_port = serial_port_dict[platform.system()]
ser = serial.Serial(serial_port)
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser(signal_packet_handler.handle_packet)
delayed_stream_thread = DelayedRealTimeStream(collector, [callback], 1.2)
protocol = zephyr.protocol.BioHarnessProtocol(ser, payload_parser.handle_message)
protocol.enable_periodic_packets()
delayed_stream_thread.start()
protocol.run()
stream_thread.terminate()
stream_thread.join()
if __name__ == "__main__":
main()
| bsd-2-clause | Python |
702e75fac9b7f009ef6fbfd0f36c8b8238e4ad57 | Fix settings.py to allow local_settings.py to define CONFIGURED=True | Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server | dashboard_server/settings.py | dashboard_server/settings.py | # Copyright (C) 2010 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of Launch Control.
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Launch Control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Launch Control. If not, see <http://www.gnu.org/licenses/>.
"""
Settings module suitable for development
"""
# CONFIGURATION
# =============
#
# To configure the server create local_settings.py and change the
# following line from `CONFIGURED = False' to `CONFIGURED = True'.
#
# Look at default_settings.py for explanation on what can be changed.
#
# When this is False a very simple configuration is created that allows
# you to run the server directly from the development environment.
try:
from local_settings import CONFIGURED
except ImportError:
CONFIGURED = False
# DO NOT CHANGE SETTINGS BELOW
# ============================
from default_settings import *
if not CONFIGURED:
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(BASE_DIR, 'database.db')
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
MEDIA_URL = '/site_media/'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
SECRET_KEY = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
try:
# You still might want this to configure email and
# administration stuff. I do this for development
from local_settings import *
except ImportError:
pass
else:
from local_settings import *
| # Copyright (C) 2010 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of Launch Control.
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Launch Control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Launch Control. If not, see <http://www.gnu.org/licenses/>.
"""
Settings module suitable for development
"""
# CONFIGURATION
# =============
#
# To configure the server create local_settings.py and change the
# following line from `CONFIGURED = False' to `CONFIGURED = True'.
#
# Look at default_settings.py for explanation on what can be changed.
#
# When this is False a very simple configuration is created that allows
# you to run the server directly from the development environment.
CONFIGURED = False
# DO NOT CHANGE SETTINGS BELOW
# ============================
from default_settings import *
if not CONFIGURED:
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(BASE_DIR, 'database.db')
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
MEDIA_URL = '/site_media/'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
SECRET_KEY = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
try:
# You still might want this to configure email and
# administration stuff. I do this for development
from local_settings import *
except ImportError:
pass
else:
from local_settings import *
| agpl-3.0 | Python |
2ebe4b4c281c6b604330b0ea250da41f0802717f | Fix for mismamed threshold parameter in allow desc | CitrineInformatics/python-citrination-client | citrination_client/views/descriptors/alloy_composition_descriptor.py | citrination_client/views/descriptors/alloy_composition_descriptor.py | from citrination_client.views.descriptors.descriptor import MaterialDescriptor
class AlloyCompositionDescriptor(MaterialDescriptor):
def __init__(self, key, balance_element, basis=100, threshold=None):
self.options = dict(balance_element=balance_element, basis=basis, threshold=threshold)
super(AlloyCompositionDescriptor, self).__init__(key, "Alloy composition")
| from citrination_client.views.descriptors.descriptor import MaterialDescriptor
class AlloyCompositionDescriptor(MaterialDescriptor):
def __init__(self, key, balance_element, basis=100, threshold=None):
self.options = dict(balance_element=balance_element, basis=basis, units=threshold)
super(AlloyCompositionDescriptor, self).__init__(key, "Alloy composition")
| apache-2.0 | Python |
e07b177f40892d4952d1206ae007a3740cd6a0c9 | fix merge error | DiCarloLab-Delft/PycQED_py3,DiCarloLab-Delft/PycQED_py3,DiCarloLab-Delft/PycQED_py3,QudevETH/PycQED_py3 | pycqed/analysis_v2/measurement_analysis.py | pycqed/analysis_v2/measurement_analysis.py | """
This is the file one imports for daily use.
This file should only contain import statements to import functions
from other files in the analysis_v2 module.
"""
# This snippet ensures all submodules get reloaded properly
from importlib import reload
import pycqed.analysis_v2.base_analysis as ba
reload(ba)
import pycqed.analysis_v2.simple_analysis as sa
reload(sa)
import pycqed.analysis_v2.timedomain_analysis as ta
reload(ta)
import pycqed.analysis_v2.readout_analysis as ra
reload(ra)
import pycqed.analysis_v2.syndrome_analysis as synda
reload(sa)
# only one of these two files should exist in the end
import pycqed.analysis_v2.cryo_scope_analysis as csa
reload(csa)
import pycqed.analysis_v2.distortions_analysis as da
import pycqed.analysis_v2.optimization_analysis as oa
reload(da)
import pycqed.analysis_v2.coherence_analysis as cs
reload(cs)
from pycqed.analysis_v2.base_analysis import *
from pycqed.analysis_v2.simple_analysis import (
Basic1DAnalysis, Basic2DAnalysis)
from pycqed.analysis_v2.timedomain_analysis import (
FlippingAnalysis, Intersect_Analysis, CZ_1QPhaseCal_Analysis,
Conditional_Oscillation_Analysis, Idling_Error_Rate_Analyisis,
Grovers_TwoQubitAllStates_Analysis)
from pycqed.analysis_v2.readout_analysis import Singleshot_Readout_Analysis, \
Multiplexed_Readout_Analysis
from pycqed.analysis_v2.syndrome_analysis import (
Single_Qubit_RoundsToEvent_Analysis, One_Qubit_Paritycheck_Analysis)
from pycqed.analysis_v2.cryo_scope_analysis import RamZFluxArc, SlidingPulses_Analysis
from pycqed.analysis_v2.distortions_analysis import Scope_Trace_analysis
from pycqed.analysis_v2.optimization_analysis import OptimizationAnalysis
from pycqed.analysis_v2.timing_cal_analysis import Timing_Cal_Flux_Coarse
from pycqed.analysis_v2.coherence_analysis import CoherenceTimesAnalysis, CoherenceTimesAnalysisSingle
| """
This is the file one imports for daily use.
This file should only contain import statements to import functions
from other files in the analysis_v2 module.
"""
# This snippet ensures all submodules get reloaded properly
from importlib import reload
import pycqed.analysis_v2.base_analysis as ba
reload(ba)
import pycqed.analysis_v2.simple_analysis as sa
reload(sa)
import pycqed.analysis_v2.timedomain_analysis as ta
reload(ta)
import pycqed.analysis_v2.readout_analysis as ra
reload(ra)
import pycqed.analysis_v2.syndrome_analysis as synda
reload(sa)
# only one of these two files should exist in the end
import pycqed.analysis_v2.cryo_scope_analysis as csa
reload(csa)
import pycqed.analysis_v2.cryo_scope_analysis_v2 as csa2
reload(csa2)
import pycqed.analysis_v2.distortions_analysis as da
import pycqed.analysis_v2.optimization_analysis as oa
reload(da)
import pycqed.analysis_v2.coherence_analysis as cs
reload(cs)
from pycqed.analysis_v2.base_analysis import *
from pycqed.analysis_v2.simple_analysis import (
Basic1DAnalysis, Basic2DAnalysis)
from pycqed.analysis_v2.timedomain_analysis import (
FlippingAnalysis, Intersect_Analysis, CZ_1QPhaseCal_Analysis,
Conditional_Oscillation_Analysis, Idling_Error_Rate_Analyisis,
Grovers_TwoQubitAllStates_Analysis)
from pycqed.analysis_v2.readout_analysis import Singleshot_Readout_Analysis, \
Multiplexed_Readout_Analysis
from pycqed.analysis_v2.syndrome_analysis import (
Single_Qubit_RoundsToEvent_Analysis, One_Qubit_Paritycheck_Analysis)
from pycqed.analysis_v2.cryo_scope_analysis import RamZFluxArc, SlidingPulses_Analysis
from pycqed.analysis_v2.distortions_analysis import Scope_Trace_analysis
from pycqed.analysis_v2.optimization_analysis import OptimizationAnalysis
from pycqed.analysis_v2.timing_cal_analysis import Timing_Cal_Flux_Coarse
from pycqed.analysis_v2.coherence_analysis import CoherenceTimesAnalysis, CoherenceTimesAnalysisSingle
| mit | Python |
1697a75b4de58b5b459e122d266999d11ae412cb | add new modules | QudevETH/PycQED_py3,DiCarloLab-Delft/PycQED_py3,DiCarloLab-Delft/PycQED_py3,DiCarloLab-Delft/PycQED_py3 | pycqed/analysis_v2/measurement_analysis.py | pycqed/analysis_v2/measurement_analysis.py | """
This is the file one imports for daily use.
This file should only contain import statements to import functions
from other files in the analysis_v2 module.
"""
# This snippet ensures all submodules get reloaded properly
from importlib import reload
import pycqed.analysis_v2.base_analysis as ba
reload(ba)
import pycqed.analysis_v2.simple_analysis as sa
reload(sa)
import pycqed.analysis_v2.timedomain_analysis as ta
reload(ta)
import pycqed.analysis_v2.readout_analysis as ra
reload(ra)
import pycqed.analysis_v2.syndrome_analysis as synda
reload(sa)
# only one of these two files should exist in the end
import pycqed.analysis_v2.cryo_scope_analysis as csa
reload(csa)
import pycqed.analysis_v2.distortions_analysis as da
import pycqed.analysis_v2.optimization_analysis as oa
reload(da)
import pycqed.analysis_v2.coherence_analysis as cs
reload(cs)
import pycqed.analysis_v2.spectroscopy_analysis as sa
reload(sa)
import pycqed.analysis_v2.dac_scan_analysis as da
reload(da)
from pycqed.analysis_v2.base_analysis import *
from pycqed.analysis_v2.simple_analysis import (
Basic1DAnalysis, Basic2DAnalysis)
from pycqed.analysis_v2.timedomain_analysis import (
FlippingAnalysis, Intersect_Analysis, CZ_1QPhaseCal_Analysis,
Conditional_Oscillation_Analysis, Idling_Error_Rate_Analyisis,
Grovers_TwoQubitAllStates_Analysis)
from pycqed.analysis_v2.readout_analysis import Singleshot_Readout_Analysis, \
Multiplexed_Readout_Analysis
from pycqed.analysis_v2.syndrome_analysis import (
Single_Qubit_RoundsToEvent_Analysis, One_Qubit_Paritycheck_Analysis)
from pycqed.analysis_v2.cryo_scope_analysis import RamZFluxArc, SlidingPulses_Analysis
from pycqed.analysis_v2.distortions_analysis import Scope_Trace_analysis
from pycqed.analysis_v2.optimization_analysis import OptimizationAnalysis
from pycqed.analysis_v2.timing_cal_analysis import Timing_Cal_Flux_Coarse
from pycqed.analysis_v2.coherence_analysis import CoherenceTimesAnalysis, CoherenceTimesAnalysisSingle
from pycqed.analysis_v2.spectroscopy_analysis import Spectroscopy, ResonatorSpectroscopy, VNA_analysis, complex_spectroscopy
from pycqed.analysis_v2.dac_scan_analysis import BasicDACvsFrequency | """
This is the file one imports for daily use.
This file should only contain import statements to import functions
from other files in the analysis_v2 module.
"""
# This snippet ensures all submodules get reloaded properly
from importlib import reload
import pycqed.analysis_v2.base_analysis as ba
reload(ba)
import pycqed.analysis_v2.simple_analysis as sa
reload(sa)
import pycqed.analysis_v2.timedomain_analysis as ta
reload(ta)
import pycqed.analysis_v2.readout_analysis as ra
reload(ra)
import pycqed.analysis_v2.syndrome_analysis as synda
reload(sa)
# only one of these two files should exist in the end
import pycqed.analysis_v2.cryo_scope_analysis as csa
reload(csa)
import pycqed.analysis_v2.distortions_analysis as da
import pycqed.analysis_v2.optimization_analysis as oa
reload(da)
import pycqed.analysis_v2.coherence_analysis as cs
reload(cs)
from pycqed.analysis_v2.base_analysis import *
from pycqed.analysis_v2.simple_analysis import (
Basic1DAnalysis, Basic2DAnalysis)
from pycqed.analysis_v2.timedomain_analysis import (
FlippingAnalysis, Intersect_Analysis, CZ_1QPhaseCal_Analysis,
Conditional_Oscillation_Analysis, Idling_Error_Rate_Analyisis,
Grovers_TwoQubitAllStates_Analysis)
from pycqed.analysis_v2.readout_analysis import Singleshot_Readout_Analysis, \
Multiplexed_Readout_Analysis
from pycqed.analysis_v2.syndrome_analysis import (
Single_Qubit_RoundsToEvent_Analysis, One_Qubit_Paritycheck_Analysis)
from pycqed.analysis_v2.cryo_scope_analysis import RamZFluxArc, SlidingPulses_Analysis
from pycqed.analysis_v2.distortions_analysis import Scope_Trace_analysis
from pycqed.analysis_v2.optimization_analysis import OptimizationAnalysis
from pycqed.analysis_v2.timing_cal_analysis import Timing_Cal_Flux_Coarse
from pycqed.analysis_v2.coherence_analysis import CoherenceTimesAnalysis, CoherenceTimesAnalysisSingle
| mit | Python |
f9b2e87e7b0899d930effd305a61d97c73877d74 | add a simple snappy compression test | cliqz-oss/keyvi,narekgharibyan/keyvi,hendrik-cliqz/keyvi,cliqz/keyvi,hendrik-cliqz/keyvi,cliqz-oss/keyvi,hendrikmuhs/keyvi,narekgharibyan/keyvi,narekgharibyan/keyvi,cliqz-oss/keyvi,hendrik-cliqz/keyvi,hendrikmuhs/keyvi,cliqz/keyvi,hendrikmuhs/keyvi,cliqz-oss/keyvi,cliqz/keyvi,cliqz/keyvi,hendrik-cliqz/keyvi,hendrikmuhs/keyvi,narekgharibyan/keyvi | pykeyvi/tests/json/json_dictionary_test.py | pykeyvi/tests/json/json_dictionary_test.py | # -*- coding: utf-8 -*-
# Usage: py.test tests
import contextlib
import os
import pykeyvi
import sys
import os
root = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(root, "../"))
from test_tools import tmp_dictionary
def test_simple():
c = pykeyvi.JsonDictionaryCompiler()
c.Add("abc", '{"a" : 2}')
c.Add("abd", '{"a" : 3}')
with tmp_dictionary(c, 'simple_json.kv') as d:
assert len(d) == 2
assert d["abc"].GetValueAsString() == '{"a":2}'
assert d["abd"].GetValueAsString() == '{"a":3}'
def test_simple_zlib():
c = pykeyvi.JsonDictionaryCompiler(50000000, {'compression': 'z', 'compression_threshold': '0'})
c.Add("abc", '{"a" : 2}')
c.Add("abd", '{"a" : 3}')
with tmp_dictionary(c, 'simple_json_z.kv') as d:
assert len(d) == 2
assert d["abc"].GetValueAsString() == '{"a":2}'
assert d["abd"].GetValueAsString() == '{"a":3}'
m = d.GetStatistics()['Value Store']
assert m['__compression'] == "zlib"
assert m['__compression_threshold'] == "0"
def test_simple_snappy():
c = pykeyvi.JsonDictionaryCompiler(50000000, {'compression': 'snappy', 'compression_threshold': '0'})
c.Add("abc", '{"a" : 2}')
c.Add("abd", '{"a" : 3}')
with tmp_dictionary(c, 'simple_json_snappy.kv') as d:
assert len(d) == 2
assert d["abc"].GetValueAsString() == '{"a":2}'
assert d["abd"].GetValueAsString() == '{"a":3}'
m = d.GetStatistics()['Value Store']
assert m['__compression'] == "snappy"
assert m['__compression_threshold'] == "0" | # -*- coding: utf-8 -*-
# Usage: py.test tests
import contextlib
import os
import pykeyvi
import sys
import os
root = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(root, "../"))
from test_tools import tmp_dictionary
def test_simple():
c = pykeyvi.JsonDictionaryCompiler()
c.Add("abc", '{"a" : 2}')
c.Add("abd", '{"a" : 3}')
with tmp_dictionary(c, 'simple_json.kv') as d:
assert len(d) == 2
assert d["abc"].GetValueAsString() == '{"a":2}'
assert d["abd"].GetValueAsString() == '{"a":3}'
def test_simple_zlib():
c = pykeyvi.JsonDictionaryCompiler(50000000, {'compression': 'z', 'compression_threshold': '0'})
c.Add("abc", '{"a" : 2}')
c.Add("abd", '{"a" : 3}')
with tmp_dictionary(c, 'simple_json_z.kv') as d:
assert len(d) == 2
assert d["abc"].GetValueAsString() == '{"a":2}'
assert d["abd"].GetValueAsString() == '{"a":3}'
m = d.GetStatistics()['Value Store']
assert m['__compression'] == "zlib"
assert m['__compression_threshold'] == "0"
| apache-2.0 | Python |
82f256989559b95a0cd5ba318a95f7c66945436a | Update __init__.py | tomchristie/django-rest-framework,tomchristie/django-rest-framework,tomchristie/django-rest-framework | rest_framework/__init__.py | rest_framework/__init__.py | r"""
______ _____ _____ _____ __
| ___ \ ___/ ___|_ _| / _| | |
| |_/ / |__ \ `--. | | | |_ _ __ __ _ _ __ ___ _____ _____ _ __| |__
| /| __| `--. \ | | | _| '__/ _` | '_ ` _ \ / _ \ \ /\ / / _ \| '__| |/ /
| |\ \| |___/\__/ / | | | | | | | (_| | | | | | | __/\ V V / (_) | | | <
\_| \_\____/\____/ \_/ |_| |_| \__,_|_| |_| |_|\___| \_/\_/ \___/|_| |_|\_|
"""
__title__ = 'Django REST framework'
__version__ = '3.10.3'
__author__ = 'Tom Christie'
__license__ = 'BSD 3-Clause'
__copyright__ = 'Copyright 2011-2019 Encode OSS Ltd'
# Version synonym
VERSION = __version__
# Header encoding (see RFC5987)
HTTP_HEADER_ENCODING = 'iso-8859-1'
# Default datetime input and output formats
ISO_8601 = 'iso-8601'
default_app_config = 'rest_framework.apps.RestFrameworkConfig'
class RemovedInDRF311Warning(DeprecationWarning):
pass
class RemovedInDRF312Warning(PendingDeprecationWarning):
pass
| r"""
______ _____ _____ _____ __
| ___ \ ___/ ___|_ _| / _| | |
| |_/ / |__ \ `--. | | | |_ _ __ __ _ _ __ ___ _____ _____ _ __| |__
| /| __| `--. \ | | | _| '__/ _` | '_ ` _ \ / _ \ \ /\ / / _ \| '__| |/ /
| |\ \| |___/\__/ / | | | | | | | (_| | | | | | | __/\ V V / (_) | | | <
\_| \_\____/\____/ \_/ |_| |_| \__,_|_| |_| |_|\___| \_/\_/ \___/|_| |_|\_|
"""
__title__ = 'Django REST framework'
__version__ = '3.10.3'
__author__ = 'Tom Christie'
__license__ = 'BSD 2-Clause'
__copyright__ = 'Copyright 2011-2019 Encode OSS Ltd'
# Version synonym
VERSION = __version__
# Header encoding (see RFC5987)
HTTP_HEADER_ENCODING = 'iso-8859-1'
# Default datetime input and output formats
ISO_8601 = 'iso-8601'
default_app_config = 'rest_framework.apps.RestFrameworkConfig'
class RemovedInDRF311Warning(DeprecationWarning):
pass
class RemovedInDRF312Warning(PendingDeprecationWarning):
pass
| bsd-2-clause | Python |
cd7ca8efb30a2e79a3676eda67d372a3b2486341 | Fix shim.py->plugin_init() to retain JSON type items as str and not dict | foglamp/FogLAMP,foglamp/FogLAMP,foglamp/FogLAMP,foglamp/FogLAMP | python/foglamp/plugins/common/shim/shim.py | python/foglamp/plugins/common/shim/shim.py | # -*- coding: utf-8 -*-
# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END
"""shim layer between Python and C++"""
import sys
import json
import logging
from foglamp.common import logger
_LOGGER = logger.setup(__name__, level=logging.WARN)
_plugin = None
_LOGGER.info("Loading shim layer for python plugin '{}' ".format(sys.argv[1]))
def _plugin_obj():
plugin = sys.argv[1]
plugin_module_path = "foglamp.plugins.south"
try:
import_file_name = "{path}.{dir}.{file}".format(path=plugin_module_path, dir=plugin, file=plugin)
_plugin = __import__(import_file_name, fromlist=[''])
except ImportError as ex:
_LOGGER.exception("Plugin %s import problem from path %s. %s", plugin, plugin_module_path, str(ex))
except Exception as ex:
_LOGGER.exception("Failed to load plugin. %s", str(ex))
return _plugin
_plugin = _plugin_obj()
def plugin_info():
_LOGGER.info("plugin_info called")
handle = _plugin.plugin_info()
handle['config'] = json.dumps(handle['config'])
return handle
def plugin_init(config):
_LOGGER.info("plugin_init called")
handle = _plugin.plugin_init(json.loads(config))
# South C server sends "config" argument as string in which all JSON type items' components,
# 'default' and 'value', gets converted to dict during json.loads(). Hence we need to restore
# them to str, which is the required format for configuration items.
revised_handle = {}
for k, v in handle.items():
if v['type'] == 'JSON':
if isinstance(v['default'], dict):
v['default'] = json.dumps(v['default'])
if isinstance(v['value'], dict):
v['value'] = json.dumps(v['value'])
revised_handle.update({k: v})
return revised_handle
def plugin_poll(handle):
reading = _plugin.plugin_poll(handle)
return reading
def plugin_reconfigure(handle, new_config):
_LOGGER.info("plugin_reconfigure")
new_handle = _plugin.plugin_reconfigure(handle, json.loads(new_config))
return new_handle
def plugin_shutdown(handle):
_LOGGER.info("plugin_shutdown")
return _plugin.plugin_shutdown(handle)
def plugin_start(handle):
_LOGGER.info("plugin_start")
return _plugin.plugin_start(handle)
def plugin_register_ingest(handle, callback, ingest_ref):
_LOGGER.info("plugin_register_ingest")
return _plugin.plugin_register_ingest(handle, callback, ingest_ref)
| # -*- coding: utf-8 -*-
# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END
"""shim layer between Python and C++"""
import sys
import json
import logging
from foglamp.common import logger
_LOGGER = logger.setup(__name__, level=logging.WARN)
_plugin = None
_LOGGER.info("Loading shim layer for python plugin '{}' ".format(sys.argv[1]))
def _plugin_obj():
plugin = sys.argv[1]
plugin_module_path = "foglamp.plugins.south"
try:
import_file_name = "{path}.{dir}.{file}".format(path=plugin_module_path, dir=plugin, file=plugin)
_plugin = __import__(import_file_name, fromlist=[''])
except ImportError as ex:
_LOGGER.exception("Plugin %s import problem from path %s. %s", plugin, plugin_module_path, str(ex))
except Exception as ex:
_LOGGER.exception("Failed to load plugin. %s", str(ex))
return _plugin
_plugin = _plugin_obj()
def plugin_info():
_LOGGER.info("plugin_info called")
handle = _plugin.plugin_info()
handle['config'] = json.dumps(handle['config'])
return handle
def plugin_init(config):
_LOGGER.info("plugin_init called")
handle = _plugin.plugin_init(json.loads(config))
return handle
def plugin_poll(handle):
reading = _plugin.plugin_poll(handle)
return reading
def plugin_reconfigure(handle, new_config):
_LOGGER.info("plugin_reconfigure")
new_handle = _plugin.plugin_reconfigure(handle, json.loads(new_config))
return new_handle
def plugin_shutdown(handle):
_LOGGER.info("plugin_shutdown")
return _plugin.plugin_shutdown(handle)
def plugin_start(handle):
_LOGGER.info("plugin_start")
return _plugin.plugin_start(handle)
def plugin_register_ingest(handle, callback, ingest_ref):
_LOGGER.info("plugin_register_ingest")
return _plugin.plugin_register_ingest(handle, callback, ingest_ref)
| apache-2.0 | Python |
134331f15b98046919329299d3497da3f72dd1b2 | Update function description. | lambdadatascience/lsh | spark_lsh.py | spark_lsh.py | import argparse
import os.path
import functools
import numpy as np
from pyspark.mllib.linalg import SparseVector
from pyspark import SparkContext, SparkConf
import text_helpers
#TODO: implement
def getHashFunctions(n=200):
""" generates n number of hash functions
"""
pass
#TODO: implement
def getStopWords():
""" returns a list of stop words
"""
#TODO: use NLTK to get a list of stopwords
pass
#TODO: implement
def minHash(text, hash):
""" Returns min hash value of all hashes for a given text
Args:
data (RDD)
hash (function)
Returns:
int: min hash value for entire data set
"""
pass
def run(fileName, n_hashes, n_buckets):
""" Starts the main LSH process.
Args:
fileName (string): path of text file to read
n_hashes (int): number of hash functions to generate
n_buckets (int): number of buckets to use
Returns:
Vector: buckets of minhash values
"""
sc = SparkContext(conf = SparkConf())
hashes = sc.broacast(getHashFunctions(n_hashes))
stopWords = sc.broadcast(getStopWords())
text = sc.textFile(fileName)
stopWords = sc.parallelize(stopWords.value)
cleanData = text.map(removePunctuation).subtract(stopWords).cache()
#TODO: convert to n-grams
#TODO: get min-hash values -> total of n_hashes runs. Implement using a
# partial function from functools
#TODO: return a vector representing buckets of minhash values
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = 'Spark LSH',
epilog = 'LSH', add_help = 'How to use',
prog = 'python spark-driver.py <arguments>')
parser.add_argument("-i", "--input", required = True,
help = "Input directory of text files.")
# Optional parameters.
parser.add_argument("-h", "--hashes", type = int, default = 200,
help = "Number of hash functions to use. [DEFAULT: 200]")
parser.add_argument("-b", "--buckets", type = int, default = 1000,
help = "Number of buckets to use. [DEFAULT: 1000]")
args = vars(parser.parse_args())
n_hashes,n_buckets = args['hashes'], args['buckets']
baseDir = os.path.join(args['input'])
inputPath = os.path.join('<path/to/document>')
fileName = os.path.join(baseDir, inputPath)
lsh.run(fileName, n_hashes, n_buckets)
| import argparse
import os.path
import functools
import numpy as np
from pyspark.mllib.linalg import SparseVector
from pyspark import SparkContext, SparkConf
import text_helpers
#TODO: implement
def getHashFunctions(n=200):
""" generates n number of hash functions
"""
pass
#TODO: implement
def getStopWords():
""" returns a list of stop words
"""
#TODO: use NLTK to get a list of stopwords
pass
#TODO: implement
def minHash(text, hash):
""" Returns min hash value of all hashes for a given text
Args:
data (RDD)
hash (function)
Returns:
int: min hash value for entire data set
"""
pass
def run(fileName, n_hashes, n_buckets):
""" Starts the main LSH process.
Args:
data (RDD): RDD of lines of text
hashes (list): a list of hash values
n_buckets (int): number of buckets to use
Returns:
Vector: buckets of minhash values
"""
sc = SparkContext(conf = SparkConf())
hashes = sc.broacast(getHashFunctions(n_hashes))
stopWords = sc.broadcast(getStopWords())
text = sc.textFile(fileName)
stopWords = sc.parallelize(stopWords.value)
cleanData = text.map(removePunctuation).subtract(stopWords).cache()
#TODO: convert to n-grams
#TODO: get min-hash values -> total of n_hashes runs. Implement using a
# partial function from functools
#TODO: return a vector representing buckets of minhash values
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = 'Spark LSH',
epilog = 'LSH', add_help = 'How to use',
prog = 'python spark-driver.py <arguments>')
parser.add_argument("-i", "--input", required = True,
help = "Input directory of text files.")
# Optional parameters.
parser.add_argument("-h", "--hashes", type = int, default = 200,
help = "Number of hash functions to use. [DEFAULT: 200]")
parser.add_argument("-b", "--buckets", type = int, default = 1000,
help = "Number of buckets to use. [DEFAULT: 1000]")
args = vars(parser.parse_args())
n_hashes,n_buckets = args['hashes'], args['buckets']
baseDir = os.path.join(args['input'])
inputPath = os.path.join('<path/to/document>')
fileName = os.path.join(baseDir, inputPath)
lsh.run(fileName, n_hashes, n_buckets)
| mit | Python |
5e072600ef6724252664b92c419695c971064b1f | Remove get_user_model from compat | forcityplatform/django-mama-cas,forcityplatform/django-mama-cas,harlov/django-mama-cas,harlov/django-mama-cas,orbitvu/django-mama-cas,orbitvu/django-mama-cas,jbittel/django-mama-cas,jbittel/django-mama-cas | mama_cas/compat.py | mama_cas/compat.py | from django.conf import settings
__all__ = ['user_model', 'SiteProfileNotAvailable']
# Django >= 1.5 uses AUTH_USER_MODEL to specify the currently active
# User model. Previous versions of Django do not have this setting
# and use the built-in User model.
#
# This is not needed when support for Django 1.4 is dropped.
user_model = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# The SiteProfileNotAvailable exception is raised from get_profile()
# when AUTH_PROFILE_MODULE is unavailable or invalid. With the
# arrival of custom User models in Django 1.5 this exception was
# deprecated, and removed entirely in Django 1.7.
#
# This is not needed when support for Django <= 1.6 is dropped.
try:
from django.contrib.auth.models import SiteProfileNotAvailable
except ImportError: # pragma: no cover
class SiteProfileNotAvailable(Exception):
pass
| from django.conf import settings
__all__ = ['user_model', 'get_user_model', 'SiteProfileNotAvailable']
# Django >= 1.5 uses AUTH_USER_MODEL to specify the currently active
# User model. Previous versions of Django do not have this setting
# and use the built-in User model.
#
# This is not needed when support for Django 1.4 is dropped.
user_model = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# In Django >= 1.5 get_user_model() returns the currently active
# User model. Previous versions of Django have no concept of custom
# User models and reference User directly.
#
# This is not needed when support for Django 1.4 is dropped.
try:
from django.contrib.auth import get_user_model
except ImportError: # pragma: no cover
from django.contrib.auth.models import User
get_user_model = lambda: User
# The SiteProfileNotAvailable exception is raised from get_profile()
# when AUTH_PROFILE_MODULE is unavailable or invalid. With the
# arrival of custom User models in Django 1.5 this exception was
# deprecated, and removed entirely in Django 1.7.
#
# This is not needed when support for Django <= 1.6 is dropped.
try:
from django.contrib.auth.models import SiteProfileNotAvailable
except ImportError: # pragma: no cover
class SiteProfileNotAvailable(Exception):
pass
| bsd-3-clause | Python |
a2489816e3ffeb14b63a4a110e950231913dd11d | remove unused util functions | yunstanford/carbon-index | carbon_index/utils.py | carbon_index/utils.py | import fnmatch
import re
def expand_braces(orig):
"""
Brace expanding patch for python3 borrowed from:
https://bugs.python.org/issue9584
"""
r = r'.*(\{.+?[^\\]\})'
p = re.compile(r)
s = orig[:]
res = list()
m = p.search(s)
if m is not None:
sub = m.group(1)
open_brace = s.find(sub)
close_brace = open_brace + len(sub) - 1
if sub.find(',') != -1:
for pat in sub.strip('{}').split(','):
res.extend(
expand_braces(s[:open_brace] + pat + s[close_brace + 1:])
)
else:
res.extend(
expand_braces(s[:open_brace] + sub.replace('}', '\\}') + s[close_brace + 1:])
)
else:
res.append(s.replace('\\}', '}'))
return list(set(res))
| import fnmatch
import re
def _deduplicate(entries):
yielded = set()
for entry in entries:
if entry not in yielded:
yielded.add(entry)
yield entry
def match_entries(entries, pattern):
# First we check for pattern variants (ie. {foo,bar}baz = foobaz or barbaz)
matching = []
for variant in expand_braces(pattern):
matching.extend(fnmatch.filter(entries, variant))
return list(_deduplicate(matching))
def expand_braces(orig):
"""
Brace expanding patch for python3 borrowed from:
https://bugs.python.org/issue9584
"""
r = r'.*(\{.+?[^\\]\})'
p = re.compile(r)
s = orig[:]
res = list()
m = p.search(s)
if m is not None:
sub = m.group(1)
open_brace = s.find(sub)
close_brace = open_brace + len(sub) - 1
if sub.find(',') != -1:
for pat in sub.strip('{}').split(','):
res.extend(
expand_braces(s[:open_brace] + pat + s[close_brace + 1:])
)
else:
res.extend(
expand_braces(s[:open_brace] + sub.replace('}', '\\}') + s[close_brace + 1:])
)
else:
res.append(s.replace('\\}', '}'))
return list(set(res))
| mit | Python |
941bc50c74dd01c48645c2e490e0343998157fd4 | Correct import. | skk/eche | eche/tests/__init__.py | eche/tests/__init__.py |
from eche.printer import print_str
from eche.reader import read_str
from eche.eche_types import Node, EcheTypeBase
from eche.eval import eval_ast
def print_str_and_read_str_wrapper(test_input, expected=None):
if expected is None:
expected = test_input
val = read_str(test_input)
actual = print_str(val)
result = actual == expected
return result
def eval_ast_and_read_str(test_input, env, expected_value):
ast = read_str(test_input)
if not isinstance(expected_value, EcheTypeBase):
expected_value = Node(data=expected_value)
return eval_ast(ast, env) == expected_value
|
from eche.printer import print_str
from eche.reader import read_str
from eche_types import Node, EcheTypeBase
from eche.eval import eval_ast
def print_str_and_read_str_wrapper(test_input, expected=None):
if expected is None:
expected = test_input
val = read_str(test_input)
actual = print_str(val)
result = actual == expected
return result
def eval_ast_and_read_str(test_input, env, expected_value):
ast = read_str(test_input)
if not isinstance(expected_value, EcheTypeBase):
expected_value = Node(data=expected_value)
return eval_ast(ast, env) == expected_value
| mit | Python |
92b91ee73bf29b19711cce9c39e25b4dfb73606f | add more content in generated pages | radomd92/botjagwar,radomd92/botjagwar | category_generator.py | category_generator.py | #!/usr/bin/python3.6
import pywikibot
# Maps a root part-of-speech category to the subcategory name templates
# that should exist for each language on the Malagasy Wiktionary.
CATEGORIES = {
    "Mpamaritra anarana": ["Endri-pamaritra anarana"],
    "Anarana iombonana": ["Endrik'anarana"],
    "Matoanteny": ["Endriky ny matoanteny"],
}
# Reuse a single Site object for every category lookup and page write.
wiki = pywikibot.Site('mg', 'wiktionary')
languages = pywikibot.Category(wiki, 'fiteny')
for language in languages.subcategories():
    print('>>>>>', language, '<<<<<')
    # Drop the namespace prefix from the category title, keeping the language name.
    language_name = language.title().split(':')[1]
    for root, subcat_titles in CATEGORIES.items():
        for subcat_element in subcat_titles:
            subcat_title = "%s amin'ny teny %s" % (subcat_element, language_name)
            subcat = pywikibot.Category(wiki, subcat_title)
            if not subcat.isEmptyCategory():
                # Only create a category page when the category already has members.
                print('sokajy misy zavatra')
                content = '{{abidy}}\n'
                content += "* [[:mg:w:%s|%s]] eo amin'i [[:mg:|Wikipedia]]\n" % (subcat_element, subcat_element)
                content += "* [[:mg:w:fiteny %s|Fiteny %s]] eo amin'i [[:mg:|Wikipedia]]\n" % (language_name, language_name)
                content += '[[sokajy:%s|%s]]\n' % (subcat_element, language_name[0])
                content += '[[sokajy:%s]]\n' % language_name
                subcat.put(content, "Mamorona zanatsokajy ho an'ny karazanteny")
            else:
                print(subcat_title, ': sokajy babangoana')
| import pywikibot
CATEGORIES = {
"Mpamaritra anarana": ["Endri-pamaritra anarana"],
"Anarana iombonana": ["Endrik'anarana"],
"Matoanteny": ["Endriky ny matoanteny"],
}
languages = pywikibot.Category(pywikibot.Site('mg', 'wiktionary'), 'fiteny')
for language in languages.subcategories():
print('>>>>>', language, '<<<<<')
language_name = language.title().split(':')[1]
for root, subcat_titles in CATEGORIES.items():
for subcat_element in subcat_titles:
subcat_title = "%s amin'ny teny %s" % (subcat_element, language_name)
subcat = pywikibot.Category(pywikibot.Site('mg', 'wiktionary'), subcat_title)
if not subcat.isEmptyCategory():
print('sokajy misy zavatra')
content = '[[sokajy:%s]]\n' % subcat_element
content += '[[sokajy:%s]]' % language_name
subcat.put(content, 'sokajy vaovao')
else:
print('sokajy babangoana') | mit | Python |
a52bb3704f686e0d36896f12951bbf2aa521c89c | add valid_until field to CouponGenerationForm | byteweaver/django-coupons,byteweaver/django-coupons | coupons/forms.py | coupons/forms.py | from django import forms
from django.contrib.admin import widgets
from django.utils.translation import ugettext_lazy as _
from .models import Coupon
from .settings import COUPON_TYPES
class CouponGenerationForm(forms.Form):
    """Admin form used to create a batch of coupons in one step."""
    quantity = forms.IntegerField(label=_("Quantity"))
    value = forms.IntegerField(label=_("Value"))
    type = forms.ChoiceField(label=_("Type"), choices=COUPON_TYPES)
    # Optional expiry entered via the admin split date/time widget;
    # an empty value means the generated coupons never expire.
    valid_until = forms.DateTimeField(label=_("Valid until"), required=False, widget=widgets.AdminSplitDateTime(),
                                      help_text=_("Leave empty for coupons that never expire"))
class CouponForm(forms.Form):
    """Form for redeeming a coupon code.

    Accepts optional ``user`` and ``types`` keyword arguments:
    ``user`` restricts redemption to coupons bound to that account,
    ``types`` restricts which coupon types are accepted.
    """
    code = forms.CharField(label=_("Coupon code"))
    def __init__(self, *args, **kwargs):
        self.user = None
        self.types = None
        # Pop the custom kwargs before delegating to forms.Form.__init__,
        # which does not accept them.
        if 'user' in kwargs:
            self.user = kwargs['user']
            del kwargs['user']
        if 'types' in kwargs:
            self.types = kwargs['types']
            del kwargs['types']
        super(CouponForm, self).__init__(*args, **kwargs)
    def clean_code(self):
        """Validate the submitted code; stashes the Coupon on ``self.coupon``."""
        code = self.cleaned_data['code']
        try:
            coupon = Coupon.objects.get(code=code)
        except Coupon.DoesNotExist:
            raise forms.ValidationError(_("This code is not valid."))
        self.coupon = coupon
        # Reject codes that are already used, bound to a different user,
        # of a type not allowed here, or past their expiry.
        if coupon.redeemed_at is not None:
            raise forms.ValidationError(_("This code has already been used."))
        if coupon.user is not None and coupon.user != self.user:
            raise forms.ValidationError(_("This code is not valid for your account."))
        if self.types is not None and coupon.type not in self.types:
            raise forms.ValidationError(_("This code is not meant to be used here."))
        if coupon.expired():
            raise forms.ValidationError(_("This code is expired."))
        return code
| from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Coupon
from .settings import COUPON_TYPES
class CouponGenerationForm(forms.Form):
quantity = forms.IntegerField(label=_("Quantity"))
value = forms.IntegerField(label=_("Value"))
type = forms.ChoiceField(label=_("Type"), choices=COUPON_TYPES)
class CouponForm(forms.Form):
code = forms.CharField(label=_("Coupon code"))
def __init__(self, *args, **kwargs):
self.user = None
self.types = None
if 'user' in kwargs:
self.user = kwargs['user']
del kwargs['user']
if 'types' in kwargs:
self.types = kwargs['types']
del kwargs['types']
super(CouponForm, self).__init__(*args, **kwargs)
def clean_code(self):
code = self.cleaned_data['code']
try:
coupon = Coupon.objects.get(code=code)
except Coupon.DoesNotExist:
raise forms.ValidationError(_("This code is not valid."))
self.coupon = coupon
if coupon.redeemed_at is not None:
raise forms.ValidationError(_("This code has already been used."))
if coupon.user is not None and coupon.user != self.user:
raise forms.ValidationError(_("This code is not valid for your account."))
if self.types is not None and coupon.type not in self.types:
raise forms.ValidationError(_("This code is not meant to be used here."))
if coupon.expired():
raise forms.ValidationError(_("This code is expired."))
return code
| bsd-3-clause | Python |
0acc59f7fad7ff183e3e97af40a0832c3debe72f | improve test to use non-standard delimiter | jubatus/jubakit | jubakit/test/loader/test_csv.py | jubakit/test/loader/test_csv.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase
from tempfile import NamedTemporaryFile as TempFile
from jubakit.loader.csv import CSVLoader
class CSVLoaderTest(TestCase):
    """Tests for jubakit.loader.csv.CSVLoader."""
    def test_simple(self):
        # Default settings: header row present, comma delimiter,
        # every value exposed as a string keyed by its header name.
        with TempFile() as f:
            f.write("k1,\"k2\",k3\n1,2,3\n4,5,6".encode('utf-8'))
            f.flush()
            loader = CSVLoader(f.name)
            lines = 0
            for row in loader:
                lines += 1
                self.assertEqual(set(['k1','k2','k3']), set(row.keys()))
                if row['k1'] == '1':
                    self.assertEqual('2', row['k2'])
                    self.assertEqual('3', row['k3'])
                elif row['k1'] == '4':
                    self.assertEqual('5', row['k2'])
                    self.assertEqual('6', row['k3'])
                else:
                    self.fail('unexpected row')
            self.assertEqual(2, lines)
    def test_guess_header(self):
        # Non-standard '|' delimiter with an explicit header row.
        with TempFile() as f:
            f.write("k1|k2|k3\n1|2|3".encode())
            f.flush()
            loader = CSVLoader(f.name, fieldnames=True, delimiter='|')
            self.assertEqual([{'k1': '1', 'k2': '2', 'k3': '3'}], list(loader))
    def test_noheader(self):
        # No header row: columns are auto-named c0, c1, c2.
        with TempFile() as f:
            f.write("1|\"2\"|3\n\"4\"|5|\"6\"".encode('utf-8'))
            f.flush()
            loader = CSVLoader(f.name, False, delimiter='|')
            lines = 0
            for row in loader:
                lines += 1
                self.assertEqual(set(['c0','c1','c2']), set(row.keys()))
                if row['c0'] == '1':
                    self.assertEqual('2', row['c1'])
                    self.assertEqual('3', row['c2'])
                elif row['c0'] == '4':
                    self.assertEqual('5', row['c1'])
                    self.assertEqual('6', row['c2'])
                else:
                    self.fail('unexpected row')
            self.assertEqual(2, lines)
    def test_cp932(self):
        # Non-UTF-8 encoding (cp932 / Shift_JIS) with a header row.
        with TempFile() as f:
            f.write("v1,v2\nテスト1,テスト2\n".encode('cp932'))
            f.flush()
            loader = CSVLoader(f.name, None, 'cp932', delimiter=',')
            lines = 0
            for row in loader:
                lines += 1
                self.assertEqual('テスト1', row['v1'])
                self.assertEqual('テスト2', row['v2'])
            self.assertEqual(1, lines)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase
from tempfile import NamedTemporaryFile as TempFile
from jubakit.loader.csv import CSVLoader
class CSVLoaderTest(TestCase):
def test_simple(self):
with TempFile() as f:
f.write("k1,\"k2\",k3\n1,2,3\n4,5,6".encode('utf-8'))
f.flush()
loader = CSVLoader(f.name)
for row in loader:
self.assertEqual(set(['k1','k2','k3']), set(row.keys()))
if row['k1'] == '1':
self.assertEqual('2', row['k2'])
self.assertEqual('3', row['k3'])
elif row['k1'] == '4':
self.assertEqual('5', row['k2'])
self.assertEqual('6', row['k3'])
else:
self.fail('unexpected row')
def test_guess_header(self):
with TempFile() as f:
f.write("k1,k2,k3\n1,2,3".encode())
f.flush()
loader = CSVLoader(f.name, fieldnames=True)
self.assertEqual([{'k1': '1', 'k2': '2', 'k3': '3'}], list(loader))
def test_noheader(self):
with TempFile() as f:
f.write("1,\"2\",3\n\"4\",5,\"6\"".encode('utf-8'))
f.flush()
loader = CSVLoader(f.name, False)
for row in loader:
self.assertEqual(set(['c0','c1','c2']), set(row.keys()))
if row['c0'] == '1':
self.assertEqual('2', row['c1'])
self.assertEqual('3', row['c2'])
elif row['c0'] == '4':
self.assertEqual('5', row['c1'])
self.assertEqual('6', row['c2'])
else:
self.fail('unexpected row')
def test_cp932(self):
with TempFile() as f:
f.write("テスト1,テスト2".encode('cp932'))
f.flush()
loader = CSVLoader(f.name, None, 'cp932')
for row in loader:
self.assertEqual('テスト1', row['c0'])
self.assertEqual('テスト2', row['c1'])
| mit | Python |
417ce074b6874f98d2f34ba82994fdcd02d98794 | Make the Spacer widget accept a background color (#1365) | tych0/qtile,soulchainer/qtile,ramnes/qtile,zordsdavini/qtile,qtile/qtile,zordsdavini/qtile,tych0/qtile,qtile/qtile,ramnes/qtile,soulchainer/qtile | libqtile/widget/spacer.py | libqtile/widget/spacer.py | # Copyright (c) 2008, 2010 Aldo Cortesi
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2012 Tim Neumann
# Copyright (c) 2012 Craig Barnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Adi Sieker
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .. import bar
from . import base
class Spacer(base._Widget):
    """Just an empty space on the bar

    Often used with length equal to bar.STRETCH to push bar widgets to the
    right or bottom edge of the screen.

    Parameters
    ==========
    length :
        Length of the widget. Can be either ``bar.STRETCH`` or a length in
        pixels.
    width :
        DEPRECATED, same as ``length``.
    """
    orientations = base.ORIENTATION_BOTH
    # Configurable options merged into the widget by add_defaults().
    defaults = [
        ("background", None, "Widget background color")
    ]
    def __init__(self, length=bar.STRETCH, width=None, **config):
        """Create the spacer, honouring the deprecated ``width`` alias.
        """
        # 'width' was replaced by 'length' since the widget can be installed in
        # vertical bars
        if width is not None:
            base.deprecated('width kwarg or positional argument is '
                            'deprecated. Please use length.')
            length = width
        base._Widget.__init__(self, length, **config)
        self.add_defaults(Spacer.defaults)
    def draw(self):
        # Paint with this widget's own background, falling back to the bar's.
        self.drawer.clear(self.background or self.bar.background)
        if self.bar.horizontal:
            self.drawer.draw(offsetx=self.offset, width=self.length)
        else:
            self.drawer.draw(offsety=self.offset, height=self.length)
| # Copyright (c) 2008, 2010 Aldo Cortesi
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2012 Tim Neumann
# Copyright (c) 2012 Craig Barnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Adi Sieker
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .. import bar
from . import base
class Spacer(base._Widget):
"""Just an empty space on the bar
Often used with length equal to bar.STRETCH to push bar widgets to the
right or bottom edge of the screen.
Parameters
==========
length :
Length of the widget. Can be either ``bar.STRETCH`` or a length in
pixels.
width :
DEPRECATED, same as ``length``.
"""
orientations = base.ORIENTATION_BOTH
def __init__(self, length=bar.STRETCH, width=None):
"""
"""
# 'width' was replaced by 'length' since the widget can be installed in
# vertical bars
if width is not None:
base.deprecated('width kwarg or positional argument is '
'deprecated. Please use length.')
length = width
base._Widget.__init__(self, length)
def draw(self):
self.drawer.clear(self.bar.background)
if self.bar.horizontal:
self.drawer.draw(offsetx=self.offset, width=self.length)
else:
self.drawer.draw(offsety=self.offset, height=self.length)
| mit | Python |
4e8ed559d585ab4c5d852833d380b0502837e740 | Fix bug with attempt to create low-rank ManifoldElement from (potentially big) sparse matrix. | Nehoroshiy/multi_classifier,Nehoroshiy/multi_classifier | riemannian_optimization/sparse/utils/projections/riemannian_grad_full.py | riemannian_optimization/sparse/utils/projections/riemannian_grad_full.py | """
Copyright (c) 2015-2016 Constantine Belev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from ..loss_functions import delta_on_sigma_set
from riemannian_optimization.lowrank_matrix import ManifoldElement
def riemannian_grad_full(x, a, sigma_set, grad=None):
    """
    Riemannian gradient
    Compute projection of Euclidean gradient of function
    $\dfrac{1}{2}| P_{\Sigma}(X - A)|_F^2$ at tangent space to manifold at x.
    Projection has the form
    $Proj(Z) = UU^*Z + ZVV^* + UU^*ZVV^*$
    Parameters
    ----------
    x : ManifoldElement, shape (M, N)
        Rank-r manifold element in which we compute gradient
    a : sparse matrix, shape (M, N)
        Matrix that we need to approximate -- it has nonzero entries only
        on sigma_set
    sigma_set : array_like
        set of indices in which matrix a can be evaluated
    grad : sparse matrix, shape (M, N), optional
        gradient given for being projected
    Returns
    -------
    out : ManifoldElement
        Projection of an Euclidean gradient onto the Tangent space at x
    """
    # Default the Euclidean gradient to the masked residual P_Sigma(X - A).
    grad = delta_on_sigma_set(x, a, sigma_set) if grad is None else grad
    left_projected = grad.T.dot(x.u)    # Z^T U
    right_projected = grad.dot(x.v.T)   # Z V^T
    mid = x.u.T.dot(right_projected)    # U^T Z V^T
    # Components orthogonal to the column space (u) and row space (v) of x.
    u = right_projected - x.u.dot(mid)
    v = left_projected - x.v.T.dot(mid.T)
    # Reassemble each tangent component as a rank-r ManifoldElement and sum.
    mid = ManifoldElement(mid, x.r).rdot(x.u).dot(x.v)
    u = ManifoldElement(u, x.r).dot(x.v)
    v = ManifoldElement(v.T, x.r).rdot(x.u)
    return mid + u + v
| """
Copyright (c) 2015-2016 Constantine Belev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from ..loss_functions import delta_on_sigma_set
from riemannian_optimization.lowrank_matrix import ManifoldElement
def riemannian_grad_full(x, a, sigma_set, grad=None):
"""
Riemannian gradient
Compute projection of Euclidean gradient of function
$\dfrac{1}{2}| P_{\Sigma}(X - A)|_F^2$ at tangent space to manifold at x.
Projection has the form
$Proj(Z) = UU^*Z + ZVV^* + UU^*ZVV^*$
Parameters
----------
x : ManifoldElement, shape (M, N)
Rank-r manifold element in which we compute gradient
a : sparse matrix, shape (M, N)
Matrix that we need to approximate -- it has nonzero entries only
on sigma_set
sigma_set : array_like
set of indices in which matrix a can be evaluated
grad : sparse matrix, shape (M, N), optional
gradient given for being projected
Returns
-------
out : ManifoldElement
Projection of an Euclidean gradient onto the Tangent space at x
"""
grad = ManifoldElement(delta_on_sigma_set(x, a, sigma_set)) if grad is None else grad
left_projected = grad.T.dot(x.u)
right_projected = grad.dot(x.v.T)
mid = x.u.T.dot(right_projected)
u = right_projected - x.u.dot(mid)
v = left_projected - x.v.T.dot(mid.T)
mid = ManifoldElement(mid, x.r).rdot(x.u).dot(x.v)
u = ManifoldElement(u, x.r).dot(x.v)
v = ManifoldElement(v.T, x.r).rdot(x.u)
return mid + u + v
| mit | Python |
db8b06f51066a09f5e663b73c8bd56ba6a1dd39f | move to 0.1.5 | arq5x/cyvcf,wenjiany/cyvcf,chapmanb/cyvcf | cyvcf/version.py | cyvcf/version.py | __version__="0.1.5" | __version__="0.1.4" | mit | Python |
b61f0d371239ee5a6286ce539fab24d2d3de432a | Remove auth module import | plotly/dash,plotly/dash,plotly/dash,plotly/dash,plotly/dash | dash/__init__.py | dash/__init__.py | from .dash import Dash # noqa: F401
from . import dependencies # noqa: F401
from . import development # noqa: F401
from . import exceptions # noqa: F401
from . import resources # noqa: F401
from .version import __version__ # noqa: F401
| from .dash import Dash # noqa: F401
from . import authentication # noqa: F401
from . import dependencies # noqa: F401
from . import development # noqa: F401
from . import exceptions # noqa: F401
from . import resources # noqa: F401
from .version import __version__ # noqa: F401
| mit | Python |
50b0fce8a23182b43e546c716f7c2c5550ac1292 | Tweak swears | jreese/ircstat,jreese/ircstat | ircstat/plugins/highbrow.py | ircstat/plugins/highbrow.py | # Copyright 2013 John Reese
# Licensed under the MIT license
import re
from ..lib import *
from ..ent import Message
from .base import Plugin
class Highbrow(Plugin):
    """Gathers metrics related to how intelligent the conversation (or users)
    may be, like swear words used, etc."""
    # a mapping of swears to regexes that match the variants of the word;
    # every suffix group is optional so the bare word is counted too.
    # (Previously 'crap' and 'damn' required a suffix, so plain "crap" or
    # "damn" was never counted, unlike 'shit'/'fuck'/'hell'.)
    swears = {
        'crap': r'crap(s|py|ped)?',
        'shit': r'shits?',
        'fuck': r'fuck(s|ing|ed)?',
        'damn': r'damn(n?it|ed)?',
        'hell': r'hell',
    }
    # this regex fragment wraps every swear regex with word boundaries
    swear_regex = r'\b%s\b'
    def process_message(self, message):
        """Count swear occurrences in one message, recorded per nick."""
        if is_bot(message) or not message.content:
            return
        nick = message.nick
        content = message.content.lower()
        # compile the patterns lazily, on first message processed
        if self._swears is None:
            self._swears = {swear: re.compile(self.swear_regex % regex)
                            for swear, regex in self.swears.items()}
        swears = {swear: len(regex.findall(content))
                  for swear, regex in self._swears.items()}
        self.inc_shared_stats(nick, **swears)
    # cache compiled regexes
    _swears = None
| # Copyright 2013 John Reese
# Licensed under the MIT license
import re
from ..lib import *
from ..ent import Message
from .base import Plugin
class Highbrow(Plugin):
"""Gathers metrics related to how intelligent the conversation (or users)
may be, like swear words used, etc."""
# a mapping of swears to regexes that match the variants of the word
swears = {
'crap': r'crap(s|ped)',
'shit': r'shits?',
'fuck': r'fuck(s?|ing|ed)',
'damn': r'damn(n?it|ed)',
'hell': r'hell',
}
# this regex fragment wraps every swear regex
swear_regex = r'\b%s\b'
def process_message(self, message):
if is_bot(message) or not message.content:
return
nick = message.nick
content = message.content.lower()
if self._swears is None:
self._swears = {swear: re.compile(self.swear_regex % regex)
for swear, regex in self.swears.items()}
swears = {swear: len(regex.findall(content))
for swear, regex in self._swears.items()}
self.inc_shared_stats(nick, **swears)
# cache compiled regexes
_swears = None
| mit | Python |
f9fa554f5201a784205aca1f94b8e463cc263fb7 | add doctest | benjamin-hodgson/poll | src/poll.py | src/poll.py | """
Utilities for polling, retrying, and exception handling.
"""
import functools
import time
def poll(until, timeout=15, interval=1):
    """
    Decorator for functions that should be retried until a condition
    or a timeout.

    :param until: Called with the return value of the function;
        return True if the operation was successful (and retrying should stop)
    :param float timeout: How long to keep retrying the operation in seconds
    :param float interval: How long to sleep in between attempts in seconds
    :raises TimeoutError: if no attempt succeeded within ``timeout`` seconds

    >>> class TestPoll:
    ...     def __init__(self):
    ...         self.x = 0
    ...     @poll(lambda x: x == 3, interval=0.01)
    ...     def test(self):
    ...         print(self.x)
    ...         self.x += 1
    ...         return self.x
    >>> TestPoll().test()
    0
    1
    2
    3
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            start_time = time.perf_counter()
            while True:
                result = f(*args, **kwargs)
                # Check success immediately: the old flow slept after every
                # attempt and could raise TimeoutError even though the last
                # attempt had already satisfied the condition.
                if until(result):
                    return result
                if time.perf_counter() - start_time > timeout:
                    raise TimeoutError("The operation {} timed out after {} seconds".format(f.__name__, timeout))
                time.sleep(interval)
        return wrapper
    return decorator
| """
Utilities for polling, retrying, and exception handling.
"""
import functools
import time
def poll(until, timeout=15, interval=1):
    """
    Decorator for functions that should be retried until a condition
    or a timeout.

    :param until: Called with the return value of the function;
        return True if the operation was successful (and retrying should stop)
    :param float timeout: How long to keep retrying the operation in seconds
    :param float interval: How long to sleep in between attempts in seconds
    :raises TimeoutError: if no attempt succeeded within ``timeout`` seconds
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            start_time = time.perf_counter()
            while True:
                result = f(*args, **kwargs)
                # Check success immediately: the old flow slept after every
                # attempt and could raise TimeoutError even though the last
                # attempt had already satisfied the condition.
                if until(result):
                    return result
                if time.perf_counter() - start_time > timeout:
                    raise TimeoutError("The operation {} timed out after {} seconds".format(f.__name__, timeout))
                time.sleep(interval)
        return wrapper
    return decorator
| mit | Python |
ee5f2e9ca3cdc7395e350dcbc9fa8786f137652f | Switch to development mode | myint/yolk,myint/yolk | yolk/__init__.py | yolk/__init__.py | """yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.5a0'
| """yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.4'
| bsd-3-clause | Python |
83c90c53dfd8ee508911d9f1e471b39797ce2c72 | Fix types. | kou/zulip,kou/zulip,zulip/zulip,rht/zulip,zulip/zulip,rht/zulip,zulip/zulip,zulip/zulip,zulip/zulip,andersk/zulip,rht/zulip,andersk/zulip,andersk/zulip,kou/zulip,andersk/zulip,zulip/zulip,andersk/zulip,andersk/zulip,rht/zulip,rht/zulip,rht/zulip,rht/zulip,zulip/zulip,kou/zulip,kou/zulip,andersk/zulip,kou/zulip,kou/zulip | zerver/lib/db.py | zerver/lib/db.py | import time
from typing import (
Any,
Callable,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
TypeVar,
Union,
overload,
)
from psycopg2.extensions import connection, cursor
from psycopg2.sql import Composable
CursorObj = TypeVar("CursorObj", bound=cursor)
Query = Union[str, bytes, Composable]
Params = Union[Sequence[object], Mapping[str, object], None]
ParamsT = TypeVar("ParamsT")
# Similar to the tracking done in Django's CursorDebugWrapper, but done at the
# psycopg2 cursor level so it works with SQLAlchemy.
def wrapper_execute(
    self: CursorObj, action: Callable[[Query, ParamsT], None], sql: Query, params: ParamsT
) -> None:
    """Run *action* (a cursor execute/executemany) and record its duration.

    The elapsed time is appended to ``self.connection.queries`` even when
    the statement raises, mirroring Django's CursorDebugWrapper.
    """
    start = time.time()
    try:
        action(sql, params)
    finally:
        stop = time.time()
        duration = stop - start
        self.connection.queries.append(
            {
                "time": f"{duration:.3f}",
            }
        )
class TimeTrackingCursor(cursor):
    """A psycopg2 cursor class that tracks the time spent executing queries."""
    # Both overrides delegate to wrapper_execute so timing is recorded uniformly.
    def execute(self, query: Query, vars: Params = None) -> None:
        wrapper_execute(self, super().execute, query, vars)
    def executemany(self, query: Query, vars: Iterable[Params]) -> None: # nocoverage
        wrapper_execute(self, super().executemany, query, vars)
CursorT = TypeVar("CursorT", bound=cursor)
class TimeTrackingConnection(connection):
    """A psycopg2 connection class that uses TimeTrackingCursors."""
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Per-connection log of query timings, consumed by debug tooling.
        self.queries: List[Dict[str, str]] = []
        super().__init__(*args, **kwargs)
    # The overloads describe the two call shapes: default factory yields a
    # TimeTrackingCursor; an explicit cursor_factory yields that cursor type.
    @overload
    def cursor(
        self,
        name: str = ...,
        *,
        scrollable: Optional[bool] = ...,
        withhold: bool = ...,
    ) -> TimeTrackingCursor:
        ...
    @overload
    def cursor(
        self,
        name: str = ...,
        cursor_factory: Callable[..., CursorT] = ...,
        scrollable: Optional[bool] = ...,
        withhold: bool = ...,
    ) -> CursorT:
        ...
    def cursor(self, *args: object, **kwargs: object) -> cursor:
        # Default to the timing cursor unless the caller overrides the factory.
        kwargs.setdefault("cursor_factory", TimeTrackingCursor)
        return super().cursor(*args, **kwargs)
def reset_queries() -> None:
    """Empty the query timing log of every live database connection."""
    from django.db import connections
    for connection_wrapper in connections.all():
        raw_conn = connection_wrapper.connection
        if raw_conn is not None:
            raw_conn.queries = []
| import time
from typing import (
Any,
Callable,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
TypeVar,
Union,
overload,
)
from psycopg2.extensions import connection, cursor
from psycopg2.sql import Composable
CursorObj = TypeVar("CursorObj", bound=cursor)
Query = Union[str, Composable]
Params = Union[Sequence[object], Mapping[str, object]]
ParamsT = TypeVar("ParamsT")
# Similar to the tracking done in Django's CursorDebugWrapper, but done at the
# psycopg2 cursor level so it works with SQLAlchemy.
def wrapper_execute(
self: CursorObj, action: Callable[[Query, ParamsT], CursorObj], sql: Query, params: ParamsT
) -> CursorObj:
start = time.time()
try:
return action(sql, params)
finally:
stop = time.time()
duration = stop - start
self.connection.queries.append(
{
"time": f"{duration:.3f}",
}
)
class TimeTrackingCursor(cursor):
"""A psycopg2 cursor class that tracks the time spent executing queries."""
def execute(self, query: Query, vars: Optional[Params] = None) -> "TimeTrackingCursor":
return wrapper_execute(self, super().execute, query, vars)
def executemany(
self, query: Query, vars: Iterable[Params]
) -> "TimeTrackingCursor": # nocoverage
return wrapper_execute(self, super().executemany, query, vars)
CursorT = TypeVar("CursorT", bound=cursor)
class TimeTrackingConnection(connection):
"""A psycopg2 connection class that uses TimeTrackingCursors."""
def __init__(self, *args: Any, **kwargs: Any) -> None:
self.queries: List[Dict[str, str]] = []
super().__init__(*args, **kwargs)
@overload
def cursor(
self,
name: str = ...,
*,
scrollable: Optional[bool] = ...,
withhold: bool = ...,
) -> TimeTrackingCursor:
...
@overload
def cursor(
self,
name: str = ...,
cursor_factory: Callable[..., CursorT] = ...,
scrollable: Optional[bool] = ...,
withhold: bool = ...,
) -> CursorT:
...
def cursor(self, *args: object, **kwargs: object) -> cursor:
kwargs.setdefault("cursor_factory", TimeTrackingCursor)
return super().cursor(*args, **kwargs)
def reset_queries() -> None:
from django.db import connections
for conn in connections.all():
if conn.connection is not None:
conn.connection.queries = []
| apache-2.0 | Python |
f9defcf8c0bcf85c04e5d02df47086f2404d9d5c | Bump version to 0.20.0 | elifesciences/elife-tools,elifesciences/elife-tools | elifetools/__init__.py | elifetools/__init__.py | __version__ = "0.20.0"
| __version__ = "0.19.0"
| mit | Python |
34a4432b85b0a0948f8481a03c6278e635c85912 | Add a default ordering for planets and moons. | StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser | elmo/eve_sde/models.py | elmo/eve_sde/models.py | from django.db import models
import roman
class Region(models.Model):
    """An EVE Online region, keyed by its static-data-export id."""
    id = models.IntegerField(primary_key=True)
    name = models.CharField(
        db_index=True,
        unique=True,
        max_length=64
    )
    def __str__(self):
        return self.name
    class Meta:
        default_permissions = ()
class Constellation(models.Model):
    """A constellation belonging to a Region."""
    id = models.IntegerField(primary_key=True)
    region = models.ForeignKey(
        Region,
        related_name='constellations',
        db_index=True)
    name = models.CharField(
        db_index=True,
        unique=True,
        max_length=64
    )
    def __str__(self):
        return self.name
    class Meta:
        default_permissions = ()
class SolarSystem(models.Model):
    """A solar system within a Constellation, with its security status."""
    id = models.IntegerField(primary_key=True)
    constellation = models.ForeignKey(
        Constellation,
        related_name='systems',
        db_index=True
    )
    name = models.CharField(
        db_index=True,
        unique=True,
        max_length=64
    )
    # Security status of the system (e.g. highsec/lowsec/nullsec value).
    security = models.FloatField()
    def __str__(self):
        return self.name
    class Meta:
        default_permissions = ()
class Planet(models.Model):
    """A planet within a SolarSystem, ordered by its orbit number."""
    id = models.IntegerField(primary_key=True)
    system = models.ForeignKey(
        SolarSystem,
        related_name='planets',
        db_index=True
    )
    # Position of the planet within its system; drives the default ordering.
    number = models.IntegerField(db_index=True)
    def as_roman(self):
        """Return the planet number as a Roman numeral string."""
        return roman.toRoman(self.number)
    def __str__(self):
        return "%s %d" % (str(self.system), self.number)
    class Meta:
        default_permissions = ()
        ordering = ('number',)
class Moon(models.Model):
    """A moon orbiting a Planet, ordered by its moon number."""
    id = models.IntegerField(primary_key=True)
    planet = models.ForeignKey(
        Planet,
        related_name='moons',
        db_index=True
    )
    # Position of the moon around its planet; drives the default ordering.
    number = models.IntegerField(db_index=True)
    def __str__(self):
        return "%s - Moon %d" % (str(self.planet), self.number)
    class Meta:
        default_permissions = ()
        ordering = ('number',)
| from django.db import models
import roman
class Region(models.Model):
id = models.IntegerField(primary_key=True)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
def __str__(self):
return self.name
class Meta:
default_permissions = ()
class Constellation(models.Model):
id = models.IntegerField(primary_key=True)
region = models.ForeignKey(
Region,
related_name='constellations',
db_index=True)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
def __str__(self):
return self.name
class Meta:
default_permissions = ()
class SolarSystem(models.Model):
id = models.IntegerField(primary_key=True)
constellation = models.ForeignKey(
Constellation,
related_name='systems',
db_index=True
)
name = models.CharField(
db_index=True,
unique=True,
max_length=64
)
security = models.FloatField()
def __str__(self):
return self.name
class Meta:
default_permissions = ()
class Planet(models.Model):
id = models.IntegerField(primary_key=True)
system = models.ForeignKey(
SolarSystem,
related_name='planets',
db_index=True
)
number = models.IntegerField(db_index=True)
def as_roman(self):
return roman.toRoman(self.number)
def __str__(self):
return "%s %d" % (str(self.system), self.number)
class Meta:
default_permissions = ()
class Moon(models.Model):
id = models.IntegerField(primary_key=True)
planet = models.ForeignKey(
Planet,
related_name='moons',
db_index=True
)
number = models.IntegerField(db_index=True)
def __str__(self):
return "%s - Moon %d" % (str(self.planet), self.number)
class Meta:
default_permissions = ()
| mit | Python |
4e75db543d17fc102334cf421969c5a65595ba35 | optimize code in mongo connection function | Edraak/edx-platform,Edraak/edx-platform,Edraak/edx-platform,Edraak/edx-platform,Edraak/edx-platform | common/lib/xmodule/xmodule/mongo_connection.py | common/lib/xmodule/xmodule/mongo_connection.py | """
Common MongoDB connection functions.
"""
import pymongo
from mongodb_proxy import MongoProxy
# pylint: disable=bad-continuation
def connect_to_mongodb(
db, host,
port=27017, tz_aware=True, user=None, password=None,
retry_wait_time=0.1, proxy=True, **kwargs
):
"""
Returns a MongoDB Database connection, optionally wrapped in a proxy. The proxy
handles AutoReconnect errors by retrying read operations, since these exceptions
typically indicate a temporary step-down condition for MongoDB.
"""
# The MongoReplicaSetClient class is deprecated in Mongo 3.x, in favor of using
# the MongoClient class for all connections. Update/simplify this code when using
# PyMongo 3.x.
if kwargs.get('replicaSet'):
# Enable reading from secondary nodes in the MongoDB replicaset by using the
# MongoReplicaSetClient class.
# The 'replicaSet' parameter in kwargs is required for secondary reads.
# The read_preference should be set to a proper value, like SECONDARY_PREFERRED.
mongo_client_class = pymongo.MongoReplicaSetClient
else:
# No 'replicaSet' in kwargs - so no secondary reads.
mongo_client_class = pymongo.MongoClient
mongo_conn = pymongo.database.Database(
mongo_client_class(
host=host,
port=port,
tz_aware=tz_aware,
document_class=dict,
**kwargs
),
db
)
if proxy:
mongo_conn = MongoProxy(
mongo_conn,
wait_time=retry_wait_time
)
# If credentials were provided, authenticate the user.
auth_src = kwargs.get('authSource')
if user is not None and password is not None:
mongo_conn.authenticate(user, password, source=auth_src)
return mongo_conn
| """
Common MongoDB connection functions.
"""
import pymongo
from mongodb_proxy import MongoProxy
# pylint: disable=bad-continuation
def connect_to_mongodb(
db, host,
port=27017, tz_aware=True, user=None, password=None,
retry_wait_time=0.1, proxy=True, **kwargs
):
"""
Returns a MongoDB Database connection, optionally wrapped in a proxy. The proxy
handles AutoReconnect errors by retrying read operations, since these exceptions
typically indicate a temporary step-down condition for MongoDB.
"""
# The MongoReplicaSetClient class is deprecated in Mongo 3.x, in favor of using
# the MongoClient class for all connections. Update/simplify this code when using
# PyMongo 3.x.
if kwargs.get('replicaSet'):
# Enable reading from secondary nodes in the MongoDB replicaset by using the
# MongoReplicaSetClient class.
# The 'replicaSet' parameter in kwargs is required for secondary reads.
# The read_preference should be set to a proper value, like SECONDARY_PREFERRED.
mongo_client_class = pymongo.MongoReplicaSetClient
else:
# No 'replicaSet' in kwargs - so no secondary reads.
mongo_client_class = pymongo.MongoClient
mongo_conn = pymongo.database.Database(
mongo_client_class(
host=host,
port=port,
tz_aware=tz_aware,
document_class=dict,
**kwargs
),
db
)
if proxy:
mongo_conn = MongoProxy(
mongo_conn,
wait_time=retry_wait_time
)
# If credentials were provided, authenticate the user.
auth_src = None
if kwargs.get('authSource'):
auth_src=kwargs.get('authSource')
if user is not None and password is not None:
mongo_conn.authenticate(user, password, source=auth_src)
return mongo_conn
| agpl-3.0 | Python |
b568877e42043747df6e11af5d04cce5fcdc1c0c | Allow introspection of enabled services | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/modules/introspect.py | salt/modules/introspect.py | # -*- coding: utf-8 -*-
'''
Functions to perform introspection on a minion, and return data in a format
usable by Salt States
'''
import os
def running_service_owners(
exclude=('/dev', '/home', '/media', '/proc', '/run', '/sys/', '/tmp',
'/var')
):
'''
Determine which packages own the currently running services. By default,
excludes files whose full path starts with ``/dev``, ``/home``, ``/media``,
``/proc``, ``/run``, ``/sys``, ``/tmp`` and ``/var``. This can be
overridden by passing in a new list to ``exclude``.
CLI Example:
salt myminion introspect.running_service_owners
'''
error = {}
if not 'pkg.owner' in __salt__:
error['Unsupported Package Manager'] = (
'The module for the package manager on this system does not '
'support looking up which package(s) owns which file(s)'
)
if not 'file.open_files' in __salt__:
error['Unsupported File Module'] = (
'The file module on this system does not '
'support looking up open files on the system'
)
if error:
return {'Error': error}
ret = {}
open_files = __salt__['file.open_files']()
execs = __salt__['service.execs']()
for path in open_files:
ignore = False
for bad_dir in exclude:
if path.startswith(bad_dir):
ignore = True
if ignore:
continue
if not os.access(path, os.X_OK):
continue
for service in execs:
if path == execs[service]:
pkg = __salt__['pkg.owner'](path)
ret[service] = pkg.values()[0]
return ret
def enabled_service_owners():
'''
Return which packages own each of the services that are currently enabled.
CLI Example:
salt myminion introspect.enabled_service_owners
'''
error = {}
if not 'pkg.owner' in __salt__:
error['Unsupported Package Manager'] = (
'The module for the package manager on this system does not '
'support looking up which package(s) owns which file(s)'
)
if not 'service.show' in __salt__:
error['Unsupported Service Manager'] = (
'The module for the service manager on this system does not '
'support showing descriptive service data'
)
if error:
return {'Error': error}
ret = {}
services = __salt__['service.get_enabled']()
for service in services:
data = __salt__['service.show'](service)
if not 'ExecStart' in data:
continue
start_cmd = data['ExecStart']['path']
pkg = __salt__['pkg.owner'](start_cmd)
ret[service] = pkg.values()[0]
return ret
| # -*- coding: utf-8 -*-
'''
Functions to perform introspection on a minion, and return data in a format
usable by Salt States
'''
import os
def running_service_owners(
exclude=('/dev', '/home', '/media', '/proc', '/run', '/sys/', '/tmp',
'/var')
):
'''
Determine which packages own the currently running services. By default,
excludes files whose full path starts with ``/dev``, ``/home``, ``/media``,
``/proc``, ``/run``, ``/sys``, ``/tmp`` and ``/var``. This can be
overridden by passing in a new list to ``exclude``.
CLI Example:
salt myminion introspect.running_service_owners
'''
error = {}
if not 'pkg.owner' in __salt__:
error['Unsupported Package Manager'] = (
'The module for the package manager on this system does not '
'support looking up which package(s) owns which file(s)'
)
if not 'file.open_files' in __salt__:
error['Unsupported File Module'] = (
'The file module on this system does not '
'support looking up open files on the system'
)
if error:
return {'Error': error}
ret = {}
open_files = __salt__['file.open_files']()
execs = __salt__['service.execs']()
for path in open_files:
ignore = False
for bad_dir in exclude:
if path.startswith(bad_dir):
ignore = True
if ignore:
continue
if not os.access(path, os.X_OK):
continue
for service in execs:
if path == execs[service]:
pkg = __salt__['pkg.owner'](path)
ret[service] = pkg.values()[0]
return ret
| apache-2.0 | Python |
99e783e63782d17dc4e2183685444cfefd72d6f6 | Update 0071_partnerteammember_champion_admin.py | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/migrations/0071_partnerteammember_champion_admin.py | accelerator/migrations/0071_partnerteammember_champion_admin.py | # Generated by Django 2.2.10 on 2021-08-26 17:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0070_judginground_champion_partner_label')
]
operations = [
migrations.AddField(
model_name='partnerteammember',
name='champion_admin',
field=models.BooleanField(default=False),
),
]
| # Generated by Django 2.2.10 on 2021-08-26 17:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0070_judginground_champion_partner_label.py')
]
operations = [
migrations.AddField(
model_name='partnerteammember',
name='champion_admin',
field=models.BooleanField(default=False),
),
]
| mit | Python |
4a72294d7d494ae3231e7bad74f57baaa4da088c | Remove code to patch bootloader flash size. | henriknelson/micropython,lowRISC/micropython,pramasoul/micropython,MrSurly/micropython-esp32,adafruit/micropython,blazewicz/micropython,tobbad/micropython,Peetz0r/micropython-esp32,oopy/micropython,hiway/micropython,tobbad/micropython,puuu/micropython,MrSurly/micropython-esp32,Timmenem/micropython,micropython/micropython-esp32,kerneltask/micropython,pfalcon/micropython,micropython/micropython-esp32,infinnovation/micropython,ryannathans/micropython,tobbad/micropython,pramasoul/micropython,chrisdearman/micropython,kerneltask/micropython,bvernoux/micropython,puuu/micropython,deshipu/micropython,ryannathans/micropython,MrSurly/micropython,kerneltask/micropython,mhoffma/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,Peetz0r/micropython-esp32,HenrikSolver/micropython,lowRISC/micropython,micropython/micropython-esp32,TDAbboud/micropython,cwyark/micropython,kerneltask/micropython,cwyark/micropython,puuu/micropython,Peetz0r/micropython-esp32,lowRISC/micropython,SHA2017-badge/micropython-esp32,chrisdearman/micropython,PappaPeppar/micropython,trezor/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,pfalcon/micropython,bvernoux/micropython,HenrikSolver/micropython,MrSurly/micropython,blazewicz/micropython,alex-robbins/micropython,swegener/micropython,infinnovation/micropython,PappaPeppar/micropython,oopy/micropython,pramasoul/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,adafruit/circuitpython,pozetroninc/micropython,bvernoux/micropython,bvernoux/micropython,tralamazza/micropython,mhoffma/micropython,chrisdearman/micropython,trezor/micropython,adafruit/micropython,oopy/micropython,blazewicz/micropython,MrSurly/micropython,swegener/micropython,tuc-osg/micropython,adafruit/circuitpython,MrSurly/micropython-esp32,cwyark/micropython,TDAbboud/micropython,torwag/micropython,adafruit/micropython,toolmacher/micropython,pfalcon/micropython,blazewicz/micro
python,micropython/micropython-esp32,SHA2017-badge/micropython-esp32,Timmenem/micropython,toolmacher/micropython,adafruit/circuitpython,alex-robbins/micropython,cwyark/micropython,micropython/micropython-esp32,HenrikSolver/micropython,tuc-osg/micropython,torwag/micropython,AriZuu/micropython,oopy/micropython,AriZuu/micropython,selste/micropython,torwag/micropython,AriZuu/micropython,tralamazza/micropython,infinnovation/micropython,toolmacher/micropython,dmazzella/micropython,TDAbboud/micropython,PappaPeppar/micropython,chrisdearman/micropython,pramasoul/micropython,tobbad/micropython,pfalcon/micropython,dmazzella/micropython,lowRISC/micropython,selste/micropython,bvernoux/micropython,henriknelson/micropython,alex-robbins/micropython,mhoffma/micropython,TDAbboud/micropython,AriZuu/micropython,pozetroninc/micropython,alex-robbins/micropython,infinnovation/micropython,MrSurly/micropython-esp32,tuc-osg/micropython,TDAbboud/micropython,pramasoul/micropython,ryannathans/micropython,swegener/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,MrSurly/micropython-esp32,chrisdearman/micropython,selste/micropython,trezor/micropython,selste/micropython,tralamazza/micropython,torwag/micropython,trezor/micropython,AriZuu/micropython,MrSurly/micropython,hiway/micropython,cwyark/micropython,henriknelson/micropython,deshipu/micropython,tuc-osg/micropython,oopy/micropython,tralamazza/micropython,torwag/micropython,henriknelson/micropython,tobbad/micropython,swegener/micropython,toolmacher/micropython,Timmenem/micropython,MrSurly/micropython,blazewicz/micropython,deshipu/micropython,dmazzella/micropython,adafruit/micropython,lowRISC/micropython,alex-robbins/micropython,dmazzella/micropython,hiway/micropython,pozetroninc/micropython,pozetroninc/micropython,hiway/micropython,deshipu/micropython,puuu/micropython,mhoffma/micropython,puuu/micropython,selste/micropython,toolmacher/micropython,henriknelson/micropython,swegener/micropython,ryannathans/micropython,HenrikSolver/mic
ropython,Timmenem/micropython,pozetroninc/micropython,trezor/micropython,tuc-osg/micropython,pfalcon/micropython,HenrikSolver/micropython,ryannathans/micropython,hiway/micropython,mhoffma/micropython,PappaPeppar/micropython,kerneltask/micropython,infinnovation/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,adafruit/micropython,adafruit/circuitpython | esp8266/modules/flashbdev.py | esp8266/modules/flashbdev.py | import esp
class FlashBdev:
SEC_SIZE = 4096
RESERVED_SECS = 1
START_SEC = esp.flash_user_start() // SEC_SIZE + RESERVED_SECS
NUM_BLK = 0x6b - RESERVED_SECS
def __init__(self, blocks=NUM_BLK):
self.blocks = blocks
def readblocks(self, n, buf):
#print("readblocks(%s, %x(%d))" % (n, id(buf), len(buf)))
esp.flash_read((n + self.START_SEC) * self.SEC_SIZE, buf)
def writeblocks(self, n, buf):
#print("writeblocks(%s, %x(%d))" % (n, id(buf), len(buf)))
#assert len(buf) <= self.SEC_SIZE, len(buf)
esp.flash_erase(n + self.START_SEC)
esp.flash_write((n + self.START_SEC) * self.SEC_SIZE, buf)
def ioctl(self, op, arg):
#print("ioctl(%d, %r)" % (op, arg))
if op == 4: # BP_IOCTL_SEC_COUNT
return self.blocks
if op == 5: # BP_IOCTL_SEC_SIZE
return self.SEC_SIZE
def set_bl_flash_size(real_size):
if real_size == 256*1024:
code = 1
elif real_size == 512*1024:
code = 0
elif real_size == 1024*1024:
code = 2
elif real_size == 2048*1024:
code = 3
elif real_size == 4096*1024:
code = 4
else:
code = 2
buf = bytearray(4096)
esp.flash_read(0, buf)
buf[3] = (buf[3] & 0xf) | (code << 4)
esp.flash_erase(0)
esp.flash_write(0, buf)
size = esp.flash_size()
if size < 1024*1024:
bdev = None
else:
# 20K at the flash end is reserved for SDK params storage
bdev = FlashBdev((size - 20480) // FlashBdev.SEC_SIZE - FlashBdev.START_SEC)
| import esp
class FlashBdev:
SEC_SIZE = 4096
RESERVED_SECS = 1
START_SEC = esp.flash_user_start() // SEC_SIZE + RESERVED_SECS
NUM_BLK = 0x6b - RESERVED_SECS
def __init__(self, blocks=NUM_BLK):
self.blocks = blocks
def readblocks(self, n, buf):
#print("readblocks(%s, %x(%d))" % (n, id(buf), len(buf)))
esp.flash_read((n + self.START_SEC) * self.SEC_SIZE, buf)
def writeblocks(self, n, buf):
#print("writeblocks(%s, %x(%d))" % (n, id(buf), len(buf)))
#assert len(buf) <= self.SEC_SIZE, len(buf)
esp.flash_erase(n + self.START_SEC)
esp.flash_write((n + self.START_SEC) * self.SEC_SIZE, buf)
def ioctl(self, op, arg):
#print("ioctl(%d, %r)" % (op, arg))
if op == 4: # BP_IOCTL_SEC_COUNT
return self.blocks
if op == 5: # BP_IOCTL_SEC_SIZE
return self.SEC_SIZE
def set_bl_flash_size(real_size):
if real_size == 256*1024:
code = 1
elif real_size == 512*1024:
code = 0
elif real_size == 1024*1024:
code = 2
elif real_size == 2048*1024:
code = 3
elif real_size == 4096*1024:
code = 4
else:
code = 2
buf = bytearray(4096)
esp.flash_read(0, buf)
buf[3] = (buf[3] & 0xf) | (code << 4)
esp.flash_erase(0)
esp.flash_write(0, buf)
# If bootloader size ID doesn't correspond to real Flash size,
# fix bootloader value and reboot.
size = esp.flash_id() >> 16
# Check that it looks like realistic power of 2 for flash sizes
# commonly used with esp8266
if 22 >= size >= 18:
size = 1 << size
if size != esp.flash_size():
import machine
import time
print("Bootloader Flash size appear to have been set incorrectly, trying to fix")
set_bl_flash_size(size)
machine.reset()
while 1: time.sleep(1)
size = esp.flash_size()
if size < 1024*1024:
bdev = None
else:
# 20K at the flash end is reserved for SDK params storage
bdev = FlashBdev((size - 20480) // FlashBdev.SEC_SIZE - FlashBdev.START_SEC)
| mit | Python |
02e4a879c9a074aaa89117b048f346b306b95995 | Use string formatting | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/pillar/etcd_pillar.py | salt/pillar/etcd_pillar.py | # -*- coding: utf-8 -*-
'''
Use etcd data as a Pillar source
.. versionadded:: Helium
:depends: - python-etcd
In order to use an etcd server, a profile must be created in the master
configuration file:
.. code-block:: yaml
my_etd_config:
etcd.host: 127.0.0.1
etcd.port: 4001
After the profile is created, configure the external pillar system to use it.
Optionally, a root may be specified.
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config
ext_pillar:
- etcd: my_etcd_config root=/salt
Using these configuration profiles, multiple etcd sources may also be used:
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config
- etcd: my_other_etcd_config
The ``minion_id`` may be used in the ``root`` path to expose minion-specific
information stored in etcd.
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config root=/salt/%(minion_id)s
Minion-specific values may override shared values when the minion-specific root
appears after the shared root:
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config root=/salt-shared
- etcd: my_other_etcd_config root=/salt-private/%(minion_id)s
Using the configuration above, the following commands could be used to share a
key with all minions but override its value for a specific minion::
etcdctl set /salt-shared/mykey my_value
etcdctl set /salt-private/special_minion_id/mykey my_other_value
'''
# Import python libs
import logging
# Import third party libs
try:
from salt.utils import etcd_util
HAS_LIBS = True
except Exception:
HAS_LIBS = False
__virtualname__ = 'etcd'
# Set up logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Only return if python-etcd is installed
'''
return __virtualname__ if HAS_LIBS else False
def ext_pillar(minion_id, pillar, conf): # pylint: disable=W0613
'''
Check etcd for all data
'''
comps = conf.split()
profile = None
if comps[0]:
profile = comps[0]
client = etcd_util.get_conn(__opts__, profile)
path = '/'
if len(comps) > 1 and comps[1].startswith('root='):
path = comps[1].replace('root=', '')
# put the minion's ID in the path if necessary
path %= {
'minion_id': minion_id
}
try:
pillar = etcd_util.tree(client, path)
except KeyError:
log.error('No such key in etcd profile {0}: {1}'.format(profile, path))
pillar = {}
return pillar
| # -*- coding: utf-8 -*-
'''
Use etcd data as a Pillar source
.. versionadded:: Helium
:depends: - python-etcd
In order to use an etcd server, a profile must be created in the master
configuration file:
.. code-block:: yaml
my_etd_config:
etcd.host: 127.0.0.1
etcd.port: 4001
After the profile is created, configure the external pillar system to use it.
Optionally, a root may be specified.
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config
ext_pillar:
- etcd: my_etcd_config root=/salt
Using these configuration profiles, multiple etcd sources may also be used:
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config
- etcd: my_other_etcd_config
The ``minion_id`` may be used in the ``root`` path to expose minion-specific
information stored in etcd.
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config root=/salt/%(minion_id)s
Minion-specific values may override shared values when the minion-specific root
appears after the shared root:
.. code-block:: yaml
ext_pillar:
- etcd: my_etcd_config root=/salt-shared
- etcd: my_other_etcd_config root=/salt-private/%(minion_id)s
Using the configuration above, the following commands could be used to share a
key with all minions but override its value for a specific minion::
etcdctl set /salt-shared/mykey my_value
etcdctl set /salt-private/special_minion_id/mykey my_other_value
'''
# Import python libs
import logging
# Import third party libs
try:
from salt.utils import etcd_util
HAS_LIBS = True
except Exception:
HAS_LIBS = False
__virtualname__ = 'etcd'
# Set up logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Only return if python-etcd is installed
'''
return __virtualname__ if HAS_LIBS else False
def ext_pillar(minion_id, pillar, conf): # pylint: disable=W0613
'''
Check etcd for all data
'''
comps = conf.split()
profile = None
if comps[0]:
profile = comps[0]
client = etcd_util.get_conn(__opts__, profile)
path = '/'
if len(comps) > 1 and comps[1].startswith('root='):
path = comps[1].replace('root=', '')
# put the minion's ID in the path if necessary
path %= {
'minion_id': minion_id
}
try:
pillar = etcd_util.tree(client, path)
except KeyError:
log.error('No such key in etcd profile %s: %s' % (profile, path))
pillar = {}
return pillar
| apache-2.0 | Python |
2b2c24687ed27861b5ebddf5112f19a67615cbf5 | Fix value generation | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/exp10/test/fixtures/python/runner.py | lib/node_modules/@stdlib/math/base/special/exp10/test/fixtures/python/runner.py | #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import numpy as np
from scipy.special import exp10
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(file)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, './data.json')
```
"""
y = exp10(x)
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
# Write the data to the output filepath as JSON:
with open(filepath, "w") as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
# Medium negative values:
x = np.linspace(-300.0, -1.0, 2003)
gen(x, "medium_negative.json")
# Medium positive values:
x = np.linspace(1.0, 300.0, 2003)
gen(x, "medium_positive.json")
# Small negative values:
x = np.linspace(-1.0, -2**-54, 1000) # TODO: improve range
gen(x, "small_negative.json")
# Small positive values:
x = np.linspace(2**-54, 1.0, 1000) # TODO: improve range
gen(x, "small_positive.json")
# Tiny values:
x = np.linspace(-2**-54, 2**-54, 1000) # TODO: improve range
gen(x, "tiny.json")
if __name__ == "__main__":
main()
| #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(file)
def gen(x, name):
"""Generate fixture data and write to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001)
python> gen(x, \"./data.json\")
```
"""
y = special.exp10(x)
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
# Medium negative values:
x = np.linspace(-300.0, -1.0, 2003)
gen(x, "medium_negative.json")
# Medium positive values:
x = np.linspace(1.0, 300.0, 2003)
gen(x, "medium_positive.json")
# Small negative values:
x = np.linspace(-1.0, -2^-54, 1000) # TODO: improve range
gen(x, "small_negative.json")
# Small positive values:
x = np.linspace(2^-54, 1.0, 1000 ) # TODO: improve range
gen(x, "small_positive.json")
# Tiny values:
x = np.linspace(-2^-54, 2^-54, 1000) # TODO: improve range
gen(x, "tiny.json")
if __name__ == "__main__":
main()
| apache-2.0 | Python |
26f984a7732491e87e4eb756caf0056a7ac71484 | Fix unlink, >1 filter and lines too long | bullet92/contract,open-synergy/contract | contract_invoice_merge_by_partner/models/account_analytic_analysis.py | contract_invoice_merge_by_partner/models/account_analytic_analysis.py | # -*- coding: utf-8 -*-
# © 2016 Carlos Dauden <carlos.dauden@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, models
class PurchaseOrderLine(models.Model):
_inherit = 'account.analytic.account'
@api.multi
def _recurring_create_invoice(self, automatic=False):
invoice_obj = self.env['account.invoice']
invoices = invoice_obj.browse(
super(PurchaseOrderLine, self)._recurring_create_invoice(
automatic))
res = []
unlink_list = []
for partner in invoices.mapped('partner_id'):
inv_to_merge = invoices.filtered(
lambda x: x.partner_id.id == partner)
if partner.contract_invoice_merge and len(inv_to_merge) > 1:
invoices_merged = inv_to_merge.do_merge()
res.extend(invoices_merged)
unlink_list.extend(inv_to_merge)
else:
res.extend(inv_to_merge)
if unlink_list:
invoice_obj.browse(unlink_list).unlink()
return res
| # -*- coding: utf-8 -*-
# © 2016 Carlos Dauden <carlos.dauden@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, models
class PurchaseOrderLine(models.Model):
_inherit = 'account.analytic.account'
@api.multi
def _recurring_create_invoice(self, automatic=False):
invoice_obj = self.env['account.invoice']
invoices = invoice_obj.browse(
super(PurchaseOrderLine, self)._recurring_create_invoice(automatic))
res = []
unlink_list = []
for partner in invoices.mapped('partner_id'):
inv_to_merge = invoices.filtered(
lambda x: x.partner_id.id == partner)
if partner.contract_invoice_merge:
invoices_merged = inv_to_merge.do_merge()
res.extend(invoices_merged)
unlink_list.extend(inv_to_merge)
else:
res.extend(inv_to_merge)
if unlink_list:
invoice_obj.unlink([x.id for x in unlink_list])
return res
| agpl-3.0 | Python |
36d9b32a8b3f328c51668fdbb8e01080a936ca99 | Add license to file header | tensorflow/cloud,tensorflow/cloud | tests/integration/call_run_within_script_with_autokeras_test.py | tests/integration/call_run_within_script_with_autokeras_test.py | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Search for a good model for the
[MNIST](https://keras.io/datasets/#mnist-database-of-handwritten-digits) dataset.
"""
import argparse
import os
import autokeras as ak
import tensorflow_cloud as tfc
from tensorflow.keras.datasets import mnist
parser = argparse.ArgumentParser(description="Model save path arguments.")
parser.add_argument("--path", required=True, type=str, help="Keras model save path")
args = parser.parse_args()
tfc.run(
chief_config=tfc.COMMON_MACHINE_CONFIGS["V100_1X"],
docker_base_image="haifengjin/autokeras:1.0.3",
)
# Prepare the dataset.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
print(x_train.shape) # (60000, 28, 28)
print(y_train.shape) # (60000,)
print(y_train[:3]) # array([7, 2, 1], dtype=uint8)
# Initialize the ImageClassifier.
clf = ak.ImageClassifier(max_trials=2)
# Search for the best model.
clf.fit(x_train, y_train, epochs=10)
# Evaluate on the testing data.
print("Accuracy: {accuracy}".format(accuracy=clf.evaluate(x_test, y_test)[1]))
clf.export_model().save(os.path.join(args.path, "model.h5"))
| """
Search for a good model for the
[MNIST](https://keras.io/datasets/#mnist-database-of-handwritten-digits) dataset.
"""
import argparse
import os
import autokeras as ak
import tensorflow_cloud as tfc
from tensorflow.keras.datasets import mnist
parser = argparse.ArgumentParser(description="Model save path arguments.")
parser.add_argument("--path", required=True, type=str, help="Keras model save path")
args = parser.parse_args()
tfc.run(
chief_config=tfc.COMMON_MACHINE_CONFIGS["V100_1X"],
docker_base_image="haifengjin/autokeras:1.0.3",
)
# Prepare the dataset.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
print(x_train.shape) # (60000, 28, 28)
print(y_train.shape) # (60000,)
print(y_train[:3]) # array([7, 2, 1], dtype=uint8)
# Initialize the ImageClassifier.
clf = ak.ImageClassifier(max_trials=2)
# Search for the best model.
clf.fit(x_train, y_train, epochs=10)
# Evaluate on the testing data.
print("Accuracy: {accuracy}".format(accuracy=clf.evaluate(x_test, y_test)[1]))
clf.export_model().save(os.path.join(args.path, "model.h5"))
| apache-2.0 | Python |
a9d7477238bae3c16da4bcab7c449cc592f5d1eb | fix typo | CIRCL/bgpranking-redis-api,CIRCL/bgpranking-redis-api,CIRCL/bgpranking-redis-api | example/export/init_redis.py | example/export/init_redis.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import bgpranking
import argparse
import redis
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Generate a list of all IPS for a day.')
parser.add_argument('-d', '--date', default=None, help='Date of the dump (YYYY-MM-DD)')
args = parser.parse_args()
date = args.date
if date is None:
date = bgpranking.get_default_date()
dates_sources = bgpranking.prepare_sources_by_dates(date, 1)
asns = bgpranking.existing_asns_timeframe(dates_sources)
r = redis.Redis(unix_socket_path='./redis_export.sock')
r.set('date', date)
for asn in asns:
timestamps = bgpranking.get_all_asn_timestamps(asn)
p = r.pipeline(False)
for ts in timestamps:
p.sadd('asn_ts', "{asn}_{ts}".format(asn=asn, ts=ts))
p.execute()
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import bgpranking
import argparse
import redis
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Generate a list of all IPS for a day.')
parser.add_argument('-d', '--date', default=None, help='Date of the dump (YYYY-MM-DD)')
args = parser.parse_args()
date = args.date
if date is None:
date = bgpranking.get_default_date()
dates_sources = bgpranking.prepare_sources_by_dates(date, 1)
asns = bgpranking.existing_asns_timeframe(dates_sources)
r = redis.Redis(unix_socket_path='./redis_export.sock')
r.set('date', date)
for asn in asns:
timestamps = bgpranking.get_all_asn_timestamps(asn)
p = r.pipeline(False)
for ts in timestamps:
p.sadd('asns_ts', "{asn}_{ts}".format(asn=asn, ts=ts))
p.execute()
| bsd-2-clause | Python |
c106b1b0403010c872b0315f76cc5d4cd7e1296f | put version number to 1.0 | BT-ojossen/odoo,minhtuancn/odoo,rahuldhote/odoo,laslabs/odoo,christophlsa/odoo,microcom/odoo,dezynetechnologies/odoo,TRESCLOUD/odoopub,cdrooom/odoo,avoinsystems/odoo,chiragjogi/odoo,CubicERP/odoo,chiragjogi/odoo,sv-dev1/odoo,Ichag/odoo,pedrobaeza/OpenUpgrade,CopeX/odoo,bealdav/OpenUpgrade,guerrerocarlos/odoo,abdellatifkarroum/odoo,bguillot/OpenUpgrade,KontorConsulting/odoo,FlorianLudwig/odoo,codekaki/odoo,bealdav/OpenUpgrade,frouty/odoogoeen,dsfsdgsbngfggb/odoo,provaleks/o8,camptocamp/ngo-addons-backport,blaggacao/OpenUpgrade,oasiswork/odoo,guewen/OpenUpgrade,apanju/GMIO_Odoo,odoo-turkiye/odoo,vnsofthe/odoo,abstract-open-solutions/OCB,diagramsoftware/odoo,ApuliaSoftware/odoo,PongPi/isl-odoo,synconics/odoo,slevenhagen/odoo-npg,VitalPet/odoo,acshan/odoo,andreparames/odoo,acshan/odoo,Maspear/odoo,sebalix/OpenUpgrade,gdgellatly/OCB1,BT-ojossen/odoo,FlorianLudwig/odoo,glovebx/odoo,dllsf/odootest,grap/OpenUpgrade,hifly/OpenUpgrade,alexcuellar/odoo,jusdng/odoo,charbeljc/OCB,gdgellatly/OCB1,jaxkodex/odoo,shivam1111/odoo,CatsAndDogsbvba/odoo,abdellatifkarroum/odoo,sadleader/odoo,joshuajan/odoo,massot/odoo,podemos-info/odoo,joariasl/odoo,javierTerry/odoo,vrenaville/ngo-addons-backport,srimai/odoo,Eric-Zhong/odoo,storm-computers/odoo,cpyou/odoo,vrenaville/ngo-addons-backport,Endika/OpenUpgrade,luiseduardohdbackup/odoo,goliveirab/odoo,SerpentCS/odoo,jiachenning/odoo,fevxie/odoo,Grirrane/odoo,zchking/odoo,simongoffin/website_version,rahuldhote/odoo,bkirui/odoo,nhomar/odoo-mirror,tinkhaven-organization/odoo,addition-it-solutions/project-all,joshuajan/odoo,pedrobaeza/odoo,chiragjogi/odoo,florian-dacosta/OpenUpgrade,mmbtba/odoo,credativUK/OCB,aviciimaxwell/odoo,feroda/odoo,jesramirez/odoo,xzYue/odoo,ramadhane/odoo,BT-astauder/odoo,shingonoide/odoo,demon-ru/iml-crm,fuselock/odoo,hubsaysnuaa/odoo,wangjun/odoo,mkieszek/odoo,lombritz/odoo,apanju/odoo,guerrerocarlos/odoo,deKupini/erp,minhtuancn/odoo,gorjuce/odoo,xujb
/odoo,pedrobaeza/OpenUpgrade,elmerdpadilla/iv,bguillot/OpenUpgrade,SerpentCS/odoo,juanalfonsopr/odoo,jolevq/odoopub,prospwro/odoo,codekaki/odoo,credativUK/OCB,Maspear/odoo,SerpentCS/odoo,MarcosCommunity/odoo,NL66278/OCB,dsfsdgsbngfggb/odoo,stephen144/odoo,BT-ojossen/odoo,ShineFan/odoo,guewen/OpenUpgrade,alexteodor/odoo,lsinfo/odoo,gavin-feng/odoo,incaser/odoo-odoo,ClearCorp-dev/odoo,ChanduERP/odoo,ShineFan/odoo,gvb/odoo,blaggacao/OpenUpgrade,prospwro/odoo,dfang/odoo,savoirfairelinux/OpenUpgrade,tarzan0820/odoo,charbeljc/OCB,fuhongliang/odoo,ubic135/odoo-design,gsmartway/odoo,goliveirab/odoo,leorochael/odoo,cedk/odoo,Ichag/odoo,CubicERP/odoo,jfpla/odoo,makinacorpus/odoo,pedrobaeza/OpenUpgrade,jpshort/odoo,arthru/OpenUpgrade,gavin-feng/odoo,syci/OCB,fevxie/odoo,JonathanStein/odoo,luistorresm/odoo,nagyistoce/odoo-dev-odoo,PongPi/isl-odoo,avoinsystems/odoo,demon-ru/iml-crm,odoo-turkiye/odoo,spadae22/odoo,OpenUpgrade-dev/OpenUpgrade,rschnapka/odoo,alqfahad/odoo,damdam-s/OpenUpgrade,jusdng/odoo,Codefans-fan/odoo,avoinsystems/odoo,ingadhoc/odoo,dllsf/odootest,nhomar/odoo,jaxkodex/odoo,hanicker/odoo,mkieszek/odoo,alexteodor/odoo,christophlsa/odoo,x111ong/odoo,hanicker/odoo,windedge/odoo,Ernesto99/odoo,hmen89/odoo,rubencabrera/odoo,draugiskisprendimai/odoo,microcom/odoo,microcom/odoo,cloud9UG/odoo,brijeshkesariya/odoo,0k/OpenUpgrade,abdellatifkarroum/odoo,rschnapka/odoo,doomsterinc/odoo,factorlibre/OCB,gvb/odoo,feroda/odoo,fgesora/odoo,alexcuellar/odoo,alexteodor/odoo,thanhacun/odoo,0k/OpenUpgrade,erkrishna9/odoo,Ernesto99/odoo,rschnapka/odoo,cdrooom/odoo,glovebx/odoo,syci/OCB,avoinsystems/odoo,markeTIC/OCB,fuselock/odoo,mkieszek/odoo,VitalPet/odoo,grap/OCB,BT-ojossen/odoo,nhomar/odoo-mirror,mkieszek/odoo,Noviat/odoo,ovnicraft/odoo,alexcuellar/odoo,ecosoft-odoo/odoo,massot/odoo,ygol/odoo,gdgellatly/OCB1,leorochael/odoo,cysnake4713/odoo,zchking/odoo,spadae22/odoo,incaser/odoo-odoo,Adel-Magebinary/odoo,realsaiko/odoo,Kilhog/odoo,hifly/OpenUpgrade,OpenPymeMx/OCB,Grirrane/odoo,i
hsanudin/odoo,JGarcia-Panach/odoo,florentx/OpenUpgrade,codekaki/odoo,bakhtout/odoo-educ,QianBIG/odoo,nhomar/odoo,luistorresm/odoo,hoatle/odoo,OpenUpgrade-dev/OpenUpgrade,gdgellatly/OCB1,OpenUpgrade/OpenUpgrade,jiachenning/odoo,savoirfairelinux/odoo,Nowheresly/odoo,hip-odoo/odoo,rahuldhote/odoo,srsman/odoo,Ichag/odoo,alqfahad/odoo,avoinsystems/odoo,havt/odoo,florian-dacosta/OpenUpgrade,x111ong/odoo,srimai/odoo,apanju/odoo,mszewczy/odoo,mustafat/odoo-1,SAM-IT-SA/odoo,vrenaville/ngo-addons-backport,shingonoide/odoo,SAM-IT-SA/odoo,AuyaJackie/odoo,numerigraphe/odoo,salaria/odoo,nexiles/odoo,colinnewell/odoo,takis/odoo,gvb/odoo,Nowheresly/odoo,nitinitprof/odoo,windedge/odoo,alhashash/odoo,incaser/odoo-odoo,collex100/odoo,RafaelTorrealba/odoo,leorochael/odoo,stonegithubs/odoo,nitinitprof/odoo,shaufi/odoo,n0m4dz/odoo,Drooids/odoo,Drooids/odoo,rdeheele/odoo,BT-fgarbely/odoo,ecosoft-odoo/odoo,poljeff/odoo,rowemoore/odoo,sinbazhou/odoo,mustafat/odoo-1,numerigraphe/odoo,shaufi/odoo,cpyou/odoo,janocat/odoo,wangjun/odoo,alqfahad/odoo,VitalPet/odoo,abenzbiria/clients_odoo,tvibliani/odoo,draugiskisprendimai/odoo,credativUK/OCB,ShineFan/odoo,camptocamp/ngo-addons-backport,rowemoore/odoo,Ernesto99/odoo,dezynetechnologies/odoo,Drooids/odoo,bkirui/odoo,podemos-info/odoo,sebalix/OpenUpgrade,nuuuboo/odoo,elmerdpadilla/iv,abenzbiria/clients_odoo,Bachaco-ve/odoo,OpenUpgrade/OpenUpgrade,MarcosCommunity/odoo,camptocamp/ngo-addons-backport,papouso/odoo,prospwro/odoo,Daniel-CA/odoo,fossoult/odoo,Adel-Magebinary/odoo,BT-rmartin/odoo,jfpla/odoo,hbrunn/OpenUpgrade,jiachenning/odoo,papouso/odoo,jolevq/odoopub,jiangzhixiao/odoo,Endika/OpenUpgrade,hip-odoo/odoo,BT-rmartin/odoo,nuuuboo/odoo,guerrerocarlos/odoo,sysadminmatmoz/OCB,ThinkOpen-Solutions/odoo,oasiswork/odoo,fuselock/odoo,Elico-Corp/odoo_OCB,frouty/odoogoeen,omprakasha/odoo,gavin-feng/odoo,rschnapka/odoo,fgesora/odoo,grap/OpenUpgrade,Grirrane/odoo,ubic135/odoo-design,0k/odoo,oliverhr/odoo,hifly/OpenUpgrade,hip-odoo/odoo,savoirfairelinux/Ope
nUpgrade,hmen89/odoo,ApuliaSoftware/odoo,virgree/odoo,naousse/odoo,bplancher/odoo,sysadminmatmoz/OCB,codekaki/odoo,Kilhog/odoo,gvb/odoo,osvalr/odoo,cysnake4713/odoo,dfang/odoo,javierTerry/odoo,andreparames/odoo,numerigraphe/odoo,jpshort/odoo,kirca/OpenUpgrade,oihane/odoo,tvtsoft/odoo8,fdvarela/odoo8,rowemoore/odoo,bguillot/OpenUpgrade,dkubiak789/odoo,Endika/odoo,pedrobaeza/odoo,xujb/odoo,lombritz/odoo,gdgellatly/OCB1,fuselock/odoo,Drooids/odoo,fjbatresv/odoo,hifly/OpenUpgrade,laslabs/odoo,fossoult/odoo,oliverhr/odoo,alqfahad/odoo,markeTIC/OCB,tarzan0820/odoo,MarcosCommunity/odoo,ecosoft-odoo/odoo,storm-computers/odoo,VielSoft/odoo,Eric-Zhong/odoo,apanju/odoo,sergio-incaser/odoo,florentx/OpenUpgrade,thanhacun/odoo,shaufi/odoo,collex100/odoo,Gitlab11/odoo,christophlsa/odoo,acshan/odoo,Antiun/odoo,andreparames/odoo,dllsf/odootest,rgeleta/odoo,cloud9UG/odoo,OpenPymeMx/OCB,ihsanudin/odoo,spadae22/odoo,ramitalat/odoo,ramadhane/odoo,rschnapka/odoo,charbeljc/OCB,apanju/GMIO_Odoo,rgeleta/odoo,odooindia/odoo,OpusVL/odoo,oasiswork/odoo,fgesora/odoo,srsman/odoo,codekaki/odoo,lombritz/odoo,fdvarela/odoo8,KontorConsulting/odoo,credativUK/OCB,Gitlab11/odoo,optima-ict/odoo,laslabs/odoo,blaggacao/OpenUpgrade,agrista/odoo-saas,minhtuancn/odoo,credativUK/OCB,Codefans-fan/odoo,poljeff/odoo,nexiles/odoo,andreparames/odoo,massot/odoo,hbrunn/OpenUpgrade,cedk/odoo,virgree/odoo,avoinsystems/odoo,apanju/odoo,JGarcia-Panach/odoo,ccomb/OpenUpgrade,windedge/odoo,dsfsdgsbngfggb/odoo,OpenPymeMx/OCB,cloud9UG/odoo,frouty/odoogoeen,nexiles/odoo,mszewczy/odoo,jeasoft/odoo,odootr/odoo,OSSESAC/odoopubarquiluz,dkubiak789/odoo,shingonoide/odoo,arthru/OpenUpgrade,tinkhaven-organization/odoo,savoirfairelinux/odoo,mlaitinen/odoo,NeovaHealth/odoo,tangyiyong/odoo,srimai/odoo,synconics/odoo,odoo-turkiye/odoo,ccomb/OpenUpgrade,grap/OCB,dariemp/odoo,inspyration/odoo,jfpla/odoo,PongPi/isl-odoo,ChanduERP/odoo,Ichag/odoo,Antiun/odoo,BT-astauder/odoo,dllsf/odootest,KontorConsulting/odoo,lightcn/odoo,CatsAndDogsbvba/
odoo,nexiles/odoo,SAM-IT-SA/odoo,ecosoft-odoo/odoo,agrista/odoo-saas,NL66278/OCB,havt/odoo,oliverhr/odoo,hopeall/odoo,jeasoft/odoo,nhomar/odoo-mirror,shivam1111/odoo,shaufi10/odoo,Bachaco-ve/odoo,jolevq/odoopub,fjbatresv/odoo,addition-it-solutions/project-all,florentx/OpenUpgrade,JCA-Developpement/Odoo,leoliujie/odoo,thanhacun/odoo,PongPi/isl-odoo,odootr/odoo,OpenUpgrade-dev/OpenUpgrade,ojengwa/odoo,Nowheresly/odoo,CubicERP/odoo,leoliujie/odoo,erkrishna9/odoo,stonegithubs/odoo,shaufi10/odoo,poljeff/odoo,gorjuce/odoo,odoousers2014/odoo,fgesora/odoo,draugiskisprendimai/odoo,vrenaville/ngo-addons-backport,cloud9UG/odoo,sadleader/odoo,OpenPymeMx/OCB,numerigraphe/odoo,bwrsandman/OpenUpgrade,joariasl/odoo,ramadhane/odoo,omprakasha/odoo,Gitlab11/odoo,rschnapka/odoo,Maspear/odoo,csrocha/OpenUpgrade,markeTIC/OCB,grap/OCB,fossoult/odoo,alhashash/odoo,fuhongliang/odoo,ojengwa/odoo,brijeshkesariya/odoo,juanalfonsopr/odoo,mlaitinen/odoo,lgscofield/odoo,lombritz/odoo,javierTerry/odoo,damdam-s/OpenUpgrade,papouso/odoo,sinbazhou/odoo,shivam1111/odoo,blaggacao/OpenUpgrade,slevenhagen/odoo,alqfahad/odoo,jaxkodex/odoo,fjbatresv/odoo,dezynetechnologies/odoo,nagyistoce/odoo-dev-odoo,jusdng/odoo,juanalfonsopr/odoo,MarcosCommunity/odoo,deKupini/erp,kittiu/odoo,bkirui/odoo,abenzbiria/clients_odoo,takis/odoo,acshan/odoo,stonegithubs/odoo,fevxie/odoo,AuyaJackie/odoo,wangjun/odoo,mmbtba/odoo,ehirt/odoo,mmbtba/odoo,leoliujie/odoo,SAM-IT-SA/odoo,charbeljc/OCB,draugiskisprendimai/odoo,omprakasha/odoo,ApuliaSoftware/odoo,mustafat/odoo-1,makinacorpus/odoo,jusdng/odoo,sv-dev1/odoo,SerpentCS/odoo,nhomar/odoo-mirror,Bachaco-ve/odoo,addition-it-solutions/project-all,codekaki/odoo,osvalr/odoo,GauravSahu/odoo,factorlibre/OCB,deKupini/erp,Elico-Corp/odoo_OCB,tinkerthaler/odoo,incaser/odoo-odoo,acshan/odoo,osvalr/odoo,aviciimaxwell/odoo,jpshort/odoo,funkring/fdoo,factorlibre/OCB,aviciimaxwell/odoo,charbeljc/OCB,vrenaville/ngo-addons-backport,aviciimaxwell/odoo,dezynetechnologies/odoo,collex100/odoo,dalegr
egory/odoo,addition-it-solutions/project-all,Eric-Zhong/odoo,stonegithubs/odoo,grap/OpenUpgrade,Gitlab11/odoo,srsman/odoo,alhashash/odoo,dgzurita/odoo,fevxie/odoo,jaxkodex/odoo,takis/odoo,storm-computers/odoo,kifcaliph/odoo,oasiswork/odoo,VielSoft/odoo,juanalfonsopr/odoo,OpusVL/odoo,ApuliaSoftware/odoo,alexcuellar/odoo,Endika/odoo,ubic135/odoo-design,srimai/odoo,QianBIG/odoo,AuyaJackie/odoo,cloud9UG/odoo,sadleader/odoo,Codefans-fan/odoo,RafaelTorrealba/odoo,ChanduERP/odoo,ramitalat/odoo,kifcaliph/odoo,cedk/odoo,matrixise/odoo,feroda/odoo,dgzurita/odoo,funkring/fdoo,apocalypsebg/odoo,rgeleta/odoo,dllsf/odootest,AuyaJackie/odoo,ingadhoc/odoo,joariasl/odoo,tinkhaven-organization/odoo,hbrunn/OpenUpgrade,alhashash/odoo,poljeff/odoo,cpyou/odoo,OSSESAC/odoopubarquiluz,OpenUpgrade-dev/OpenUpgrade,sinbazhou/odoo,SerpentCS/odoo,avoinsystems/odoo,matrixise/odoo,Codefans-fan/odoo,ovnicraft/odoo,virgree/odoo,tangyiyong/odoo,tvibliani/odoo,BT-ojossen/odoo,cysnake4713/odoo,kittiu/odoo,BT-fgarbely/odoo,draugiskisprendimai/odoo,jiangzhixiao/odoo,hoatle/odoo,mszewczy/odoo,fuselock/odoo,sadleader/odoo,nuncjo/odoo,frouty/odoogoeen,frouty/odoogoeen,CatsAndDogsbvba/odoo,draugiskisprendimai/odoo,sebalix/OpenUpgrade,idncom/odoo,slevenhagen/odoo,sve-odoo/odoo,jesramirez/odoo,steedos/odoo,dalegregory/odoo,BT-fgarbely/odoo,apanju/odoo,rdeheele/odoo,Nowheresly/odoo,OpenUpgrade/OpenUpgrade,srimai/odoo,Drooids/odoo,juanalfonsopr/odoo,tinkhaven-organization/odoo,QianBIG/odoo,mlaitinen/odoo,xzYue/odoo,sergio-incaser/odoo,dezynetechnologies/odoo,naousse/odoo,patmcb/odoo,Bachaco-ve/odoo,rubencabrera/odoo,ihsanudin/odoo,KontorConsulting/odoo,hmen89/odoo,havt/odoo,patmcb/odoo,Codefans-fan/odoo,gsmartway/odoo,Nowheresly/odoo,hubsaysnuaa/odoo,x111ong/odoo,steedos/odoo,shivam1111/odoo,camptocamp/ngo-addons-backport,ramadhane/odoo,jiangzhixiao/odoo,elmerdpadilla/iv,blaggacao/OpenUpgrade,cpyou/odoo,prospwro/odoo,provaleks/o8,hoatle/odoo,addition-it-solutions/project-all,VielSoft/odoo,damdam-s/OpenUpgrade,c
olinnewell/odoo,massot/odoo,klunwebale/odoo,Nick-OpusVL/odoo,OpenUpgrade/OpenUpgrade,hanicker/odoo,rowemoore/odoo,tarzan0820/odoo,florian-dacosta/OpenUpgrade,oliverhr/odoo,Codefans-fan/odoo,ccomb/OpenUpgrade,tarzan0820/odoo,podemos-info/odoo,provaleks/o8,cedk/odoo,highco-groupe/odoo,x111ong/odoo,florian-dacosta/OpenUpgrade,matrixise/odoo,fevxie/odoo,n0m4dz/odoo,odooindia/odoo,0k/OpenUpgrade,NL66278/OCB,nhomar/odoo,christophlsa/odoo,ccomb/OpenUpgrade,lightcn/odoo,wangjun/odoo,hassoon3/odoo,odoo-turkiye/odoo,fdvarela/odoo8,takis/odoo,hifly/OpenUpgrade,ClearCorp-dev/odoo,grap/OCB,Nick-OpusVL/odoo,OpusVL/odoo,MarcosCommunity/odoo,bakhtout/odoo-educ,lightcn/odoo,Adel-Magebinary/odoo,hopeall/odoo,kittiu/odoo,erkrishna9/odoo,rowemoore/odoo,stonegithubs/odoo,waytai/odoo,microcom/odoo,Adel-Magebinary/odoo,mszewczy/odoo,ihsanudin/odoo,abenzbiria/clients_odoo,kittiu/odoo,shaufi10/odoo,CatsAndDogsbvba/odoo,JonathanStein/odoo,ingadhoc/odoo,apocalypsebg/odoo,dkubiak789/odoo,fuselock/odoo,ecosoft-odoo/odoo,slevenhagen/odoo,dsfsdgsbngfggb/odoo,funkring/fdoo,fgesora/odoo,mvaled/OpenUpgrade,joariasl/odoo,damdam-s/OpenUpgrade,Gitlab11/odoo,lsinfo/odoo,syci/OCB,RafaelTorrealba/odoo,pedrobaeza/odoo,papouso/odoo,agrista/odoo-saas,demon-ru/iml-crm,lgscofield/odoo,hubsaysnuaa/odoo,SAM-IT-SA/odoo,NL66278/OCB,pedrobaeza/OpenUpgrade,JonathanStein/odoo,odoousers2014/odoo,bkirui/odoo,synconics/odoo,waytai/odoo,cloud9UG/odoo,vnsofthe/odoo,sv-dev1/odoo,sinbazhou/odoo,ramitalat/odoo,tinkerthaler/odoo,eino-makitalo/odoo,rowemoore/odoo,ujjwalwahi/odoo,cpyou/odoo,aviciimaxwell/odoo,salaria/odoo,steedos/odoo,ujjwalwahi/odoo,collex100/odoo,Adel-Magebinary/odoo,grap/OCB,ojengwa/odoo,steedos/odoo,nagyistoce/odoo-dev-odoo,shivam1111/odoo,Adel-Magebinary/odoo,hip-odoo/odoo,naousse/odoo,spadae22/odoo,realsaiko/odoo,stephen144/odoo,JonathanStein/odoo,rahuldhote/odoo,RafaelTorrealba/odoo,gorjuce/odoo,mmbtba/odoo,hbrunn/OpenUpgrade,CopeX/odoo,savoirfairelinux/odoo,patmcb/odoo,waytai/odoo,shaufi/odoo,jfpla/odoo
,jpshort/odoo,odoo-turkiye/odoo,PongPi/isl-odoo,chiragjogi/odoo,ccomb/OpenUpgrade,ramitalat/odoo,kirca/OpenUpgrade,inspyration/odoo,frouty/odoogoeen,Nowheresly/odoo,glovebx/odoo,JCA-Developpement/Odoo,NeovaHealth/odoo,syci/OCB,lightcn/odoo,datenbetrieb/odoo,JonathanStein/odoo,colinnewell/odoo,thanhacun/odoo,cdrooom/odoo,podemos-info/odoo,steedos/odoo,jeasoft/odoo,sinbazhou/odoo,apocalypsebg/odoo,realsaiko/odoo,havt/odoo,tinkerthaler/odoo,xujb/odoo,javierTerry/odoo,glovebx/odoo,ClearCorp-dev/odoo,mvaled/OpenUpgrade,nexiles/odoo,jeasoft/odoo,Noviat/odoo,fdvarela/odoo8,QianBIG/odoo,incaser/odoo-odoo,Bachaco-ve/odoo,bobisme/odoo,ovnicraft/odoo,Antiun/odoo,nuncjo/odoo,frouty/odoo_oph,ehirt/odoo,pedrobaeza/odoo,realsaiko/odoo,provaleks/o8,mvaled/OpenUpgrade,lightcn/odoo,leoliujie/odoo,vnsofthe/odoo,naousse/odoo,elmerdpadilla/iv,bplancher/odoo,jeasoft/odoo,goliveirab/odoo,fossoult/odoo,MarcosCommunity/odoo,janocat/odoo,shivam1111/odoo,eino-makitalo/odoo,tinkhaven-organization/odoo,odootr/odoo,grap/OCB,BT-fgarbely/odoo,odooindia/odoo,kittiu/odoo,srimai/odoo,osvalr/odoo,joariasl/odoo,chiragjogi/odoo,BT-ojossen/odoo,cysnake4713/odoo,hoatle/odoo,omprakasha/odoo,luiseduardohdbackup/odoo,VielSoft/odoo,BT-ojossen/odoo,BT-fgarbely/odoo,stephen144/odoo,datenbetrieb/odoo,numerigraphe/odoo,nagyistoce/odoo-dev-odoo,agrista/odoo-saas,mlaitinen/odoo,Adel-Magebinary/odoo,feroda/odoo,cedk/odoo,hip-odoo/odoo,nagyistoce/odoo-dev-odoo,nitinitprof/odoo,PongPi/isl-odoo,lsinfo/odoo,shaufi10/odoo,omprakasha/odoo,VielSoft/odoo,ClearCorp-dev/odoo,hifly/OpenUpgrade,Nick-OpusVL/odoo,rubencabrera/odoo,dezynetechnologies/odoo,sergio-incaser/odoo,abstract-open-solutions/OCB,CopeX/odoo,brijeshkesariya/odoo,dsfsdgsbngfggb/odoo,eino-makitalo/odoo,virgree/odoo,gavin-feng/odoo,simongoffin/website_version,bealdav/OpenUpgrade,matrixise/odoo,abdellatifkarroum/odoo,tarzan0820/odoo,laslabs/odoo,lgscofield/odoo,ovnicraft/odoo,pplatek/odoo,nuuuboo/odoo,ingadhoc/odoo,aviciimaxwell/odoo,highco-groupe/odoo,bealdav/Op
enUpgrade,mmbtba/odoo,KontorConsulting/odoo,ChanduERP/odoo,VitalPet/odoo,guerrerocarlos/odoo,deKupini/erp,factorlibre/OCB,javierTerry/odoo,idncom/odoo,ygol/odoo,fossoult/odoo,TRESCLOUD/odoopub,thanhacun/odoo,sve-odoo/odoo,vnsofthe/odoo,OpenPymeMx/OCB,joshuajan/odoo,ygol/odoo,leorochael/odoo,shingonoide/odoo,charbeljc/OCB,apanju/GMIO_Odoo,GauravSahu/odoo,NeovaHealth/odoo,rschnapka/odoo,ehirt/odoo,bwrsandman/OpenUpgrade,mlaitinen/odoo,apanju/GMIO_Odoo,ubic135/odoo-design,dfang/odoo,alexcuellar/odoo,minhtuancn/odoo,bguillot/OpenUpgrade,kybriainfotech/iSocioCRM,BT-rmartin/odoo,sve-odoo/odoo,tinkerthaler/odoo,ygol/odoo,nitinitprof/odoo,jfpla/odoo,makinacorpus/odoo,luiseduardohdbackup/odoo,n0m4dz/odoo,janocat/odoo,diagramsoftware/odoo,fuhongliang/odoo,leorochael/odoo,osvalr/odoo,dezynetechnologies/odoo,diagramsoftware/odoo,synconics/odoo,bobisme/odoo,NeovaHealth/odoo,goliveirab/odoo,Kilhog/odoo,brijeshkesariya/odoo,andreparames/odoo,OpusVL/odoo,csrocha/OpenUpgrade,damdam-s/OpenUpgrade,arthru/OpenUpgrade,mmbtba/odoo,VitalPet/odoo,papouso/odoo,bobisme/odoo,xzYue/odoo,0k/odoo,hassoon3/odoo,ihsanudin/odoo,mustafat/odoo-1,tinkerthaler/odoo,simongoffin/website_version,ShineFan/odoo,Danisan/odoo-1,luiseduardohdbackup/odoo,addition-it-solutions/project-all,makinacorpus/odoo,pplatek/odoo,Codefans-fan/odoo,salaria/odoo,CatsAndDogsbvba/odoo,alexcuellar/odoo,brijeshkesariya/odoo,stonegithubs/odoo,ramadhane/odoo,sadleader/odoo,provaleks/o8,kybriainfotech/iSocioCRM,salaria/odoo,bwrsandman/OpenUpgrade,glovebx/odoo,OpenPymeMx/OCB,dariemp/odoo,joariasl/odoo,vrenaville/ngo-addons-backport,blaggacao/OpenUpgrade,shaufi/odoo,numerigraphe/odoo,srsman/odoo,synconics/odoo,fuhongliang/odoo,bakhtout/odoo-educ,waytai/odoo,fgesora/odoo,acshan/odoo,eino-makitalo/odoo,Grirrane/odoo,guerrerocarlos/odoo,AuyaJackie/odoo,optima-ict/odoo,jusdng/odoo,srsman/odoo,camptocamp/ngo-addons-backport,fuselock/odoo,rubencabrera/odoo,alexteodor/odoo,gsmartway/odoo,QianBIG/odoo,abdellatifkarroum/odoo,apanju/GMIO_Odoo,
JCA-Developpement/Odoo,VielSoft/odoo,leoliujie/odoo,bakhtout/odoo-educ,OpenPymeMx/OCB,tvibliani/odoo,tarzan0820/odoo,Danisan/odoo-1,gorjuce/odoo,slevenhagen/odoo-npg,ubic135/odoo-design,gvb/odoo,FlorianLudwig/odoo,erkrishna9/odoo,Ernesto99/odoo,andreparames/odoo,provaleks/o8,rdeheele/odoo,javierTerry/odoo,jpshort/odoo,0k/OpenUpgrade,hifly/OpenUpgrade,Eric-Zhong/odoo,idncom/odoo,leorochael/odoo,rahuldhote/odoo,slevenhagen/odoo,ihsanudin/odoo,collex100/odoo,bguillot/OpenUpgrade,kirca/OpenUpgrade,dariemp/odoo,odooindia/odoo,Daniel-CA/odoo,bguillot/OpenUpgrade,klunwebale/odoo,JGarcia-Panach/odoo,ThinkOpen-Solutions/odoo,nuuuboo/odoo,OpenUpgrade/OpenUpgrade,CatsAndDogsbvba/odoo,sebalix/OpenUpgrade,minhtuancn/odoo,savoirfairelinux/odoo,dariemp/odoo,pplatek/odoo,Maspear/odoo,dfang/odoo,hanicker/odoo,diagramsoftware/odoo,leoliujie/odoo,optima-ict/odoo,laslabs/odoo,NeovaHealth/odoo,fgesora/odoo,osvalr/odoo,oihane/odoo,markeTIC/OCB,datenbetrieb/odoo,kirca/OpenUpgrade,eino-makitalo/odoo,acshan/odoo,gsmartway/odoo,joshuajan/odoo,apocalypsebg/odoo,dgzurita/odoo,Nowheresly/odoo,havt/odoo,vnsofthe/odoo,Antiun/odoo,oliverhr/odoo,arthru/OpenUpgrade,spadae22/odoo,fevxie/odoo,florentx/OpenUpgrade,gavin-feng/odoo,nhomar/odoo,odoousers2014/odoo,bakhtout/odoo-educ,apocalypsebg/odoo,bwrsandman/OpenUpgrade,salaria/odoo,dfang/odoo,ThinkOpen-Solutions/odoo,elmerdpadilla/iv,luiseduardohdbackup/odoo,gsmartway/odoo,frouty/odoo_oph,wangjun/odoo,diagramsoftware/odoo,patmcb/odoo,hmen89/odoo,doomsterinc/odoo,mkieszek/odoo,Endika/odoo,rdeheele/odoo,JGarcia-Panach/odoo,ehirt/odoo,osvalr/odoo,oliverhr/odoo,shingonoide/odoo,Noviat/odoo,nexiles/odoo,ovnicraft/odoo,OpenUpgrade-dev/OpenUpgrade,alqfahad/odoo,jiangzhixiao/odoo,naousse/odoo,nuncjo/odoo,tinkhaven-organization/odoo,AuyaJackie/odoo,bwrsandman/OpenUpgrade,mszewczy/odoo,Antiun/odoo,mustafat/odoo-1,Eric-Zhong/odoo,florian-dacosta/OpenUpgrade,nitinitprof/odoo,CubicERP/odoo,Elico-Corp/odoo_OCB,xzYue/odoo,naousse/odoo,hoatle/odoo,GauravSahu/odoo,jusd
ng/odoo,tvibliani/odoo,klunwebale/odoo,x111ong/odoo,syci/OCB,gorjuce/odoo,salaria/odoo,OpenUpgrade-dev/OpenUpgrade,slevenhagen/odoo,dariemp/odoo,PongPi/isl-odoo,jiangzhixiao/odoo,makinacorpus/odoo,dfang/odoo,QianBIG/odoo,luistorresm/odoo,nuuuboo/odoo,dkubiak789/odoo,virgree/odoo,waytai/odoo,hip-odoo/odoo,tangyiyong/odoo,christophlsa/odoo,srimai/odoo,jeasoft/odoo,mkieszek/odoo,kybriainfotech/iSocioCRM,hanicker/odoo,nagyistoce/odoo-dev-odoo,glovebx/odoo,SerpentCS/odoo,jesramirez/odoo,kybriainfotech/iSocioCRM,ygol/odoo,xujb/odoo,prospwro/odoo,takis/odoo,hopeall/odoo,hbrunn/OpenUpgrade,ramitalat/odoo,Kilhog/odoo,incaser/odoo-odoo,gsmartway/odoo,poljeff/odoo,luistorresm/odoo,steedos/odoo,0k/OpenUpgrade,FlorianLudwig/odoo,massot/odoo,vrenaville/ngo-addons-backport,goliveirab/odoo,bakhtout/odoo-educ,cloud9UG/odoo,odoousers2014/odoo,ClearCorp-dev/odoo,sv-dev1/odoo,grap/OpenUpgrade,eino-makitalo/odoo,Daniel-CA/odoo,jesramirez/odoo,GauravSahu/odoo,jaxkodex/odoo,Antiun/odoo,doomsterinc/odoo,srsman/odoo,kirca/OpenUpgrade,sebalix/OpenUpgrade,vnsofthe/odoo,alhashash/odoo,guerrerocarlos/odoo,mszewczy/odoo,GauravSahu/odoo,bobisme/odoo,stephen144/odoo,dariemp/odoo,matrixise/odoo,credativUK/OCB,oihane/odoo,bplancher/odoo,frouty/odoo_oph,jesramirez/odoo,oasiswork/odoo,CopeX/odoo,fjbatresv/odoo,wangjun/odoo,hassoon3/odoo,Endika/OpenUpgrade,janocat/odoo,ojengwa/odoo,bobisme/odoo,bkirui/odoo,sebalix/OpenUpgrade,takis/odoo,tinkerthaler/odoo,grap/OCB,waytai/odoo,ThinkOpen-Solutions/odoo,nhomar/odoo,synconics/odoo,xujb/odoo,fossoult/odoo,nuncjo/odoo,inspyration/odoo,ygol/odoo,rgeleta/odoo,zchking/odoo,lsinfo/odoo,tvibliani/odoo,funkring/fdoo,n0m4dz/odoo,abenzbiria/clients_odoo,pplatek/odoo,ehirt/odoo,Ernesto99/odoo,ChanduERP/odoo,CubicERP/odoo,ShineFan/odoo,Maspear/odoo,VitalPet/odoo,shaufi/odoo,shivam1111/odoo,odootr/odoo,doomsterinc/odoo,inspyration/odoo,janocat/odoo,incaser/odoo-odoo,JonathanStein/odoo,klunwebale/odoo,optima-ict/odoo,Maspear/odoo,Ichag/odoo,gavin-feng/odoo,lsinfo/odoo,sv
e-odoo/odoo,jeasoft/odoo,Nick-OpusVL/odoo,hopeall/odoo,rgeleta/odoo,apocalypsebg/odoo,sv-dev1/odoo,AuyaJackie/odoo,draugiskisprendimai/odoo,sysadminmatmoz/OCB,realsaiko/odoo,feroda/odoo,RafaelTorrealba/odoo,oihane/odoo,optima-ict/odoo,chiragjogi/odoo,VielSoft/odoo,diagramsoftware/odoo,nuuuboo/odoo,diagramsoftware/odoo,Grirrane/odoo,ovnicraft/odoo,gvb/odoo,lgscofield/odoo,xzYue/odoo,jfpla/odoo,zchking/odoo,odootr/odoo,frouty/odoo_oph,dsfsdgsbngfggb/odoo,rgeleta/odoo,omprakasha/odoo,doomsterinc/odoo,feroda/odoo,csrocha/OpenUpgrade,frouty/odoo_oph,Kilhog/odoo,mvaled/OpenUpgrade,cdrooom/odoo,rdeheele/odoo,RafaelTorrealba/odoo,dgzurita/odoo,ihsanudin/odoo,Nick-OpusVL/odoo,charbeljc/OCB,0k/odoo,shingonoide/odoo,windedge/odoo,bealdav/OpenUpgrade,klunwebale/odoo,stephen144/odoo,bwrsandman/OpenUpgrade,jiangzhixiao/odoo,tangyiyong/odoo,gavin-feng/odoo,janocat/odoo,dgzurita/odoo,Gitlab11/odoo,microcom/odoo,spadae22/odoo,oliverhr/odoo,gdgellatly/OCB1,nhomar/odoo,idncom/odoo,agrista/odoo-saas,codekaki/odoo,savoirfairelinux/OpenUpgrade,CubicERP/odoo,sve-odoo/odoo,Danisan/odoo-1,ThinkOpen-Solutions/odoo,mlaitinen/odoo,dkubiak789/odoo,bguillot/OpenUpgrade,klunwebale/odoo,BT-astauder/odoo,simongoffin/website_version,apocalypsebg/odoo,Daniel-CA/odoo,alhashash/odoo,fuhongliang/odoo,csrocha/OpenUpgrade,hoatle/odoo,tvtsoft/odoo8,simongoffin/website_version,ramadhane/odoo,demon-ru/iml-crm,lightcn/odoo,VitalPet/odoo,alexcuellar/odoo,shaufi10/odoo,markeTIC/OCB,bwrsandman/OpenUpgrade,ujjwalwahi/odoo,rschnapka/odoo,datenbetrieb/odoo,prospwro/odoo,guerrerocarlos/odoo,makinacorpus/odoo,luistorresm/odoo,Noviat/odoo,ujjwalwahi/odoo,rubencabrera/odoo,ehirt/odoo,Endika/odoo,datenbetrieb/odoo,odoo-turkiye/odoo,shaufi10/odoo,OpenPymeMx/OCB,colinnewell/odoo,vrenaville/ngo-addons-backport,OpenUpgrade/OpenUpgrade,jolevq/odoopub,Kilhog/odoo,mvaled/OpenUpgrade,ramadhane/odoo,windedge/odoo,doomsterinc/odoo,csrocha/OpenUpgrade,kybriainfotech/iSocioCRM,provaleks/o8,dalegregory/odoo,Nick-OpusVL/odoo,bkirui/o
doo,vnsofthe/odoo,kifcaliph/odoo,leorochael/odoo,fjbatresv/odoo,slevenhagen/odoo-npg,odootr/odoo,xzYue/odoo,windedge/odoo,rubencabrera/odoo,bakhtout/odoo-educ,papouso/odoo,camptocamp/ngo-addons-backport,klunwebale/odoo,bkirui/odoo,bobisme/odoo,abdellatifkarroum/odoo,grap/OpenUpgrade,cedk/odoo,gvb/odoo,kittiu/odoo,bplancher/odoo,grap/OpenUpgrade,takis/odoo,tangyiyong/odoo,collex100/odoo,minhtuancn/odoo,nagyistoce/odoo-dev-odoo,ojengwa/odoo,lombritz/odoo,luiseduardohdbackup/odoo,gsmartway/odoo,sinbazhou/odoo,podemos-info/odoo,tangyiyong/odoo,juanalfonsopr/odoo,kittiu/odoo,bealdav/OpenUpgrade,ccomb/OpenUpgrade,nexiles/odoo,havt/odoo,nhomar/odoo-mirror,MarcosCommunity/odoo,pedrobaeza/odoo,BT-astauder/odoo,patmcb/odoo,savoirfairelinux/odoo,papouso/odoo,glovebx/odoo,OSSESAC/odoopubarquiluz,dalegregory/odoo,SerpentCS/odoo,andreparames/odoo,xujb/odoo,pedrobaeza/odoo,gdgellatly/OCB1,ingadhoc/odoo,CopeX/odoo,wangjun/odoo,eino-makitalo/odoo,KontorConsulting/odoo,xzYue/odoo,patmcb/odoo,Drooids/odoo,0k/odoo,ApuliaSoftware/odoo,dariemp/odoo,factorlibre/OCB,Endika/odoo,nuuuboo/odoo,sergio-incaser/odoo,Kilhog/odoo,Ernesto99/odoo,idncom/odoo,Noviat/odoo,Ichag/odoo,lombritz/odoo,luiseduardohdbackup/odoo,jpshort/odoo,fjbatresv/odoo,feroda/odoo,patmcb/odoo,hubsaysnuaa/odoo,joariasl/odoo,OSSESAC/odoopubarquiluz,dgzurita/odoo,dalegregory/odoo,abstract-open-solutions/OCB,srsman/odoo,rubencabrera/odoo,poljeff/odoo,Elico-Corp/odoo_OCB,pedrobaeza/OpenUpgrade,hoatle/odoo,guewen/OpenUpgrade,GauravSahu/odoo,Nick-OpusVL/odoo,lgscofield/odoo,makinacorpus/odoo,markeTIC/OCB,guewen/OpenUpgrade,alexteodor/odoo,funkring/fdoo,apanju/odoo,mlaitinen/odoo,fdvarela/odoo8,jpshort/odoo,abdellatifkarroum/odoo,sysadminmatmoz/OCB,ChanduERP/odoo,Endika/OpenUpgrade,JCA-Developpement/Odoo,sergio-incaser/odoo,naousse/odoo,colinnewell/odoo,NL66278/OCB,BT-rmartin/odoo,kifcaliph/odoo,TRESCLOUD/odoopub,poljeff/odoo,BT-rmartin/odoo,Ernesto99/odoo,storm-computers/odoo,alqfahad/odoo,ecosoft-odoo/odoo,ThinkOpen-Solutions/o
doo,blaggacao/OpenUpgrade,GauravSahu/odoo,mustafat/odoo-1,tvtsoft/odoo8,FlorianLudwig/odoo,ygol/odoo,pplatek/odoo,Eric-Zhong/odoo,hassoon3/odoo,NeovaHealth/odoo,pplatek/odoo,gorjuce/odoo,jiachenning/odoo,mustafat/odoo-1,ojengwa/odoo,OpenUpgrade/OpenUpgrade,credativUK/OCB,damdam-s/OpenUpgrade,janocat/odoo,sysadminmatmoz/OCB,highco-groupe/odoo,apanju/GMIO_Odoo,nitinitprof/odoo,Daniel-CA/odoo,aviciimaxwell/odoo,camptocamp/ngo-addons-backport,bplancher/odoo,Eric-Zhong/odoo,jeasoft/odoo,Gitlab11/odoo,credativUK/OCB,sv-dev1/odoo,tvibliani/odoo,doomsterinc/odoo,demon-ru/iml-crm,minhtuancn/odoo,odoousers2014/odoo,thanhacun/odoo,kifcaliph/odoo,BT-astauder/odoo,tvtsoft/odoo8,odooindia/odoo,syci/OCB,OSSESAC/odoopubarquiluz,Grirrane/odoo,camptocamp/ngo-addons-backport,savoirfairelinux/odoo,ehirt/odoo,lsinfo/odoo,n0m4dz/odoo,JCA-Developpement/Odoo,dsfsdgsbngfggb/odoo,virgree/odoo,hopeall/odoo,podemos-info/odoo,slevenhagen/odoo-npg,x111ong/odoo,hubsaysnuaa/odoo,Bachaco-ve/odoo,goliveirab/odoo,odootr/odoo,hbrunn/OpenUpgrade,ingadhoc/odoo,oasiswork/odoo,tvibliani/odoo,kirca/OpenUpgrade,stonegithubs/odoo,sysadminmatmoz/OCB,arthru/OpenUpgrade,ChanduERP/odoo,BT-rmartin/odoo,odoousers2014/odoo,lombritz/odoo,christophlsa/odoo,luistorresm/odoo,kybriainfotech/iSocioCRM,hanicker/odoo,TRESCLOUD/odoopub,slevenhagen/odoo-npg,xujb/odoo,mvaled/OpenUpgrade,savoirfairelinux/OpenUpgrade,dalegregory/odoo,FlorianLudwig/odoo,goliveirab/odoo,Antiun/odoo,Daniel-CA/odoo,Drooids/odoo,nuncjo/odoo,savoirfairelinux/OpenUpgrade,RafaelTorrealba/odoo,cedk/odoo,CopeX/odoo,ovnicraft/odoo,florentx/OpenUpgrade,apanju/odoo,pedrobaeza/OpenUpgrade,synconics/odoo,jaxkodex/odoo,tinkerthaler/odoo,MarcosCommunity/odoo,ujjwalwahi/odoo,abstract-open-solutions/OCB,rowemoore/odoo,joshuajan/odoo,collex100/odoo,idncom/odoo,salaria/odoo,lsinfo/odoo,damdam-s/OpenUpgrade,dgzurita/odoo,codekaki/odoo,kirca/OpenUpgrade,CatsAndDogsbvba/odoo,tinkhaven-organization/odoo,fuhongliang/odoo,fossoult/odoo,virgree/odoo,factorlibre/OCB,hubsay
snuaa/odoo,ujjwalwahi/odoo,factorlibre/OCB,odoo-turkiye/odoo,nuncjo/odoo,x111ong/odoo,csrocha/OpenUpgrade,brijeshkesariya/odoo,jaxkodex/odoo,VitalPet/odoo,fuhongliang/odoo,Bachaco-ve/odoo,highco-groupe/odoo,savoirfairelinux/OpenUpgrade,storm-computers/odoo,datenbetrieb/odoo,tvtsoft/odoo8,jiachenning/odoo,frouty/odoo_oph,luistorresm/odoo,stephen144/odoo,ShineFan/odoo,optima-ict/odoo,Endika/odoo,grap/OCB,tarzan0820/odoo,0k/OpenUpgrade,abstract-open-solutions/OCB,ApuliaSoftware/odoo,grap/OpenUpgrade,ApuliaSoftware/odoo,jfpla/odoo,shaufi10/odoo,tvtsoft/odoo8,hanicker/odoo,Danisan/odoo-1,BT-fgarbely/odoo,Danisan/odoo-1,hubsaysnuaa/odoo,kybriainfotech/iSocioCRM,rgeleta/odoo,deKupini/erp,colinnewell/odoo,zchking/odoo,slevenhagen/odoo-npg,erkrishna9/odoo,steedos/odoo,SAM-IT-SA/odoo,jiangzhixiao/odoo,sv-dev1/odoo,sebalix/OpenUpgrade,ThinkOpen-Solutions/odoo,christophlsa/odoo,mszewczy/odoo,omprakasha/odoo,idncom/odoo,oasiswork/odoo,spadae22/odoo,oihane/odoo,guewen/OpenUpgrade,brijeshkesariya/odoo,BT-fgarbely/odoo,zchking/odoo,pplatek/odoo,Danisan/odoo-1,sergio-incaser/odoo,slevenhagen/odoo-npg,fevxie/odoo,windedge/odoo,lgscofield/odoo,abstract-open-solutions/OCB,CubicERP/odoo,ramitalat/odoo,hmen89/odoo,oihane/odoo,storm-computers/odoo,jiachenning/odoo,bobisme/odoo,hopeall/odoo,Endika/OpenUpgrade,juanalfonsopr/odoo,slevenhagen/odoo,SAM-IT-SA/odoo,FlorianLudwig/odoo,JGarcia-Panach/odoo,Elico-Corp/odoo_OCB,javierTerry/odoo,n0m4dz/odoo,lgscofield/odoo,abstract-open-solutions/OCB,Ichag/odoo,frouty/odoogoeen,Endika/OpenUpgrade,Noviat/odoo,arthru/OpenUpgrade,markeTIC/OCB,florian-dacosta/OpenUpgrade,apanju/GMIO_Odoo,frouty/odoogoeen,chiragjogi/odoo,cysnake4713/odoo,datenbetrieb/odoo,ujjwalwahi/odoo,colinnewell/odoo,Endika/OpenUpgrade,nitinitprof/odoo,ccomb/OpenUpgrade,Maspear/odoo,jusdng/odoo,Elico-Corp/odoo_OCB,CopeX/odoo,JonathanStein/odoo,ecosoft-odoo/odoo,zchking/odoo,hopeall/odoo,pedrobaeza/OpenUpgrade,microcom/odoo,tangyiyong/odoo,podemos-info/odoo,havt/odoo,laslabs/odoo,BT-rma
rtin/odoo,sysadminmatmoz/OCB,Danisan/odoo-1,joshuajan/odoo,JGarcia-Panach/odoo,shaufi/odoo,Daniel-CA/odoo,waytai/odoo,0k/odoo,numerigraphe/odoo,slevenhagen/odoo,mmbtba/odoo,guewen/OpenUpgrade,thanhacun/odoo,prospwro/odoo,rahuldhote/odoo,TRESCLOUD/odoopub,hassoon3/odoo,leoliujie/odoo,oihane/odoo,NeovaHealth/odoo,n0m4dz/odoo,funkring/fdoo,funkring/fdoo,OSSESAC/odoopubarquiluz,JGarcia-Panach/odoo,highco-groupe/odoo,mvaled/OpenUpgrade,florentx/OpenUpgrade,shingonoide/odoo,ojengwa/odoo,dalegregory/odoo,hassoon3/odoo,rahuldhote/odoo,gorjuce/odoo,dkubiak789/odoo,ShineFan/odoo,KontorConsulting/odoo,fjbatresv/odoo,nuncjo/odoo,sinbazhou/odoo,Endika/odoo,ingadhoc/odoo,lightcn/odoo,csrocha/OpenUpgrade,bplancher/odoo,guewen/OpenUpgrade,dkubiak789/odoo,Noviat/odoo,gdgellatly/OCB1,jolevq/odoopub | addons/portal/__openerp__.py | addons/portal/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : "Portal",
'version' : "1.0",
'depends' : ["base"],
'author' : "OpenERP SA",
'category': 'Tools',
'description': """
This module defines 'portals' to customize the access to your OpenERP database
for external users.
A portal defines customized user menu and access rights for a group of users
(the ones associated to that portal). It also associates user groups to the
portal users (adding a group in the portal automatically adds it to the portal
users, etc). That feature is very handy when used in combination with the
module 'share'.
""",
'website': 'http://www.openerp.com',
'demo_xml': [],
'data': [
'security/portal_security.xml', 'security/ir.model.access.csv',
'portal_view.xml', 'wizard_view.xml', 'res_user_view.xml',
],
'installable': True,
'certificate' : '',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : "Portal",
'version' : "0.9",
'depends' : ["base"],
'author' : "OpenERP SA",
'category': 'Tools',
'description': """
This module defines 'portals' to customize the access to your OpenERP database
for external users.
A portal defines customized user menu and access rights for a group of users
(the ones associated to that portal). It also associates user groups to the
portal users (adding a group in the portal automatically adds it to the portal
users, etc). That feature is very handy when used in combination with the
module 'share'.
""",
'website': 'http://www.openerp.com',
'demo_xml': [],
'data': [
'security/portal_security.xml', 'security/ir.model.access.csv',
'portal_view.xml', 'wizard_view.xml', 'res_user_view.xml',
],
'installable': True,
'certificate' : '',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
0c013b3580ede9dd976ae2bec8cfa4b856cd3975 | Make sure we use a Python that speaks f-string | wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api | scripts/build_sbt_image.py | scripts/build_sbt_image.py | #!/usr/bin/env python3.6
# -*- encoding: utf-8 -*-
"""
Build a Docker image for one of our sbt applications.
Usage:
build_sbt_image.py --project=<PROJECT> [--version=<VERSION>] [--env=<BUILD_ENV>]
build_sbt_image.py -h | --help
Options:
-h --help Show this screen.
--project=<PROJECT> Name of the sbt project (e.g. api, transformer)
--version=<VERSION> Version to use in the release ID
--env=<BUILD_ENV> Build environment (dev, prod, etc.)
"""
import subprocess
import docker
import docopt
import os
import shutil
from tooling import (
write_release_id,
CURRENT_COMMIT,
ROOT,
PLATFORM_ENV
)
DEFAULT_VERSION = '0.0.1'
if __name__ == '__main__':
    args = docopt.docopt(__doc__)

    # Read arguments from docopt
    project = args['--project']
    version = args['--version'] or DEFAULT_VERSION
    build_env = args['--env'] or PLATFORM_ENV

    print(f'*** Building sbt Docker image for {project}')

    # Construct the release ID and the tag
    # Release IDs look like "<version>-<commit>_<env>", e.g. "0.0.1-abc123_prod".
    release_id = f'{version}-{CURRENT_COMMIT}_{build_env}'
    tag = f'{project}:{release_id}'
    print(f'*** Image will be tagged {tag}')

    # Compile/stage the application with sbt before building the image.
    print(f'*** Building the Scala binaries')
    subprocess.check_call(['sbt', f'project {project}', 'stage'])

    # sbt's "stage" output is copied into the shared Dockerfile's context so
    # one generic scala_service Dockerfile can package any project.
    source_target = os.path.join(ROOT, project, 'target', 'universal', 'stage')
    docker_root = os.path.join(ROOT, 'docker', 'scala_service')
    dest_target = os.path.join(docker_root, 'target', project)

    print(f'*** Copying build artefacts to {dest_target} from {source_target}')
    # Remove any stale copy from a previous build before copying afresh.
    shutil.rmtree(dest_target, ignore_errors = True)
    shutil.copytree(source_target, dest_target)

    print('*** Building the new Docker image')
    print(f'*** Dockerfile is at {docker_root}')
    client = docker.from_env()
    client.images.build(path=docker_root, buildargs={'project': project}, tag=tag)

    # Record the release ID so later deploy steps can find this image.
    print('*** Saving the release ID to .releases')
    write_release_id(project=project, release_id=release_id)
| #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
Build a Docker image for one of our sbt applications.
Usage:
build_sbt_image.py --project=<PROJECT> [--version=<VERSION>] [--env=<BUILD_ENV>]
build_sbt_image.py -h | --help
Options:
-h --help Show this screen.
--project=<PROJECT> Name of the sbt project (e.g. api, transformer)
--version=<VERSION> Version to use in the release ID
--env=<BUILD_ENV> Build environment (dev, prod, etc.)
"""
import subprocess
import docker
import docopt
import os
import shutil
from tooling import (
write_release_id,
CURRENT_COMMIT,
ROOT,
PLATFORM_ENV
)
DEFAULT_VERSION = '0.0.1'
if __name__ == '__main__':
args = docopt.docopt(__doc__)
# Read arguments from docopt
project = args['--project']
version = args['--version'] or DEFAULT_VERSION
build_env = args['--env'] or PLATFORM_ENV
print(f'*** Building sbt Docker image for {project}')
# Construct the release ID and the tag
release_id = f'{version}-{CURRENT_COMMIT}_{build_env}'
tag = f'{project}:{release_id}'
print(f'*** Image will be tagged {tag}')
print(f'*** Building the Scala binaries')
subprocess.check_call(['sbt', f'project {project}', 'stage'])
source_target = os.path.join(ROOT, project, 'target', 'universal', 'stage')
docker_root = os.path.join(ROOT, 'docker', 'scala_service')
dest_target = os.path.join(docker_root, 'target', project)
print(f'*** Copying build artefacts to {dest_target} from {source_target}')
shutil.rmtree(dest_target, ignore_errors = True)
shutil.copytree(source_target, dest_target)
print('*** Building the new Docker image')
print(f'*** Dockerfile is at {docker_root}')
client = docker.from_env()
client.images.build(path=docker_root, buildargs={'project': project}, tag=tag)
print('*** Saving the release ID to .releases')
write_release_id(project=project, release_id=release_id)
| mit | Python |
05fdbe1aa093dbe252f9c16c4a5ccaf47a7bb3a7 | Add unittest for get_thumbnail_of_file() | Commonists/CommonsDownloader | test/test_commonsdowloader.py | test/test_commonsdowloader.py | #!/usr/bin/env python
# -*- coding: latin-1 -*-
"""Unit tests."""
from os.path import dirname, join
import unittest
import commonsdownloader
class TestCommonsDownloaderOffline(unittest.TestCase):

    """Testing methods from commonsdownloader which do not require connection."""

    def test_clean_up_filename(self):
        """Test clean_up_filename."""
        # (input, expected): surrounding whitespace is stripped and inner
        # spaces become underscores, per MediaWiki file-name conventions.
        values = [('Example.jpg', 'Example.jpg'),
                  ('Example.jpg ', 'Example.jpg'),
                  (' Example.jpg', 'Example.jpg'),
                  ('My Example.jpg', 'My_Example.jpg')]
        for (input_value, expected_value) in values:
            self.assertEqual(commonsdownloader.clean_up_filename(input_value),
                             expected_value)

    def test_make_thumb_url(self):
        """Test make_thumb_url."""
        # (filename, width) -> thumb.php URL with f= and width= parameters.
        input_value = ('My_Example.jpg', 100)
        expected_value = "http://commons.wikimedia.org/w/thumb.php?f=My_Example.jpg&width=100"
        output = commonsdownloader.make_thumb_url(*input_value)
        self.assertEqual(output, expected_value)

    def test_make_thumbnail_name(self):
        """Test make_thumbnail_name."""
        # The original extension is replaced by the requested output format.
        input_value = ('Example.svg', 'png')
        expected_value = "Example.png"
        output = commonsdownloader.make_thumbnail_name(*input_value)
        self.assertEqual(output, expected_value)
class TestCommonsDownloaderOnline(unittest.TestCase):

    """Testing methods from commonsdownloader which require connection"""

    def setUp(self):
        """Set up the TestCase with the reference data files."""
        # Locally stored reference thumbnails of 'Example.jpg' at the two
        # widths requested in the test below.
        self.outputfile1 = join(dirname(__file__), 'data', 'Example-100.jpg')
        self.outputfile2 = join(dirname(__file__), 'data', 'Example-50.jpg')

    def test_get_thumbnail_of_file(self):
        """Test get_thumbnail_of_file."""
        # Each case: ((filename, width), (reference file, expected name)).
        # The downloaded bytes must match the stored reference thumbnail.
        values = [(('Example.jpg', 100), (self.outputfile1, 'Example.jpeg')),
                  (('Example.jpg', 50), (self.outputfile2, 'Example.jpeg'))]
        for (input_value, expected) in values:
            expected_value = (open(expected[0]).read(), expected[1])
            output = commonsdownloader.get_thumbnail_of_file(*input_value)
            self.assertEqual(output, expected_value)
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python
# -*- coding: latin-1 -*-
"""Unit tests."""
from os.path import dirname, join
import unittest
import commonsdownloader
class TestCommonsDownloaderOffline(unittest.TestCase):
"""Testing methods from commonsdownloader which do not require connection."""
def test_clean_up_filename(self):
"""Test clean_up_filename."""
values = [('Example.jpg', 'Example.jpg'),
('Example.jpg ', 'Example.jpg'),
(' Example.jpg', 'Example.jpg'),
('My Example.jpg', 'My_Example.jpg')]
for (input_value, expected_value) in values:
self.assertEqual(commonsdownloader.clean_up_filename(input_value),
expected_value)
def test_make_thumb_url(self):
"""Test make_thumb_url."""
input_value = ('My_Example.jpg', 100)
expected_value = "http://commons.wikimedia.org/w/thumb.php?f=My_Example.jpg&width=100"
output = commonsdownloader.make_thumb_url(*input_value)
self.assertEqual(output, expected_value)
def test_make_thumbnail_name(self):
"""Test make_thumbnail_name."""
input_value = ('Example.svg', 'png')
expected_value = "Example.png"
output = commonsdownloader.make_thumbnail_name(*input_value)
self.assertEqual(output, expected_value)
class TestCommonsDownloaderOnline(unittest.TestCase):
"""Testing methods from commonsdownloader which require connection"""
def setUp(self):
"""Sett up the TestCase with the data files."""
self.outputfile1 = join(dirname(__file__), 'data', 'Example-100.jpg')
self.outputfile2 = join(dirname(__file__), 'data', 'Example-50.jpg')
if __name__ == "__main__":
unittest.main()
| mit | Python |
442b8d886f74ef377c96c48755e4d50eb83b0137 | fix import error | noisyboiler/wampy | test/test_roles.py | test/test_roles.py | import pytest
from wampy.constants import DEFAULT_REALM
from wampy.peers.clients import Client
from wampy.roles.subscriber import TopicSubscriber
from test.helpers import assert_stops_raising
class TestTopicSubscriber(object):
    """End-to-end pub/sub check: a message published to a topic must be
    delivered to a TopicSubscriber attached to the same router/realm."""

    @pytest.yield_fixture
    def publisher(self, router):
        # A connected wampy client used to publish the test message.
        with Client() as client:
            yield client

    def test_subscribe_to_topic(self, router, publisher):
        subscriber = TopicSubscriber(
            router=router, realm=DEFAULT_REALM, topic="foo")

        def wait_for_message():
            # Raises until the published message has actually arrived;
            # assert_stops_raising retries it until it passes (or times out).
            message = subscriber.messages.pop()
            assert message == "bar"

        with subscriber:
            publisher.publish(topic="foo", message="bar")
            assert_stops_raising(wait_for_message)
| import pytest
from wampy.constants import DEFAULT_REALM
from wampy.peers.clients import DefaultClient as Client
from wampy.roles.subscriber import TopicSubscriber
from test.helpers import assert_stops_raising
class TestTopicSubscriber(object):
@pytest.yield_fixture
def publisher(self, router):
with Client() as client:
yield client
def test_subscribe_to_topic(self, router, publisher):
subscriber = TopicSubscriber(
router=router, realm=DEFAULT_REALM, topic="foo")
def wait_for_message():
message = subscriber.messages.pop()
assert message == "bar"
with subscriber:
publisher.publish(topic="foo", message="bar")
assert_stops_raising(wait_for_message)
| mpl-2.0 | Python |
2d2cc5e0b8b3d5d658c7b7df68dd206518c3d4e9 | Fix test script | vietor/pyweaknet,vietor/pyweaknet,vietor/pysunday | test/test_speed.py | test/test_speed.py | #!/usr/bin/env python
from __future__ import division, print_function, with_statement
import os
import sys
sys.path.append(os.path.abspath("./"))
sys.path.append(os.path.abspath("../"))
import time
import random
import string
from weaknet import *
def randomBytes(size):
    """Return *size* random alphanumeric characters, converted via xbytes."""
    return xbytes(''.join(random.choice(string.ascii_uppercase + string.digits + string.ascii_lowercase) for _ in range(size)))
def testAlgorithm(algorithm, secret, source):
    """Round-trip *source* through encrypt/decrypt with *algorithm*.

    Prints "<algorithm> <ciphertext size overhead> <round-trip ok?>".
    """
    # Separate engine instances for each direction — presumably the cipher
    # keeps per-direction stream state; confirm against weaknet.SecretEngine.
    encrypt = SecretEngine(algorithm, secret).encrypt
    decrypt = SecretEngine(algorithm, secret).decrypt
    edata = encrypt(source)
    ddata = decrypt(edata)
    print(algorithm + " " + str(len(edata) - len(source)) +
          " " + str(source == ddata))
    # Flush so progress is visible immediately when output is piped.
    sys.stdout.flush()
def stressTestAlgorithm(algorithm, secret, count, source):
    """Time *count* encrypt+decrypt round trips of *source*.

    Prints "<algorithm> <total seconds>".
    """
    encrypt = SecretEngine(algorithm, secret).encrypt
    decrypt = SecretEngine(algorithm, secret).decrypt
    begin = time.time()
    for i in range(count):
        decrypt(encrypt(source))
    duration = time.time() - begin
    print(algorithm + " " + str(duration))
    # Flush so per-algorithm timings appear as they complete.
    sys.stdout.flush()
# Benchmark every supported cipher: 100 rounds over a 128 KiB random payload.
COUNTS_MAX = 100
SECRET_KEY = "secret key for test"
SOURCE_BUF = randomBytes(128 * 1024)
# secret_method_supported comes from the star import of weaknet above.
ALGORITHM_KEYS = sorted(secret_method_supported.keys())
for key in ALGORITHM_KEYS:
    stressTestAlgorithm(key, SECRET_KEY, COUNTS_MAX, SOURCE_BUF)
| from __future__ import division, print_function, with_statement
import os
import sys
sys.path.append(os.path.abspath("./"))
sys.path.append(os.path.abspath("../"))
import time
import random
import string
from weaknet import *
def randomBytes(size):
return xbytes(''.join(random.choice(string.ascii_uppercase + string.digits + string.ascii_lowercase) for _ in range(size)))
def testAlgorithm(algorithm, secret, source):
encrypt = SecretEngine(algorithm, secret).encrypt
decrypt = SecretEngine(algorithm, secret).decrypt
edata = encrypt(source)
ddata = decrypt(edata)
print(algorithm + " " + str(len(edata) - len(source)) +
" " + str(source == ddata))
sys.stdout.flush()
def stressTestAlgorithm(algorithm, secret, count, source):
encrypt = SecretEngine(algorithm, secret).encrypt
decrypt = SecretEngine(algorithm, secret).decrypt
begin = time.time()
for i in range(count):
decrypt(encrypt(source))
duration = time.time() - begin
print(algorithm + " " + str(duration))
sys.stdout.flush()
COUNTS_MAX = 100
SECRET_KEY = "secret key for test"
SOURCE_BUF = randomBytes(128 * 1024)
ALGORITHM_KEYS = sorted(secret_method_supported.keys())
for key in ALGORITHM_KEYS:
stressTestAlgorithm(key, SECRET_KEY, COUNTS_MAX, SOURCE_BUF)
| mit | Python |
24a245fc3309a79e06e435b18582f70dfadab019 | update K8sReplicaSet | sebastienc/kubernetes-py,mnubo/kubernetes-py,froch/kubernetes-py,mnubo/kubernetes-py,froch/kubernetes-py,sebastienc/kubernetes-py | kubernetes/K8sReplicaSet.py | kubernetes/K8sReplicaSet.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.md', which is part of this source code package.
#
from kubernetes.K8sObject import K8sObject
from kubernetes.models.v1beta1.ReplicaSet import ReplicaSet
class K8sReplicaSet(K8sObject):
    """
    http://kubernetes.io/docs/api-reference/extensions/v1beta1/definitions/#_v1beta1_replicaset
    """

    def __init__(self, config=None, name=None):
        # Register with the generic K8s object machinery as a 'ReplicaSet'.
        super(K8sReplicaSet, self).__init__(config=config, obj_type='ReplicaSet', name=name)

    # ------------------------------------------------------------------------------------- get

    def get(self):
        # Refresh the local model from the server-side state and return self
        # so calls can be chained.
        self.model = ReplicaSet(model=self.get_model())
        return self
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.md', which is part of this source code package.
#
from kubernetes.K8sPodBasedObject import K8sPodBasedObject
from kubernetes.models.v1.ReplicaSet import ReplicaSet
class K8sReplicaSet(K8sPodBasedObject):
"""
This module uses K8sReplicaSet for HTTP DELETE operations.
From http://kubernetes.io/docs/user-guide/replicasets/:
While Replica Sets can be used independently, today it’s mainly used by Deployments as
a mechanism to orchestrate pod creation, deletion and updates. When you use Deployments
you don’t have to worry about managing the Replica Sets that they create.
Deployments own and manage their Replica Sets.
A Replica Set ensures that a specified number of pod “replicas” are running at any given time.
However, a Deployment is a higher-level concept that manages Replica Sets and provides declarative
updates to pods along with a lot of other useful features. Therefore, we recommend using Deployments
instead of directly using Replica Sets, unless you require custom update orchestration or don’t
require updates at all.
This actually means that you may never need to manipulate Replica Set objects: use directly a
Deployment and define your application in the spec section.
"""
def __init__(self, config=None, name=None):
super(K8sReplicaSet, self).__init__(config=config, obj_type='ReplicaSet', name=name)
self.model = ReplicaSet(name=name, namespace=self.config.namespace)
# ------------------------------------------------------------------------------------- get
def get(self):
self.model = ReplicaSet(model=self.get_model())
return self
| apache-2.0 | Python |
38f94d1e15118849315b5d7146cde984035c7782 | Disable TestStopPCs when there's no XML support | apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb | packages/Python/lldbsuite/test/functionalities/gdb_remote_client/TestStopPCs.py | packages/Python/lldbsuite/test/functionalities/gdb_remote_client/TestStopPCs.py | from __future__ import print_function
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
from gdbclientutils import *
class TestStopPCs(GDBRemoteTestBase):
    """Check that thread IDs and PCs advertised in the stop-reply packet
    ("threads:" / "thread-pcs:" fields) are picked up by the client."""

    @skipIfXmlSupportMissing
    def test(self):
        class MyResponder(MockGDBServerResponder):
            # Fake gdb-server advertising two threads (0x1ff0d, 0x2ff0d)
            # whose PCs are carried directly in the stop replies.
            def haltReason(self):
                return "T02thread:1ff0d;threads:1ff0d,2ff0d;thread-pcs:10001bc00,10002bc00;"

            def threadStopInfo(self, threadnum):
                if threadnum == 0x1ff0d:
                    return "T02thread:1ff0d;threads:1ff0d,2ff0d;thread-pcs:10001bc00,10002bc00;"
                if threadnum == 0x2ff0d:
                    return "T00thread:2ff0d;threads:1ff0d,2ff0d;thread-pcs:10001bc00,10002bc00;"

            def qXferRead(self, obj, annex, offset, length):
                # Minimal target.xml description; one register (rip) suffices.
                if annex == "target.xml":
                    return """<?xml version="1.0"?>
                        <target version="1.0">
                          <architecture>i386:x86-64</architecture>
                          <feature name="org.gnu.gdb.i386.core">
                            <reg name="rip" bitsize="64" regnum="0" type="code_ptr" group="general"/>
                          </feature>
                        </target>""", False
                else:
                    return None, False

        self.server.responder = MyResponder()
        target = self.dbg.CreateTarget('')
        if self.TraceOn():
            self.runCmd("log enable gdb-remote packets")
        process = self.connect(target)

        # Both threads and their PCs must come straight from the stop reply.
        self.assertEqual(process.GetNumThreads(), 2)
        th0 = process.GetThreadAtIndex(0)
        th1 = process.GetThreadAtIndex(1)
        self.assertEqual(th0.GetThreadID(), 0x1ff0d)
        self.assertEqual(th1.GetThreadID(), 0x2ff0d)
        self.assertEqual(th0.GetFrameAtIndex(0).GetPC(), 0x10001bc00)
        self.assertEqual(th1.GetFrameAtIndex(0).GetPC(), 0x10002bc00)
| from __future__ import print_function
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
from gdbclientutils import *
class TestStopPCs(GDBRemoteTestBase):
def test(self):
class MyResponder(MockGDBServerResponder):
def haltReason(self):
return "T02thread:1ff0d;threads:1ff0d,2ff0d;thread-pcs:10001bc00,10002bc00;"
def threadStopInfo(self, threadnum):
if threadnum == 0x1ff0d:
return "T02thread:1ff0d;threads:1ff0d,2ff0d;thread-pcs:10001bc00,10002bc00;"
if threadnum == 0x2ff0d:
return "T00thread:2ff0d;threads:1ff0d,2ff0d;thread-pcs:10001bc00,10002bc00;"
def qXferRead(self, obj, annex, offset, length):
if annex == "target.xml":
return """<?xml version="1.0"?>
<target version="1.0">
<architecture>i386:x86-64</architecture>
<feature name="org.gnu.gdb.i386.core">
<reg name="rip" bitsize="64" regnum="0" type="code_ptr" group="general"/>
</feature>
</target>""", False
else:
return None, False
self.server.responder = MyResponder()
target = self.dbg.CreateTarget('')
if self.TraceOn():
self.runCmd("log enable gdb-remote packets")
process = self.connect(target)
self.assertEqual(process.GetNumThreads(), 2)
th0 = process.GetThreadAtIndex(0)
th1 = process.GetThreadAtIndex(1)
self.assertEqual(th0.GetThreadID(), 0x1ff0d)
self.assertEqual(th1.GetThreadID(), 0x2ff0d)
self.assertEqual(th0.GetFrameAtIndex(0).GetPC(), 0x10001bc00)
self.assertEqual(th1.GetFrameAtIndex(0).GetPC(), 0x10002bc00)
| apache-2.0 | Python |
c573d3d69e58819d40a7e0097bfba823d6c9e10c | print removed | potato/searx,pointhi/searx,jibe-b/searx,PwnArt1st/searx,PwnArt1st/searx,jcherqui/searx,gugod/searx,potato/searx,matejc/searx,dzc34/searx,jibe-b/searx,framasoft/searx,gugod/searx,asciimoo/searx,GreenLunar/searx,PwnArt1st/searx,gugod/searx,misnyo/searx,jibe-b/searx,dalf/searx,asciimoo/searx,dalf/searx,jpope777/searx,kdani3/searx,jpope777/searx,pointhi/searx,GreenLunar/searx,kdani3/searx,jpope777/searx,dzc34/searx,PwnArt1st/searx,jcherqui/searx,jibe-b/searx,dalf/searx,framasoft/searx,dalf/searx,potato/searx,matejc/searx,misnyo/searx,gugod/searx,kdani3/searx,pointhi/searx,jcherqui/searx,dzc34/searx,matejc/searx,framasoft/searx,dzc34/searx,kdani3/searx,matejc/searx,GreenLunar/searx,pointhi/searx,GreenLunar/searx,asciimoo/searx,potato/searx,jcherqui/searx,misnyo/searx,misnyo/searx,framasoft/searx,asciimoo/searx,jpope777/searx | searx/engines/piratebay.py | searx/engines/piratebay.py | from lxml import html
from urlparse import urljoin
from cgi import escape
from urllib import quote
categories = ['videos', 'music']
url = 'https://thepiratebay.se/'
search_url = url + 'search/{search_term}/{pageno}/99/{search_type}'
search_types = {'videos': '200',
'music': '100',
'files': '0'}
magnet_xpath = './/a[@title="Download this torrent using magnet"]'
content_xpath = './/font[@class="detDesc"]//text()'
paging = True
def request(query, params):
    """Fill in params['url'] with the Pirate Bay search URL for *query*."""
    # Unknown categories fall back to type code '200' (videos).
    type_code = search_types.get(params['category'], '200')
    params['url'] = search_url.format(
        search_term=quote(query),
        search_type=type_code,
        # The site's paging is zero-based; searx's pageno starts at 1.
        pageno=params['pageno'] - 1,
    )
    return params
def response(resp):
    """Parse a Pirate Bay results page into searx torrent result dicts."""
    results = []
    dom = html.fromstring(resp.text)
    rows = dom.xpath('//table[@id="searchResult"]//tr')

    # Missing/empty result table: nothing to report.
    if not rows:
        return results

    # First row is the table header; skip it.
    for row in rows[1:]:
        name_link = row.xpath('.//div[@class="detName"]//a')[0]
        href = urljoin(url, name_link.attrib.get('href'))
        title = ' '.join(name_link.xpath('.//text()'))
        content = escape(' '.join(row.xpath(content_xpath)))
        # The first two right-aligned cells hold seeders and leechers.
        seed, leech = row.xpath('.//td[@align="right"]/text()')[:2]
        magnetlink = row.xpath(magnet_xpath)[0]
        results.append({'url': href,
                        'title': title,
                        'content': content,
                        'seed': seed,
                        'leech': leech,
                        'magnetlink': magnetlink.attrib['href'],
                        'template': 'torrent.html'})
    return results
| from lxml import html
from urlparse import urljoin
from cgi import escape
from urllib import quote
categories = ['videos', 'music']
url = 'https://thepiratebay.se/'
search_url = url + 'search/{search_term}/{pageno}/99/{search_type}'
search_types = {'videos': '200',
'music': '100',
'files': '0'}
magnet_xpath = './/a[@title="Download this torrent using magnet"]'
content_xpath = './/font[@class="detDesc"]//text()'
paging = True
def request(query, params):
search_type = search_types.get(params['category'], '200')
params['url'] = search_url.format(search_term=quote(query),
search_type=search_type,
pageno=params['pageno'] - 1)
print params['url']
return params
def response(resp):
results = []
dom = html.fromstring(resp.text)
search_res = dom.xpath('//table[@id="searchResult"]//tr')
if not search_res:
return results
for result in search_res[1:]:
link = result.xpath('.//div[@class="detName"]//a')[0]
href = urljoin(url, link.attrib.get('href'))
title = ' '.join(link.xpath('.//text()'))
content = escape(' '.join(result.xpath(content_xpath)))
seed, leech = result.xpath('.//td[@align="right"]/text()')[:2]
magnetlink = result.xpath(magnet_xpath)[0]
results.append({'url': href,
'title': title,
'content': content,
'seed': seed,
'leech': leech,
'magnetlink': magnetlink.attrib['href'],
'template': 'torrent.html'})
return results
| agpl-3.0 | Python |
cb9b1a2163f960e34721f74bad30622fda71e43b | Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function. | apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
    # Expression-only test; no need to replicate per-debug-info variants.
    NO_DEBUG_INFO_TESTCASE = True

    mydir = TestBase.compute_mydir(__file__)

    def setUp(self):
        TestBase.setUp(self)

    @skipUnlessDarwin
    def test_expr(self):
        """Verify that 'settings set clang.modules-cache-path' redirects the
        Clang module cache: importing a module must create that directory."""
        self.build()
        self.main_source_file = lldb.SBFileSpec("main.m")
        self.runCmd("settings set target.auto-import-clang-modules true")
        mod_cache = self.getBuildArtifact("my-clang-modules-cache")
        # Start from a clean slate so the final check is meaningful.
        if os.path.isdir(mod_cache):
            shutil.rmtree(mod_cache)
        self.assertFalse(os.path.isdir(mod_cache),
                         "module cache should not exist")
        self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
        self.runCmd('settings set target.clang-module-search-paths "%s"'
                    % self.getSourceDir())
        (target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
            self, "Set breakpoint here", self.main_source_file)
        # Importing the local 'Foo' module forces a module build, which must
        # land in the configured cache directory.
        self.runCmd("expr @import Foo")
        self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Darwin")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| apache-2.0 | Python |
5975b9f42965b4193b13bb5264dd6be1f4e30b89 | Complete sol | bowen0701/algorithms_data_structures | lc0681_next_closest_time.py | lc0681_next_closest_time.py | """Leetcode 681. Next Closest Time (Premium)
Medium
URL: https://leetcode.com/problems/next-closest-time
Given a time represented in the format "HH:MM", form the next closest time by
reusing the current digits. There is no limit on how many times a digit can be
reused.
You may assume the given input string is always valid. For example, "01:34",
"12:09" are valid. "1:34", "12:9" are invalid.
Example 1:
Input: "19:34"
Output: "19:39"
Explanation:
The next closest time choosing from digits 1, 9, 3, 4, is 19:39, which occurs
5 minutes later.
It is not 19:33, because this occurs 23 hours and 59 minutes later.
Example 2:
Input: "23:59"
Output: "22:22"
Explanation: It may be assumed that the returned time is next day's time since
it is smaller than the input time numerically.
"""
class Solution(object):
    def nextClosestTime(self, time):
        """
        @param time: the given time, formatted "HH:MM"
        @return: the next closest time reusing only the digits of *time*

        Walks forward minute by minute (wrapping past midnight), returning
        the first candidate whose characters are a subset of the input's.

        Time complexity: O(1) — at most 24 * 60 candidates are examined.
        Space complexity: O(1).
        """
        allowed = set(time)
        hh, mm = time.split(':')
        total = int(hh) * 60 + int(mm)
        minutes_per_day = 60 * 24

        while True:
            # Advance one minute, wrapping around at midnight.
            total = (total + 1) % minutes_per_day
            candidate = '{:02d}:{:02d}'.format(total // 60, total % 60)
            if set(candidate) <= allowed:
                # First time composed solely of the original digits.
                return candidate
def main():
    """Demonstrate Solution.nextClosestTime on the two LeetCode examples."""
    # Parenthesized print() works on both Python 2 and Python 3; the
    # original bare `print expr` statements were Python-2-only syntax.

    # Output: "19:39"
    time = "19:34"
    print(Solution().nextClosestTime(time))

    # Output: "22:22"
    time = "23:59"
    print(Solution().nextClosestTime(time))


if __name__ == '__main__':
    main()
| """Leetcode 681. Next Closest Time (Premium)
Medium
URL: https://leetcode.com/problems/next-closest-time
Given a time represented in the format "HH:MM", form the next closest time by
reusing the current digits. There is no limit on how many times a digit can be
reused.
You may assume the given input string is always valid. For example, "01:34",
"12:09" are valid. "1:34", "12:9" are invalid.
Example 1:
Input: "19:34"
Output: "19:39"
Explanation:
The next closest time choosing from digits 1, 9, 3, 4, is 19:39, which occurs
5 minutes later.
It is not 19:33, because this occurs 23 hours and 59 minutes later.
Example 2:
Input: "23:59"
Output: "22:22"
Explanation: It may be assumed that the returned time is next day's time since
it is smaller than the input time numerically.
"""
class Solution:
def nextClosestTime(self, time):
"""
@param time: the given time
@return: the next closest time
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
7ad47fad53be18a07aede85c02e41176a96c5de2 | Update celery setup according to docs | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | learnwithpeople/__init__.py | learnwithpeople/__init__.py | # This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)
__version__ = "dev"
GIT_REVISION = "dev"
| # This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__version__ = "dev"
GIT_REVISION = "dev"
| mit | Python |
e67c57128f88b61eac08e488e54343d48f1454c7 | Update LoginForm to match reality | dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard | ddcz/forms/authentication.py | ddcz/forms/authentication.py | import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
    # Plain credentials form (nick + password); validation against the
    # user store happens in the view, not here.
    nick = forms.CharField(label="Nick", max_length=25)
    password = forms.CharField(
        label="Heslo", max_length=100, widget=forms.PasswordInput
    )
class PasswordResetForm(authforms.PasswordResetForm):
    def get_users(self, email):
        """Given an email, return matching user(s) who should receive a reset.

        Overridden from the stock form to look the address up on UserProfile
        (case-insensitively via email_uzivatele) instead of the standard user
        model, since UserProfile is normative for email storage.
        """
        user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
        # Only active users with a usable password should get a reset link.
        users = tuple(
            list(
                up.user
                for up in user_profiles
                if up.user.has_usable_password() and up.user.is_active
            )
        )
        # Log primary keys only, never the email address itself.
        logger.info(
            "Selected users for password reset: %s"
            % ", ".join([str(u.pk) for u in users])
        )
        return users
| import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=20)
password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
| mit | Python |
f3a6ebf0351050553363e28c271a103c9545b422 | Fix print http_interface.py | Koheron/lase | lase/core/http_interface.py | lase/core/http_interface.py | #! /usr/bin/python
import requests
class HTTPInterface:
    """Thin HTTP client for a board's instrument-deployment REST endpoints.

    All requests target ``http://<IP>:<port>`` as built in ``__init__``.
    """

    def __init__(self, IP, port=80):
        self.url = 'http://' + IP + ':' + str(port)

    def get_bistream_id(self):
        """Return the identifier of the currently loaded bitstream."""
        r = requests.get(self.url + '/bitstream_id')
        return r.text

    def ping(self):
        """Fire-and-forget liveness POST; the reply is ignored."""
        requests.post(self.url + '/ping', data={})

    def deploy_remote_instrument(self, name, version):
        """ Deploy a remotely available instrument
            Args:
                - name: Instrument name
                - version: Instrument version
        """
        zip_filename = name + '-' + version + '.zip'
        requests.post(self.url + '/deploy/remote/' + zip_filename, data={})

    def deploy_local_instrument(self, name, version):
        """Deploy an instrument archive already present on the board.

        Best effort: the deploy endpoint tends not to answer within the
        short timeout (the original code noted a timeout here), so request
        failures are deliberately swallowed.
        """
        zip_filename = name + '-' + version + '.zip'
        print('Deploying ' + zip_filename)
        try:
            requests.post(self.url + '/deploy/local/' + zip_filename,
                          data={}, timeout=0.5)
        except requests.exceptions.RequestException:
            # Narrowed from a bare `except:` so real bugs (e.g. TypeError,
            # KeyboardInterrupt) are no longer hidden; timeouts and
            # connection errors remain ignored on purpose.
            pass

    def remove_local_instrument(self, name, version):
        """Remove a locally stored instrument archive; return the reply body."""
        zip_filename = name + '-' + version + '.zip'
        r = requests.get(self.url + '/remove/local/' + zip_filename)
        return r.text

    def get_local_instruments(self):
        """Return the board's local instruments as a name -> versions mapping."""
        r = requests.get(self.url + '/get_local_instruments')
        return r.json()

    def install_instrument(self, instrument_name):
        """Deploy the first available local version of *instrument_name*.

        Raises:
            ValueError: if no local instrument matches the name.
        """
        instruments = self.get_local_instruments()
        # .items() instead of the Python-2-only .iteritems(), so this method
        # works on both Python 2 and Python 3.
        for name, shas in instruments.items():
            if name == instrument_name and len(shas) > 0:
                self.deploy_local_instrument(name, shas[0])
                return
        raise ValueError("Instrument " + instrument_name + " not found")
if __name__ == "__main__":
http = HTTPInterface('192.168.1.15')
print(http.get_bistream_id())
# http.ping()
# http.deploy_remote_instrument('spectrum', '06ee48f')
# http.deploy_local_instrument('oscillo', '06ee48f')
# print(http.remove_local_instrument('oscillo', '06ee48f'))
print(http.get_local_instruments())
http.install_instrument("spectrum")
| #! /usr/bin/python
import requests
class HTTPInterface:
def __init__(self, IP, port=80):
self.url = 'http://' + IP + ':' + str(port)
def get_bistream_id(self):
r = requests.get(self.url + '/bitstream_id')
return r.text
def ping(self):
r = requests.post(self.url + '/ping', data={})
def deploy_remote_instrument(self, name, version):
""" Deploy a remotely available instrument
Args:
- name: Instrument name
- version: Instrument version
"""
zip_filename = name + '-' + version + '.zip'
r = requests.post(self.url + '/deploy/remote/' + zip_filename, data={})
def deploy_local_instrument(self, name, version):
zip_filename = name + '-' + version + '.zip'
print('Deploying ' + zip_filename)
try:
r = requests.post(self.url + '/deploy/local/' + zip_filename, data={} , timeout=0.5)
except:
pass
#print('Timeout occured')
def remove_local_instrument(self, name, version):
zip_filename = name + '-' + version + '.zip'
r = requests.get(self.url + '/remove/local/' + zip_filename)
return r.text
def get_local_instruments(self):
r = requests.get(self.url + '/get_local_instruments')
return r.json()
def install_instrument(self, instrument_name):
instruments = self.get_local_instruments()
for name, shas in instruments.iteritems():
if name == instrument_name and len(shas) > 0:
self.deploy_local_instrument(name, shas[0])
return
raise ValueError("Instrument " + instrument_name + " not found")
if __name__ == "__main__":
http = HTTPInterface('192.168.1.15')
print http.get_bistream_id()
# http.ping()
# http.deploy_remote_instrument('spectrum', '06ee48f')
# http.deploy_local_instrument('oscillo', '06ee48f')
# print http.remove_local_instrument('oscillo', '06ee48f')
print http.get_local_instruments()
http.install_instrument("spectrum")
| mit | Python |
49c54f33f2f8fce62eb6260e9ea360b8368f16ce | Support custom extensions. | appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource | ievv_opensource/ievvtasks_development/management/commands/ievvtasks_makemessages.py | ievv_opensource/ievvtasks_development/management/commands/ievvtasks_makemessages.py | import os
from django.conf import settings
from django.core import management
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Run makemessages for the languages specified in the ' \
'IEVVTASKS_MAKEMESSAGES_LANGUAGE_CODES setting.'
def __makemessages(self, ignore, extensions, domain):
management.call_command(
'makemessages',
locale=settings.IEVVTASKS_MAKEMESSAGES_LANGUAGE_CODES,
ignore=ignore,
extensions=extensions,
domain=domain)
def __build_python_translations(self):
ignore = getattr(settings, 'IEVVTASKS_MAKEMESSAGES_IGNORE', [
'static/*'
])
extensions = getattr(settings, 'IEVVTASKS_MAKEMESSAGES_EXTENSIONS', [
'py', 'html', 'txt'])
self.__makemessages(ignore=ignore,
extensions=extensions,
domain='django')
def __build_javascript_translations(self):
ignore = getattr(settings, 'IEVVTASKS_MAKEMESSAGES_JAVASCRIPT_IGNORE', [
'node_modules/*',
'bower_components/*',
'not_for_deploy/*',
])
extensions = getattr(settings, 'IEVVTASKS_MAKEMESSAGES_JAVASCRIPT_EXTENSIONS', [
'js'])
self.__makemessages(ignore=ignore,
extensions=extensions,
domain='djangojs')
def handle(self, *args, **options):
current_directory = os.getcwd()
for directory in getattr(settings, 'IEVVTASKS_MAKEMESSAGES_DIRECTORIES', [current_directory]):
directory = os.path.abspath(directory)
self.stdout.write('Running makemessages for python files in {}'.format(directory))
os.chdir(directory)
self.__build_python_translations()
if getattr(settings, 'IEVVTASKS_MAKEMESSAGES_BUILD_JAVASCRIPT_TRANSLATIONS', False):
self.stdout.write('Running makemessages for javascript files in {}'.format(directory))
self.__build_javascript_translations()
os.chdir(current_directory)
| import os
from django.conf import settings
from django.core import management
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Run makemessages for the languages specified in the ' \
'IEVVTASKS_MAKEMESSAGES_LANGUAGE_CODES setting.'
def __build_python_translations(self):
ignore = getattr(settings, 'IEVVTASKS_MAKEMESSAGES_IGNORE', [
'static/*'
])
management.call_command('makemessages',
locale=settings.IEVVTASKS_MAKEMESSAGES_LANGUAGE_CODES,
ignore=ignore)
def __build_javascript_translations(self):
ignore = getattr(settings, 'IEVVTASKS_MAKEMESSAGES_JAVASCRIPT_IGNORE', [
'node_modules/*',
'bower_components/*',
'not_for_deploy/*',
])
management.call_command('makemessages',
domain='djangojs',
locale=settings.IEVVTASKS_MAKEMESSAGES_LANGUAGE_CODES,
ignore=ignore)
def handle(self, *args, **options):
current_directory = os.getcwd()
for directory in getattr(settings, 'IEVVTASKS_MAKEMESSAGES_DIRECTORIES', [current_directory]):
directory = os.path.abspath(directory)
self.stdout.write('Running makemessages for python files in {}'.format(directory))
os.chdir(directory)
self.__build_python_translations()
if getattr(settings, 'IEVVTASKS_MAKEMESSAGES_BUILD_JAVASCRIPT_TRANSLATIONS', False):
self.stdout.write('Running makemessages for javascript files in {}'.format(directory))
self.__build_javascript_translations()
os.chdir(current_directory)
| bsd-3-clause | Python |
f7d4701aa7f8550f3c2a90c4110ef41bf2def8f5 | Fix hardcoding on xml | NeutronUfscarDatacom/DriverDatacom | dcclient/rpc.py | dcclient/rpc.py | """ RPC class used to communicate with the hardware
"""
import pycurl
import gzip
from StringIO import StringIO as sio
from oslo.config import cfg
class RPC:
""" RPC class. Used to connect to the client and pass the XML files.
"""
def __init__(self):
self.auth = self.CONF.dm_username+':'+self.CONF.dm_password
self.host = cfg.CONF.dm_host
self.method = cfg.CONF.method
def _create_url(self):
""" Internal method that returns the switches' URLs given the cfg
attributes.
"""
return self.method+'://'+self.auth+'@'+self.host+\
'/System/File/file_config.html'
def send_xml(self, xml_content):
""" Method used to send a given xml file to the switches
"""
req = pycurl.Curl()
#ignore ssl certificate verification
if self.method is 'https':
req.setopt(req.SSL_VERIFYPEER, 0)
req.setopt(req.SSL_VERIFYHOST, 0)
#set url being used
req.setopt(req.URL, self._create_url())
ziped = sio()
with gzip.GzipFile(fileobj=ziped, mode='w') as gzip_file:
gzip_file.write(xml_content)
run_data = ziped.getvalue()
#sets necessary multipart fields and adds the zip from buffer
data = [('page', 'file_upload'),
('running_part', '1'),
('file_to_upload', (req.FORM_BUFFER, 'upate_config',
req.FORM_BUFFERPTR, run_data))]
#sets POST method and the multipart packet
req.setopt(req.HTTPPOST, data)
#executes curl and exits
req.perform()
req.close()
| """ RPC class used to communicate with the hardware
"""
import pycurl
import gzip
from StringIO import StringIO as sio
from oslo.config import cfg
class RPC:
""" RPC class. Used to connect to the client and pass the XML files.
"""
def __init__(self):
self.auth = self.CONF.dm_username+':'+self.CONF.dm_password
self.host = cfg.CONF.dm_host
self.method = cfg.CONF.method
def _create_url(self):
""" Internal method that returns the switches' URLs given the cfg
attributes.
"""
return self.method+'://'+self.auth+'@'+self.host+\
'/System/File/file_config.html'
def send_xml(self):
""" Method used to send a given xml file to the switches
"""
req = pycurl.Curl()
#ignore ssl certificate verification
if self.method is 'https':
req.setopt(req.SSL_VERIFYPEER, 0)
req.setopt(req.SSL_VERIFYHOST, 0)
#set url being used
req.setopt(req.URL, self._create_url())
#oppening and zipping the xml file
with open('teste.xml', 'r') as xml_file:
xml_content = xml_file.read()
ziped = sio()
with gzip.GzipFile(fileobj=ziped, mode='w') as gzip_file:
gzip_file.write(xml_content)
run_data = ziped.getvalue()
#sets necessary multipart fields and adds the zip from buffer
data = [('page', 'file_upload'),
('running_part', '1'),
('file_to_upload', (req.FORM_BUFFER, 'upate_config',
req.FORM_BUFFERPTR, run_data))]
#sets POST method and the multipart packet
req.setopt(req.HTTPPOST, data)
#executes curl and exits
req.perform()
req.close()
| apache-2.0 | Python |
da360641e3da0fd15e3bf266d65b455e6bacaed6 | fix a bug on connection lost | angelonuffer/Sheldon-Chess,angelonuffer/Sheldon-Chess,angelonuffer/Sheldon-Chess | sheldonchess/interface/web/sheldonchess.py | sheldonchess/interface/web/sheldonchess.py | from rajesh import Application, run, expr
from rajesh.element import Img
from screens import MainMenu, NormalGameLobby
class Player(object):
def __init__(self, app):
self.app = app
self.name = ""
class SheldonChess(Application):
def begin(self):
self.player = Player(self)
background = Img(id="background", src="images/sheldonchess_background.png", width="100%", height="100%")
self.put(background, (0, 0))
main_menu = MainMenu(self)
self.put(main_menu, ("50%", "50%"))
def connectionLost(self, reason):
for player in NormalGameLobby.players:
if player == self.player:
NormalGameLobby.players.remove(player)
NormalGameLobby.update_players()
if __name__ == "__main__":
run()
| from rajesh import Application, run, expr
from rajesh.element import Img
from screens import MainMenu
class Player(object):
def __init__(self, app):
self.app = app
self.name = ""
class SheldonChess(Application):
def begin(self):
self.player = Player(self)
background = Img(id="background", src="images/sheldonchess_background.png", width="100%", height="100%")
self.put(background, (0, 0))
main_menu = MainMenu(self)
self.put(main_menu, ("50%", "50%"))
if __name__ == "__main__":
run()
| mit | Python |
cc4198819d3763f97625ef0066311f60809846ab | Add LUKS encrypted LV to LVM example | rvykydal/blivet,vojtechtrefny/blivet,vojtechtrefny/blivet,rvykydal/blivet | examples/lvm.py | examples/lvm.py | import os
import blivet
from blivet.size import Size
from blivet.util import set_up_logging, create_sparse_tempfile
from blivet.devices import LUKSDevice
set_up_logging()
b = blivet.Blivet() # create an instance of Blivet (don't add system devices)
# create a disk image file on which to create new devices
disk1_file = create_sparse_tempfile("disk1", Size("100GiB"))
b.disk_images["disk1"] = disk1_file
b.reset()
try:
disk1 = b.devicetree.get_device_by_name("disk1")
b.initialize_disk(disk1)
pv = b.new_partition(size=Size("50GiB"), fmt_type="lvmpv")
b.create_device(pv)
# allocate the partitions (decide where and on which disks they'll reside)
blivet.partitioning.do_partitioning(b)
vg = b.new_vg(parents=[pv])
b.create_device(vg)
# new lv with base size 5GiB and unbounded growth and an ext4 filesystem
dev = b.new_lv(fmt_type="ext4", size=Size("5GiB"), grow=True,
parents=[vg], name="unbounded")
b.create_device(dev)
# new lv with base size 5GiB and growth up to 15GiB and an ext4 filesystem
dev = b.new_lv(fmt_type="ext4", size=Size("5GiB"), grow=True,
maxsize=Size("15GiB"), parents=[vg], name="bounded")
b.create_device(dev)
# new lv with a fixed size of 2GiB formatted as swap space
dev = b.new_lv(fmt_type="swap", size=Size("2GiB"), parents=[vg])
b.create_device(dev)
# new LUKS encrypted lv with fixed size of 10GiB and ext4 filesystem
dev = b.new_lv(fmt_type="luks", fmt_args={"passphrase": "12345"},
size=Size("10GiB"), parents=[vg], name="encrypted")
b.create_device(dev)
luks_dev = LUKSDevice(name="luks-%s" % dev.name,
size=dev.size, parents=[dev])
b.create_device(luks_dev)
luks_fmt = blivet.formats.get_format(fmt_type="ext4", device=luks_dev.path)
b.format_device(luks_dev, luks_fmt)
# allocate the growable lvs
blivet.partitioning.grow_lvm(b)
print(b.devicetree)
# write the new partitions to disk and format them as specified
b.do_it()
print(b.devicetree)
finally:
b.devicetree.teardown_disk_images()
os.unlink(disk1_file)
| import os
import blivet
from blivet.size import Size
from blivet.util import set_up_logging, create_sparse_tempfile
set_up_logging()
b = blivet.Blivet() # create an instance of Blivet (don't add system devices)
# create a disk image file on which to create new devices
disk1_file = create_sparse_tempfile("disk1", Size("100GiB"))
b.disk_images["disk1"] = disk1_file
b.reset()
try:
disk1 = b.devicetree.get_device_by_name("disk1")
b.initialize_disk(disk1)
pv = b.new_partition(size=Size("50GiB"), fmt_type="lvmpv")
b.create_device(pv)
# allocate the partitions (decide where and on which disks they'll reside)
blivet.partitioning.do_partitioning(b)
vg = b.new_vg(parents=[pv])
b.create_device(vg)
# new lv with base size 5GiB and unbounded growth and an ext4 filesystem
dev = b.new_lv(fmt_type="ext4", size=Size("5GiB"), grow=True,
parents=[vg], name="unbounded")
b.create_device(dev)
# new lv with base size 5GiB and growth up to 15GiB and an ext4 filesystem
dev = b.new_lv(fmt_type="ext4", size=Size("5GiB"), grow=True,
maxsize=Size("15GiB"), parents=[vg], name="bounded")
b.create_device(dev)
# new lv with a fixed size of 2GiB formatted as swap space
dev = b.new_lv(fmt_type="swap", size=Size("2GiB"), parents=[vg])
b.create_device(dev)
# allocate the growable lvs
blivet.partitioning.grow_lvm(b)
print(b.devicetree)
# write the new partitions to disk and format them as specified
b.do_it()
print(b.devicetree)
finally:
b.devicetree.teardown_disk_images()
os.unlink(disk1_file)
| lgpl-2.1 | Python |
3dcad84c48e1009cd97fe4dc87471fd8b61522bf | rename searchMusic method -> searchDB | Pusungwi/pymaniadb | pymaniadb.py | pymaniadb.py | #!/usr/bin/env python
#Title : pymaniadb
#Version : 0.1
#Author : Yi 'Pusungwi' Yeon Jae
#Description : a codes of pymaniadb
import urllib.request
import urllib.parse
import io
from lxml import etree
MANIADB_ROOT_URL = 'http://www.maniadb.com/api/'
MANIADB_SEARCH_URL = 'search.asp' #검색 asp 파일 정보
class pymaniadb:
def __init__(self, apiKey, debug=False):
#EloManager main class init method
if debug == True:
print("paymaniadb Init...")
self.debug = debug
self.apiKey = apiKey
def searchDB(self, queryStr, itemtypeStr, targetStr="music", displayNum=10):
SEARCH_PARAM = urllib.parse.urlencode({'key': self.apiKey, 'target': targetStr, 'itemtype':itemtypeStr, 'query':queryStr, 'display':displayNum},
encoding='utf-8')
resultDictsArray = []
try:
requestFullUrl = MANIADB_ROOT_URL + MANIADB_SEARCH_URL + '?' + SEARCH_PARAM
recvSearchXml = urllib.request.urlopen(requestFullUrl)
except IOError:
print("URL address Error")
else:
parseEvents = ("start", "end")
tParseXml = io.BytesIO(recvSearchXml.read())
recvParsedXml = etree.iterparse(tParseXml, events=parseEvents)
for action, elem in recvParsedXml:
if action in ("start") and elem.tag == "item":
tmpResultDict = {'id':elem.values()[0]}
for subElem in elem.getchildren():
tmpResultDict[subElem.tag] = subElem.text
resultDictsArray.append(tmpResultDict)
return resultDictsArray
if __name__ == "__main__":
pymania = pymaniadb(apiKey="[HERE IS API KEY]")
resultsArray = pymania.searchDB(queryStr="muse", itemtypeStr="album")
print(resultsArray)
| #!/usr/bin/env python
#Title : pymaniadb
#Version : 0.1
#Author : Yi 'Pusungwi' Yeon Jae
#Description : a codes of pymaniadb
import urllib.request
import urllib.parse
import io
from lxml import etree
MANIADB_ROOT_URL = 'http://www.maniadb.com/api/'
MANIADB_SEARCH_URL = 'search.asp' #검색 asp 파일 정보
class pymaniadb:
def __init__(self, apiKey, debug=False):
#EloManager main class init method
print("paymaniadb Init...")
self.apiKey = apiKey
def searchMusic(self, queryStr, itemtypeStr, targetStr='music', displayNum=10):
SEARCH_PARAM = urllib.parse.urlencode({'key': self.apiKey, 'target': targetStr, 'itemtype':itemtypeStr, 'query':queryStr, 'display':displayNum},
encoding='utf-8')
resultDictsArray = []
try:
requestFullUrl = MANIADB_ROOT_URL + MANIADB_SEARCH_URL + '?' + SEARCH_PARAM
recvSearchXml = urllib.request.urlopen(requestFullUrl)
except IOError:
print("URL address Error")
else:
parseEvents = ("start", "end")
tParseXml = io.BytesIO(recvSearchXml.read())
recvParsedXml = etree.iterparse(tParseXml, events=parseEvents)
for action, elem in recvParsedXml:
if action in ("start") and elem.tag == "item":
tmpResultDict = {'id':elem.values()[0]}
for subElem in elem.getchildren():
tmpResultDict[subElem.tag] = subElem.text
resultDictsArray.append(tmpResultDict)
return resultDictsArray
if __name__ == "__main__":
pymania = pymaniadb(apiKey='[HERE IS API KEY]')
resultsArray = pymania.searchMusic(queryStr="muse", itemtypeStr="artist")
print(resultsArray)
| mit | Python |
ddfc1a223b8d7d62983499277f0528e2a0162976 | correct log msg | uw-it-aca/bridge-sis-provisioner,uw-it-aca/bridge-sis-provisioner | sis_provisioner/account_managers/verify.py | sis_provisioner/account_managers/verify.py | """
This class will check the users in DB, if the action on a user record failed
in the last load, re-apply it.
"""
import logging
from restclients.exceptions import DataFailureException
from sis_provisioner.dao.bridge import is_active_user_exist
from sis_provisioner.account_managers import fetch_users_from_db
logger = logging.getLogger(__name__)
def set_bridge_ids():
total = 0
set_bridge_id_total = 0
for uw_bri_user in fetch_users_from_db(logger):
if uw_bri_user.disabled:
continue
try:
total = total + 1
active, bridge_user = is_active_user_exist(uw_bri_user.netid)
if active:
if not uw_bri_user.has_bridge_id():
uw_bri_user.set_bridge_id(bridge_user.bridge_id)
logger.info("Set bridge id for %s" % uw_bri_user)
set_bridge_id_total = set_bridge_id_total + 1
else:
if bridge_user.bridge_id != uw_bri_user.bridge_id:
uw_bri_user.set_bridge_id(bridge_user.bridge_id)
logger.info("Update BridgeID on local %s by %s",
uw_bri_user, bridge_user)
set_bridge_id_total = set_bridge_id_total + 1
except DataFailureException as ex:
logger.error("GET %s ==> %s" % (uw_bri_user, ex))
if ex.status == 404:
logger.info("Not in Bridge, remove local record %s" %
uw_bri_user)
uw_bri_user.delete()
logger.info("Set bridge ids for %d users" % set_bridge_id_total)
logger.info("Verified %d users" % total)
return set_bridge_id_total
| """
This class will check the users in DB, if the action on a user record failed
in the last load, re-apply it.
"""
import logging
from restclients.exceptions import DataFailureException
from sis_provisioner.dao.bridge import is_active_user_exist
from sis_provisioner.account_managers import fetch_users_from_db
logger = logging.getLogger(__name__)
def set_bridge_ids():
total = 0
set_bridge_id_total = 0
for uw_bri_user in fetch_users_from_db(logger):
if uw_bri_user.disabled:
continue
try:
total = total + 1
active, bridge_user = is_active_user_exist(uw_bri_user.netid)
if active:
if not uw_bri_user.has_bridge_id():
uw_bri_user.set_bridge_id(bridge_user.bridge_id)
logger.info("Set bridge id for %s" % uw_bri_user)
set_bridge_id_total = set_bridge_id_total + 1
else:
if bridge_user.bridge_id != uw_bri_user.bridge_id:
logger.info("Update BridgeID on local %s by %s",
bridge_user, uw_bri_user)
uw_bri_user.set_bridge_id(bridge_user.bridge_id)
set_bridge_id_total = set_bridge_id_total + 1
except DataFailureException as ex:
logger.error("GET %s ==> %s" % (uw_bri_user, ex))
if ex.status == 404:
logger.info("Not in Bridge, remove local record %s" %
uw_bri_user)
uw_bri_user.delete()
logger.info("Set bridge ids for %d users" % set_bridge_id_total)
logger.info("Verified %d users" % total)
return set_bridge_id_total
| apache-2.0 | Python |
846ae9bff247b09aee0f07b2384f6b967b354eff | Add the rest of the LUCI builders to the temporary isolate lookup. | catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult | dashboard/dashboard/pinpoint/models/isolate.py | dashboard/dashboard/pinpoint/models/isolate.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Model for storing information to look up isolates.
An isolate is a way to describe the dependencies of a specific build.
More about isolates:
https://github.com/luci/luci-py/blob/master/appengine/isolate/doc/client/Design.md
"""
from google.appengine.ext import ndb
# A list of builders that recently changed names.
# TODO(dtu): Remove 6 months after LUCI migration is complete.
_BUILDER_NAME_MAP = {
'Android Compile Perf': 'android-builder-perf',
'Android arm64 Compile Perf': 'android_arm64-builder-perf',
'Linux Builder Perf': 'linux-builder-perf',
'Mac Builder Perf': 'mac-builder-perf',
'Win Builder Perf': 'win32-builder-perf',
'Win x64 Builder Perf': 'win64-builder-perf',
}
def Get(builder_name, change, target):
"""Retrieve an isolate hash from the Datastore.
Args:
builder_name: The name of the builder that produced the isolate.
change: The Change the isolate was built at.
target: The compile target the isolate is for.
Returns:
A tuple containing the isolate server and isolate hash as strings.
"""
entity = ndb.Key(Isolate, _Key(builder_name, change, target)).get()
if not entity:
if builder_name in _BUILDER_NAME_MAP:
# The builder has changed names. Try again with the new name.
# TODO(dtu): Remove 6 months after LUCI migration is complete.
builder_name = _BUILDER_NAME_MAP[builder_name]
entity = ndb.Key(Isolate, _Key(builder_name, change, target)).get()
if not entity:
raise KeyError('No isolate with builder %s, change %s, and target %s.' %
(builder_name, change, target))
else:
raise KeyError('No isolate with builder %s, change %s, and target %s.' %
(builder_name, change, target))
return entity.isolate_server, entity.isolate_hash
def Put(isolate_infos):
"""Add isolate hashes to the Datastore.
This function takes multiple entries to do a batched Datstore put.
Args:
isolate_infos: An iterable of tuples. Each tuple is of the form
(builder_name, change, target, isolate_server, isolate_hash).
"""
entities = []
for isolate_info in isolate_infos:
builder_name, change, target, isolate_server, isolate_hash = isolate_info
entity = Isolate(
isolate_server=isolate_server,
isolate_hash=isolate_hash,
id=_Key(builder_name, change, target))
entities.append(entity)
ndb.put_multi(entities)
class Isolate(ndb.Model):
isolate_server = ndb.StringProperty(indexed=False, required=True)
isolate_hash = ndb.StringProperty(indexed=False, required=True)
created = ndb.DateTimeProperty(auto_now_add=True)
def _Key(builder_name, change, target):
# The key must be stable across machines, platforms,
# Python versions, and Python invocations.
return '\n'.join((builder_name, change.id_string, target))
| # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Model for storing information to look up isolates.
An isolate is a way to describe the dependencies of a specific build.
More about isolates:
https://github.com/luci/luci-py/blob/master/appengine/isolate/doc/client/Design.md
"""
from google.appengine.ext import ndb
# A list of builders that recently changed names.
# TODO(dtu): Remove 6 months after LUCI migration is complete.
_BUILDER_NAME_MAP = {
'Android arm64 Compile Perf': 'android_arm64-builder-perf',
}
def Get(builder_name, change, target):
"""Retrieve an isolate hash from the Datastore.
Args:
builder_name: The name of the builder that produced the isolate.
change: The Change the isolate was built at.
target: The compile target the isolate is for.
Returns:
A tuple containing the isolate server and isolate hash as strings.
"""
entity = ndb.Key(Isolate, _Key(builder_name, change, target)).get()
if not entity:
if builder_name in _BUILDER_NAME_MAP:
# The builder has changed names. Try again with the new name.
# TODO(dtu): Remove 6 months after LUCI migration is complete.
builder_name = _BUILDER_NAME_MAP[builder_name]
entity = ndb.Key(Isolate, _Key(builder_name, change, target)).get()
if not entity:
raise KeyError('No isolate with builder %s, change %s, and target %s.' %
(builder_name, change, target))
else:
raise KeyError('No isolate with builder %s, change %s, and target %s.' %
(builder_name, change, target))
return entity.isolate_server, entity.isolate_hash
def Put(isolate_infos):
"""Add isolate hashes to the Datastore.
This function takes multiple entries to do a batched Datstore put.
Args:
isolate_infos: An iterable of tuples. Each tuple is of the form
(builder_name, change, target, isolate_server, isolate_hash).
"""
entities = []
for isolate_info in isolate_infos:
builder_name, change, target, isolate_server, isolate_hash = isolate_info
entity = Isolate(
isolate_server=isolate_server,
isolate_hash=isolate_hash,
id=_Key(builder_name, change, target))
entities.append(entity)
ndb.put_multi(entities)
class Isolate(ndb.Model):
isolate_server = ndb.StringProperty(indexed=False, required=True)
isolate_hash = ndb.StringProperty(indexed=False, required=True)
created = ndb.DateTimeProperty(auto_now_add=True)
def _Key(builder_name, change, target):
# The key must be stable across machines, platforms,
# Python versions, and Python invocations.
return '\n'.join((builder_name, change.id_string, target))
| bsd-3-clause | Python |
a89dbe08a0a25e4650ffae02e0f0c600f2f4832e | add warning: pygr.Data is deprecated | cjlee112/pygr,RitwikGupta/pygr,cjlee112/pygr,RitwikGupta/pygr,GenDataPro/pygr,cjlee112/pygr,GenDataPro/pygr,theoryno3/pygr,RitwikGupta/pygr,theoryno3/pygr,ctb/pygr,cjlee112/pygr,theoryno3/pygr,ctb/pygr,ctb/pygr,GenDataPro/pygr,ctb/pygr,ctb/pygr | pygr/Data.py | pygr/Data.py |
import warnings
warnings.warn('pygr.Data is deprecated. Use "from pygr import worldbase" instead!',
DeprecationWarning, stacklevel=2)
from pygr import worldbase
from metabase import ResourceServer, dumps, OneToManyRelation, OneToOneRelation,\
ManyToManyRelation, WorldbaseNotPortableError, WorldbaseNotFoundError, \
WorldbaseMismatchError, WorldbaseEmptyError, WorldbaseReadOnlyError, \
WorldbaseSchemaError, WorldbaseNoModuleError, ResourceZone
schema = worldbase.schema # ROOT OF OUR SCHEMA NAMESPACE
# PROVIDE TOP-LEVEL NAMES IN OUR RESOURCE HIERARCHY
Bio = worldbase.Bio
getResource = worldbase._mdb # our metabase interface
addResource = worldbase._mdb.add_resource
def addResourceDict(d, layer=None):
'queue a dict of name:object pairs for saving to specified db layer'
if layer is not None: # use the named metabase specified by layer
mdb = worldbase._mdb.zoneDict[layer] # KeyError if layer not found!
else: # use default MetabaseList
mdb = worldbase._mdb
for k,v in d.items(): # queue each resource in the dictionary
mdb.add_resource(k, v)
addSchema = worldbase._mdb.add_schema
deleteResource = worldbase._mdb.delete_resource
dir = worldbase._mdb.dir
def newServer(*args, **kwargs):
return ResourceServer(worldbase._mdb, *args, **kwargs)
save = worldbase._mdb.commit
rollback = worldbase._mdb.rollback
list_pending = worldbase._mdb.list_pending
loads = worldbase._mdb.loads
update = worldbase._mdb.update
clear_cache = worldbase._mdb.clear_cache
# TOP-LEVEL NAMES FOR STANDARDIZED LAYERS
here = ResourceZone(getResource, 'here')
my = ResourceZone(getResource, 'my')
system = ResourceZone(getResource, 'system')
subdir = ResourceZone(getResource, 'subdir')
remote = ResourceZone(getResource, 'remote')
MySQL = ResourceZone(getResource, 'MySQL')
__all__ = ('Bio', 'schema', 'getResource', 'addResource', 'addSchema',
'deleteResource', 'dir', 'newServer', 'save', 'rollback',
'list_pending', 'loads', 'dumps', 'update', 'clear_cache',
'OneToManyRelation', 'ManyToManyRelation',
'OneToOneRelation', 'WorldbaseNotPortableError',
'WorldbaseNotFoundError', 'WorldbaseMismatchError',
'WorldbaseEmptyError', 'WorldbaseReadOnlyError',
'WorldbaseSchemaError', 'WorldbaseNoModuleError',
'here', 'my', 'system', 'subdir', 'remote', 'MySQL')
|
from pygr import worldbase
from metabase import ResourceServer, dumps, OneToManyRelation, OneToOneRelation,\
ManyToManyRelation, WorldbaseNotPortableError, WorldbaseNotFoundError, \
WorldbaseMismatchError, WorldbaseEmptyError, WorldbaseReadOnlyError, \
WorldbaseSchemaError, WorldbaseNoModuleError, ResourceZone
schema = worldbase.schema # ROOT OF OUR SCHEMA NAMESPACE
# PROVIDE TOP-LEVEL NAMES IN OUR RESOURCE HIERARCHY
Bio = worldbase.Bio
getResource = worldbase._mdb # our metabase interface
addResource = worldbase._mdb.add_resource
def addResourceDict(d, layer=None):
'queue a dict of name:object pairs for saving to specified db layer'
if layer is not None: # use the named metabase specified by layer
mdb = worldbase._mdb.zoneDict[layer] # KeyError if layer not found!
else: # use default MetabaseList
mdb = worldbase._mdb
for k,v in d.items(): # queue each resource in the dictionary
mdb.add_resource(k, v)
addSchema = worldbase._mdb.add_schema
deleteResource = worldbase._mdb.delete_resource
dir = worldbase._mdb.dir
def newServer(*args, **kwargs):
return ResourceServer(worldbase._mdb, *args, **kwargs)
save = worldbase._mdb.commit
rollback = worldbase._mdb.rollback
list_pending = worldbase._mdb.list_pending
loads = worldbase._mdb.loads
update = worldbase._mdb.update
clear_cache = worldbase._mdb.clear_cache
# TOP-LEVEL NAMES FOR STANDARDIZED LAYERS
here = ResourceZone(getResource, 'here')
my = ResourceZone(getResource, 'my')
system = ResourceZone(getResource, 'system')
subdir = ResourceZone(getResource, 'subdir')
remote = ResourceZone(getResource, 'remote')
MySQL = ResourceZone(getResource, 'MySQL')
__all__ = ('Bio', 'schema', 'getResource', 'addResource', 'addSchema',
'deleteResource', 'dir', 'newServer', 'save', 'rollback',
'list_pending', 'loads', 'dumps', 'update', 'clear_cache',
'OneToManyRelation', 'ManyToManyRelation',
'OneToOneRelation', 'WorldbaseNotPortableError',
'WorldbaseNotFoundError', 'WorldbaseMismatchError',
'WorldbaseEmptyError', 'WorldbaseReadOnlyError',
'WorldbaseSchemaError', 'WorldbaseNoModuleError',
'here', 'my', 'system', 'subdir', 'remote', 'MySQL')
| bsd-3-clause | Python |
8dc18110345bd7a67f393b5a608f233b38af072a | Verify that the requested builders are actually trybots Review URL: https://codereview.appspot.com/7194048 | google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot | master/skia_master_scripts/monkeypatches.py | master/skia_master_scripts/monkeypatches.py | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Monkeypatches to override upstream code. """
from master import try_job_base
from master import try_job_svn
from master.try_job_base import text_to_dict
from twisted.internet import defer
from twisted.python import log
################################################################################
############################# Trybot Monkeypatches #############################
################################################################################
@defer.deferredGenerator
def SubmitTryJobChanges(self, changes):
  """ Override of SVNPoller.submit_changes:
  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/master/try_job_svn.py?view=markup
  We modify it so that the patch file url is added to the build properties.
  This allows the slave to download the patch directly rather than receiving
  it from the master.
  """
  for chdict in changes:
    # pylint: disable=E1101
    # Extract the try-job options from the commit message text.
    parsed = self.parent.parse_options(text_to_dict(chdict['comments']))
    # 'fix' revision.
    # LKGR must be known before creating the change object.
    wfd = defer.waitForDeferred(self.parent.get_lkgr(parsed))
    yield wfd
    wfd.getResult()
    # Register the change with the build master.  The patch file URL
    # (repository root plus the first file of the change) is stored in the
    # build properties so the slave can fetch the patch itself.
    wfd = defer.waitForDeferred(self.master.addChange(
        author=','.join(parsed['email']),
        revision=parsed['revision'],
        comments='',
        properties={'patch_file_url': chdict['repository'] + '/' + \
            chdict['files'][0]}))
    yield wfd
    change = wfd.getResult()
    # Record the try job (its files and parsed options) under the new
    # change number.
    self.parent.addChangeInner(chdict['files'], parsed, change.number)
# Install the override on SVNPoller.
try_job_svn.SVNPoller.submit_changes = SubmitTryJobChanges
def TryJobCreateBuildset(self, ssid, parsed_job):
  """ Override of TryJobBase.create_buildset:
  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/master/try_job_base.py?view=markup
  Unlike the upstream version, a buildset is only created for builders that
  belong to this try scheduler's pool; requests naming any other builder are
  rejected.  This prevents try requests from running on builders which are
  not registered as trybots (Chromium avoids the issue by using a separate
  try master).
  """
  log.msg('Creating try job(s) %s' % ssid)
  trybot_pool = self.pools[self.name]
  result = None
  for requested_builder in parsed_job['bot']:
    if requested_builder not in trybot_pool:
      log.msg('Rejecting try job for builder: %s' % requested_builder)
      continue
    result = self.addBuildsetForSourceStamp(
        ssid=ssid,
        reason=parsed_job['name'],
        external_idstring=parsed_job['name'],
        builderNames=[requested_builder],
        properties=self.get_props(requested_builder, parsed_job))
  return result
try_job_base.TryJobBase.create_buildset = TryJobCreateBuildset | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Monkeypatches to override upstream code. """
from master import try_job_svn
from master.try_job_base import text_to_dict
from twisted.internet import defer
################################################################################
############################# Trybot Monkeypatches #############################
################################################################################
@defer.deferredGenerator
def SubmitTryJobChanges(self, changes):
  """ Override of SVNPoller.submit_changes:
  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/master/try_job_svn.py?revision=119459&view=markup
  We modify it so that the patch file url is added to the build properties.
  This allows the slave to download the patch directly rather than receiving
  it from the master.
  """
  for chdict in changes:
    # pylint: disable=E1101
    # Extract the try-job options from the commit message text.
    parsed = self.parent.parse_options(text_to_dict(chdict['comments']))
    # 'fix' revision.
    # LKGR must be known before creating the change object.
    wfd = defer.waitForDeferred(self.parent.get_lkgr(parsed))
    yield wfd
    wfd.getResult()
    # Register the change with the build master.  The patch file URL
    # (repository root plus the first file of the change) is stored in the
    # build properties so the slave can fetch the patch itself.
    wfd = defer.waitForDeferred(self.master.addChange(
        author=','.join(parsed['email']),
        revision=parsed['revision'],
        comments='',
        properties={'patch_file_url': chdict['repository'] + '/' + \
            chdict['files'][0]}))
    yield wfd
    change = wfd.getResult()
    # Record the try job (its files and parsed options) under the new
    # change number.
    self.parent.addChangeInner(chdict['files'], parsed, change.number)
# Install the override on SVNPoller.
try_job_svn.SVNPoller.submit_changes = SubmitTryJobChanges
| bsd-3-clause | Python |
d74c9cee91cd0040510ac90a9435f8083a498421 | add get_load_averages() | victronenergy/velib_python | ve_utils.py | ve_utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from traceback import print_exc
from os import _exit as os_exit
from os import statvfs
import logging
logger = logging.getLogger(__name__)
# Use this function to make sure the code quits on an unexpected exception. Make sure to use it
# when using gobject.idle_add and also gobject.timeout_add.
# Without this, the code will just keep running, since gobject does not stop the mainloop on an
# exception.
# Example: gobject.idle_add(exit_on_error, myfunc, arg1, arg2)
def exit_on_error(func, *args, **kwargs):
    # Call func(*args, **kwargs); on any exception, print the traceback
    # (best effort) and terminate the whole process immediately.  Intended
    # to wrap gobject.idle_add / gobject.timeout_add callbacks, which would
    # otherwise keep the mainloop running after an exception.
    try:
        return func(*args, **kwargs)
    except:
        try:
            print 'exit_on_error: there was an exception. Printing stacktrace will be tryed and then exit'
            print_exc()
        except:
            # Printing itself may fail (e.g. broken stdout); exit regardless.
            pass
        # sys.exit() is not used, since that throws an exception, which does not lead to a program
        # halt when used in a dbus callback, see connection.py in the Python/Dbus libraries, line 230.
        os_exit(1)
# Cached VRM portal id; filled in on the first call to get_vrm_portal_id().
__vrm_portal_id = None
def get_vrm_portal_id():
    # For the CCGX, the definition of the VRM Portal ID is that it is the mac address of the onboard-
    # ethernet port (eth0), stripped from its colons (:) and lower case.
    # nice coincidence is that this also works fine when running on your (linux) development computer.
    global __vrm_portal_id
    if __vrm_portal_id:
        return __vrm_portal_id
    # Assume we are on linux
    import fcntl, socket, struct
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # 0x8927 is the SIOCGIFHWADDR ioctl: fetch the hardware (MAC) address of eth0.
    info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', 'eth0'[:15]))
    # The 6 MAC bytes sit at offset 18..23 of the returned ifreq structure.
    __vrm_portal_id = ''.join(['%02x' % ord(char) for char in info[18:24]])
    return __vrm_portal_id
# See VE.Can registers - public.docx for definition of this conversion
def convert_vreg_version_to_readable(version):
    """Convert a numeric VE.Can VREG firmware version into a readable string.

    Examples: 0x0102 -> 'v1.02', 0x10203 -> 'v1.02.03', 0 -> 'v00'.
    """
    def str_to_arr(x, length):
        # Split string x into consecutive chunks of the given length.
        a = []
        for i in range(0, len(x), length):
            a.append(x[i:i+length])
        return a

    x = ("%x" % version).upper()
    # Pad to an even number of hex digits so the string splits into whole bytes.
    if len(x) in (1, 3, 5):
        x = '0' + x
    a = str_to_arr(x, 2)
    # Remove the first byte if there are three bytes and it is '00'.
    # Bug fix: this used a.remove(0), which would raise ValueError because
    # the list contains strings and never the integer 0.
    if len(a) == 3 and a[0] == '00':
        a.pop(0)
    # If we have two or three bytes now and the first character is a '0', drop it.
    if len(a) >= 2 and a[0][0:1] == '0':
        a[0] = a[0][1]
    return 'v' + '.'.join(a)
def get_free_space(path):
result = -1
try:
s = statvfs(path)
result = s.f_frsize * s.f_bavail # Number of free bytes that ordinary users
except Exception, ex:
logger.info("Error while retrieving free space for path %s: %s" % (path, ex))
return result
def get_load_averages():
    """Return the 1, 5 and 15 minute load averages as a list of strings,
    read from /proc/loadavg, or None when the file cannot be read.
    """
    try:
        with open('/proc/loadavg', 'r') as f:
            line = f.read()
    except Exception as ex:
        logger.info("Error while reading & processing load average: %s" % ex)
        # Bug fix: 'line' was left undefined here, so falling through to the
        # split below raised a NameError instead of failing gracefully.
        return None
    return line.split()[:3]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from traceback import print_exc
from os import _exit as os_exit
from os import statvfs
import logging
logger = logging.getLogger(__name__)
# Use this function to make sure the code quits on an unexpected exception. Make sure to use it
# when using gobject.idle_add and also gobject.timeout_add.
# Without this, the code will just keep running, since gobject does not stop the mainloop on an
# exception.
# Example: gobject.idle_add(exit_on_error, myfunc, arg1, arg2)
def exit_on_error(func, *args, **kwargs):
    # Call func(*args, **kwargs); on any exception, print the traceback
    # (best effort) and terminate the whole process immediately.  Intended
    # to wrap gobject.idle_add / gobject.timeout_add callbacks, which would
    # otherwise keep the mainloop running after an exception.
    try:
        return func(*args, **kwargs)
    except:
        try:
            print 'exit_on_error: there was an exception. Printing stacktrace will be tryed and then exit'
            print_exc()
        except:
            # Printing itself may fail (e.g. broken stdout); exit regardless.
            pass
        # sys.exit() is not used, since that throws an exception, which does not lead to a program
        # halt when used in a dbus callback, see connection.py in the Python/Dbus libraries, line 230.
        os_exit(1)
# Cached VRM portal id; filled in on the first call to get_vrm_portal_id().
__vrm_portal_id = None
def get_vrm_portal_id():
    # For the CCGX, the definition of the VRM Portal ID is that it is the mac address of the onboard-
    # ethernet port (eth0), stripped from its colons (:) and lower case.
    # nice coincidence is that this also works fine when running on your (linux) development computer.
    global __vrm_portal_id
    if __vrm_portal_id:
        return __vrm_portal_id
    # Assume we are on linux
    import fcntl, socket, struct
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # 0x8927 is the SIOCGIFHWADDR ioctl: fetch the hardware (MAC) address of eth0.
    info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', 'eth0'[:15]))
    # The 6 MAC bytes sit at offset 18..23 of the returned ifreq structure.
    __vrm_portal_id = ''.join(['%02x' % ord(char) for char in info[18:24]])
    return __vrm_portal_id
# See VE.Can registers - public.docx for definition of this conversion
def convert_vreg_version_to_readable(version):
    """Convert a numeric VE.Can VREG firmware version into a readable string.

    Examples: 0x0102 -> 'v1.02', 0x10203 -> 'v1.02.03', 0 -> 'v00'.
    """
    def str_to_arr(x, length):
        # Split string x into consecutive chunks of the given length.
        a = []
        for i in range(0, len(x), length):
            a.append(x[i:i+length])
        return a

    x = ("%x" % version).upper()
    # Pad to an even number of hex digits so the string splits into whole bytes.
    if len(x) in (1, 3, 5):
        x = '0' + x
    a = str_to_arr(x, 2)
    # Remove the first byte if there are three bytes and it is '00'.
    # Bug fix: this used a.remove(0), which would raise ValueError because
    # the list contains strings and never the integer 0.
    if len(a) == 3 and a[0] == '00':
        a.pop(0)
    # If we have two or three bytes now and the first character is a '0', drop it.
    if len(a) >= 2 and a[0][0:1] == '0':
        a[0] = a[0][1]
    return 'v' + '.'.join(a)
def get_free_space(path):
result = -1
try:
s = statvfs(path)
result = s.f_frsize * s.f_bavail # Number of free bytes that ordinary users
except Exception, ex:
logger.info("Error while retrieving free space for path %s: %s" % (path, ex))
return result
| mit | Python |
987a4cceb6b4096923df82d2ef291cc2601e32d7 | Update LightIntensity.py | Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System | device/src/LightIntensity.py | device/src/LightIntensity.py | #--------------------------------------
# ___ _ ____
# / __(_)__ / __/
# _\ \/ / _ \_\ \
# /___/_/ .__/___/
# /_/
#
# LightIntensity.py
# Get light intensity data from GY-30 module which is base on bh1750 sensor.
#
# Author : Arvin
# Date : 15/09/2017
#--------------------------------------
#Wiring method between TPYBoard and GY-30 module.
#Light intensity sensor(GY-30) <--> I2C(1)
#SDA <--> X10
#SCL <--> X9
#VCC
#GND
#ADO(ADDR/address) <--> GND
from pyb import I2C
import time
# Define some constants from the datasheet (BH1750 opcodes per its datasheet)
DEVICE = 0x23 # The value is 0x23 if GY-30's ADO(ADDR) pin is connected to GND, value is 0x5c while VCC.
POWER_DOWN = 0x00 # No active state
POWER_ON = 0x01 # Power on
RESET = 0x07 # Reset data register value
# Start measurement at 4lx resolution. Time typically 16ms.
CONTINUOUS_LOW_RES_MODE = 0x13
# Start measurement at 1lx resolution. Time typically 120ms
CONTINUOUS_HIGH_RES_MODE_1 = 0x10
# Start measurement at 0.5lx resolution. Time typically 120ms
CONTINUOUS_HIGH_RES_MODE_2 = 0x11
# Start measurement at 1lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
ONE_TIME_HIGH_RES_MODE_1 = 0x20
# Start measurement at 0.5lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
ONE_TIME_HIGH_RES_MODE_2 = 0x21
# Start measurement at 1lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
ONE_TIME_LOW_RES_MODE = 0x23
i2c = I2C(1, I2C.MASTER) # create and init I2C bus 1 as a master
#i2c.is_ready(0x23) # check if slave 0x23 is ready
#i2c.scan() # scan for slaves on the bus, returning a list of addresses
def convertToNumber(data):
    # Combine the high byte (data[0]) and low byte (data[1]) into the raw
    # 16-bit sensor count, then apply the BH1750 scale factor of 1.2
    # counts per lux and truncate to an integer lux value.
    raw = (data[0] << 8) + data[1]
    return int(raw / 1.2)
def readLight(addr=DEVICE):
    # data = bus.read_i2c_block_data(addr,ONE_TIME_HIGH_RES_MODE_1)
    # Start a continuous 1lx-resolution measurement on the sensor.
    # NOTE(review): the 'addr' parameter is ignored; the module-level DEVICE
    # address is used below -- confirm whether per-call addresses are needed.
    i2c.send(CONTINUOUS_HIGH_RES_MODE_1, DEVICE)
    time.sleep(0.2) #Waiting for the sensor data
    data = i2c.mem_read(8, DEVICE, 2) # read 8 bytes from slave DEVICE starting at address 2; only the first two bytes are used by convertToNumber
    #print(data)
    #print(data[1])
    #print(data[2])
    return convertToNumber(data)
| #--------------------------------------
# ___ _ ____
# / __(_)__ / __/
# _\ \/ / _ \_\ \
# /___/_/ .__/___/
# /_/
#
# LightIntensity.py
# Get light intensity data from GY-30 module which is base on bh1750 sensor.
#
# Author : Arvin
# Date : 15/09/2017
#--------------------------------------
#Wiring method between TPYBoard and GY-30 module.
#Light intensity sensor(GY-30) <--> I2C(1)
#SDA <--> X10
#SCL <--> X9
#VCC
#GND
#ADO(ADDR/address) <--> GND
from pyb import I2C
import time
# Define some constants from the datasheet (BH1750 opcodes per its datasheet)
DEVICE = 0x23 # The value is 0x23 if GY-30's ADO(ADDR) pin is connected to GND, value is 0x5c while VCC.
POWER_DOWN = 0x00 # No active state
POWER_ON = 0x01 # Power on
RESET = 0x07 # Reset data register value
# Start measurement at 4lx resolution. Time typically 16ms.
CONTINUOUS_LOW_RES_MODE = 0x13
# Start measurement at 1lx resolution. Time typically 120ms
CONTINUOUS_HIGH_RES_MODE_1 = 0x10
# Start measurement at 0.5lx resolution. Time typically 120ms
CONTINUOUS_HIGH_RES_MODE_2 = 0x11
# Start measurement at 1lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
ONE_TIME_HIGH_RES_MODE_1 = 0x20
# Start measurement at 0.5lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
ONE_TIME_HIGH_RES_MODE_2 = 0x21
# Start measurement at 1lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
ONE_TIME_LOW_RES_MODE = 0x23
i2c = I2C(1, I2C.MASTER) # create and init I2C bus 1 as a master
#i2c.is_ready(0x23) # check if slave 0x23 is ready
#i2c.scan() # scan for slaves on the bus, returning a list of addresses
def convertToNumber(data):
    # Combine the high byte (data[0]) and low byte (data[1]) into the raw
    # 16-bit sensor count, then apply the BH1750 scale factor of 1.2
    # counts per lux and truncate to an integer lux value.
    raw = (data[0] << 8) + data[1]
    return int(raw / 1.2)
def readLight(addr=DEVICE):
    # data = bus.read_i2c_block_data(addr,ONE_TIME_HIGH_RES_MODE_1)
    # Start a continuous 1lx-resolution measurement on the sensor.
    # NOTE(review): the 'addr' parameter is ignored; the module-level DEVICE
    # address is used below -- confirm whether per-call addresses are needed.
    i2c.send(CONTINUOUS_HIGH_RES_MODE_1, DEVICE)
    time.sleep(0.2) #Waiting for the sensor data
    data = i2c.mem_read(8, DEVICE, 2) # read 8 bytes from slave DEVICE starting at address 2; only the first two bytes are used by convertToNumber
    #print(data)
    #print(data[1])
    #print(data[2])
    return convertToNumber(data)
| mit | Python |
bb85e3475c9d7b7f685a75916498ac98241fff86 | test ssh | Fendoe/open-hackathon-o,YaningX/open-hackathon,rapidhere/open-hackathon,frankyao47/open-hackathon,Fendoe/open-hackathon,msopentechcn/open-hackathon,xunxunzgq/open-hackathon-bak_01,frankyao47/open-hackathon,Fendoe/open-hackathon,msopentechcn/open-hackathon,Fendoe/open-hackathon,xunxunzgq/open-hackathon-bak_01,lclchen/open-hackathon,rapidhere/open-hackathon,YaningX/open-hackathon,msopentechcn/open-hackathon,mshubian/BAK_open-hackathon,Fendoe/open-hackathon-o,mshubian/BAK_open-hackathon,SpAiNiOr/open-hackathon,Fendoe/open-hackathon,mshubian/BAK_open-hackathon,rapidhere/open-hackathon,SpAiNiOr/open-hackathon,YaningX/open-hackathon,lclchen/open-hackathon,frankyao47/open-hackathon,msopentechcn/open-hackathon,msopentechcn/open-hackathon,mshubian/BAK_open-hackathon,lclchen/open-hackathon,juniwang/open-hackathon,rapidhere/open-hackathon,Fendoe/open-hackathon-o,xunxunzgq/open-hackathon-bak_01,frankyao47/open-hackathon,Fendoe/open-hackathon-o,Fendoe/open-hackathon,lclchen/open-hackathon,xunxunzgq/open-hackathon-bak_01,frankyao47/open-hackathon,mshubian/BAK_open-hackathon,juniwang/open-hackathon,mshubian/BAK_open-hackathon,rapidhere/open-hackathon,juniwang/open-hackathon,Fendoe/open-hackathon,mshubian/BAK_open-hackathon,juniwang/open-hackathon,frankyao47/open-hackathon,SpAiNiOr/open-hackathon,rapidhere/open-hackathon,YaningX/open-hackathon,Fendoe/open-hackathon-o,xunxunzgq/open-hackathon-bak_01,lclchen/open-hackathon,Fendoe/open-hackathon-o,lclchen/open-hackathon,YaningX/open-hackathon,SpAiNiOr/open-hackathon,juniwang/open-hackathon,xunxunzgq/open-hackathon-bak_01,YaningX/open-hackathon,SpAiNiOr/open-hackathon,SpAiNiOr/open-hackathon,msopentechcn/open-hackathon,juniwang/open-hackathon | testlogging/log.py | testlogging/log.py | import logging
import logging.config
# Configure the logging system (handlers, formatters, levels) from logging.conf.
logging.config.fileConfig("logging.conf")
# create logger; "myLogger" must be declared in logging.conf
logger = logging.getLogger("myLogger")
# "application" code: thin wrappers around the logger, one per level
def log_debug(debug):
    """Log 'debug' at DEBUG level on the module logger."""
    logger.debug(debug)
def log_info(info):
    """Log 'info' at INFO level on the module logger."""
    logger.info(info)
def log_warn(warn):
    """Log 'warn' at WARNING level on the module logger."""
    # Logger.warn() is a deprecated alias; the documented method is
    # warning(). Behaviour is identical.
    logger.warning(warn)
def log_error(error):
    """Log 'error' at ERROR level; non-string values are stringified first."""
    logger.error(str(error))
def log_critical(critical):
    """Log 'critical' at CRITICAL level on the module logger."""
    logger.critical(critical)
There was an error verifying your email. | import logging
import logging.config
# Configure the logging system (handlers, formatters, levels) from logging.conf.
logging.config.fileConfig("logging.conf")
# create logger; "myLogger" must be declared in logging.conf
logger = logging.getLogger("myLogger")
def log_debug(debug):
    """Log 'debug' at DEBUG level on the module logger."""
    logger.debug(debug)
def log_info(info):
    """Log 'info' at INFO level on the module logger."""
    logger.info(info)
def log_warn(warn):
    """Log 'warn' at WARNING level on the module logger."""
    # Logger.warn() is a deprecated alias; the documented method is
    # warning(). Behaviour is identical.
    logger.warning(warn)
def log_error(error):
    """Log 'error' at ERROR level; non-string values are stringified first."""
    logger.error(str(error))
def log_critical(critical):
logger.critical(critical) | mit | Python |
22f200b5f0e1263dd7061c0a793380b54001c531 | remove the System class and refactor the solve function0 | AlanCristhian/symbolic_old,AlanCristhian/symbolic_old | expressions/solvers.py | expressions/solvers.py | from expressions import core
def solve_single_equality(equality, variable):
expression = equality.replace("==","-(") + ")"
c = eval(expression, {variable: 1j})
return {variable: -c.real/c.imag}
def solve(system):
variable = system._generator.gi_code.co_varnames[1:][0]
return solve_single_equality(system._expression, variable)
def extract_coefficients(system):
print(system._expression)
# equalities = system._expression.split('|')
# equality
| from expressions import core
class System(core.BaseType):
"""The most generic type."""
def __setitem__(self, key, value):
return self._array.__setitem__(key, value)
def __repr__(self):
return 'System(%s)' % repr(self._array)
def solve(system):
variable = system._generator.gi_code.co_varnames[1:][0]
expression = system._expression.replace("==","-(") + ")"
c = eval(expression, {variable: 1j})
return {variable: -c.real/c.imag} | mit | Python |
0f71be12093e7d650cbd8a723a123d033df8ea57 | add more tests | grnet/ac16 | src/ilin2.py | src/ilin2.py | from petlib.ec import EcGroup
def params_gen(nid=713):
G = EcGroup(nid)
g = G.generator()
o = G.order()
return (G, g, o)
def key_gen(params):
_, g, o = params
x = 1 + (o - 2).random()
h = x * g
return ((g, h), x)
def enc(pk, s1, s2, m):
g, h = pk
return (s1*h, s2*(g+h), (m + s1 + s2) * g)
def dec(c, params, sk, table):
_, g, o = params
c1, c2, c3 = c
e1 = -(sk).mod_inverse(o)
e2 = -(sk + 1).mod_inverse(o)
v = (c3 + e2*c2 + e1*c1)
return table[v]
def make_table(params, n):
_, g, _ = params
table = {}
for i in range(n):
table[i * g] = i
return table
def test_encdec():
params = params_gen()
table = make_table(params, 1000)
G, g, o = params
pk, sk = key_gen(params)
s1 = o.random()
s2 = o.random()
c = enc(pk, s1, s2, 666)
assert(dec(c, params, sk, table) == 666)
c = enc(pk, s1, s2, 7)
assert(dec(c, params, sk, table) == 7)
import random
ps = random.sample(range(1000), 100)
for i in range(100):
c = enc(pk, s1, s2, ps[i])
assert(dec(c, params, sk, table) == ps[i])
if __name__ == '__main__':
test_encdec()
| from petlib.ec import EcGroup
def params_gen(nid=713):
G = EcGroup(nid)
g = G.generator()
o = G.order()
return (G, g, o)
def key_gen(params):
_, g, o = params
x = 1 + (o - 2).random()
h = x * g
return ((g, h), x)
def enc(pk, s1, s2, m):
g, h = pk
return (s1*h, s2*(g+h), (m + s1 + s2) * g)
def dec(c, params, sk, table):
_, g, o = params
c1, c2, c3 = c
e1 = -(sk).mod_inverse(o)
e2 = -(sk + 1).mod_inverse(o)
v = (c3 + e2*c2 + e1*c1)
return table[v]
def make_table(params, n):
_, g, o = params
table = {}
for i in range(n):
table[i * g] = i
return table
def test_encdec():
params = params_gen()
table = make_table(params, 1000)
G, g, o = params
pk, sk = key_gen(params)
s1 = o.random()
s2 = o.random()
c = enc(pk, s1, s2, 666)
assert(dec(c, params, sk, table) == 666)
if __name__ == '__main__':
test_encdec()
| agpl-3.0 | Python |
91156898a69b9d37a3e587fc7aaf2a3320b46881 | use new date formatting function in doepages | erinspace/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,jeffreyliu3230/scrapi,erinspace/scrapi,alexgarciac/scrapi,felliott/scrapi,mehanig/scrapi,icereval/scrapi,mehanig/scrapi,fabianvf/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,felliott/scrapi | scrapi/harvesters/doepages.py | scrapi/harvesters/doepages.py | from __future__ import unicode_literals
from datetime import date, timedelta
from lxml import etree
from scrapi import requests
from scrapi import settings
from scrapi.base import XMLHarvester
from scrapi.linter import RawDocument
from scrapi.util import copy_to_unicode, format_date_with_slashes
from scrapi.base.schemas import DOESCHEMA
class DoepagesHarvester(XMLHarvester):
short_name = 'doepages'
long_name = 'Department of Energy Pages'
url = 'http://www.osti.gov/pages/'
schema = DOESCHEMA
namespaces = {
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'dc': 'http://purl.org/dc/elements/1.1/',
'dcq': 'http://purl.org/dc/terms/'
}
def harvest(self, start_date=None, end_date=None):
start_date = start_date or date.today() - timedelta(settings.DAYS_BACK)
end_date = end_date or date.today()
base_url = 'http://www.osti.gov/pages/pagesxml?nrows={0}&EntryDateFrom={1}&EntryDateTo={2}'
url = base_url.format('1', format_date_with_slashes(start_date), format_date_with_slashes(end_date))
initial_data = requests.get(url)
record_encoding = initial_data.encoding
initial_doc = etree.XML(initial_data.content)
num_results = int(initial_doc.xpath('//records/@count', namespaces=self.namespaces)[0])
url = base_url.format(num_results, start_date, end_date)
data = requests.get(url)
doc = etree.XML(data.content)
records = doc.xpath('records/record')
xml_list = []
for record in records:
doc_id = record.xpath('dc:ostiId/node()', namespaces=self.namespaces)[0]
record = etree.tostring(record, encoding=record_encoding)
xml_list.append(RawDocument({
'doc': record,
'source': self.short_name,
'docID': copy_to_unicode(doc_id),
'filetype': 'xml'
}))
return xml_list
| from __future__ import unicode_literals
from datetime import date, timedelta
from lxml import etree
from scrapi import requests
from scrapi.base import XMLHarvester
from scrapi.linter import RawDocument
from scrapi.util import copy_to_unicode
from scrapi.base.schemas import DOESCHEMA
class DoepagesHarvester(XMLHarvester):
short_name = 'doepages'
long_name = 'Department of Energy Pages'
url = 'http://www.osti.gov/pages/'
schema = DOESCHEMA
namespaces = {
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'dc': 'http://purl.org/dc/elements/1.1/',
'dcq': 'http://purl.org/dc/terms/'
}
def harvest(self, start_date=None, end_date=None):
start_date = start_date or date.today() - timedelta(1)
end_date = end_date or date.today()
base_url = 'http://www.osti.gov/pages/pagesxml?nrows={0}&EntryDateFrom={1}&EntryDateTo={2}'
url = base_url.format('1', start_date.strftime('%m/%d/%Y'), end_date.strftime('%m/%d/%Y'))
initial_data = requests.get(url)
record_encoding = initial_data.encoding
initial_doc = etree.XML(initial_data.content)
num_results = int(initial_doc.xpath('//records/@count', namespaces=self.namespaces)[0])
url = base_url.format(num_results, start_date, end_date)
data = requests.get(url)
doc = etree.XML(data.content)
records = doc.xpath('records/record')
xml_list = []
for record in records:
doc_id = record.xpath('dc:ostiId/node()', namespaces=self.namespaces)[0]
record = etree.tostring(record, encoding=record_encoding)
xml_list.append(RawDocument({
'doc': record,
'source': self.short_name,
'docID': copy_to_unicode(doc_id),
'filetype': 'xml'
}))
return xml_list
| apache-2.0 | Python |
956f9c476273d506313a15fe49e62adcdaa2f70a | upgrade recipe order fix | tiborsimko/invenio,inveniosoftware/invenio,tiborsimko/invenio,inveniosoftware/invenio | invenio/modules/accounts/upgrades/accounts_2015_03_06_namedefaults.py | invenio/modules/accounts/upgrades/accounts_2015_03_06_namedefaults.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from sqlalchemy import *
from sqlalchemy.dialects import mysql
from invenio.ext.sqlalchemy import db
from invenio.modules.upgrader.api import op
depends_on = [u'accounts_2015_03_06_passlib',
u'accounts_2015_01_14_add_name_columns']
def info():
return "Default value for family/given names."
def do_upgrade():
"""Implement your upgrades here."""
m = db.MetaData(bind=db.engine)
m.reflect()
u = m.tables['user']
conn = db.engine.connect()
conn.execute(u.update().where(u.c.family_name == None).values(
family_name=''))
conn.execute(u.update().where(u.c.given_names == None).values(
given_names=''))
op.alter_column('user', 'family_name',
existing_type=mysql.VARCHAR(length=255),
nullable=False,
server_default='')
op.alter_column('user', 'given_names',
existing_type=mysql.VARCHAR(length=255),
nullable=False,
server_default='')
def estimate():
"""Estimate running time of upgrade in seconds (optional)."""
return 1
| # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from sqlalchemy import *
from sqlalchemy.dialects import mysql
from invenio.ext.sqlalchemy import db
from invenio.modules.upgrader.api import op
depends_on = [u'accounts_2015_03_06_passlib']
def info():
return "Default value for family/given names."
def do_upgrade():
"""Implement your upgrades here."""
m = db.MetaData(bind=db.engine)
m.reflect()
u = m.tables['user']
conn = db.engine.connect()
conn.execute(u.update().where(u.c.family_name == None).values(
family_name=''))
conn.execute(u.update().where(u.c.given_names == None).values(
given_names=''))
op.alter_column('user', 'family_name',
existing_type=mysql.VARCHAR(length=255),
nullable=False,
server_default='')
op.alter_column('user', 'given_names',
existing_type=mysql.VARCHAR(length=255),
nullable=False,
server_default='')
def estimate():
"""Estimate running time of upgrade in seconds (optional)."""
return 1
| mit | Python |
356e61e7d68dee817a25a06766ec33a406598833 | add disabled to local-dist | fabianvf/scrapi,jeffreyliu3230/scrapi,icereval/scrapi,alexgarciac/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,erinspace/scrapi,fabianvf/scrapi,felliott/scrapi,ostwald/scrapi,mehanig/scrapi,mehanig/scrapi | scrapi/settings/local-dist.py | scrapi/settings/local-dist.py | DEBUG = False
ELASTIC_TIMEOUT = 10
ELASTIC_INDEX = 'share_v2'
ELASTIC_URI = 'localhost:9200'
BROKER_URL = 'amqp://guest@localhost'
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
RECORD_HTTP_TRANSACTIONS = False
NORMALIZED_PROCESSING = []
RAW_PROCESSING = []
SENTRY_DSN = None
USE_FLUENTD = False
FLUENTD_ARGS = {
'tag': 'app.scrapi'
}
disabled = []
CASSANDRA_URI = ['127.0.0.1']
CASSANDRA_KEYSPACE = 'scrapi'
FRONTEND_KEYS = [
"uris",
"contributors",
"providerUpdatedDateTime",
"description",
"title",
"freeToRead",
"languages",
"licenses",
"publisher",
"subjects",
"tags",
"sponsorships",
"otherProperties",
"shareProperties"
]
| DEBUG = False
ELASTIC_TIMEOUT = 10
ELASTIC_INDEX = 'share_v2'
ELASTIC_URI = 'localhost:9200'
BROKER_URL = 'amqp://guest@localhost'
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
RECORD_HTTP_TRANSACTIONS = False
NORMALIZED_PROCESSING = []
RAW_PROCESSING = []
SENTRY_DSN = None
USE_FLUENTD = False
FLUENTD_ARGS = {
'tag': 'app.scrapi'
}
CASSANDRA_URI = ['127.0.0.1']
CASSANDRA_KEYSPACE = 'scrapi'
FRONTEND_KEYS = [
"uris",
"contributors",
"providerUpdatedDateTime",
"description",
"title",
"freeToRead",
"languages",
"licenses",
"publisher",
"subjects",
"tags",
"sponsorships",
"otherProperties",
"shareProperties"
]
| apache-2.0 | Python |
e08b5a5b630d3e1856c2eecb4adfce677961865b | Bump patch version number. | ambitioninc/django-localized-recurrence,wesleykendall/django-localized-recurrence,travistruett/django-localized-recurrence | localized_recurrence/version.py | localized_recurrence/version.py | __version__ = '0.2.3'
| __version__ = '0.2.2'
| mit | Python |
14d6955118893c532c1d9f8f6037d1da1b18dbbb | Add multiple skeletons for the moment. | lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment | analysis/plot-skeleton.py | analysis/plot-skeleton.py | #!/usr/bin/env python
import climate
import pandas as pd
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block03/*trial00*.csv.gz'):
for trial in database.Experiment(root).trials_matching(pattern):
with plots.space() as ax:
for i in range(3):
plots.skeleton(ax, trial, 1000 + 300 * i, lw=2, color='#fd3220', alpha=0.3)
#trial.rotate_heading(pd.Series([-6.28 / 10] * len(trial.df)))
trial.make_body_relative()
for i in range(3):
plots.skeleton(ax, trial, 1000 + 300 * i, offset=(0.5 * i, 0.5 * i), lw=2, color='#111111', alpha=0.3)
if __name__ == '__main__':
climate.call(main)
| #!/usr/bin/env python
import climate
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block02/*trial00*.csv.gz'):
for trial in database.Experiment(root).trials_matching(pattern):
with plots.space() as ax:
plots.skeleton(ax, trial, 100)
if __name__ == '__main__':
climate.call(main)
| mit | Python |
4b12e3c2ace8cae9071ae108d6ddc13f086d4fb9 | improve error recovery | pdebuyl/pyh5md | examples/read.py | examples/read.py | #!/usr/bin/env python
"""
Open a H5MD file and displays the elements in the given particles group
"""
from __future__ import print_function, division
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('file', type=str, help='H5MD file')
parser.add_argument('--group', type=str, help='name of the particles group')
args = parser.parse_args()
import numpy as np
from pyh5md import File, element
with File(args.file, 'r') as f:
if args.group is None:
print('Particles groups in this file:', *list(f['particles'].keys()))
import sys
sys.exit()
assert args.group in f['particles'], "group not found in particles group"
all_particles = f.particles_group(args.group)
for loc, name in (
(f['observables'], 'v'),
(all_particles, 'mass'),
(all_particles['box'], 'edges'),
(all_particles, 'id'),
(all_particles, 'position'),
(all_particles, 'force'),
(all_particles, 'velocity'),
):
if name not in loc:
continue
el = element(loc, name)
print('---------------------------------------------------------------')
print('%-10s ----------------------------------------------------' % name)
print(el.element_type)
print("shape :", el.value.shape)
print("step :", el.step, el.step_offset)
print("time :", el.time, el.time_offset)
print("value :", el.value)
| #!/usr/bin/env python
"""
Open a H5MD file and displays the elements in the given particles group
"""
from __future__ import print_function, division
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('file', type=str, help='H5MD file')
parser.add_argument('--group', type=str, help='name of the particles group', required=True)
args = parser.parse_args()
import numpy as np
from pyh5md import File, element
with File(args.file, 'r') as f:
all_particles = f.particles_group(args.group)
for loc, name in (
(f['observables'], 'v'),
(all_particles, 'mass'),
(all_particles['box'], 'edges'),
(all_particles, 'id'),
(all_particles, 'position'),
(all_particles, 'force'),
(all_particles, 'velocity'),
):
if name not in loc:
continue
el = element(loc, name)
print('---------------------------------------------------------------')
print('%-10s ----------------------------------------------------' % name)
print(el.element_type)
print("shape :", el.value.shape)
print("step :", el.step, el.step_offset)
print("time :", el.time, el.time_offset)
print("value :", el.value)
| bsd-3-clause | Python |
8f732803e30ae421167463defce27be215c0056d | Add file removing. | tomwys/fabric-utils | fabric_utils/backup.py | fabric_utils/backup.py | import datetime
from os import path
from fabric.api import env, run, sudo
def postgresql(database, file):
owner = sudo("stat -c %%U `dirname %s`" % file)
sudo("chown postgres `dirname %s`" % file)
sudo("pg_dump %s > %s" % (database, file), user='postgres')
sudo("chown %s `dirname %s` %s" % (owner, file, file))
class TmpDir(object):
def __enter__(self):
self.dir_name = run("mktemp -d")
return self.dir_name
def __exit__(self, type, value, traceback):
sudo("rm -rf %s" % self.dir_name)
def generate_name():
hostname = run("hostname")
date = datetime.datetime.now().isoformat('_')
return "%s_%s" % (hostname, date)
def archive_dir(source_path, target_path, archive_name):
file_name = path.join(target_path, "%s.tar" % archive_name)
run('cd %s; tar c * > "%s"' % (source_path, file_name))
return file_name
def compress(file_name):
run('bzip2 -9 "%s"' % file_name)
return "%s.bz2" % file_name
def rm_file(file_name):
run('rm "%s"' % file_name)
| import datetime
from os import path
from fabric.api import env, run, sudo
def postgresql(database, file):
owner = sudo("stat -c %%U `dirname %s`" % file)
sudo("chown postgres `dirname %s`" % file)
sudo("pg_dump %s > %s" % (database, file), user='postgres')
sudo("chown %s `dirname %s` %s" % (owner, file, file))
class TmpDir(object):
def __enter__(self):
self.dir_name = run("mktemp -d")
return self.dir_name
def __exit__(self, type, value, traceback):
sudo("rm -rf %s" % self.dir_name)
def generate_name():
hostname = run("hostname")
date = datetime.datetime.now().isoformat('_')
return "%s_%s" % (hostname, date)
def archive_dir(source_path, target_path, archive_name):
file_name = path.join(target_path, "%s.tar" % archive_name)
run('cd %s; tar c * > "%s"' % (source_path, file_name))
return file_name
def compress(file_name):
run('bzip2 -9 "%s"' % file_name)
return "%s.bz2" % file_name
| mit | Python |
f898778887eb828e59f0a5c19cc361a5a245e99d | make log format betterer | majestrate/srndv2 | src/srnd/main.py | src/srnd/main.py | #
# main.py
#
from . import config
from . import network
from . import sql
import asyncio
import logging
def main():
"""
run srnd
"""
conf = config.load_config()
log = conf['log']
if log['level'].lower() == 'debug':
lvl = logging.DEBUG
else:
lvl = logging.INFO
logging.basicConfig(level=lvl, format=' %(levelname)s\t%(asctime)s %(filename)s:%(lineno)d %(name)s %(message)s')
srnd_conf = conf['srnd']
store_conf = conf['store']
feed_conf = config.load_feed_config()
daemon = network.NNTPD(srnd_conf, feed_conf, store_conf)
sql.create()
daemon.start()
loop = asyncio.get_event_loop()
try:
loop.run_forever()
finally:
daemon.end()
loop.close()
| #
# main.py
#
from . import config
from . import network
from . import sql
import asyncio
import logging
def main():
"""
run srnd
"""
conf = config.load_config()
log = conf['log']
if log['level'].lower() == 'debug':
lvl = logging.DEBUG
else:
lvl = logging.INFO
logging.basicConfig(level=lvl, format=' %(levelname)s %(asctime)s %(filename)s:%(lineno)d %(name)s %(message)s')
srnd_conf = conf['srnd']
store_conf = conf['store']
feed_conf = config.load_feed_config()
daemon = network.NNTPD(srnd_conf, feed_conf, store_conf)
sql.create()
daemon.start()
loop = asyncio.get_event_loop()
try:
loop.run_forever()
finally:
daemon.end()
loop.close()
| mit | Python |
bfd75a927da2b46cb8630fab0cd3828ba71bf4ee | Use pip instead of easy_install | ValiMail/arc_test_suite | dependencies.py | dependencies.py | #! /usr/bin/env python3
import subprocess
import sys
requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"]
def install(package):
subprocess.call([sys.executable, "-m", "pip", "install", package])
for module in requires:
install(module)
| #! /usr/bin/env python3
from setuptools.command import easy_install
requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"]
for module in requires:
easy_install.main( ["-U",module] )
| mit | Python |
3171e7e355536f41a6c517ca7128a152c2577829 | Add test for categorical colors staying around after subsetting | theislab/anndata | anndata/tests/test_uns.py | anndata/tests/test_uns.py | import numpy as np
import pandas as pd
from anndata import AnnData
def test_uns_color_subset():
# Tests for https://github.com/theislab/anndata/issues/257
obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)])
obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category")
obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category")
uns = dict(
cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"],
)
adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns)
# If number of categories does not match number of colors,
# they should be reset
v = adata[:, [0, 1]]
assert "cat1_colors" not in v.uns
assert "cat2_colors" not in v.uns
# Otherwise the colors should still match after reseting
cat1_colors = ["red", "green", "blue", "yellow"]
adata.uns["cat1_colors"] = cat1_colors.copy()
v = adata[[0, 1], :]
assert len(v.uns["cat1_colors"]) == 1
assert v.uns["cat1_colors"][0] == "red"
# But original object should not change
assert list(adata.uns["cat1_colors"]) == cat1_colors
| import numpy as np
import pandas as pd
from anndata import AnnData
def test_uns_color_subset():
# Tests for https://github.com/theislab/anndata/issues/257
obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)])
obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category")
obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category")
uns = dict(
cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"],
)
adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns)
# If number of categories does not match number of colors,
# they should be reset
v = adata[:, [0, 1]]
assert "cat1_colors" not in v.uns
assert "cat2_colors" not in v.uns
# Otherwise the colors should still match after reseting
adata.uns["cat1_colors"] = ["red", "green", "blue", "yellow"]
v = adata[[0, 1], :]
assert len(v.uns["cat1_colors"]) == 1
assert v.uns["cat1_colors"][0] == "red"
| bsd-3-clause | Python |
21a2c973404930c85855b7cdd60b85c55f125b0a | Fix error message | yuma-m/raspi_lcd_acm1602ni | raspi_lcd.py | raspi_lcd.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import smbus
import unicodedata
from config import BUS_NUMBER, LCD_ADDR
from character_table import INITIALIZE_CODES, LINEBREAK_CODE, CHAR_TABLE
COMMAND_ADDR = 0x00
DATA_ADDR = 0x80
class LCDController:
def __init__(self):
self.bus = smbus.SMBus(BUS_NUMBER)
pass
def send_command(self, command, is_data=True):
if is_data:
self.bus.write_i2c_block_data(LCD_ADDR, DATA_ADDR, [command])
else:
self.bus.write_i2c_block_data(LCD_ADDR, COMMAND_ADDR, [command])
def initialize_display(self):
for code in INITIALIZE_CODES:
self.send_command(code, is_data=False)
def send_linebreak(self):
for code in LINEBREAK_CODE:
self.send_command(code, is_data=False)
def normalize_message(self, message):
if isinstance(message, str):
message = message.decode('utf-8')
return unicodedata.normalize('NFKC', message)
def convert_message(self, message):
char_code_list = []
for char in message:
if char not in CHAR_TABLE:
error_message = 'undefined character: %s' % (char.encode('utf-8'))
raise ValueError(error_message)
char_code_list += CHAR_TABLE[char]
return char_code_list
def display_one_line(self, line_no, message):
message = self.normalize_message(message)
char_code_list = self.convert_message(message)
print(char_code_list)
for code in char_code_list:
self.send_command(code)
def display_messages(self, message_list):
self.initialize_display()
for line_no, message in enumerate(message_list):
if line_no == 1:
self.send_linebreak()
self.display_one_line(line_no, message)
def main():
if not 2 <= len(sys.argv) <= 3:
print('Usage: python raspi_lcd.py "message for line 1" ["message for line 2"]')
return
else:
lcd_controller = LCDController()
lcd_controller.display_messages(sys.argv[1:3])
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import smbus
import unicodedata
from config import BUS_NUMBER, LCD_ADDR
from character_table import INITIALIZE_CODES, LINEBREAK_CODE, CHAR_TABLE
COMMAND_ADDR = 0x00
DATA_ADDR = 0x80
class LCDController:
def __init__(self):
self.bus = smbus.SMBus(BUS_NUMBER)
pass
def send_command(self, command, is_data=True):
if is_data:
self.bus.write_i2c_block_data(LCD_ADDR, DATA_ADDR, [command])
else:
self.bus.write_i2c_block_data(LCD_ADDR, COMMAND_ADDR, [command])
def initialize_display(self):
for code in INITIALIZE_CODES:
self.send_command(code, is_data=False)
def send_linebreak(self):
for code in LINEBREAK_CODE:
self.send_command(code, is_data=False)
def normalize_message(self, message):
if isinstance(message, str):
message = message.decode('utf-8')
return unicodedata.normalize('NFKC', message)
def convert_message(self, message):
char_code_list = []
for char in message:
if char not in CHAR_TABLE:
raise ValueError('undefined character: %s' % (char.encode('utf-8')))
char_code_list += CHAR_TABLE[char]
return char_code_list
def display_one_line(self, line_no, message):
message = self.normalize_message(message)
char_code_list = self.convert_message(message)
print(char_code_list)
for code in char_code_list:
self.send_command(code)
def display_messages(self, message_list):
self.initialize_display()
for line_no, message in enumerate(message_list):
if line_no == 1:
self.send_linebreak()
self.display_one_line(line_no, message)
def main():
if not 2 <= len(sys.argv) <= 3:
print('Usage: python raspi_lcd.py "message for line 1" ["message for line 2"]')
return
else:
lcd_controller = LCDController()
lcd_controller.display_messages(sys.argv[1:3])
if __name__ == '__main__':
main()
| mit | Python |
1d2df9b6d578a8780e0e8021abb0565389fe5ff7 | Change all test constants to floats | ererkka/GDX2py | tests/constants.py | tests/constants.py | #
"""
Constants for tests
See file 'data/example_data.gms' for the GAMS code used to create
file 'data/example.gdx'.
"""
import sys
import math
SYMLIST = ['set1', 'set2', 'CONSTANT', 'par1', 'par2', 'par3']
SET1 = ['a', 'b', 'c', 'd']
SET1_TEXT = "A one-dimensional set"
SET1_ASSOC_TEXTS = ['alpha', 'beta', 'charlie', 'delta']
SET2 = [('a', 'foo'), ('b', 'bar'), ('c', 'baz')]
SET2_TEXT = "A multidimensional set"
SET2_DOMAIN = ['set1', None]
CONSTANT = 10.0
CONSTANT_TEXT = "A scalar"
PAR1 = {'a': 1.0, 'b': 2.0, 'c':3.0, 'd': 4.0}
PAR1_TEXT = "A one-dimensional parameter"
PAR1_DOMAIN = ['set1']
PAR2 = {('a', 'aaa'): 10.0,
('b', 'bbb'): 20.0,
('c', 'ccc'): 30.0}
PAR2_TEXT = "A multidimensional parameter"
PAR2_DOMAIN = ['set1', None]
PAR3 = {'na': math.nan, 'eps': sys.float_info.min, 'pinf': math.inf, 'ninf': -math.inf} | #
"""
Constants for tests
See file 'data/example_data.gms' for the GAMS code used to create
file 'data/example.gdx'.
"""
import sys
import math
SYMLIST = ['set1', 'set2', 'CONSTANT', 'par1', 'par2', 'par3']
SET1 = ['a', 'b', 'c', 'd']
SET1_TEXT = "A one-dimensional set"
SET1_ASSOC_TEXTS = ['alpha', 'beta', 'charlie', 'delta']
SET2 = [('a', 'foo'), ('b', 'bar'), ('c', 'baz')]
SET2_TEXT = "A multidimensional set"
SET2_DOMAIN = ['set1', None]
CONSTANT = 10
CONSTANT_TEXT = "A scalar"
PAR1 = {'a': 1, 'b': 2, 'c':3, 'd': 4}
PAR1_TEXT = "A one-dimensional parameter"
PAR1_DOMAIN = ['set1']
PAR2 = {('a', 'aaa'): 10,
('b', 'bbb'): 20,
('c', 'ccc'): 30}
PAR2_TEXT = "A multidimensional parameter"
PAR2_DOMAIN = ['set1', None]
PAR3 = {'na': math.nan, 'eps': sys.float_info.min, 'pinf': math.inf, 'ninf': -math.inf} | mit | Python |
3a0ef1e7a7e2fd6a86ab1d68ffbea0dd2b2017ba | Update deploy scripts | winnieWinne/Tickets-Client,winnieWinne/Tickets-Client | deploy.py | deploy.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os, sys
if (len(sys.argv) == 1):
print("Usage: 'python3 deploy.py <server_path>'")
else:
path = sys.argv[1] + "src/main/webapp"
filename = "tmp.tar.gz"
os.system("gulp clean")
os.system("gulp build")
os.system("tar -zcvf %s tmp/" % filename)
os.system("mv tmp.tar.gz %s" % path)
os.system("tar -zxvf %s/%s --directory=%s --strip-components 1" % (path, filename, path))
os.system("rm %s/%s" % (path, filename))
os.system("mv %s/index.html %s/WEB-INF/templates/tickets.html" % (path, path))
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os, sys
if (len(sys.argv) == 1):
print("Usage: 'python3 deploy.py <server_path>'")
else:
path = sys.argv[1] + "src/main/webapp"
filename = "tmp.tar.gz"
os.system("tar -zcvf %s tmp/" % filename)
os.system("mv tmp.tar.gz %s" % path)
os.system("tar -zxvf %s/%s --directory=%s --strip-components 1" % (path, filename, path))
os.system("rm %s/%s" % (path, filename))
os.system("mv %s/index.html %s/WEB-INF/templates/tickets.html" % (path, path))
| mit | Python |
3ced1b19d706a824d36cf74846eaffd63d8c1b8a | Improve 24 cog formatting | Harmon758/Harmonbot,Harmon758/Harmonbot | Discord/cogs/24.py | Discord/cogs/24.py |
from discord.ext import commands
import sys
from utilities import checks
sys.path.insert(0, "..")
from units.twenty_four import check_solution, generate_numbers
sys.path.pop(0)
def setup(bot):
bot.add_cog(TwentyFour())
class TwentyFour(commands.Cog, name = "24"):
@commands.command(name = "24", aliases = ["twenty-four"])
@checks.not_forbidden()
async def twenty_four(self, ctx):
'''24 Game'''
numbers = generate_numbers()
CEK = '\N{COMBINING ENCLOSING KEYCAP}'
numbers = list(map(str, numbers))
await ctx.embed_reply(
f"{numbers[0]}{CEK}{numbers[1]}{CEK}\n"
f"{numbers[2]}{CEK}{numbers[3]}{CEK}\n"
)
async def incorrect(message, value):
response_ctx = await ctx.bot.get_context(message)
await response_ctx.embed_reply(
title = "Incorrect",
description = f"`{message.content} = {value}`",
in_response_to = False, attempt_delete = False
)
def check(message):
if message.channel != ctx.channel:
return False
if (value := check_solution(numbers, message.content)) is False:
return False
if value != 24:
ctx.bot.loop.create_task(
incorrect(message, int(value)),
name = "Send response to incorrect solution for 24 Game"
)
return False
return True
message = await ctx.bot.wait_for('message', check = check)
ctx = await ctx.bot.get_context(message)
await ctx.embed_reply(
title = "Correct!", description = f"`{message.content} = 24`",
in_response_to = False, attempt_delete = False
)
|
from discord.ext import commands
import sys
from utilities import checks
sys.path.insert(0, "..")
from units.twenty_four import check_solution, generate_numbers
sys.path.pop(0)
def setup(bot):
bot.add_cog(TwentyFour())
class TwentyFour(commands.Cog, name = "24"):
@commands.command(name = "24", aliases = ["twenty-four"])
@checks.not_forbidden()
async def twenty_four(self, ctx):
'''24 Game'''
numbers = generate_numbers()
CEK = '\N{COMBINING ENCLOSING KEYCAP}'
numbers = list(map(str, numbers))
await ctx.embed_reply(
f"{numbers[0]}{CEK}{numbers[1]}{CEK}\n"
f"{numbers[2]}{CEK}{numbers[3]}{CEK}\n"
)
async def incorrect(message, value):
response_ctx = await ctx.bot.get_context(message)
await response_ctx.embed_reply(
title = "Incorrect",
description = f"`{message.content} = {value}`",
in_response_to = False, attempt_delete = False
)
def check(message):
if message.channel != ctx.channel:
return False
if (value := check_solution(numbers, message.content)) is False:
return False
if value != 24:
ctx.bot.loop.create_task(
incorrect(message, int(value)),
name = "Send response to incorrect solution for 24 Game"
)
return False
return True
message = await ctx.bot.wait_for('message', check = check)
ctx = await ctx.bot.get_context(message)
await ctx.embed_reply(
title = "Correct!", description = f"`{message.content} = 24`",
in_response_to = False, attempt_delete = False
)
| mit | Python |
d964774feac4477ca0cd2939fccd9e7783a10d3c | Update __init__.py | adrianliaw/PyCuber | CFOP/__init__.py | CFOP/__init__.py | ""
| mit | Python | |
6408a99d65016d679bc16528c9e4df53cb3f5931 | Fix some permissions for getting orcid/twitter | OpenChemistry/mongochemserver | girder/molecules/molecules/user.py | girder/molecules/molecules/user.py | from girder.api import access
from girder.api.describe import Description, autoDescribeRoute
from girder.api.rest import getCurrentUser
from girder.models.model_base import AccessType
from girder.models.user import User
def _set_user_field(user, field_name, field_value):
query = {
'_id': user['_id']
}
update = {
'$set': {
field_name: field_value
}
}
User().update(query, update)
# Get the updated user and return it
user = User().findOne(user['_id'])
return User().filter(user, getCurrentUser(), [field_name])
@access.public
@autoDescribeRoute(
Description('Get the orcid of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.READ)
)
def get_orcid(user):
return user.get('orcid')
@access.user
@autoDescribeRoute(
Description('Set the orcid of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.ADMIN)
.param('orcid', 'The orcid to set.')
.param('public', 'Whether or not the orcid is public.', dataType='boolean',
required=False)
)
def set_orcid(user, orcid, public):
return _set_user_field(user, 'orcid', orcid)
@access.public
@autoDescribeRoute(
Description('Get the twitter username of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.READ)
)
def get_twitter(user):
return user.get('twitter')
@access.user
@autoDescribeRoute(
Description('Set the twitter username of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.ADMIN)
.param('twitter', 'The twitter username to set.')
.param('public', 'Whether or not the twitter username is public.', dataType='boolean',
required=False)
)
def set_twitter(user, twitter, public):
return _set_user_field(user, 'twitter', twitter)
| from girder.api import access
from girder.api.describe import Description, autoDescribeRoute
from girder.api.rest import getCurrentUser
from girder.models.model_base import AccessType
from girder.models.user import User
def _set_user_field(user, field_name, field_value):
query = {
'_id': user['_id']
}
update = {
'$set': {
field_name: field_value
}
}
User().update(query, update)
return User().load(user['_id'], user=getCurrentUser())
@access.public
@autoDescribeRoute(
Description('Get the orcid of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.ADMIN)
)
def get_orcid(user):
return user.get('orcid')
@access.user
@autoDescribeRoute(
Description('Set the orcid of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.READ)
.param('orcid', 'The orcid to set.')
.param('public', 'Whether or not the orcid is public.', dataType='boolean',
required=False)
)
def set_orcid(user, orcid, public):
return _set_user_field(user, 'orcid', orcid)
@access.public
@autoDescribeRoute(
Description('Get the twitter username of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.ADMIN)
)
def get_twitter(user):
return user.get('twitter')
@access.user
@autoDescribeRoute(
Description('Set the twitter username of a user.')
.modelParam('id', 'The ID of the user.', model=User, level=AccessType.READ)
.param('twitter', 'The twitter to set.')
.param('public', 'Whether or not the twitter is public.', dataType='boolean',
required=False)
)
def set_twitter(user, twitter, public):
return _set_user_field(user, 'twitter', twitter)
| bsd-3-clause | Python |
d3c600ddc0cdfdc6548c7d7f1d3c859d380f6768 | move ipython fabfiles to ipython.py | rajpushkar83/cloudmesh,rajpushkar83/cloudmesh,rajpushkar83/cloudmesh,rajpushkar83/cloudmesh,rajpushkar83/cloudmesh,rajpushkar83/cloudmesh,rajpushkar83/cloudmesh | fabfile/india.py | fabfile/india.py | from fabric.api import task
from util import ec2secgroup_openport, yaml_file_replace
@task
def configure():
"""configure india environment for cloudmesh rapid deployment"""
yaml_file_replace(filename='/cloudmesh_server.yaml',
replacements={
'browser: True': 'browser: False',
'host: 127.0.0.1': 'host: 0.0.0.0'
}
)
ec2secgroup_openport('india', 5000)
print "Configuration changes have been made successfully"
| from fabric.api import task
from util import ec2secgroup_openport, yaml_file_replace
@task
def configure():
"""configure india environment for cloudmesh rapid deployment"""
yaml_file_replace(filename='/cloudmesh_server.yaml',
replacements={
'browser: True': 'browser: False',
'host: 127.0.0.1': 'host: 0.0.0.0'
}
)
ec2secgroup_openport('india', 5000)
print "Configuration changes have been made successfully"
@task
def ipython():
IPython.lib import passwd
from fabfile.util import yaml_file_replace
local("ipython profile create nbserver")
local('cp etc/ipython_notebook_config.py ~/.ipython/profile_nbserver')
result = passwd()
yaml_file_replace(filename='~/.ipython/profile_nbserver/ipython_notebook_config.py',
replacements={'SHAPASSWORD': result}
)
local("openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout mycert.pem -out ~/.ipython/profile_nbserver/mycert.pem")
local("chmod go-rw ~/.ipython/profile_nbserver/mycert.pem")
@task
def start():
local("ipython notebook --certfile=~/.ipython/profile_nbserver/mycert.pem --profile=nbserver")
| apache-2.0 | Python |
2dece45476170e24e14903f19f9bf400c10ebf42 | Allow WOW animations to be used in text plugin. | narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow | djangocms_wow/cms_plugins.py | djangocms_wow/cms_plugins.py | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from . import models
class AnimationPlugin(CMSPluginBase):
model = models.Animation
name = _('Animation')
render_template = 'djangocms_wow/animation.html'
allow_children = True
text_enabled = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(AnimationPlugin)
class WOWAnimationPlugin(CMSPluginBase):
model = models.WOWAnimation
name = _("Wow Animation")
render_template = 'djangocms_wow/wow_animation.html'
allow_children = True
text_enabled = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(WOWAnimationPlugin)
| # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from . import models
class AnimationPlugin(CMSPluginBase):
model = models.Animation
name = _('Animation')
render_template = 'djangocms_wow/animation.html'
allow_children = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(AnimationPlugin)
class WOWAnimationPlugin(CMSPluginBase):
model = models.WOWAnimation
name = _("Wow Animation")
render_template = 'djangocms_wow/wow_animation.html'
allow_children = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(WOWAnimationPlugin)
| bsd-3-clause | Python |
50eba1720cd34684eaf0a931e28474ad987ea699 | Change polling interval to 5 seconds | asana/python-asana,asana/python-asana,Asana/python-asana | asana/resources/events.py | asana/resources/events.py |
from ._events import _Events
from ..error import InvalidTokenError
import time
class Events(_Events):
POLL_INTERVAL = 5000
def get_next(self, params):
params = params.copy()
if 'sync' not in params:
try:
self.get(params)
except InvalidTokenError as e:
params['sync'] = e.value['sync']
while True:
result = self.get(params)
if 'data' in result and len(result['data']) > 0:
return (result['data'], result['sync'])
else:
params['sync'] = result['sync']
time.sleep(self.POLL_INTERVAL / 1000.0)
def get_iterator(self, params):
params = params.copy()
while True:
items, sync = self.get_next(params)
for item in items:
yield item
params['sync'] = sync
|
from ._events import _Events
from ..error import InvalidTokenError
import time
class Events(_Events):
POLL_INTERVAL = 1000
def get_next(self, params):
params = params.copy()
if 'sync' not in params:
try:
self.get(params)
except InvalidTokenError as e:
params['sync'] = e.value['sync']
while True:
result = self.get(params)
if 'data' in result and len(result['data']) > 0:
return (result['data'], result['sync'])
else:
params['sync'] = result['sync']
time.sleep(self.POLL_INTERVAL / 1000.0)
def get_iterator(self, params):
params = params.copy()
while True:
items, sync = self.get_next(params)
for item in items:
yield item
params['sync'] = sync
| mit | Python |
b6112b62c2951de39146013b4d82777ec0b29470 | Make setup.py work on RHEL5 | pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus | lib/pegasus/python/setup.py | lib/pegasus/python/setup.py | import os
import sys
import subprocess
from setuptools import setup, find_packages
srcdir = os.path.dirname(__file__)
homedir = os.path.abspath(os.path.join(srcdir, "../../.."))
# Utility function to read the pegasus Version.in file
def readversion():
return subprocess.Popen("%s/release-tools/getversion" % homedir,
stdout=subprocess.PIPE, shell=True).communicate()[0].strip()
# Utility function to read the README file.
def read(fname):
return open(os.path.join(srcdir, fname)).read()
def find_package_data(dirname):
def find_paths(dirname):
items = []
for fname in os.listdir(dirname):
path = os.path.join(dirname, fname)
if os.path.isdir(path):
items += find_paths(path)
elif not path.endswith(".py") and not path.endswith(".pyc"):
items.append(path)
return items
items = find_paths(dirname)
return [path.replace(dirname, "") for path in items]
setup(
name = "pegasus-wms",
version = readversion(),
author = "Pegasus Team",
author_email = "pegasus@isi.edu",
description = "Pegasus Workflow Management System Python API",
long_description = read("README"),
license = "Apache2",
url = "http://pegasus.isi.edu",
keywords = ["scientific workflows"],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Operating System :: Unix",
"Programming Language :: Python",
"Topic :: Scientific/Engineering",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
packages = find_packages(exclude=["Pegasus.test"]),
package_data = {"Pegasus.service" : find_package_data("Pegasus/service/") },
include_package_data = True,
zip_safe = False,
install_requires = [
"Werkzeug==0.9.3",
"Flask==0.10",
"Jinja2==2.7",
"Flask-SQLAlchemy==0.16",
"Flask-Cache==0.13.1",
"SQLAlchemy==0.8.0",
"WTForms==1.0.3",
"requests==1.2.3",
"passlib==1.6.1",
"MarkupSafe==0.18",
"itsdangerous==0.21"
]
)
| import os
import sys
import subprocess
from setuptools import setup, find_packages
srcdir = os.path.dirname(__file__)
homedir = os.path.abspath(os.path.join(srcdir, "../../.."))
# Utility function to read the pegasus Version.in file
def readversion():
return subprocess.Popen("%s/release-tools/getversion" % homedir,
stdout=subprocess.PIPE, shell=True).communicate()[0].strip()
# Utility function to read the README file.
def read(fname):
return open(os.path.join(srcdir, fname)).read()
def find_package_data(dirname):
def find_paths(dirname):
items = []
for fname in os.listdir(dirname):
path = os.path.join(dirname, fname)
if os.path.isdir(path):
items += find_paths(path)
elif not path.endswith(".py") and not path.endswith(".pyc"):
items.append(path)
return items
items = find_paths(dirname)
return [os.path.relpath(path, dirname) for path in items]
setup(
name = "pegasus-wms",
version = readversion(),
author = "Pegasus Team",
author_email = "pegasus@isi.edu",
description = "Pegasus Workflow Management System Python API",
long_description = read("README"),
license = "Apache2",
url = "http://pegasus.isi.edu",
keywords = ["scientific workflows"],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Operating System :: Unix",
"Programming Language :: Python",
"Topic :: Scientific/Engineering",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
packages = find_packages(exclude=["Pegasus.test"]),
package_data = {"Pegasus.service" : find_package_data("Pegasus/service") },
include_package_data = True,
zip_safe = False,
install_requires = [
"Werkzeug==0.9.3",
"Flask==0.10",
"Jinja2==2.7",
"Flask-SQLAlchemy==0.16",
"Flask-Cache==0.13.1",
"SQLAlchemy==0.8.0",
"WTForms==1.0.3",
"requests==1.2.3",
"passlib==1.6.1",
"MarkupSafe==0.18",
"itsdangerous==0.21"
]
)
| apache-2.0 | Python |
e099b7457b7401eb780fd5da2bd6efc947a97449 | Fix test coverage | SCUEvals/scuevals-api,SCUEvals/scuevals-api | tests/auth/test_decorators.py | tests/auth/test_decorators.py | import json
from flask_jwt_extended import create_access_token
from scuevals_api.models import db
from tests import TestCase
from tests.fixtures.factories import StudentFactory
class AuthRequiredTestCase(TestCase):
def test_incorrect_permissions(self):
student = StudentFactory(permissions=[])
db.session.flush()
token = create_access_token(identity=student.to_dict())
rv = self.client.get('/quarters', headers={'Authorization': 'Bearer ' + token})
self.assertEqual(401, rv.status_code)
data = json.loads(rv.data)
self.assertIn('could not verify that you are authorized to access the URL requested', data['message'])
def test_missing_authorization_header(self):
rv = self.client.get('/quarters', headers={})
self.assertEqual(401, rv.status_code)
data = json.loads(rv.data)
self.assertIn('missing authorization header', data['message'])
| import json
from flask_jwt_extended import create_access_token
from scuevals_api.models import db
from tests import TestCase
from tests.fixtures.factories import StudentFactory
class AuthRequiredTestCase(TestCase):
def test_incorrect_permissions(self):
student = StudentFactory(permissions=[])
db.session.flush()
token = create_access_token(identity=student.to_dict())
rv = self.client.get('/quarters', headers={'Authorization': 'Bearer ' + token})
self.assertEqual(401, rv.status_code)
data = json.loads(rv.data)
self.assertIn('could not verify that you are authorized to access the URL requested', data['message'])
| agpl-3.0 | Python |
0208a3884b9fb648452297a6e1c2358c61d0d124 | Remove redundant asserts | takeflight/cookiecutter,sp1rs/cookiecutter,janusnic/cookiecutter,vincentbernat/cookiecutter,moi65/cookiecutter,terryjbates/cookiecutter,michaeljoseph/cookiecutter,agconti/cookiecutter,janusnic/cookiecutter,jhermann/cookiecutter,kkujawinski/cookiecutter,nhomar/cookiecutter,sp1rs/cookiecutter,benthomasson/cookiecutter,venumech/cookiecutter,Springerle/cookiecutter,Vauxoo/cookiecutter,christabor/cookiecutter,vintasoftware/cookiecutter,pjbull/cookiecutter,tylerdave/cookiecutter,luzfcb/cookiecutter,tylerdave/cookiecutter,stevepiercy/cookiecutter,cguardia/cookiecutter,terryjbates/cookiecutter,dajose/cookiecutter,hackebrot/cookiecutter,lucius-feng/cookiecutter,foodszhang/cookiecutter,pjbull/cookiecutter,audreyr/cookiecutter,michaeljoseph/cookiecutter,cguardia/cookiecutter,christabor/cookiecutter,ionelmc/cookiecutter,hackebrot/cookiecutter,drgarcia1986/cookiecutter,cichm/cookiecutter,atlassian/cookiecutter,cichm/cookiecutter,Vauxoo/cookiecutter,lgp171188/cookiecutter,atlassian/cookiecutter,0k/cookiecutter,jhermann/cookiecutter,willingc/cookiecutter,lucius-feng/cookiecutter,foodszhang/cookiecutter,agconti/cookiecutter,Springerle/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,takeflight/cookiecutter,ramiroluz/cookiecutter,moi65/cookiecutter,benthomasson/cookiecutter,0k/cookiecutter,drgarcia1986/cookiecutter,nhomar/cookiecutter,luzfcb/cookiecutter,ramiroluz/cookiecutter,audreyr/cookiecutter,dajose/cookiecutter,vincentbernat/cookiecutter,kkujawinski/cookiecutter,ionelmc/cookiecutter,lgp171188/cookiecutter,stevepiercy/cookiecutter,vintasoftware/cookiecutter | tests/test_find.py | tests/test_find.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_find
---------
Tests for `cookiecutter.find` module.
"""
import os
import pytest
from cookiecutter import find
@pytest.fixture(params=['fake-repo-pre', 'fake-repo-pre2'])
def repo_dir(request):
return os.path.join('tests', request.param)
def test_find_template(repo_dir):
template = find.find_template(repo_dir=repo_dir)
test_dir = os.path.join(repo_dir, '{{cookiecutter.repo_name}}')
assert template == test_dir
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_find
---------
Tests for `cookiecutter.find` module.
"""
import os
import pytest
from cookiecutter import find
@pytest.fixture(params=['fake-repo-pre', 'fake-repo-pre2'])
def repo_dir(request):
return os.path.join('tests', request.param)
def test_find_template(repo_dir):
template = find.find_template(repo_dir=repo_dir)
test_dir = os.path.join(repo_dir, '{{cookiecutter.repo_name}}')
assert template == test_dir
test_dir = os.path.join(repo_dir, '{{cookiecutter.repo_name }}')
assert template != test_dir
test_dir = os.path.join(repo_dir, '{{ cookiecutter.repo_name }}')
assert template != test_dir
| bsd-3-clause | Python |
d0bf37eeb8bb00a4b7bed195663e18049ca2228a | Add traling comma | tomchristie/apistar,tomchristie/apistar,encode/apistar,tomchristie/apistar,rsalmaso/apistar,rsalmaso/apistar,encode/apistar,rsalmaso/apistar,tomchristie/apistar,rsalmaso/apistar,encode/apistar,encode/apistar | tests/backends/test_django.py | tests/backends/test_django.py | from os import environ
import dj_database_url
import apistar
from apistar import App, http, routing, test
from apistar.backends import DjangoBackend
from apistar.commands import django_makemigrations, django_migrate
from apistar.test import CommandLineRunner
def list_stars(orm: DjangoBackend):
Star = orm.Star
return {
'stars': Star.objects.values('name', 'id')
}
def create_star(orm: DjangoBackend, name: http.QueryParam):
star = orm.Star(name=name)
star.save()
return {
'star': {'name': star.name, 'id': star.id}
}
app = App(
routes=[
routing.Route('/api/stars', 'GET', list_stars),
routing.Route('/api/stars/create', 'GET', create_star),
],
settings={
'DATABASES': {
'default': dj_database_url.config(
default=environ.get('DB_URL', 'sqlite:///test.db')
)
},
'INSTALLED_APPS': ['django_project'],
},
commands=[django_makemigrations, django_migrate]
)
client = test.TestClient(app)
runner = CommandLineRunner(app)
def test_list_create(monkeypatch):
def mock_get_current_app():
return app
monkeypatch.setattr(apistar.main, 'get_current_app', mock_get_current_app)
result = runner.invoke(['django_makemigrations'])
assert 'makemigrations' in result.output
result = runner.invoke(['django_migrate'])
assert 'migrate' in result.output
response = client.get('http://example.com/api/stars/create?name=mars')
assert response.status_code == 200
created_star = response.json()
assert created_star['star']['name'] == 'mars'
response = client.get('http://example.com/api/stars')
assert response.status_code == 200
assert response.json() == {'stars': [created_star['star']]}
| from os import environ
import dj_database_url
import apistar
from apistar import App, http, routing, test
from apistar.backends import DjangoBackend
from apistar.commands import django_makemigrations, django_migrate
from apistar.test import CommandLineRunner
def list_stars(orm: DjangoBackend):
Star = orm.Star
return {
'stars': Star.objects.values('name', 'id')
}
def create_star(orm: DjangoBackend, name: http.QueryParam):
star = orm.Star(name=name)
star.save()
return {
'star': {'name': star.name, 'id': star.id}
}
app = App(
routes=[
routing.Route('/api/stars', 'GET', list_stars),
routing.Route('/api/stars/create', 'GET', create_star),
],
settings={
'DATABASES': {
'default': dj_database_url.config(
default=environ.get('DB_URL', 'sqlite:///test.db')
)
},
'INSTALLED_APPS': ['django_project']
},
commands=[django_makemigrations, django_migrate]
)
client = test.TestClient(app)
runner = CommandLineRunner(app)
def test_list_create(monkeypatch):
def mock_get_current_app():
return app
monkeypatch.setattr(apistar.main, 'get_current_app', mock_get_current_app)
result = runner.invoke(['django_makemigrations'])
assert 'makemigrations' in result.output
result = runner.invoke(['django_migrate'])
assert 'migrate' in result.output
response = client.get('http://example.com/api/stars/create?name=mars')
assert response.status_code == 200
created_star = response.json()
assert created_star['star']['name'] == 'mars'
response = client.get('http://example.com/api/stars')
assert response.status_code == 200
assert response.json() == {'stars': [created_star['star']]}
| bsd-3-clause | Python |
3b2a96796dab6c5844f37a9467b70980f50e2931 | Update P1_strongPwDetect.py added docstrings and wrapped in main() function | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | pythontutorials/books/AutomateTheBoringStuff/Ch07/Projects/P1_strongPwDetect.py | pythontutorials/books/AutomateTheBoringStuff/Ch07/Projects/P1_strongPwDetect.py | """Strong password detection
This program ensures passwords entered are "strong."
Write a function, :meth:`is_strong_pw`, that uses regular expressions to make sure
the password string it is passed is strong.
A strong password is defined as one that is at least eight characters long,
contains both uppercase and lowercase characters, and has at least one digit.
Note:
You may need to test the string against multiple regex patterns to validate its strength.
"""
import re
def is_strong_pw(text: str) -> bool:
"""Is strong password
Uses three :py:mod:`re` object patterns to check if a given text is at least 8 numbers
and characters long, has at least one uppercase and lowercase character, and has at least
one digit.
Args:
text: String containing password to test strength of.
Returns:
True if the given text matches the regex patterns, False otherwise.
"""
length_regex = re.compile(r"[\d\w]{8,}") #: At least 8 numbers and characters
upper_lower_regex = re.compile(r"[a-z|A-Z]?[A-Z]+") #: At least 1 upper and lower character
digit_regex = re.compile(r"[\d]+") #: At least one digit
if not length_regex.search(text):
return False
if not digit_regex.search(text):
return False
if not upper_lower_regex.search(text):
return False
return True
def main():
password = "AutomateTheBoringStuff1"
print(is_strong_pw(password))
if __name__ == '__main__':
main()
| # Strong Password Detection - this program ensures passwords entered are "strong"
#
# Write a function that uses regular expressions to make sure the password string
# it is passed is strong. A strong password is defined as one that is at least eight
# characters long, contains both uppercase and lowercase characters, and has at least
# one digit. You may need to test the string against multiple regex patterns to validate
# its strength.
import re
def is_strong_pw(text):
length_regex = re.compile(r"[\d\w]{8,}") # at least 8 numbers and characters
upper_lower_regex = re.compile(r"[a-z|A-Z]?[A-Z]+") # at least 1 upper and lower character
digit_regex = re.compile(r"[\d]+") # at least one digit
if not length_regex.search(text):
return False
if not digit_regex.search(text):
return False
if not upper_lower_regex.search(text):
return False
return True
password = "AutomateTheBoringStuff1"
print(is_strong_pw(password))
| mit | Python |
a6c3012ff25106d6770c34189d72179e16594bc2 | fix variable name | yuyu2172/chainercv,chainer/chainercv,chainer/chainercv,pfnet/chainercv,yuyu2172/chainercv | tests/links_tests/model_tests/resnet_tests/test_resnet.py | tests/links_tests/model_tests/resnet_tests/test_resnet.py | import unittest
import numpy as np
from chainer import testing
from chainer.testing import attr
from chainer import Variable
from chainercv.links import ResNet101
from chainercv.links import ResNet152
from chainercv.links import ResNet50
@testing.parameterize(*(
testing.product_dict(
[
{'pick': 'prob', 'shapes': (1, 200), 'n_class': 200},
{'pick': 'res5',
'shapes': (1, 2048, 7, 7), 'n_class': None},
{'pick': ['res2', 'conv1'],
'shapes': ((1, 256, 56, 56), (1, 64, 112, 112)), 'n_class': None},
],
[
{'model_class': ResNet50},
{'model_class': ResNet101},
{'model_class': ResNet152},
],
[
{'mode': 'fb'},
{'mode': 'he'}
]
)
))
class TestResNetCall(unittest.TestCase):
def setUp(self):
self.link = self.model_class(
n_class=self.n_class, pretrained_model=None, mode=self.mode)
self.link.pick = self.pick
def check_call(self):
xp = self.link.xp
x = Variable(xp.asarray(np.random.uniform(
-1, 1, (1, 3, 224, 224)).astype(np.float32)))
features = self.link(x)
if isinstance(features, tuple):
for activation, shape in zip(features, self.shapes):
self.assertEqual(activation.shape, shape)
else:
self.assertEqual(features.shape, self.shapes)
self.assertEqual(features.dtype, np.float32)
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
testing.run_module(__name__, __file__)
| import unittest
import numpy as np
from chainer import testing
from chainer.testing import attr
from chainer import Variable
from chainercv.links import ResNet101
from chainercv.links import ResNet152
from chainercv.links import ResNet50
@testing.parameterize(*(
testing.product_dict(
[
{'pick': 'prob', 'shapes': (1, 200), 'n_class': 200},
{'pick': 'res5',
'shapes': (1, 2048, 7, 7), 'n_class': None},
{'pick': ['res2', 'conv1'],
'shapes': ((1, 256, 56, 56), (1, 64, 112, 112)), 'n_class': None},
],
[
{'model_class': ResNet50},
{'model_class': ResNet101},
{'model_class': ResNet152},
],
[
{'mode': 'fb'},
{'mode': 'he'}
]
)
))
class TestResNetCall(unittest.TestCase):
def setUp(self):
self.link = self.model_class(
n_class=self.n_class, pretrained_model=None, mode=self.mode)
self.link.pick = self.pick
def check_call(self):
xp = self.link.xp
x1 = Variable(xp.asarray(np.random.uniform(
-1, 1, (1, 3, 224, 224)).astype(np.float32)))
features = self.link(x1)
if isinstance(features, tuple):
for activation, shape in zip(features, self.shapes):
self.assertEqual(activation.shape, shape)
else:
self.assertEqual(features.shape, self.shapes)
self.assertEqual(features.dtype, np.float32)
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
testing.run_module(__name__, __file__)
| mit | Python |
422910a398a2c14da0bac2c39551beec861c4504 | test impôts | benjello/openfisca-france,SophieIPP/openfisca-france,adrienpacifico/openfisca-france,adrienpacifico/openfisca-france,antoinearnoud/openfisca-france,sgmap/openfisca-france,SophieIPP/openfisca-france,benjello/openfisca-france,sgmap/openfisca-france,antoinearnoud/openfisca-france | tests/test_irpp.py | tests/test_irpp.py | # -*- coding:utf-8 -*-
# Created on 14 mai 2013
# This file is part of OpenFisca.
# OpenFisca is a socio-fiscal microsimulation software
# Copyright ©2013 Clément Schaff, Mahdi Ben Jelloul
# Licensed under the terms of the GVPLv3 or later license
# (see openfisca/__init__.py for details)
import nose
from src.lib.simulation import ScenarioSimulation
from datetime import datetime
''' test pour un célibataire ayant un revenu salarial (1AJ) de 20 000 € '''
irpp = {2010: 1181, 2011 : 1181}
def test_irpp_20000():
country = 'france'
for yr in range(2010,2012):
simulation = ScenarioSimulation()
simulation.set_config(year = yr, country = country,
nmen = 2, maxrev = 350000, xaxis = 'sali')
# Adding a husband/wife on the same tax sheet (foyer)
#simulation.scenario.addIndiv(1, datetime(1975,1,1).date(), 'conj', 'part')
# simulation.scenario.addIndiv(2, datetime(1975,2,2).date(), 'conj', 'part')
simulation.set_param()
test_case = simulation.scenario
test_case.indiv[0].update({"sali":20000})
#pour les cases non individualisables
# test_case.declar[0].update({"f2tr":20000})
# Adding children on the same tax sheet (foyer)
# simulation.scenario.addIndiv(3, datetime(2000,1,1).date(), 'pac', 'enf')
# simulation.scenario.addIndiv(4, datetime(2000,1,1).date(), 'pac', 'enf')
df = simulation.get_results_dataframe(index_by_code=True)
print df.loc["irpp"][0]
# print test_case
print test_case.indiv[0]
# print test_case.declar[0]
# print irpp[yr]
# print abs(df.loc["irpp"][0] - irpp[yr]) < 1e-3
assert abs(df.loc["irpp"][0] - irpp[yr]) < 1e-3
# montant de l'irpp
if __name__ == '__main__':
test_irpp_20000()
nose.core.runmodule(argv=[__file__, '-v', '-i test_*.py'])
# nose.core.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'], exit=False)
| # -*- coding:utf-8 -*-
# Created on 14 mai 2013
# This file is part of OpenFisca.
# OpenFisca is a socio-fiscal microsimulation software
# Copyright ©2013 Clément Schaff, Mahdi Ben Jelloul
# Licensed under the terms of the GVPLv3 or later license
# (see openfisca/__init__.py for details)
import nose
from src.lib.simulation import ScenarioSimulation
from datetime import datetime
''' test pour un célibataire ayant un revenu salarial (1AJ) de 20 000 € '''
irpp = {2010: 1181, 2011 : 1181}
def test_irpp_20000():
country = 'france'
for yr in range(2010,2012):
simulation = ScenarioSimulation()
simulation.set_config(year = yr, country = country,
nmen = 2, maxrev = 350000, xaxis = 'sali')
# Adding a husband/wife on the same tax sheet (foyer)
simulation.scenario.addIndiv(1, datetime(1975,1,1).date(), 'conj', 'part')
# simulation.scenario.addIndiv(2, datetime(1975,2,2).date(), 'conj', 'part')
simulation.set_param()
# Adding children on the same tax sheet (foyer)
# simulation.scenario.addIndiv(3, datetime(2000,1,1).date(), 'pac', 'enf')
# simulation.scenario.addIndiv(4, datetime(2000,1,1).date(), 'pac', 'enf')
df = simulation.get_results_dataframe(index_by_code=True)
print df.loc["irpp"][0]
print irpp[yr]
print abs(df.loc["irpp"][0] - irpp[yr]) < 1e-3
assert abs(df.loc["irpp"][0] - irpp[yr]) < 1e-3
# montant d'irpp
if __name__ == '__main__':
test_irpp_20000()
nose.core.runmodule(argv=[__file__, '-v', '-i test_*.py'])
# nose.core.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'], exit=False)
| agpl-3.0 | Python |
213da28887ef2e9be1c9d3a45f72a7e27618b2c3 | add Valeera | rhots/automation | automation/heroes_data.py | automation/heroes_data.py | # This is a list of official hero names in order of release date. The
# first set of heroes from Abathur to Zeratul were available on
# release. The rest are listed in chronological order of release. New
# heroes should be added to the end of the list.
HEROES = [
"Abathur",
"Arthas",
"Diablo",
"E.T.C.",
"Falstad",
"Gazlowe",
"Illidan",
"Kerrigan",
"Malfurion",
"Muradin",
"Nazeebo",
"Nova",
"Raynor",
"Sgt. Hammer",
"Sonya",
"Stitches",
"Tassadar",
"Tyrael",
"Tyrande",
"Uther",
"Valla",
"Zeratul",
"Tychus",
"Li Li",
"Brightwing",
"Murky",
"Zagara",
"Rehgar",
"Chen",
"Azmodan",
"Anub'arak",
"Jaina",
"Thrall",
"The Lost Vikings",
"Sylvanas",
"Kael'thas",
"Johanna",
"The Butcher",
"Leoric",
"Kharazim",
"Rexxar",
"Lt. Morales",
"Artanis",
"Cho",
"Gall",
"Lunara",
"Greymane",
"Li-Ming",
"Xul",
"Dehaka",
"Tracer",
"Chromie",
"Medivh",
"Guldan",
"Auriel",
"Alarak",
"Zarya",
"Samuro",
"Varian",
"Ragnaros",
"Zul'jin",
"Valeera",
]
| # This is a list of official hero names in order of release date. The
# first set of heroes from Abathur to Zeratul were available on
# release. The rest are listed in chronological order of release. New
# heroes should be added to the end of the list.
HEROES = [
"Abathur",
"Arthas",
"Diablo",
"E.T.C.",
"Falstad",
"Gazlowe",
"Illidan",
"Kerrigan",
"Malfurion",
"Muradin",
"Nazeebo",
"Nova",
"Raynor",
"Sgt. Hammer",
"Sonya",
"Stitches",
"Tassadar",
"Tyrael",
"Tyrande",
"Uther",
"Valla",
"Zeratul",
"Tychus",
"Li Li",
"Brightwing",
"Murky",
"Zagara",
"Rehgar",
"Chen",
"Azmodan",
"Anub'arak",
"Jaina",
"Thrall",
"The Lost Vikings",
"Sylvanas",
"Kael'thas",
"Johanna",
"The Butcher",
"Leoric",
"Kharazim",
"Rexxar",
"Lt. Morales",
"Artanis",
"Cho",
"Gall",
"Lunara",
"Greymane",
"Li-Ming",
"Xul",
"Dehaka",
"Tracer",
"Chromie",
"Medivh",
"Guldan",
"Auriel",
"Alarak",
"Zarya",
"Samuro",
"Varian",
"Ragnaros",
"Zul'jin",
]
| isc | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.