commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
d98e33d28105c8180b591e3097af83e42599bfc5 | Remove the usage of nargs to avoid different behaviors between chronograph and command line. | weijia/django-local-apps,weijia/django-local-apps | django_local_apps/management/commands/docker_exec.py | django_local_apps/management/commands/docker_exec.py | import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
# for using with chronograph, do not use nargs param, because chronograph seems do not support passing
# array, but using nargs will generate a list for the parameters
parser.add_argument('-c', '--container_id')
parser.add_argument('-w', '--work_dir', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["container_id"])
print(self.options["work_dir"])
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"])
print(container.exec_run(" ".join(self.options["path_and_params"]), workdir=(self.options["work_dir"])))
Command = DockerExecutor
| import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('--container_id', nargs=1)
parser.add_argument('--work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
print(container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"]))
Command = DockerExecutor
| bsd-3-clause | Python |
36e7af17c8d7c4bdb9cfd20387c470697e1bac96 | Simplify f-string to remove cast | amolenaar/gaphor,amolenaar/gaphor | po/build-babel.py | po/build-babel.py | import subprocess
from pathlib import Path
po_path: Path = Path(__file__).resolve().parent
def run_babel(command: str, input: Path, output_file: Path, locale: str):
subprocess.run(
[
"pybabel",
command,
f"--input={input}",
f"--output-file={output_file}",
f"--locale={locale}",
"--domain=gaphor",
]
)
def update_po_files():
pot_path = po_path / "gaphor.pot"
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
run_babel("update", pot_path, path, path.stem)
def compile_mo_files():
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
mo_path = po_path.parent / "locale" / path.stem / "LC_MESSAGES" / "gaphor.mo"
mo_path.parent.mkdir(parents=True, exist_ok=True)
run_babel("compile", path, mo_path, path.stem)
if __name__ == "__main__":
update_po_files()
compile_mo_files()
| import subprocess
from pathlib import Path
po_path: Path = Path(__file__).resolve().parent
def run_babel(command: str, input: Path, output_file: Path, locale: str):
subprocess.run(
[
"pybabel",
command,
f"--input={str(input)}",
f"--output-file={str(output_file)}",
f"--locale={str(locale)}",
"--domain=gaphor",
]
)
def update_po_files():
pot_path = po_path / "gaphor.pot"
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
run_babel("update", pot_path, path, path.stem)
def compile_mo_files():
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
mo_path = po_path.parent / "locale" / path.stem / "LC_MESSAGES" / "gaphor.mo"
mo_path.parent.mkdir(parents=True, exist_ok=True)
run_babel("compile", path, mo_path, path.stem)
if __name__ == "__main__":
update_po_files()
compile_mo_files()
| lgpl-2.1 | Python |
74d1dfabe7ccab9d75b53d596c47768220bfced2 | Rewrite solution | derekmpham/interview-prep,derekmpham/interview-prep | string/first-str-substr-occr.py | string/first-str-substr-occr.py | # Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
if len(substr) >= 1:
for i in range(0, len(string)):
if string[i:i+len(substr)] == substr:
return i
return -1
# test cases
# test 1
string = "Hello"
substr = "eo"
print find_substring(string, substr) # -1
# test 2
string = "Hello"
substr = "el"
print find_substring(string, substr) # 1
# test 3
string = "a"
substr = "a"
print find_substring(string, substr) # 0
# test 4
string = "a"
substr = "A"
print find_substring(string, substr) # -1
# test 5
string = "sst"
substr = "st"
print find_substring(string, substr) # 1
| # Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
j = 0
if substr_len >= 1:
for i in range(0, string_len):
if string[i] == substr[j]:
if j == substr_len - 1:
return i - substr_len + 1
j += 1
else:
j = 0
i += 1
return -1
# test cases
# test 1
string = "Hello"
substr = "eo"
print find_substring(string, substr) # -1
# test 2
string = "Hello"
substr = "el"
print find_substring(string, substr) # 1
# test 3
string = "a"
substr = "a"
print find_substring(string, substr) # 0
| mit | Python |
2a588df23755bd94924e42dae517705aca3938d8 | Use Pseudo-Normalized verbiage | gstaubli/pndb | proof_of_concept.py | proof_of_concept.py | import time
import csv
tables = [
{
'test_table':
{
'file_name': 'raw_data.tsv',
'column_structure': (
{'id': int},
{'first_name': str}
)
}
}
]
field_lookups = {
'first_name':
{
'type': str,
'values': {
1: 'John',
2: 'Bob',
3: 'Robert',
4: 'Michael',
5: 'Erin',
6: 'Victoria',
7: 'Charles'
}
}
}
def dictify_row(row,column_structure):
dictified_row = {}
for index, column_meta in enumerate(column_structure):
column_name = column_meta.keys()[0]
if column_name in field_lookups:
dictified_row[column_name] = field_lookups[column_name]['values'][row[index]]
else:
dictified_row[column_name] = row[index]
return dictified_row
def test_mmap():
for table in tables:
for table_name, table_structure in table.iteritems():
cs = table_structure['column_structure']
with open(table_structure['file_name']) as f:
reader = csv.reader(f,dialect='excel-tab')
for record in reader:
row = [int(i) for i in record]
row = dictify_row(row,cs)
times_to_test = 5
for i in range(times_to_test):
t1 = time.time()
test_mmap()
t2 = time.time()
print("Pseudo-Normalized: %f" % (t2-t1))
for i in range(times_to_test):
t1 = time.time()
with open('raw_data_with_names.tsv') as f:
reader = csv.DictReader(f,dialect='excel-tab')
for line in reader:
line
t2 = time.time()
print("Reading from File: %f" % (t2-t1)) | import time
import csv
tables = [
{
'test_table':
{
'file_name': 'raw_data.tsv',
'column_structure': (
{'id': int},
{'first_name': str}
)
}
}
]
field_lookups = {
'first_name':
{
'type': str,
'values': {
1: 'John',
2: 'Bob',
3: 'Robert',
4: 'Michael',
5: 'Erin',
6: 'Victoria',
7: 'Charles'
}
}
}
def dictify_row(row,column_structure):
dictified_row = {}
for index, column_meta in enumerate(column_structure):
column_name = column_meta.keys()[0]
if column_name in field_lookups:
dictified_row[column_name] = field_lookups[column_name]['values'][row[index]]
else:
dictified_row[column_name] = row[index]
return dictified_row
def test_mmap():
for table in tables:
for table_name, table_structure in table.iteritems():
cs = table_structure['column_structure']
with open(table_structure['file_name']) as f:
reader = csv.reader(f,dialect='excel-tab')
for record in reader:
row = [int(i) for i in record]
row = dictify_row(row,cs)
times_to_test = 5
for i in range(times_to_test):
t1 = time.time()
test_mmap()
t2 = time.time()
print("Memory Mapped: %f" % (t2-t1))
for i in range(times_to_test):
t1 = time.time()
with open('raw_data_with_names.tsv') as f:
reader = csv.DictReader(f,dialect='excel-tab')
for line in reader:
line
t2 = time.time()
print("Reading from File: %f" % (t2-t1)) | mit | Python |
a2ac7b84f9578ba8414c00769a0ab4e87ef87c6e | Clean up client | petrvanblokland/Xierpa3,petrvanblokland/Xierpa3 | xierpa3/sites/typo2014/client.py | xierpa3/sites/typo2014/client.py | # -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
# xierpa server
# Copyright (c) 2014+ buro@petr.com, www.petr.com, www.xierpa.com
#
# X I E R P A 3
# Distribution by the MIT License.
#
# -----------------------------------------------------------------------------
#
from xierpa3.server.twistedmatrix.twistedclient import TwistedClient
from typo2014 import Typo2014
class Client(TwistedClient):
typo2014 = Typo2014()
THEMES = {
# Matching theme names with Theme instances.
TwistedClient.DEFAULTTHEME: typo2014,
'typo': typo2014,
}
| # -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
# xierpa server
# Copyright (c) 2014+ buro@petr.com, www.petr.com, www.xierpa.com
#
# X I E R P A 3
# Distribution by the MIT License.
#
# -----------------------------------------------------------------------------
#
from xierpa3.server.twistedmatrix.twistedclient import TwistedClient
from xierpa3.sites.documentation.documentation import Documentation
from typo2014 import Typo2014
class Client(TwistedClient):
typo2014 = Typo2014()
documentation = Documentation()
THEMES = {
# Matching theme names with Theme instances.
TwistedClient.DEFAULTTHEME: typo2014,
'typo': typo2014,
'doc': documentation,
}
| mit | Python |
9d84b469b3f05b111c08ea83d8f465a4d4fbcca4 | FIX : obob in mpl_helpers | giltis/xray-vision,licode/xray-vision,licode/xray-vision,giltis/xray-vision,sameera2004/xray-vision,ericdill/xray-vision,sameera2004/xray-vision,ericdill/xray-vision | xray_vision/utils/mpl_helpers.py | xray_vision/utils/mpl_helpers.py | import matplotlib
import matplotlib.pyplot as plt
from functools import wraps
def ensure_ax(func):
@wraps(func)
def inner(*args, **kwargs):
if 'ax' in kwargs:
ax = kwargs.pop('ax', None)
elif len(args) > 0 and isinstance(args[0], matplotlib.axes.Axes):
ax = args[0]
args = args[1:]
else:
ax = plt.gca()
return func(ax, *args, **kwargs)
return inner
def ensure_ax_meth(func):
@wraps(func)
def inner(*args, **kwargs):
s = args[0]
args = args[1:]
if 'ax' in kwargs:
ax = kwargs.pop('ax', None)
elif len(args) > 1 and isinstance(args[0], matplotlib.axes.Axes):
ax = args[0]
args = args[1:]
else:
ax = plt.gca()
return func(s, ax, *args, **kwargs)
return inner
| import matplotlib
import matplotlib.pyplot as plt
from functools import wraps
def ensure_ax(func):
@wraps(func)
def inner(*args, **kwargs):
if 'ax' in kwargs:
ax = kwargs.pop('ax', None)
elif len(args) > 0 and isinstance(args[0], matplotlib.axes.Axes):
ax = args[0]
args = args[1:]
else:
ax = plt.gca()
return func(ax, *args, **kwargs)
return inner
def ensure_ax_meth(func):
@wraps(func)
def inner(*args, **kwargs):
s = args[0]
args = args[1:]
if 'ax' in kwargs:
ax = kwargs.pop('ax', None)
elif len(args) > 1 and isinstance(args[1], matplotlib.axes.Axes):
ax = args[1]
args = args[1:]
else:
ax = plt.gca()
return func(s, ax, *args, **kwargs)
return inner
| bsd-3-clause | Python |
718071622d8bae486dd8bc250daf1e1626d59d97 | use float comparisons | BT-ojossen/account-invoicing,iDTLabssl/account-invoicing,archetipo/account-invoicing,kittiu/account-invoicing,gurneyalex/account-invoicing,kmee/account-invoicing,eezee-it/account-invoicing,charbeljc/account-invoicing,open-synergy/account-invoicing,sysadminmatmoz/account-invoicing,Endika/account-invoicing,acsone/account-invoicing,EBII/account-invoicing,jbaudoux/account-invoicing,Trust-Code/account-invoicing,brain-tec/account-invoicing,damdam-s/account-invoicing,acsone/account-invoicing,abstract-open-solutions/account-invoicing,scigghia/account-invoicing,Elneo-group/account-invoicing,BT-jmichaud/account-invoicing,Noviat/account-invoicing,raycarnes/account-invoicing,sergiocorato/account-invoicing,taktik/account-invoicing,rschnapka/account-invoicing,akretion/account-invoicing,kmee/account-invoicing,Antiun/account-invoicing,bluestar-solutions/account-invoicing,BT-fgarbely/account-invoicing,hbrunn/account-invoicing,yvaucher/account-invoicing,brain-tec/account-invoicing | account_invoice_zero/account_invoice.py | account_invoice_zero/account_invoice.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from functools import partial
from openerp.osv import orm
from openerp.tools.float_utils import float_is_zero
class account_invoice(orm.Model):
_inherit = 'account.invoice'
def invoice_validate(self, cr, uid, ids, context=None):
result = super(account_invoice, self).invoice_validate(
cr, uid, ids, context=context)
dp_obj = self.pool['decimal.precision']
precision = dp_obj.precision_get(cr, uid, 'Account')
is_zero = partial(float_is_zero, precision_digits=precision)
for invoice in self.browse(cr, uid, ids, context=context):
if is_zero(invoice.amount_total):
account = invoice.account_id.id
# search the payable / receivable lines
lines = [line for line in invoice.move_id.line_id
if line.account_id.id == account]
# reconcile the lines with a zero balance
if is_zero(sum(line.debit - line.credit for line in lines)):
move_line_obj = self.pool['account.move.line']
move_line_obj.reconcile(cr, uid,
[line.id for line in lines],
context=context)
return result
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
class account_invoice(orm.Model):
_inherit = 'account.invoice'
def invoice_validate(self, cr, uid, ids, context=None):
result = super(account_invoice, self).invoice_validate(
cr, uid, ids, context=context)
for invoice in self.browse(cr, uid, ids, context=context):
if not invoice.amount_total:
account = invoice.account_id.id
# search the payable / receivable lines
lines = [line for line in invoice.move_id.line_id
if line.account_id.id == account]
# reconcile the lines with a zero balance
if not sum(line.debit - line.credit for line in lines):
move_line_obj = self.pool['account.move.line']
move_line_obj.reconcile(cr, uid,
[line.id for line in lines],
context=context)
return result
| agpl-3.0 | Python |
4258ffab62236c9648f15ecd32f238c56f06550b | fix argument name | yosida95/python-jsmapper | jsmapper/examples/product.py | jsmapper/examples/product.py | # -*- coding: utf-8 -*-
from jsmapper import (
JSONSchema,
Array,
Mapping,
Number,
Object,
String,
)
from jsmapper.defines import JSONSchemaDraftV4
class Product(Mapping):
class Dimensions(Mapping):
length = JSONSchema(type=Number())
width = JSONSchema(type=Number())
height = JSONSchema(type=Number())
id = JSONSchema(type=Number(),
description="The unique identifier for a product")
name = JSONSchema(type=String())
price = JSONSchema(type=Number(minimum=0, exclusive_minimum=True))
tags = JSONSchema(type=Array(items=JSONSchema(type=String()),
min_items=1, unique_items=True))
dimensions = JSONSchema(type=Object(
properties=Dimensions,
required=[Dimensions.length, Dimensions.width, Dimensions.height]
))
warehouseLocation = JSONSchema(
ref="http://json-schema.org/geo",
description="Coordinates of the warehouse with the product"
)
ProductSchema = JSONSchema(
schema=JSONSchemaDraftV4,
title="Product set",
type=Array(
items=JSONSchema(
title="Product",
type=Object(
properties=Product,
required=[Product.id, Product.name, Product.price]
)
)
),
)
| # -*- coding: utf-8 -*-
from jsmapper import (
JSONSchema,
Array,
Mapping,
Number,
Object,
String,
)
from jsmapper.defines import JSONSchemaDraftV4
class Product(Mapping):
id = JSONSchema(type=Number(),
description="The unique identifier for a product")
name = JSONSchema(type=String())
price = JSONSchema(type=Number(minimum=0, exclusiveMinimum=True))
tags = JSONSchema(type=Array(items=JSONSchema(type=String()),
minItems=1, uniqueItems=True))
dimensions = JSONSchema(type=Object(
properties=Dimensions,
required=[Dimensions.length, Dimensions.width, Dimensions.height]
))
warehouseLocation = JSONSchema(
ref="http://json-schema.org/geo",
description="Coordinates of the warehouse with the product"
)
ProductSchema = JSONSchema(
schema=JSONSchemaDraftV4,
title="Product set",
type=Array(
items=JSONSchema(
title="Product",
type=Object(
properties=Product,
required=[Product.id, Product.name, Product.price]
)
)
),
)
| mit | Python |
2bc8f9a633a6c433a9a5c9564a2f161adefc381e | Enable username hack by default (it's up to 255 characters now). | release-engineering/kobo,pombredanne/https-git.fedorahosted.org-git-kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,pombredanne/https-git.fedorahosted.org-git-kobo,pombredanne/https-git.fedorahosted.org-git-kobo,pombredanne/https-git.fedorahosted.org-git-kobo | kobo/django/auth/__init__.py | kobo/django/auth/__init__.py | # -*- coding: utf-8 -*-
# enable the username hack
# make sure that 'kobo.django.auth' precedes 'django.contrib.auth' in INSTALLED_APPS
import hacks
| lgpl-2.1 | Python | |
281997bf43f4e91fe48de97e7c09716b1c63b6aa | Enable the gallery plugin | jtiki/djangocms-cascade,jrief/djangocms-cascade,haricot/djangocms-bs4forcascade,rfleschenberg/djangocms-cascade,jtiki/djangocms-cascade,jrief/djangocms-cascade,rfleschenberg/djangocms-cascade,jrief/djangocms-cascade,rfleschenberg/djangocms-cascade,haricot/djangocms-bs4forcascade,jtiki/djangocms-cascade | cmsplugin_cascade/bootstrap3/settings.py | cmsplugin_cascade/bootstrap3/settings.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from cmsplugin_cascade.settings import CMSPLUGIN_CASCADE, orig_config
CASCADE_PLUGINS = ('buttons', 'carousel', 'accordion', 'container', 'image', 'picture', 'panel',
'tabs', 'gallery',)
if 'cms_bootstrap3' in settings.INSTALLED_APPS:
CASCADE_PLUGINS += ('secondary_menu',)
CMSPLUGIN_CASCADE['bootstrap3'] = {
'breakpoints': (
('xs', (768, 'mobile-phone', _("mobile phones"), 750)),
('sm', (768, 'tablet', _("tablets"), 750)),
('md', (992, 'laptop', _("laptops"), 970)),
('lg', (1200, 'desktop', _("large desktops"), 1170)),
),
'gutter': 30,
}
CMSPLUGIN_CASCADE['bootstrap3'].update(orig_config.get('bootstrap3', {}))
CMSPLUGIN_CASCADE['plugins_with_extra_render_templates'].setdefault('BootstrapSecondaryMenuPlugin', (
('cascade/bootstrap3/secmenu-list-group.html', _("default")),
('cascade/bootstrap3/secmenu-unstyled-list.html', _("unstyled")),
))
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from cmsplugin_cascade.settings import CMSPLUGIN_CASCADE, orig_config
CASCADE_PLUGINS = ('buttons', 'carousel', 'accordion', 'container', 'image', 'picture', 'panel',
'tabs',)
if 'cms_bootstrap3' in settings.INSTALLED_APPS:
CASCADE_PLUGINS += ('secondary_menu',)
CMSPLUGIN_CASCADE['bootstrap3'] = {
'breakpoints': (
('xs', (768, 'mobile-phone', _("mobile phones"), 750)),
('sm', (768, 'tablet', _("tablets"), 750)),
('md', (992, 'laptop', _("laptops"), 970)),
('lg', (1200, 'desktop', _("large desktops"), 1170)),
),
'gutter': 30,
}
CMSPLUGIN_CASCADE['bootstrap3'].update(orig_config.get('bootstrap3', {}))
CMSPLUGIN_CASCADE['plugins_with_extra_render_templates'].setdefault('BootstrapSecondaryMenuPlugin', (
('cascade/bootstrap3/secmenu-list-group.html', _("default")),
('cascade/bootstrap3/secmenu-unstyled-list.html', _("unstyled")),
))
| mit | Python |
7cee7edabad08b01ceda0ed8f2798ebf47c87e95 | Fix catch-all URLs with prefix | uranusjr/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,uranusjr/pycontw2016 | src/pycontw2016/urls.py | src/pycontw2016/urls.py | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from core.views import flat_page
from users.views import user_dashboard
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name="index.html"), name='index'),
url(r'^dashboard/$', user_dashboard, name='user_dashboard'),
url(r'^accounts/', include('users.urls')),
url(r'^proposals/', include('proposals.urls')),
url(r'^admin/', include(admin.site.urls)),
]
if settings.URL_PREFIX:
urlpatterns = [
url(r'^{prefix}'.format(prefix=settings.URL_PREFIX),
include(urlpatterns)),
]
# User-uploaded files like profile pics need to be served in development
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Catch-all URL pattern must be put last.
if settings.URL_PREFIX:
urlpatterns += [
url(r'^{prefix}(?P<path>.+)/$'.format(prefix=settings.URL_PREFIX),
flat_page, name='page'),
]
else:
urlpatterns += [url(r'^(?P<path>.+)/$', flat_page, name='page')]
| from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from core.views import flat_page
from users.views import user_dashboard
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name="index.html"), name='index'),
url(r'^dashboard/$', user_dashboard, name='user_dashboard'),
url(r'^accounts/', include('users.urls')),
url(r'^proposals/', include('proposals.urls')),
url(r'^admin/', include(admin.site.urls)),
]
if settings.URL_PREFIX:
urlpatterns = [
url(r'^{prefix}'.format(prefix=settings.URL_PREFIX),
include(urlpatterns)),
]
# User-uploaded files like profile pics need to be served in development
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Catch-all URL pattern must be put last.
urlpatterns += [
url(r'^(?P<path>.+)/$', flat_page, name='page'),
]
| mit | Python |
c35be6d48a462d668c9a57f381682da905725329 | Update the version number. | jeremiedecock/pydfm,jeremiedecock/pydfm | pydfm/__init__.py | pydfm/__init__.py | # PyDuplicateFileManager
# The MIT License
#
# Copyright (c) 2010,2011,2012,2013,2015 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# PEP0440 compatible formatted version, see:
# https://www.python.org/dev/peps/pep-0440/
#
# Generic release markers:
# X.Y
# X.Y.Z # For bugfix releases
#
# Admissible pre-release markers:
# X.YaN # Alpha release
# X.YbN # Beta release
# X.YrcN # Release Candidate
# X.Y # Final release
#
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#
__version__ = '3.0.dev2'
__all__ = ['core',
'database',
'file_hash']
| # PyDuplicateFileManager
# The MIT License
#
# Copyright (c) 2010,2011,2012,2013,2015 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# PEP0440 compatible formatted version, see:
# https://www.python.org/dev/peps/pep-0440/
#
# Generic release markers:
# X.Y
# X.Y.Z # For bugfix releases
#
# Admissible pre-release markers:
# X.YaN # Alpha release
# X.YbN # Beta release
# X.YrcN # Release Candidate
# X.Y # Final release
#
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#
__version__ = '3.0.dev1'
__all__ = ['core']
| mit | Python |
30471ac300005a9c272a68237953161951ea35f4 | Add import matplotlib back | softwaresaved/international-survey | analysis/data_process/uk_2017/config.py | analysis/data_process/uk_2017/config.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
section_nbr_to_keep_after = 1
count_na = True
normalise = False
class PlottingConfig(CleaningConfig):
plot_na = False
normalise = True
# Different than normalise, add a freq_table with percentage
# in addition of the table with counts
show_percent = True
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
allow_errors = True
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from IPython.display import display',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count, get_percentage',
'from plotting import get_plot, display_side_by_side',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './'}}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
section_nbr_to_keep_after = 1
count_na = True
normalise = False
class PlottingConfig(CleaningConfig):
plot_na = False
normalise = True
# Different than normalise, add a freq_table with percentage
# in addition of the table with counts
show_percent = True
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
allow_errors = True
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib.pyplot as plt',
'from IPython.display import display',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count, get_percentage',
'from plotting import get_plot, display_side_by_side',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './'}}
| bsd-3-clause | Python |
44131b55cac2dadee7cb76b82bad455152a92139 | include \. in attachments file slugs | labkaxita/lakaxita,labkaxita/lakaxita,labkaxita/lakaxita | lakaxita/attachments/urls.py | lakaxita/attachments/urls.py | from django.conf.urls.defaults import patterns, url
from lakaxita.attachments.views import AttachmentDetail, FileRedirect
urlpatterns = patterns('',
url('^(?P<slug>(\w|\d|-)+)/$', AttachmentDetail.as_view(), name='detail'),
url('^file/(?P<slug>(\w|\d|-|\.)+)/$', FileRedirect.as_view(), name='file'),
)
| from django.conf.urls.defaults import patterns, url
from lakaxita.attachments.views import AttachmentDetail, FileRedirect
urlpatterns = patterns('',
url('^(?P<slug>(\w|\d|-)+)/$', AttachmentDetail.as_view(), name='detail'),
url('^file/(?P<slug>(\w|\d|-)+)/$', FileRedirect.as_view(), name='file'),
)
| agpl-3.0 | Python |
8ce19d44bce3d6325e4883739a16431e45b01792 | Add command-line parsing and control of the listening address to crossdomaind.py. | glamrock/flashproxy,infinity0/flashproxy,glamrock/flashproxy,arlolra/flashproxy,infinity0/flashproxy,arlolra/flashproxy,infinity0/flashproxy,arlolra/flashproxy,arlolra/flashproxy,arlolra/flashproxy,infinity0/flashproxy,glamrock/flashproxy,arlolra/flashproxy,infinity0/flashproxy,infinity0/flashproxy,glamrock/flashproxy,glamrock/flashproxy,arlolra/flashproxy,glamrock/flashproxy | crossdomaind.py | crossdomaind.py | #!/usr/bin/env python
import getopt
import socket
import sys
DEFAULT_ADDRESS = "0.0.0.0"
DEFAULT_PORT = 843
POLICY = """\
<cross-domain-policy>
<allow-access-from domain="*" to-ports="*"/>
</cross-domain-policy>
\0"""
class options(object):
    # NOTE(review): currently unused placeholder for parsed options —
    # nothing is ever assigned to it; confirm before removing.
    pass
def usage(f = sys.stdout):
    """Print the command-line usage message to the file object *f*.

    Fix: the message is now actually written to *f* — the error path
    below calls usage(sys.stderr), but previously the bare print
    statement ignored *f* and always wrote to stdout.
    """
    print >> f, """\
Usage: %(progname)s <OPTIONS> [HOST] [PORT]
Serve a Flash crossdomain policy. By default HOST is %(addr)s
and PORT is %(port)d.
  -h, --help          show this help.\
""" % {"progname": sys.argv[0], "addr": DEFAULT_ADDRESS, "port": DEFAULT_PORT }
# Parse command-line options; -h/--help prints usage and exits.
opts, args = getopt.gnu_getopt(sys.argv[1:], "h", ["help"])
for o, a in opts:
    if o == "-h" or o == "--help":
        usage()
        sys.exit()

# Positional arguments: [HOST] [PORT]; both are optional.
if len(args) == 0:
    address = (DEFAULT_ADDRESS, DEFAULT_PORT)
elif len(args) == 1:
    # Either HOST or PORT may be omitted; figure out which one.
    # A purely numeric argument is taken to be a PORT.
    if args[0].isdigit():
        address = (DEFAULT_ADDRESS, args[0])
    else:
        address = (args[0], DEFAULT_PORT)
elif len(args) == 2:
    address = (args[0], args[1])
else:
    usage(sys.stderr)
    sys.exit(1)

# Resolve the listening address (handles names as well as literal
# IPv4/IPv6 addresses) and use the first result.
addrinfo = socket.getaddrinfo(address[0], address[1], 0, socket.SOCK_STREAM, socket.IPPROTO_TCP)[0]

s = socket.socket(addrinfo[0], addrinfo[1], addrinfo[2])
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(addrinfo[4])
s.listen(10)

# Serve the static policy to every client, one connection at a time.
while True:
    (c, c_addr) = s.accept()
    c.sendall(POLICY)
    c.close()
| #!/usr/bin/env python
# A simple daemon to serve a cross-domain policy.
import socket
ADDRESS = ("0.0.0.0", 843)
POLICY = """\
<cross-domain-policy>
<allow-access-from domain="*" to-ports="*"/>
</cross-domain-policy>
\0"""
# Listen on the fixed address and serve the static policy to every
# client, one connection at a time.
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(ADDRESS)
s.listen(10)
while True:
    (c, c_addr) = s.accept()
    c.sendall(POLICY)
    c.close()
| mit | Python |
2651b475e998d6033d1cf31047398f985c89f23c | Correct usage message | cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader | client/cvra_bootloader/read_config.py | client/cvra_bootloader/read_config.py | #!/usr/bin/env python3
from cvra_bootloader import commands, utils
import msgpack
import json
def parse_commandline_args():
    """Build the argument parser for this tool and parse sys.argv."""
    parser = utils.ConnectionArgumentParser(
        description='Read board configs and dumps to JSON')
    parser.add_argument("ids", metavar='DEVICEID', nargs='*', type=int,
                        help="Device IDs to query")
    parser.add_argument('-a', '--all', help="Try to scan the whole bus.",
                        action='store_true')
    return parser.parse_args()
def main():
    """Query bootloader configs over the CAN bus and print them as JSON."""
    args = parse_commandline_args()
    connection = utils.open_connection(args)

    if args.all:
        # Discover every responding node: broadcast a ping to IDs 1..127
        # and collect the source ID of each reply until the reader times out.
        scan_queue = list()
        utils.write_command(connection, commands.encode_ping(), list(range(1, 128)))
        reader = utils.read_can_datagrams(connection)
        while True:
            dt = next(reader)
            if dt is None:  # Timeout
                break
            _, _, src = dt
            scan_queue.append(src)
    else:
        # Only query the IDs given on the command line.
        scan_queue = args.ids

    # Ask every targeted board for its configuration (with retries).
    configs = utils.write_command_retry(connection,
                                        commands.encode_read_config(),
                                        scan_queue)

    # Each reply is a msgpack blob; decode in place before dumping.
    for id, raw_config in configs.items():
        configs[id] = msgpack.unpackb(raw_config, encoding='ascii')

    print(json.dumps(configs, indent=4, sort_keys=True))
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
from cvra_bootloader import commands, utils
import msgpack
import json
def parse_commandline_args():
    """
    Parses the program commandline arguments.
    """
    DESCRIPTION = 'Read board configs and dumps to JSON'
    parser = utils.ConnectionArgumentParser(description=DESCRIPTION)
    # NOTE(review): these help texts look copied from the flashing tool —
    # this command queries configs rather than flashing devices.
    parser.add_argument("ids", metavar='DEVICEID', nargs='*', type=int,
                        help="Device IDs to flash")
    parser.add_argument('-a', '--all', help="Try to scan all network.",
                        action='store_true')
    return parser.parse_args()
def main():
    """Query bootloader configs over the CAN bus and print them as JSON."""
    args = parse_commandline_args()
    connection = utils.open_connection(args)

    if args.all:
        # Discover every responding node: broadcast a ping to IDs 1..127
        # and collect the source ID of each reply until the reader times out.
        scan_queue = list()
        utils.write_command(connection, commands.encode_ping(), list(range(1, 128)))
        reader = utils.read_can_datagrams(connection)
        while True:
            dt = next(reader)
            if dt is None:  # Timeout
                break
            _, _, src = dt
            scan_queue.append(src)
    else:
        # Only query the IDs given on the command line.
        scan_queue = args.ids

    # Ask every targeted board for its configuration (with retries).
    configs = utils.write_command_retry(connection,
                                        commands.encode_read_config(),
                                        scan_queue)

    # Each reply is a msgpack blob; decode in place before dumping.
    for id, raw_config in configs.items():
        configs[id] = msgpack.unpackb(raw_config, encoding='ascii')

    print(json.dumps(configs, indent=4, sort_keys=True))
if __name__ == "__main__":
main()
| bsd-2-clause | Python |
37b12e53aff8eb4df8d2f106489da4bd4201c125 | Bump version to 0.4.2. | yunojuno/django-inbound-email | django_inbound_email/__init__.py | django_inbound_email/__init__.py | """An inbound email handler for Django."""
__title__ = 'django-inbound-email'
__version__ = '0.4.2'
__author__ = 'YunoJuno Ltd'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 YunoJuno'
__description__ = 'A Django app for receiving inbound emails.'
| """An inbound email handler for Django."""
__title__ = 'django-inbound-email'
__version__ = '0.4.1'
__author__ = 'YunoJuno Ltd'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 YunoJuno'
__description__ = 'A Django app for receiving inbound emails.'
| mit | Python |
7d75f1dd14374d6bfbb8011fc1853d7d53f4c98c | Fix dtype | wenleix/EdgePPR,wenleix/EdgePPR | src/python/genPriorV.py | src/python/genPriorV.py | # The preprocessing step of DEIM: given ppr_1, ppr_2, ... ppr_k, generate the orthogonal basis.
# This is based on SVD so the basis are sorted according to the singular values (from large to small)
import sys
import os
from sys import argv
import numpy as np
import time
from scipy import linalg
import deimCommon
# Require exactly six arguments: input vector prefix/suffix, the number
# of vectors, their dimension, the element dtype and the output path.
if len(argv) != 7:
    print 'Usage %s [Prefix] [Suffix] [NumVec] [Dim] [dtype] [V.bin]' % (argv[0])
    sys.exit(1)

prefix = argv[1]
suffix = argv[2]
numVec = int(argv[3])
dim = int(argv[4])
dtype = argv[5]
vnpyName = argv[6]

# Load all PPR vectors as the columns of a (dim x numVec) matrix.
tic = time.time()
pprs = np.zeros((dim, numVec), dtype=dtype)
idx = 0
for ppr in deimCommon.loadVectors(prefix, suffix, numVec):
    pprs[:, idx] = ppr
    idx += 1
print pprs.shape
toc = time.time()
print >> sys.stderr, "Read complete, %.2f sec elapsed." % (toc - tic)

# Thin SVD: the columns of U are an orthonormal basis ordered by
# decreasing singular value.
tic = time.time()
(U, _, _) = linalg.svd(pprs, full_matrices=False, compute_uv=True)
toc = time.time()
print >> sys.stderr, "SVD complete, %.2f sec elapsed." % (toc - tic)

#print >> sys.stderr, 'Matrix R = '
#print >> sys.stderr, R

# Save the first 100 basis vectors.
# NOTE(review): assumes numVec >= 100 — fewer columns are saved otherwise.
np.save(vnpyName, U[:, :100])

# Save it into a C++ compatible form
# As tofile() write the matrix in the row order, we transpose the matrix first.
#print >> sys.stdout, U.shape
#U[:, :100].T.tofile(vnpyName)
| # The preprocessing step of DEIM: given ppr_1, ppr_2, ... ppr_k, generate the orthogonal basis.
# This is based on SVD so the basis are sorted according to the singular values (from large to small)
import sys
import os
from sys import argv
import numpy as np
import time
from scipy import linalg
import deimCommon
if len(argv) != 7:
print 'Usage %s [Prefix] [Suffix] [NumVec] [Dim] [dtype] [V.bin]' % (argv[0])
sys.exit(1)
prefix = argv[1]
suffix = argv[2]
numVec = int(argv[3])
dim = int(argv[4])
dtype = argv[5]
vnpyName = argv[6]
tic = time.time()
pprs = np.zeros((dim, numVec), dtype="float32")
idx = 0
for ppr in deimCommon.loadVectors(prefix, suffix, numVec):
pprs[:, idx] = ppr
idx += 1
print pprs.shape
toc = time.time()
print >> sys.stderr, "Read complete, %.2f sec elapsed." % (toc - tic)
tic = time.time()
(U, _, _) = linalg.svd(pprs, full_matrices=False, compute_uv=True)
toc = time.time()
print >> sys.stderr, "SVD complete, %.2f sec elapsed." % (toc - tic)
#print >> sys.stderr, 'Matrix R = '
#print >> sys.stderr, R
# Save V
np.save(vnpyName, U[:, :100])
# Save it into a C++ compatible form
# As tofile() write the matrix in the row order, we transpose the matrix first.
#print >> sys.stdout, U.shape
#U[:, :100].T.tofile(vnpyName)
| apache-2.0 | Python |
7851328ceec4a5fbe944f81783912706c7495d11 | Complete iter sol | bowen0701/algorithms_data_structures | lc0404_sum_of_left_leaves.py | lc0404_sum_of_left_leaves.py | """Leetcode 404. Sum of Left Leaves
Easy
URL: https://leetcode.com/problems/sum-of-left-leaves/
Find the sum of all left leaves in a given binary tree.
Example:
3
/ \
9 20
/ \
15 7
There are two left leaves in the binary tree, with values 9 and 15 respectively.
Return 24.
"""
# Definition for a binary tree node.
class TreeNode(object):
    """A binary tree node: a value plus left/right child links."""

    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class SolutionRecur(object):
    """Recursive solution: sum the values of all left leaves of a tree."""

    def sumOfLeftLeaves(self, root):
        """
        :type root: TreeNode
        :rtype: int

        Time complexity: O(n).
        Space complexity: O(logn) for balanced tree; O(n) for singly linked list.
        """
        if root is None:
            return 0

        left, right = root.left, root.right
        # A left child with no children of its own is a left leaf: count it
        # and only the right subtree can still contribute.
        if left is not None and left.left is None and left.right is None:
            return left.val + self.sumOfLeftLeaves(right)
        # Otherwise recurse down both sides.
        return self.sumOfLeftLeaves(left) + self.sumOfLeftLeaves(right)
class SolutionIter(object):
    """Iterative solution: sum all left-leaf values with an explicit stack."""

    def sumOfLeftLeaves(self, root):
        """
        :type root: TreeNode
        :rtype: int

        Time complexity: O(n).
        Space complexity: O(logn) for balanced tree; O(n) for singly linked list.
        """
        if root is None:
            return 0

        total = 0
        pending = [root]
        while pending:
            node = pending.pop()
            left, right = node.left, node.right
            if left is not None:
                if left.left is None and left.right is None:
                    # Left leaf: accumulate its value.
                    total += left.val
                else:
                    # Internal left child: descend later.
                    pending.append(left)
            # A right child only matters if it is an internal node — right
            # leaves are never counted.
            if right is not None and (right.left is not None or right.right is not None):
                pending.append(right)
        return total
def main():
    # Tree 3 / (9, 20 / (15, 7)): the left leaves are 9 and 15, so both
    # solutions print 24.  (The previous "# Output: 20." comment was wrong.)
    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    print SolutionRecur().sumOfLeftLeaves(root)
    print SolutionIter().sumOfLeftLeaves(root)
if __name__ == '__main__':
main()
| """Leetcode 404. Sum of Left Leaves
Easy
URL: https://leetcode.com/problems/sum-of-left-leaves/
Find the sum of all left leaves in a given binary tree.
Example:
3
/ \
9 20
/ \
15 7
There are two left leaves in the binary tree, with values 9 and 15 respectively.
Return 24.
"""
# Definition for a binary tree node.
class TreeNode(object):
    """A binary tree node: a value plus left/right child links."""
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class SolutionRecur(object):
    """Recursive solution: sum the values of all left leaves of a tree."""
    def sumOfLeftLeaves(self, root):
        """
        :type root: TreeNode
        :rtype: int

        Time complexity: O(n).
        Space complexity: O(logn) for balanced tree; O(n) for singly linked list.
        """
        # Base case: an empty subtree contributes nothing.
        if not root:
            return 0

        # A left child with no children of its own is a left leaf: count
        # it, and only the right subtree can still contribute.
        if root.left and not root.left.left and not root.left.right:
            return root.left.val + self.sumOfLeftLeaves(root.right)
        else:
            return self.sumOfLeftLeaves(root.left) + self.sumOfLeftLeaves(root.right)
def main():
    # Tree 3 / (9, 20 / (15, 7)): the left leaves are 9 and 15, so this
    # prints 24.  (The previous "# Output: 20." comment was wrong.)
    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    print SolutionRecur().sumOfLeftLeaves(root)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
a04e783c99f54ad7cc2525287ad95ae1308769b1 | clean up the users in the example | SupermanScott/Redboy | examples/user.py | examples/user.py | # -*- coding: utf-8 -*-
#
# © 2012 Scott Reynolds
# Author: Scott Reynolds <scott@scottreynolds.us>
#
"""Example of a User Model"""
from redboy.record import Record, MirroredRecord
from redboy.key import Key
from redboy.view import Queue, Stack, Score
from time import time
import redboy.exceptions as exc
user_prefix = "user:"
view_prefix = "created:"
pool_name = "database"
class UserEmail(MirroredRecord):
    """Mirrored Record that allows fetching a User by email address."""

    def mirror_key(self, parent_record):
        # Only records that actually carry an email get a mirror key.
        assert isinstance(parent_record, Record)
        if 'email' in parent_record:
            return self.make_key(parent_record['email'])

    def make_key(self, key=None):
        # Mirror keys live in the same pool under the "user:email:" prefix.
        return Key(pool_name, "user:email:", key)
# Three views index users by creation time: a FIFO queue, a LIFO stack,
# and a scored view keyed by the record's 'created' timestamp.
score_function = lambda x: x['created']
user_view = Queue(Key(pool_name, view_prefix, "users"))
stack_view = Stack(Key(pool_name, "reverse_created:", "users"))
scored_view = Score(Key(pool_name, "created_date:", "users"), score_function, True)
class User(Record):
    """Example user record stored under the "user:" prefix.

    'email' is required; saving a record also updates the three views
    above and the UserEmail mirror.
    """
    _required = ('email',)
    _prefix = user_prefix
    _pool_name = pool_name
    _views = (
        user_view,
        stack_view,
        scored_view,
    )
    _mirrors = (UserEmail(),)
def main():
    """Exercise save/load, the email mirror and the three views, then clean up."""
    scott = User(first_name="Scott", last_name="Reynolds", created=time())
    try:
        scott.save()
    except exc.ErrorMissingField, missing:
        # Should happen because the required 'email' field is missing.
        print missing

    scott['email'] = 'scott@scottreynolds.us'
    scott.save()

    scott['first_name'] = 'scott'
    assert scott['first_name'] != User().load(scott.key)['first_name'], \
        "Name has now changed"
    scott.save()
    assert scott['first_name'] == User().load(scott.key)['first_name'], \
        "Changed name is now saved to the database"

    thomas_jeffereson = User(first_name="thomas", last_name="jefferson",
                             email="no-replay@us.gov", created=time()).save()
    # The mirror record lets us look a user up by email address.
    assert thomas_jeffereson['first_name'] == UserEmail().load(thomas_jeffereson['email'])['first_name']

    assert len(user_view) == 2, "Only two users in the view"
    assert len(stack_view) == 2, "Only two users in the stack view"
    assert len(scored_view) == 2, "Only two users in the scored view"

    # Queue is FIFO (first saved first), stack is LIFO (last saved first).
    assert user_view[0]['first_name'] == scott['first_name']
    assert stack_view[0]['first_name'] == thomas_jeffereson['first_name']

    for user in user_view:
        print user

    for user in scored_view:
        print user

    # Clean up so the example can be re-run against the same store.
    scott.remove()
    thomas_jeffereson.remove()
if __name__ == '__main__':
main()
| # -*- coding: utf-8 -*-
#
# © 2012 Scott Reynolds
# Author: Scott Reynolds <scott@scottreynolds.us>
#
"""Example of a User Model"""
from record import Record, MirroredRecord
from redboy.key import Key
from redboy.view import Queue, Stack, Score
from time import time
import redboy.exceptions as exc
user_prefix = "user:"
view_prefix = "created:"
pool_name = "database"
class UserEmail(MirroredRecord):
    """Mirrored Record that allows fetching a User by email address."""

    def mirror_key(self, parent_record):
        # Only records that actually carry an email get a mirror key.
        assert isinstance(parent_record, Record)
        if 'email' in parent_record:
            return self.make_key(parent_record['email'])

    def make_key(self, key=None):
        # Mirror keys live in the same pool under the "user:email:" prefix.
        return Key(pool_name, "user:email:", key)
# Three views index users by creation time: a FIFO queue, a LIFO stack,
# and a scored view keyed by the record's 'created' timestamp.
score_function = lambda x: x['created']
user_view = Queue(Key(pool_name, view_prefix, "users"))
stack_view = Stack(Key(pool_name, "reverse_created:", "users"))
scored_view = Score(Key(pool_name, "created_date:", "users"), score_function, True)
class User(Record):
    """Example user record stored under the "user:" prefix.

    'email' is required; saving a record also updates the three views
    above and the UserEmail mirror.
    """
    _required = ('email',)
    _prefix = user_prefix
    _pool_name = pool_name
    _views = (
        user_view,
        stack_view,
        scored_view,
    )
    _mirrors = (UserEmail(),)
def main():
    """Exercise save/load, the email mirror and the three views."""
    scott = User(first_name="Scott", last_name="Reynolds", created=time())
    try:
        scott.save()
    except exc.ErrorMissingField, missing:
        # Should happen because the required 'email' field is missing.
        print missing

    scott['email'] = 'scott@scottreynolds.us'
    scott.save()

    scott['first_name'] = 'scott'
    assert scott['first_name'] != User().load(scott.key)['first_name'], \
        "Name has now changed"
    scott.save()
    assert scott['first_name'] == User().load(scott.key)['first_name'], \
        "Changed name is now saved to the database"

    thomas_jeffereson = User(first_name="thomas", last_name="jefferson",
                             email="no-replay@us.gov", created=time()).save()
    # The mirror record lets us look a user up by email address.
    assert thomas_jeffereson['first_name'] == UserEmail().load(thomas_jeffereson['email'])['first_name']

    assert len(user_view) == 2, "Only two users in the view"
    assert len(stack_view) == 2, "Only two users in the stack view"
    assert len(scored_view) == 2, "Only two users in the scored view"

    # Queue is FIFO (first saved first), stack is LIFO (last saved first).
    assert user_view[0]['first_name'] == scott['first_name']
    assert stack_view[0]['first_name'] == thomas_jeffereson['first_name']

    for user in user_view:
        print user

    for user in scored_view:
        print user

    # NOTE(review): the created records are never removed, so re-running
    # this example against the same store will trip the len() asserts.
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
d10860567eca20b95e652d13b8741338ec85f35e | Refresh cache when modifying tasks from API | OpenNewsLabs/pybossa,geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,PyBossa/pybossa,geotagx/pybossa,Scifabric/pybossa,jean/pybossa,jean/pybossa,Scifabric/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,inteligencia-coletiva-lsd/pybossa | pybossa/api/task.py | pybossa/api/task.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa api module for exposing domain object Task via an API.
This package adds GET, POST, PUT and DELETE methods for:
* tasks
"""
from pybossa.model.task import Task
from api_base import APIBase
from pybossa.cache import apps as cached_apps
class TaskAPI(APIBase):
"""Class for domain object Task."""
__class__ = Task
def _refresh_cache(self, task):
cached_apps.clean_project(task.app_id)
| # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa api module for exposing domain object Task via an API.
This package adds GET, POST, PUT and DELETE methods for:
* tasks
"""
from pybossa.model.task import Task
from api_base import APIBase
class TaskAPI(APIBase):
"""Class for domain object Task."""
__class__ = Task
| agpl-3.0 | Python |
e2cbd73218e6f5cf5b86718f6adebd92e9fee2a3 | Make sure land filters are set up when testing | GeotrekCE/Geotrek-admin,johan--/Geotrek,johan--/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,johan--/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,Anaethelion/Geotrek | geotrek/trekking/tests/test_filters.py | geotrek/trekking/tests/test_filters.py | # Make sure land filters are set up when testing
from geotrek.land.filters import * # NOQA
from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
    """Run the generic land-filter test suite against the trek filterset."""
    filterclass = TrekFilterSet

    def test_land_filters_are_well_setup(self):
        # The land app's filters must have been registered on TrekFilterSet.
        filterset = TrekFilterSet()
        self.assertIn('work', filterset.filters)

    def create_pair_of_distinct_path(self):
        # Attach a trek topology to the "seek" path so the land filters
        # have something to discriminate the two paths by.
        useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
        self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
        return useless_path, seek_path
| from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
    """Run the generic land-filter test suite against the trek filterset."""
    filterclass = TrekFilterSet

    def test_land_filters_are_well_setup(self):
        # The land app's filters must have been registered on TrekFilterSet.
        filterset = TrekFilterSet()
        self.assertIn('work', filterset.filters)

    def create_pair_of_distinct_path(self):
        # Attach a trek topology to the "seek" path so the land filters
        # have something to discriminate the two paths by.
        useless_path, seek_path = super(TrekFilterLandTest, self).create_pair_of_distinct_path()
        self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
        return useless_path, seek_path
| bsd-2-clause | Python |
d5780089b268f353e49d9a0a6460d3a2dde3888f | Update to 0.1.1 | britny/djangocms-styledlink,mkoistinen/djangocms-styledlink,mkoistinen/djangocms-styledlink,britny/djangocms-styledlink | djangocms_styledlink/__init__.py | djangocms_styledlink/__init__.py | __version__ = '0.1.1' | __version__ = '0.1.0' | bsd-3-clause | Python |
beafa2d2e34a55486fecae6944c8b9e365f35fc9 | Use create instead of created | jeffdwyatt/taiga-back,crr0004/taiga-back,xdevelsistemas/taiga-back-community,gam-phon/taiga-back,EvgeneOskin/taiga-back,gam-phon/taiga-back,CoolCloud/taiga-back,19kestier/taiga-back,astronaut1712/taiga-back,dycodedev/taiga-back,WALR/taiga-back,coopsource/taiga-back,seanchen/taiga-back,Rademade/taiga-back,taigaio/taiga-back,dycodedev/taiga-back,astagi/taiga-back,coopsource/taiga-back,EvgeneOskin/taiga-back,jeffdwyatt/taiga-back,dycodedev/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,forging2012/taiga-back,astagi/taiga-back,frt-arch/taiga-back,obimod/taiga-back,Rademade/taiga-back,bdang2012/taiga-back-casting,seanchen/taiga-back,WALR/taiga-back,crr0004/taiga-back,crr0004/taiga-back,gauravjns/taiga-back,gam-phon/taiga-back,coopsource/taiga-back,CMLL/taiga-back,seanchen/taiga-back,astronaut1712/taiga-back,CMLL/taiga-back,rajiteh/taiga-back,dayatz/taiga-back,CoolCloud/taiga-back,19kestier/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,gauravjns/taiga-back,EvgeneOskin/taiga-back,gauravjns/taiga-back,taigaio/taiga-back,Tigerwhit4/taiga-back,CoolCloud/taiga-back,joshisa/taiga-back,CoolCloud/taiga-back,bdang2012/taiga-back-casting,rajiteh/taiga-back,gauravjns/taiga-back,Tigerwhit4/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,frt-arch/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,seanchen/taiga-back,forging2012/taiga-back,obimod/taiga-back,dycodedev/taiga-back,WALR/taiga-back,Rademade/taiga-back,rajiteh/taiga-back,CMLL/taiga-back,Tigerwhit4/taiga-back,joshisa/taiga-back,bdang2012/taiga-back-casting,dayatz/taiga-back,astronaut1712/taiga-back,dayatz/taiga-back,obimod/taiga-back,forging2012/taiga-back,Tigerwhit4/taiga-back,coopsource/taiga-back,19kestier/taiga-back,bdang2012/taiga-back-casting,Rademade/taiga-back,astagi/taiga-back,xdevelsistemas/taiga-back-community,WALR/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,joshisa/taiga-back,crr00
04/taiga-back,obimod/taiga-back,jeffdwyatt/taiga-back,joshisa/taiga-back,frt-arch/taiga-back | taiga/events/signal_handlers.py | taiga/events/signal_handlers.py | # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db.models import signals
from django.db import connection
from django.dispatch import receiver
from taiga.base.utils.db import get_typename_for_model_instance
from . import middleware as mw
from . import events
def on_save_any_model(sender, instance, created, **kwargs):
    """Post-save signal handler: emit a realtime event for watched models.

    Emission is deferred until the current transaction commits so clients
    never see events for rolled-back changes.
    """
    # Ignore any object that can not have project_id
    content_type = get_typename_for_model_instance(instance)

    # Only watched content types generate events.
    if content_type not in events.watched_types:
        return

    sesionid = mw.get_current_session_id()

    # `event_type` instead of `type`, to avoid shadowing the builtin.
    event_type = "create" if created else "change"

    def emit_event():
        events.emit_event_for_model(instance, sessionid=sesionid, type=event_type)

    connection.on_commit(emit_event)
def on_delete_any_model(sender, instance, **kwargs):
    """Post-delete signal handler: emit a "delete" event after commit."""
    # Objects whose content type is not watched never generate events.
    content_type = get_typename_for_model_instance(instance)
    if content_type not in events.watched_types:
        return

    sesionid = mw.get_current_session_id()

    def emit_event():
        events.emit_event_for_model(instance, sessionid=sesionid, type="delete")

    # Defer emission until the transaction commits.
    connection.on_commit(emit_event)
| # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db.models import signals
from django.db import connection
from django.dispatch import receiver
from taiga.base.utils.db import get_typename_for_model_instance
from . import middleware as mw
from . import events
def on_save_any_model(sender, instance, created, **kwargs):
    """Post-save signal handler: emit a realtime event once the current
    transaction commits."""
    # Ignore any object that can not have project_id
    content_type = get_typename_for_model_instance(instance)

    # Ignore any other events
    if content_type not in events.watched_types:
        return

    sesionid = mw.get_current_session_id()

    # Fix: use "create" (imperative, consistent with the "change" and
    # "delete" event types) instead of the past-tense "created".
    type = "change"
    if created:
        type = "create"

    emit_event = lambda: events.emit_event_for_model(instance, sessionid=sesionid, type=type)
    connection.on_commit(emit_event)
def on_delete_any_model(sender, instance, **kwargs):
    """Post-delete signal handler: emit a "delete" event after commit."""
    # Ignore any object that can not have project_id
    content_type = get_typename_for_model_instance(instance)

    # Ignore any other changes
    if content_type not in events.watched_types:
        return

    sesionid = mw.get_current_session_id()

    # Defer emission until the transaction commits so clients never see
    # events for rolled-back deletions.
    emit_event = lambda: events.emit_event_for_model(instance, sessionid=sesionid, type="delete")
    connection.on_commit(emit_event)
| agpl-3.0 | Python |
301ba33c0b9fcc1924a90aba646cc375a9ef76e5 | Simplify logic | daevaorn/sentry,gencer/sentry,fotinakis/sentry,alexm92/sentry,ifduyue/sentry,nicholasserra/sentry,mitsuhiko/sentry,mvaled/sentry,nicholasserra/sentry,zenefits/sentry,JamesMura/sentry,BuildingLink/sentry,ifduyue/sentry,BuildingLink/sentry,JackDanger/sentry,jean/sentry,ifduyue/sentry,gencer/sentry,alexm92/sentry,looker/sentry,beeftornado/sentry,JamesMura/sentry,JackDanger/sentry,nicholasserra/sentry,BuildingLink/sentry,looker/sentry,daevaorn/sentry,jean/sentry,zenefits/sentry,gencer/sentry,JackDanger/sentry,BuildingLink/sentry,zenefits/sentry,ifduyue/sentry,mitsuhiko/sentry,fotinakis/sentry,mvaled/sentry,gencer/sentry,looker/sentry,looker/sentry,gencer/sentry,jean/sentry,BuildingLink/sentry,mvaled/sentry,jean/sentry,mvaled/sentry,daevaorn/sentry,mvaled/sentry,ifduyue/sentry,daevaorn/sentry,looker/sentry,beeftornado/sentry,alexm92/sentry,JamesMura/sentry,fotinakis/sentry,fotinakis/sentry,jean/sentry,mvaled/sentry,JamesMura/sentry,JamesMura/sentry,zenefits/sentry,beeftornado/sentry,zenefits/sentry | src/sentry/api/endpoints/group_tags.py | src/sentry/api/endpoints/group_tags.py | from __future__ import absolute_import
from rest_framework.response import Response
from collections import defaultdict
from sentry.api.bases.group import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.models import GroupTagValue, GroupTagKey, TagKey, TagKeyStatus
class GroupTagsEndpoint(GroupEndpoint):
    def get(self, request, group):
        """Return every visible tag key of *group* together with its
        value counts and top ten values."""
        tag_keys = TagKey.objects.filter(
            project=group.project,
            status=TagKeyStatus.VISIBLE,
            key__in=GroupTagKey.objects.filter(
                group=group,
            ).values('key'),
        )

        # O(N) db access
        data = []
        all_top_values = []
        for tag_key in tag_keys:
            total_values = GroupTagValue.get_value_count(group.id, tag_key.key)
            top_values = GroupTagValue.get_top_values(group.id, tag_key.key, limit=10)
            # Internal "sentry:" tags are exposed without their prefix.
            if tag_key.key.startswith('sentry:'):
                key = tag_key.key.split('sentry:', 1)[-1]
            else:
                key = tag_key.key
            all_top_values.extend(top_values)
            data.append({
                'id': str(tag_key.id),
                'key': key,
                'name': tag_key.get_label(),
                'uniqueValues': tag_key.values_seen,
                'totalValues': total_values,
            })

        # Serialize all of the values at once to avoid O(n) serialize/db queries
        top_values_by_key = defaultdict(list)
        for value in serialize(all_top_values, request.user):
            top_values_by_key[value['key']].append(value)

        for d in data:
            d['topValues'] = top_values_by_key[d['key']]

        return Response(data)
| from __future__ import absolute_import
from rest_framework.response import Response
from collections import defaultdict
from itertools import chain
from sentry.api.bases.group import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.models import GroupTagValue, GroupTagKey, TagKey, TagKeyStatus
class GroupTagsEndpoint(GroupEndpoint):
    def get(self, request, group):
        """Return every visible tag key of *group* together with its
        value counts and top ten values."""
        tag_keys = TagKey.objects.filter(
            project=group.project,
            status=TagKeyStatus.VISIBLE,
            key__in=GroupTagKey.objects.filter(
                group=group,
            ).values('key'),
        )

        # O(N) db access
        data = []
        top_values_by_key = {}
        for tag_key in tag_keys:
            total_values = GroupTagValue.get_value_count(group.id, tag_key.key)
            top_values = GroupTagValue.get_top_values(group.id, tag_key.key, limit=10)
            # Internal "sentry:" tags are exposed without their prefix.
            if tag_key.key.startswith('sentry:'):
                key = tag_key.key.split('sentry:', 1)[-1]
            else:
                key = tag_key.key
            top_values_by_key[key] = top_values
            data.append({
                'id': str(tag_key.id),
                'key': key,
                'name': tag_key.get_label(),
                'uniqueValues': tag_key.values_seen,
                'totalValues': total_values,
            })

        # Serialize all of the values at once to avoid O(n) serialize/db queries
        top_values_by_key_serialized = defaultdict(list)
        for value in serialize(list(chain.from_iterable(top_values_by_key.itervalues())), request.user):
            top_values_by_key_serialized[value['key']].append(value)

        for d in data:
            d['topValues'] = top_values_by_key_serialized[d['key']]

        return Response(data)
| bsd-3-clause | Python |
718b7fe643fc49d1b4261338f68c2216a3391df4 | correct dev version | samuelcolvin/pydantic,samuelcolvin/pydantic | pydantic/version.py | pydantic/version.py | __all__ = ['VERSION', 'version_info']
VERSION = '1.4a1'
def version_info() -> str:
    """Summarise pydantic's version, install path and environment as text."""
    import platform
    import sys
    from importlib import import_module
    from pathlib import Path

    from .main import compiled

    def _importable(dist_name: str) -> bool:
        # Distribution names use '-', module names use '_'.
        try:
            import_module(dist_name.replace('-', '_'))
        except ImportError:
            return False
        return True

    optional_deps = [
        p for p in ('typing-extensions', 'email-validator', 'devtools') if _importable(p)
    ]

    info = {
        'pydantic version': VERSION,
        'pydantic compiled': compiled,
        'install path': Path(__file__).resolve().parent,
        'python version': sys.version,
        'platform': platform.platform(),
        'optional deps. installed': optional_deps,
    }
    # Right-align labels in a 30-character column; flatten embedded newlines.
    return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items())
| __all__ = ['VERSION', 'version_info']
VERSION = '1.3a1'
def version_info() -> str:
    """Summarise pydantic's version, install path and environment as text."""
    import platform
    import sys
    from importlib import import_module
    from pathlib import Path

    from .main import compiled

    # Probe which optional dependencies are importable (distribution
    # names use '-', module names use '_').
    optional_deps = []
    for p in ('typing-extensions', 'email-validator', 'devtools'):
        try:
            import_module(p.replace('-', '_'))
        except ImportError:
            continue
        optional_deps.append(p)

    info = {
        'pydantic version': VERSION,
        'pydantic compiled': compiled,
        'install path': Path(__file__).resolve().parent,
        'python version': sys.version,
        'platform': platform.platform(),
        'optional deps. installed': optional_deps,
    }
    # Right-align labels in a 30-character column; flatten embedded newlines.
    return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items())
| mit | Python |
751339df0a9c8b45c32b3e55e3a36e1e456d7f99 | Remove VerboseHook. | DOV-Vlaanderen/pydov | pydov/util/hooks.py | pydov/util/hooks.py | import sys
class AbstractHook(object):
    """Base class for pydov hooks: every callback is a no-op by default.

    Subclasses override the callbacks they care about.
    """

    def __init__(self, name):
        # Human-readable name of the hook.
        self.name = name

    def wfs_search(self, typename):
        """Hook point for a WFS search of `typename`."""

    def wfs_result(self, number_of_results):
        """Hook point for a WFS search that yielded `number_of_results` items."""

    def xml_requested(self, url):
        """Hook point for the XML document at `url` being requested."""

    def xml_cache_hit(self, url):
        """Hook point for the XML of `url` being served from the cache."""

    def xml_cache_miss(self, url):
        """Hook point for the XML of `url` not being found in the cache."""

    def xml_downloaded(self, url):
        """Hook point for the XML of `url` being downloaded."""
class SimpleStatusHook(AbstractHook):
def __init__(self):
super(SimpleStatusHook, self).__init__('SimpleStatusHook')
self.result_count = 0
self.prog_counter = 0
def _write_progress(self, char):
if self.prog_counter == 0:
sys.stdout.write('[%03i/%03i] ' % (self.prog_counter,
self.result_count))
sys.stdout.flush()
elif self.prog_counter % 50 == 0:
sys.stdout.write('\n[%03i/%03i] ' % (self.prog_counter,
self.result_count))
sys.stdout.flush()
sys.stdout.write(char)
sys.stdout.flush()
self.prog_counter += 1
if self.prog_counter == self.result_count:
sys.stdout.write('\n')
sys.stdout.flush()
def wfs_search(self, typename):
self.result_count = 0
self.prog_counter = 0
def wfs_result(self, number_of_results):
self.result_count = number_of_results
def xml_cache_hit(self, url):
self._write_progress('c')
def xml_cache_miss(self, url):
self._write_progress('.')
| import sys
class AbstractHook(object):
def __init__(self, name):
self.name = name
def wfs_search(self, typename):
pass
def wfs_result(self, number_of_results):
pass
def xml_requested(self, url):
pass
def xml_cache_hit(self, url):
pass
def xml_cache_miss(self, url):
pass
def xml_downloaded(self, url):
pass
class VerboseHook(AbstractHook):
def __init__(self):
super(VerboseHook, self).__init__('VerboseHook')
def wfs_search(self, typename):
print('Searching WFS service for %s.' % typename)
def wfs_result(self, number_of_results):
print('WFS query yielded %i results.' % number_of_results)
def xml_requested(self, url):
print('Requesting XML for object %s.' % url)
def xml_cache_hit(self, url):
print('Using cached XML for object %s.' % url)
def xml_cache_miss(self, url):
pass
def xml_downloaded(self, url):
print('Downloaded XML from DOV services for object %s.' % url)
class SimpleStatusHook(AbstractHook):
def __init__(self):
super(SimpleStatusHook, self).__init__('SimpleStatusHook')
self.result_count = 0
self.prog_counter = 0
def _write_progress(self, char):
if self.prog_counter == 0:
sys.stdout.write('[%03i/%03i] ' % (self.prog_counter,
self.result_count))
sys.stdout.flush()
elif self.prog_counter % 50 == 0:
sys.stdout.write('\n[%03i/%03i] ' % (self.prog_counter,
self.result_count))
sys.stdout.flush()
sys.stdout.write(char)
sys.stdout.flush()
self.prog_counter += 1
if self.prog_counter == self.result_count:
sys.stdout.write('\n')
sys.stdout.flush()
def wfs_search(self, typename):
self.result_count = 0
self.prog_counter = 0
def wfs_result(self, number_of_results):
self.result_count = number_of_results
def xml_cache_hit(self, url):
self._write_progress('c')
def xml_cache_miss(self, url):
self._write_progress('.')
| mit | Python |
e5dd9b6348a00815b3de59c4f41bc4e17cd4231e | remove loading messages from validator | NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint | fdp/validator.py | fdp/validator.py | import pkg_resources
from pyshacl import validate
from rdflib.graph import Graph
def _validate(data, shapes_file, fdp=False):
try:
data_format = 'turtle'
shapes_file_format = 'turtle'
# validate number of subjects or focus nodes
g = Graph()
g.parse(data=data, format=data_format)
s_set = set([s for s, p, o in g])
if len(s_set) == 0:
raise ValueError('Empty content in metadtata')
elif len(s_set) > 1 and fdp:
raise ValueError('FDP layer allows only one subject in metadata')
print('fdp only allow one subject')
# validate SHACL shapes
conforms, v_graph, v_text = validate(data, shacl_graph=shapes_file,
data_graph_format=data_format,
shacl_graph_format=shapes_file_format,
inference='rdfs', debug=False,
serialize_report_graph=True)
return conforms, v_text
except ValueError as e:
return False, e.args[0]
except Exception as e:
return False, e.message
class FDPValidator():
def __init__(self):
self.fdp_shapes = pkg_resources.resource_string(__name__, 'schema/fdp.shacl')
self.catalog_shapes = pkg_resources.resource_string(__name__, 'schema/catalog.shacl')
self.dataset_shapes = pkg_resources.resource_string(__name__, 'schema/dataset.shacl')
self.distribution_shapes = pkg_resources.resource_string(__name__, 'schema/distribution.shacl')
def validateFDP(self, data):
return _validate(data, self.fdp_shapes, fdp=True)
def validateCatalog(self, data):
return _validate(data, self.catalog_shapes)
def validateDataset(self, data):
return _validate(data, self.dataset_shapes)
def validateDistribution(self, data):
return _validate(data, self.distribution_shapes)
| import pkg_resources
from pyshacl import validate
from rdflib.graph import Graph
def _validate(data, shapes_file, fdp=False):
try:
data_format = 'turtle'
shapes_file_format = 'turtle'
# validate number of subjects or focus nodes
g = Graph()
g.parse(data=data, format=data_format)
s_set = set([s for s, p, o in g])
if len(s_set) == 0:
raise ValueError('Empty content in metadtata')
elif len(s_set) > 1 and fdp:
raise ValueError('FDP layer allows only one subject in metadata')
print('fdp only allow one subject')
# validate SHACL shapes
conforms, v_graph, v_text = validate(data, shacl_graph=shapes_file,
data_graph_format=data_format,
shacl_graph_format=shapes_file_format,
inference='rdfs', debug=False,
serialize_report_graph=True)
return conforms, v_text
except ValueError as e:
return False, e.args[0]
except Exception as e:
return False, e.message
class FDPValidator():
def __init__(self):
print('Loading fdp shapes')
self.fdp_shapes = pkg_resources.resource_string(__name__, 'schema/fdp.shacl')
print('Loading catalog shapes')
self.catalog_shapes = pkg_resources.resource_string(__name__, 'schema/catalog.shacl')
print('Loading dataset shapes')
self.dataset_shapes = pkg_resources.resource_string(__name__, 'schema/dataset.shacl')
print('Loading distribution shapes')
self.distribution_shapes = pkg_resources.resource_string(__name__, 'schema/distribution.shacl')
def validateFDP(self, data):
return _validate(data, self.fdp_shapes, fdp=True)
def validateCatalog(self, data):
return _validate(data, self.catalog_shapes)
def validateDataset(self, data):
return _validate(data, self.dataset_shapes)
def validateDistribution(self, data):
return _validate(data, self.distribution_shapes)
| apache-2.0 | Python |
dd2a0849b10feaf44d09f8a37e12e6da91c97c8f | annotate bad PDF function | zpace/stellarmass_pca | figures_tools.py | figures_tools.py | import os
import numpy as np
import matplotlib.pyplot as plt
from copy import copy
from astropy.wcs import WCS
from astropy.wcs.utils import skycoord_to_pixel, proj_plane_pixel_scales
import astropy.coordinates as coords
import warnings
try:
from astropy.wcs.utils import linear_offset_coordinates
except ImportError:
pass
else:
warnings.warn('linear_offset_coordinates now available! Use it instead!')
cm = copy(plt.cm.viridis)
cm.set_under(color='gray', alpha=0.5)
cm.set_bad(alpha=1.)
def linear_offset_coordinates(wcs, center):
'''
return a locally linear offset coordinate system
does the simplest thing possible and assumes no projection distortions
'''
assert isinstance(center, coords.SkyCoord), \
'`center` must by of type `SkyCoord`'
assert center.isscalar, '`center` must have length 1'
# Convert center to pixel coordinates
xp, yp = skycoord_to_pixel(center, wcs)
# Set up new WCS
new_wcs = WCS(naxis=2)
new_wcs.wcs.crpix = xp + 1, yp + 1
new_wcs.wcs.crval = 0., 0.
new_wcs.wcs.cdelt = proj_plane_pixel_scales(wcs)
new_wcs.wcs.ctype = 'XOFFSET', 'YOFFSET'
new_wcs.wcs.cunit = 'deg', 'deg'
return new_wcs
def savefig(fig, fname, fdir, close=True, **kwargs):
fpath = os.path.join(fdir, fname)
fig.savefig(fpath, **kwargs)
if close:
plt.close(fig)
def annotate_badPDF(ax, mask):
# place little, red 'x' markers where mask is true
x = np.array(range(mask.shape[0]))
y = np.array(range(mask.shape[1]))
XX, YY = np.meshgrid(x, y)
XX, YY = XX[mask], YY[mask]
ax.scatter(XX, YY, facecolor='r', edgecolor='None', s=5, marker='.',
zorder=10)
| import os
from astropy.wcs import WCS
from astropy.wcs.utils import skycoord_to_pixel, proj_plane_pixel_scales
import astropy.coordinates as coords
import warnings
try:
from astropy.wcs.utils import linear_offset_coordinates
except ImportError:
pass
else:
warnings.warn('linear_offset_coordinates now available! Use it instead!')
def linear_offset_coordinates(wcs, center):
'''
return a locally linear offset coordinate system
does the simplest thing possible and assumes no projection distortions
'''
assert isinstance(center, coords.SkyCoord), \
'`center` must by of type `SkyCoord`'
assert center.isscalar, '`center` must have length 1'
# Convert center to pixel coordinates
xp, yp = skycoord_to_pixel(center, wcs)
# Set up new WCS
new_wcs = WCS(naxis=2)
new_wcs.wcs.crpix = xp + 1, yp + 1
new_wcs.wcs.crval = 0., 0.
new_wcs.wcs.cdelt = proj_plane_pixel_scales(wcs)
new_wcs.wcs.ctype = 'XOFFSET', 'YOFFSET'
new_wcs.wcs.cunit = 'deg', 'deg'
return new_wcs
def savefig(fig, fname, fdir, **kwargs):
fpath = os.path.join(fdir, fname)
fig.savefig(fpath, **kwargs)
| mit | Python |
df12df3fd16baebcf9656b396d20d1a2bf4ddea0 | bump version | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | radar/__init__.py | radar/__init__.py | __version__ = '2.48.12_beta'
| __version__ = '2.48.11'
| agpl-3.0 | Python |
44460760f4f5f202477947fd1946209801846e82 | Fix name | DoublePlusGood23/lc-president-challenge | first_problem.py | first_problem.py | def getFizzBuzz(num):
if num % 5 == 0 and num % 3 == 0:
return 'fizzbuzz'
elif num % 3 == 0:
return 'fizz'
elif num % 5 == 0:
return 'buzz'
else:
return str(num)
num = eval(input())
print(getFizzBuzz(num))
| def getFizz(num):
if num % 5 == 0 and num % 3 == 0:
return 'fizzbuzz'
elif num % 3 == 0:
return 'fizz'
elif num % 5 == 0:
return 'buzz'
else:
return str(num)
num = eval(input())
print(getFizz(num)) | mit | Python |
c1b0cfe9fdfbacf71ffbba45b4a8f7efe3fe36a7 | Update wront port script to Python3 | kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat | docker/dev_wrong_port_warning.py | docker/dev_wrong_port_warning.py | #!/usr/bin/env python
"""
Per kobotoolbox/kobo-docker#301, we have changed the uWSGI port to 8001. This
provides a helpful message to anyone still trying to use port 8000
"""
import sys
from http.server import BaseHTTPRequestHandler, HTTPServer
class Handler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(503)
self.end_headers()
self.wfile.write(
b'Your development environment is trying to connect to the KoBoCAT '
b'container on port 8000 instead of 8001. Please change this. See '
b'https://github.com/kobotoolbox/kobo-docker/issues/301 '
b'for more details.'
)
server_address = ('', int(sys.argv[1]))
httpd = HTTPServer(server_address, Handler)
httpd.serve_forever()
| #!/usr/bin/env python
"""
Per kobotoolbox/kobo-docker#301, we have changed the uWSGI port to 8001. This
provides a helpful message to anyone still trying to use port 8000
"""
import BaseHTTPServer
import sys
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(503)
self.end_headers()
self.wfile.write(
'Your development environment is trying to connect to the KoBoCAT '
'container on port 8000 instead of 8001. Please change this. See '
'https://github.com/kobotoolbox/kobo-docker/issues/301 '
'for more details.'
)
server_address = ('', int(sys.argv[1]))
httpd = BaseHTTPServer.HTTPServer(server_address, Handler)
httpd.serve_forever()
| bsd-2-clause | Python |
a0ea737e40863067db90ca8c1d6d811ed0505d26 | Update run_dc.py | jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi | apps/deeplearning/darknet-rpi/run_dc.py | apps/deeplearning/darknet-rpi/run_dc.py | ## Crate by TJ, https://github.com/taijoon
import serial,os,time
import sys
import RPi.GPIO as GPIO
import picamera
import subprocess
import datetime
import os
# check pin location
gled = 19
rled = 26
# HW setup, GPIO
GPIO.cleanup()
GPIO.setmode(GPIO.BCM)
GPIO.setup(rled, GPIO.OUT)
GPIO.setup(gled, GPIO.OUT)
time.sleep(1)
def ledr_on():
GPIO.output(rled, True)
def ledr_off():
GPIO.output(rled, False)
def ledg_on():
GPIO.output(gled, True)
def ledg_off():
GPIO.output(gled, False)
# init LED OFF
ledr_off()
ledg_off()
time.sleep(1)
with picamera.PiCamera() as camera:
while True:
camera.start_preview()
ledg_on()
camera.capture('./now.jpg')
camera.stop_preview()
result = subprocess.check_output(['./darknet detector test cfg/coco.data cfg/yolov3-tiny.cfg yolov3-tiny.weights now.jpg'], shell=True)
percent = result.find("person")
now = datetime.datetime.now()
nowdatetime = now.strftime('%Y-%m-%d_%H:%M:%S')
i = 0
f_percent = 0.0
while percent > 0 :
sub = result[percent:]
percent2 = sub.find('%')
temp_f = float(sub[8:percent2])
if f_percent < temp_f :
f_percent = temp_f
print str(i) + ' Percent value : ' +str(f_percent)
else :
print str(i) + ' Percent low'
i = i + 1
result = sub[percent2:]
percent = result.find("person")
if f_percent > 0 : # Person Key word OK
if f_percent > 60 :
print "Person OK: " + str(f_percent)
#now = datetime.datetime.now()
#nowdatetime = now.strftime('%Y-%m-%d_%H:%M:%S')
cmd = 'cp -f now.jpg ./screenshot/'+nowdatetime+'.jpg'
print 'S:' + cmd
os.system(cmd)
#time.sleep(10)
else :
print "Person Not enough :" + str(f_percent)
cmd = 'cp -f predictions.jpg ./errorshot/'+nowdatetime+'.jpg'
print 'E1:' + cmd
os.system(cmd)
#time.sleep(10)
else :
print "Person is Not Here"
cmd = 'cp -f predictions.jpg ./errorshot/'+nowdatetime+'.jpg'
print 'E2:' + cmd
os.system(cmd)
ledg_off()
time.sleep(60)
GPIO.cleanup()
|
## Crate by TJ, https://github.com/taijoon
import serial,os,time
import sys
import RPi.GPIO as GPIO
import picamera
import subprocess
import datetime
import os
# check pin location
gled = 19
rled = 26
# HW setup, GPIO
GPIO.cleanup()
GPIO.setmode(GPIO.BCM)
GPIO.setup(rled, GPIO.OUT)
GPIO.setup(gled, GPIO.OUT)
time.sleep(1)
def ledr_on():
GPIO.output(rled, True)
def ledr_off():
GPIO.output(rled, False)
def ledg_on():
GPIO.output(gled, True)
def ledg_off():
GPIO.output(gled, False)
# init LED OFF
ledr_off()
ledg_off()
time.sleep(1)
with picamera.PiCamera() as camera:
while True:
camera.start_preview()
ledg_on()
camera.capture('./now.jpg')
camera.stop_preview()
result = subprocess.check_output(['./darknet detector test cfg/coco.data cfg/yolov3-tiny.cfg yolov3-tiny.weights now.jpg'], shell=True)
percent = result.find("person")
now = datetime.datetime.now()
nowdatetime = now.strftime('%Y-%m-%d_%H:%M:%S')
i = 0
f_percent = 0.0
while percent > 0 :
sub = result[percent:]
percent2 = sub.find('%')
temp_f = float(sub[8:percent2])
if f_percent < temp_f :
f_percent = temp_f
print str(i) + ' Percent value : ' +str(f_percent)
else :
print str(i) + ' Percent low'
i = i + 1
result = sub[percent2:]
percent = result.find("person")
if f_percent > 0 : # Person Key word OK
if f_percent > 60 :
print "Person OK: " + str(f_percent)
#now = datetime.datetime.now()
#nowdatetime = now.strftime('%Y-%m-%d_%H:%M:%S')
cmd = 'cp -f now.jpg ./screenshot/'+nowdatetime+'.jpg'
print 'S:' + cmd
os.system(cmd)
#time.sleep(10)
else :
print "Person Not enough :" + str(f_percent)
cmd = 'cp -f predictions.jpg ./errorshot/'+nowdatetime+'.jpg'
print 'E1:' + cmd
os.system(cmd)
#time.sleep(10)
else :
print "Person is Not Here"
cmd = 'cp -f predictions.jpg ./errorshot/'+nowdatetime+'.jpg'
print 'E2:' + cmd
os.system(cmd)
ledg_off()
time.sleep(60)
GPIO.cleanup()
| bsd-2-clause | Python |
e7ac7e44fb63c81909e287040fb76bd15dc65df0 | Set version as 1.1.1 | Alignak-monitoring-contrib/alignak-checks-nrpe,Alignak-monitoring-contrib/alignak-checks-nrpe | version.py | version.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Frederic Mohier, frederic.mohier@alignak.net
#
"""
Alignak - Checks pack for NRPE monitored Linux hosts/services
"""
# Package name
__pkg_name__ = u"alignak_checks_nrpe"
# Checks types for PyPI keywords
# Used for:
# - PyPI keywords
# - directory where to store files in the Alignak configuration (eg. arbiter/packs/checks_type)
__checks_type__ = u"nrpe"
# Application manifest
__version__ = u"1.1.1"
__author__ = u"Frédéric MOHIER"
__author_email__ = u"frederic.mohier@alignak.net"
__copyright__ = u"(c) 2015-2017 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__git_url__ = u"https://github.com/Alignak-monitoring-contrib/alignak-checks-linux-nrpe"
__doc_url__ = u"http://alignak-doc.readthedocs.io/en/latest"
__description__ = u"Alignak checks pack for Linux NRPE monitored hosts"
__classifiers__ = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Systems Administration'
]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Frederic Mohier, frederic.mohier@alignak.net
#
"""
Alignak - Checks pack for NRPE monitored Linux hosts/services
"""
# Package name
__pkg_name__ = u"alignak_checks_nrpe"
# Checks types for PyPI keywords
# Used for:
# - PyPI keywords
# - directory where to store files in the Alignak configuration (eg. arbiter/packs/checks_type)
__checks_type__ = u"nrpe"
# Application manifest
__version__ = u"1.1.0"
__author__ = u"Frédéric MOHIER"
__author_email__ = u"frederic.mohier@alignak.net"
__copyright__ = u"(c) 2015-2017 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__git_url__ = u"https://github.com/Alignak-monitoring-contrib/alignak-checks-linux-nrpe"
__doc_url__ = u"http://alignak-doc.readthedocs.io/en/latest"
__description__ = u"Alignak checks pack for Linux NRPE monitored hosts"
__classifiers__ = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Systems Administration'
]
| agpl-3.0 | Python |
04bc0dc59276dff6d8b19f691695a1d4300b8705 | fix feed | tdhopper/westminster-daily,tdhopper/westminster-daily,olneyhymn/westminster-daily,tdhopper/westminster-daily,olneyhymn/westminster-daily,olneyhymn/westminster-daily,olneyhymn/westminster-daily | generate_feed.py | generate_feed.py | from feedgen.feed import FeedGenerator
import datetime as dt
import pytz
from premailer import transform
import markdown
from functools import lru_cache
from bs4 import BeautifulSoup
URL = "https://pandoc--westminster-daily.netlify.com/westminster-daily"
FILENAME = "feed.rss"
NUMBER_OF_DAYS = 30
@lru_cache()
def markdown_parser(month, day):
with open(f"content/{month}/{day}.md", "r") as f:
md = f.read()
markdown_parser = markdown.Markdown(
extensions=["meta", "footnotes"],
extension_configs={"footnotes": {"BACKLINK_TEXT": ""}},
)
return markdown_parser, markdown_parser.convert(md)
def meta(month, day):
return markdown_parser(month, day)[0].Meta
def content(month, day):
md_as_html = markdown_parser(month, day)[1]
c = transform(md_as_html, preserve_internal_links=True)
soup = BeautifulSoup(c)
for a in soup.findAll("a"):
a.replaceWithChildren()
c = str(soup)
c = c[(c.find("body") + len("body>")) : -len("</body></html>")]
c = c.replace("\n", "")
c = c.replace("\xa0", " ")
return c
def main():
fg = FeedGenerator()
fg.id(f"{URL}/{FILENAME}")
fg.title("Westminster Daily")
fg.author({"name": "Westminster Daily"})
fg.link(href=URL, rel="alternate")
fg.subtitle("Read through the Westminster Confession and Catechisms in a year.")
fg.link(href=f"{URL}/{FILENAME}", rel="self")
fg.language("en")
now = dt.datetime.now(tz=pytz.timezone("US/Eastern"))
for date in (now - dt.timedelta(n) for n in reversed(range(NUMBER_OF_DAYS))):
date = date.replace(hour=0, minute=0, second=0, microsecond=0)
month = date.strftime("%m")
day = date.strftime("%d")
url = f"{URL}/{month}/{day}/"
fe = fg.add_entry()
fe.id(url)
fe.title(meta(month, day)["pagetitle"][0])
fe.link(href=url)
fe.guid(url, permalink=True)
fe.content(content(month, day), type="CDATA")
fe.updated(date)
fe.published(date)
fg.rss_file(FILENAME, pretty=True) # Write the RSS feed to a file
if __name__ == "__main__":
main()
| from feedgen.feed import FeedGenerator
import datetime as dt
import pytz
from premailer import transform
import markdown
from functools import lru_cache
from bs4 import BeautifulSoup
URL = "https://pandoc--westminster-daily.netlify.com/westminster-daily"
FILENAME = "feed.rss"
NUMBER_OF_DAYS = 30
@lru_cache()
def markdown_parser(month, day):
with open(f"content/{month}/{day}.md", "r") as f:
md = f.read()
markdown_parser = markdown.Markdown(
extensions=["meta", "footnotes"],
extension_configs={"footnotes": {"BACKLINK_TEXT": ""}},
)
return markdown_parser, markdown_parser.convert(md)
def meta(month, day):
return markdown_parser(month, day)[0].Meta
def content(month, day):
md_as_html = markdown_parser(month, day)[1]
c = transform(md_as_html, preserve_internal_links=True)
soup = BeautifulSoup(c)
for a in soup.findAll("a"):
a.replaceWithChildren()
c = str(soup)
c = c[(c.find("body") + len("body>")) : -len("</body></html>")]
c = c.replace("\n", "")
c = c.replace("\xa0", " ")
return str(soup)
def main():
fg = FeedGenerator()
fg.id(f"{URL}/{FILENAME}")
fg.title("Westminster Daily")
fg.author({"name": "Westminster Daily"})
fg.link(href=URL, rel="alternate")
fg.subtitle("Read through the Westminster Confession and Catechisms in a year.")
fg.link(href=f"{URL}/{FILENAME}", rel="self")
fg.language("en")
now = dt.datetime.now(tz=pytz.timezone("US/Eastern"))
for date in (now - dt.timedelta(n) for n in reversed(range(NUMBER_OF_DAYS))):
date = date.replace(hour=0, minute=0, second=0, microsecond=0)
month = date.strftime("%m")
day = date.strftime("%d")
url = f"{URL}/{month}/{day}/"
fe = fg.add_entry()
fe.id(url)
fe.title(meta(month, day)["pagetitle"][0])
fe.link(href=url)
fe.guid(url, permalink=True)
fe.content(content(month, day), type="CDATA")
fe.updated(date)
fe.published(date)
fg.rss_file(FILENAME, pretty=True) # Write the RSS feed to a file
if __name__ == "__main__":
main()
| bsd-3-clause | Python |
b256ed37b581b71c986772a3691d425d148400d9 | support iter_documents on cases | qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/form_processor/document_stores.py | corehq/form_processor/document_stores.py | from corehq.blobs import Error as BlobError
from corehq.form_processor.exceptions import CaseNotFound, XFormNotFound
from corehq.form_processor.interfaces.dbaccessors import FormAccessors, CaseAccessors
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyFormDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.form_accessors = FormAccessors(domain=domain)
def get_document(self, doc_id):
try:
return self.form_accessors.get_form(doc_id).to_json()
except (XFormNotFound, BlobError) as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
# todo: iterate over sql form IDs
raise NotImplementedError("You can't do this for SQL form data sources yet.")
class ReadonlyCaseDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.case_accessors = CaseAccessors(domain=domain)
def get_document(self, doc_id):
try:
return self.case_accessors.get_case(doc_id).to_json()
except CaseNotFound as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(CaseAccessors(self.domain).get_case_ids_in_domain())
def iter_documents(self, ids):
for wrapped_case in self.case_accessors.iter_cases(ids):
yield wrapped_case.to_json()
| from corehq.blobs import Error as BlobError
from corehq.form_processor.exceptions import CaseNotFound, XFormNotFound
from corehq.form_processor.interfaces.dbaccessors import FormAccessors, CaseAccessors
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyFormDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.form_accessors = FormAccessors(domain=domain)
def get_document(self, doc_id):
try:
return self.form_accessors.get_form(doc_id).to_json()
except (XFormNotFound, BlobError) as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
# todo: iterate over sql form IDs
raise NotImplementedError("You can't do this for SQL form data sources yet.")
class ReadonlyCaseDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.case_accessors = CaseAccessors(domain=domain)
def get_document(self, doc_id):
try:
return self.case_accessors.get_case(doc_id).to_json()
except CaseNotFound as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(CaseAccessors(self.domain).get_case_ids_in_domain())
| bsd-3-clause | Python |
a758175da8d2ebad0769fc45679fabe8ef7eacd8 | Bump version to 3.2-alpha | Shockblast/godot,ex/godot,Paulloz/godot,josempans/godot,godotengine/godot,vkbsb/godot,Faless/godot,Faless/godot,akien-mga/godot,Zylann/godot,Valentactive/godot,Shockblast/godot,vkbsb/godot,sanikoyes/godot,MarianoGnu/godot,Zylann/godot,Paulloz/godot,MarianoGnu/godot,Faless/godot,sanikoyes/godot,godotengine/godot,Zylann/godot,ex/godot,Valentactive/godot,DmitriySalnikov/godot,godotengine/godot,Valentactive/godot,ZuBsPaCe/godot,sanikoyes/godot,pkowal1982/godot,josempans/godot,sanikoyes/godot,Shockblast/godot,Valentactive/godot,MarianoGnu/godot,akien-mga/godot,guilhermefelipecgs/godot,sanikoyes/godot,josempans/godot,ex/godot,ex/godot,Zylann/godot,MarianoGnu/godot,Zylann/godot,vnen/godot,josempans/godot,vnen/godot,vkbsb/godot,vnen/godot,vnen/godot,honix/godot,Valentactive/godot,ZuBsPaCe/godot,Shockblast/godot,ZuBsPaCe/godot,BastiaanOlij/godot,pkowal1982/godot,BastiaanOlij/godot,firefly2442/godot,vkbsb/godot,guilhermefelipecgs/godot,firefly2442/godot,pkowal1982/godot,Paulloz/godot,firefly2442/godot,Shockblast/godot,Paulloz/godot,firefly2442/godot,akien-mga/godot,ZuBsPaCe/godot,Zylann/godot,godotengine/godot,pkowal1982/godot,BastiaanOlij/godot,MarianoGnu/godot,DmitriySalnikov/godot,josempans/godot,sanikoyes/godot,akien-mga/godot,Paulloz/godot,Shockblast/godot,guilhermefelipecgs/godot,pkowal1982/godot,guilhermefelipecgs/godot,akien-mga/godot,firefly2442/godot,vnen/godot,honix/godot,pkowal1982/godot,honix/godot,MarianoGnu/godot,honix/godot,vnen/godot,Faless/godot,Paulloz/godot,ex/godot,vkbsb/godot,akien-mga/godot,sanikoyes/godot,vnen/godot,BastiaanOlij/godot,BastiaanOlij/godot,DmitriySalnikov/godot,ZuBsPaCe/godot,josempans/godot,DmitriySalnikov/godot,firefly2442/godot,firefly2442/godot,pkowal1982/godot,Faless/godot,akien-mga/godot,Faless/godot,pkowal1982/godot,honix/godot,Faless/godot,Zylann/godot,DmitriySalnikov/godot,ex/godot,vkbsb/godot,DmitriySalnikov/godot,guilhermefelipecgs/godot,MarianoGnu/godot,aki
en-mga/godot,sanikoyes/godot,ex/godot,godotengine/godot,guilhermefelipecgs/godot,MarianoGnu/godot,Valentactive/godot,ex/godot,Shockblast/godot,Paulloz/godot,firefly2442/godot,ZuBsPaCe/godot,Shockblast/godot,BastiaanOlij/godot,ZuBsPaCe/godot,godotengine/godot,Zylann/godot,vnen/godot,josempans/godot,Valentactive/godot,Valentactive/godot,godotengine/godot,vkbsb/godot,godotengine/godot,DmitriySalnikov/godot,josempans/godot,Faless/godot,BastiaanOlij/godot,BastiaanOlij/godot,ZuBsPaCe/godot,guilhermefelipecgs/godot,vkbsb/godot,guilhermefelipecgs/godot,honix/godot | version.py | version.py | short_name = "godot"
name = "Godot Engine"
major = 3
minor = 2
status = "alpha"
module_config = ""
year = 2019
website = "https://godotengine.org"
| short_name = "godot"
name = "Godot Engine"
major = 3
minor = 2
status = "dev"
module_config = ""
year = 2019
website = "https://godotengine.org"
| mit | Python |
9014bcff07ad98035bfde6b46fb7c71c11762bd6 | remove prints | simonvh/genomepy | genomepy/base.py | genomepy/base.py | import os
import sys
import re
import norns
config = norns.config("genomepy", default="cfg/default.yaml")
class Plugin(object):
active = False
def name(self):
n = type(self).__name__.replace("Plugin", "")
return convert(n)
def activate(self):
self.active = True
def deactivate(self):
self.active = False
def after_genome_download(self, genome):
raise NotImplementedError("plugin should implement this method")
def get_properties(self, genome):
raise NotImplementedError("plugin should implement this method")
def find_plugins():
plugin_dir = os.path.dirname(os.path.realpath(__file__))
plugin_dir = os.path.join(plugin_dir, "plugins")
plugin_files = [x[:-3] for x in os.listdir(plugin_dir) if x.endswith(".py")]
sys.path.insert(0, plugin_dir)
for plugin in plugin_files:
bla = __import__(plugin)
def convert(name):
"""Convert CamelCase to underscore
Parameters
----------
name : str
Camelcase string
Returns
-------
name : str
Converted name
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def init_plugins():
find_plugins()
d = {}
for c in Plugin.__subclasses__():
ins = c()
if ins.name() in config.get("plugin", []):
ins.activate()
d[ins.name()] = ins
return d
def activate(name):
if name in plugins:
plugins[name].activate()
else:
raise Exception("plugin {} not found".format(name))
def deactivate(name):
if name in plugins:
plugins[name].deactivate()
else:
raise Exception("plugin {} not found".format(name))
def get_active_plugins():
for p,v in plugins.items():
return [inst for name, inst in plugins.items() if inst.active]
plugins = init_plugins()
| import os
import sys
import re
import norns
config = norns.config("genomepy", default="cfg/default.yaml")
class Plugin(object):
active = False
def name(self):
n = type(self).__name__.replace("Plugin", "")
return convert(n)
def activate(self):
self.active = True
def deactivate(self):
self.active = False
def after_genome_download(self, genome):
raise NotImplementedError("plugin should implement this method")
def get_properties(self, genome):
raise NotImplementedError("plugin should implement this method")
def find_plugins():
plugin_dir = os.path.dirname(os.path.realpath(__file__))
plugin_dir = os.path.join(plugin_dir, "plugins")
plugin_files = [x[:-3] for x in os.listdir(plugin_dir) if x.endswith(".py")]
sys.path.insert(0, plugin_dir)
for plugin in plugin_files:
bla = __import__(plugin)
def convert(name):
"""Convert CamelCase to underscore
Parameters
----------
name : str
Camelcase string
Returns
-------
name : str
Converted name
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def init_plugins():
find_plugins()
d = {}
print("I", config.config_file)
print(config.get("plugin" , []))
for c in Plugin.__subclasses__():
ins = c()
if ins.name() in config.get("plugin", []):
ins.activate()
d[ins.name()] = ins
return d
def activate(name):
if name in plugins:
plugins[name].activate()
else:
raise Exception("plugin {} not found".format(name))
def deactivate(name):
if name in plugins:
plugins[name].deactivate()
else:
raise Exception("plugin {} not found".format(name))
def get_active_plugins():
for p,v in plugins.items():
print(p, v.active)
return [inst for name, inst in plugins.items() if inst.active]
plugins = init_plugins()
| mit | Python |
823fd5f18a300ba919ab478bf94c3253dcbffd3a | Fix merge issue | iotile/python_iotile_cloud | version.py | version.py | version = '0.6.0'
| <<<<<<< HEAD
version = '0.6.0'
=======
version = '0.5.2'
>>>>>>> origin/master
| mit | Python |
81e3519e3f9927120c7a0ee32546607c1c40407f | patch rev bump | Applied-GeoSolutions/gippy,Applied-GeoSolutions/gippy,Applied-GeoSolutions/gippy | gippy/version.py | gippy/version.py | #!/usr/bin/env python
################################################################################
# GIPPY: Geospatial Image Processing library for Python
#
# AUTHOR: Matthew Hanson
# EMAIL: matt.a.hanson@gmail.com
#
# Copyright (C) 2015 Applied Geosolutions
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
__version__ = '0.3.11'
| #!/usr/bin/env python
################################################################################
# GIPPY: Geospatial Image Processing library for Python
#
# AUTHOR: Matthew Hanson
# EMAIL: matt.a.hanson@gmail.com
#
# Copyright (C) 2015 Applied Geosolutions
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
__version__ = '0.3.11'
| apache-2.0 | Python |
d16ed87c69f281a204ff8c46cca41112c82d1337 | Fix Global | MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI | EmeraldAI/Logic/Modules/Global.py | EmeraldAI/Logic/Modules/Global.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import platform
RootPath = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))).rstrip(os.sep) + os.sep
EmeraldPath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))).rstrip(os.sep) + os.sep
OS = platform.system().lower() # darwin (=osx) - windows - linux
class OperatingSystem():
Linux = "linux"
Windows = "windows"
OSX = "darwin"
def ReadDataFile(foldername, filename):
script_dir = EmeraldPath + \
"Data" + os.sep + foldername + os.sep + filename
return [line.rstrip('\n').rstrip('\r') for line in open(script_dir)]
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import platform
RootPath = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))).rstrip(os.sep) + os.sep
EmeraldPath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))).rstrip(os.sep) + os.sep
OS = platform.system().lower() # darwin (=osx) - windows - linux
class OperatingSystem():
Linux = "linux"
Windows = "windows"
OSX = "darwin"
def ReadDataFile(foldername, filename):
script_dir = EmeraldPath + \
"Data" + os.set + foldername + os.sep + filename
return [line.rstrip('\n').rstrip('\r') for line in open(script_dir)]
| apache-2.0 | Python |
d68db1bf08e1df17acd37f4aeca8958eaa48b2ab | switch back to development version | vgteam/toil-vg,vgteam/toil-vg | version.py | version.py | # Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '1.6.1a1'
required_versions = {'pyyaml': '>=5.1',
'tsv': '==1.2',
'scikit-learn': '==0.22.1',
'pyvcf': '==0.6.8',
'futures': '==3.1.1'}
dependency_links = []
| # Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '1.6.0'
required_versions = {'pyyaml': '>=5.1',
'tsv': '==1.2',
'scikit-learn': '==0.22.1',
'pyvcf': '==0.6.8',
'futures': '==3.1.1'}
dependency_links = []
| apache-2.0 | Python |
c25943811b3e821d6824d8c67ff1a4efb3580988 | Update __init__.py | walchko/pyxl320 | pyxl320/__init__.py | pyxl320/__init__.py | #!/usr/bin/env python
__version__ = '0.8.0'
__copyright__ = 'Copyright (c) 2016 Kevin Walchko'
__license__ = 'MIT'
__author__ = 'Kevin J. Walchko'
import Packet
from ServoSerial import ServoSerial, DummySerial
import utils
import xl320
| #!/usr/bin/env python
__version__ = '0.7.6'
__copyright__ = 'Copyright (c) 2016 Kevin Walchko'
__license__ = 'MIT'
__author__ = 'Kevin J. Walchko'
import Packet
from ServoSerial import ServoSerial, DummySerial
import utils
import xl320
# __doc__ = """
# pyxl320
# ========
#
# A python library to talk with Dynamixel XL-320 smart servos.
# """
| mit | Python |
ebcf97f786f7d3535bc7cb532a14fafa10161c86 | update model field | pyprism/Hiren-Git-Commit-Reminder,pyprism/Hiren-Git-Commit-Reminder | github/models.py | github/models.py | from django.db import models
# Create your models here.
class Hiren(models.Model):
access_token = models.CharField(max_length=200)
authorized = models.BooleanField(default=False)
class Counter(models.Model):
number = models.IntegerField
date = models.DateField(auto_now_add=True)
| from django.db import models
# Create your models here.
class Hiren(models.Model):
access_token = models.CharField(max_length=200)
authorized = models.BooleanField(default=False)
class Counter(models.Model):
number = models.BigIntegerField()
date = models.DateTimeField()
| mit | Python |
f6c8a005b497c896734623c2560d7692fae03fa9 | Increment provision version for upgradation of python dependencies. | blaze225/zulip,jrowan/zulip,vaidap/zulip,dawran6/zulip,timabbott/zulip,christi3k/zulip,brainwane/zulip,eeshangarg/zulip,amanharitsh123/zulip,hackerkid/zulip,andersk/zulip,j831/zulip,sonali0901/zulip,samatdav/zulip,mahim97/zulip,synicalsyntax/zulip,amanharitsh123/zulip,kou/zulip,brockwhittaker/zulip,brainwane/zulip,jrowan/zulip,JPJPJPOPOP/zulip,timabbott/zulip,shubhamdhama/zulip,brockwhittaker/zulip,showell/zulip,susansls/zulip,Galexrt/zulip,PhilSk/zulip,jainayush975/zulip,timabbott/zulip,verma-varsha/zulip,shubhamdhama/zulip,vabs22/zulip,hackerkid/zulip,dhcrzf/zulip,SmartPeople/zulip,punchagan/zulip,rht/zulip,isht3/zulip,isht3/zulip,jainayush975/zulip,jainayush975/zulip,tommyip/zulip,hackerkid/zulip,vabs22/zulip,eeshangarg/zulip,jphilipsen05/zulip,rishig/zulip,susansls/zulip,eeshangarg/zulip,jphilipsen05/zulip,rht/zulip,JPJPJPOPOP/zulip,ryanbackman/zulip,vabs22/zulip,blaze225/zulip,jackrzhang/zulip,susansls/zulip,PhilSk/zulip,blaze225/zulip,brainwane/zulip,jainayush975/zulip,kou/zulip,dhcrzf/zulip,aakash-cr7/zulip,andersk/zulip,verma-varsha/zulip,rishig/zulip,vaidap/zulip,j831/zulip,PhilSk/zulip,ryanbackman/zulip,punchagan/zulip,christi3k/zulip,vaidap/zulip,souravbadami/zulip,christi3k/zulip,showell/zulip,dawran6/zulip,Galexrt/zulip,rht/zulip,brockwhittaker/zulip,rht/zulip,blaze225/zulip,JPJPJPOPOP/zulip,jackrzhang/zulip,synicalsyntax/zulip,christi3k/zulip,eeshangarg/zulip,dawran6/zulip,punchagan/zulip,sonali0901/zulip,dawran6/zulip,aakash-cr7/zulip,sonali0901/zulip,dattatreya303/zulip,vabs22/zulip,timabbott/zulip,blaze225/zulip,tommyip/zulip,jackrzhang/zulip,jackrzhang/zulip,timabbott/zulip,shubhamdhama/zulip,andersk/zulip,verma-varsha/zulip,tommyip/zulip,amanharitsh123/zulip,showell/zulip,jainayush975/zulip,kou/zulip,timabbott/zulip,hackerkid/zulip,jphilipsen05/zulip,punchagan/zulip,susansls/zulip,sonali0901/zulip,PhilSk/zulip,jphilipsen05/zulip,souravb
adami/zulip,verma-varsha/zulip,verma-varsha/zulip,zulip/zulip,tommyip/zulip,j831/zulip,blaze225/zulip,mahim97/zulip,jrowan/zulip,dattatreya303/zulip,jrowan/zulip,eeshangarg/zulip,mahim97/zulip,kou/zulip,rishig/zulip,zulip/zulip,ryanbackman/zulip,j831/zulip,shubhamdhama/zulip,shubhamdhama/zulip,jainayush975/zulip,dattatreya303/zulip,vabs22/zulip,synicalsyntax/zulip,andersk/zulip,jphilipsen05/zulip,christi3k/zulip,showell/zulip,vabs22/zulip,dhcrzf/zulip,eeshangarg/zulip,mahim97/zulip,Galexrt/zulip,hackerkid/zulip,j831/zulip,rht/zulip,isht3/zulip,ryanbackman/zulip,vaidap/zulip,dattatreya303/zulip,jrowan/zulip,isht3/zulip,verma-varsha/zulip,kou/zulip,shubhamdhama/zulip,j831/zulip,zulip/zulip,JPJPJPOPOP/zulip,samatdav/zulip,dhcrzf/zulip,andersk/zulip,dattatreya303/zulip,mahim97/zulip,rht/zulip,synicalsyntax/zulip,amanharitsh123/zulip,samatdav/zulip,Galexrt/zulip,dhcrzf/zulip,kou/zulip,tommyip/zulip,aakash-cr7/zulip,ryanbackman/zulip,sharmaeklavya2/zulip,SmartPeople/zulip,aakash-cr7/zulip,andersk/zulip,SmartPeople/zulip,brockwhittaker/zulip,SmartPeople/zulip,samatdav/zulip,hackerkid/zulip,PhilSk/zulip,christi3k/zulip,ryanbackman/zulip,brockwhittaker/zulip,amanharitsh123/zulip,brainwane/zulip,sonali0901/zulip,dawran6/zulip,JPJPJPOPOP/zulip,sharmaeklavya2/zulip,Galexrt/zulip,aakash-cr7/zulip,Galexrt/zulip,SmartPeople/zulip,SmartPeople/zulip,zulip/zulip,vaidap/zulip,zulip/zulip,synicalsyntax/zulip,brainwane/zulip,brainwane/zulip,samatdav/zulip,jrowan/zulip,souravbadami/zulip,shubhamdhama/zulip,dhcrzf/zulip,timabbott/zulip,punchagan/zulip,tommyip/zulip,mahim97/zulip,vaidap/zulip,JPJPJPOPOP/zulip,dhcrzf/zulip,sonali0901/zulip,susansls/zulip,punchagan/zulip,Galexrt/zulip,rht/zulip,synicalsyntax/zulip,brainwane/zulip,isht3/zulip,souravbadami/zulip,souravbadami/zulip,rishig/zulip,souravbadami/zulip,PhilSk/zulip,amanharitsh123/zulip,tommyip/zulip,aakash-cr7/zulip,andersk/zulip,showell/zulip,jackrzhang/zulip,isht3/zulip,rishig/zulip,eeshangarg/zulip,jackrzhang/zulip,sharmaeklavya2/
zulip,dattatreya303/zulip,sharmaeklavya2/zulip,punchagan/zulip,jphilipsen05/zulip,showell/zulip,rishig/zulip,zulip/zulip,kou/zulip,susansls/zulip,hackerkid/zulip,sharmaeklavya2/zulip,brockwhittaker/zulip,zulip/zulip,synicalsyntax/zulip,rishig/zulip,jackrzhang/zulip,dawran6/zulip,samatdav/zulip,sharmaeklavya2/zulip,showell/zulip | version.py | version.py | ZULIP_VERSION = "1.4.1+git"
PROVISION_VERSION = '3.3'
| ZULIP_VERSION = "1.4.1+git"
PROVISION_VERSION = '3.2'
| apache-2.0 | Python |
6e002721023de4d5994a54c0d49f01d5c5ec1f86 | Add some keywords to default and c_mode | philipdexter/vx,philipdexter/vx | vx/mode.py | vx/mode.py | import vx
import os.path
def mode_from_filename(file):
root, ext = os.path.splitext(file)
ext = ext if ext else root
mode = None
if ext == '.c':
return c_mode
class mode:
def __init__(self, window):
self.breaks = ('_', ' ', '\n', '\t')
self.keywords = ()
class python_mode(mode):
def __init__(self, window):
super(python_mode, self).__init__(window)
self.breaks = ('_', ' ', '\n', '\t', '(', ')', '{', '}', '.', ',', '#')
self.keywords = ('return', 'for', 'while', 'break', 'continue', 'def')
class c_mode(mode):
def __init__(self, window):
super(c_mode, self).__init__(window)
self.breaks = ('_', ' ', '\n', '\t', '(', ')', '<', '>', '.', ',', '#')
self.keywords = ('#include', '#define', 'if', 'else', 'return', 'goto', 'break', 'continue')
| import vx
import os.path
def mode_from_filename(file):
root, ext = os.path.splitext(file)
ext = ext if ext else root
mode = None
if ext == '.c':
return c_mode
class mode:
def __init__(self, window):
self.breaks = ('_', ' ', '\n', '\t')
class python_mode(mode):
def __init__(self, window):
super(python_mode, self).__init__(window)
self.breaks = ('_', ' ', '\n', '\t', '(', ')', '{', '}', '.', ',', '#')
self.keywords = ('return', 'for', 'while', 'break', 'continue', 'def')
class c_mode(mode):
def __init__(self, window):
super(c_mode, self).__init__(window)
self.breaks = ('_', ' ', '\n', '\t', '(', ')', '<', '>', '.', ',', '#')
self.keywords = ('return', 'goto', 'break', 'continue')
| mit | Python |
302ee4e6c5fce43213556405851c48afc3c340db | implement effigies comments on PR 580 | INCF/pybids | bids/layout/tests/test_path_building.py | bids/layout/tests/test_path_building.py | import pytest
from bids.layout import BIDSLayout
from os.path import join, abspath, sep
from pathlib import Path
from bids.tests import get_test_data_path
@pytest.fixture(scope='module')
def layout():
data_dir = join(get_test_data_path(), '7t_trt')
return BIDSLayout(data_dir)
def test_bold_construction(layout):
ents = dict(subject='01', run=1, task='rest', suffix='bold')
relative = Path("sub-01") / "func" / "sub-01_task-rest_run-1_bold.nii.gz"
absolute = Path(layout.root) / relative
assert layout.build_path(ents, absolute_paths=False) == str(relative)
assert layout.build_path(ents, absolute_paths=True) == str(absolute)
# layout fixture created with `absolute_paths=True`, defaulting to absolute
assert layout.build_path(ents) == str(absolute)
def test_invalid_file_construction(layout):
# no hyphens allowed!
ents = dict(subject='01', run=1, task='resting-state', suffix='bold')
with pytest.raises(ValueError):
layout.build_path(ents)
target = "sub-01/func/sub-01_task-resting-state_run-1_bold.nii.gz"
assert layout.build_path(ents, validate=False, absolute_paths=False) == target
def test_failed_file_construction(layout):
ents = dict(subject='01', fakekey='foobar')
with pytest.raises(ValueError):
layout.build_path(ents, strict=True)
@pytest.mark.parametrize("strict", [True, False])
@pytest.mark.parametrize("validate", [True, False])
def test_insufficient_entities(layout, strict, validate):
"""Check https://github.com/bids-standard/pybids/pull/574#discussion_r366447600."""
with pytest.raises(ValueError):
layout.build_path({'subject': '01'}, strict=strict, validate=validate)
| import pytest
from bids.layout import BIDSLayout
from os.path import join, abspath, sep
from bids.tests import get_test_data_path
@pytest.fixture(scope='module')
def layout():
data_dir = join(get_test_data_path(), '7t_trt')
return BIDSLayout(data_dir)
def test_bold_construction(layout):
ents = dict(subject='01', run=1, task='rest', suffix='bold')
assert layout.build_path(ents, absolute_paths=False) \
== "sub-01/func/sub-01_task-rest_run-1_bold.nii.gz", \
"Check relative path"
assert layout.build_path(ents) \
== layout.root + "/sub-01/func/sub-01_task-rest_run-1_bold.nii.gz", \
"Check absolute (default) path"
ents['acquisition'] = 'random'
assert layout.build_path(ents, absolute_paths=False) \
== "sub-01/func/sub-01_task-rest_acq-random_run-1_bold.nii.gz"
def test_invalid_file_construction(layout):
# no hyphens allowed!
ents = dict(subject='01', run=1, task='resting-state', suffix='bold')
with pytest.raises(ValueError):
layout.build_path(ents)
target = "sub-01/func/sub-01_task-resting-state_run-1_bold.nii.gz"
assert layout.build_path(ents, validate=False, absolute_paths=False) \
== target
def test_failed_file_construction(layout):
ents = dict(subject='01', fakekey='foobar')
with pytest.raises(ValueError):
layout.build_path(ents, strict=True)
@pytest.mark.parametrize("strict", [True, False])
@pytest.mark.parametrize("validate", [True, False])
def test_insufficient_entities(layout, strict, validate):
"""Check https://github.com/bids-standard/pybids/pull/574#discussion_r366447600."""
with pytest.raises(ValueError):
layout.build_path({'subject': '01'}, strict=strict, validate=validate)
| mit | Python |
da11faa1540bed04d96336422d259de3847fcefe | add mail asserts | Larhard/Maildir-Notifier,Larhard/Maildir-Notifier | watcher.py | watcher.py | import pyinotify
import re
import notify
import mailbox
class MailEventHandler(pyinotify.ProcessEvent):
def my_init(self, maildir):
self.maildir = mailbox.Maildir(maildir)
assert self.maildir is not None
def process_IN_MOVED_TO(self, event):
self.new_mail_notify(event.name)
def process_IN_CREATE(self, event):
self.new_mail_notify(event.name)
def new_mail_notify(self, mail_path):
mail_id, *_ = mail_path.split(':')
mail = self.maildir.get(mail_id)
assert mail is not None
notify.send('new_mail', 'From: {}\nSubject: {}'.format(mail.get('From'), mail.get('Subject')))
def watch_maildir(maildir):
notify.init('mail notifier')
watch_manager = pyinotify.WatchManager()
handler = MailEventHandler(maildir=maildir)
notifier = pyinotify.Notifier(watch_manager, handler)
watch_manager.add_watch(maildir, pyinotify.IN_CREATE | pyinotify.IN_MOVED_TO, rec=True)
notifier.loop()
| import pyinotify
import re
import notify
import mailbox
class MailEventHandler(pyinotify.ProcessEvent):
def my_init(self, maildir):
self.maildir = mailbox.Maildir(maildir)
def process_IN_MOVED_TO(self, event):
self.new_mail_notify(event.name)
def process_IN_CREATE(self, event):
self.new_mail_notify(event.name)
def new_mail_notify(self, mail_path):
mail_id, *_ = mail_path.split(':')
mail = self.maildir.get(mail_id)
notify.send('new_mail', 'From: {}\nSubject: {}'.format(mail.get('From'), mail.get('Subject')))
def watch_maildir(maildir):
notify.init('mail notifier')
watch_manager = pyinotify.WatchManager()
handler = MailEventHandler(maildir=maildir)
notifier = pyinotify.Notifier(watch_manager, handler)
watch_manager.add_watch(maildir, pyinotify.IN_CREATE | pyinotify.IN_MOVED_TO, rec=True)
notifier.loop()
| mit | Python |
a085f8f3124b926e48e979e67194f2b2318a568c | Add blank line (#95) | GoogleCloudPlatform/ai-platform-samples,GoogleCloudPlatform/ai-platform-samples | quickstart/setup.py | quickstart/setup.py | #!/usr/bin/env python
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = [
'tensorflow==1.15.2',
'scikit-learn>=0.20.2',
'google-api-python-client',
]
setup(
name='trainer',
version='0.1',
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
description='AI Platform | Quick Start'
)
| #!/usr/bin/env python
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = [
'tensorflow==1.15.2',
'scikit-learn>=0.20.2',
'google-api-python-client',
]
setup(
name='trainer',
version='0.1',
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
description='AI Platform | Quick Start'
) | apache-2.0 | Python |
fee10efeae410a0bc51842877ef8ffb5fe8b97af | Add gtk implementation of open_file | natduca/trace_event_viewer,natduca/trace_event_viewer,natduca/trace_event_viewer | src/file_dialogs.py | src/file_dialogs.py | #!/usr/bin/env python
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import message_loop
def open_file():
message_loop.init_main_loop()
if message_loop.is_gtk:
import gtk
dlg = gtk.FileChooserDialog(title=None,action=gtk.FILE_CHOOSER_ACTION_SAVE,
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
flt = gtk.FileFilter()
flt.set_name("JSON files")
flt.add_pattern("*.json");
dlg.add_filter(flt)
flt = gtk.FileFilter()
flt.set_name("All files")
flt.add_pattern("*.*");
dlg.add_filter(flt)
resp = dlg.run()
if resp == gtk.RESPONSE_CANCEL:
dlg.destroy()
return None
f = dlg.get_filename()
dlg.destroy()
return f
elif message_loop.is_wx:
import wx
wc = "JSON files (*.json)|*.json|All files (*.*)|*.*"
fd = wx.FileDialog(None, "Open trace file...", style=wx.FD_OPEN, wildcard=wc)
res = fd.ShowModal()
if res != wx.ID_OK:
return None
return fd.GetPath()
else:
raise Exception("Not implemented.")
| #!/usr/bin/env python
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import message_loop
def open_file():
message_loop.init_main_loop()
if message_loop.is_gtk:
raise Exception("not implemented")
else:
import wx
wc = "JSON files (*.json)|*.json|All files (*.*)|*.*"
fd = wx.FileDialog(None, "Open trace file...", style=wx.FD_OPEN, wildcard=wc)
res = fd.ShowModal()
if res != wx.ID_OK:
return None
return fd.GetPath()
| apache-2.0 | Python |
494749fae544aac11c36c6d5277d1673de3140d7 | use pythonic way to test if a list is empty in mini solver (it's faster). | theonlydude/RandomMetroidSolver,theonlydude/RandomMetroidSolver,theonlydude/RandomMetroidSolver,theonlydude/RandomMetroidSolver,theonlydude/RandomMetroidSolver | rando/MiniSolver.py | rando/MiniSolver.py |
import log, random
from smboolmanager import SMBoolManager
class MiniSolver(object):
def __init__(self, startAP, areaGraph, restrictions):
self.startAP = startAP
self.areaGraph = areaGraph
self.restrictions = restrictions
self.settings = restrictions.settings
self.smbm = SMBoolManager()
self.log = log.get('MiniSolver')
# if True, does not mean it is actually beatable, unless you're sure of it from another source of information
# if False, it is certain it is not beatable
def isBeatable(self, itemLocations, maxDiff=None):
if maxDiff is None:
maxDiff = self.settings.maxDiff
locations = []
for il in itemLocations:
loc = il['Location']
if 'restricted' in loc and loc['restricted'] == True:
continue
loc['itemType'] = il['Item']['Type']
loc['difficulty'] = None
locations.append(loc)
self.smbm.resetItems()
ap = self.startAP
while True:
if not locations:
return True
self.areaGraph.getAvailableLocations(locations, self.smbm, maxDiff, ap)
toCollect = [loc for loc in locations if loc['difficulty'].bool == True and loc['difficulty'].difficulty <= maxDiff]
if not toCollect:
return False
self.smbm.addItems([loc['itemType'] for loc in toCollect])
for loc in toCollect:
locations.remove(loc)
# if len(locations) > 0:
# ap = random.choice([loc['accessPoint'] for loc in locations])
|
import log, random
from datetime import datetime
from smboolmanager import SMBoolManager
class MiniSolver(object):
def __init__(self, startAP, areaGraph, restrictions):
self.startAP = startAP
self.areaGraph = areaGraph
self.restrictions = restrictions
self.settings = restrictions.settings
self.smbm = SMBoolManager()
self.log = log.get('MiniSolver')
# if True, does not mean it is actually beatable, unless you're sure of it from another source of information
# if False, it is certain it is not beatable
def isBeatable(self, itemLocations, maxDiff=None):
if maxDiff is None:
maxDiff = self.settings.maxDiff
locations = []
for il in itemLocations:
loc = il['Location']
if 'restricted' in loc and loc['restricted'] == True:
continue
loc['itemType'] = il['Item']['Type']
loc['difficulty'] = None
locations.append(loc)
self.smbm.resetItems()
ap = self.startAP
while True:
if len(locations) == 0:
return True
self.areaGraph.getAvailableLocations(locations, self.smbm, maxDiff, ap)
toCollect = [loc for loc in locations if loc['difficulty'].bool == True and loc['difficulty'].difficulty <= maxDiff]
if len(toCollect) == 0:
return False
self.smbm.addItems([loc['itemType'] for loc in toCollect])
for loc in toCollect:
locations.remove(loc)
# if len(locations) > 0:
# ap = random.choice([loc['accessPoint'] for loc in locations])
| mit | Python |
c3f176c2d4f4c177679c2c7b5c308e245a91311a | Implement remark presentation | michaeljoseph/remarkable,michaeljoseph/remarkable,michaeljoseph/remarkable | remarkable/cli.py | remarkable/cli.py | """
remarkable.
Usage:
remarkable [options] another-command <param>
remarkable [options] remark <path-to-markdown-file>
remarkable -h | --help
Options:
--kw-arg=<kw> Keyword option description.
-b --boolean Boolean option description.
--debug Debug.
-h --help Show this screen.
"""
import logging
from docopt import docopt
from jinja2 import Environment, PackageLoader
import remarkable
log = logging.getLogger(__name__)
def main():
arguments = docopt(__doc__, version=remarkable.__version__)
debug = arguments['--debug']
logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)
log.debug('arguments: %s', arguments)
if arguments['remark']:
file_name = arguments['<path-to-markdown-file>']
html_file_name = '%s.html' % file_name
html = render_remark(open(file_name).read())
with open(html_file_name, 'w') as html_file:
html_file.write(html)
log.info('Created %s' % html_file_name)
def render_remark(markdown):
loader = PackageLoader('remarkable', 'templates')
env = Environment(loader=loader)
template = env.get_template('remark.html')
return template.render({'markdown': markdown})
| """
remarkable.
Usage:
remarkable [options] command <param> <another_params>
remarkable [options] another-command <param>
remarkable -h | --help
Options:
--kw-arg=<kw> Keyword option description.
-b --boolean Boolean option description.
--debug Debug.
-h --help Show this screen.
"""
from docopt import docopt
import logging
import remarkable
log = logging.getLogger(__name__)
def main():
arguments = docopt(__doc__, version=remarkable.__version__)
debug = arguments['--debug']
logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)
log.debug('arguments: %s', arguments)
| apache-2.0 | Python |
790427faccee15c4a398e340ffe11d0e1ee1488f | Update version.py | RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,beeva-fernandocerezal/rasa_nlu,RasaHQ/rasa_nlu,PHLF/rasa_nlu,PHLF/rasa_nlu,beeva-fernandocerezal/rasa_nlu | rasa_nlu/version.py | rasa_nlu/version.py | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
__version__ = '0.9.0a6'
| from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
__version__ = '0.9.0a5'
| apache-2.0 | Python |
361f28d341e30392cc69a1b0d4e538feee77f7c7 | Fix rainy weather condition code | admk/tmux-batteries | weather.py | weather.py | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import os
import sys
import urllib.request
import json
location = 'London'
celcius = True
precision = 1
emoji = True
def fetch(location, celcius=True):
unit = 'metric' if celcius else 'imperial'
weather_url = \
'http://api.openweathermap.org/data/2.5/weather?q=%s&units=%s' % \
(location, unit)
response = urllib.request.urlopen(weather_url).read()
return json.loads(response.decode('utf-8'))
def pictograph(json_str, use_emoji):
def is_daytime():
from datetime import datetime
return 6 <= datetime.now().hour < 18
_pictograph_dict = {
2: '☈⚡', # thunderstorm
3: '☂🌂', # drizzle
5: '☔☔', # rain
6: '❄⛄', # snow
7: '〰🌁', # mist/smoke/haze/sand/fog
8: '☁⛅', # clouds
9: '颶🌀', # extreme
# specials
800: ['☽☼', '🌜🌞'] # clear sky
}
code = json_str['weather'][0]['id']
if code not in json_str:
code = int(code / 100)
pict = _pictograph_dict[code][use_emoji]
if len(pict) != 1:
pict = pict[is_daytime()]
if use_emoji:
pict += ' '
return pict
def temperature(json_str):
return json_str['main']['temp']
def weather(location, celcius=True, precision=0):
location = os.environ.get('WEATHER_LOCATION') or location
celcius = os.environ.get('WEATHER_CELCIUS') or celcius
precision = os.environ.get('WEATHER_PRECISION') or precision
json_str = fetch(location, celcius)
unit = '℃' if celcius else '℉'
use_emoji = emoji and sys.platform == 'darwin'
return '{pictograph}{temperature:.{precision}f}{unit}'.format(
pictograph=pictograph(json_str, use_emoji), precision=precision,
temperature=temperature(json_str), unit=unit)
if __name__ == '__main__':
sys.stdout.write(weather(location, celcius, precision))
sys.stdout.flush()
| #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import os
import sys
import urllib.request
import json
location = 'London'
celcius = True
precision = 1
emoji = True
def fetch(location, celcius=True):
unit = 'metric' if celcius else 'imperial'
weather_url = \
'http://api.openweathermap.org/data/2.5/weather?q=%s&units=%s' % \
(location, unit)
response = urllib.request.urlopen(weather_url).read()
return json.loads(response.decode('utf-8'))
def pictograph(json_str, use_emoji):
def is_daytime():
from datetime import datetime
return 6 <= datetime.now().hour < 18
_pictograph_dict = {
2: '☈⚡', # thunderstorm
3: '☂🌂', # drizzle
4: '☔☔', # rain
6: '❄⛄', # snow
7: '〰🌁', # mist/smoke/haze/sand/fog
8: '☁⛅', # clouds
9: '颶🌀', # extreme
# specials
800: ['☽☼', '🌜🌞'] # clear sky
}
code = json_str['weather'][0]['id']
if code not in json_str:
code = int(code / 100)
pict = _pictograph_dict[code][use_emoji]
if len(pict) != 1:
pict = pict[is_daytime()]
if use_emoji:
pict += ' '
return pict
def temperature(json_str):
return json_str['main']['temp']
def weather(location, celcius=True, precision=0):
    """Return a formatted summary such as '☀21.5℃' for *location*.

    The WEATHER_LOCATION, WEATHER_CELCIUS and WEATHER_PRECISION environment
    variables override the corresponding arguments when set and non-empty.
    """
    location = os.environ.get('WEATHER_LOCATION') or location
    celcius_env = os.environ.get('WEATHER_CELCIUS')
    if celcius_env:
        # Previously any non-empty value (even "false" or "0") was truthy
        # and silently forced celcius on; parse common false-y spellings.
        celcius = celcius_env.strip().lower() not in ('0', 'false', 'no', 'off')
    precision_env = os.environ.get('WEATHER_PRECISION')
    if precision_env:
        # The env value is a string; validate it as an integer up front
        # instead of passing it straight into the format spec.
        precision = int(precision_env)
    json_str = fetch(location, celcius)
    unit = '℃' if celcius else '℉'
    use_emoji = emoji and sys.platform == 'darwin'
    return '{pictograph}{temperature:.{precision}f}{unit}'.format(
        pictograph=pictograph(json_str, use_emoji), precision=precision,
        temperature=temperature(json_str), unit=unit)
# Command-line entry point: print the summary for the module-level defaults
# (location/celcius/precision, each overridable via environment variables).
if __name__ == '__main__':
    sys.stdout.write(weather(location, celcius, precision))
    sys.stdout.flush()
| mit | Python |
2c744e5e18fcf43c4ca55244b3595ea9159eab5e | fix typo in get.perfetto.dev am: d03fd291f3 am: 86c5037948 am: c6e9282977 | google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto | infra/perfetto-get.appspot.com/main.py | infra/perfetto-get.appspot.com/main.py | # Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.api import memcache
from google.appengine.api import urlfetch
import webapp2
import base64
BASE = 'https://android.googlesource.com/platform/external/perfetto.git/' \
'+/master/%s?format=TEXT'
RESOURCES = {
'traceconv': 'tools/traceconv',
'trace_processor': 'tools/trace_processor',
}
class RedirectHandler(webapp2.RequestHandler):
  """Redirects any hit on the site root to the Perfetto homepage."""

  def get(self):
    # NOTE(review): error() sets the status and clears the body; together
    # with the Location header this acts as a 301 permanent redirect.
    self.error(301)
    self.response.headers['Location'] = 'https://www.perfetto.dev/'
class GitilesMirrorHandler(webapp2.RequestHandler):
  """Serves whitelisted files from the Perfetto repository via gitiles.

  Responses are cached in memcache for one hour; pass ?reload to bypass
  the cache and re-fetch from gitiles.
  """

  def get(self, resource):
    resource = resource.lower()
    if resource not in RESOURCES:
      self.error(404)
      self.response.out.write('Resource "%s" not found' % resource)
      return

    url = BASE % RESOURCES[resource]
    contents = memcache.get(url)
    if not contents or self.request.get('reload'):
      result = urlfetch.fetch(url)
      if result.status_code != 200:
        # Drop any stale cache entry and surface the upstream error code.
        memcache.delete(url)
        self.response.set_status(result.status_code)
        self.response.write(
            'http error %d while fetching %s' % (result.status_code, url))
        return
      # gitiles returns the file base64-encoded when format=TEXT (see BASE).
      contents = base64.b64decode(result.content)
      memcache.set(url, contents, time=3600)  # 1h

    self.response.headers['Content-Type'] = 'text/plain'
    self.response.headers['Content-Disposition'] = \
        'attachment; filename="%s"' % resource
    self.response.write(contents)
app = webapp2.WSGIApplication([
('/', RedirectHandler),
('/(.*)', GitilesMirrorHandler),
],
debug=True)
| # Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.api import memcache
from google.appengine.api import urlfetch
import webapp2
import base64
BASE = 'https://android.googlesource.com/platform/external/perfetto.git/' \
'+/master/%s?format=TEXT'
RESOURCES = {
'traceconv': 'tools/traceconv',
'trace_processor': 'tools/trace_processor',
}
class RedirectHandler(webapp2.RequestHandler):
  """Redirects any hit on the site root to the Perfetto homepage."""

  def get(self):
    # NOTE(review): error() sets the status and clears the body; together
    # with the Location header this acts as a 301 permanent redirect.
    self.error(301)
    self.response.headers['Location'] = 'https://www.perfetto.dev/'
class GitilesMirrorHandler(webapp2.RequestHandler):
  """Serves whitelisted files from the Perfetto repository via gitiles.

  Responses are cached in memcache for one hour; pass ?reload to bypass
  the cache and re-fetch from gitiles.
  """

  def get(self, resource):
    resource = resource.lower()
    if resource not in RESOURCES:
      self.error(404)
      # Fixed typo in the user-facing message ("Rerource" -> "Resource").
      self.response.out.write('Resource "%s" not found' % resource)
      return

    url = BASE % RESOURCES[resource]
    contents = memcache.get(url)
    if not contents or self.request.get('reload'):
      result = urlfetch.fetch(url)
      if result.status_code != 200:
        # Drop any stale cache entry and surface the upstream error code.
        memcache.delete(url)
        self.response.set_status(result.status_code)
        self.response.write(
            'http error %d while fetching %s' % (result.status_code, url))
        return
      # gitiles returns the file base64-encoded when format=TEXT (see BASE).
      contents = base64.b64decode(result.content)
      memcache.set(url, contents, time=3600)  # 1h

    self.response.headers['Content-Type'] = 'text/plain'
    self.response.headers['Content-Disposition'] = \
        'attachment; filename="%s"' % resource
    self.response.write(contents)
app = webapp2.WSGIApplication([
('/', RedirectHandler),
('/(.*)', GitilesMirrorHandler),
],
debug=True)
| apache-2.0 | Python |
900336adbbd41b87c71512f4109d6918988f2e4b | bump version | mgedmin/readme,pypa/readme,pypa/readme_renderer,sigmavirus24/readme | readme/__about__.py | readme/__about__.py | # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
# Package metadata, re-exported via ``readme.__about__``.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "readme"
__summary__ = ('readme is a library for rendering "readme" descriptions for '
               'Warehouse')
__uri__ = "https://github.com/pypa/readme"

__version__ = "0.4.1"

__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"

__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2014 %s" % __author__
| # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
# Package metadata, re-exported via ``readme.__about__``.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "readme"
__summary__ = ('readme is a library for rendering "readme" descriptions for '
               'Warehouse')
__uri__ = "https://github.com/pypa/readme"

__version__ = "0.4.0"

__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"

__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2014 %s" % __author__
| apache-2.0 | Python |
d642d545d3755fc8686c9da14d578b4af0bc8f3f | Remove deprecated get_model from tasks | fcurella/django-recommends,fcurella/django-recommends,python-recsys/django-recommends,python-recsys/django-recommends | recommends/tasks.py | recommends/tasks.py | from celery.task import task, periodic_task
from celery.schedules import crontab
from .utils import filelock
from .settings import RECOMMENDS_TASK_RUN, RECOMMENDS_TASK_CRONTAB, RECOMMENDS_TASK_EXPIRES
def recommends_precompute():
    """Run precompute() for every registered vote provider.

    A storage-level lock (or a file lock when the storage backend cannot
    lock) ensures only one precompute run happens at a time.  Returns the
    list of per-provider precompute results.
    """
    results = []
    from .providers import recommendation_registry

    # I know this is weird, but it's faster (tested on CPyhton 2.6.5)
    def _precompute(provider_instance):
        results.append(provider_instance.precompute())
    if recommendation_registry.storage.can_lock:
        locked = recommendation_registry.storage.get_lock()
        # NOTE(review): when the lock is already held, this silently returns
        # an empty list -- presumably intentional (another run is active).
        if locked:
            try:
                [_precompute(provider_instance) for provider_instance in recommendation_registry.get_vote_providers()]
            finally:
                recommendation_registry.storage.release_lock()
    else:
        with filelock('recommends_precompute.lock'):
            [_precompute(provider_instance)
             for provider_instance in recommendation_registry.get_vote_providers()]
    return results
# Register the periodic celery task only when enabled in settings, so that
# importing this module stays side-effect free on other instances.
if RECOMMENDS_TASK_RUN:
    @periodic_task(name='recommends_precompute', run_every=crontab(**RECOMMENDS_TASK_CRONTAB), expires=RECOMMENDS_TASK_EXPIRES)
    def _recommends_precompute():
        recommends_precompute()
@task(name='remove_suggestions')
def remove_suggestions(rated_model, object_id):
    """Delete stored recommendations for one object.

    Args:
        rated_model: dotted ``"app_label.ModelName"`` string.
        object_id: primary key of the instance to clear.
    """
    # The app registry lives at ``django.apps.apps``; ``django.db.models``
    # does not export ``apps``, so the previous import raised ImportError
    # when the task actually ran.
    from django.apps import apps
    from recommends.providers import recommendation_registry
    ObjectClass = apps.get_model(*rated_model.split('.'))
    provider_instance = recommendation_registry.get_provider_for_content(
        ObjectClass)
    obj = ObjectClass.objects.get(pk=object_id)
    provider_instance.storage.remove_recommendations(obj)
@task(name='remove_similarities')
def remove_similarities(rated_model, object_id):
    """Delete stored similarity entries for one object.

    Args:
        rated_model: dotted ``"app_label.ModelName"`` string.
        object_id: primary key of the instance to clear.
    """
    # The app registry lives at ``django.apps.apps``; ``django.db.models``
    # does not export ``apps``, so the previous import raised ImportError
    # when the task actually ran.
    from django.apps import apps
    from recommends.providers import recommendation_registry
    ObjectClass = apps.get_model(*rated_model.split('.'))
    provider_instance = recommendation_registry.get_provider_for_content(
        ObjectClass)
    obj = ObjectClass.objects.get(pk=object_id)
    provider_instance.storage.remove_similarities(obj)
| from celery.task import task, periodic_task
from celery.schedules import crontab
from .utils import filelock
from .settings import RECOMMENDS_TASK_RUN, RECOMMENDS_TASK_CRONTAB, RECOMMENDS_TASK_EXPIRES
def recommends_precompute():
    """Run precompute() for every registered vote provider.

    A storage-level lock (or a file lock when the storage backend cannot
    lock) ensures only one precompute run happens at a time.  Returns the
    list of per-provider precompute results.
    """
    results = []
    from .providers import recommendation_registry

    # I know this is weird, but it's faster (tested on CPyhton 2.6.5)
    def _precompute(provider_instance):
        results.append(provider_instance.precompute())
    if recommendation_registry.storage.can_lock:
        locked = recommendation_registry.storage.get_lock()
        # NOTE(review): when the lock is already held, this silently returns
        # an empty list -- presumably intentional (another run is active).
        if locked:
            try:
                [_precompute(provider_instance) for provider_instance in recommendation_registry.get_vote_providers()]
            finally:
                recommendation_registry.storage.release_lock()
    else:
        with filelock('recommends_precompute.lock'):
            [_precompute(provider_instance)
             for provider_instance in recommendation_registry.get_vote_providers()]
    return results
# Register the periodic celery task only when enabled in settings, so that
# importing this module stays side-effect free on other instances.
if RECOMMENDS_TASK_RUN:
    @periodic_task(name='recommends_precompute', run_every=crontab(**RECOMMENDS_TASK_CRONTAB), expires=RECOMMENDS_TASK_EXPIRES)
    def _recommends_precompute():
        recommends_precompute()
@task(name='remove_suggestions')
def remove_suggestions(rated_model, object_id):
    """Delete stored recommendations for one object.

    Args:
        rated_model: dotted ``"app_label.ModelName"`` string.
        object_id: primary key of the instance to clear.
    """
    # django.db.models.get_model was deprecated and later removed; resolve
    # the model through the application registry instead.
    from django.apps import apps
    from recommends.providers import recommendation_registry
    ObjectClass = apps.get_model(*rated_model.split('.'))
    provider_instance = recommendation_registry.get_provider_for_content(
        ObjectClass)
    obj = ObjectClass.objects.get(pk=object_id)
    provider_instance.storage.remove_recommendations(obj)
@task(name='remove_similarities')
def remove_similarities(rated_model, object_id):
    """Delete stored similarity entries for one object.

    Args:
        rated_model: dotted ``"app_label.ModelName"`` string.
        object_id: primary key of the instance to clear.
    """
    # django.db.models.get_model was deprecated and later removed; resolve
    # the model through the application registry instead.
    from django.apps import apps
    from recommends.providers import recommendation_registry
    ObjectClass = apps.get_model(*rated_model.split('.'))
    provider_instance = recommendation_registry.get_provider_for_content(
        ObjectClass)
    obj = ObjectClass.objects.get(pk=object_id)
    provider_instance.storage.remove_similarities(obj)
| mit | Python |
325aa95e39a7a581f58578ba64ea2b447f52e34a | update comment | tkerola/chainer,jnishi/chainer,okuta/chainer,wkentaro/chainer,hvy/chainer,okuta/chainer,hvy/chainer,hvy/chainer,keisuke-umezawa/chainer,chainer/chainer,chainer/chainer,jnishi/chainer,niboshi/chainer,jnishi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,pfnet/chainer,wkentaro/chainer,ktnyt/chainer,wkentaro/chainer,keisuke-umezawa/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ktnyt/chainer,niboshi/chainer,jnishi/chainer,chainer/chainer,okuta/chainer,niboshi/chainer,hvy/chainer,niboshi/chainer,okuta/chainer,keisuke-umezawa/chainer | tests/chainer_tests/conftest.py | tests/chainer_tests/conftest.py | import pytest
import chainerx
if not chainerx.is_available():
    # Skip all ChainerX tests if ChainerX is unavailable.
    # TODO(kmaehashi) This is an tentative fix. This file should be removed
    # once chainer-test supports ChainerX.
    # NOTE(review): rebinds the ``chainerx`` mark to ``skip`` for the whole
    # session; relies on pytest.mark accepting attribute assignment --
    # confirm on pytest upgrades.
    pytest.mark.chainerx = pytest.mark.skip
# testing.run_module(__name__, __file__)
| import pytest
import chainerx
if not chainerx.is_available():
    # Skip all ChainerX tests if it is unavailable.
    # TODO(kmaehashi) add `not chainerx` condition to chainer-test.
    # NOTE(review): rebinds the ``chainerx`` mark to ``skip`` for the whole
    # session, disabling every test marked ``chainerx``.
    pytest.mark.chainerx = pytest.mark.skip
# testing.run_module(__name__, __file__)
| mit | Python |
6104b111b4ceaec894018b77cbea4a0de31400d4 | Add name to the snapshot extension | hvy/chainer,jnishi/chainer,ktnyt/chainer,chainer/chainer,cupy/cupy,hvy/chainer,ktnyt/chainer,cupy/cupy,wkentaro/chainer,ysekky/chainer,kikusu/chainer,pfnet/chainer,jnishi/chainer,okuta/chainer,keisuke-umezawa/chainer,ktnyt/chainer,okuta/chainer,niboshi/chainer,niboshi/chainer,cupy/cupy,rezoo/chainer,chainer/chainer,hvy/chainer,delta2323/chainer,chainer/chainer,aonotas/chainer,wkentaro/chainer,keisuke-umezawa/chainer,tkerola/chainer,niboshi/chainer,niboshi/chainer,kashif/chainer,chainer/chainer,jnishi/chainer,keisuke-umezawa/chainer,kikusu/chainer,cupy/cupy,keisuke-umezawa/chainer,anaruse/chainer,okuta/chainer,wkentaro/chainer,jnishi/chainer,wkentaro/chainer,ronekko/chainer,ktnyt/chainer,kiyukuta/chainer,hvy/chainer,okuta/chainer | chainer/trainer/extensions/_snapshot.py | chainer/trainer/extensions/_snapshot.py | from chainer.serializers import npz
from chainer.trainer import extension
def snapshot(savefun=npz.save_npz,
             filename='snapshot_iter_{.updater.iteration}'):
    """Return a trainer extension to take snapshots of the trainer.

    This extension serializes the trainer object and saves it to the output
    directory. It is used to support resuming the training loop from the saved
    state.

    This extension is called once for each epoch by default.

    .. note::
       This extension first writes the serialized object to a temporary file
       and then rename it to the target file name. Thus, if the program stops
       right before the renaming, the temporary file might be left in the
       output directory.

    Args:
        savefun: Function to save the trainer. It takes two arguments: the
            output file path and the trainer object.
        filename (str): Name of the file into which the trainer is serialized.
            It can be a format string, where the trainer object is passed to
            the :meth:`str.format` method.

    """
    # ``os`` and ``tempfile`` were used below without ever being imported in
    # this module, so the returned extension raised NameError when invoked.
    import os
    import tempfile

    @extension.make_extension(name='snapshot', trigger=(1, 'epoch'))
    def ext(trainer):
        # Write into a temp file in the output directory and rename it into
        # place, so a partially written snapshot never gets the final name.
        fname = filename.format(trainer)
        fd, tmppath = tempfile.mkstemp(prefix=fname, dir=trainer.out)
        try:
            savefun(tmppath, trainer)
        finally:
            os.close(fd)
        os.rename(tmppath, os.path.join(trainer.out, fname))

    return ext
| from chainer.serializers import npz
from chainer.trainer import extension
def snapshot(savefun=npz.save_npz,
             filename='snapshot_iter_{.updater.iteration}'):
    """Return a trainer extension to take snapshots of the trainer.

    This extension serializes the trainer object and saves it to the output
    directory. It is used to support resuming the training loop from the saved
    state.

    This extension is called once for each epoch by default.

    .. note::
       This extension first writes the serialized object to a temporary file
       and then rename it to the target file name. Thus, if the program stops
       right before the renaming, the temporary file might be left in the
       output directory.

    Args:
        savefun: Function to save the trainer. It takes two arguments: the
            output file path and the trainer object.
        filename (str): Name of the file into which the trainer is serialized.
            It can be a format string, where the trainer object is passed to
            the :meth:`str.format` method.

    """
    # ``os`` and ``tempfile`` were used below without ever being imported in
    # this module, so the returned extension raised NameError when invoked.
    import os
    import tempfile

    @extension.make_extension(trigger=(1, 'epoch'))
    def ext(trainer):
        # Write into a temp file in the output directory and rename it into
        # place, so a partially written snapshot never gets the final name.
        fname = filename.format(trainer)
        fd, tmppath = tempfile.mkstemp(prefix=fname, dir=trainer.out)
        try:
            savefun(tmppath, trainer)
        finally:
            os.close(fd)
        os.rename(tmppath, os.path.join(trainer.out, fname))

    return ext
| mit | Python |
ab6b61d8d0b91ebc2d0b1b8cbd526cfbb6a45a42 | Check for a user from previously in the pipeline before checking for duplicate user. | chicagopython/chipy.org,tanyaschlusser/chipy.org,agfor/chipy.org,bharathelangovan/chipy.org,tanyaschlusser/chipy.org,chicagopython/chipy.org,brianray/chipy.org,chicagopython/chipy.org,tanyaschlusser/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,brianray/chipy.org,agfor/chipy.org,bharathelangovan/chipy.org,brianray/chipy.org,agfor/chipy.org | chipy_org/libs/social_auth_pipelines.py | chipy_org/libs/social_auth_pipelines.py | from django.contrib.auth import get_user_model
from django.utils.translation import ugettext
from social_auth.exceptions import AuthAlreadyAssociated
from social_auth.backends.pipeline.associate import associate_by_email as super_associate_by_email
def associate_by_email(*args, **kwargs):
    """Check if a user with this email already exists. If they do, don't create an account."""
    backend = kwargs['backend']
    if backend.name in ['google-oauth2', 'github'] or kwargs.get('user'):
        # We provide an exception here for users upgrading: these backends
        # (or a user already in the pipeline, i.e. linking an extra account)
        # defer to the default association behaviour.
        return super_associate_by_email(*args, **kwargs)
    email = kwargs['details'].get('email')
    if email:
        User = get_user_model()
        if User.objects.filter(email=email).exists():
            msg = ugettext('This email is already in use. First login with your other account and '
                           'under the top right menu click add account.')
            # NOTE(review): msg currently contains no %(provider)s
            # placeholder, so the substitution below is a no-op -- presumably
            # kept for translations that add one; confirm before removing.
            raise AuthAlreadyAssociated(backend, msg % {
                'provider': backend.name
            })
| from django.contrib.auth import get_user_model
from django.utils.translation import ugettext
from social_auth.exceptions import AuthAlreadyAssociated
from social_auth.backends.pipeline.associate import associate_by_email as super_associate_by_email
def associate_by_email(*args, **kwargs):
    """Check if a user with this email already exists. If they do, don't create an account."""
    backend = kwargs['backend']
    if backend.name in ['google-oauth2', 'github']:
        # We provide an exception here for users upgrading: these backends
        # defer to the default association behaviour.
        return super_associate_by_email(*args, **kwargs)
    email = kwargs['details'].get('email')
    if email:
        User = get_user_model()
        if User.objects.filter(email=email).exists():
            msg = ugettext('This email is already in use. First login with your other account and '
                           'under the top right menu click add account.')
            # NOTE(review): msg currently contains no %(provider)s
            # placeholder, so the substitution below is a no-op -- presumably
            # kept for translations that add one; confirm before removing.
            raise AuthAlreadyAssociated(backend, msg % {
                'provider': backend.name
            })
| mit | Python |
5ff5faa7b7d7edb0bd591e794fdb2dae113e5771 | Use cached_property | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/hqwebapp/async_handler.py | corehq/apps/hqwebapp/async_handler.py | import json
from django.http import HttpResponse, HttpRequest
from django.utils.functional import cached_property
class AsyncHandlerMixin(object):
    """
    To be mixed in with a TemplateView.

    Dispatches a POST whose 'handler' field names one of ``async_handlers``
    to the matching handler class.
    """
    # Handler classes (each exposing a unique ``slug``) served by this view.
    async_handlers = []

    @property
    def handler_slug(self):
        """Slug of the handler requested by the POSTed 'handler' field."""
        return self.request.POST.get('handler')

    def get_async_handler(self):
        """Instantiate the handler class matching the requested slug."""
        handlers_by_slug = {handler.slug: handler for handler in self.async_handlers}
        handler_class = handlers_by_slug[self.handler_slug]
        return handler_class(self.request)

    @cached_property
    def async_response(self):
        """Handler response for a recognised slug, else None (cached)."""
        known_slugs = {handler.slug for handler in self.async_handlers}
        if self.handler_slug in known_slugs:
            return self.get_async_handler().get_response()
class AsyncHandlerError(Exception):
    """Raised when an async handler request is malformed or disallowed."""
class BaseAsyncHandler(object):
    """
    Handles serving async responses for an ajax post request, say in a form.

    Usage:
    1) specify an allowed action slug in allowed actions
    2) implement the property <allowed action slug>_response, which returns a dict.

    example:
        allowed_actions = [
            'create'
        ]
        then implement

        @property
        def create_response(self):
            return {}
    """
    # Unique identifier used by AsyncHandlerMixin to route requests here.
    slug = None
    # Action slugs this handler will serve; anything else is rejected.
    allowed_actions = []

    def __init__(self, request):
        if not isinstance(request, HttpRequest):
            raise ValueError("request must be an HttpRequest.")
        self.request = request
        self.data = request.POST if request.method == 'POST' else request.GET
        self.action = self.data.get('action')

    def _fmt_error(self, error):
        # str(error) rather than error.message: BaseException.message was
        # deprecated in Python 2.6 and removed in Python 3, so the old
        # attribute access raised AttributeError while formatting errors.
        return json.dumps({
            'success': False,
            'error': str(error),
        })

    def _fmt_success(self, data):
        return json.dumps({
            'success': True,
            'data': data,
        })

    def get_action_response(self):
        """Return the JSON payload for the requested action.

        Raises:
            AsyncHandlerError: if the action is not in ``allowed_actions``.
        """
        if self.action not in self.allowed_actions:
            raise AsyncHandlerError("Action '%s' is not allowed." % self.action)
        response = getattr(self, '%s_response' % self.action)
        return self._fmt_success(response)

    def get_response(self):
        try:
            response = self.get_action_response()
        except AsyncHandlerError as e:
            response = self._fmt_error(e)
        # NOTE(review): TypeError is caught so errors raised inside the
        # <action>_response properties surface as JSON -- confirm this is
        # still the intended contract rather than a mask for bugs.
        except TypeError as e:
            response = self._fmt_error(e)
        return HttpResponse(response, content_type='application/json')
| import json
from django.http import HttpResponse, HttpRequest
from dimagi.utils.decorators.memoized import memoized
class AsyncHandlerMixin(object):
    """
    To be mixed in with a TemplateView.

    todo write better documentation on this (biyeun)
    """
    # Handler classes (each exposing a unique ``slug``) served by this view.
    async_handlers = []

    @property
    def handler_slug(self):
        # The POSTed 'handler' field selects which handler serves the call.
        return self.request.POST.get('handler')

    def get_async_handler(self):
        # Map slug -> handler class, then instantiate it for this request.
        handler_class = dict([(h.slug, h) for h in self.async_handlers])[self.handler_slug]
        return handler_class(self.request)

    @property
    @memoized
    def async_response(self):
        # Memoized so repeated accesses reuse the same HttpResponse; returns
        # None when the POST does not target a registered handler slug.
        if self.handler_slug in [h.slug for h in self.async_handlers]:
            return self.get_async_handler().get_response()
class AsyncHandlerError(Exception):
    """Raised when an async handler request is malformed or disallowed."""
class BaseAsyncHandler(object):
    """
    Handles serving async responses for an ajax post request, say in a form.

    Usage:
    1) specify an allowed action slug in allowed actions
    2) implement the property <allowed action slug>_response, which returns a dict.

    example:
        allowed_actions = [
            'create'
        ]
        then implement

        @property
        def create_response(self):
            return {}
    """
    # Unique identifier used by AsyncHandlerMixin to route requests here.
    slug = None
    # Action slugs this handler will serve; anything else is rejected.
    allowed_actions = []

    def __init__(self, request):
        if not isinstance(request, HttpRequest):
            raise ValueError("request must be an HttpRequest.")
        self.request = request
        self.data = request.POST if request.method == 'POST' else request.GET
        self.action = self.data.get('action')

    def _fmt_error(self, error):
        # str(error) rather than error.message: BaseException.message was
        # deprecated in Python 2.6 and removed in Python 3, so the old
        # attribute access raised AttributeError while formatting errors.
        return json.dumps({
            'success': False,
            'error': str(error),
        })

    def _fmt_success(self, data):
        return json.dumps({
            'success': True,
            'data': data,
        })

    def get_action_response(self):
        """Return the JSON payload for the requested action.

        Raises:
            AsyncHandlerError: if the action is not in ``allowed_actions``.
        """
        if self.action not in self.allowed_actions:
            raise AsyncHandlerError("Action '%s' is not allowed." % self.action)
        response = getattr(self, '%s_response' % self.action)
        return self._fmt_success(response)

    def get_response(self):
        try:
            response = self.get_action_response()
        except AsyncHandlerError as e:
            response = self._fmt_error(e)
        # NOTE(review): TypeError is caught so errors raised inside the
        # <action>_response properties surface as JSON -- confirm this is
        # still the intended contract rather than a mask for bugs.
        except TypeError as e:
            response = self._fmt_error(e)
        return HttpResponse(response, content_type='application/json')
| bsd-3-clause | Python |
cb19fccce26071378a445844e230b78456961af8 | bump up version | kratsg/ironman | ironman/__init__.py | ironman/__init__.py | __version__ = '0.2.17'
__all__ = ['communicator',
'hardware',
'history',
'interfaces',
'packet',
'server',
'utilities']
def engage(proto='udp'):
    ''' Fire thrusters.

    Starts a Twisted reactor serving the Ironman ServerFactory on port
    8888 over the given transport ('udp' or 'tcp').  Blocks until the
    reactor stops.
    '''
    from ironman.server import ServerFactory
    from twisted.internet import reactor
    from twisted.internet.defer import Deferred
    # Dispatches to reactor.listenUDP / reactor.listenTCP based on ``proto``.
    getattr(reactor, 'listen{0:s}'.format(proto.upper()))(8888, ServerFactory(proto, Deferred))
    reactor.run()
| __version__ = '0.2.16'
__all__ = ['communicator',
'hardware',
'history',
'interfaces',
'packet',
'server',
'utilities']
def engage(proto='udp'):
    ''' Fire thrusters.

    Starts a Twisted reactor serving the Ironman ServerFactory on port
    8888 over the given transport ('udp' or 'tcp').  Blocks until the
    reactor stops.
    '''
    from ironman.server import ServerFactory
    from twisted.internet import reactor
    from twisted.internet.defer import Deferred
    # Dispatches to reactor.listenUDP / reactor.listenTCP based on ``proto``.
    getattr(reactor, 'listen{0:s}'.format(proto.upper()))(8888, ServerFactory(proto, Deferred))
    reactor.run()
| mit | Python |
eb1e1cbe0b529e4c3ccc88809639deb1f2c354c5 | mark percentile tests as xfail, still not sure why this happens | maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex | tests/percentile_approx_test.py | tests/percentile_approx_test.py | import platform
import numpy as np
import vaex
import pytest
import sys
version = tuple(map(int, np.__version__.split('.')))
# The skipif marks work around a refcount issue observed only with
# numpy 1.17.0--1.18.1 on Windows and on Linux with CPython 3.6.
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'windows', reason="strange ref count issue with numpy")
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'linux' and sys.version_info[:2] == (3,6), reason="strange ref count issue with numpy")
@pytest.mark.xfail
def test_percentile_approx():
    df = vaex.example()

    # Simple test
    percentile = df.percentile_approx('z', percentage=99)
    expected_result = 15.1739
    np.testing.assert_almost_equal(percentile, expected_result, decimal=1)

    # Test for multiple percentages
    percentiles = df.percentile_approx('x', percentage=[25, 50, 75], percentile_shape=65536)
    expected_result = [-3.5992, -0.0367, 3.4684]
    np.testing.assert_array_almost_equal(percentiles, expected_result, decimal=1)

    # Test for multiple expressions
    percentiles_2d = df.percentile_approx(['x', 'y'], percentage=[33, 66])
    expected_result = np.array(([-2.3310, 1.9540], [-2.4313, 2.1021]))
    np.testing.assert_array_almost_equal(percentiles_2d, expected_result, decimal=1)
# Same numpy-refcount skip conditions as test_percentile_approx above.
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'windows', reason="strange ref count issue with numpy")
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'linux' and sys.version_info[:2] == (3,6), reason="strange ref count issue with numpy")
@pytest.mark.xfail
def test_percentile_1d():
    # Median of [0, 0, 10, 100, 200] is 10; only bound the approximation.
    x = np.array([0, 0, 10, 100, 200])
    df = vaex.from_arrays(x=x)
    median = df.median_approx(df.x)
    assert median < 10.

    x = np.array([0, 0, 90, 100, 200])
    df = vaex.from_arrays(x=x)
    median = df.median_approx(df.x)
    assert median > 90.

    # coverage test
    df = vaex.example()
    df.percentile_approx('x', percentage=80, binby=df.z, limits='minmax', shape=100)
| import platform
import numpy as np
import vaex
import pytest
import sys
version = tuple(map(int, np.__version__.split('.')))
# The skipif marks work around a refcount issue observed only with
# numpy 1.17.0--1.18.1 on Windows and on Linux with CPython 3.6.
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'windows', reason="strange ref count issue with numpy")
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'linux' and sys.version_info[:2] == (3,6), reason="strange ref count issue with numpy")
def test_percentile_approx():
    df = vaex.example()

    # Simple test
    percentile = df.percentile_approx('z', percentage=99)
    expected_result = 15.1739
    np.testing.assert_almost_equal(percentile, expected_result, decimal=1)

    # Test for multiple percentages
    percentiles = df.percentile_approx('x', percentage=[25, 50, 75], percentile_shape=65536)
    expected_result = [-3.5992, -0.0367, 3.4684]
    np.testing.assert_array_almost_equal(percentiles, expected_result, decimal=1)

    # Test for multiple expressions
    percentiles_2d = df.percentile_approx(['x', 'y'], percentage=[33, 66])
    expected_result = np.array(([-2.3310, 1.9540], [-2.4313, 2.1021]))
    np.testing.assert_array_almost_equal(percentiles_2d, expected_result, decimal=1)
# Same numpy-refcount skip conditions as test_percentile_approx above.
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'windows', reason="strange ref count issue with numpy")
@pytest.mark.skipif(((1,17,0) <= version <= (1,18,1)) and platform.system().lower() == 'linux' and sys.version_info[:2] == (3,6), reason="strange ref count issue with numpy")
def test_percentile_1d():
    # Median of [0, 0, 10, 100, 200] is 10; only bound the approximation.
    x = np.array([0, 0, 10, 100, 200])
    df = vaex.from_arrays(x=x)
    median = df.median_approx(df.x)
    assert median < 10.

    x = np.array([0, 0, 90, 100, 200])
    df = vaex.from_arrays(x=x)
    median = df.median_approx(df.x)
    assert median > 90.

    # coverage test
    df = vaex.example()
    df.percentile_approx('x', percentage=80, binby=df.z, limits='minmax', shape=100)
| mit | Python |
690fdb11ab2dfb7fc9e729de085d64101b31ec1d | increase test timeout | cladmi/RIOT,cladmi/RIOT,lazytech-org/RIOT,mfrey/RIOT,avmelnikoff/RIOT,x3ro/RIOT,yogo1212/RIOT,authmillenon/RIOT,kbumsik/RIOT,rfuentess/RIOT,aeneby/RIOT,mtausig/RIOT,jasonatran/RIOT,smlng/RIOT,BytesGalore/RIOT,yogo1212/RIOT,miri64/RIOT,aeneby/RIOT,OTAkeys/RIOT,basilfx/RIOT,BytesGalore/RIOT,toonst/RIOT,OTAkeys/RIOT,basilfx/RIOT,biboc/RIOT,kaspar030/RIOT,aeneby/RIOT,gebart/RIOT,ant9000/RIOT,mtausig/RIOT,miri64/RIOT,kbumsik/RIOT,josephnoir/RIOT,avmelnikoff/RIOT,A-Paul/RIOT,aeneby/RIOT,OlegHahm/RIOT,mtausig/RIOT,gebart/RIOT,smlng/RIOT,authmillenon/RIOT,kYc0o/RIOT,kaspar030/RIOT,josephnoir/RIOT,OlegHahm/RIOT,toonst/RIOT,basilfx/RIOT,BytesGalore/RIOT,toonst/RIOT,kYc0o/RIOT,rfuentess/RIOT,BytesGalore/RIOT,A-Paul/RIOT,basilfx/RIOT,kbumsik/RIOT,kYc0o/RIOT,kYc0o/RIOT,cladmi/RIOT,toonst/RIOT,jasonatran/RIOT,smlng/RIOT,mtausig/RIOT,basilfx/RIOT,OTAkeys/RIOT,yogo1212/RIOT,avmelnikoff/RIOT,A-Paul/RIOT,ant9000/RIOT,gebart/RIOT,cladmi/RIOT,lazytech-org/RIOT,mfrey/RIOT,mfrey/RIOT,yogo1212/RIOT,smlng/RIOT,miri64/RIOT,OTAkeys/RIOT,x3ro/RIOT,smlng/RIOT,lazytech-org/RIOT,biboc/RIOT,josephnoir/RIOT,RIOT-OS/RIOT,lazytech-org/RIOT,jasonatran/RIOT,kbumsik/RIOT,yogo1212/RIOT,ant9000/RIOT,rfuentess/RIOT,yogo1212/RIOT,lazytech-org/RIOT,OTAkeys/RIOT,biboc/RIOT,RIOT-OS/RIOT,kYc0o/RIOT,RIOT-OS/RIOT,mfrey/RIOT,avmelnikoff/RIOT,gebart/RIOT,BytesGalore/RIOT,biboc/RIOT,cladmi/RIOT,x3ro/RIOT,miri64/RIOT,A-Paul/RIOT,kaspar030/RIOT,aeneby/RIOT,authmillenon/RIOT,authmillenon/RIOT,A-Paul/RIOT,authmillenon/RIOT,authmillenon/RIOT,rfuentess/RIOT,x3ro/RIOT,OlegHahm/RIOT,mtausig/RIOT,miri64/RIOT,gebart/RIOT,RIOT-OS/RIOT,josephnoir/RIOT,kaspar030/RIOT,biboc/RIOT,x3ro/RIOT,kbumsik/RIOT,kaspar030/RIOT,rfuentess/RIOT,OlegHahm/RIOT,toonst/RIOT,OlegHahm/RIOT,jasonatran/RIOT,avmelnikoff/RIOT,RIOT-OS/RIOT,ant9000/RIOT,jasonatran/RIOT,ant9000/RIOT,mfrey/RIOT,josephnoir/RIOT | tests/unittests/tests/01-run.py | tests/unittests/tests/01-run.py | 
#!/usr/bin/env python3
# Copyright (C) 2016 Kaspar Schleiser <kaspar@schleiser.de>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import sys
def testfunc(child):
    # Expect the unittest summary line, e.g. "OK (42 tests)".
    child.expect(u"OK \\([0-9]+ tests\\)")


if __name__ == "__main__":
    # The testrunner lives under RIOT's tools tree, not on the default path.
    sys.path.append(os.path.join(os.environ['RIOTTOOLS'], 'testrunner'))
    from testrunner import run
    sys.exit(run(testfunc, timeout=120))
| #!/usr/bin/env python3
# Copyright (C) 2016 Kaspar Schleiser <kaspar@schleiser.de>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import sys
def testfunc(child):
child.expect(u"OK \\([0-9]+ tests\\)")
if __name__ == "__main__":
sys.path.append(os.path.join(os.environ['RIOTTOOLS'], 'testrunner'))
from testrunner import run
sys.exit(run(testfunc, timeout=60))
| lgpl-2.1 | Python |
2ab738e3d966402511d436fef88325542deac008 | remove debug print | yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD | lib/neuroimaging/__init__.py | lib/neuroimaging/__init__.py | """
Insert long description here.
"""
import re
from path import path
__version__ = "0.01a"
packages = (
'neuroimaging',
'neuroimaging.tests',
'neuroimaging.data',
'neuroimaging.data.tests',
'neuroimaging.fmri',
'neuroimaging.fmri.tests',
'neuroimaging.fmri.fmristat',
'neuroimaging.fmri.fmristat.tests',
'neuroimaging.image',
'neuroimaging.image.tests',
'neuroimaging.image.formats',
'neuroimaging.image.formats.tests',
'neuroimaging.refactoring',
'neuroimaging.refactoring.tests',
'neuroimaging.reference',
'neuroimaging.reference.tests',
'neuroimaging.statistics',
'neuroimaging.statistics.tests',
'neuroimaging.visualization',
'neuroimaging.visualization.cmap',
'neuroimaging.visualization.tests')
testmatch = re.compile(".*tests").search
nontest_packages = [p for p in packages if not testmatch(p)]
# modules to be pre-imported for convenience
_preload_modules = (
'neuroimaging.image.formats.analyze',
'neuroimaging.image.interpolation',
'neuroimaging.image.onesample',
'neuroimaging.image.regression',
'neuroimaging.reference.axis',
'neuroimaging.reference.coordinate_system',
'neuroimaging.reference.grid',
'neuroimaging.reference.grid_iterators',
'neuroimaging.reference.mapping',
'neuroimaging.reference.slices',
'neuroimaging.statistics.regression',
'neuroimaging.statistics.classification',
'neuroimaging.statistics.iterators',
'neuroimaging.statistics.contrast',
'neuroimaging.statistics.utils',
'neuroimaging.visualization.viewer',)
#-----------------------------------------------------------------------------
def ensuredirs(dir):
if not isinstance(dir, path): dir= path(dir)
if not dir.exists(): dir.makedirs()
#-----------------------------------------------------------------------------
def preload(packages=nontest_packages):
"""
Import the specified modules/packages (enabling fewer imports in client
scripts). By default, import all non-test packages:\n%s
and the following modules:\n%s
"""%("\n".join(nontest_packages),"\n".join(_preload_modules))
for package in packages: __import__(package, {}, {})
for module in _preload_modules: __import__(module, {}, {})
#-----------------------------------------------------------------------------
def import_from(modulename, objectname):
"Import and return objectname from modulename."
module = __import__(modulename, {}, {}, (objectname,))
try: return getattr(module, objectname)
except AttributeError: return None
# Always preload all packages. This should be removed as soon as the client
# scripts can be modified to call it themselves.
#preload()
| """
Insert long description here.
"""
import re
from path import path
__version__ = "0.01a"
packages = (
'neuroimaging',
'neuroimaging.tests',
'neuroimaging.data',
'neuroimaging.data.tests',
'neuroimaging.fmri',
'neuroimaging.fmri.tests',
'neuroimaging.fmri.fmristat',
'neuroimaging.fmri.fmristat.tests',
'neuroimaging.image',
'neuroimaging.image.tests',
'neuroimaging.image.formats',
'neuroimaging.image.formats.tests',
'neuroimaging.refactoring',
'neuroimaging.refactoring.tests',
'neuroimaging.reference',
'neuroimaging.reference.tests',
'neuroimaging.statistics',
'neuroimaging.statistics.tests',
'neuroimaging.visualization',
'neuroimaging.visualization.cmap',
'neuroimaging.visualization.tests')
testmatch = re.compile(".*tests").search
nontest_packages = [p for p in packages if not testmatch(p)]
# modules to be pre-imported for convenience
_preload_modules = (
'neuroimaging.image.formats.analyze',
'neuroimaging.image.interpolation',
'neuroimaging.image.onesample',
'neuroimaging.image.regression',
'neuroimaging.reference.axis',
'neuroimaging.reference.coordinate_system',
'neuroimaging.reference.grid',
'neuroimaging.reference.grid_iterators',
'neuroimaging.reference.mapping',
'neuroimaging.reference.slices',
'neuroimaging.statistics.regression',
'neuroimaging.statistics.classification',
'neuroimaging.statistics.iterators',
'neuroimaging.statistics.contrast',
'neuroimaging.statistics.utils',
'neuroimaging.visualization.viewer',)
#-----------------------------------------------------------------------------
def ensuredirs(dir):
if not isinstance(dir, path): dir= path(dir)
if not dir.exists(): dir.makedirs()
#-----------------------------------------------------------------------------
def preload(packages=nontest_packages):
"""
Import the specified modules/packages (enabling fewer imports in client
scripts). By default, import all non-test packages:\n%s
and the following modules:\n%s
"""%("\n".join(nontest_packages),"\n".join(_preload_modules))
for package in packages: __import__(package, {}, {})
for module in _preload_modules: __import__(module, {}, {})
#-----------------------------------------------------------------------------
def import_from(modulename, objectname):
"Import and return objectname from modulename."
print "import_from:",modulename,objectname
module = __import__(modulename, {}, {}, (objectname,))
try: return getattr(module, objectname)
except AttributeError: return None
# Always preload all packages. This should be removed as soon as the client
# scripts can be modified to call it themselves.
#preload()
| bsd-3-clause | Python |
00f7cc6eb910b6272b84ead68aeb221886c3d91c | Hide the YT call | pculture/unisubs,pculture/unisubs,ofer43211/unisubs,eloquence/unisubs,ReachingOut/unisubs,ofer43211/unisubs,wevoice/wesub,norayr/unisubs,ofer43211/unisubs,ReachingOut/unisubs,wevoice/wesub,ReachingOut/unisubs,eloquence/unisubs,eloquence/unisubs,norayr/unisubs,pculture/unisubs,ujdhesa/unisubs,norayr/unisubs,ujdhesa/unisubs,ofer43211/unisubs,eloquence/unisubs,pculture/unisubs,ujdhesa/unisubs,wevoice/wesub,ujdhesa/unisubs,wevoice/wesub,norayr/unisubs,ReachingOut/unisubs | apps/auth/templatetags/auth_tags.py | apps/auth/templatetags/auth_tags.py | # Amara, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django import template
from django.utils.translation import ugettext_lazy as _
register = template.Library()
@register.inclusion_tag('auth/_email_confirmation_notification.html', takes_context=True)
def email_confirmation_notification(context, force=False):
user = context['request'].user
content = ''
if user.is_authenticated():
if not user.email:
content = _(u'Fill email field, please.')
elif not user.valid_email:
content = _(u'Confirm your email, please.')
context['notification_content'] = content
return context
@register.filter
def show_youtube_prompt(request):
"""
Returns a boolean for whether to show the Youtube syncing prompt.
Current logic is that we show it for:
* unauthenticated visitors
* authenticated users who haven't synced a YT account and haven't
dismissed the prompt
"""
# We need to turn this off for now
return False
if request.COOKIES.get('hide-yt-prompt') == 'yes':
return False
user = request.user if request.user.is_authenticated() else None
if not user:
return True
accounts = user.third_party_accounts.all()
types = [a.get_type_display() for a in accounts]
if 'Youtube' not in types:
return True
else:
return False | # Amara, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django import template
from django.utils.translation import ugettext_lazy as _
register = template.Library()
@register.inclusion_tag('auth/_email_confirmation_notification.html', takes_context=True)
def email_confirmation_notification(context, force=False):
user = context['request'].user
content = ''
if user.is_authenticated():
if not user.email:
content = _(u'Fill email field, please.')
elif not user.valid_email:
content = _(u'Confirm your email, please.')
context['notification_content'] = content
return context
@register.filter
def show_youtube_prompt(request):
"""
Returns a boolean for whether to show the Youtube syncing prompt.
Current logic is that we show it for:
* unauthenticated visitors
* authenticated users who haven't synced a YT account and haven't
dismissed the prompt
"""
if request.COOKIES.get('hide-yt-prompt') == 'yes':
return False
user = request.user if request.user.is_authenticated() else None
if not user:
return True
accounts = user.third_party_accounts.all()
types = [a.get_type_display() for a in accounts]
if 'Youtube' not in types:
return True
else:
return False | agpl-3.0 | Python |
567feb08488d6e4bc58dca80ed736bbec8050b29 | change comment | totalvoice/totalvoice-python | totalvoice/cliente/api/validanumero.py | totalvoice/cliente/api/validanumero.py | # coding=utf-8
from __future__ import absolute_import
from .helper import utils
from .helper.routes import Routes
from totalvoice.cliente.api.totalvoice import Totalvoice
import json, requests
class ValidaNumero(Totalvoice):
def __init__(self, cliente):
super(ValidaNumero, self).__init__(cliente)
def get_valida_numero(self, id):
"""
:Descrição:
Função para buscar as informações de um ValidaNumero
:Utilização:
get_valida_numero(id)
:Parâmetros:
- id:
ID da do ValidaNumero.
"""
host = self.build_host(self.cliente.host, Routes.VALIDA_NUMERO, [str(id)])
return self.get_request(host)
def criar(self, numero_destino):
"""
:Descrição:
Função para criar um ValidaNumero que irá validar se o número
fornecido é um número ativo ou inativo.
:Utilização:
criar(numero_destino)
:Parâmetros:
- numero_destino:
Número do telefone que será validado.
"""
host = self.build_host(self.cliente.host, Routes.VALIDA_NUMERO)
data = {}
data.update({"numero_destino" : numero_destino})
data = json.dumps(data)
response = requests.post(host, headers=utils.build_header(self.cliente.access_token), data=data)
return response.content
def get_relatorio(self, data_inicio, data_fim):
"""
:Descrição:
Função para pegar o relatório de validaNumero.
:Utilização:
get_relatorio(data_inicio, data_fim)
:Parâmetros:
- data_inicio:
Data início do relatório (2016-03-30T17:15:59-03:00)
format UTC
- data_fim:
Data final do relatório (2016-03-30T17:15:59-03:00)
format UTC
"""
host = self.build_host(self.cliente.host, Routes.VALIDA_NUMERO, ["relatorio"])
params = (('data_inicio', data_inicio),('data_fim', data_fim),)
return self.get_request(host, params)
| # coding=utf-8
from __future__ import absolute_import
from .helper import utils
from .helper.routes import Routes
from totalvoice.cliente.api.totalvoice import Totalvoice
import json, requests
class ValidaNumero(Totalvoice):
def __init__(self, cliente):
super(ValidaNumero, self).__init__(cliente)
def get_valida_numero(self, id):
"""
:Descrição:
Função para buscar as informações de um ValidaNumero
:Utilização:
get_valida_numero(id)
:Parâmetros:
- id:
ID da do ValidaNumero.
"""
host = self.build_host(self.cliente.host, Routes.VALIDA_NUMERO, [str(id)])
return self.get_request(host)
def criar(self, numero_destino):
"""
:Descrição:
Função para criar um ValidaNumero que irá validar se o número
fornecido é um número ativo ou inativo.
:Utilização:
criar(numero_destino)
:Parâmetros:
- numero_destino:
Número do telefone que será validado.
"""
host = self.build_host(self.cliente.host, Routes.VALIDA_NUMERO)
data = {}
data.update({"numero_destino" : numero_destino})
data = json.dumps(data)
response = requests.post(host, headers=utils.build_header(self.cliente.access_token), data=data)
return response.content
def get_relatorio(self, data_inicio, data_fim):
"""
:Descrição:
Função para pegar o relatório de compostos.
:Utilização:
get_relatorio(data_inicio, data_fim)
:Parâmetros:
- data_inicio:
Data início do relatório (2016-03-30T17:15:59-03:00)
format UTC
- data_fim:
Data final do relatório (2016-03-30T17:15:59-03:00)
format UTC
"""
host = self.build_host(self.cliente.host, Routes.VALIDA_NUMERO, ["relatorio"])
params = (('data_inicio', data_inicio),('data_fim', data_fim),)
return self.get_request(host, params)
| mit | Python |
3e7847fcfb33dd0aa06fea2ccbf02f5c07e2b5ea | Enable webrtc on non-android since those tests don't seem to fail. | axinging/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,hgl888/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1 | tools/perf/benchmarks/webrtc.py | tools/perf/benchmarks/webrtc.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import webrtc
import page_sets
from telemetry import benchmark
# http://crbug.com/501383
# http://crbug.com/508344
@benchmark.Disabled('android')
class WebRTC(perf_benchmark.PerfBenchmark):
"""Obtains WebRTC metrics for a real-time video tests."""
test = webrtc.WebRTC
page_set = page_sets.WebrtcCasesPageSet
@classmethod
def Name(cls):
return 'webrtc.webrtc_cases'
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import webrtc
import page_sets
from telemetry import benchmark
@benchmark.Disabled # http://crbug.com/501383
class WebRTC(perf_benchmark.PerfBenchmark):
"""Obtains WebRTC metrics for a real-time video tests."""
test = webrtc.WebRTC
page_set = page_sets.WebrtcCasesPageSet
@classmethod
def Name(cls):
return 'webrtc.webrtc_cases'
| bsd-3-clause | Python |
1da94e9512944f2cdb33c9f3520ebc0216026950 | Update file helpers | NejcZupec/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core | src/lib/file_ops.py | src/lib/file_ops.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
import os
import logging
try:
import yaml
except ImportError:
pass
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that
# ends with os.sep
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def load_yaml_contents(file_path):
logger.info("Loading yaml: %s" % file_path)
with open(file_path) as f:
contents = yaml.load(f)
return contents
def get_unique_postfix(file_path, extension):
postfix = 0
new_path = file_path + str(postfix) + extension
while os.path.isfile(new_path):
postfix += 1
new_path = file_path + str(postfix) + extension
return new_path
def delete_directory_contents(path):
for file_name in os.listdir(path):
os.remove(path + os.sep + file_name)
| import os
import logging
try:
import yaml
except ImportError:
pass
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that ends with os.sep.
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def load_yaml_contents(file_path):
logger.info("Loading yaml: %s" % file_path)
with open(file_path) as f:
contents = yaml.load(f)
return contents
| apache-2.0 | Python |
47e91a816e196ff357e348bc8fd02819bf00059a | remove useless broker conf in dev. | soasme/rio,soasme/rio,soasme/rio | rio/settings/dev.py | rio/settings/dev.py | # -*- coding: utf-8 -*-
DEBUG = True
SECRET_KEY = 'OOxdXBtiwPHGpjxaACWvzpYCbDhBmaYk'
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/rio.db'
CELERY_BROKER_URL = 'redis://localhost/2'
CELERY_RESULT_BACKEND = 'db+' + SQLALCHEMY_DATABASE_URI
| # -*- coding: utf-8 -*-
DEBUG = True
SECRET_KEY = 'OOxdXBtiwPHGpjxaACWvzpYCbDhBmaYk'
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/rio.db'
BROKER_URL = 'redis://localhost/2'
CELERY_BROKER_URL = 'redis://localhost/2'
CELERY_RESULT_BACKEND = 'db+' + SQLALCHEMY_DATABASE_URI
| mit | Python |
418faae8f7e1052e3a198ef7e69dff55deb7d141 | Add max password length check. | alex-march/micropython,turbinenreiter/micropython,hosaka/micropython,ryannathans/micropython,tobbad/micropython,tralamazza/micropython,matthewelse/micropython,alex-march/micropython,selste/micropython,hiway/micropython,MrSurly/micropython-esp32,HenrikSolver/micropython,SHA2017-badge/micropython-esp32,pfalcon/micropython,ganshun666/micropython,alex-robbins/micropython,lowRISC/micropython,MrSurly/micropython,cwyark/micropython,bvernoux/micropython,kerneltask/micropython,ganshun666/micropython,mpalomer/micropython,hiway/micropython,PappaPeppar/micropython,mpalomer/micropython,bvernoux/micropython,misterdanb/micropython,redbear/micropython,emfcamp/micropython,SHA2017-badge/micropython-esp32,alex-robbins/micropython,cwyark/micropython,ryannathans/micropython,AriZuu/micropython,chrisdearman/micropython,pramasoul/micropython,hosaka/micropython,selste/micropython,pramasoul/micropython,deshipu/micropython,pozetroninc/micropython,cwyark/micropython,Timmenem/micropython,puuu/micropython,tobbad/micropython,drrk/micropython,SHA2017-badge/micropython-esp32,cwyark/micropython,blazewicz/micropython,pfalcon/micropython,dinau/micropython,bvernoux/micropython,tuc-osg/micropython,adafruit/micropython,praemdonck/micropython,micropython/micropython-esp32,dmazzella/micropython,dxxb/micropython,pfalcon/micropython,chrisdearman/micropython,pozetroninc/micropython,lowRISC/micropython,mhoffma/micropython,matthewelse/micropython,drrk/micropython,hosaka/micropython,pramasoul/micropython,Timmenem/micropython,AriZuu/micropython,blazewicz/micropython,turbinenreiter/micropython,mpalomer/micropython,pfalcon/micropython,redbear/micropython,adafruit/circuitpython,misterdanb/micropython,torwag/micropython,dinau/micropython,tralamazza/micropython,tobbad/micropython,MrSurly/micropython-esp32,TDAbboud/micropython,pramasoul/micropython,alex-march/micropython,praemdonck/micropython,ryannathans/micropython,dxxb/micropython,mhoffma/mi
cropython,matthewelse/micropython,MrSurly/micropython,toolmacher/micropython,drrk/micropython,Peetz0r/micropython-esp32,lowRISC/micropython,deshipu/micropython,pozetroninc/micropython,trezor/micropython,oopy/micropython,infinnovation/micropython,redbear/micropython,chrisdearman/micropython,infinnovation/micropython,adafruit/micropython,hiway/micropython,emfcamp/micropython,pfalcon/micropython,ganshun666/micropython,Timmenem/micropython,deshipu/micropython,jmarcelino/pycom-micropython,ganshun666/micropython,dmazzella/micropython,henriknelson/micropython,puuu/micropython,adafruit/micropython,praemdonck/micropython,micropython/micropython-esp32,Peetz0r/micropython-esp32,alex-march/micropython,AriZuu/micropython,swegener/micropython,puuu/micropython,turbinenreiter/micropython,micropython/micropython-esp32,henriknelson/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython-esp32,infinnovation/micropython,pramasoul/micropython,misterdanb/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,MrSurly/micropython,MrSurly/micropython,torwag/micropython,adafruit/circuitpython,deshipu/micropython,infinnovation/micropython,dxxb/micropython,AriZuu/micropython,turbinenreiter/micropython,tuc-osg/micropython,selste/micropython,PappaPeppar/micropython,adafruit/circuitpython,tuc-osg/micropython,puuu/micropython,lowRISC/micropython,tuc-osg/micropython,tobbad/micropython,AriZuu/micropython,kerneltask/micropython,MrSurly/micropython,praemdonck/micropython,kerneltask/micropython,jmarcelino/pycom-micropython,misterdanb/micropython,HenrikSolver/micropython,blazewicz/micropython,puuu/micropython,trezor/micropython,HenrikSolver/micropython,hosaka/micropython,oopy/micropython,turbinenreiter/micropython,swegener/micropython,adafruit/circuitpython,dxxb/micropython,adafruit/micropython,TDAbboud/micropython,tuc-osg/micropython,PappaPeppar/micropython,kerneltask/micropython,ganshun666/micropython,mhoffma/micropython,hiway/micropython
,mpalomer/micropython,chrisdearman/micropython,bvernoux/micropython,hiway/micropython,MrSurly/micropython-esp32,adafruit/micropython,mhoffma/micropython,drrk/micropython,dinau/micropython,selste/micropython,oopy/micropython,dmazzella/micropython,toolmacher/micropython,HenrikSolver/micropython,swegener/micropython,Peetz0r/micropython-esp32,torwag/micropython,TDAbboud/micropython,praemdonck/micropython,TDAbboud/micropython,SHA2017-badge/micropython-esp32,kerneltask/micropython,mhoffma/micropython,pozetroninc/micropython,emfcamp/micropython,hosaka/micropython,pozetroninc/micropython,alex-robbins/micropython,tobbad/micropython,Peetz0r/micropython-esp32,TDAbboud/micropython,mpalomer/micropython,micropython/micropython-esp32,ryannathans/micropython,swegener/micropython,alex-robbins/micropython,cwyark/micropython,adafruit/circuitpython,toolmacher/micropython,dxxb/micropython,dmazzella/micropython,toolmacher/micropython,redbear/micropython,ryannathans/micropython,jmarcelino/pycom-micropython,dinau/micropython,oopy/micropython,trezor/micropython,henriknelson/micropython,redbear/micropython,emfcamp/micropython,alex-robbins/micropython,adafruit/circuitpython,blazewicz/micropython,swegener/micropython,henriknelson/micropython,deshipu/micropython,tralamazza/micropython,blazewicz/micropython,torwag/micropython,matthewelse/micropython,matthewelse/micropython,emfcamp/micropython,jmarcelino/pycom-micropython,Timmenem/micropython,henriknelson/micropython,drrk/micropython,bvernoux/micropython,toolmacher/micropython,jmarcelino/pycom-micropython,trezor/micropython,infinnovation/micropython,HenrikSolver/micropython,Timmenem/micropython,dinau/micropython,misterdanb/micropython,torwag/micropython,trezor/micropython,lowRISC/micropython,matthewelse/micropython,oopy/micropython,tralamazza/micropython,micropython/micropython-esp32,alex-march/micropython,MrSurly/micropython-esp32,selste/micropython | esp8266/scripts/webrepl_setup.py | esp8266/scripts/webrepl_setup.py | import sys
import socket
import time
from websocket import *
import websocket_helper
def setup_server():
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
ai = socket.getaddrinfo("0.0.0.0", 8266)
addr = ai[0][4]
s.bind(addr)
s.listen(1)
return s
def getpass(stream, prompt):
stream.write(prompt)
passwd = b""
while 1:
c = stream.read(1)
if c in (b"\r", b"\n"):
stream.write("\r\n")
return passwd
passwd += c
stream.write("*")
def handle_conn(listen_sock):
cl, remote_addr = listen_sock.accept()
print("""
First-time WebREPL connection has been received. WebREPL initial setup
will now start over this connection. During setup, UART REPL will be
non-responsive. After setup finishes, the board will be rebooted. In
case of error during setup, current session will continue.
If you receive this message unexpectedly, it may mean that your WebREPL
connection is being hacked (power off board if unsure).
""")
websocket_helper.server_handshake(cl)
ws = websocket(cl)
ws.write("""\
Welcome to MicroPython WebREPL!\r
\r
This is the first time you connect to WebREPL, so please set a password\r
to use for the following WebREPL sessions. Once you enter the password\r
twice, your board will reboot with WebREPL running in active mode. On\r
some boards, you may need to press reset button or reconnect power.\r
\r
""")
while 1:
passwd1 = getpass(ws, "New password: ")
if len(passwd1) < 4:
ws.write("Password too short\r\n")
continue
elif len(passwd1) > 9:
ws.write("Password too long\r\n")
continue
passwd2 = getpass(ws, "Confirm password: ")
if passwd1 == passwd2:
break
ws.write("Passwords do not match\r\n")
with open("port_config.py", "w") as f:
f.write("WEBREPL_PASS = %r\n" % passwd1.decode("ascii"))
ws.write("Password successfully set, restarting...\r\n")
cl.close()
time.sleep(2)
import machine
machine.reset()
def test():
s = setup_server()
handle_conn(s)
| import sys
import socket
import time
from websocket import *
import websocket_helper
def setup_server():
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
ai = socket.getaddrinfo("0.0.0.0", 8266)
addr = ai[0][4]
s.bind(addr)
s.listen(1)
return s
def getpass(stream, prompt):
stream.write(prompt)
passwd = b""
while 1:
c = stream.read(1)
if c in (b"\r", b"\n"):
stream.write("\r\n")
return passwd
passwd += c
stream.write("*")
def handle_conn(listen_sock):
cl, remote_addr = listen_sock.accept()
print("""
First-time WebREPL connection has been received. WebREPL initial setup
will now start over this connection. During setup, UART REPL will be
non-responsive. After setup finishes, the board will be rebooted. In
case of error during setup, current session will continue.
If you receive this message unexpectedly, it may mean that your WebREPL
connection is being hacked (power off board if unsure).
""")
websocket_helper.server_handshake(cl)
ws = websocket(cl)
ws.write("""\
Welcome to MicroPython WebREPL!\r
\r
This is the first time you connect to WebREPL, so please set a password\r
to use for the following WebREPL sessions. Once you enter the password\r
twice, your board will reboot with WebREPL running in active mode. On\r
some boards, you may need to press reset button or reconnect power.\r
\r
""")
while 1:
passwd1 = getpass(ws, "New password: ")
if len(passwd1) < 4:
ws.write("Password too short\r\n")
continue
passwd2 = getpass(ws, "Confirm password: ")
if passwd1 == passwd2:
break
ws.write("Passwords do not match\r\n")
with open("port_config.py", "w") as f:
f.write("WEBREPL_PASS = %r\n" % passwd1.decode("ascii"))
ws.write("Password successfully set, restarting...\r\n")
cl.close()
time.sleep(2)
import machine
machine.reset()
def test():
s = setup_server()
handle_conn(s)
| mit | Python |
5b69d2666b7f51d1b25bbdbcaad615e33e4e099d | Fix crash when enemy in sensor tower range + support destructible rocksu | Dentosal/python-sc2 | sc2/game_state.py | sc2/game_state.py | from .units import Units
from .power_source import PsionicMatrix
from .pixel_map import PixelMap
class Common(object):
ATTRIBUTES = [
"player_id",
"minerals", "vespene",
"food_cap", "food_used",
"food_army", "food_workers",
"idle_worker_count", "army_count",
"warp_gate_count", "larva_count"
]
def __init__(self, proto):
self._proto = proto
def __getattr__(self, attr):
assert attr in self.ATTRIBUTES, f"'{attr}' is not a valid attribute"
return int(getattr(self._proto, attr))
class GameState(object):
def __init__(self, observation, game_data):
self.common = Common(observation.observation.player_common)
self.psionic_matrix = PsionicMatrix.from_proto(observation.observation.raw_data.player.power_sources)
self.game_loop = observation.observation.game_loop
destructables = [x for x in observation.observation.raw_data.units if x.alliance == 3 and x.radius > 1.5] # all destructable rocks except the one below the main base ramps
self.destructables = Units.from_proto(destructables, game_data)
# fix for enemy units detected by sensor tower
visibleUnits, hiddenUnits = [], []
for u in observation.observation.raw_data.units:
hiddenUnits.append(u) if u.is_blip else visibleUnits.append(u)
self.units = Units.from_proto(visibleUnits, game_data)
# self.blips = Units.from_proto(hiddenUnits, game_data) # TODO: fix me
@property
def mineral_field(self):
return self.units.mineral_field
@property
def vespene_geyser(self):
return self.units.vespene_geyser
| from .units import Units
from .power_source import PsionicMatrix
class Common(object):
ATTRIBUTES = [
"player_id",
"minerals", "vespene",
"food_cap", "food_used",
"food_army", "food_workers",
"idle_worker_count", "army_count",
"warp_gate_count", "larva_count"
]
def __init__(self, proto):
self._proto = proto
def __getattr__(self, attr):
assert attr in self.ATTRIBUTES, f"'{attr}' is not a valid attribute"
return int(getattr(self._proto, attr))
class GameState(object):
def __init__(self, observation, game_data):
self.common = Common(observation.observation.player_common)
self.units = Units.from_proto(observation.observation.raw_data.units, game_data)
self.psionic_matrix = PsionicMatrix.from_proto(observation.observation.raw_data.player.power_sources)
self.game_loop = observation.observation.game_loop
@property
def mineral_field(self):
return self.units.mineral_field
@property
def vespene_geyser(self):
return self.units.vespene_geyser
| mit | Python |
bd5978083cdd096fa1f83e43a4e8dbb36cd5f2a7 | Clean up the lunar heightmap | Cynerva/jttcotm | states/lunar_surface.py | states/lunar_surface.py | import math
import random
import pygame
from Box2D import *
import backgrounds
from camera import Camera
from player import Player
from heightmap import heightmap_1d
from config import screen_height
from debug import draw_body
class LunarSurface(object):
def __init__(self):
""" Good god what is this even """
self.background = backgrounds.LunarSurface()
self.camera = Camera()
self.world = b2World(gravity=(0, -100), doSleep=True)
heightmap = heightmap_1d(12)
for x in range(4096):
heightmap[x] *= 128.0
for x in range(1024, 4096):
heightmap[x] += (((x - 2048) / 1024.0) ** 2.0 - 1.0) * 1024.0
ground_shapes = []
for x in range(4096/16-1):
left = heightmap[x*16]
right = heightmap[x*16+16]
ground_shapes.append(b2PolygonShape(vertices=(
(x * 16, left),
(x * 16, left - 16),
(x * 16 + 16, right - 16),
(x * 16 + 16, right)
)))
self.ground = self.world.CreateStaticBody(
position=(0, 0),
shapes=ground_shapes
)
self.player = Player(self.world, (0, 500))
def update(self, delta):
pygame.event.pump() # TODO: remove this
self.background.update(delta)
self.world.Step(delta, 8, 8)
self.world.ClearForces()
self.player.update(delta)
self.camera.x, self.camera.y = self.player.pos
def render(self, screen):
self.background.render(screen, self.camera)
self.player.render(screen, self.camera)
draw_body(self.ground, screen, self.camera)
| import math
import pygame
from Box2D import *
import backgrounds
from camera import Camera
from player import Player
from heightmap import heightmap_1d
from debug import draw_body
class LunarSurface(object):
    """Game state for the lunar surface: physics world, terrain, player and camera."""

    def __init__(self):
        """Build the Box2D world, carve a crater-shaped terrain, and spawn the player."""
        self.background = backgrounds.LunarSurface()
        self.camera = Camera()
        self.world = b2World(gravity=(0, -100), doSleep=True)
        # 1-D fractal heightmap; presumably 2**8 = 256 samples (matches the
        # loop bound below) -- TODO confirm against heightmap_1d().
        heightmap = heightmap_1d(8)
        ground_shapes = []
        heightmap[0] *= 128.0
        # NOTE(review): sample 0 is scaled but never receives the parabolic
        # crater offset applied inside the loop, so the first segment's left
        # edge is discontinuous with the rest of the terrain.
        for x in range(1, 256):
            heightmap[x] *= 128.0
            # Parabolic depression centred at x == 128 (depth 1024 at the centre).
            heightmap[x] += (((x - 128.0) / 128.0) ** 2 - 1.0) * 1024.0
            left = heightmap[x - 1]
            right = heightmap[x]
            # One quad ground segment per adjacent sample pair: 10 units wide,
            # 20 units thick.
            ground_shapes.append(b2PolygonShape(
                vertices=(
                    (x*10-10, left),
                    (x*10-10, left-20),
                    (x*10, right-20),
                    (x*10, right)
                )
            ))
        # Single static body holding every terrain segment.
        self.ground = self.world.CreateStaticBody(
            position=(0, 0),
            shapes=ground_shapes
        )
        self.player = Player(self.world, (0, 500))

    def update(self, delta):
        """Advance the simulation by *delta* seconds and track the player with the camera."""
        pygame.event.pump() # TODO: remove this
        self.background.update(delta)
        self.world.Step(delta, 8, 8)
        self.world.ClearForces()
        self.player.update(delta)
        self.camera.x, self.camera.y = self.player.pos

    def render(self, screen):
        """Draw the background, a debug outline of the ground, then the player."""
        self.background.render(screen, self.camera)
        draw_body(self.ground, screen, self.camera)
        self.player.render(screen, self.camera)
| bsd-3-clause | Python |
b1f8e376f343bfad5c740f2dce1a0fbc066d3b01 | Add command line args for the pH. | dmccloskey/component-contribution,dmccloskey/component-contribution | scripts/calc_E.py | scripts/calc_E.py | # -*- coding: utf-8 -*-
"""
Created on Mon Jan 25th 2015
@author: flamholz
"""
from component_contribution.component_contribution_trainer import ComponentContribution
from component_contribution.kegg_reaction import KeggReaction
from component_contribution.thermodynamic_constants import F, default_T
import argparse
import csv
import numpy as np
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=
        'Calculate reduction potentials for a number of reactions.')
    parser.add_argument('infile', type=argparse.FileType(),
                        help='path to input file containing a list of reactions')
    parser.add_argument('outfile', type=argparse.FileType('w'),
                        help='path to output file')
    # BUGFIX: ionic strength is a fractional quantity in molar units
    # (default 0.2 M); parsing it with type=int rejected values such as
    # "0.2" on the command line.  The pH bounds may be fractional too, so
    # parse them as floats as well (integers remain accepted).
    parser.add_argument('--ionic_strength', default=0.2, type=float,
                        help='ionic strength in molar units.')
    parser.add_argument('--pH_min', default=5, type=float,
                        help='lowest pH to produce E0 for.')
    parser.add_argument('--pH_max', default=9, type=float,
                        help='highest pH to produce E0 for.')
    parser.add_argument('--pH_step', default=0.05, type=float,
                        help='pH increment.')
    args = parser.parse_args()

    I = args.ionic_strength
    T = default_T
    cc = ComponentContribution.init()
    # Inclusive pH grid from pH_min to pH_max in steps of pH_step.
    pHs = np.arange(args.pH_min, args.pH_max + args.pH_step,
                    args.pH_step)

    reactions_and_energies = []
    reader = csv.reader(args.infile)
    for row in reader:
        formula = row[0].strip()
        reaction = KeggReaction.parse_formula(formula)
        # A half-reaction must balance exactly once electrons are removed.
        reaction_atom_bag = reaction._get_reaction_atom_bag()
        n_e = reaction_atom_bag.pop('e-', 0)
        if len(reaction_atom_bag) != 0:
            raise ValueError('This is not a half-reaction'
                             ' (i.e. cannot be balanced by adding e-)')
        dG0_r, u_r = cc.get_dG0_r(reaction)
        E0s = []
        for pH in pHs:
            # Transform dG0 to the given pH/I/T, then convert to a reduction
            # potential in millivolts via dG = -n*F*E.
            ddG0_r = reaction.get_transform_ddG0(pH=pH, I=I, T=T)
            dG0_r_prime = dG0_r + ddG0_r
            E0_prime = 1000 * -dG0_r_prime / (n_e*F) # mV
            E0s.append(E0_prime)
        reactions_and_energies.append((row, E0s))

    header = ['reaction', 'reaction_description', 'E\'m', 'Type', 'Source']
    pH_header = ['pH %.1f (mV)' % pH for pH in pHs]
    header += pH_header
    writer = csv.writer(args.outfile)
    writer.writerow(header)
    for rxn_data, pH_E0 in reactions_and_energies:
        energies_fmted = ['%.2f' % e for e in pH_E0]
        writer.writerow(rxn_data + energies_fmted)
| # -*- coding: utf-8 -*-
"""
Created on Mon Jan 25th 2015
@author: flamholz
"""
from component_contribution.component_contribution_trainer import ComponentContribution
from component_contribution.kegg_reaction import KeggReaction
from component_contribution.thermodynamic_constants import F, default_T
import argparse
import csv
import numpy as np
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=
        'Calculate reduction potentials for a number of reactions.')
    parser.add_argument('infile', type=argparse.FileType(),
                        help='path to input file containing a list of reactions')
    parser.add_argument('outfile', type=argparse.FileType('w'),
                        help='path to output file')
    # NOTE(review): ionic strength is fractional (default 0.2 M) but is
    # declared type=int, so any value passed on the command line such as
    # "0.2" fails to parse -- this should be type=float.
    parser.add_argument('--ionic_strength', default=0.2, type=int,
                        help='ionic strength in molar units.')
    args = parser.parse_args()
    I = args.ionic_strength
    T = default_T
    cc = ComponentContribution.init()
    # Hard-coded pH grid: 6.0 to 9.0 inclusive in steps of 0.2.
    pHs = np.arange(6, 9.1, 0.2)
    reactions_and_energies = []
    reader = csv.reader(args.infile)
    for row in reader:
        formula = row[0].strip()
        reaction = KeggReaction.parse_formula(formula)
        # A half-reaction must balance exactly once electrons are removed.
        reaction_atom_bag = reaction._get_reaction_atom_bag()
        n_e = reaction_atom_bag.pop('e-', 0)
        if len(reaction_atom_bag) != 0:
            raise ValueError('This is not a half-reaction'
                             ' (i.e. cannot be balanced by adding e-)')
        dG0_r, u_r = cc.get_dG0_r(reaction)
        E0s = []
        for pH in pHs:
            # Transform dG0 to the given pH/I/T, then convert to a reduction
            # potential in millivolts via dG = -n*F*E.
            ddG0_r = reaction.get_transform_ddG0(pH=pH, I=I, T=T)
            dG0_r_prime = dG0_r + ddG0_r
            E0_prime = 1000 * -dG0_r_prime / (n_e*F) # mV
            E0s.append(E0_prime)
        reactions_and_energies.append((row, E0s))
    header = ['reaction', 'reaction_description', 'E\'m', 'Type', 'Source']
    pH_header = ['pH %.1f (mV)' % pH for pH in pHs]
    header += pH_header
    writer = csv.writer(args.outfile)
    writer.writerow(header)
    for rxn_data, pH_E0 in reactions_and_energies:
        energies_fmted = ['%.2f' % e for e in pH_E0]
        writer.writerow(rxn_data + energies_fmted)
| mit | Python |
4016dbec6a2ea903bc8162ea41d77c3ae811a6f0 | fix pylint warnings in rdp module | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/modules/rdp.py | salt/modules/rdp.py | # -*- coding: utf-8 -*-
'''
Manage RDP Service on Windows servers
'''
# Import python libs
import re
# Import salt libs
import salt.utils
POWERSHELL = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
def __virtual__():
    '''
    Load this module only on Windows hosts.
    '''
    return 'rdp' if salt.utils.is_windows() else False
def _parse_return_code_powershell(string):
    '''
    Extract the numeric ReturnValue reported in PowerShell output.

    Returns the return code as an int, or False when no
    ``ReturnValue : <n>`` line is present in the input.
    '''
    match = re.search(r'ReturnValue\s*: (\d*)', string)
    if match is None:
        return False
    return int(match.group(1))
def enable():
    '''
    Enable the RDP service on the server

    CLI Example:

    .. code-block:: bash

        salt '*' rdp.enable
    '''
    # Flip AllowTSConnections on through the Win32_TerminalServiceSetting WMI
    # class; the second argument of SetAllowTsConnections presumably also
    # updates the firewall exception -- TODO confirm against the WMI docs.
    cmd = '-InputFormat None -Command "& { $RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\CIMV2\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop ; $RDP.SetAllowTsConnections(1,1) }"'
    cmd = '{0} {1}'.format(POWERSHELL, cmd)
    # A WMI ReturnValue of 0 means success.
    return _parse_return_code_powershell(__salt__['cmd.run'](cmd)) == 0
def disable():
    '''
    Disable the RDP service on the server

    CLI Example:

    .. code-block:: bash

        salt '*' rdp.disable
    '''
    # SetAllowTsConnections(0, 1): turn TS connections off; the second
    # argument presumably also updates the firewall exception -- TODO confirm.
    cmd = '-InputFormat None -Command "& { $RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\CIMV2\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop ; $RDP.SetAllowTsConnections(0,1) }"'
    cmd = '{0} {1}'.format(POWERSHELL, cmd)
    # A WMI ReturnValue of 0 means success.
    return _parse_return_code_powershell(__salt__['cmd.run'](cmd)) == 0
def status():
    '''
    Show if rdp is enabled on the server

    CLI Example:

    .. code-block:: bash

        salt '*' rdp.status
    '''
    # Query the current AllowTSConnections value via WMI.
    cmd = '-InputFormat None -Command "& { $RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\CIMV2\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop ; echo $RDP.AllowTSConnections }"'
    cmd = '{0} {1}'.format(POWERSHELL, cmd)
    # The command echoes a bare integer; non-zero means RDP is enabled.
    out = int(__salt__['cmd.run'](cmd).strip())
    return out != 0
| # -*- coding: utf-8 -*-
'''
Manage RDP Service on Windows servers
'''
# Import python libs
import re
# Import salt libs
import salt.utils
POWERSHELL='C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
def __virtual__():
'''
Only works on Windows systems
'''
if salt.utils.is_windows():
return 'rdp'
return False
def _parse_return_code_powershell(string):
'''
return from the input string the return code of the powershell command
'''
regex = re.search(r'ReturnValue\s*: (\d*)', string)
if not regex:
return False
else:
return int(regex.group(1))
def enable():
'''
Enable RDP the service on the server
CLI Example:
.. code-block:: bash
salt '*' rdp.enable
'''
cmd = '-InputFormat None -Command "& { $RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\CIMV2\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop ; $RDP.SetAllowTsConnections(1,1) }"'
cmd = '{0} {1}'.format(POWERSHELL, cmd)
return _parse_return_code_powershell(__salt__['cmd.run'](cmd)) == 0
def disable():
'''
Disable RDP the service on the server
CLI Example:
.. code-block:: bash
salt '*' rdp.disable
'''
cmd = '-InputFormat None -Command "& { $RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\CIMV2\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop ; $RDP.SetAllowTsConnections(0,1) }"'
cmd = '{0} {1}'.format(POWERSHELL, cmd)
return _parse_return_code_powershell(__salt__['cmd.run'](cmd)) == 0
def status():
'''
Show if rdp is enabled on the server
CLI Example:
.. code-block:: bash
salt '*' rdp.status
'''
cmd = '-InputFormat None -Command "& { $RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\CIMV2\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop ; echo $RDP.AllowTSConnections }"'
cmd = '{0} {1}'.format(POWERSHELL, cmd)
out = int(__salt__['cmd.run'](cmd).strip())
return out != 0
| apache-2.0 | Python |
e1067ab4e5d028fa22437ac0451ae3237f7d3a2f | update guestbook example css | fission/fission,platform9/fission,fission/fission,life1347/fission,life1347/fission,life1347/fission,fission/fission,life1347/fission,life1347/fission,platform9/fission,life1347/fission,fission/fission,platform9/fission,life1347/fission,platform9/fission,life1347/fission,life1347/fission | examples/python/guestbook/get.py | examples/python/guestbook/get.py | #
# Handles GET /guestbook -- returns a list of items in the guestbook
# with a form to add more.
#
from flask import current_app, escape
import redis
# Connect to redis. This is run only when this file is loaded; as
# long as the pod is alive, the connection is reused.
redisConnection = redis.StrictRedis(host='redis.guestbook', port=6379, db=0)
def main():
    """Render the guestbook page: all stored messages plus an add-entry form."""
    # Redis returns raw bytes; decode and HTML-escape each message to
    # prevent markup/script injection before embedding it in the page.
    messages = redisConnection.lrange('guestbook', 0, -1)
    items = [("<li>%s</li>" % escape(m.decode('utf-8'))) for m in messages]
    ul = "<ul>%s</ul>" % "\n".join(items)
    return """
     <html><body style="font-family:sans-serif;font-size:2rem;padding:40px">
     <h1>Guestbook</h1>
     <form action="/guestbook" method="POST">
       <input type="text" name="text">
       <button type="submit">Add</button>
     </form>
     <hr/>
     %s
     </body></html>
    """ % ul
| #
# Handles GET /guestbook -- returns a list of items in the guestbook
# with a form to add more.
#
from flask import current_app, escape
import redis
# Connect to redis. This is run only when this file is loaded; as
# long as the pod is alive, the connection is reused.
redisConnection = redis.StrictRedis(host='redis.guestbook', port=6379, db=0)
def main():
messages = redisConnection.lrange('guestbook', 0, -1)
items = [("<li>%s</li>" % escape(m.decode('utf-8'))) for m in messages]
ul = "<ul>%s</ul>" % "\n".join(items)
return """
<html><body>
<h1>Guestbook</h1>
<form action="/guestbook" method="POST">
<input type="text" name="text">
<button type="submit">Add</button>
</form>
<hr/>
%s
</body></html>
""" % ul
| apache-2.0 | Python |
c951576e0c6f3e7b4a4df6ba843c144e49ff1ba8 | add missing new line at the end of file | tojonmz/treeherder,deathping1994/treeherder,kapy2010/treeherder,parkouss/treeherder,gbrmachado/treeherder,tojon/treeherder,adusca/treeherder,moijes12/treeherder,adusca/treeherder,gbrmachado/treeherder,gbrmachado/treeherder,edmorley/treeherder,akhileshpillai/treeherder,edmorley/treeherder,moijes12/treeherder,sylvestre/treeherder,gbrmachado/treeherder,moijes12/treeherder,kapy2010/treeherder,gbrmachado/treeherder,parkouss/treeherder,moijes12/treeherder,avih/treeherder,parkouss/treeherder,avih/treeherder,tojonmz/treeherder,sylvestre/treeherder,vaishalitekale/treeherder,parkouss/treeherder,adusca/treeherder,jgraham/treeherder,jgraham/treeherder,tojonmz/treeherder,vaishalitekale/treeherder,moijes12/treeherder,avih/treeherder,kapy2010/treeherder,KWierso/treeherder,tojonmz/treeherder,tojon/treeherder,rail/treeherder,avih/treeherder,akhileshpillai/treeherder,glenn124f/treeherder,edmorley/treeherder,parkouss/treeherder,glenn124f/treeherder,adusca/treeherder,rail/treeherder,vaishalitekale/treeherder,wlach/treeherder,tojon/treeherder,jgraham/treeherder,akhileshpillai/treeherder,sylvestre/treeherder,KWierso/treeherder,glenn124f/treeherder,avih/treeherder,glenn124f/treeherder,kapy2010/treeherder,wlach/treeherder,sylvestre/treeherder,akhileshpillai/treeherder,adusca/treeherder,moijes12/treeherder,edmorley/treeherder,jgraham/treeherder,tojon/treeherder,rail/treeherder,wlach/treeherder,deathping1994/treeherder,tojonmz/treeherder,glenn124f/treeherder,glenn124f/treeherder,rail/treeherder,akhileshpillai/treeherder,adusca/treeherder,vaishalitekale/treeherder,kapy2010/treeherder,tojonmz/treeherder,KWierso/treeherder,akhileshpillai/treeherder,vaishalitekale/treeherder,rail/treeherder,parkouss/treeherder,sylvestre/treeherder,wlach/treeherder,vaishalitekale/treeherder,sylvestre/treeherder,rail/treeherder,deathping1994/treeherder,jgraham/treeherder,deathping1994/treeherder,wlach/treeherder,wlach/treeherder,g
brmachado/treeherder,jgraham/treeherder,avih/treeherder,deathping1994/treeherder,deathping1994/treeherder,KWierso/treeherder | treeherder/workers/management/commands/shutdown_workers.py | treeherder/workers/management/commands/shutdown_workers.py | from django.core.management.base import BaseCommand
from celery.task.control import broadcast
class Command(BaseCommand):
    """Management command that asks every Celery worker to shut down warmly."""

    help = "Broadcast a warm shutdown event to all the workers."

    def handle(self, *args, **options):
        """Send the Celery "shutdown" broadcast and report progress on stdout."""
        self.stdout.write("Sending shutdown event")
        # Warm shutdown: workers finish their current tasks before exiting.
        broadcast("shutdown")
        self.stdout.write("Shutdown event sent")
| from django.core.management.base import BaseCommand
from celery.task.control import broadcast
class Command(BaseCommand):
help = "Broadcast a warm shutdown event to all the workers."
def handle(self, *args, **options):
self.stdout.write("Sending shutdown event")
broadcast("shutdown")
self.stdout.write("Shutdown event sent") | mpl-2.0 | Python |
bd9526e0044f6cdf47c4ed2e45ddde58f67905b3 | add line to run unittests | funginstitute/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,yngcan/patentprocessor,nikken1/patentprocessor,yngcan/patentprocessor | lib/new_xml_parsing/test_xml_driver.py | lib/new_xml_parsing/test_xml_driver.py | #!/usr/bin/env python
import os
import re
import unittest
from xml_driver import XMLElement, XMLHandler
from xml.sax import make_parser, handler
# Collect the 2012_<n>.xml fixture files from the test directory.
# BUGFIX: escape the dot so e.g. "2012_1_xml" no longer matches, and use
# "is not None" per PEP 8 instead of "!= None".
xml_files = [x for x in os.listdir('test_xml_files')
             if re.match(r"2012_\d\.xml", x) is not None]  # Match fixtures

# Parse every fixture once up front; the test cases inspect the roots.
parsed_xml = []
for xf in xml_files:
    parser = make_parser()
    xmlhandler = XMLHandler()
    parser.setContentHandler(xmlhandler)
    # Do not fetch external general entities (no network access during parse).
    parser.setFeature(handler.feature_external_ges, False)
    parser.parse('test_xml_files/' + xf)
    parsed_xml.append(xmlhandler.root)
class Test_XMLElement(unittest.TestCase):
    """Tests for XMLElement; operates on the fixtures parsed at import time."""

    def setUp(self):
        # Sanity check: fixture discovery must have found at least one file.
        self.assertTrue(xml_files)

# BUGFIX: guard the runner so importing this module (e.g. from a test
# collector) does not immediately execute the suite and exit.
if __name__ == '__main__':
    unittest.main()
| #!/usr/bin/env python
import os
import re
import unittest
from xml_driver import XMLElement, XMLHandler
from xml.sax import make_parser, handler
# Directory of test files
xml_files = [x for x in os.listdir('test_xml_files')
if re.match(r"2012_\d.xml", x) != None] # Match fixtures
parsed_xml = []
for xf in xml_files:
parser = make_parser()
xmlhandler = XMLHandler()
parser.setContentHandler(xmlhandler)
parser.setFeature(handler.feature_external_ges, False)
parser.parse('test_xml_files/'+xf)
parsed_xml.append(xmlhandler.root)
class Test_XMLElement(unittest.TestCase):
def setUp(self):
# sanity check
self.assertTrue(xml_files)
| bsd-2-clause | Python |
708439135d26a8a4b38ad84665938f742a6e7f8c | Update description | davidrobles/mlnd-capstone-code | examples/tic_ql_tabular_fixed.py | examples/tic_ql_tabular_fixed.py | '''
Q-Learning is used to estimate the state-action values for a
Tic-Tac-Toe board position against a fixed Alpha-Beta opponent
'''
from capstone.game.games import TicTacToe
from capstone.game.players import AlphaBeta, RandPlayer
from capstone.game.utils import tic2pdf
from capstone.rl import FixedGameMDP, Environment
from capstone.rl.learners import QLearning
from capstone.rl.policies import RandomPolicy
from capstone.rl.utils import EpisodicWLDPlotter, QValuesPlotter
from capstone.rl.value_functions import TabularQ
# Fixed seed so the Q-learning run (and the generated figures) are reproducible.
seed = 23
# Mid-game position to analyse.
board = [[' ', ' ', 'X'],
         [' ', 'X', ' '],
         ['O', 'O', ' ']]
game = TicTacToe(board)
# MDP with a fixed Alpha-Beta opponent; the third argument presumably selects
# which player the learning agent controls -- TODO confirm in FixedGameMDP.
mdp = FixedGameMDP(game, AlphaBeta(), 1)
env = Environment(mdp)
qlearning = QLearning(
    env=env,
    qfunction=TabularQ(random_state=seed),
    policy=RandomPolicy(env.actions, random_state=seed),
    learning_rate=0.1,
    discount_factor=1.0,
    n_episodes=800,
)
# Track the Q-values of every legal move from the root position during training.
qlearning.train(callbacks=[QValuesPlotter(game, game.legal_moves())])

####################
# Generate figures #
####################

# Render the analysed position itself, then one figure per legal move with
# its learned Q-value printed to stdout.
tic2pdf('figures/tic_ql_tab_current.pdf', game.board)
for move in game.legal_moves():
    print('*' * 80)
    value = qlearning.qfunction[(game, move)]
    print('Move: %d' % move)
    print('Value: %f' % value)
    new_game = game.copy().make_move(move)
    print(new_game)
    filename = 'figures/tic_ql_tab_move_{}.pdf'.format(move)
    tic2pdf(filename, new_game.board)
| '''
Q-Learning is used to learn the state-action values for a
Tic-Tac-Toe board position against a fixed Alpha-Beta opponent
'''
from capstone.game.games import TicTacToe
from capstone.game.players import AlphaBeta, RandPlayer
from capstone.game.utils import tic2pdf
from capstone.rl import FixedGameMDP, Environment
from capstone.rl.learners import QLearning
from capstone.rl.policies import RandomPolicy
from capstone.rl.utils import EpisodicWLDPlotter, QValuesPlotter
from capstone.rl.value_functions import TabularQ
seed = 23
board = [[' ', ' ', 'X'],
[' ', 'X', ' '],
['O', 'O', ' ']]
game = TicTacToe(board)
mdp = FixedGameMDP(game, AlphaBeta(), 1)
env = Environment(mdp)
qlearning = QLearning(
env=env,
qfunction=TabularQ(random_state=seed),
policy=RandomPolicy(env.actions, random_state=seed),
learning_rate=0.1,
discount_factor=1.0,
n_episodes=800,
)
qlearning.train(callbacks=[QValuesPlotter(game, game.legal_moves())])
####################
# Generate figures #
####################
tic2pdf('figures/tic_ql_tab_current.pdf', game.board)
for move in game.legal_moves():
print('*' * 80)
value = qlearning.qfunction[(game, move)]
print('Move: %d' % move)
print('Value: %f' % value)
new_game = game.copy().make_move(move)
print(new_game)
filename = 'figures/tic_ql_tab_move_{}.pdf'.format(move)
tic2pdf(filename, new_game.board)
| mit | Python |
37bec74984f1d3b88f640f2bc9d7788090acdf58 | Remove commented out code | khosrow/lvsm,khosrow/lvsm | lvsm/__main__.py | lvsm/__main__.py | #!/usr/bin/env python
# Khosrow Ebrahimpour - Sep 2012
"""
lvsm - LVS Manager
LVS Manager is a shell that eases the management of a linux virtual server.
Using it without arguments will enter an interactive shell. Supplying one or
more command-line arguments will run lvsm for a "single-shot" use.
Usage: lvsm [-h] [-c <conffile>][commands]
Options:
-h, --help Show this help message and exit
-c <conffile>, -config=<connfile>
Specify which configuration file to use
The default is /etc/lvsm.conf
-d, --debug Enable debug messages during runtime
-v, --version Display lvsm version
Commands:
configure
status
help
Use 'lvsm help <command>' for information on a specific command.
"""
import getopt
import sys
import __init__ as appinfo
import prompts.live
import utils
import logging
def usage(code, msg=''):
    """Print the module usage (plus an optional message) and exit with *code*."""
    # Errors go to stderr, plain help requests to stdout.
    fd = sys.stderr if code else sys.stdout
    print >> fd, __doc__
    if msg:
        print >> fd, msg
    sys.exit(code)
def main():
    """Parse command-line options and start lvsm, interactive or single-shot."""
    CONFFILE = "/etc/lvsm.conf"
    logging.basicConfig(format='[%(levelname)s]: %(message)s')
    logger = logging.getLogger('lvsm')
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hvc:d",
                                   ["help", "version", "config=", "debug"])
    except getopt.error, msg:
        # Unknown option: show usage and exit with status 2.
        usage(2, msg)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage(0)
        elif opt in ("-v", "--version"):
            print "lvsm " + appinfo.__version__
            sys.exit(0)
        elif opt in ("-c", "--config"):
            CONFFILE = arg
        elif opt in ("-d", "--debug"):
            logger.setLevel(logging.DEBUG)
    # Open the config file and parse it.
    config = utils.parse_config(CONFFILE)
    logger.debug('Parsed config file')
    logger.debug(str(config))
    try:
        shell = prompts.live.LivePrompt(config)
        if args:
            # Remaining arguments form one command: run it and return.
            shell.onecmd(' '.join(args[:]))
        else:
            # No command given: enter the interactive shell.
            shell.cmdloop()
    except KeyboardInterrupt:
        print "\nleaving abruptly!"
        sys.exit(1)

if __name__ == "__main__":
    main()
| #!/usr/bin/env python
# Khosrow Ebrahimpour - Sep 2012
"""
lvsm - LVS Manager
LVS Manager is a shell that eases the management of a linux virtual server.
Using it without arguments will enter an interactive shell. Supplying one or
more command-line arguments will run lvsm for a "single-shot" use.
Usage: lvsm [-h] [-c <conffile>][commands]
Options:
-h, --help Show this help message and exit
-c <conffile>, -config=<connfile>
Specify which configuration file to use
The default is /etc/lvsm.conf
-d, --debug Enable debug messages during runtime
-v, --version Display lvsm version
Commands:
configure
status
help
Use 'lvsm help <command>' for information on a specific command.
"""
import getopt
import sys
import __init__ as appinfo
import prompts.live
import utils
import logging
def usage(code, msg=''):
if code:
fd = sys.stderr
else:
fd = sys.stdout
print >> fd, __doc__
if msg:
print >> fd, msg
sys.exit(code)
def main():
CONFFILE = "/etc/lvsm.conf"
logging.basicConfig(format='[%(levelname)s]: %(message)s')
logger = logging.getLogger('lvsm')
try:
opts, args = getopt.getopt(sys.argv[1:], "hvc:d",
["help", "version", "config=", "debug"])
except getopt.error, msg:
usage(2, msg)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage(0)
elif opt in ("-v", "--version"):
print "lvsm " + appinfo.__version__
sys.exit(0)
elif opt in ("-c", "--config"):
CONFFILE = arg
elif opt in ("-d", "--debug"):
logger.setLevel(logging.DEBUG)
# open config file and read it
config = utils.parse_config(CONFFILE)
logger.debug('Parsed config file')
logger.debug(str(config))
try:
# shell = lvsm.MainPrompt(config)
shell = prompts.live.LivePrompt(config)
if args:
shell.onecmd(' '.join(args[:]))
else:
shell.cmdloop()
except KeyboardInterrupt:
print "\nleaving abruptly!"
sys.exit(1)
if __name__ == "__main__":
main()
| mit | Python |
cb14fb14f5c227aa780a06a2ce7bff758c74962c | update version | pavlov99/jsonapi,pavlov99/jsonapi | jsonapi/__init__.py | jsonapi/__init__.py | """ JSON:API realization."""
__version = (0, 7, 4)  # release version as a tuple; bump here on release
__version__ = version = '.'.join(map(str, __version))  # dotted string, e.g. "0.7.4"
__project__ = PROJECT = __name__
| """ JSON:API realization."""
__version = (0, 7, 3)
__version__ = version = '.'.join(map(str, __version))
__project__ = PROJECT = __name__
| mit | Python |
88753822c379177a309a1e195520d487835b10cf | update hotfix version | bradical987/jumpbox,bradical987/jumpbox | jumpbox/_version.py | jumpbox/_version.py | # Copyright (C) 2017 Bradley Sakdol <bsakdol@turnitin.com>
#
# This file is part of Jumpbox
#
# Jumpbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '1.3.1'
| # Copyright (C) 2017 Bradley Sakdol <bsakdol@turnitin.com>
#
# This file is part of Jumpbox
#
# Jumpbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '1.3.0'
| mit | Python |
ece5438dda41e8d50346860bc33931c109f14ec8 | bump version number for win32 port | kapteyn-astro/kapteyn,kapteyn-astro/kapteyn,kapteyn-astro/kapteyn,kapteyn-astro/kapteyn,kapteyn-astro/kapteyn,kapteyn-astro/kapteyn | kapteyn/__init__.py | kapteyn/__init__.py | """Kapteyn package.
"""
from os import path
# Absolute path of the installed package directory.
package_dir = path.abspath(path.dirname(__file__))
# Submodules exported by "from kapteyn import *".
__all__=['celestial', 'wcs', 'wcsgrat', 'tabarray', 'maputils',
         'mplutil', 'positions', 'shapes', 'rulers', 'filters',
         'interpolation','kmpfit']
__version__='2.3.1'
| """Kapteyn package.
"""
from os import path
package_dir = path.abspath(path.dirname(__file__))
__all__=['celestial', 'wcs', 'wcsgrat', 'tabarray', 'maputils',
'mplutil', 'positions', 'shapes', 'rulers', 'filters',
'interpolation','kmpfit']
__version__='2.3'
| bsd-3-clause | Python |
7365a739660927f373cbf613e1d869265fb16e7d | remove some obsolete code | nschloe/meshio | meshio/_files.py | meshio/_files.py | from contextlib import contextmanager
def is_buffer(obj, mode):
    """Return True when *obj* already behaves like an open file for *mode*.

    A readable object qualifies for modes containing "r"; a writable one
    for modes containing "w".
    """
    readable = "r" in mode and hasattr(obj, "read")
    writable = "w" in mode and hasattr(obj, "write")
    return readable or writable
@contextmanager
def open_file(path_or_buf, mode="r"):
    """Yield a file object for *path_or_buf*, opening it only when needed.

    Existing buffers are passed through unchanged (and left open); paths
    are opened with *mode* and closed again when the context exits.
    """
    if not is_buffer(path_or_buf, mode):
        with open(path_or_buf, mode) as handle:
            yield handle
    else:
        yield path_or_buf
| import sys
from contextlib import contextmanager
try:
# Python 3.6+
from os import PathLike
except ImportError:
from pathlib import PurePath as PathLike
def is_buffer(obj, mode):
return ("r" in mode and hasattr(obj, "read")) or (
"w" in mode and hasattr(obj, "write")
)
@contextmanager
def open_file(path_or_buf, mode="r"):
if is_buffer(path_or_buf, mode):
yield path_or_buf
elif sys.version_info < (3, 6) and isinstance(path_or_buf, PathLike):
# TODO remove when python 3.5 is EoL (i.e. 2020-09-13)
# https://devguide.python.org/#status-of-python-branches
# https://www.python.org/dev/peps/pep-0478/
with open(str(path_or_buf), mode) as f:
yield f
else:
with open(path_or_buf, mode) as f:
yield f
| mit | Python |
30794989f1c33c55ff8adfc91aa575a47bd98dfd | Increment version to 1.0.6 | SpectralAngel/django-select2-forms,hkmshb/django-select2-forms,sandow-digital/django-select2-forms,JP-Ellis/django-select2-forms,SpectralAngel/django-select2-forms,hkmshb/django-select2-forms,sandow-digital/django-select2-forms,SpectralAngel/django-select2-forms,hkmshb/django-select2-forms,sandow-digital/django-select2-forms | select2/__init__.py | select2/__init__.py | __version_info__ = (1, 0, 6)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (1, 0, 5)
__version__ = '.'.join(map(str, __version_info__))
| bsd-2-clause | Python |
d95412425a5d5a76cacb463fc8c137f576c8b7ad | Fix line-wrapping of help text. | synicalsyntax/zulip,brainwane/zulip,vaidap/zulip,ryanbackman/zulip,aakash-cr7/zulip,jrowan/zulip,eeshangarg/zulip,blaze225/zulip,vabs22/zulip,Galexrt/zulip,brockwhittaker/zulip,SmartPeople/zulip,zulip/zulip,PhilSk/zulip,aakash-cr7/zulip,tommyip/zulip,rht/zulip,christi3k/zulip,kou/zulip,dhcrzf/zulip,blaze225/zulip,dhcrzf/zulip,rht/zulip,susansls/zulip,Galexrt/zulip,jainayush975/zulip,aakash-cr7/zulip,eeshangarg/zulip,dattatreya303/zulip,synicalsyntax/zulip,j831/zulip,eeshangarg/zulip,verma-varsha/zulip,susansls/zulip,verma-varsha/zulip,ryanbackman/zulip,jackrzhang/zulip,punchagan/zulip,synicalsyntax/zulip,kou/zulip,tommyip/zulip,dhcrzf/zulip,jainayush975/zulip,ryanbackman/zulip,vaidap/zulip,mahim97/zulip,SmartPeople/zulip,blaze225/zulip,rishig/zulip,kou/zulip,dhcrzf/zulip,andersk/zulip,shubhamdhama/zulip,andersk/zulip,souravbadami/zulip,j831/zulip,PhilSk/zulip,aakash-cr7/zulip,jainayush975/zulip,eeshangarg/zulip,j831/zulip,shubhamdhama/zulip,tommyip/zulip,SmartPeople/zulip,hackerkid/zulip,dattatreya303/zulip,Galexrt/zulip,susansls/zulip,kou/zulip,christi3k/zulip,kou/zulip,PhilSk/zulip,dawran6/zulip,souravbadami/zulip,vaidap/zulip,andersk/zulip,zulip/zulip,PhilSk/zulip,jackrzhang/zulip,andersk/zulip,souravbadami/zulip,andersk/zulip,rht/zulip,verma-varsha/zulip,ryanbackman/zulip,j831/zulip,showell/zulip,hackerkid/zulip,JPJPJPOPOP/zulip,zulip/zulip,andersk/zulip,aakash-cr7/zulip,vabs22/zulip,kou/zulip,zulip/zulip,jrowan/zulip,timabbott/zulip,punchagan/zulip,shubhamdhama/zulip,rht/zulip,brockwhittaker/zulip,punchagan/zulip,tommyip/zulip,showell/zulip,Galexrt/zulip,showell/zulip,dawran6/zulip,jackrzhang/zulip,timabbott/zulip,jackrzhang/zulip,blaze225/zulip,timabbott/zulip,brockwhittaker/zulip,timabbott/zulip,synicalsyntax/zulip,rishig/zulip,dhcrzf/zulip,shubhamdhama/zulip,verma-varsha/zulip,jphilipsen05/zulip,showell/zulip,blaze225/zulip,brockwhittaker/zulip,susansls/zulip,amanharitsh123/zulip,ha
ckerkid/zulip,Galexrt/zulip,hackerkid/zulip,showell/zulip,JPJPJPOPOP/zulip,amanharitsh123/zulip,souravbadami/zulip,Galexrt/zulip,christi3k/zulip,shubhamdhama/zulip,synicalsyntax/zulip,JPJPJPOPOP/zulip,jainayush975/zulip,jrowan/zulip,mahim97/zulip,kou/zulip,timabbott/zulip,SmartPeople/zulip,vaidap/zulip,hackerkid/zulip,vaidap/zulip,JPJPJPOPOP/zulip,PhilSk/zulip,mahim97/zulip,souravbadami/zulip,eeshangarg/zulip,brainwane/zulip,rht/zulip,jphilipsen05/zulip,shubhamdhama/zulip,dawran6/zulip,tommyip/zulip,jrowan/zulip,jrowan/zulip,jainayush975/zulip,eeshangarg/zulip,SmartPeople/zulip,rishig/zulip,mahim97/zulip,vabs22/zulip,jphilipsen05/zulip,amanharitsh123/zulip,jackrzhang/zulip,JPJPJPOPOP/zulip,brainwane/zulip,dhcrzf/zulip,brockwhittaker/zulip,eeshangarg/zulip,JPJPJPOPOP/zulip,rht/zulip,shubhamdhama/zulip,dattatreya303/zulip,Galexrt/zulip,jphilipsen05/zulip,punchagan/zulip,vabs22/zulip,punchagan/zulip,aakash-cr7/zulip,amanharitsh123/zulip,punchagan/zulip,SmartPeople/zulip,dawran6/zulip,showell/zulip,souravbadami/zulip,dawran6/zulip,jackrzhang/zulip,vabs22/zulip,dhcrzf/zulip,mahim97/zulip,zulip/zulip,brainwane/zulip,rht/zulip,hackerkid/zulip,j831/zulip,mahim97/zulip,PhilSk/zulip,rishig/zulip,christi3k/zulip,synicalsyntax/zulip,amanharitsh123/zulip,vaidap/zulip,showell/zulip,dattatreya303/zulip,christi3k/zulip,tommyip/zulip,synicalsyntax/zulip,brainwane/zulip,j831/zulip,hackerkid/zulip,verma-varsha/zulip,amanharitsh123/zulip,susansls/zulip,rishig/zulip,vabs22/zulip,christi3k/zulip,dattatreya303/zulip,ryanbackman/zulip,dattatreya303/zulip,blaze225/zulip,punchagan/zulip,brainwane/zulip,jrowan/zulip,timabbott/zulip,jainayush975/zulip,tommyip/zulip,brainwane/zulip,brockwhittaker/zulip,rishig/zulip,dawran6/zulip,timabbott/zulip,rishig/zulip,andersk/zulip,jphilipsen05/zulip,zulip/zulip,susansls/zulip,jackrzhang/zulip,verma-varsha/zulip,zulip/zulip,jphilipsen05/zulip,ryanbackman/zulip | zerver/management/commands/generate_realm_creation_link.py | 
zerver/management/commands/generate_realm_creation_link.py | from __future__ import absolute_import
from __future__ import print_function
from argparse import ArgumentParser, RawTextHelpFormatter
from typing import Any
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import ProgrammingError
from confirmation.models import generate_realm_creation_url
from zerver.models import Realm
import sys
class Command(BaseCommand):
    # Shown by "./manage.py help"; rendered verbatim because create_parser
    # below installs a raw-text formatter.
    help = """
Outputs a randomly generated, 1-time-use link for Organization creation.
Whoever visits the link can create a new organization on this server, regardless of whether
settings.OPEN_REALM_CREATION is enabled. The link would expire automatically after
settings.REALM_CREATION_LINK_VALIDITY_DAYS.
Usage: ./manage.py generate_realm_creation_link """

    # Fix support for multi-line usage: keep the help text's line breaks.
    def create_parser(self, *args, **kwargs):
        # type: (*Any, **Any) -> ArgumentParser
        parser = super(Command, self).create_parser(*args, **kwargs)
        parser.formatter_class = RawTextHelpFormatter
        return parser

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        try:
            # First check whether the database has been initialized.
            Realm.objects.first()
        except ProgrammingError:
            print("The Zulip database does not appear to exist. Have you run initialize-database?")
            sys.exit(1)
        url = generate_realm_creation_url()
        # \033[1;92m ... \033[0m wraps the output in bright-green ANSI colour.
        self.stdout.write(
            "\033[1;92mPlease visit the following secure single-use link to register your ")
        self.stdout.write("new Zulip organization:\033[0m")
        self.stdout.write("")
        self.stdout.write("    \033[1;92m%s\033[0m" % (url,))
        self.stdout.write("")
| from __future__ import absolute_import
from __future__ import print_function
from typing import Any
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import ProgrammingError
from confirmation.models import generate_realm_creation_url
from zerver.models import Realm
import sys
class Command(BaseCommand):
help = """Outputs a randomly generated, 1-time-use link for Organization creation.
Whoever visits the link can create a new organization on this server, regardless of whether
settings.OPEN_REALM_CREATION is enabled. The link would expire automatically after
settings.REALM_CREATION_LINK_VALIDITY_DAYS.
Usage: ./manage.py generate_realm_creation_link """
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
try:
# first check if the db has been initalized
Realm.objects.first()
except ProgrammingError:
print("The Zulip database does not appear to exist. Have you run initialize-database?")
sys.exit(1)
url = generate_realm_creation_url()
self.stdout.write(
"\033[1;92mPlease visit the following secure single-use link to register your ")
self.stdout.write("new Zulip organization:\033[0m")
self.stdout.write("")
self.stdout.write(" \033[1;92m%s\033[0m" % (url,))
self.stdout.write("")
| apache-2.0 | Python |
a1e028c4cbd414073163f076fb766f64068eb728 | Bump version | thoas/django-sequere | sequere/__init__.py | sequere/__init__.py | version = (0, 2, 5)
__version__ = '.'.join(map(str, version))
from .registry import register, autodiscover
__all__ = ['register', 'autodiscover']
default_app_config = 'sequere.apps.SequereConfig'
| version = (0, 2, 4)
__version__ = '.'.join(map(str, version))
from .registry import register, autodiscover
__all__ = ['register', 'autodiscover']
default_app_config = 'sequere.apps.SequereConfig'
| mit | Python |
081be22efa042db2f706cf3adc08e40ab605c6d0 | modify comment | dasadc/conmgr,dasadc/conmgr,dasadc/conmgr,dasadc/conmgr | server/datastore.py | server/datastore.py | # -*- coding: utf-8 -*-
#
from google.appengine.ext import ndb
def qdata_key(year=2015):
"問題データのparent"
return ndb.Key('Qdata', year)
def userlist_key():
"UserInfoのparent"
return ndb.Key('Users', 'all')
class UserInfo(ndb.Model):
username = ndb.StringProperty()
password = ndb.StringProperty()
displayname = ndb.StringProperty()
uid = ndb.IntegerProperty()
gid = ndb.IntegerProperty()
class Question(ndb.Model):
"問題データ"
qnum = ndb.IntegerProperty(indexed=True)
text = ndb.StringProperty(indexed=False)
rows = ndb.IntegerProperty()
cols = ndb.IntegerProperty()
linenum = ndb.IntegerProperty()
author = ndb.StringProperty(indexed=True)
date = ndb.DateTimeProperty(auto_now_add=True)
class QuestionList(ndb.Model):
"コンテスト用の、出題問題リスト"
q = ndb.KeyProperty(kind=Question)
num = ndb.IntegerProperty()
class Answer(ndb.Model):
"回答データ"
anum = ndb.IntegerProperty(indexed=True)
text = ndb.StringProperty(indexed=False)
owner = ndb.StringProperty(indexed=True)
date = ndb.DateTimeProperty(auto_now_add=True)
| # -*- coding: utf-8 -*-
#
from google.appengine.ext import ndb
def qdata_key(year=2015):
"問題データのparent"
return ndb.Key('Qdata', year)
def userlist_key():
"UserInfoのparent"
return ndb.Key('Users', 'all')
class UserInfo(ndb.Model):
username = ndb.StringProperty()
password = ndb.StringProperty()
displayname = ndb.StringProperty()
uid = ndb.IntegerProperty()
gid = ndb.IntegerProperty()
class Question(ndb.Model):
"問題データ"
qnum = ndb.IntegerProperty(indexed=True)
text = ndb.StringProperty(indexed=False)
rows = ndb.IntegerProperty()
cols = ndb.IntegerProperty()
linenum = ndb.IntegerProperty()
author = ndb.StringProperty(indexed=True)
date = ndb.DateTimeProperty(auto_now_add=True)
class QuestionList(ndb.Model):
"コンテスト用の問題リスト"
q = ndb.KeyProperty(kind=Question)
num = ndb.IntegerProperty()
class Answer(ndb.Model):
"回答データ"
anum = ndb.IntegerProperty(indexed=True)
text = ndb.StringProperty(indexed=False)
owner = ndb.StringProperty(indexed=True)
date = ndb.DateTimeProperty(auto_now_add=True)
| bsd-3-clause | Python |
fb6c8d2be457f6043a51ceaae2f9469a02b2aeca | Update MultiWii.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | service/MultiWii.py | service/MultiWii.py | #########################################
# MultiWii.py
# categories: sensor
# more info @: http://myrobotlab.org/service/MultiWii
#########################################
# uncomment for virtual hardware
# virtual = True
# port = "/dev/ttyUSB0"
port = "COM15"
# start optional virtual arduino service, used for test
if ('virtual' in globals() and virtual):
virtualArduino = Runtime.start("virtualArduino", "VirtualArduino")
virtualArduino.connect(port)
#define POLL_PERIOD 20
serial = Runtime.start("serial","Serial")
BAUDRATE = 9600
#define MSP_SET_RAW_RC 200
#define MSP_SET_RAW_RC_LENGTH 16
RC_MIN = 1000
RC_MID = 1500
RC_MAX = 2000
ROLL = 0
PITCH = 1
YAW = 2
THROTTLE = 3
AUX1 = 4
AUX2 = 5
AUX3 = 6
AUX4 = 7
#RC signals to send to the quad
#format: { roll, throttle, yaw, pitch, 0, 0, 0, 0 }
# uint16_t rc_signals[8] = { 1234 };
#Buffer for storing the serializes byte form of the RC signals
# uint8_t rc_bytes[16] = { 0 };
serial.connect(port,BAUDRATE,8,1,0);
# send_msp(MSP_SET_RAW_RC, rc_bytes, MSP_SET_RAW_RC_LENGTH);
def arm():
print("arm movement")
msgType = 200 # MSP_SET_RAW_RC
# header $M<
serial.write('$')
serial.write('M')
serial.write('<')
serial.write(msgType)
serial.write(16) # ? not sure guessing 0 no payload
# serial.write(MsgLength??)
# start checksum
checksum = 0
checksum ^= (16 & 0xFF);
checksum ^= (200) # msp
# msg data - LSB first 2 byte uint_8
serial.write(1500) # min roll
checksum ^= (1500)
serial.write(1500 >> 8) # min roll
checksum ^= (1500 >> 8)
serial.write(1000) # min throttle
checksum ^= (1000)
serial.write(1000 >> 8) # min throttle
serial.write(2000) # max yaw
serial.write(2000 >> 8) # max yaw
serial.write(1500) # min pitch
serial.write(1500 >> 8) # min pitch
serial.write(1000) # aux1
serial.write(1000 >> 8) # aux1
serial.write(1000) # aux2
serial.write(1000 >> 8) # aux2
serial.write(1000) # aux3
serial.write(1000 >> 8) # aux3
serial.write(1000) # aux4
serial.write(1000 >> 8) # aux4
serial.write(checksum)
# rc_signals[THROTTLE] = RC_MIN;
# rc_signals[YAW] = RC_MAX;
# rc_signals[PITCH] = RC_MID;
# rc_signals[ROLL] = RC_MID;
# rc_signals[AUX1] = RC_MIN;
# rc_signals[AUX2] = RC_MIN;
arm()
| #########################################
# MultiWii.py
# categories: sensor
# more info @: http://myrobotlab.org/service/MultiWii
#########################################
# uncomment for virtual hardware
virtual = True
# port = "/dev/ttyUSB0"
port = "COM15"
# start optional virtual arduino service, used for test
if ('virtual' in globals() and virtual):
virtualArduino = Runtime.start("virtualArduino", "VirtualArduino")
virtualArduino.connect(port)
#define POLL_PERIOD 20
serial = Runtime.start("serial","Serial")
BAUDRATE = 9600
#define MSP_SET_RAW_RC 200
#define MSP_SET_RAW_RC_LENGTH 16
RC_MIN = 1000
RC_MID = 1500
RC_MAX = 2000
ROLL = 0
PITCH = 1
YAW = 2
THROTTLE = 3
AUX1 = 4
AUX2 = 5
AUX3 = 6
AUX4 = 7
#RC signals to send to the quad
#format: { roll, throttle, yaw, pitch, 0, 0, 0, 0 }
# uint16_t rc_signals[8] = { 1234 };
#Buffer for storing the serializes byte form of the RC signals
# uint8_t rc_bytes[16] = { 0 };
serial.connect(port,BAUDRATE,8,1,0);
# send_msp(MSP_SET_RAW_RC, rc_bytes, MSP_SET_RAW_RC_LENGTH);
def arm():
print("arm movement")
msgType = 200 # MSP_SET_RAW_RC
# header $M<
serial.write('$')
serial.write('M')
serial.write('<')
serial.write(msgType)
serial.write(16) # ? not sure guessing 0 no payload
# serial.write(MsgLength??)
# start checksum
checksum = 0
checksum ^= (16 & 0xFF);
checksum ^= (200) # msp
# msg data - LSB first 2 byte uint_8
serial.write(1500) # min roll
checksum ^= (1500)
serial.write(1500 >> 8) # min roll
checksum ^= (1500 >> 8)
serial.write(1000) # min throttle
checksum ^= (1000)
serial.write(1000 >> 8) # min throttle
serial.write(2000) # max yaw
serial.write(2000 >> 8) # max yaw
serial.write(1500) # min pitch
serial.write(1500 >> 8) # min pitch
serial.write(1000) # aux1
serial.write(1000 >> 8) # aux1
serial.write(1000) # aux2
serial.write(1000 >> 8) # aux2
serial.write(1000) # aux3
serial.write(1000 >> 8) # aux3
serial.write(1000) # aux4
serial.write(1000 >> 8) # aux4
serial.write(checksum)
# rc_signals[THROTTLE] = RC_MIN;
# rc_signals[YAW] = RC_MAX;
# rc_signals[PITCH] = RC_MID;
# rc_signals[ROLL] = RC_MID;
# rc_signals[AUX1] = RC_MIN;
# rc_signals[AUX2] = RC_MIN;
arm()
| apache-2.0 | Python |
e8e0aa64437bcf7eb98f1d8ee89e7fcf918c81ca | Update Multiwii.py | sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,mecax/pyrobotlab,MyRobotLab/pyrobotlab,sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,sstocker46/pyrobotlab,mecax/pyrobotlab | service/Multiwii.py | service/Multiwii.py | #define POLL_PERIOD 20
serial = Runtime.start("serial","Serial")
COMPORT= "COM19"
BAUDRATE = 9600
#define MSP_SET_RAW_RC 200
#define MSP_SET_RAW_RC_LENGTH 16
RC_MIN = 1000
RC_MID = 1500
RC_MAX = 2000
ROLL = 0
PITCH = 1
YAW = 2
THROTTLE = 3
AUX1 = 4
AUX2 = 5
AUX3 = 6
AUX4 = 7
#RC signals to send to the quad
#format: { roll, throttle, yaw, pitch, 0, 0, 0, 0 }
uint16_t rc_signals[8] = { 1234 };
#Buffer for storing the serializes byte form of the RC signals
uint8_t rc_bytes[16] = { 0 };
serial.connect(COMPORT,BAUDRATE,8,1,0);
send_msp(MSP_SET_RAW_RC, rc_bytes, MSP_SET_RAW_RC_LENGTH);
def arm():
msgType = 200 # MSP_SET_RAW_RC
# header $M<
serial.write('$')
serial.write('M')
serial.write('<')
serial.write(msgType)
serial.write(MsgLength??)
serial.write(0) # min throttle
serial.write(2000) # max yaw
serial.write(1000) # min pitch
serial.write(1000) # min roll
serial.write(1000) # aux1
serial.write(1000) # aux2
serial.write(1000) # aux3
serial.write(1000) # aux4
serial.write(checksum)
# rc_signals[THROTTLE] = RC_MIN;
# rc_signals[YAW] = RC_MAX;
# rc_signals[PITCH] = RC_MID;
# rc_signals[ROLL] = RC_MID;
# rc_signals[AUX1] = RC_MIN;
# rc_signals[AUX2] = RC_MIN;
arm()
| #define POLL_PERIOD 20
serial = Runtime.start("serial","Serial")
COMPORT= "COM19"
BAUDRATE = 9600
#define MSP_SET_RAW_RC 200
#define MSP_SET_RAW_RC_LENGTH 16
RC_MIN = 1000
RC_MID = 1500
RC_MAX = 2000
ROLL = 0
PITCH = 1
YAW = 2
THROTTLE = 3
AUX1 = 4
AUX2 = 5
AUX3 = 6
AUX4 = 7
#RC signals to send to the quad
#format: { roll, throttle, yaw, pitch, 0, 0, 0, 0 }
uint16_t rc_signals[8] = { 1234 };
#Buffer for storing the serializes byte form of the RC signals
uint8_t rc_bytes[16] = { 0 };
serial.connect(COMPORT,BAUDRATE,8,1,0);
send_msp(MSP_SET_RAW_RC, rc_bytes, MSP_SET_RAW_RC_LENGTH);
def arm():
msgType = 200 # MSP_SET_RAW_RC
# header $M<
serial.write('$')
serial.write('M')
serial.write('<')
serial.write(msgType)
serial.write(MsgLength??)
serial.write(throttleValue)
serial.write(yawValue)
serial.write(pitchValue)
serial.write(rollValue)
serial.write(aux1)
serial.write(aux2)
serial.write(checksum)
# rc_signals[THROTTLE] = RC_MIN;
# rc_signals[YAW] = RC_MAX;
# rc_signals[PITCH] = RC_MID;
# rc_signals[ROLL] = RC_MID;
# rc_signals[AUX1] = RC_MIN;
# rc_signals[AUX2] = RC_MIN;
arm()
| apache-2.0 | Python |
62a04170e41d599d653e94afe0844576e175c314 | Add comments on variable settings | AustralianAntarcticDataCentre/save_emails_to_files,AustralianAntarcticDataCentre/save_emails_to_files | settings_example.py | settings_example.py | import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
| import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
| mit | Python |
ce1c46ecbf5623ec99c8fc2871d0cb35c5dc9100 | Update sshkey.py | jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi | setup/ssh/sshkey.py | setup/ssh/sshkey.py | # -*- coding: utf-8 -*-
# Author : jeonghoonkang, https://github.com/jeonghoonkang
from __future__ import print_function
import subprocess
import os
import sys
if __name__ == '__main__':
print ("usage : python sshkey.py {ID@IP_ADD} {PORT}, {} : user should input")
if len(sys.argv) < 2:
exit("[bye] you need to input args")
arg1 = sys.argv[1]
arg2 = sys.argv[2]
arg3 = sys.argv[3]
ip = arg1
port = arg2
id = arg3
print ("... start running", " inputs are ", ip, port, id)
print ("... key generating")
os.system('ssh-keygen')
print ("... entering copying security file")
run_cmd = "cat ~/.ssh/id_rsa.pub"
run_cmd += " | ssh -p %s %s@%s" %(port, id, ip)
run_cmd += " 'cat>>/home/%s/.ssh/authorized_keys'" %id
print (run_cmd)
os.system(run_cmd)
''' 아래 코드는 동작을 안함. 확인 필요 '''
#ret = subprocess.check_output(run_cmd)
#print (ret)
| # -*- coding: utf-8 -*-
# Author : jeonghoonkang, https://github.com/jeonghoonkang
from __future__ import print_function
import subprocess
import os
import sys
if __name__ == '__main__':
print ("usage : python sshkey.py {IP_ADD} {PORT}, {} : user should input")
if len(sys.argv) < 2:
exit("[bye] you need to input args")
arg1 = sys.argv[1]
arg2 = sys.argv[2]
arg3 = sys.argv[3]
ip = arg1
port = arg2
id = arg3
print ("... start running", " inputs are ", ip, port, id)
print ("... key generating")
os.system('ssh-keygen')
print ("... entering copying security file")
run_cmd = "cat ~/.ssh/id_rsa.pub"
run_cmd += " | ssh -p %s %s@%s" %(port, id, ip)
run_cmd += " 'cat>>/home/%s/.ssh/authorized_keys'" %id
print (run_cmd)
os.system(run_cmd)
''' 아래 코드는 동작을 안함. 확인 필요 '''
#ret = subprocess.check_output(run_cmd)
#print (ret)
| bsd-2-clause | Python |
0df211c91ce899e34bfb5691dde0e0ba1a78f36d | Add fiware-ut-12 tag to the test_objectstorage_available_to_widgets | rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud | src/wirecloud/fiware/tests/selenium.py | src/wirecloud/fiware/tests/selenium.py | from wirecloud.commons.utils.testcases import WirecloudSeleniumTestCase
__test__ = False
class FiWareSeleniumTestCase(WirecloudSeleniumTestCase):
def test_add_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
test_add_fiware_marketplace.tags = ('fiware-ut-8',)
def test_delete_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
self.delete_marketplace('fiware')
test_delete_fiware_marketplace.tags = ('fiware-ut-8',)
def test_ngsi_available_to_widgets(self):
self.login()
resource = self.add_packaged_resource_to_catalogue('Wirecloud_ngsi-test-widget_1.0.wgt', 'Wirecloud NGSI API test widget')
iwidget = self.instantiate(resource)
with iwidget:
api_element = self.driver.find_element_by_id('api_available')
self.assertEqual(api_element.text, 'Yes')
test_ngsi_available_to_widgets.tags = ('fiware-ut-7',)
def test_objectstorage_available_to_widgets(self):
self.login()
resource = self.add_packaged_resource_to_catalogue('Wirecloud_objectstorage-test-widget_1.0.wgt', 'Wirecloud Object Storage API test widget')
iwidget = self.instantiate(resource)
with iwidget:
api_element = self.driver.find_element_by_id('api_available')
self.assertEqual(api_element.text, 'Yes')
test_objectstorage_available_to_widgets.tags = ('fiware-ut-12',)
| from wirecloud.commons.utils.testcases import WirecloudSeleniumTestCase
__test__ = False
class FiWareSeleniumTestCase(WirecloudSeleniumTestCase):
def test_add_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
test_add_fiware_marketplace.tags = ('fiware-ut-8',)
def test_delete_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
self.delete_marketplace('fiware')
test_delete_fiware_marketplace.tags = ('fiware-ut-8',)
def test_ngsi_available_to_widgets(self):
self.login()
resource = self.add_packaged_resource_to_catalogue('Wirecloud_ngsi-test-widget_1.0.wgt', 'Wirecloud NGSI API test widget')
iwidget = self.instantiate(resource)
with iwidget:
api_element = self.driver.find_element_by_id('api_available')
self.assertEqual(api_element.text, 'Yes')
test_ngsi_available_to_widgets.tags = ('fiware-ut-7',)
def test_objectstorage_available_to_widgets(self):
self.login()
resource = self.add_packaged_resource_to_catalogue('Wirecloud_objectstorage-test-widget_1.0.wgt', 'Wirecloud Object Storage API test widget')
iwidget = self.instantiate(resource)
with iwidget:
api_element = self.driver.find_element_by_id('api_available')
self.assertEqual(api_element.text, 'Yes')
| agpl-3.0 | Python |
2078ba357b331f071e1bd36e71ed69ed68cceb1d | Add todo annotation. | dennybaa/st2,pixelrebel/st2,pixelrebel/st2,peak6/st2,Plexxi/st2,pixelrebel/st2,punalpatel/st2,punalpatel/st2,peak6/st2,nzlosh/st2,StackStorm/st2,emedvedev/st2,Plexxi/st2,Plexxi/st2,lakshmi-kannan/st2,tonybaloney/st2,dennybaa/st2,peak6/st2,dennybaa/st2,lakshmi-kannan/st2,StackStorm/st2,Plexxi/st2,tonybaloney/st2,tonybaloney/st2,nzlosh/st2,nzlosh/st2,lakshmi-kannan/st2,punalpatel/st2,emedvedev/st2,nzlosh/st2,emedvedev/st2,StackStorm/st2,StackStorm/st2 | st2actions/st2actions/runners/utils.py | st2actions/st2actions/runners/utils.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging as stdlib_logging
from st2common import log as logging
__all__ = [
'get_logger_for_python_runner_action',
'get_action_class_instance'
]
LOG = logging.getLogger(__name__)
def get_logger_for_python_runner_action(action_name):
"""
Set up a logger which logs all the messages with level DEBUG and above to stderr.
"""
logger_name = 'actions.python.%s' % (action_name)
logger = logging.getLogger(logger_name)
console = stdlib_logging.StreamHandler()
console.setLevel(stdlib_logging.DEBUG)
formatter = stdlib_logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
logger.addHandler(console)
logger.setLevel(stdlib_logging.DEBUG)
return logger
def get_action_class_instance(action_cls, config=None, action_service=None):
"""
Instantiate and return Action class instance.
:param action_cls: Action class to instantiate.
:type action_cls: ``class``
:param config: Config to pass to the action class.
:type config: ``dict``
:param action_service: ActionService instance to pass to the class.
:type action_service: :class:`ActionService`
"""
kwargs = {}
kwargs['config'] = config
kwargs['action_service'] = action_service
# Note: This is done for backward compatibility reasons. We first try to pass
# "action_service" argument to the action class constructor, but if that doesn't work (e.g. old
# action which hasn't been updated yet), we resort to late assignment post class instantiation.
# TODO: Remove in next major version once all the affected actions have been updated.
try:
action_instance = action_cls(**kwargs)
except TypeError as e:
if 'unexpected keyword argument \'action_service\'' not in str(e):
raise e
LOG.debug('Action class constructor doesn\'t take "action_service" argument, '
'falling back to late assignment...')
action_service = kwargs.pop('action_service', None)
action_instance = action_cls(**kwargs)
action_instance.action_service = action_service
return action_instance
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging as stdlib_logging
from st2common import log as logging
__all__ = [
'get_logger_for_python_runner_action',
'get_action_class_instance'
]
LOG = logging.getLogger(__name__)
def get_logger_for_python_runner_action(action_name):
"""
Set up a logger which logs all the messages with level DEBUG and above to stderr.
"""
logger_name = 'actions.python.%s' % (action_name)
logger = logging.getLogger(logger_name)
console = stdlib_logging.StreamHandler()
console.setLevel(stdlib_logging.DEBUG)
formatter = stdlib_logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
logger.addHandler(console)
logger.setLevel(stdlib_logging.DEBUG)
return logger
def get_action_class_instance(action_cls, config=None, action_service=None):
"""
Instantiate and return Action class instance.
:param action_cls: Action class to instantiate.
:type action_cls: ``class``
:param config: Config to pass to the action class.
:type config: ``dict``
:param action_service: ActionService instance to pass to the class.
:type action_service: :class:`ActionService`
"""
kwargs = {}
kwargs['config'] = config
kwargs['action_service'] = action_service
# Note: This is done for backward compatibility reasons. We first try to pass
# "action_service" argument to the action class constructor, but if that doesn't work
# (e.g. old action which hasn't been updated yet), we resort to late assignment.
try:
action_instance = action_cls(**kwargs)
except TypeError as e:
if 'unexpected keyword argument \'action_service\'' not in str(e):
raise e
LOG.debug('Action class constructor doesn\'t take "action_service" argument, '
'falling back to late assignment...')
action_service = kwargs.pop('action_service', None)
action_instance = action_cls(**kwargs)
action_instance.action_service = action_service
return action_instance
| apache-2.0 | Python |
0e345fb7624e5d30e7e689bb3d6a05dfe7193d13 | Convert major and minor versions to integers for version matching | celery/kombu,mverrilli/kombu,numb3r3/kombu,daevaorn/kombu,bmbouter/kombu,cce/kombu,Elastica/kombu,tkanemoto/kombu,numb3r3/kombu,Elastica/kombu,jindongh/kombu,jindongh/kombu,mverrilli/kombu,ZoranPavlovic/kombu,bmbouter/kombu,ZoranPavlovic/kombu,cce/kombu,tkanemoto/kombu,iris-edu-int/kombu,daevaorn/kombu,iris-edu-int/kombu,urbn/kombu | kombu/utils/text.py | kombu/utils/text.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from difflib import SequenceMatcher
from kombu import version_info_t
def fmatch_iter(needle, haystack, min_ratio=0.6):
for key in haystack:
ratio = SequenceMatcher(None, needle, key).ratio()
if ratio >= min_ratio:
yield ratio, key
def fmatch_best(needle, haystack, min_ratio=0.6):
try:
return sorted(
fmatch_iter(needle, haystack, min_ratio), reverse=True,
)[0][1]
except IndexError:
pass
def version_string_as_tuple(s):
v = _unpack_version(*s.split('.'))
# X.Y.3a1 -> (X, Y, 3, 'a1')
if not v.releaselevel and v.micro:
v = version_info_t(v.major, v.minor, *_splitmicro(v.micro))
# X.Y.3a1-40 -> (X, Y, 3, 'a1', '40')
if not v.serial and v.releaselevel and '-' in v.releaselevel:
v = version_info_t(*list(v[0:3]) + v.releaselevel.split('-'))
return v
def _unpack_version(major, minor=0, micro=0, releaselevel='', serial=''):
return version_info_t(int(major), int(minor), micro, releaselevel, serial)
def _splitmicro(s):
for index, char in enumerate(s):
if not char.isdigit():
break
else:
return s, '', ''
return s[:index], s[index:], ''
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from difflib import SequenceMatcher
from kombu import version_info_t
def fmatch_iter(needle, haystack, min_ratio=0.6):
for key in haystack:
ratio = SequenceMatcher(None, needle, key).ratio()
if ratio >= min_ratio:
yield ratio, key
def fmatch_best(needle, haystack, min_ratio=0.6):
try:
return sorted(
fmatch_iter(needle, haystack, min_ratio), reverse=True,
)[0][1]
except IndexError:
pass
def version_string_as_tuple(s):
v = _unpack_version(*s.split('.'))
# X.Y.3a1 -> (X, Y, 3, 'a1')
if not v.releaselevel and v.micro:
v = version_info_t(v.major, v.minor, *_splitmicro(v.micro))
# X.Y.3a1-40 -> (X, Y, 3, 'a1', '40')
if not v.serial and v.releaselevel and '-' in v.releaselevel:
v = version_info_t(*list(v[0:3]) + v.releaselevel.split('-'))
return v
def _unpack_version(major, minor=0, micro=0, releaselevel='', serial=''):
return version_info_t(major, minor, micro, releaselevel, serial)
def _splitmicro(s):
for index, char in enumerate(s):
if not char.isdigit():
break
else:
return s, '', ''
return s[:index], s[index:], ''
| bsd-3-clause | Python |
0fabc8d522c07c7f64557d840b53ecb723a7b07f | Rename char | muddyfish/PYKE,muddyfish/PYKE | node/rickroll.py | node/rickroll.py | #!/usr/bin/env python
from nodes import Node
class RickRoll(Node):
char = "Never gonna give you up"
args = 0
results = 1
def func(self):
"""Redirect to RickRoll"""
return "Never gonna let you down" | #!/usr/bin/env python
from nodes import Node
class RickRoll(Node):
char = "never gonna give you up"
args = 0
results = 1
def func(self):
"""Redirect to RickRoll"""
return "Never gonna let you down" | mit | Python |
c8be023cb26e34fd1a1055b11b320f570c87b4ab | Fix undefined version | mkukielka/oddt,oddt/oddt,mkukielka/oddt,oddt/oddt | oddt/__init__.py | oddt/__init__.py | """Open Drug Discovery Toolkit
==============================
Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring.
Attributes
----------
toolkit : module,
Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk].
This setting is toolkit-wide, and sets given toolkit as default
"""
import os, subprocess
from numpy.random import seed as np_seed
from random import seed as python_seed
try:
from .toolkits import ob
except ImportError:
ob = None
try:
from .toolkits import rdk
except ImportError:
rdk = None
if ob:
toolkit = ob
elif rdk:
toolkit = rdk
else:
raise Exception('You need at least one toolkit for ODDT.')
def get_version():
home = os.path.dirname(__file__)
v = None
if os.path.isdir(home + '/../.git'):
v = subprocess.check_output(['git', 'describe', '--tags'], cwd=home).strip()
if not v:
v = '0.1.3'
return v
__version__ = get_version()
__all__ = ['toolkit']
def random_seed(i):
"""
Set global random seed for all underlying components. Use 'brute-force' approach, by setting undelying libraries' seeds.
Parameters
----------
i: int
integer used as seed for random number generators
"""
# python's random module
python_seed(i)
# numpy random module
np_seed(i)
| """Open Drug Discovery Toolkit
==============================
Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring.
Attributes
----------
toolkit : module,
Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk].
This setting is toolkit-wide, and sets given toolkit as default
"""
import os, subprocess
from numpy.random import seed as np_seed
from random import seed as python_seed
try:
from .toolkits import ob
except ImportError:
ob = None
try:
from .toolkits import rdk
except ImportError:
rdk = None
if ob:
toolkit = ob
elif rdk:
toolkit = rdk
else:
raise Exception('You need at least one toolkit for ODDT.')
def get_version():
home = os.path.dirname(__file__)
if os.path.isdir(home + '/../.git'):
v = subprocess.check_output(['git', 'describe', '--tags'], cwd=home).strip()
if not v:
v = '0.1.3'
return v
__version__ = get_version()
__all__ = ['toolkit']
def random_seed(i):
"""
Set global random seed for all underlying components. Use 'brute-force' approach, by setting undelying libraries' seeds.
Parameters
----------
i: int
integer used as seed for random number generators
"""
# python's random module
python_seed(i)
# numpy random module
np_seed(i)
| bsd-3-clause | Python |
0cf45657c01349b6304470e0ee7349e9f62874f5 | Bump version 0.19.0rc3 --> 0.19.0rc4 | lbryio/lbry,lbryio/lbry,lbryio/lbry | lbrynet/__init__.py | lbrynet/__init__.py | import logging
__version__ = "0.19.0rc4"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| import logging
__version__ = "0.19.0rc3"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| mit | Python |
4bcf0ab7fb40902b49b877f41fc4dbbb7d5b7c56 | Bump version 0.20.2rc1 --> 0.20.2rc2 | lbryio/lbry,lbryio/lbry,lbryio/lbry | lbrynet/__init__.py | lbrynet/__init__.py | import logging
__version__ = "0.20.2rc2"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| import logging
__version__ = "0.20.2rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| mit | Python |
e83ff4772251d5c997ddcdc70686c898ea454c82 | Bump version 0.20.1rc1 --> 0.20.1rc2 | lbryio/lbry,lbryio/lbry,lbryio/lbry | lbrynet/__init__.py | lbrynet/__init__.py | import logging
__version__ = "0.20.1rc2"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| import logging
__version__ = "0.20.1rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.