commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
92cecda5aa82165dd45a62b57c57574ad65fdb35 | remove leftovers from flake8 copy i started with | TimYi/pybuilder,Designist/pybuilder,arcivanov/pybuilder,arcivanov/pybuilder,locolupo/pybuilder,Designist/pybuilder,alex-dow/pybuilder,Danielweber7624/pybuilder,Danielweber7624/pybuilder,paolodedios/pybuilder,elkingtonmcb/pybuilder,pybuilder/pybuilder,locolupo/pybuilder,paolodedios/pybuilder,esc/pybuilder,onesfreedom/pybuilder,elkingtonmcb/pybuilder,esc/pybuilder,pybuilder/pybuilder,TimYi/pybuilder,paolodedios/pybuilder,esc/pybuilder,arcivanov/pybuilder,pybuilder/pybuilder,onesfreedom/pybuilder,alex-dow/pybuilder | src/main/python/pybuilder/plugins/python/cram_plugin.py | src/main/python/pybuilder/plugins/python/cram_plugin.py | # cram Plugin for PyBuilder
#
# Copyright 2011-2014 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Cram plugin.
"""
__author__ = 'Valentin Haenel'
from pybuilder.core import after, task, init, use_plugin, depends
from pybuilder.errors import BuildFailedException
from pybuilder.utils import assert_can_execute, discover_files_matching, read_file
from pybuilder.plugins.python.python_plugin_helper import execute_command
use_plugin("python.core")
DIR_SOURCE_CMDLINETEST = 'dir_source_cmdlinetest'
@init
def initialize_cram_plugin(project):
project.build_depends_on("cram")
#project.set_property("cram_measure_coverage", False)
project.set_property_if_unset(
DIR_SOURCE_CMDLINETEST, "src/cmdlinetest")
@after("prepare")
def assert_cram_is_executable(logger):
""" Asserts that the cram script is executable. """
logger.debug("Checking if cram is executable.")
assert_can_execute(command_and_arguments=["cram", "--version"],
prerequisite="cram",
caller="plugin python.cram")
@task
@depends("prepare")
def cram(project, logger):
logger.info("Running Cram tests")
command_and_arguments = ["cram"]
cram_dir = project.get_property(DIR_SOURCE_CMDLINETEST)
cram_files = discover_files_matching(cram_dir, '*.cram')
command_and_arguments.extend(cram_files)
report_file = project.expand_path("$dir_reports/{0}".format('cram.err'))
execution_result = execute_command(command_and_arguments, report_file), report_file
report = read_file(report_file)
result = report[-1][2:].strip()
if execution_result[0] != 0:
logger.error("Cram tests failed!")
logger.error(result)
logger.error("See: '{0}' for details".format(report_file))
raise BuildFailedException("Cram tests failed!")
else:
logger.info("Cram tests were fine")
logger.info(result)
| # cram Plugin for PyBuilder
#
# Copyright 2011-2014 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Cram plugin.
"""
__author__ = 'Valentin Haenel'
from pybuilder.core import after, task, init, use_plugin, depends
from pybuilder.errors import BuildFailedException
from pybuilder.utils import assert_can_execute, discover_files_matching, read_file
from pybuilder.plugins.python.python_plugin_helper import execute_command
use_plugin("python.core")
DIR_SOURCE_CMDLINETEST = 'dir_source_cmdlinetest'
@init
def initialize_cram_plugin(project):
project.build_depends_on("cram")
#project.set_property("cram_measure_coverage", False)
project.set_property_if_unset(
DIR_SOURCE_CMDLINETEST, "src/cmdlinetest")
#project.set_property("flake8_max_line_length", 120)
#project.set_property("flake8_exclude_patterns", None)
#project.set_property("flake8_include_test_sources", False)
@after("prepare")
def assert_cram_is_executable(logger):
""" Asserts that the cram script is executable. """
logger.debug("Checking if cram is executable.")
assert_can_execute(command_and_arguments=["cram", "--version"],
prerequisite="cram",
caller="plugin python.cram")
@task
@depends("prepare")
def cram(project, logger):
logger.info("Running Cram tests")
command_and_arguments = ["cram"]
cram_dir = project.get_property(DIR_SOURCE_CMDLINETEST)
cram_files = discover_files_matching(cram_dir, '*.cram')
command_and_arguments.extend(cram_files)
report_file = project.expand_path("$dir_reports/{0}".format('cram.err'))
execution_result = execute_command(command_and_arguments, report_file), report_file
report = read_file(report_file)
result = report[-1][2:].strip()
if execution_result[0] != 0:
logger.error("Cram tests failed!")
logger.error(result)
logger.error("See: '{0}' for details".format(report_file))
raise BuildFailedException("Cram tests failed!")
else:
logger.info("Cram tests were fine")
logger.info(result)
| apache-2.0 | Python |
3a7d154f6561f9b3c8db4620049244d5ba74e06a | Adjust formatting to match the lint check. | jazzband/django-axes | axes/middleware.py | axes/middleware.py | from typing import Callable
from django.conf import settings
from axes.helpers import (
get_lockout_response,
get_failure_limit,
get_client_username,
get_credentials,
)
from axes.handlers.proxy import AxesProxyHandler
class AxesMiddleware:
"""
Middleware that calculates necessary HTTP request attributes for attempt monitoring
and maps lockout signals into readable HTTP 403 Forbidden responses.
If a project uses ``django rest framework`` then the middleware updates the
request and checks whether the limit has been exceeded. It's needed only
for integration with DRF because it uses its own request object.
This middleware recognizes a logout monitoring flag in the request and
and uses the ``axes.helpers.get_lockout_response`` handler for returning
customizable and context aware lockout message to the end user if necessary.
To customize the lockout handling behaviour further, you can subclass this middleware
and change the ``__call__`` method to your own liking.
Please see the following configuration flags before customizing this handler:
- ``AXES_LOCKOUT_TEMPLATE``,
- ``AXES_LOCKOUT_URL``,
- ``AXES_COOLOFF_MESSAGE``, and
- ``AXES_PERMALOCK_MESSAGE``.
"""
def __init__(self, get_response: Callable):
self.get_response = get_response
def __call__(self, request):
response = self.get_response(request)
if settings.AXES_ENABLED:
if "rest_framework" in settings.INSTALLED_APPS:
AxesProxyHandler.update_request(request)
username = get_client_username(request)
credentials = get_credentials(username)
failures_since_start = AxesProxyHandler.get_failures(
request, credentials
)
if (
settings.AXES_LOCK_OUT_AT_FAILURE
and failures_since_start >= get_failure_limit(request, credentials)
):
request.axes_locked_out = True
if getattr(request, "axes_locked_out", None):
response = get_lockout_response(request) # type: ignore
return response
| from typing import Callable
from django.conf import settings
from axes.helpers import (
get_lockout_response,
get_failure_limit,
get_client_username,
get_credentials,
)
from axes.handlers.proxy import AxesProxyHandler
class AxesMiddleware:
"""
Middleware that calculates necessary HTTP request attributes for attempt monitoring
and maps lockout signals into readable HTTP 403 Forbidden responses.
If a project uses ``django rest framework`` then the middleware updates the
request and checks whether the limit has been exceeded. It's needed only
for integration with DRF because it uses its own request object.
This middleware recognizes a logout monitoring flag in the request and
and uses the ``axes.helpers.get_lockout_response`` handler for returning
customizable and context aware lockout message to the end user if necessary.
To customize the lockout handling behaviour further, you can subclass this middleware
and change the ``__call__`` method to your own liking.
Please see the following configuration flags before customizing this handler:
- ``AXES_LOCKOUT_TEMPLATE``,
- ``AXES_LOCKOUT_URL``,
- ``AXES_COOLOFF_MESSAGE``, and
- ``AXES_PERMALOCK_MESSAGE``.
"""
def __init__(self, get_response: Callable):
self.get_response = get_response
def __call__(self, request):
response = self.get_response(request)
if settings.AXES_ENABLED:
if "rest_framework" in settings.INSTALLED_APPS:
AxesProxyHandler.update_request(request)
username = get_client_username(request)
credentials = get_credentials(username)
failures_since_start = AxesProxyHandler.get_failures(request, credentials)
if (
settings.AXES_LOCK_OUT_AT_FAILURE
and failures_since_start >= get_failure_limit(request, credentials)
):
request.axes_locked_out = True
if getattr(request, "axes_locked_out", None):
response = get_lockout_response(request) # type: ignore
return response
| mit | Python |
40f4c4ee43c3625b8aec81f37b221f500b047b87 | add comment | ingadhoc/account-financial-tools | base_currency_inverse_rate/models/res_currency_rate.py | base_currency_inverse_rate/models/res_currency_rate.py | ##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api
class ResCurrencyRate(models.Model):
_inherit = "res.currency.rate"
inverse_rate = fields.Float(
'Inverse Rate', digits=(12, 4),
compute='_compute_inverse_rate',
inverse='_inverse_inverse_rate',
help='The rate of the currency from the currency of rate 1',
)
# we add more digits because we usually use argentinian currency as base
# currency and this way better rate could be achived, for eg "37,4000"
# would give differences on amounts like 2000 USD
# TODO this is not a good solution and we should improve it, perhups not
# to use arg as base currency but we should change method where we get
# rate from afip
rate = fields.Float(digits=(7, 9))
@api.multi
@api.depends('rate')
def _compute_inverse_rate(self):
for rec in self:
rec.inverse_rate = rec.rate and (1.0 / (rec.rate))
@api.multi
def _inverse_inverse_rate(self):
for rec in self:
rec.rate = rec.inverse_rate and (1.0 / (rec.inverse_rate))
| ##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api
class ResCurrencyRate(models.Model):
_inherit = "res.currency.rate"
inverse_rate = fields.Float(
'Inverse Rate', digits=(12, 4),
compute='_compute_inverse_rate',
inverse='_inverse_inverse_rate',
help='The rate of the currency from the currency of rate 1',
)
rate = fields.Float(digits=(7, 9))
@api.multi
@api.depends('rate')
def _compute_inverse_rate(self):
for rec in self:
rec.inverse_rate = rec.rate and (1.0 / (rec.rate))
@api.multi
def _inverse_inverse_rate(self):
for rec in self:
rec.rate = rec.inverse_rate and (1.0 / (rec.inverse_rate))
| agpl-3.0 | Python |
e8345b44b1e05adbb48f7ef0e2ef2201196ad06c | remove blank line in the end of file of model perceptron. | ntucllab/libact,ntucllab/libact,ntucllab/libact | libact/models/perceptron.py | libact/models/perceptron.py | from libact.base.interfaces import Model
import sklearn.linear_model
"""
A interface for scikit-learn's perceptron model
"""
class Perceptron(Model):
def __init__(self, *args, **kwargs):
self.model = sklearn.linear_model.Perceptron(*args, **kwargs)
def fit(self, dataset, *args, **kwargs):
return self.model.fit(*(dataset.format_sklearn() + args), **kwargs)
def predict(self, feature, *args, **kwargs):
return self.model.predict(feature, *args, **kwargs)
def score(self, testing_dataset, *args, **kwargs):
return self.model.score(*(testing_dataset.format_sklearn() + args), **kwargs)
| from libact.base.interfaces import Model
import sklearn.linear_model
"""
A interface for scikit-learn's perceptron model
"""
class Perceptron(Model):
def __init__(self, *args, **kwargs):
self.model = sklearn.linear_model.Perceptron(*args, **kwargs)
def fit(self, dataset, *args, **kwargs):
return self.model.fit(*(dataset.format_sklearn() + args), **kwargs)
def predict(self, feature, *args, **kwargs):
return self.model.predict(feature, *args, **kwargs)
def score(self, testing_dataset, *args, **kwargs):
return self.model.score(*(testing_dataset.format_sklearn() + args), **kwargs)
| bsd-2-clause | Python |
a9a1a5fda5ff9633cba362d79110e816eea4265a | add def greeting(msg) | erc7as/cs3240-labdemo | hello.py | hello.py | __author__ = 'erc7as'
def greeting(msg):
print(msg)
greeting('hello') | __author__ = 'erc7as'
print('hello') | mit | Python |
f06a406772d73a9ec9b61185b227341a0059abea | Update altair/examples/natural_disasters.py | altair-viz/altair | altair/examples/natural_disasters.py | altair/examples/natural_disasters.py | """
Global Deaths from Natural Disasters
------------------------------------
This example shows a proportional symbols visualization of deaths from natural disasters by year and type.
"""
# category: case studies
import altair as alt
from vega_datasets import data
source = data.disasters.url
alt.Chart(source).transform_filter(
alt.datum.Entity != 'All natural disasters'
).mark_circle(
opacity=0.8,
stroke='black',
strokeWidth=1,
strokeOpacity=0.4
).encode(
x=alt.X('Year:T', title=None, scale=alt.Scale(domain=['1899','2018'])),
y=alt.Y(
'Entity:N',
sort=alt.EncodingSortField(field="Deaths", op="sum", order='descending'),
title=None
),
size=alt.Size('Deaths:Q',
scale=alt.Scale(range=[0, 2500]),
legend=alt.Legend(title='Deaths', clipHeight=30, format='s')
),
color=alt.Color('Entity:N', legend=None),
tooltip=[
"Entity:N",
alt.Tooltip("Year:T", format='%Y'),
alt.Tooltip("Deaths:Q", format='~s')
],
).properties(
width=450,
height=320,
title=alt.TitleParams(
text="Global Deaths from Natural Disasters (1900-2017)",
subtitle="The size of the bubble represents the total death count per year, by type of disaster",
anchor='start'
)
).configure_axis(
domain=False,
ticks=False,
offset=10
).configure_axisX(
labelOverlap=True,
).configure_view(
stroke=None
)
| """
Global Deaths from Natural Disasters
------------------------------------
This example shows a proportional symbols visualization of deaths from natural disasters by year and type.
"""
# category: case studies
import altair as alt
from vega_datasets import data
source = data.disasters.url
alt.Chart(source).transform_filter(
alt.datum.Entity != 'All natural disasters'
).mark_circle(
opacity=0.8,
stroke='black',
strokeWidth=1,
strokeOpacity=0.4
).encode(
x=alt.X('Year:T', title=None, scale=alt.Scale(domain=['1899','2018'])),
y=alt.Y(
'Entity:N',
sort=alt.EncodingSortField(field="Deaths", op="sum", order='descending'),
title=None
),
size=alt.Size('Deaths:Q',
scale=alt.Scale(range=[0, 2500]),
legend=alt.Legend(title='Deaths', clipHeight=30, format='s')
),
color=alt.Color('Entity:N', legend=None),
tooltip=[
"Entity:N",
alt.Tooltip("Year:T", format='%Y'),
alt.Tooltip("Deaths:Q", format='~s')
],
).properties(
width=450,
height=320,
title=alt.TitleParams(
text="Global Deaths from Natural Disasters (1900-2017)",
subtitle="The size of the bubble represents the total death count per year, by type of disaster",
anchor='start'
)
).configure_axis(
domain=False,
ticks=False,
).configure_axisX(
labelOverlap=True,
).configure_view(
stroke=None
)
| bsd-3-clause | Python |
37260f9e8618a63e7e1be695bfa7766e3cfa4418 | Use DummyProcess() not to raise exceptions. | kcaa/kcaa,kcaa/kcaa,kcaa/kcaa,kcaa/kcaa | server/controller.py | server/controller.py | #!/usr/bin/env python
import logging
import multiprocessing
import time
import traceback
import browser
import kcsapi_util
import proxy_util
class DummyProcess(object):
def join(self):
pass
def control(args, server_conn, to_exit):
# It seems that uncaught exceptions are silently buffered after creating
# another multiprocessing.Process.
pk = DummyProcess()
pc = DummyProcess()
try:
logger = logging.getLogger('kcaa.controller')
har_manager = proxy_util.HarManager(args, 3.0)
# HarManager first resets the proxy. Notify the server that it's done.
server_conn.send(True)
if not server_conn.poll(3.0):
logger.error('Server is not responding. Shutting down.')
to_exit.set()
return
root_url = server_conn.recv()
pk = multiprocessing.Process(target=browser.setup_kancolle_browser,
args=(args, to_exit))
pc = multiprocessing.Process(target=browser.setup_kcaa_browser,
args=(args, root_url, to_exit))
pk.start()
pc.start()
kcsapi_handler = kcsapi_util.KcsapiHandler(har_manager)
while True:
time.sleep(0.1)
if to_exit.wait(0.0):
logger.error('Controller got an exit signal. Shutting down.')
break
for obj in kcsapi_handler.get_updated_objects():
server_conn.send((obj.object_type, obj.data))
except:
traceback.print_exc()
to_exit.set()
pk.join()
pc.join()
| #!/usr/bin/env python
import logging
import multiprocessing
import time
import traceback
import browser
import kcsapi_util
import proxy_util
def control(args, server_conn, to_exit):
# It seems that uncaught exceptions are silently buffered after creating
# another multiprocessing.Process.
try:
logger = logging.getLogger('kcaa.controller')
har_manager = proxy_util.HarManager(args, 3.0)
# HarManager first resets the proxy. Notify the server that it's done.
server_conn.send(True)
if not server_conn.poll(3.0):
logger.error('Server is not responding. Shutting down.')
to_exit.set()
return
root_url = server_conn.recv()
pk = multiprocessing.Process(target=browser.setup_kancolle_browser,
args=(args, to_exit))
pc = multiprocessing.Process(target=browser.setup_kcaa_browser,
args=(args, root_url, to_exit))
pk.start()
pc.start()
kcsapi_handler = kcsapi_util.KcsapiHandler(har_manager)
while True:
time.sleep(0.1)
if to_exit.wait(0.0):
logger.error('Controller got an exit signal. Shutting down.')
break
for obj in kcsapi_handler.get_updated_objects():
server_conn.send((obj.object_type, obj.data))
except:
traceback.print_exc()
to_exit.set()
pk.join()
pc.join()
| apache-2.0 | Python |
3300a66a421d5110093c02011c305d501daf069a | Fix search_fields | HerraLampila/django-newswall,registerguard/django-newswall,michaelkuty/django-newswall,matthiask/django-newswall,registerguard/django-newswall,michaelkuty/django-newswall,matthiask/django-newswall,HerraLampila/django-newswall | newswall/admin.py | newswall/admin.py | from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_editable=('is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_editable=('is_active',),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'body'),
)
| from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_editable=('is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_editable=('is_active',),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
| bsd-3-clause | Python |
6ecb6a784723048dbdc36082ffecee284f49c10e | fix setting name | alacritythief/django-rest-auth,serxoz/django-rest-auth,Tivix/django-rest-auth,roopesh90/django-rest-auth,SakuradaJun/django-rest-auth,flexpeace/django-rest-auth,caruccio/django-rest-auth,julioeiras/django-rest-auth,caruccio/django-rest-auth,philippeluickx/django-rest-auth,ZachLiuGIS/django-rest-auth,bopo/django-rest-auth,bung87/django-rest-auth,eugena/django-rest-auth,julioeiras/django-rest-auth,elberthcabrales/django-rest-auth,antetna/django-rest-auth,georgemarshall/django-rest-auth,mjrulesamrat/django-rest-auth,elberthcabrales/django-rest-auth,illing2005/django-rest-auth,citizen-stig/django-rest-auth,SakuradaJun/django-rest-auth,maxim-kht/django-rest-auth,bung87/django-rest-auth,roopesh90/django-rest-auth,jerinzam/django-rest-auth,kdzch/django-rest-auth,antetna/django-rest-auth,gushedaoren/django-rest-auth,citizen-stig/django-rest-auth,aexeagmbh/django-rest-auth,maxim-kht/django-rest-auth,ZachLiuGIS/django-rest-auth,iHub/django-rest-auth,gushedaoren/django-rest-auth,eugena/django-rest-auth,alacritythief/django-rest-auth,serxoz/django-rest-auth,iHub/django-rest-auth,bopo/django-rest-auth,georgemarshall/django-rest-auth,illing2005/django-rest-auth,mjrulesamrat/django-rest-auth,jerinzam/django-rest-auth,flexpeace/django-rest-auth | rest_auth/app_settings.py | rest_auth/app_settings.py | from django.conf import settings
from rest_auth.serializers import (
TokenSerializer as DefaultTokenSerializer,
UserDetailsSerializer as DefaultUserDetailsSerializer,
LoginSerializer as DefaultLoginSerializer,
PasswordResetSerializer as DefaultPasswordResetSerializer,
PasswordResetConfirmSerializer as DefaultPasswordResetConfirmSerializer,
PasswordChangeSerializer as DefaultPasswordChangeSerializer)
from allauth.utils import import_callable
serializers = getattr(settings, 'REST_AUTH_SERIALIZERS', {})
TokenSerializer = import_callable(
serializers.get('TOKEN_SERIALIZER', DefaultTokenSerializer))
UserDetailsSerializer = import_callable(
serializers.get('USER_DETAILS_SERIALIZER', DefaultUserDetailsSerializer)
)
LoginSerializer = import_callable(
serializers.get('LOGIN_SERIALIZER', DefaultLoginSerializer)
)
PasswordResetSerializer = import_callable(
serializers.get('PASSWORD_RESET_SERIALIZER',
DefaultPasswordResetSerializer)
)
PasswordResetConfirmSerializer = import_callable(
serializers.get('PASSWORD_RESET_CONFIRM_SERIALIZER',
DefaultPasswordResetConfirmSerializer)
)
PasswordChangeSerializer = import_callable(
serializers.get('PASSWORD_CHANGE_SERIALIZER',
DefaultPasswordChangeSerializer)
)
| from django.conf import settings
from rest_auth.serializers import (
TokenSerializer as DefaultTokenSerializer,
UserDetailsSerializer as DefaultUserDetailsSerializer,
LoginSerializer as DefaultLoginSerializer,
PasswordResetSerializer as DefaultPasswordResetSerializer,
PasswordResetConfirmSerializer as DefaultPasswordResetConfirmSerializer,
PasswordChangeSerializer as DefaultPasswordChangeSerializer)
from allauth.utils import import_callable
serializers = getattr(settings, 'REST_AUTH_SERIALIZERS', {})
TokenSerializer = import_callable(
serializers.get('TOKEN_SERIALIZER', DefaultTokenSerializer))
UserDetailsSerializer = import_callable(
serializers.get('USER_DETAILS_SERIALIZER', DefaultUserDetailsSerializer)
)
LoginSerializer = import_callable(
serializers.get('LOGIN_SERIALIZER', DefaultLoginSerializer)
)
PasswordResetSerializer = import_callable(
serializers.get('PASSWORD_RESET_SERIALIZER',
DefaultPasswordResetSerializer)
)
PasswordResetConfirmSerializer = import_callable(
serializers.get('PASSWORD_RESET_CONFIRM_SERIALIZER',
DefaultPasswordResetConfirmSerializer)
)
PasswordChangeSerializer = import_callable(
serializers.get('PASSWORD_RESET_SERIALIZER',
DefaultPasswordChangeSerializer)
)
| mit | Python |
d7ffd790976de4db8011d0d2e61ba0b486c098da | split config files into long term support and deprecated ones | Woile/commitizen,Woile/commitizen | commitizen/defaults.py | commitizen/defaults.py | name: str = "cz_conventional_commits"
# TODO: .cz, setup.cfg, .cz.cfg should be removed in 2.0
long_term_support_config_files: list = ["pyproject.toml", ".cz.toml"]
deprcated_config_files: list = [".cz", "setup.cfg", ".cz.cfg"]
config_files: list = long_term_support_config_files + deprcated_config_files
DEFAULT_SETTINGS = {
"name": "cz_conventional_commits",
"version": None,
"version_files": [],
"tag_format": None, # example v$version
"bump_message": None, # bumped v$current_version to $new_version
}
MAJOR = "MAJOR"
MINOR = "MINOR"
PATCH = "PATCH"
bump_pattern = r"^(BREAKING CHANGE|feat|fix|refactor|perf)"
bump_map = {
"BREAKING CHANGE": MAJOR,
"feat": MINOR,
"fix": PATCH,
"refactor": PATCH,
"perf": PATCH,
}
bump_message = "bump: version $current_version → $new_version"
| name: str = "cz_conventional_commits"
# TODO: .cz, setup.cfg, .cz.cfg should be removed in 2.0
config_files: list = ["pyproject.toml", ".cz.toml", ".cz", "setup.cfg", ".cz.cfg"]
DEFAULT_SETTINGS = {
"name": "cz_conventional_commits",
"version": None,
"version_files": [],
"tag_format": None, # example v$version
"bump_message": None, # bumped v$current_version to $new_version
}
MAJOR = "MAJOR"
MINOR = "MINOR"
PATCH = "PATCH"
bump_pattern = r"^(BREAKING CHANGE|feat|fix|refactor|perf)"
bump_map = {
"BREAKING CHANGE": MAJOR,
"feat": MINOR,
"fix": PATCH,
"refactor": PATCH,
"perf": PATCH,
}
bump_message = "bump: version $current_version → $new_version"
| mit | Python |
5e0e22eb6e709eb5291ac50a28b96c2c05909a2d | Return value of get_result is a pair of (task, result data) | dMaggot/ArtistGraph | src/artgraph/miner.py | src/artgraph/miner.py | import pymw
import pymw.interfaces
import artgraph.plugins.infobox
from artgraph.node import NodeTypes
from artgraph.node import Node
class Miner(object):
nodes = []
relationships = []
master = None
task_queue = []
def __init__(self, debug=False):
mwinterface = pymw.interfaces.GenericInterface()
self.master = pymw.PyMW_Master(mwinterface, delete_files=not debug)
def mine(self, artist):
self.mine_internal(Node(artist, NodeTypes.ARTIST))
(finished_task, new_relationships) = self.master.get_result()
while new_relationships:
for n in new_relationships:
self.relationships.append(n)
if n.get_predicate() not in self.nodes:
self.mine_internal(n.get_predicate())
def mine_internal(self, current_node, level=0, parent=None, relationship=None):
self.nodes.append(current_node)
infoboxplugin = artgraph.plugins.infobox.InfoboxPlugin(current_node)
self.task_queue.append(self.master.submit_task(infoboxplugin.get_nodes, input_data=(infoboxplugin,), modules=("artgraph",), data_files=("my.cnf",)))
| import pymw
import pymw.interfaces
import artgraph.plugins.infobox
from artgraph.node import NodeTypes
from artgraph.node import Node
class Miner(object):
nodes = []
relationships = []
master = None
task_queue = []
def __init__(self, debug=False):
mwinterface = pymw.interfaces.GenericInterface()
self.master = pymw.PyMW_Master(mwinterface, delete_files=not debug)
def mine(self, artist):
self.mine_internal(Node(artist, NodeTypes.ARTIST))
new_relationships = self.master.get_result()
while new_relationships:
for n in new_relationships:
self.relationships.append(n)
if n.get_predicate() not in self.nodes:
self.mine_internal(n.get_predicate())
def mine_internal(self, current_node, level=0, parent=None, relationship=None):
self.nodes.append(current_node)
infoboxplugin = artgraph.plugins.infobox.InfoboxPlugin(current_node)
self.task_queue.append(self.master.submit_task(infoboxplugin.get_nodes, input_data=(infoboxplugin,), modules=("artgraph",), data_files=("my.cnf",)))
| mit | Python |
ab9da6108ae2b554bb956daa0b61a6e8678bf5eb | Add Build.__unicode__ | debian-live/live-studio,debian-live/live-studio,debian-live/live-studio | live_studio/build/models.py | live_studio/build/models.py | import uuid
import datetime
from django.db import models
from django.conf import settings
from .managers import BuildManager
class Build(models.Model):
ident = models.CharField(max_length=40, unique=True, default=uuid.uuid4)
config = models.ForeignKey('config.Config', related_name='builds')
enqueued = models.DateTimeField(default=datetime.datetime.now)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
filename = models.CharField(max_length=100)
objects = BuildManager()
class Meta:
ordering = ('-enqueued',)
def __unicode__(self):
return 'Build #%d started by %s (status: %s)' % \
(self.pk, self.config.user.username, self.status())
def status(self):
if self.filename:
return 'success'
if self.finished:
return 'failure'
if self.started:
return 'building'
return 'waiting'
def log_url(self):
return '%s/%s/log.txt' % (settings.BUILDS_URL, self.ident)
def result_url(self):
return '%s/%s/%s' % (settings.BUILDS_URL, self.ident, self.filename)
| import uuid
import datetime
from django.db import models
from django.conf import settings
from .managers import BuildManager
class Build(models.Model):
ident = models.CharField(max_length=40, unique=True, default=uuid.uuid4)
config = models.ForeignKey('config.Config', related_name='builds')
enqueued = models.DateTimeField(default=datetime.datetime.now)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
filename = models.CharField(max_length=100)
objects = BuildManager()
class Meta:
ordering = ('-enqueued',)
def status(self):
if self.filename:
return 'success'
if self.finished:
return 'failure'
if self.started:
return 'building'
return 'waiting'
def log_url(self):
return '%s/%s/log.txt' % (settings.BUILDS_URL, self.ident)
def result_url(self):
return '%s/%s/%s' % (settings.BUILDS_URL, self.ident, self.filename)
| agpl-3.0 | Python |
d1d3109bdeffc229ed897f35ef50554e8a0f5549 | Simplify conditionals | bazelbuild/gmaven_rules,bazelbuild/gmaven_rules,bazelbuild/rules_jvm_external,bazelbuild/rules_jvm_external,bazelbuild/rules_jvm_external | defs.bzl | defs.bzl | def gmaven_artifact(fqn):
parts = fqn.split(":")
packaging = "aar"
if len(parts) == 3:
group_id, artifact_id, version = parts
elif len(parts) == 4:
group_id, artifact_id, version, packaging = parts
elif len(parts) == 5:
_, _, _, _, classifier = parts
fail("Classifiers are currently not supported. Please remove it from the coordinate: %s" % classifier)
else:
fail("Invalid qualified name for artifact: %s" % fqn)
return "@%s_%s_%s//%s" % (
escape(group_id),
escape(artifact_id),
escape(version),
packaging
)
def escape(string):
return string.replace(".", "_")
| def gmaven_artifact(fqn):
parts = fqn.split(":")
packaging = "aar"
if len(parts) < 2 or len(parts) > 5:
fail("Invalid qualified name for artifact: %s" % fqn)
elif len(parts) == 3:
group_id, artifact_id, version = parts
elif len(parts) == 4:
group_id, artifact_id, version, packaging = parts
elif len(parts) == 5:
_, _, _, _, classifier = parts
fail("Classifiers are currently not supported. Please remove it from the coordinate: %s" % classifier)
return "@%s_%s_%s//%s" % (
escape(group_id),
escape(artifact_id),
escape(version),
packaging
)
def escape(string):
return string.replace(".", "_")
| apache-2.0 | Python |
e95cc37db097e13cc0582c0d5429fb5f747f95aa | use alternate com port addressing | andrewramsay/pytia | examples/pytia_sk7imu.py | examples/pytia_sk7imu.py | import time, sys, itertools
from pyshake import *
import pytia
from pytia import TiAServer, TiAConnectionHandler, TiASignalConfig
# Runs a TiA server configured to stream data from an SK7 with 5
# wired IMUs attached
def data_callback(data):
(sig_id, sig_data) = data
sk7_dev = sig_data[0]
# each time the server polls for new data, get the latest sensor readings
# from each individual IMU, construct a list of their accelerometer,
# magnetometer and gyroscope values and pass that back to the server
imudata = sk7_dev.sk7_imus()
vals = [val for imu in imudata for val in [imu.acc, imu.mag, imu.gyro]]
return itertools.chain(*vals)
if __name__ == "__main__":
# connect to the IMUs through the SK7
sk7_dev = ShakeDevice(SHAKE_SK7)
if len(sys.argv) != 2:
print('Usage: pytia_sk7imu.py <port number>')
sys.exit(0)
address = int(sys.argv[1])
print('Connecting to SK7 on port %s...' % address)
if not sk7_dev.connect(address):
print('Failed to connect')
sys.exit(-1)
print('Connected OK!')
# create a TiAServer on localhost:9000
server = TiAServer(('', 9000), TiAConnectionHandler)
# define a single signal for the server to stream to clients. There are 5
# IMUs and each will be providing 9 channels of data (x/y/z accelerometer,
# magnetometer and gyroscope), so that's 45 in total. The server will poll
# for new data at 100Hz, packets will be sent in blocks of 1 and the callback
# defined above will be used as the data source.
signal = TiASignalConfig(channels=45, sample_rate=100, blocksize=1, \
callback=tia_callback, id=(0, (sk7_dev,)),
is_master=True, sigtype=pytia.TIA_SIG_USER_1)
# start the server with the list of signals to use
server.start([signal,])
print('[Ctrl-C to exit]')
try:
while True:
time.sleep(0.1)
except KeyboardInterrupt:
print('Closing connection...')
sd.close()
| import time, sys, itertools
from pyshake import *
import pytia
from pytia import TiAServer, TiAConnectionHandler, TiASignalConfig
# Runs a TiA server configured to stream data from an SK7 with 5
# wired IMUs attached
def data_callback(data):
(sig_id, sig_data) = data
sk7_dev = sig_data[0]
# each time the server polls for new data, get the latest sensor readings
# from each individual IMU, construct a list of their accelerometer,
# magnetometer and gyroscope values and pass that back to the server
imudata = sk7_dev.sk7_imus()
vals = [val for imu in imudata for val in [imu.acc, imu.mag, imu.gyro]]
return itertools.chain(*vals)
if __name__ == "__main__":
# connect to the IMUs through the SK7
sk7_dev = ShakeDevice(SHAKE_SK7)
if len(sys.argv) != 2:
print('Usage: pytia_sk7imu.py <port number>')
sys.exit(0)
try:
address = int(sys.argv[1])
except ValueError:
print('Failed to parse port number!')
sys.exit(-1)
print('Connecting to SK7 on port %d...' % address)
if not sk7_dev.connect(address):
print('Failed to connect')
sys.exit(-1)
print('Connected OK!')
# create a TiAServer on localhost:9000
server = TiAServer(('', 9000), TiAConnectionHandler)
# define a single signal for the server to stream to clients. There are 5
# IMUs and each will be providing 9 channels of data (x/y/z accelerometer,
# magnetometer and gyroscope), so that's 45 in total. The server will poll
# for new data at 100Hz, packets will be sent in blocks of 1 and the callback
# defined above will be used as the data source.
signal = TiASignalConfig(channels=45, sample_rate=100, blocksize=1, \
callback=tia_callback, id=(0, (sk7_dev,)),
is_master=True, sigtype=pytia.TIA_SIG_USER_1)
# start the server with the list of signals to use
server.start([signal,])
print('[Ctrl-C to exit]')
try:
while True:
time.sleep(0.1)
except KeyboardInterrupt:
print('Closing connection...')
sd.close()
| mit | Python |
e6080b6f5997e1462ed97fbda0e13b0299742527 | Update version for 4.0 RC3 | HtmlUnit/selenium,HtmlUnit/selenium,titusfortner/selenium,titusfortner/selenium,joshmgrant/selenium,valfirst/selenium,joshmgrant/selenium,joshmgrant/selenium,titusfortner/selenium,SeleniumHQ/selenium,joshmgrant/selenium,titusfortner/selenium,HtmlUnit/selenium,valfirst/selenium,SeleniumHQ/selenium,titusfortner/selenium,titusfortner/selenium,valfirst/selenium,HtmlUnit/selenium,SeleniumHQ/selenium,valfirst/selenium,joshmgrant/selenium,joshmgrant/selenium,SeleniumHQ/selenium,joshmgrant/selenium,valfirst/selenium,joshmgrant/selenium,SeleniumHQ/selenium,joshmgrant/selenium,SeleniumHQ/selenium,SeleniumHQ/selenium,valfirst/selenium,joshmgrant/selenium,valfirst/selenium,SeleniumHQ/selenium,HtmlUnit/selenium,titusfortner/selenium,titusfortner/selenium,HtmlUnit/selenium,HtmlUnit/selenium,HtmlUnit/selenium,valfirst/selenium,valfirst/selenium,valfirst/selenium,titusfortner/selenium,joshmgrant/selenium,titusfortner/selenium,valfirst/selenium,SeleniumHQ/selenium,SeleniumHQ/selenium,titusfortner/selenium,SeleniumHQ/selenium,HtmlUnit/selenium,HtmlUnit/selenium | dotnet/selenium-dotnet-version.bzl | dotnet/selenium-dotnet-version.bzl | # BUILD FILE SYNTAX: STARLARK
SE_VERSION = "4.0.0-rc3"
ASSEMBLY_VERSION = "4.0.0.0"
SUPPORTED_NET_FRAMEWORKS = ["net45", "net46", "net47", "net48"]
SUPPORTED_NET_STANDARD_VERSIONS = ["netstandard2.0", "netstandard2.1", "net5.0"]
SUPPORTED_DEVTOOLS_VERSIONS = [
"v85",
"v93",
"v94",
]
ASSEMBLY_COMPANY = "Selenium Committers"
ASSEMBLY_COPYRIGHT = "Copyright © Software Freedom Conservancy 2021"
ASSEMBLY_INFORMATIONAL_VERSION = "4"
ASSEMBLY_PRODUCT = "Selenium"
| # BUILD FILE SYNTAX: STARLARK
SE_VERSION = "4.0.0"
ASSEMBLY_VERSION = "4.0.0.0"
SUPPORTED_NET_FRAMEWORKS = ["net45", "net46", "net47", "net48"]
SUPPORTED_NET_STANDARD_VERSIONS = ["netstandard2.0", "netstandard2.1", "net5.0"]
SUPPORTED_DEVTOOLS_VERSIONS = [
"v85",
"v93",
"v94",
]
ASSEMBLY_COMPANY = "Selenium Committers"
ASSEMBLY_COPYRIGHT = "Copyright © Software Freedom Conservancy 2021"
ASSEMBLY_INFORMATIONAL_VERSION = "4"
ASSEMBLY_PRODUCT = "Selenium"
| apache-2.0 | Python |
140700d03da37e3a01c6a76346141c7af0ef68cd | Update benchmark with new capabilities | sirmarcel/floq | benchmark/spins.py | benchmark/spins.py | import sys
sys.path.append('..')
sys.path.append('museum_of_forks')
from floq.systems.spins import SpinEnsemble
import numpy as np
import timeit
def wrapper(func, *args, **kwargs):
def wrapped():
return func(*args, **kwargs)
return wrapped
def get_f(fid, base_controls):
fid(np.random.rand(1)*base_controls)
def get_f_and_df(fid, base_controls):
ctrl = np.random.rand(1)*base_controls
fid.f(ctrl)
fid.df(ctrl)
def run_opt(opt):
opt.optimize()
def time_f(fidel, ctrl):
time = min(timeit.Timer(wrapper(get_f, fidel, ctrl)).repeat(2, 5))/5
return " F: " + str(round(time*1000, 3)) + " ms per execution"
def time_df(grad_fidel, ctrl):
time = min(timeit.Timer(wrapper(get_f, grad_fidel, ctrl)).repeat(3, 1))
return "dF: " + str(round(time, 3)) + " s per execution"
def time_f_and_df(fidel, ctrl):
time = min(timeit.Timer(wrapper(get_f_and_df, fidel, ctrl)).repeat(3, 1))
return "F+dF: " + str(round(time, 3)) + " s"
def time_opt(opt):
time = min(timeit.Timer(wrapper(run_opt, opt)).repeat(2, 1))/1
return "Op: " + str(round(time, 3)) + " s"
ncomp = 5
n = 100
freqs = 0.01*np.ones(n)-0.025+0.05*np.random.rand(n)
amps = 1.0*np.ones(n)-0.025+0.05*np.random.rand(n)
s = SpinEnsemble(n, ncomp, 1.5, freqs, amps)
ctrl = 0.5*np.ones(2*ncomp)
target = np.array([[0.208597 + 0.j, -0.691552 - 0.691552j],
[0.691552 - 0.691552j, 0.208597 + 0.j]])
from floq.optimization.optimizer import SciPyOptimizer
print "---- Karl's Version"
from floq.optimization.fidelity import OperatorDistance
from floq.parallel.worker import FidelityMaster
master = FidelityMaster(n, 2, s, OperatorDistance, t=1.0, target=target)
opt = SciPyOptimizer(master, ctrl, tol=1e-5)
print time_f(master.f, ctrl)
print time_df(master.df, ctrl)
print time_f_and_df(master, ctrl)
print time_opt(opt)
master.kill()
print "---- Queue Version"
from floq.optimization.fidelity import OperatorDistance
from floq.parallel.simple_ensemble import ParallelEnsembleFidelity
fid = ParallelEnsembleFidelity(s, OperatorDistance, t=1.0, target=target)
opt = SciPyOptimizer(fid, ctrl, tol=1e-5)
print time_f(fid.f, ctrl)
print time_df(fid.df, ctrl)
print time_f_and_df(fid, ctrl)
print time_opt(opt)
print "---- Legacy version"
from museum_of_forks.p0.optimization.fidelity import EnsembleFidelity, OperatorDistance
fid = EnsembleFidelity(s, OperatorDistance, t=1.0, target=target)
opt = SciPyOptimizer(fid, ctrl, tol=1e-5)
print time_f(fid.f, ctrl)
print time_df(fid.df, ctrl)
print time_f_and_df(fid, ctrl)
print time_opt(opt)
| import sys
sys.path.append('..')
sys.path.append('museum_of_forks')
from floq.systems.spins import SpinEnsemble
import numpy as np
import timeit
def wrapper(func, *args, **kwargs):
def wrapped():
return func(*args, **kwargs)
return wrapped
def get_f(fid, base_controls):
fid(np.random.rand(1)*base_controls)
def time_u(fidel, ctrl):
time = min(timeit.Timer(wrapper(get_f, fidel, ctrl)).repeat(2, 5))/5
return " U: " + str(round(time*1000, 3)) + " ms per execution"
def time_du(grad_fidel, ctrl):
time = min(timeit.Timer(wrapper(get_f, grad_fidel, ctrl)).repeat(3, 1))
return "dU: " + str(round(time, 3)) + " s per execution"
ncomp = 6
n = 20
freqs = 0.0*np.ones(n)+1.0*np.random.rand(n)
amps = 1.0*np.ones(n)+0.05*np.random.rand(n)
s = SpinEnsemble(n, ncomp, 1.5, freqs, amps)
ctrl = 0.5*np.ones(2*ncomp)
target = np.array([[0.105818 - 0.324164j, -0.601164 - 0.722718j],
[0.601164 - 0.722718j, 0.105818 + 0.324164j]])
print "---- Current version"
from floq.optimization.fidelity import EnsembleFidelity, OperatorDistance
fid = EnsembleFidelity(s, OperatorDistance, t=1.0, target=target)
print time_u(fid.f, ctrl)
print time_du(fid.df, ctrl)
print "---- Legacy version"
from museum_of_forks.p0.optimization.fidelity import EnsembleFidelity, OperatorDistance
fid = EnsembleFidelity(s, OperatorDistance, t=1.0, target=target)
print time_u(fid.f, ctrl)
print time_du(fid.df, ctrl) | mit | Python |
c3911af29983181e593ca89fb52739c616bdb7bb | add cluster and clustertemplate to fake_policy.py | openstack/magnum,ArchiFleKs/magnum,ArchiFleKs/magnum,openstack/magnum | magnum/tests/fake_policy.py | magnum/tests/fake_policy.py | # Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
policy_data = """
{
"context_is_admin": "role:admin",
"admin_or_owner": "is_admin:True or project_id:%(project_id)s",
"default": "rule:admin_or_owner",
"admin_api": "rule:context_is_admin",
"bay:create": "",
"bay:delete": "",
"bay:detail": "",
"bay:get": "",
"bay:get_all": "",
"bay:update": "",
"baymodel:create": "",
"baymodel:delete": "",
"baymodel:detail": "",
"baymodel:get": "",
"baymodel:get_all": "",
"baymodel:update": "",
"cluster:create": "",
"cluster:delete": "",
"cluster:detail": "",
"cluster:get": "",
"cluster:get_all": "",
"cluster:update": "",
"clustertemplate:create": "",
"clustertemplate:delete": "",
"clustertemplate:detail": "",
"clustertemplate:get": "",
"clustertemplate:get_all": "",
"clustertemplate:update": "",
"clustertemplate:publish": "",
"certificate:create": "",
"certificate:get": "",
"magnum-service:get_all": ""
}
"""
| # Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
policy_data = """
{
"context_is_admin": "role:admin",
"admin_or_owner": "is_admin:True or project_id:%(project_id)s",
"default": "rule:admin_or_owner",
"admin_api": "rule:context_is_admin",
"bay:create": "",
"bay:delete": "",
"bay:detail": "",
"bay:get": "",
"bay:get_all": "",
"bay:update": "",
"baymodel:create": "",
"baymodel:delete": "",
"baymodel:detail": "",
"baymodel:get": "",
"baymodel:get_all": "",
"baymodel:update": ""
}
"""
| apache-2.0 | Python |
0bdffe52aca802377e288b7a1ceb24ecacd926c3 | Update Version number | suipotryot/django-saas-userdb | userdb/__init__.py | userdb/__init__.py | __version__ = '0.1.dev15'
| __version__ = '0.1.dev14'
| mit | Python |
404127ffae808690ba83fa506fd402dc267007f9 | Update version to 3.1 | copasi/condor-copasi,copasi/condor-copasi | web_frontend/version.py | web_frontend/version.py | version = '0.3.1 beta'
| version = '0.3.0 beta'
| artistic-2.0 | Python |
b7e8154511df708844d0f6d07adcc40f36b33b7b | Bump the version to 0.3.7 | sulami/feed2maildir | feed2maildir/__init__.py | feed2maildir/__init__.py | VERSION = '0.3.7'
| VERSION = '0.3.6'
| isc | Python |
29bd597ce983b64bce5fb19348c7b94f8ce9d0fd | Fix tests, so that they use a hostname rather than an IP address. | wonderslug/mongo-orchestration,10gen/mongo-orchestration,10gen/mongo-orchestration,agilemobiledev/mongo-orchestration,llvtt/mongo-orchestration-1,wonderslug/mongo-orchestration,llvtt/mongo-orchestration-1,agilemobiledev/mongo-orchestration | tests/__init__.py | tests/__init__.py | # Copyright 2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import sys
import time
from mongo_orchestration import set_releases
from mongo_orchestration.servers import Servers
PORT = int(os.environ.get('MO_PORT', '8889'))
HOSTNAME = os.environ.get('MO_HOST', 'localhost')
TEST_SUBJECT = (
'C=US,ST=New York,L=New York City,O=MongoDB,OU=KernelUser,CN=client_revoked'
)
TEST_RELEASES = (
{'default-release': os.environ.get('MONGOBIN', '')},
'default-release')
# Set up the default mongo binaries to use from MONGOBIN.
set_releases(*TEST_RELEASES)
SSL_ENABLED = False
SERVER_VERSION = (2, 6)
__server_id = Servers().create(name='mongod', procParams={})
try:
# Server version
info = Servers().info(__server_id)['serverInfo']
version_str = re.search('((\d+\.)+\d+)', info['version']).group(0)
SERVER_VERSION = tuple(map(int, version_str.split('.')))
# Do we have SSL support?
SSL_ENABLED = bool(info.get('OpenSSLVersion'))
finally:
Servers().cleanup()
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
from unittest2 import SkipTest
else:
import unittest
from unittest import SkipTest
class SSLTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
if not SSL_ENABLED:
raise SkipTest("SSL is not enabled on this server.")
def certificate(cert_name):
"""Return the path to the PEM file with the given name."""
return os.path.join(os.path.dirname(__file__), 'lib', cert_name)
def assert_eventually(condition, message=None, max_tries=60):
for i in range(max_tries):
if condition():
break
time.sleep(1)
else:
raise AssertionError(message or "Failed after %d attempts." % max_tries)
| # Copyright 2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import socket
import sys
import time
from mongo_orchestration import set_releases
from mongo_orchestration.servers import Servers
PORT = int(os.environ.get('MO_PORT', '8889'))
HOSTNAME = socket.getaddrinfo(
os.environ.get('MO_HOST', '127.0.0.1'), PORT,
socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)[-1][-1][0]
TEST_SUBJECT = (
'C=US,ST=New York,L=New York City,O=MongoDB,OU=KernelUser,CN=client_revoked'
)
TEST_RELEASES = (
{'default-release': os.environ.get('MONGOBIN', '')},
'default-release')
# Set up the default mongo binaries to use from MONGOBIN.
set_releases(*TEST_RELEASES)
SSL_ENABLED = False
SERVER_VERSION = (2, 6)
__server_id = Servers().create(name='mongod', procParams={})
try:
# Server version
info = Servers().info(__server_id)['serverInfo']
version_str = re.search('((\d+\.)+\d+)', info['version']).group(0)
SERVER_VERSION = tuple(map(int, version_str.split('.')))
# Do we have SSL support?
SSL_ENABLED = bool(info.get('OpenSSLVersion'))
finally:
Servers().cleanup()
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
from unittest2 import SkipTest
else:
import unittest
from unittest import SkipTest
class SSLTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
if not SSL_ENABLED:
raise SkipTest("SSL is not enabled on this server.")
def certificate(cert_name):
"""Return the path to the PEM file with the given name."""
return os.path.join(os.path.dirname(__file__), 'lib', cert_name)
def assert_eventually(condition, message=None, max_tries=60):
for i in range(max_tries):
if condition():
break
time.sleep(1)
else:
raise AssertionError(message or "Failed after %d attempts." % max_tries)
| apache-2.0 | Python |
a78b4e2ee35a404c06d4bdbed95e3ffa5ad2d061 | Use Python 3 style print. | GrahamDumpleton/mod_wsgi,GrahamDumpleton/mod_wsgi,GrahamDumpleton/mod_wsgi | tests/access.wsgi | tests/access.wsgi | def allow_access(environ, host):
print(environ, host)
return True
| def allow_access(environ, host):
print environ, host
return True
| apache-2.0 | Python |
5fd3f1afc564f154fdce7512e553a0f211e6d527 | add engine attribute to Engine class | CaptainDesAstres/Blender-Render-Manager,CaptainDesAstres/Simple-Blender-Render-Manager | settingMod/Engine.py | settingMod/Engine.py | #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage Engine Settings'''
import xml.etree.ElementTree as xmlMod
import os
class Engine:
'''class to manage Engine Settings'''
def __init__(self, xml= None):
'''initialize Engine Settings with default value or values extracted from an xml object'''
if xml is None:
self.defaultInit()
else:
self.fromXml(xml)
def defaultInit(self):
'''initialize Engine Settings with default value'''
self.version = '[default]'
self.engine = 'CYCLES'
def fromXml(self, xml):
'''initialize Engine Settings with values extracted from an xml object'''
self.version = xml.get('version')
self.engine = xml.get('engine')
def toXml(self):
'''export Engine Settings into xml syntaxed string'''
return '<engine version="'+self.version+'" engine="'\
+self.engine+'" />\n'
def see(self, log, versions):
'''menu to explore and edit Engine Settings settings'''
change = False
log.menuIn('Engine')
while True:
os.system('clear')
log.print()
self.print()
print('''\n\n Menu :
1- Edit Blender Version
2- Switch Engine
3- Switch Device (for Cycles)
0- Save and quit
''')
choice = input('Action?').strip().lower()
if choice in ['0', 'q', 'quit', 'cancel']:
log.menuOut()
return change
elif choice == '1':
change = (self.chooseVersion(log, versions) or change)
elif choice in ['2', '3']:
self.switch(int(choice))
change = True
else:
log.error('Unvalid menu choice', False)
def print(self):
'''a method to print Engine Settings'''
print('Blender Version : '+self.version)
print('Engine : '+self.engine.capitalize())
def chooseVersion(self, log, versions):
'''A method to set the blender version to use with a preset'''
choice = versions.choose(log, True, True)
if choice is not None:
self.version = choice
log.write('Version set to : '+self.version)
return True
return False
| #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage Engine Settings'''
import xml.etree.ElementTree as xmlMod
import os
class Engine:
'''class to manage Engine Settings'''
def __init__(self, xml= None):
'''initialize Engine Settings with default value or values extracted from an xml object'''
if xml is None:
self.defaultInit()
else:
self.fromXml(xml)
def defaultInit(self):
'''initialize Engine Settings with default value'''
self.version = '[default]'
def fromXml(self, xml):
'''initialize Engine Settings with values extracted from an xml object'''
self.version = xml.get('version')
def toXml(self):
'''export Engine Settings into xml syntaxed string'''
return '<engine version="'+self.version+'" />\n'
def see(self, log, versions):
'''menu to explore and edit Engine Settings settings'''
change = False
log.menuIn('Engine')
while True:
os.system('clear')
log.print()
self.print()
print('''\n\n Menu :
1- Edit Blender Version
0- Save and quit
''')
choice = input('Action?').strip().lower()
if choice in ['0', 'q', 'quit', 'cancel']:
log.menuOut()
return change
elif choice == '1':
change = (self.chooseVersion(log, versions) or change)
else:
log.error('Unvalid menu choice', False)
def print(self):
'''a method to print Engine Settings'''
print('Blender Version : '+self.version)
def chooseVersion(self, log, versions):
'''A method to set the blender version to use with a preset'''
choice = versions.choose(log, True, True)
if choice is not None:
self.version = choice
log.write('Version set to : '+self.version)
return True
return False
| mit | Python |
54bc9e4d75d36684a9524688a593eab5ef8a7333 | Fix typo | yuvipanda/jupyterhub-kubernetes-spawner,jupyterhub/kubespawner | tests/conftest.py | tests/conftest.py | """pytest fixtures for kubespawner"""
import os
from kubernetes.client import V1Namespace
from kubernetes.config import load_kube_config
import pytest
from traitlets.config import Config
from kubespawner.clients import shared_client
@pytest.fixture(scope="session")
def kube_ns():
"""Fixture for the kubernetes namespace"""
return os.environ.get("KUBESPAWNER_TEST_NAMESPACE") or "kubespawner-test"
@pytest.fixture
def config(kube_ns):
"""Return a traitlets Config object
The base configuration for testing.
Use when constructing Spawners for tests
"""
cfg = Config()
cfg.KubeSpawner.namespace = kube_ns
cfg.KubeSpawner.start_timeout = 120
return cfg
@pytest.fixture(scope="session")
def kube_client(request, kube_ns):
"""fixture for the Kubernetes client object.
skips test that require kubernetes if kubernetes cannot be contacted
"""
load_kube_config()
client = shared_client('CoreV1Api')
try:
namespaces = client.list_namespace(_request_timeout=3)
except Exception as e:
pytest.skip("Kubernetes not found: %s" % e)
if not any(ns.metadata.name == kube_ns for ns in namespaces.items):
print("Creating namespace %s" % kube_ns)
client.create_namespace(V1Namespace(metadata=dict(name=kube_ns)))
else:
print("Using existing namespace %s" % kube_ns)
# delete the test namespace when we finish
request.addfinalizer(lambda: client.delete_namespace(kube_ns, body={}))
return client
| """pytest fixtures for kubespawner"""
import os
from kubernetes.client import V1Namespace
from kubernetes.config import load_kube_config
import pytest
from traitlets.config import Config
from kubespawner.clients import shared_client
@pytest.fixture(scope="session")
def kube_ns():
"""Fixture for the kubernetes namespace"""
return os.environ.get("KUBESPAWNER_TEST_NAMESPACE") or "kubespawner-test"
@pytest.fixture
def config(kube_ns):
"""Return a traitlets Config object
The base configuration for testing.
Use when constructing Spawners for tests
"""
cfg = Config()
cfg.KubeSpawner.namespace = kube_ns
cfb.KubeSpawner.start_timeout = 120
return cfg
@pytest.fixture(scope="session")
def kube_client(request, kube_ns):
"""fixture for the Kubernetes client object.
skips test that require kubernetes if kubernetes cannot be contacted
"""
load_kube_config()
client = shared_client('CoreV1Api')
try:
namespaces = client.list_namespace(_request_timeout=3)
except Exception as e:
pytest.skip("Kubernetes not found: %s" % e)
if not any(ns.metadata.name == kube_ns for ns in namespaces.items):
print("Creating namespace %s" % kube_ns)
client.create_namespace(V1Namespace(metadata=dict(name=kube_ns)))
else:
print("Using existing namespace %s" % kube_ns)
# delete the test namespace when we finish
request.addfinalizer(lambda: client.delete_namespace(kube_ns, body={}))
return client
| bsd-3-clause | Python |
bad660465f87dfa5f3edfc7b50049cc6dab2df80 | fix new format | cpvargas/stacklib | tests/ez_stack.py | tests/ez_stack.py | '''
Simple script to test some functions and methods
Creates a fake catalog of 100 sources at [RA,DEC] inside fullmap area
at each position pastes beammaps of amplitude -150 on a zero fullmap,
then it performs a stack of all beams.
'''
from datetime import datetime
startTime = datetime.now()
import sys
import os
sys.path.append(os.path.abspath("../"))
import stacklib as sl
import numpy as np
path = os.environ["HOME"] + '/FILES/'
m = path + 'ACT_148_equ_season_3_1way_v3_src_free.fits'
w = path + 'ACT_148_equ_season_3_1way_calgc_strictcuts2_weights.fits'
b = path + 'profile_AR1_2009_pixwin_130224.txt'
s = path + 'Equa_mask_15mJy.fits'
RA0 = 57.5
RA1 = 308.5
DEC0 = -1.5
DEC1 = 1.5
M = sl.StackMap(m,w,b,s,RA0,RA1,DEC0,DEC1)
M.fullmap = np.abs(M.fullmap*0)
cat = sl.fakecatalog(100)
psize = np.abs(M.maphdr['CDELT1'])
Beam = -150*sl.beam(b,psize,10)
for item in cat:
loc = M.getpix(item[0],item[1])
M.fullmap = sl.pastemap(M.fullmap, Beam, loc)
M.setsubmapL(16)
M.setstackmap()
for item in cat:
M.setsubmap(item[0],item[1])
M.stacksubmap()
M.finishstack()
print datetime.now() - startTime | '''
Simple script to test some functions and methods
Creates a fake catalog of 100 sources at [RA,DEC] inside fullmap area
at each position pastes beammaps of amplitude -150 on a zero fullmap,
then it performs a stack of all beams.
'''
from datetime import datetime
startTime = datetime.now()
import stacklib as sl
import numpy as np
import os
path = os.environ["HOME"] + '/FILES/'
m = path + 'ACT_148_equ_season_3_1way_v3_src_free.fits'
w = path + 'ACT_148_equ_season_3_1way_calgc_strictcuts2_weights.fits'
b = path + 'profile_AR1_2009_pixwin_130224.txt'
s = path + 'Equa_mask_15mJy.fits'
RA0 = 55.
RA1 = 324.
DEC0 = -1.5
DEC1 = 1.5
M = sl.StackMap(m,w,b,s,RA0,RA1,DEC0,DEC1)
M.fullmap = np.abs(M.fullmap*0)
cat = sl.fakecatalog(100)
psize = np.abs(M.maphdr['CDELT1'])
Beam = -150*sl.beam(b,psize,10)
for item in cat:
loc = M.getpix(item[0],item[1])
M.fullmap = sl.pastemap(M.fullmap, Beam, loc)
M.setsubmapL(16)
M.setstackmap()
for item in cat:
M.setsubmap(item[0],item[1])
M.stacksubmap()
M.finishstack()
print datetime.now() - startTime | mit | Python |
de23cf5344fbd3b3a3b13111edb22bb1e519d806 | create a temp file for cookies in phantomjs if not specified | joshmgrant/selenium,Ardesco/selenium,chrisblock/selenium,Dude-X/selenium,gurayinan/selenium,Jarob22/selenium,valfirst/selenium,jabbrwcky/selenium,alb-i986/selenium,asashour/selenium,HtmlUnit/selenium,valfirst/selenium,dibagga/selenium,Dude-X/selenium,5hawnknight/selenium,asashour/selenium,juangj/selenium,SeleniumHQ/selenium,tbeadle/selenium,oddui/selenium,carlosroh/selenium,davehunt/selenium,valfirst/selenium,sankha93/selenium,Ardesco/selenium,twalpole/selenium,sag-enorman/selenium,alb-i986/selenium,SeleniumHQ/selenium,kalyanjvn1/selenium,jsakamoto/selenium,krmahadevan/selenium,GorK-ChO/selenium,jabbrwcky/selenium,krmahadevan/selenium,SeleniumHQ/selenium,sankha93/selenium,uchida/selenium,gurayinan/selenium,titusfortner/selenium,joshmgrant/selenium,dibagga/selenium,joshmgrant/selenium,asashour/selenium,chrisblock/selenium,oddui/selenium,mojwang/selenium,joshuaduffy/selenium,twalpole/selenium,xsyntrex/selenium,markodolancic/selenium,GorK-ChO/selenium,Herst/selenium,Herst/selenium,mach6/selenium,joshmgrant/selenium,juangj/selenium,bayandin/selenium,mojwang/selenium,alb-i986/selenium,SeleniumHQ/selenium,Herst/selenium,markodolancic/selenium,oddui/selenium,juangj/selenium,Tom-Trumper/selenium,jabbrwcky/selenium,valfirst/selenium,GorK-ChO/selenium,Jarob22/selenium,valfirst/selenium,davehunt/selenium,davehunt/selenium,5hawnknight/selenium,valfirst/selenium,Jarob22/selenium,twalpole/selenium,gurayinan/selenium,joshbruning/selenium,bayandin/selenium,bayandin/selenium,bayandin/selenium,asolntsev/selenium,5hawnknight/selenium,GorK-ChO/selenium,Jarob22/selenium,tbeadle/selenium,xsyntrex/selenium,mojwang/selenium,xmhubj/selenium,carlosroh/selenium,Tom-Trumper/selenium,xmhubj/selenium,chrisblock/selenium,xmhubj/selenium,titusfortner/selenium,sag-enorman/selenium,markodolancic/selenium,titusfortner/selenium,valfirst/selenium,joshuaduffy/selenium,oddui/selenium,gurayinan/seleni
um,twalpole/selenium,Dude-X/selenium,joshmgrant/selenium,asolntsev/selenium,SeleniumHQ/selenium,sankha93/selenium,tbeadle/selenium,gurayinan/selenium,markodolancic/selenium,joshmgrant/selenium,titusfortner/selenium,dibagga/selenium,lmtierney/selenium,krmahadevan/selenium,davehunt/selenium,Ardesco/selenium,DrMarcII/selenium,jsakamoto/selenium,bayandin/selenium,xsyntrex/selenium,twalpole/selenium,alb-i986/selenium,asashour/selenium,Jarob22/selenium,joshbruning/selenium,asolntsev/selenium,lmtierney/selenium,Dude-X/selenium,HtmlUnit/selenium,joshmgrant/selenium,5hawnknight/selenium,TikhomirovSergey/selenium,twalpole/selenium,mach6/selenium,titusfortner/selenium,markodolancic/selenium,juangj/selenium,asolntsev/selenium,GorK-ChO/selenium,DrMarcII/selenium,Jarob22/selenium,sankha93/selenium,uchida/selenium,gurayinan/selenium,Tom-Trumper/selenium,kalyanjvn1/selenium,Jarob22/selenium,valfirst/selenium,bayandin/selenium,carlosroh/selenium,uchida/selenium,tbeadle/selenium,mach6/selenium,jabbrwcky/selenium,oddui/selenium,krmahadevan/selenium,joshbruning/selenium,mojwang/selenium,Tom-Trumper/selenium,jabbrwcky/selenium,sag-enorman/selenium,oddui/selenium,Dude-X/selenium,chrisblock/selenium,tbeadle/selenium,markodolancic/selenium,jsakamoto/selenium,oddui/selenium,xsyntrex/selenium,bayandin/selenium,dibagga/selenium,TikhomirovSergey/selenium,tbeadle/selenium,kalyanjvn1/selenium,SeleniumHQ/selenium,Herst/selenium,Dude-X/selenium,xmhubj/selenium,asolntsev/selenium,asolntsev/selenium,titusfortner/selenium,jabbrwcky/selenium,joshmgrant/selenium,DrMarcII/selenium,5hawnknight/selenium,HtmlUnit/selenium,joshbruning/selenium,kalyanjvn1/selenium,twalpole/selenium,valfirst/selenium,krmahadevan/selenium,twalpole/selenium,joshbruning/selenium,juangj/selenium,joshuaduffy/selenium,xmhubj/selenium,xsyntrex/selenium,Herst/selenium,kalyanjvn1/selenium,carlosroh/selenium,bayandin/selenium,chrisblock/selenium,Ardesco/selenium,HtmlUnit/selenium,lmtierney/selenium,davehunt/selenium,kalyanjvn1/selenium
,sag-enorman/selenium,jsakamoto/selenium,krmahadevan/selenium,joshuaduffy/selenium,5hawnknight/selenium,Jarob22/selenium,Tom-Trumper/selenium,joshbruning/selenium,joshbruning/selenium,lmtierney/selenium,lmtierney/selenium,Herst/selenium,jsakamoto/selenium,mach6/selenium,GorK-ChO/selenium,sankha93/selenium,mojwang/selenium,GorK-ChO/selenium,oddui/selenium,xsyntrex/selenium,5hawnknight/selenium,Tom-Trumper/selenium,sag-enorman/selenium,HtmlUnit/selenium,Herst/selenium,DrMarcII/selenium,jabbrwcky/selenium,uchida/selenium,krmahadevan/selenium,chrisblock/selenium,jsakamoto/selenium,Ardesco/selenium,oddui/selenium,carlosroh/selenium,titusfortner/selenium,lmtierney/selenium,GorK-ChO/selenium,alb-i986/selenium,DrMarcII/selenium,alb-i986/selenium,sankha93/selenium,mojwang/selenium,markodolancic/selenium,asashour/selenium,dibagga/selenium,jabbrwcky/selenium,TikhomirovSergey/selenium,jsakamoto/selenium,joshuaduffy/selenium,Ardesco/selenium,asashour/selenium,joshuaduffy/selenium,asolntsev/selenium,Tom-Trumper/selenium,carlosroh/selenium,bayandin/selenium,5hawnknight/selenium,sag-enorman/selenium,sankha93/selenium,titusfortner/selenium,joshbruning/selenium,uchida/selenium,davehunt/selenium,Ardesco/selenium,kalyanjvn1/selenium,gurayinan/selenium,Dude-X/selenium,SeleniumHQ/selenium,HtmlUnit/selenium,joshbruning/selenium,5hawnknight/selenium,Dude-X/selenium,carlosroh/selenium,jsakamoto/selenium,uchida/selenium,joshmgrant/selenium,juangj/selenium,gurayinan/selenium,dibagga/selenium,joshmgrant/selenium,chrisblock/selenium,DrMarcII/selenium,uchida/selenium,mach6/selenium,valfirst/selenium,alb-i986/selenium,juangj/selenium,gurayinan/selenium,Ardesco/selenium,tbeadle/selenium,alb-i986/selenium,asashour/selenium,alb-i986/selenium,valfirst/selenium,davehunt/selenium,mojwang/selenium,TikhomirovSergey/selenium,titusfortner/selenium,davehunt/selenium,mach6/selenium,joshuaduffy/selenium,lmtierney/selenium,Tom-Trumper/selenium,carlosroh/selenium,DrMarcII/selenium,krmahadevan/selenium,DrMarcII/
selenium,titusfortner/selenium,xmhubj/selenium,Ardesco/selenium,uchida/selenium,mojwang/selenium,uchida/selenium,TikhomirovSergey/selenium,juangj/selenium,HtmlUnit/selenium,markodolancic/selenium,sankha93/selenium,asolntsev/selenium,joshuaduffy/selenium,xsyntrex/selenium,jsakamoto/selenium,Dude-X/selenium,krmahadevan/selenium,mach6/selenium,xsyntrex/selenium,xmhubj/selenium,davehunt/selenium,sag-enorman/selenium,SeleniumHQ/selenium,TikhomirovSergey/selenium,HtmlUnit/selenium,kalyanjvn1/selenium,asashour/selenium,mojwang/selenium,carlosroh/selenium,jabbrwcky/selenium,Herst/selenium,GorK-ChO/selenium,Jarob22/selenium,sag-enorman/selenium,HtmlUnit/selenium,joshuaduffy/selenium,dibagga/selenium,mach6/selenium,chrisblock/selenium,lmtierney/selenium,xsyntrex/selenium,tbeadle/selenium,DrMarcII/selenium,markodolancic/selenium,SeleniumHQ/selenium,chrisblock/selenium,lmtierney/selenium,asashour/selenium,TikhomirovSergey/selenium,SeleniumHQ/selenium,Tom-Trumper/selenium,xmhubj/selenium,tbeadle/selenium,titusfortner/selenium,mach6/selenium,xmhubj/selenium,SeleniumHQ/selenium,kalyanjvn1/selenium,sankha93/selenium,juangj/selenium,TikhomirovSergey/selenium,sag-enorman/selenium,TikhomirovSergey/selenium,asolntsev/selenium,Herst/selenium,dibagga/selenium,HtmlUnit/selenium,twalpole/selenium,joshmgrant/selenium,dibagga/selenium | py/selenium/webdriver/phantomjs/service.py | py/selenium/webdriver/phantomjs/service.py | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import tempfile
from selenium.webdriver.common import service
class Service(service.Service):
    """
    Object that manages the starting and stopping of PhantomJS / Ghostdriver.

    Adds a managed cookie store on top of the common Service machinery:
    if the caller did not pass ``--cookies-file=...`` a temporary file is
    created and removed again on shutdown.
    """
    def __init__(self, executable_path, port=0, service_args=None, log_path=None):
        """
        Creates a new instance of the Service
        :Args:
         - executable_path : Path to PhantomJS binary
         - port : Port the service is running on
         - service_args : A List of other command line options to pass to PhantomJS
         - log_path: Path for PhantomJS service to log to
        """
        # Copy the caller's list so appending below cannot mutate it.
        self.service_args = service_args[:] if service_args else []
        if not log_path:
            log_path = "ghostdriver.log"
        if not self._args_contain("--cookies-file="):
            self._cookie_temp_file = tempfile.mkstemp()[1]
            # Fix: the flag needs its leading "--"; without it PhantomJS
            # would not recognise the option and _args_contain() could
            # never detect the value we just appended.
            self.service_args.append("--cookies-file=" + self._cookie_temp_file)
        else:
            self._cookie_temp_file = None
        service.Service.__init__(self, executable_path, port=port, log_file=open(log_path, 'w'))
    def _args_contain(self, arg):
        # any() works on Python 2 and 3; len(filter(...)) raises a
        # TypeError on Python 3 where filter() returns a lazy iterator.
        return any(x.startswith(arg) for x in self.service_args)
    def command_line_args(self):
        # Tell GhostDriver which port to serve the WebDriver protocol on.
        return self.service_args + ["--webdriver=%d" % self.port]
    @property
    def service_url(self):
        """
        Gets the url of the GhostDriver Service
        """
        return "http://localhost:%d/wd/hub" % self.port
    def send_remote_shutdown_command(self):
        # Clean up the temporary cookie store created in __init__ (if any).
        if self._cookie_temp_file:
            os.remove(self._cookie_temp_file)
| # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common import service
class Service(service.Service):
    """
    Object that manages the starting and stopping of PhantomJS / Ghostdriver
    """
    def __init__(self, executable_path, port=0, service_args=None, log_path=None):
        """
        Creates a new instance of the Service
        :Args:
         - executable_path : Path to PhantomJS binary
         - port : Port the service is running on
         - service_args : A List of other command line options to pass to PhantomJS
         - log_path: Path for PhantomJS service to log to
        """
        # Copy the caller's list (or start empty) so later mutation of
        # self.service_args cannot affect the caller's object.
        self.service_args= service_args
        if self.service_args is None:
            self.service_args = []
        else:
            self.service_args=service_args[:]
        if not log_path:
            log_path = "ghostdriver.log"
        service.Service.__init__(self, executable_path, port=port, log_file=open(log_path, 'w'))
    def command_line_args(self):
        # Append the port GhostDriver should serve WebDriver commands on.
        return self.service_args + ["--webdriver=%d" % self.port]
    @property
    def service_url(self):
        """
        Gets the url of the GhostDriver Service
        """
        return "http://localhost:%d/wd/hub" % self.port
    def send_remote_shutdown_command(self):
        # Intentionally a no-op — presumably process termination is handled
        # by the base class; confirm against service.Service.stop().
        pass
| apache-2.0 | Python |
bddcdfb59357ca5e27750d2bdbfc6d974e1a2a09 | add try-except for the user column removal shit | pajlada/pajbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/tyggbot | alembic/versions/25cf8a00d471_removed_unused_columns_in_user_table.py | alembic/versions/25cf8a00d471_removed_unused_columns_in_user_table.py | """removed unused columns in user table
Revision ID: 25cf8a00d471
Revises: 34d6f5a24cbe
Create Date: 2016-04-10 13:29:45.695493
"""
# revision identifiers, used by Alembic.
revision = '25cf8a00d471'
down_revision = '34d6f5a24cbe'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Drop the unused user columns, tolerating columns already removed."""
    ### commands auto generated by Alembic - please adjust! ###
    # Best-effort removal: each column is dropped independently so a single
    # already-missing column does not prevent the others from being dropped.
    # "except Exception" (not a bare except) keeps Ctrl-C/SystemExit working.
    for column in ('twitch_access_token', 'twitch_refresh_token', 'discord_user_id'):
        try:
            op.drop_column('tb_user', column)
        except Exception:
            pass
    ### end Alembic commands ###
def downgrade():
    """Recreate the dropped columns (data is not recoverable, so they come
    back as empty NULLable columns)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tb_user', sa.Column('discord_user_id', mysql.VARCHAR(length=32), nullable=True))
    op.add_column('tb_user', sa.Column('twitch_refresh_token', mysql.VARCHAR(length=128), nullable=True))
    op.add_column('tb_user', sa.Column('twitch_access_token', mysql.VARCHAR(length=128), nullable=True))
    ### end Alembic commands ###
| """removed unused columns in user table
Revision ID: 25cf8a00d471
Revises: 34d6f5a24cbe
Create Date: 2016-04-10 13:29:45.695493
"""
# revision identifiers, used by Alembic.
revision = '25cf8a00d471'
down_revision = '34d6f5a24cbe'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Drop the unused user columns.

    NOTE(review): the drops are unguarded, so re-running this migration
    after a partial failure (column already gone) will error out."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('tb_user', 'twitch_access_token')
    op.drop_column('tb_user', 'twitch_refresh_token')
    op.drop_column('tb_user', 'discord_user_id')
    ### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('tb_user', sa.Column('discord_user_id', mysql.VARCHAR(length=32), nullable=True))
op.add_column('tb_user', sa.Column('twitch_refresh_token', mysql.VARCHAR(length=128), nullable=True))
op.add_column('tb_user', sa.Column('twitch_access_token', mysql.VARCHAR(length=128), nullable=True))
### end Alembic commands ###
| mit | Python |
b2f5c65cd623d01f00064c33c563ae6d4e4f5ec3 | test unicode str removed | fata1ex/django-statsy,zhebrak/django-statsy,zhebrak/django-statsy,zhebrak/django-statsy,fata1ex/django-statsy,fata1ex/django-statsy | tests/settings.py | tests/settings.py | # coding: utf-8
# Shared fixture values for the statsy test suite.
test_group = 'test_group'
test_event = 'test_event'
test_value_str = 'test_value'
test_value_int = 123
test_value_float = 123.0
test_label = 'test_label'
# One representative value of each supported value type.
test_value_list = [test_value_str, test_value_int, test_value_float]
| # coding: utf-8
# Shared fixture values for the statsy test suite (Python 2 era: keeps an
# explicit unicode literal alongside the byte string).
test_group = 'test_group'
test_event = 'test_event'
test_value_str = 'test_value'
test_value_unicode = u'test_value'
test_value_int = 123
test_value_float = 123.0
test_label = 'test_label'
# One representative value of each supported value type.
test_value_list = [
    test_value_str, test_value_unicode,
    test_value_int, test_value_float
]
| mit | Python |
8f374f9e91fd8ff9527920767ac48b7c01567fd1 | Fix for Py3 | ganehag/pyMeterBus | tests/test_aux.py | tests/test_aux.py | import os
import sys
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
import unittest
import meterbus
from meterbus.exceptions import *
class TestSequenceFunctions(unittest.TestCase):
    """Tests for meterbus helpers: manufacturer ID encoding, M-Bus
    primary/secondary address classification and serial inter-byte
    timeouts."""
    def setUp(self):
        pass
    def test_manufacturer_encode(self):
        # "WEP" should round-trip to the documented hex encoding B05C.
        intval = meterbus.aux.manufacturer_id("WEP")
        h1, h2 = meterbus.manufacturer_encode(intval, 2)
        hexstr = "{0:02X}{1:02X}".format(h1, h2)
        self.assertEqual(hexstr, "B05C")
    def test_invalid_manufacturer_length(self):
        # A one-letter manufacturer code is invalid -> encode returns None.
        intval = meterbus.aux.manufacturer_id("J")
        falseVal = meterbus.manufacturer_encode(intval, 2)
        self.assertEqual(falseVal, None)
    def test_invalud_manufacturer_string(self):
        # (sic: "invalud" kept — renaming would change test discovery)
        intval = meterbus.aux.manufacturer_id("@@@")
        falseVal = meterbus.manufacturer_encode(intval, 2)
        self.assertEqual(falseVal, None)
    def test_is_primary_true(self):
        self.assertEqual(True, meterbus.is_primary_address(1))
    def test_is_primary_false(self):
        # Primary M-Bus addresses are a single byte; 256 is out of range.
        self.assertEqual(False, meterbus.is_primary_address(256))
    def test_is_secondary_true(self):
        self.assertEqual(True, meterbus.is_secondary_address("00000001B05CFF1B"))
    def test_is_secondary_false(self):
        self.assertEqual(False, meterbus.is_secondary_address(0))
    def test_inter_char_timeout(self):
        # Expected inter-byte timeout (seconds) per baud rate.
        opts = {
            300: 0.12,
            600: 0.60,
            1200: 0.4,
            2400: 0.2,
            4800: 0.2,
            9600: 0.1,
            19200: 0.1,
            38400: 0.1,
        }
        for key, val in opts.items():
            self.assertEqual(
                meterbus.aux.inter_byte_timeout(key), val)
if __name__ == '__main__':
unittest.main()
| import os
import sys
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
import unittest
import meterbus
from meterbus.exceptions import *
class TestSequenceFunctions(unittest.TestCase):
    """Tests for meterbus helpers: manufacturer ID encoding, M-Bus
    address classification and serial inter-byte timeouts."""
    def setUp(self):
        pass
    def test_manufacturer_encode(self):
        # "WEP" should round-trip to the documented hex encoding B05C.
        intval = meterbus.aux.manufacturer_id("WEP")
        h1, h2 = meterbus.manufacturer_encode(intval, 2)
        hexstr = "{0:02X}{1:02X}".format(h1, h2)
        self.assertEqual(hexstr, "B05C")
    def test_invalid_manufacturer_length(self):
        intval = meterbus.aux.manufacturer_id("J")
        falseVal = meterbus.manufacturer_encode(intval, 2)
        self.assertEqual(falseVal, None)
    def test_invalud_manufacturer_string(self):
        # (sic: "invalud" kept — renaming would change test discovery)
        intval = meterbus.aux.manufacturer_id("@@@")
        falseVal = meterbus.manufacturer_encode(intval, 2)
        self.assertEqual(falseVal, None)
    def test_is_primary_true(self):
        self.assertEqual(True, meterbus.is_primary_address(1))
    def test_is_primary_false(self):
        self.assertEqual(False, meterbus.is_primary_address(256))
    def test_is_secondary_true(self):
        self.assertEqual(True, meterbus.is_secondary_address("00000001B05CFF1B"))
    def test_is_secondary_false(self):
        self.assertEqual(False, meterbus.is_secondary_address(0))
    def test_inter_char_timeout(self):
        # Expected inter-byte timeout (seconds) per baud rate.
        opts = {
            300: 0.12,
            600: 0.60,
            1200: 0.4,
            2400: 0.2,
            4800: 0.2,
            9600: 0.1,
            19200: 0.1,
            38400: 0.1,
        }
        # Fix: dict.iteritems() does not exist on Python 3; items() works
        # on both Python 2 and 3 and this dict is tiny anyway.
        for key, val in opts.items():
            self.assertEqual(
                meterbus.aux.inter_byte_timeout(key), val)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
4c41de1307e5666d65a2e70e39d043c144709c31 | Test unsubscribe works. | ambitioninc/django-entity-emailer,ambitioninc/django-entity-emailer,wesleykendall/django-entity-emailer,wesleykendall/django-entity-emailer | entity_emailer/tests/test_tasks.py | entity_emailer/tests/test_tasks.py | from entity.models import Entity, EntityRelationship
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from django_dynamic_fixture import G, N
from entity_emailer import tasks
from entity_emailer.models import Email, EmailType, Unsubscribed
class Test_get_email_addresses(TestCase):
    """Tests for tasks.get_email_addresses: resolving an Email's target
    entity (and optional sub-entity type) into recipient addresses."""
    def setUp(self):
        self.ct = ContentType.objects.get_for_model(Email)
        self.ct2 = ContentType.objects.get_for_model(Unsubscribed)
        # One super-entity with three sub-entities; the third carries a
        # different entity_type so type filtering can be exercised.
        self.super_entity = G(Entity, entity_meta={'email': 'test_super@example.com'}, entity_type=self.ct)
        self.sub_entity_1 = G(Entity, entity_meta={'email': 'test_sub1@example.com'}, entity_type=self.ct)
        self.sub_entity_2 = G(Entity, entity_meta={'email': 'test_sub2@example.com'}, entity_type=self.ct)
        self.sub_entity_3 = G(Entity, entity_meta={'email': 'test_sub3@example.com'}, entity_type=self.ct2)
        G(EntityRelationship, sub_entity=self.sub_entity_1, super_entity=self.super_entity)
        G(EntityRelationship, sub_entity=self.sub_entity_2, super_entity=self.super_entity)
        G(EntityRelationship, sub_entity=self.sub_entity_3, super_entity=self.super_entity)
    def test_returns_sub_entities_emails(self):
        # subentity_type set -> addresses come from matching sub-entities.
        email = N(Email, send_to=self.super_entity, subentity_type=self.ct, context={})
        addresses = tasks.get_email_addresses(email)
        expected_addresses = {u'test_sub1@example.com', u'test_sub2@example.com'}
        self.assertEqual(set(addresses), expected_addresses)
    def test_filters_other_entity_types(self):
        # Only sub-entities of the requested type are included.
        email = N(Email, send_to=self.super_entity, subentity_type=self.ct2, context={})
        addresses = tasks.get_email_addresses(email)
        expected_addresses = {u'test_sub3@example.com'}
        self.assertEqual(set(addresses), expected_addresses)
    def test_returns_own_email(self):
        # No subentity_type -> the target entity's own address is used.
        email = N(Email, send_to=self.super_entity, subentity_type=None, context={})
        addresses = tasks.get_email_addresses(email)
        expected_addresses = {u'test_super@example.com'}
        self.assertEqual(set(addresses), expected_addresses)
    def test_unsubscription_works(self):
        # An Unsubscribed record for the email's type excludes that user.
        test_email_type = G(EmailType, name='test_email')
        G(Unsubscribed, user=self.sub_entity_1, unsubscribed_from=test_email_type)
        email = N(Email, send_to=self.super_entity, subentity_type=self.ct, email_type=test_email_type, context={})
        addresses = tasks.get_email_addresses(email)
        expected_addresses = {u'test_sub2@example.com'}
        self.assertEqual(set(addresses), expected_addresses)
| from entity.models import Entity, EntityRelationship
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from django_dynamic_fixture import G, N
from entity_emailer import tasks
from entity_emailer.models import Email, Unsubscribed
class Test_get_email_addresses(TestCase):
    """Tests for tasks.get_email_addresses: resolving an Email's target
    entity (and optional sub-entity type) into recipient addresses."""
    def setUp(self):
        self.ct = ContentType.objects.get_for_model(Email)
        self.ct2 = ContentType.objects.get_for_model(Unsubscribed)
        # One super-entity with three sub-entities; the third carries a
        # different entity_type so type filtering can be exercised.
        self.super_entity = G(Entity, entity_meta={'email': 'test_super@example.com'}, entity_type=self.ct)
        self.sub_entity_1 = G(Entity, entity_meta={'email': 'test_sub1@example.com'}, entity_type=self.ct)
        self.sub_entity_2 = G(Entity, entity_meta={'email': 'test_sub2@example.com'}, entity_type=self.ct)
        self.sub_entity_3 = G(Entity, entity_meta={'email': 'test_sub3@example.com'}, entity_type=self.ct2)
        G(EntityRelationship, sub_entity=self.sub_entity_1, super_entity=self.super_entity)
        G(EntityRelationship, sub_entity=self.sub_entity_2, super_entity=self.super_entity)
        G(EntityRelationship, sub_entity=self.sub_entity_3, super_entity=self.super_entity)
    def test_returns_sub_entities_emails(self):
        email = N(Email, send_to=self.super_entity, subentity_type=self.ct, context={})
        addresses = tasks.get_email_addresses(email)
        expected_addresses = {u'test_sub1@example.com', u'test_sub2@example.com'}
        self.assertEqual(set(addresses), expected_addresses)
    def test_filters_other_entity_types(self):
        email = N(Email, send_to=self.super_entity, subentity_type=self.ct2, context={})
        addresses = tasks.get_email_addresses(email)
        expected_addresses = {u'test_sub3@example.com'}
        self.assertEqual(set(addresses), expected_addresses)
    def test_returns_own_email(self):
        email = N(Email, send_to=self.super_entity, subentity_type=None, context={})
        addresses = tasks.get_email_addresses(email)
        expected_addresses = {u'test_super@example.com'}
        self.assertEqual(set(addresses), expected_addresses)
    def test_unsubscription_works(self):
        # NOTE(review): placeholder only — the unsubscribe behaviour is
        # not actually exercised here.
        pass
| mit | Python |
872151f32e0fe04340da722e10a0910019c0166e | fix patch to pluralize list view setting | StrellaGroup/frappe,frappe/frappe,yashodhank/frappe,frappe/frappe,saurabh6790/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,saurabh6790/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,yashodhank/frappe,almeidapaulopt/frappe,saurabh6790/frappe,saurabh6790/frappe,mhbu50/frappe,mhbu50/frappe,almeidapaulopt/frappe,mhbu50/frappe,yashodhank/frappe,StrellaGroup/frappe,frappe/frappe | frappe/patches/v13_0/rename_list_view_setting_to_list_view_settings.py | frappe/patches/v13_0/rename_list_view_setting_to_list_view_settings.py | # Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: migrate rows from the old 'List View Setting' doctype into
    the renamed (pluralised) 'List View Settings' doctype, then drop the
    old doctype.

    Safe to re-run: rows already present in the new table are skipped."""
    if frappe.db.table_exists('List View Setting'):
        # Make sure the destination table exists before querying it.
        if not frappe.db.table_exists('List View Settings'):
            frappe.reload_doctype("List View Settings")
        existing_list_view_settings = frappe.get_all('List View Settings', as_list=True)
        for list_view_setting in frappe.get_all('List View Setting', fields = ['disable_count', 'disable_sidebar_stats', 'disable_auto_refresh', 'name']):
            name = list_view_setting.pop('name')
            # Skip rows that were already migrated on a previous run.
            if name not in [x[0] for x in existing_list_view_settings]:
                list_view_setting['doctype'] = 'List View Settings'
                list_view_settings = frappe.get_doc(list_view_setting)
                # setting name here is necessary because autoname is set as prompt
                list_view_settings.name = name
                list_view_settings.insert()
        frappe.delete_doc("DocType", "List View Setting", force=True)
        frappe.db.commit()
| # Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: migrate rows from the old 'List View Setting' doctype into
    the renamed 'List View Settings' doctype, then drop the old doctype.

    NOTE(review): assumes the 'List View Settings' table already exists —
    there is no reload/guard before querying it."""
    if frappe.db.table_exists('List View Setting'):
        existing_list_view_settings = frappe.get_all('List View Settings', as_list=True)
        for list_view_setting in frappe.get_all('List View Setting', fields = ['disable_count', 'disable_sidebar_stats', 'disable_auto_refresh', 'name']):
            name = list_view_setting.pop('name')
            # Skip rows that were already migrated on a previous run.
            if name not in [x[0] for x in existing_list_view_settings]:
                list_view_setting['doctype'] = 'List View Settings'
                list_view_settings = frappe.get_doc(list_view_setting)
                # setting name here is necessary because autoname is set as prompt
                list_view_settings.name = name
                list_view_settings.insert()
        frappe.delete_doc("DocType", "List View Setting", force=True)
        frappe.db.commit()
| mit | Python |
bfbb1e4fb8324df9a039c18359c053190f9e7e64 | Make config list able to handle long values | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty | dusty/commands/manage_config.py | dusty/commands/manage_config.py | import textwrap
from prettytable import PrettyTable
from ..config import get_config, save_config_value
from .. import constants
from ..log import log_to_client
def _eligible_config_keys_for_setting():
    """Return the sorted config keys a user may set from the CLI: keys not
    yet present in the config, or whose current value is a string."""
    config = get_config()
    def _is_settable(key):
        return key not in config or isinstance(config[key], basestring)
    return [key for key in sorted(constants.CONFIG_SETTINGS.keys()) if _is_settable(key)]
def list_config():
    """Log a table of every config setting: key, description and current
    value, with long text wrapped at 80 columns so the table stays
    readable."""
    config = get_config()
    table = PrettyTable(['Key', 'Description', 'Value'])
    for key, description in constants.CONFIG_SETTINGS.iteritems():
        table.add_row([key,
                       '\n'.join(textwrap.wrap(description, 80)),
                       '\n'.join(textwrap.wrap(str(config.get(key)), 80))])
    log_to_client(table.get_string(sortby='Key'))
def list_config_values():
    """Log the raw config mapping without any table formatting."""
    log_to_client(get_config())
def save_value(key, value):
    """Validate *key* and persist *value* to the config, logging the
    change to the client.

    Raises KeyError when the key is unknown, or when its current value is
    a non-string (only string-valued settings may be modified here).
    """
    config = get_config()
    if key not in constants.CONFIG_SETTINGS:
        message = 'Your key {} must be in the list {}'.format(key, sorted(constants.CONFIG_SETTINGS.keys()))
        raise KeyError(message)
    if key in config and not isinstance(config[key], basestring):
        raise KeyError('You can only modify string values in your config. {} has type {}'.format(key, type(config[key])))
    save_config_value(key, value)
    log_to_client('Set {} to {} in your config'.format(key, value))
| import textwrap
from prettytable import PrettyTable
from ..config import get_config, save_config_value
from .. import constants
from ..log import log_to_client
def _eligible_config_keys_for_setting():
config = get_config()
return [key for key in sorted(constants.CONFIG_SETTINGS.keys())
if key not in config or isinstance(config[key], basestring)]
def list_config():
    """Log a table of every config setting (key, description, value).

    NOTE(review): only descriptions are wrapped; a long value can blow
    out the table width."""
    config = get_config()
    table = PrettyTable(['Key', 'Description', 'Value'])
    for key, description in constants.CONFIG_SETTINGS.iteritems():
        table.add_row([key, '\n'.join(textwrap.wrap(description, 80)), config.get(key)])
    log_to_client(table.get_string(sortby='Key'))
def list_config_values():
log_to_client(get_config())
def save_value(key, value):
config = get_config()
if key not in constants.CONFIG_SETTINGS:
raise KeyError('Your key {} must be in the list {}'.format(key, sorted(constants.CONFIG_SETTINGS.keys())))
if key in config and not isinstance(config[key], basestring):
raise KeyError('You can only modify string values in your config. {} has type {}'.format(key, type(config[key])))
else:
save_config_value(key, value)
log_to_client('Set {} to {} in your config'.format(key, value))
| mit | Python |
d34fbc70d5873d159c311caed41b745b05534ce9 | Read Input: Read file complete or by lines | unstko/adventofcode2016 | lib/solution.py | lib/solution.py | class Solution:
    def __init__(self, nr):
        # nr: the puzzle identifier; also names the directory that holds
        # this puzzle's input.txt (see read_input).
        self.nr = nr
        # test: presumably toggles test-input mode in subclasses — it is
        # not read anywhere in this base class; confirm before relying on it.
        self.test = False
        self.input = ""
        # Per-part answers and "answered yet?" flags; index 0 = part 1.
        self.solution = ["(not calculated)", "(not calculated)"]
        self.calculated = [False, False]
    def __str__(self):
        # Human-readable summary of both parts' answers.
        return "Solution 1: {}\nSolution 2: {}".format(self.solution[0], self.solution[1])
    def calculate(self, test=False):
        # Abstract hook: subclasses compute and store their answers here.
        raise NotImplementedError('users must define calculate to use this base class')
def get_solution(self, nr):
if nr in [1, 2]:
return self.solution[nr-1]
def set_solution(self, nr, value):
if nr in [1, 2]:
self.solution[nr-1] = value
self.calculated[nr-1] = True
def is_calculated(self, nr):
if nr in [1, 2]:
return self.calculated[nr-1]
def read_input(self, lines=False):
with open(self.nr+"/input.txt", "r") as f:
if lines:
self.input = f.readlines()
else:
self.input = f.read()
| class Solution:
def __init__(self, nr):
self.nr = nr
self.test = False
self.input = ""
self.solution = ["(not calculated)", "(not calculated)"]
self.calculated = [False, False]
def __str__(self):
return "Solution 1: {}\nSolution 2: {}".format(self.solution[0], self.solution[1])
def calculate(self, test=False):
raise NotImplementedError('users must define calculate to use this base class')
def get_solution(self, nr):
if nr in [1, 2]:
return self.solution[nr-1]
def set_solution(self, nr, value):
if nr in [1, 2]:
self.solution[nr-1] = value
self.calculated[nr-1] = True
def is_calculated(self, nr):
if nr in [1, 2]:
return self.calculated[nr-1]
    def read_input(self):
        # Read the whole <nr>/input.txt into self.input as one string.
        with open(self.nr+"/input.txt", "r") as f:
            self.input = f.read()
| mit | Python |
0f8b6f4a12c23e5498e8135a3f39da40c4333788 | Add a function which allows packet dumps to be produced easily for inserting into regression tests. | gvnn3/PCS,gvnn3/PCS | tests/hexdumper.py | tests/hexdumper.py | # This hack by: Raymond Hettinger
class hexdumper:
    """Given a byte array, turn it into a string. hex bytes to stdout."""
    # NOTE(review): Python 2 idioms throughout (xrange, str.translate with
    # a 256-char table string); this will not run unmodified on Python 3.
    def __init__(self):
        # Translation table: printable bytes map to themselves, everything
        # else to '.' (len(repr(chr(x))) == 3 <=> the char is printable).
        self.FILTER=''.join([(len(repr(chr(x)))==3) and chr(x) or '.' \
            for x in range(256)])
    # pretty dumping hate machine.
    def dump(self, src, length=8):
        # Classic hex dump: offset, hex byte column, printable ASCII column.
        result=[]
        for i in xrange(0, len(src), length):
            s = src[i:i+length]
            hexa = ' '.join(["%02X"%ord(x) for x in s])
            printable = s.translate(self.FILTER)
            result.append("%04X   %-*s   %s\n" % \
                (i, length*3, hexa, printable))
        return ''.join(result)
    # dump in a way which can be embedded in a python string.
    def dump2(self, src, length=8):
        # Emits lines like "\xAA\xBB..." with trailing backslash
        # continuations, ready to paste into a regression test.
        result=[]
        for i in xrange(0, len(src), length):
            s = src[i:i+length]
            hexa = ''.join(["\\x%02X"%ord(x) for x in s])
            result.append("\"%-*s\"" % (length*3, hexa))
            if i + length < len(src):
                result.append(" \\")
            result.append("\n")
        return ''.join(result)
| # This hack by: Raymond Hettinger
class hexdumper:
    """Given a byte array, turn it into a string. hex bytes to stdout."""
    # NOTE(review): Python 2 idioms (xrange, str.translate with a 256-char
    # table string); this will not run unmodified on Python 3.
    def __init__(self):
        # Translation table: printable bytes map to themselves, everything
        # else to '.' (len(repr(chr(x))) == 3 <=> the char is printable).
        self.FILTER=''.join([(len(repr(chr(x)))==3) and chr(x) or '.' \
            for x in range(256)])
    def dump(self, src, length=8):
        # Classic hex dump: offset, hex byte column, printable ASCII column.
        result=[]
        for i in xrange(0, len(src), length):
            s = src[i:i+length]
            hexa = ' '.join(["%02X"%ord(x) for x in s])
            printable = s.translate(self.FILTER)
            result.append("%04X   %-*s   %s\n" % \
                (i, length*3, hexa, printable))
        return ''.join(result)
| bsd-3-clause | Python |
f3b0d41054df95a762557a96d93d49fbd9bf00ff | Remove network parameter input | backpacker69/pypeerassets,PeerAssets/pypeerassets | tests/kutiltest.py | tests/kutiltest.py | import unittest
from hashlib import sha256
from pypeerassets.kutil import Kutil
class KutilTestCase(unittest.TestCase):
    """Tests for the Kutil crypto helper: network parameters, key
    generation (random, from seed, from WIF) and address derivation."""
    @classmethod
    def setUpClass(cls):
        print('''Starting Kutil class tests.
        This class handles all things cryptography.''')
    def test_network_parameter_load(self):
        '''tests if loading of network parameteres is accurate'''
        mykey = Kutil(network="ppc")
        self.assertEqual(mykey.denomination, 1000000)
        self.assertEqual(mykey.wif_prefix, b'b7')
        self.assertEqual(mykey.pubkeyhash, b'37')
    def test_key_generation(self):
        '''test privkey/pubkey generation'''
        mykey = Kutil(network="ppc")
        # check if keys are in proper format
        self.assertTrue(isinstance(mykey.privkey, bytes))
        self.assertTrue(isinstance(mykey.pubkey, bytes))
    def test_key_generation_from_seed(self):
        '''check if key generation is what is expected from seed'''
        seed = "Hello PeerAssets."
        mykey = Kutil(seed=seed, network="ppc")
        self.assertEqual(mykey.privkey, b'680510f7f5e622347bc8d9e54e109a9192353693ef61d82d2d5bdf4bc9fd638b')
        self.assertEqual(mykey.pubkey, b'037cf9e7664b5d10ce209cf9e2c7f68baa06f1950114f25677531b959edd7e670c')
    def test_address_generation(self):
        '''test if addresses are properly made'''
        mykey = Kutil(network="ppc")
        # Peercoin mainnet addresses start with "P" and are 34 chars long.
        self.assertTrue(mykey.address.decode().startswith("P"))
        self.assertTrue(isinstance(mykey.address, bytes))
        self.assertTrue(len(mykey.address), 34)
    def test_wif_import(self):
        '''test importing WIF privkey'''
        # Network is inferred from the WIF prefix here (no network kwarg).
        mykey = Kutil(wif="7A6cFXZSZnNUzutCMcuE1hyqDPtysH2LrSA9i5sqP2BPCLrAvZM")
        self.assertEqual(mykey.address, b'PJxwxuBqjpHhhdpV6KY1pXxUSUNb6omyNW')
        self.assertEqual(mykey.pubkey, b'02a119079ef5be1032bed61cc295cdccde58bf70e0dd982399c024d1263740f398')
        self.assertEqual(mykey.privkey, b'b43d38cdfa04ecea88f7d9d7e95b15b476e4a6c3f551ae7b45344831c3098da2')
if __name__ == '__main__':
unittest.main()
| import unittest
from hashlib import sha256
from pypeerassets.kutil import Kutil
class KutilTestCase(unittest.TestCase):
    """Tests for the Kutil crypto helper: network parameters, key
    generation (random, from seed, from WIF) and address derivation."""
    @classmethod
    def setUpClass(cls):
        print('''Starting Kutil class tests.
        This class handles all things cryptography.''')
    def test_network_parameter_load(self):
        '''tests if loading of network parameteres is accurate'''
        mykey = Kutil(network="ppc")
        self.assertEqual(mykey.denomination, 1000000)
        self.assertEqual(mykey.wif_prefix, b'b7')
        self.assertEqual(mykey.pubkeyhash, b'37')
    def test_key_generation(self):
        '''test privkey/pubkey generation'''
        mykey = Kutil(network="ppc")
        # check if keys are in proper format
        self.assertTrue(isinstance(mykey.privkey, bytes))
        self.assertTrue(isinstance(mykey.pubkey, bytes))
    def test_key_generation_from_seed(self):
        '''check if key generation is what is expected from seed'''
        seed = "Hello PeerAssets."
        mykey = Kutil(seed=seed, network="ppc")
        self.assertEqual(mykey.privkey, b'680510f7f5e622347bc8d9e54e109a9192353693ef61d82d2d5bdf4bc9fd638b')
        self.assertEqual(mykey.pubkey, b'037cf9e7664b5d10ce209cf9e2c7f68baa06f1950114f25677531b959edd7e670c')
    def test_address_generation(self):
        '''test if addresses are properly made'''
        mykey = Kutil(network="ppc")
        # Peercoin mainnet addresses start with "P" and are 34 chars long.
        self.assertTrue(mykey.address.decode().startswith("P"))
        self.assertTrue(isinstance(mykey.address, bytes))
        self.assertTrue(len(mykey.address), 34)
    def test_wif_import(self):
        '''test importing WIF privkey'''
        # Network is passed explicitly alongside the WIF string here.
        mykey = Kutil(wif="7A6cFXZSZnNUzutCMcuE1hyqDPtysH2LrSA9i5sqP2BPCLrAvZM", network="ppc")
        self.assertEqual(mykey.address, b'PJxwxuBqjpHhhdpV6KY1pXxUSUNb6omyNW')
        self.assertEqual(mykey.pubkey, b'02a119079ef5be1032bed61cc295cdccde58bf70e0dd982399c024d1263740f398')
        self.assertEqual(mykey.privkey, b'b43d38cdfa04ecea88f7d9d7e95b15b476e4a6c3f551ae7b45344831c3098da2')
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
15bf95dad618b605de0ac46a0fc19e7132e81245 | add test for login | gitgik/flask-rest-api,gitgik/flask-rest-api | tests/test_auth.py | tests/test_auth.py | import unittest
import json
from app import create_app, db
class AuthTestCase(unittest.TestCase):
    """Test case for the authentication blueprint."""
    def setUp(self):
        """Set up test variables."""
        self.app = create_app(config_name="testing")
        self.client = self.app.test_client
        self.user_data = {
            'email': 'test@example.com',
            'password': 'test_password'
        }
        with self.app.app_context():
            # create all tables (fresh DB for every test)
            db.session.close()
            db.drop_all()
            db.create_all()
    def test_registration(self):
        """Test user registration works correctly."""
        res = self.client().post('/auth/register', data=self.user_data)
        result = json.loads(res.data.decode())
        self.assertEqual(result['message'], "You registered successfully.")
        self.assertEqual(res.status_code, 201)
    def test_already_registered_user(self):
        """Test that a user cannot be registered twice."""
        res = self.client().post('/auth/register', data=self.user_data)
        self.assertEqual(res.status_code, 201)
        second_res = self.client().post('/auth/register', data=self.user_data)
        # 202 Accepted is the API's response for a duplicate signup.
        self.assertEqual(second_res.status_code, 202)
        result = json.loads(second_res.data.decode())
        self.assertEqual(
            result['message'], "User already exists. Please log in.")
    def test_user_login(self):
        """Test registered user can login.

        NOTE(review): the user is never registered within this test and
        setUp recreates the DB — confirm this login is expected to succeed.
        """
        res = self.client().post('/auth/login', data=self.user_data)
        result = json.loads(res.data.decode())
        self.assertEqual(res.status_code, 200)
        self.assertEqual(result['message'], "You logged in successfully.")
        self.assertTrue(result['auth_token'])
    def test_registered_user_login(self):
        """Test non registered users cannot login."""
        not_a_user = {
            'email': 'not_a_user@example.com',
            'password': 'nope'
        }
        res = self.client().post('/auth/login', data=not_a_user)
        result = json.loads(res.data.decode())
        self.assertEqual(res.status_code, 401)
        self.assertEqual(result['message'], "Unauthorized. Please register")
| import unittest
import json
from app import create_app, db
class AuthTestCase(unittest.TestCase):
    """Test case for the authentication blueprint."""
    def setUp(self):
        """Set up test variables."""
        self.app = create_app(config_name="testing")
        self.client = self.app.test_client
        self.user_data = {
            'email': 'test@example.com',
            'password': 'test_password'
        }
        with self.app.app_context():
            # create all tables (fresh DB for every test)
            db.session.close()
            db.drop_all()
            db.create_all()
    def test_registration(self):
        """Test user registration works correctly."""
        res = self.client().post('/auth/register', data=self.user_data)
        result = json.loads(res.data.decode())
        self.assertEqual(result['message'], "You registered successfully.")
        self.assertEqual(res.status_code, 201)
    def test_already_registered_user(self):
        """Test that a user cannot be registered twice."""
        res = self.client().post('/auth/register', data=self.user_data)
        self.assertEqual(res.status_code, 201)
        second_res = self.client().post('/auth/register', data=self.user_data)
        # 202 Accepted is the API's response for a duplicate signup.
        self.assertEqual(second_res.status_code, 202)
        result = json.loads(second_res.data.decode())
        self.assertEqual(
            result['message'], "User already exists. Please log in.")
| mit | Python |
72036d3d65a78973e98d1a2085cf4b1f444852ff | fix test class | devlights/try-python | tests/test_libs.py | tests/test_libs.py | import io
import os
import re
import time
import trypython.common.commonfunc as libs
def test_chdir():
    """libs.chdir must enter the target directory inside the context and
    restore the original working directory afterwards."""
    # arrange
    orig_dir = os.path.abspath('.')
    dest_dir = os.path.abspath('/tmp')
    os.chdir(orig_dir)
    assert orig_dir == os.path.abspath(os.curdir)
    # act
    with libs.chdir(dest_dir) as current_dir:
        assert dest_dir == current_dir
        assert dest_dir == os.path.abspath(os.curdir)
        print(current_dir)
    # assert
    assert orig_dir == os.path.abspath(os.curdir)
def test_timetracer():
    """timetracer must write an '[<name>] elapsed: ... seconds' line."""
    # arrange — 'buf' rather than 'file' to avoid shadowing the builtin
    buf = io.StringIO()
    # act
    with libs.timetracer('test', buf):
        time.sleep(0.3)
    # assert
    buf.seek(io.SEEK_SET)
    result = str(buf.read()).strip()
    assert result
    # Fix: the old pattern r'[test] ...' was a character class and its
    # match result was discarded, so the format was never verified.
    assert re.match(r'\[test\] elapsed: .* seconds', result)
def test_open_inout():
    """open_inout must yield (read-mode, write-mode) file objects for the
    given input/output paths."""
    # arrange
    in_file = '/tmp/test_open_input.txt'
    out_file = '/tmp/test_open_input2.txt'
    with open(in_file, 'w', encoding='utf-8') as fp:
        fp.writelines(str(x) for x in range(10))
    try:
        # act
        # assert
        with libs.open_inout(in_file, out_file) as (in_fp, out_fp):
            assert in_fp
            assert out_fp
            assert in_file == in_fp.name
            assert out_file == out_fp.name
            assert in_fp.mode == 'r'
            assert out_fp.mode == 'w'
    finally:
        # Always clean up the temp files, even if an assertion failed.
        if os.path.exists(in_file):
            os.unlink(in_file)
        if os.path.exists(out_file):
            os.unlink(out_file)
| import io
import os
import re
import time
import trypython.common.commonfunc as libs
def test_chdir():
# arrange
orig_dir = os.path.abspath('.')
dest_dir = os.path.abspath('/tmp')
os.chdir(orig_dir)
assert orig_dir == os.path.abspath(os.curdir)
# act
with libs.chdir(dest_dir) as current_dir:
assert dest_dir == current_dir
assert dest_dir == os.path.abspath(os.curdir)
print(current_dir)
# assert
assert orig_dir == os.path.abspath(os.curdir)
def test_timetracer():
    """timetracer should write an elapsed-time line to the given stream."""
    # arrange
    file = io.StringIO()
    # act
    with libs.timetracer('test', file):
        time.sleep(0.3)
    # assert
    file.seek(io.SEEK_SET)
    result = str(file.read()).strip()
    assert result
    # NOTE(review): the match result is bound but never asserted, and
    # r'[test]...' is a character class — this line does not actually
    # verify the message format.
    r = re.match(r'[test] elapsed: .* seconds', result)
    pass
def test_open_inout():
# arrange
in_file = '/tmp/test_open_input.txt'
out_file = '/tmp/test_open_input2.txt'
with open(in_file, 'w', encoding='utf-8') as fp:
fp.writelines(str(x) for x in range(10))
try:
# act
# assert
with libs.open_inout(in_file, out_file) as (in_fp, out_fp):
assert in_fp
assert out_fp
assert in_file == in_fp.name
assert out_file == out_fp.name
assert in_fp.mode == 'r'
assert out_fp.mode == 'w'
finally:
if os.path.exists(in_file):
os.unlink(in_file)
if os.path.exists(out_file):
os.unlink(out_file)
| mit | Python |
713572fa6f68899955de4d9f7c5e3c685d89cf2e | test with broken link source conflict | arecarn/dploy | tests/test_link.py | tests/test_link.py | """
Tests for the link sub command
"""
# pylint: disable=unused-argument
# pylint: disable=missing-docstring
# disable lint errors for function names longer that 30 characters
# pylint: disable=invalid-name
import os
import pytest
import dploy
import util
def test_link_directory(source_a, dest):
dploy.link(['source_a'], 'dest/source_a_link')
assert os.path.islink('dest/source_a_link')
def test_link_file(source_a, dest):
dploy.link(['source_a/aaa/aaa'], 'dest/source_a_link')
assert os.path.islink('dest/source_a_link')
def test_link_with_non_existant_source(dest):
with pytest.raises(SystemExit):
dploy.link(['source_a'], 'dest/source_a_link')
def test_link_with_non_existant_dest(source_a):
with pytest.raises(SystemExit):
dploy.link(['source_a'], 'dest/source_a_link')
def test_link_with_read_only_dest(file_a, dest):
util.read_only('dest')
dploy.link(['file_a'], 'dest/file_a_link')
def test_link_with_write_only_source(file_a, dest):
util.write_only('file_a')
dploy.link(['file_a'], 'dest/file_a_link')
def test_link_with_conflicting_broken_lint_at_dest(file_a, dest):
with pytest.raises(SystemExit):
os.symlink('non_existant_source', 'dest/file_a_link')
dploy.link(['file_a'], 'dest/file_a_link')
| """
Tests for the link sub command
"""
# pylint: disable=unused-argument
# pylint: disable=missing-docstring
# disable lint errors for function names longer that 30 characters
# pylint: disable=invalid-name
import os
import pytest
import dploy
import util
def test_link_directory(source_a, dest):
dploy.link(['source_a'], 'dest/source_a_link')
assert os.path.islink('dest/source_a_link')
def test_link_file(source_a, dest):
dploy.link(['source_a/aaa/aaa'], 'dest/source_a_link')
assert os.path.islink('dest/source_a_link')
def test_link_with_non_existant_source(dest):
with pytest.raises(SystemExit):
dploy.link(['source_a'], 'dest/source_a_link')
def test_link_with_non_existant_dest(source_a):
with pytest.raises(SystemExit):
dploy.link(['source_a'], 'dest/source_a_link')
def test_link_with_read_only_dest(file_a, dest):
util.read_only('dest')
dploy.link(['file_a'], 'dest/file_a_link')
def test_link_with_write_only_source(file_a, dest):
util.write_only('file_a')
dploy.link(['file_a'], 'dest/file_a_link')
| mit | Python |
b46cf19729a101b5c31492c420c0bbef37b05323 | Update auth finalize to request permanent auth_token given code after installing app | Shopify/shopify_django_app,Shopify/shopify_django_app | shopify_app/views.py | shopify_app/views.py | from django.shortcuts import render_to_response, redirect
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.conf import settings
import shopify
def _return_address(request):
return request.session.get('return_to') or reverse('root_path')
def login(request):
# Ask user for their ${shop}.myshopify.com address
# If the ${shop}.myshopify.com address is already provided in the URL,
# just skip to authenticate
if request.REQUEST.get('shop'):
return authenticate(request)
return render_to_response('shopify_app/login.html', {},
context_instance=RequestContext(request))
def authenticate(request):
shop = request.REQUEST.get('shop')
if shop:
scope = settings.SHOPIFY_API_SCOPE
redirect_uri = request.build_absolute_uri(reverse('shopify_app.views.finalize'))
permission_url = shopify.Session(shop.strip()).create_permission_url(scope, redirect_uri)
return redirect(permission_url)
return redirect(_return_address(request))
def finalize(request):
shop_url = request.REQUEST.get('shop')
try:
shopify_session = shopify.Session(shop_url)
request.session['shopify'] = {
"shop_url": shop_url,
"access_token": shopify_session.request_token(request.REQUEST)
}
except Exception:
messages.error(request, "Could not log in to Shopify store.")
return redirect(reverse('shopify_app.views.login'))
messages.info(request, "Logged in to shopify store.")
response = redirect(_return_address(request))
request.session.pop('return_to', None)
return response
def logout(request):
request.session.pop('shopify', None)
messages.info(request, "Successfully logged out.")
return redirect(reverse('shopify_app.views.login'))
| from django.shortcuts import render_to_response, redirect
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.conf import settings
import shopify
def _return_address(request):
return request.session.get('return_to') or reverse('root_path')
def login(request):
# Ask user for their ${shop}.myshopify.com address
# If the ${shop}.myshopify.com address is already provided in the URL,
# just skip to authenticate
if request.REQUEST.get('shop'):
return authenticate(request)
return render_to_response('shopify_app/login.html', {},
context_instance=RequestContext(request))
def authenticate(request):
shop = request.REQUEST.get('shop')
if shop:
scope = settings.SHOPIFY_API_SCOPE
redirect_uri = request.build_absolute_uri(reverse('shopify_app.views.finalize'))
permission_url = shopify.Session(shop.strip()).create_permission_url(scope, redirect_uri)
return redirect(permission_url)
return redirect(_return_address(request))
def finalize(request):
shop_url = request.REQUEST.get('shop')
try:
shopify_session = shopify.Session(shop_url, request.REQUEST)
except shopify.ValidationException:
messages.error(request, "Could not log in to Shopify store.")
return redirect(reverse('shopify_app.views.login'))
request.session['shopify'] = {
"shop_url": shop_url,
"access_token": shopify_session.token
}
messages.info(request, "Logged in to shopify store.")
response = redirect(_return_address(request))
request.session.pop('return_to', None)
return response
def logout(request):
request.session.pop('shopify', None)
messages.info(request, "Successfully logged out.")
return redirect(reverse('shopify_app.views.login'))
| mit | Python |
601a31de9c4175fcd112c3cee2248e490de55eb9 | Add more tests | wong2/pick | tests/test_pick.py | tests/test_pick.py | #-*-coding:utf-8-*-
import unittest
from pick import Picker
class TestPick(unittest.TestCase):
def test_move_up_down(self):
title = 'Please choose an option: '
options = ['option1', 'option2', 'option3']
picker = Picker(options, title)
picker.move_up()
assert picker.get_selected() == ('option3', 2)
picker.move_down()
picker.move_down()
assert picker.get_selected() == ('option2', 1)
def test_default_index(self):
title = 'Please choose an option: '
options = ['option1', 'option2', 'option3']
picker = Picker(options, title, default_index=1)
assert picker.get_selected() == ('option2', 1)
def test_get_lines(self):
title = 'Please choose an option: '
options = ['option1', 'option2', 'option3']
picker = Picker(options, title, indicator='*')
lines, current_line = picker.get_lines()
assert lines == [title, '', '* option1', ' option2', ' option3']
assert current_line == 3
def test_no_title(self):
options = ['option1', 'option2', 'option3']
picker = Picker(options)
lines, current_line = picker.get_lines()
assert current_line == 1
if __name__ == '__main__':
unittest.main()
| #-*-coding:utf-8-*-
import unittest
from pick import pick, Picker
class TestPick(unittest.TestCase):
def test_pick(self):
title = 'Please choose an option: '
options = ['option1', 'option2', 'option3']
picker = Picker(options, title)
picker.move_up()
assert picker.get_selected() == ('option3', 2)
picker.move_down()
picker.move_down()
assert picker.get_selected() == ('option2', 1)
if __name__ == '__main__':
unittest.main()
| mit | Python |
9d87b19a1a5a39e4b18278fad4851dbe2e7459c3 | clean up formatting in settings.py | tobiasmcnulty/django-cache-machine,janusnic/django-cache-machine,blag/django-cache-machine,django-cache-machine/django-cache-machine | examples/cache_machine/settings.py | examples/cache_machine/settings.py | CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.PyLibMCCache',
'LOCATION': 'localhost:11211',
},
}
TEST_RUNNER = 'django_nose.runner.NoseTestSuiteRunner'
DATABASES = {
'default': {
'NAME': ':memory:',
'ENGINE': 'django.db.backends.sqlite3',
},
'slave': {
'NAME': 'test_slave.db',
'ENGINE': 'django.db.backends.sqlite3',
},
}
INSTALLED_APPS = (
'django_nose',
'tests.testapp',
)
SECRET_KEY = 'ok'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
| CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.PyLibMCCache',
'LOCATION': 'localhost:11211',
},
}
TEST_RUNNER = 'django_nose.runner.NoseTestSuiteRunner'
DATABASES = {
'default': {
'NAME': ':memory:',
'ENGINE': 'django.db.backends.sqlite3',
},
'slave': {
'NAME': 'test_slave.db',
'ENGINE': 'django.db.backends.sqlite3',
}
}
INSTALLED_APPS = (
'django_nose',
'tests.testapp',
)
SECRET_KEY = 'ok'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
| bsd-3-clause | Python |
40ba5a610cb51944f70250c9f005c3d2307c4d3f | Update cli parameters | AlexMathew/litslist | litslist/cmd.py | litslist/cmd.py | """
Usage:
litslist (-h | --help | --version)
litslist create <count>
Options:
-h, --help
Show this help message and exit
--version
Display the version of Scrapple
"""
from __future__ import print_function
from docopt import docopt
from . import commands
def runCLI():
"""
CLI controller for litslist command
"""
args = docopt(__doc__, version='0.1.0')
try:
commands.run_create(args['<count>'])
except Exception as e:
print('\n', e, '\n')
if __name__ == '__main__':
runCLI()
| """
Usage:
litslist create
Options:
-h, --help
Show this help message and exit
--version
Display the version of Scrapple
"""
from __future__ import print_function
from docopt import docopt
from . import commands
def runCLI():
"""
CLI controller for litslist command
"""
args = docopt(__doc__, version='0.1.0')
try:
commands.run_create()
except Exception as e:
print('\n', e, '\n')
if __name__ == '__main__':
runCLI()
| mit | Python |
8d43f902fc24217bbed3e703c8c87654fd4e4d8f | fix typo | dmargala/tpcorr | examples/find_validation_plates.py | examples/find_validation_plates.py | #!/usr/bin/env python
import numpy as np
import astropy.table
import bossdata.meta
def main():
meta_db = bossdata.meta.Database(lite=False, verbose=True)
meta_db.cursor.execute('SELECT PLATE,MJD,FIBER FROM meta WHERE ((ANCILLARY_TARGET2&(1<<20))>0)')
rows = meta_db.cursor.fetchall()
table = astropy.table.Table(rows=rows, names=['PLATE','MJD','FIBER'])
table_by_obs = table.group_by(['PLATE','MJD'])
counts_per_spec = [(grp['PLATE'][0], np.sum(grp['FIBER'] <= 500),np.sum(grp['FIBER'] > 500)) for grp in table_by_obs.groups]
at_least_10 = [obs[0] for obs in counts_per_spec if obs[1]+obs[2] >= 10]
validiation_plates = [obs[0] for obs in counts_per_spec if obs[1] >= 10 and obs[2] >= 10]
print 'Number of observations with ancillary targets:', len(counts_per_spec)
print 'Number of observations with at least 10 ancillary targets: ', len(at_least_10)
print 'Number of observations with at least 10 ancillary targets per spectrograph:', len(validiation_plates)
print 'Validation plates:', validiation_plates
bad_chunks = ('boss35','boss36','boss37','boss38')
# LAMBDA_EFF=4000 and ZWARNING&1<<7=0 and CHUNK not in ('boss35','boss36','boss37','boss38')
validiation_plates_str = ','.join(['{}'.format(plate) for plate in validiation_plates])
meta_db.cursor.execute('SELECT PLATE,MJD,FIBER FROM meta WHERE LAMBDA_EFF=4000 and ZWARNING=0 and PLATE in ({})'.format(
validiation_plates_str))
rows = meta_db.cursor.fetchall()
print 'Number of observations with offset target on validation plates:', len(rows)
if __name__ == '__main__':
main() | #!/usr/bin/env python
import numpy as np
import astropy.table
import bossdata.meta
def main():
meta_db = bossdata.meta.Database(lite=False, verbose=True)
meta_db.cursor.execute('SELECT PLATE,MJD,FIBER FROM meta WHERE ((ANCILLARY_TARGET2&(1<<20))>0)')
rows = meta_db.cursor.fetchall()
table = astropy.table.Table(rows=rows, names=['PLATE','MJD','FIBER'])
table_by_obs = table.group_by(['PLATE','MJD'])
counts_per_spec = [(grp['PLATE'][0], np.sum(grp['FIBER'] <= 500),np.sum(grp['FIBER'] > 500)) for grp in table_by_obs.groups]
at_least_10 = [obs[0] for obs in counts_per_spec if obs[1]+obs[2] >= 10]
validiation_plates = [obs[0] for obs in counts_per_spec if obs[1] >= 10 and obs[2] >= 10]
print 'Number of observations with ancillary targets:', len(counts_per_spec)
print 'Number of observations with at least 10 ancillary targets: ', len(at_least_10)
print 'Number of observations with at least 10 ancillary targets per spectrograph:', len(validiation_plates)
print 'Validation plates:', validiation_plates
bad_chunks = ('boss35','boss36','boss37','boss38')
# LAMBDA_EFF=4000 and ZWARNING&1<<7=0 and CHUNK not in ('boss35','boss36','boss37','boss38')
validiation_plates_str = ','.join(['{}'.format(plate) for palte in validiation_plates])
meta_db.cursor.execute('SELECT PLATE,MJD,FIBER FROM meta WHERE LAMBDA_EFF=4000 and ZWARNING=0 and PLATE in ({})'.format(
validiation_plates_str))
rows = meta_db.cursor.fetchall()
print 'Number of observations with offset target on validation plates:', len(rows)
if __name__ == '__main__':
main() | mit | Python |
93ba3a0f51cd1d48b4f21950c962019ce8b20d7a | Update version.py | sagasurvey/saga,sagasurvey/saga | SAGA/version.py | SAGA/version.py | """
SAGA package version
"""
__version__ = "0.12.1"
| """
SAGA package version
"""
__version__ = "0.12.0"
| mit | Python |
48bde7a86956610bafd9c4dd4bf45c6a15ac9828 | adjust cache time of bridge account | uw-it-aca/bridge-sis-provisioner,uw-it-aca/bridge-sis-provisioner | sis_provisioner/cache_implementation.py | sis_provisioner/cache_implementation.py | import re
from django.conf import settings
from restclients.cache_implementation import MemcachedCache, TimedCache
from restclients.exceptions import DataFailureException
FIVE_SECONDS = 5
FIFTEEN_MINS = 60 * 15
HALF_HOUR = 60 * 30
ONE_HOUR = 60 * 60
FOUR_HOURS = 60 * 60 * 4
EIGHT_HOURS = 60 * 60 * 8
ONE_DAY = 60 * 60 * 24
ONE_WEEK = 60 * 60 * 24 * 7
def get_cache_time(service, url):
if "pws" == service or "gws" == service:
return FOUR_HOURS
if "bridge" == service:
if re.match('^/api/author/custom_fields', url):
return ONE_DAY
else:
return ONE_HOUR
return FOUR_HOURS
class BridgeAccountCache(TimedCache):
def getCache(self, service, url, headers):
return self._response_from_cache(
service, url, headers, get_cache_time(service, url))
def processResponse(self, service, url, response):
return self._process_response(service, url, response)
| import re
from django.conf import settings
from restclients.cache_implementation import MemcachedCache, TimedCache
from restclients.exceptions import DataFailureException
FIVE_SECONDS = 5
FIFTEEN_MINS = 60 * 15
ONE_HOUR = 60 * 60
FOUR_HOURS = 60 * 60 * 4
ONE_DAY = 60 * 60 * 24
ONE_WEEK = 60 * 60 * 24 * 7
def get_cache_time(service, url):
if "pws" == service or\
"gws" == service:
return FOUR_HOURS
if "bridge" == service:
if re.match('^/api/author/custom_fields', url):
return ONE_DAY
else:
return FIFTEEN_MINS
return FOUR_HOURS
class BridgeAccountCache(TimedCache):
def getCache(self, service, url, headers):
return self._response_from_cache(
service, url, headers, get_cache_time(service, url))
def processResponse(self, service, url, response):
return self._process_response(service, url, response)
| apache-2.0 | Python |
253fae940c78df55f0dcdaf2515ba0ed157a8f15 | load System Settings graph when setting up the system, re #1631 | archesproject/arches,archesproject/arches,cvast/arches,archesproject/arches,cvast/arches,cvast/arches,cvast/arches,archesproject/arches | arches/app/models/migrations/0002.py | arches/app/models/migrations/0002.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2017-04-24 13:08
from __future__ import unicode_literals
import os
import uuid
import django.db.models.deletion
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
from django.core import management
from arches.app.models.system_settings import SystemSettings as settings
def forwards_func(apps, schema_editor):
# We get the model from the versioned app registry;
# if we directly import it, it'll be the wrong version
management.call_command('packages', operation='import_graphs', source=os.path.join(settings.ROOT_DIR, 'db', 'graphs', 'resource_models', 'Arches System Settings.json'))
def reverse_func(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('models', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='maplayer',
name='centerx',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='maplayer',
name='centery',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='maplayer',
name='zoom',
field=models.FloatField(blank=True, null=True),
),
migrations.RunPython(forwards_func, reverse_func)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2017-04-24 13:08
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
('models', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='maplayer',
name='centerx',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='maplayer',
name='centery',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='maplayer',
name='zoom',
field=models.FloatField(blank=True, null=True),
)
]
| agpl-3.0 | Python |
c648d397049a600a5434441ea4a6f1a5f05a511a | Update makeBuild.py to actually create lower-case vessel object with Vessel as alias | shiplab/vesseljs,shiplab/vesseljs,shiplab/vesseljs | tools/makeBuild.py | tools/makeBuild.py | #Now the "geeky" md5 hash and archiving is disabled. It was of no practical use, really.
from datetime import datetime
classes = ["JSONSpecObject.js", "Ship.js", "Structure.js", "Hull.js", "BaseObject.js", "DerivedObject.js", "ShipState.js", "StateModule.js", "WaveCreator.js", "WaveMotion.js", "Positioning.js", "FuelConsumption.js", "HullResistance.js", "PropellerInteraction.js"]
fileIO = ["browseShip.js", "loadShip.js", "downloadShip.js"]
math = ["Vectors.js", "interpolation.js", "areaCalculations.js", "volumeCalculations.js", "parametricWeightParsons.js", "combineWeights.js"]
filepaths = list(map((lambda filename: "../source/math/"+filename), math)) \
+ list(map((lambda filename: "../source/classes/"+filename), classes)) \
+ list(map((lambda filename: "../source/fileIO/"+filename), fileIO))
code = """
/*
Import like this in HTML:
<script src="vessel.js"></script>
Then in javascript use classes and functions with a vessel prefix. Example:
let ship = new vessel.Ship(someSpecification);
*/
"use strict";
var vessel = {};
var Vessel = vessel; //alias for backwards compatibility
(function() {
"""
for filepath in filepaths:
file = open(filepath)
code += file.read()
file.close()
#This interface is currently very restricted.
#We can also include access to all of the functions.
#I just don't want to maintain a long list manually.
#Maybe there is an easier way...
code += """
Object.assign(vessel, {
/*JSONSpecObject: JSONSpecObject,*/
Ship: Ship,
Structure: Structure,
Hull: Hull,
BaseObject: BaseObject,
DerivedObject: DerivedObject,
ShipState: ShipState,
StateModule: StateModule,
WaveCreator: WaveCreator,
WaveMotion: WaveMotion,
Positioning: Positioning,
FuelConsumption: FuelConsumption,
HullResistance: HullResistance,
PropellerInteraction: PropellerInteraction,
browseShip: browseShip,
loadShip: loadShip,
downloadShip: downloadShip,
f: {
linearFromArrays: linearFromArrays,
bilinear: bilinear,
bisectionSearch
},
Vectors: Vectors
});
})();
"""
timestamp = str(datetime.today())
#from hashlib import md5
#codehash = md5(code.encode()).hexdigest()
header = "//vessel.js library, built " + timestamp# + ", Checksum: " + codehash
output = header + code
#stamp = timestamp[0:17] + "." + codehash[0:5]
oFile = open("../build/vessel.js", "w")
oFile.write(output)
oFile.close()
#oFile = open("archive/vessel_"+stamp.replace("-","").replace(":","").replace(" ","")+".js", "w")
#oFile.write(output)
#oFile.close()
| #Now the "geeky" md5 hash and archiving is disabled. It was of no practical use, really.
from datetime import datetime
classes = ["JSONSpecObject.js", "Ship.js", "Structure.js", "Hull.js", "BaseObject.js", "DerivedObject.js", "ShipState.js", "StateModule.js", "WaveCreator.js", "WaveMotion.js", "Positioning.js", "FuelConsumption.js", "HullResistance.js", "PropellerInteraction.js"]
fileIO = ["browseShip.js", "loadShip.js", "downloadShip.js"]
math = ["Vectors.js", "interpolation.js", "areaCalculations.js", "volumeCalculations.js", "parametricWeightParsons.js", "combineWeights.js"]
filepaths = list(map((lambda filename: "../source/math/"+filename), math)) \
+ list(map((lambda filename: "../source/classes/"+filename), classes)) \
+ list(map((lambda filename: "../source/fileIO/"+filename), fileIO))
code = """
/*
Import like this in HTML:
<script src="vessel.js"></script>
Then in javascript use classes and functions with a vessel prefix. Example:
let ship = new Vessel.Ship(someSpecification);
*/
"use strict";
var Vessel = {};
(function() {
"""
for filepath in filepaths:
file = open(filepath)
code += file.read()
file.close()
#This interface is currently very restricted.
#We can also include access to all of the functions.
#I just don't want to maintain a long list manually.
#Maybe there is an easier way...
code += """
Object.assign(Vessel, {
/*JSONSpecObject: JSONSpecObject,*/
Ship: Ship,
Structure: Structure,
Hull: Hull,
BaseObject: BaseObject,
DerivedObject: DerivedObject,
ShipState: ShipState,
StateModule: StateModule,
WaveCreator: WaveCreator,
WaveMotion: WaveMotion,
Positioning: Positioning,
FuelConsumption: FuelConsumption,
HullResistance: HullResistance,
PropellerInteraction: PropellerInteraction,
browseShip: browseShip,
loadShip: loadShip,
downloadShip: downloadShip,
f: {
linearFromArrays: linearFromArrays,
bilinear: bilinear,
bisectionSearch
},
Vectors: Vectors
});
})();
"""
timestamp = str(datetime.today())
#from hashlib import md5
#codehash = md5(code.encode()).hexdigest()
header = "//vessel.js library, built " + timestamp# + ", Checksum: " + codehash
output = header + code
#stamp = timestamp[0:17] + "." + codehash[0:5]
oFile = open("../build/vessel.js", "w")
oFile.write(output)
oFile.close()
#oFile = open("archive/Vessel_"+stamp.replace("-","").replace(":","").replace(" ","")+".js", "w")
#oFile.write(output)
#oFile.close()
| mit | Python |
503ba661c388e2fcae6d648dd80f05843442bdd6 | Clean up GLM example | yarikoptic/pystatsmodels,detrout/debian-statsmodels,rgommers/statsmodels,wdurhamh/statsmodels,wdurhamh/statsmodels,bert9bert/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,detrout/debian-statsmodels,bzero/statsmodels,DonBeo/statsmodels,nguyentu1602/statsmodels,nguyentu1602/statsmodels,bsipocz/statsmodels,josef-pkt/statsmodels,wwf5067/statsmodels,bzero/statsmodels,bert9bert/statsmodels,josef-pkt/statsmodels,wwf5067/statsmodels,astocko/statsmodels,DonBeo/statsmodels,wdurhamh/statsmodels,edhuckle/statsmodels,wdurhamh/statsmodels,bavardage/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,bzero/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,wzbozon/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,gef756/statsmodels,nvoron23/statsmodels,edhuckle/statsmodels,bsipocz/statsmodels,alekz112/statsmodels,nvoron23/statsmodels,bert9bert/statsmodels,musically-ut/statsmodels,astocko/statsmodels,wkfwkf/statsmodels,hainm/statsmodels,jstoxrocky/statsmodels,detrout/debian-statsmodels,edhuckle/statsmodels,jseabold/statsmodels,hainm/statsmodels,edhuckle/statsmodels,bavardage/statsmodels,adammenges/statsmodels,Averroes/statsmodels,bert9bert/statsmodels,bsipocz/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,Averroes/statsmodels,bzero/statsmodels,cbmoore/statsmodels,bashtage/statsmodels,huongttlan/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,saketkc/statsmodels,alekz112/statsmodels,DonBeo/statsmodels,rgommers/statsmodels,kiyoto/statsmodels,kiyoto/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,cbmoore/statsmodels,bashtage/statsmodels,cbmoore/statsmodels,wzbozon/statsmodels,nguyentu1602/statsmodels,hainm/statsmodels,yl565/statsmodels,YihaoLu/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,wzbozon/statsmodels,statsmodels/statsmodels,jstoxrocky/statsmodels,alekz112/statsmodels,detrout/debian-statsmodels,kiyoto/sta
tsmodels,nguyentu1602/statsmodels,yl565/statsmodels,wwf5067/statsmodels,yarikoptic/pystatsmodels,jstoxrocky/statsmodels,waynenilsen/statsmodels,adammenges/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,astocko/statsmodels,yarikoptic/pystatsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,musically-ut/statsmodels,adammenges/statsmodels,statsmodels/statsmodels,musically-ut/statsmodels,ChadFulton/statsmodels,gef756/statsmodels,gef756/statsmodels,hlin117/statsmodels,bashtage/statsmodels,nvoron23/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels,gef756/statsmodels,adammenges/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,yl565/statsmodels,saketkc/statsmodels,bert9bert/statsmodels,jseabold/statsmodels,phobson/statsmodels,ChadFulton/statsmodels,hlin117/statsmodels,phobson/statsmodels,hlin117/statsmodels,astocko/statsmodels,kiyoto/statsmodels,Averroes/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,statsmodels/statsmodels,rgommers/statsmodels,jseabold/statsmodels,saketkc/statsmodels,hlin117/statsmodels,yl565/statsmodels,edhuckle/statsmodels,waynenilsen/statsmodels,kiyoto/statsmodels,hainm/statsmodels,wkfwkf/statsmodels,huongttlan/statsmodels,josef-pkt/statsmodels,huongttlan/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,DonBeo/statsmodels,bavardage/statsmodels,bashtage/statsmodels,gef756/statsmodels,musically-ut/statsmodels,huongttlan/statsmodels,ChadFulton/statsmodels,nvoron23/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,wkfwkf/statsmodels,cbmoore/statsmodels,YihaoLu/statsmodels,waynenilsen/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,phobson/statsmodels,alekz112/statsmodels,bavardage/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,phobson/statsmodels | examples/example_formula_glm.py | examples/example_formula_glm.py | """GLM Formula Example
"""
import statsmodels.api as sm
import numpy as np
star98 = sm.datasets.star98.load_pandas().data
formula = 'SUCCESS ~ LOWINC + PERASIAN + PERBLACK + PERHISP + PCTCHRT '
formula += '+ PCTYRRND + PERMINTE*AVYRSEXP*AVSALK + PERSPENK*PTRATIO*PCTAF'
dta = star98[["NABOVE", "NBELOW", "LOWINC", "PERASIAN", "PERBLACK", "PERHISP",
"PCTCHRT", "PCTYRRND", "PERMINTE", "AVYRSEXP", "AVSALK",
"PERSPENK", "PTRATIO", "PCTAF"]]
endog = dta["NABOVE"]/(dta["NABOVE"] + dta.pop("NBELOW"))
del dta["NABOVE"]
dta["SUCCESS"] = endog
mod = sm.GLM.from_formula(formula=formula, df=dta,
family=sm.families.Binomial()).fit()
# try passing a formula object, using arbitrary user-injected code
def double_it(x):
return 2*x
formula = 'SUCCESS ~ double_it(LOWINC) + PERASIAN + PERBLACK + PERHISP + '
formula += 'PCTCHRT '
formula += '+ PCTYRRND + PERMINTE*AVYRSEXP*AVSALK + PERSPENK*PTRATIO*PCTAF'
mod2 = sm.GLM.from_formula(formula=formula, df=dta,
family=sm.families.Binomial()).fit()
| import statsmodels.api as sm
import numpy as np
star98 = sm.datasets.star98.load_pandas().data
formula = 'SUCCESS ~ LOWINC + PERASIAN + PERBLACK + PERHISP + PCTCHRT '
formula += '+ PCTYRRND + PERMINTE*AVYRSEXP*AVSALK + PERSPENK*PTRATIO*PCTAF'
dta = star98[["NABOVE", "NBELOW", "LOWINC", "PERASIAN", "PERBLACK", "PERHISP",
"PCTCHRT", "PCTYRRND", "PERMINTE", "AVYRSEXP", "AVSALK",
"PERSPENK", "PTRATIO", "PCTAF"]]
endog = dta["NABOVE"]/(dta["NABOVE"] + dta.pop("NBELOW"))
del dta["NABOVE"]
dta["SUCCESS"] = endog
mod = sm.GLM.from_formula(formula=formula, df=dta,
family=sm.families.Binomial()).fit()
# try passing a formula object, using user-injected code
def double_it(x):
return 2*x
# What is the correct entry point for this? Should users be able to inject
# code into default_env or similar? I don't see a way to do this yet using
# the approach I have been using, it should be an argument to Desc
from charlton.builtins import builtins
builtins['double_it'] = double_it
formula = 'SUCCESS ~ double_it(LOWINC) + PERASIAN + PERBLACK + PERHISP + '
formula += 'PCTCHRT '
formula += '+ PCTYRRND + PERMINTE*AVYRSEXP*AVSALK + PERSPENK*PTRATIO*PCTAF'
mod2 = sm.GLM.from_formula(formula=formula, df=dta,
family=sm.families.Binomial()).fit()
| bsd-3-clause | Python |
21830028c62dd551646002021d5518813b0b407c | Delete Makefiles after cmake is generated | f0rki/cb-multios,f0rki/cb-multios,f0rki/cb-multios,f0rki/cb-multios,f0rki/cb-multios | tools/makefiles.py | tools/makefiles.py | #!/usr/bin/env python
import glob
import os
import re
TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
CHAL_DIR = os.path.join(os.path.dirname(TOOLS_DIR), 'cqe-challenges')
def generate_cmake(path):
# Path to the new CMakelists.txt
cmake_path = os.path.join(os.path.dirname(path), 'CMakeLists.txt')
print('Generating: {}'.format(cmake_path))
# Read in the Makefile
with open(path) as f:
old_make = f.readlines()
# Default values for CFLAGS
make_vars = {'CFLAGS': '-fno-builtin -Wno-int-to-pointer-cast -Wno-writable-strings -nostdinc '}
for line in old_make:
line = re.sub('[\r\n\t]', '', line)
# Parse out any variables in the Makefile
if "=" in line:
var, val = line.split('=', 1)
var = var.strip()
val = val.strip()
# Keep the CFLAGS that have already been set
if var == "CFLAGS":
make_vars[var] += val.replace('-Werror', '')
else:
make_vars[var] = val
# Generate the CMake data
cmake = ""
for var, val in make_vars.iteritems():
cmake += 'set( {} "{}" )\n'.format(var, val)
cmake += 'buildCB(${CFLAGS})'
# Write the CMakelists
with open(cmake_path, 'w') as f:
f.write(cmake)
# Delete the now unused Makefile
os.remove(path)
def main():
makefiles = glob.glob(os.path.join(CHAL_DIR, '*', 'Makefile'))
map(generate_cmake, makefiles)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import glob
import os
import re
TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
CHAL_DIR = os.path.join(os.path.dirname(TOOLS_DIR), 'cqe-challenges')
def generate_cmake(path):
# Path to the new CMakelists.txt
cmake_path = os.path.join(os.path.dirname(path), 'CMakeLists.txt')
print('Generating: {}'.format(cmake_path))
# Read in the Makefile
with open(path) as f:
old_make = f.readlines()
# Default values for CFLAGS
make_vars = {'CFLAGS': '-fno-builtin -Wno-int-to-pointer-cast -Wno-writable-strings -nostdinc '}
for line in old_make:
line = re.sub('[\r\n\t]', '', line)
# Parse out any variables in the Makefile
if "=" in line:
var, val = line.split('=', 1)
var = var.strip()
val = val.strip()
# Keep the CFLAGS that have already been set
if var == "CFLAGS":
make_vars[var] += val.replace('-Werror', '')
else:
make_vars[var] = val
# Generate the CMake data
cmake = ""
for var, val in make_vars.iteritems():
cmake += 'set( {} "{}" )\n'.format(var, val)
cmake += 'buildCB(${CFLAGS})'
# Write the CMakelists
with open(cmake_path, 'w') as f:
f.write(cmake)
def main():
makefiles = glob.glob(os.path.join(CHAL_DIR, '*', 'Makefile'))
map(generate_cmake, makefiles)
if __name__ == '__main__':
main()
| mit | Python |
06e84c25bb783490c963963ccb44cf07d521a197 | Reword docstrings for exception classes | piotr-rusin/spam-lists | spam_lists/exceptions.py | spam_lists/exceptions.py | # -*- coding: utf-8 -*-
class SpamBLError(Exception):
'''There was an error during testing a url or host'''
class UnknownCodeError(SpamBLError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamBLError):
'''The API key used to query the service was not authorized'''
| # -*- coding: utf-8 -*-
class SpamBLError(Exception):
''' Base exception class for spambl module '''
class UnknownCodeError(SpamBLError):
''' Raise when trying to use an unexpected value of dnsbl return code '''
class UnathorizedAPIKeyError(SpamBLError):
''' Raise when trying to use an unathorized api key '''
| mit | Python |
e92ffbebf8affb93837d196e830c58ec5c8a87cc | update script | daftscience/Labrador,daftscience/Labrador,daftscience/Labrador,daftscience/Labrador | scripts/gpio_functions.py | scripts/gpio_functions.py | from gpiozero import Button
import git
import subprocess
GIT_PATH = '/home/pi/projects/labrador/'
restart_supervisor = "supervisorctl reload"
def update():
print("Update")
g = git.cmd.Git(GIT_PATH)
g.pull()
process = subprocess.Popen(restart_supervisor.split(), stdout=subprocess.PIPE)
output, error = process.communicate()
print(output)
update_btn = Button(17)
update_btn.hold_time = 7
update_btn.when_held = update
while True:
pass
# channel_list = [17, 22, 23, 27]
| from gpiozero import Button
import git
import subprocess
GIT_PATH = '/home/pi/projects/labrador/'
restart_supervisor = "supervisorctl reload"
def update():
print("Update")
g = git.cmd.Git(GIT_PATH)
g.pull()
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
output, error = process.communicate()
print(output)
update_btn = Button(17)
update_btn.hold_time = 7
update_btn.when_held = update
while True:
pass
# channel_list = [17, 22, 23, 27]
| bsd-3-clause | Python |
9175f999c2394b1b1e5f2fa9bfb962f120a831a0 | Add actions and id as part of the constructor | brainbots/assistant | assisstant/bots/bots/abstract_bot.py | assisstant/bots/bots/abstract_bot.py | from abc import ABC, abstractmethod, abstractproperty
class AbstractBot(ABC):
def __init__(self, id, actions):
self.id = id
self.actions = actions
@abstractmethod
def validate_intent(self, intent):
return
@abstractmethod
def execute(self, intent):
return
| from abc import ABC, abstractmethod, abstractproperty
class AbstractBot(ABC):
# @abstractmethod
# @abstractproperty
# def id(self, id):
# pass
# @abstractmethod
def __init__(self, id):
self.id = id
@abstractmethod
def validate_intent(self, intent):
pass
@abstractmethod
def validate_intent(self, intent):
pass
@abstractmethod
def execute(self, intent):
pass
| apache-2.0 | Python |
260e3a546e56edb59b6d04f754e6033734ee1c7a | Tweak runner_spec descriptions. | winstonwolff/expectorant | specs/runner_spec.py | specs/runner_spec.py | from expectorant import *
from expectorant import runner
import glob
@describe('runner')
def _():
@describe('find_files()')
def _():
@it('returns *_spec.py files in current directory when args=[]')
def _():
args = []
expect(runner.find_files(args)) == glob.glob('./**/*_spec.py', recursive=True)
@it('passes args through when args are filenames')
def _():
args = ['specs/runner_spec.py']
expect(runner.find_files(args)) == ['specs/runner_spec.py']
@it('returns all spec filenames when args is directory')
def _():
args = ['specs']
expect(runner.find_files(args)) == glob.glob('specs/*_spec.py')
@it('raises error when args has filename that doesnt exist')
def _():
args = ['non_existent.file']
expect(lambda: runner.find_files(args)).to(raise_error, FileNotFoundError)
| from expectorant import *
from expectorant import runner
import glob
@describe('runner')
def _():
@describe('find_files()')
def _():
@it('returns fileenames in spec/ when args=[]')
def _():
args = []
expect(runner.find_files(args)) == glob.glob('./**/*_spec.py', recursive=True)
@it('passes args through when args are filenames')
def _():
args = ['specs/runner_spec.py']
expect(runner.find_files(args)) == ['specs/runner_spec.py']
@it('returns all spec filenames when args is directory')
def _():
args = ['specs']
expect(runner.find_files(args)) == glob.glob('specs/*_spec.py')
@it('throws error when args has filename that doesnt exist')
def _():
args = ['non_existent.file']
expect(lambda: runner.find_files(args)).to(raise_error, FileNotFoundError)
| mit | Python |
04f4d01914c72c664b2d3e2f362dd7d37a06e326 | Bump version | atugushev/django-static-pages | static_pages/__init__.py | static_pages/__init__.py | VERSION = '0.1.1'
| VERSION = '0.1'
| mit | Python |
d5bcfd724966a497d0bde02da7f2061a228c67cd | Update dependencies for apidoc. | dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk | utils/apidoc/apidoc.gyp | utils/apidoc/apidoc.gyp | # Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
{
'targets': [
{
'target_name': 'api_docs',
'type': 'none',
'dependencies': [
'../../frog/dart-frog.gyp:frog',
'../../runtime/dart-runtime.gyp:dart',
],
'includes': [
'../../corelib/src/corelib_sources.gypi',
],
'actions': [
{
'action_name': 'run_apidoc',
'inputs': [
'<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)dart<(EXECUTABLE_SUFFIX)',
'<(PRODUCT_DIR)/frog/bin/frog',
'<!@(["python", "../../tools/list_files.py", "\\.(css|ico|js|json|png|sh|txt|yaml|py)$", ".", "../../lib/dartdoc"])',
'<!@(["python", "../../tools/list_files.py", "\\.dart$", "../../lib", "../../runtime/lib", "../../runtime/bin"])',
'<@(_sources)',
],
'outputs': [
'<(PRODUCT_DIR)/api_docs/index.html',
'<(PRODUCT_DIR)/api_docs/client-live-nav.js',
],
'action': [
'<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)dart<(EXECUTABLE_SUFFIX)',
'apidoc.dart',
'--out=<(PRODUCT_DIR)/api_docs',
'--mode=live-nav',
'--compiler=<(PRODUCT_DIR)/frog/bin/frog',
],
},
],
}
],
}
| # Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
{
'targets': [
{
'target_name': 'api_docs',
'type': 'none',
'dependencies': [
'../../frog/dart-frog.gyp:frog',
'../../runtime/dart-runtime.gyp:dart',
],
'actions': [
{
'action_name': 'run_apidoc',
'inputs': [
'<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)dart<(EXECUTABLE_SUFFIX)',
'<(PRODUCT_DIR)/frog/bin/frog',
'<!@(["python", "../../tools/list_files.py", "\\.(css|dart|ico|js|json|png|sh|txt|yaml|py)$", ".", "../../lib/dartdoc"])',
],
'outputs': [
'<(PRODUCT_DIR)/api_docs/index.html',
'<(PRODUCT_DIR)/api_docs/client-live-nav.js',
],
'action': [
'<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)dart<(EXECUTABLE_SUFFIX)',
'apidoc.dart',
'--out=<(PRODUCT_DIR)/api_docs',
'--mode=live-nav',
'--compiler=<(PRODUCT_DIR)/frog/bin/frog',
],
},
],
}
],
}
| bsd-3-clause | Python |
903c192657b6b714b9c8aada7c08c7e6dc013d45 | Check if the post_to_twitter value has been submitted in the form. | disqus/overseer | overseer/admin.py | overseer/admin.py | from django import forms
from django.contrib import admin
from overseer import conf
from overseer.models import Service, Event, EventUpdate
class ServiceAdmin(admin.ModelAdmin):
list_display = ('name', 'status', 'order', 'date_updated')
search_fields = ('name', 'description')
prepopulated_fields = {'slug': ('name',)}
class EventForm(forms.ModelForm):
if conf.TWITTER_ACCESS_TOKEN and conf.TWITTER_ACCESS_SECRET:
post_to_twitter = forms.BooleanField(required=False, label="Post to Twitter", help_text="This will send a tweet with a brief summary, the permalink to the event (if BASE_URL is defined), and the hashtag of #status for EACH update you add below.")
class Meta:
model = EventUpdate
class EventUpdateInline(admin.StackedInline):
model = EventUpdate
extra = 1
class EventAdmin(admin.ModelAdmin):
form = EventForm
list_display = ('date_created', 'description', 'status', 'date_updated')
search_fields = ('description', 'message')
list_filter = ('services',)
inlines = [EventUpdateInline]
def save_formset(self, request, form, formset, change):
instances = formset.save()
if 'post_to_twitter' in form.cleaned_data and form.cleaned_data['post_to_twitter']:
for obj in instances:
obj.event.post_to_twitter(obj.get_message())
class EventUpdateAdmin(admin.ModelAdmin):
list_display = ('date_created', 'message', 'status', 'event')
search_fields = ('message',)
admin.site.register(Service, ServiceAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(EventUpdate, EventUpdateAdmin) | from django import forms
from django.contrib import admin
from overseer import conf
from overseer.models import Service, Event, EventUpdate
class ServiceAdmin(admin.ModelAdmin):
list_display = ('name', 'status', 'order', 'date_updated')
search_fields = ('name', 'description')
prepopulated_fields = {'slug': ('name',)}
class EventForm(forms.ModelForm):
if conf.TWITTER_ACCESS_TOKEN and conf.TWITTER_ACCESS_SECRET:
post_to_twitter = forms.BooleanField(required=False, label="Post to Twitter", help_text="This will send a tweet with a brief summary, the permalink to the event (if BASE_URL is defined), and the hashtag of #status for EACH update you add below.")
class Meta:
model = EventUpdate
class EventUpdateInline(admin.StackedInline):
model = EventUpdate
extra = 1
class EventAdmin(admin.ModelAdmin):
form = EventForm
list_display = ('date_created', 'description', 'status', 'date_updated')
search_fields = ('description', 'message')
list_filter = ('services',)
inlines = [EventUpdateInline]
def save_formset(self, request, form, formset, change):
instances = formset.save()
if form.cleaned_data['post_to_twitter']:
for obj in instances:
obj.event.post_to_twitter(obj.get_message())
class EventUpdateAdmin(admin.ModelAdmin):
list_display = ('date_created', 'message', 'status', 'event')
search_fields = ('message',)
admin.site.register(Service, ServiceAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(EventUpdate, EventUpdateAdmin) | apache-2.0 | Python |
b06630128e4fe91000a27c9dbbc70656d5347bfd | Change name in conflicts | catroot/rethinkdb,eliangidoni/rethinkdb,4talesa/rethinkdb,captainpete/rethinkdb,Qinusty/rethinkdb,mbroadst/rethinkdb,KSanthanam/rethinkdb,wkennington/rethinkdb,4talesa/rethinkdb,alash3al/rethinkdb,captainpete/rethinkdb,robertjpayne/rethinkdb,pap/rethinkdb,JackieXie168/rethinkdb,nviennot/rethinkdb,spblightadv/rethinkdb,greyhwndz/rethinkdb,losywee/rethinkdb,gdi2290/rethinkdb,mcanthony/rethinkdb,jesseditson/rethinkdb,eliangidoni/rethinkdb,tempbottle/rethinkdb,greyhwndz/rethinkdb,sontek/rethinkdb,mbroadst/rethinkdb,AntouanK/rethinkdb,wkennington/rethinkdb,gavioto/rethinkdb,lenstr/rethinkdb,jesseditson/rethinkdb,yaolinz/rethinkdb,4talesa/rethinkdb,jesseditson/rethinkdb,JackieXie168/rethinkdb,AntouanK/rethinkdb,yakovenkodenis/rethinkdb,bchavez/rethinkdb,yakovenkodenis/rethinkdb,AntouanK/rethinkdb,greyhwndz/rethinkdb,RubenKelevra/rethinkdb,robertjpayne/rethinkdb,sebadiaz/rethinkdb,losywee/rethinkdb,wkennington/rethinkdb,catroot/rethinkdb,victorbriz/rethinkdb,bchavez/rethinkdb,losywee/rethinkdb,Wilbeibi/rethinkdb,sebadiaz/rethinkdb,scripni/rethinkdb,gavioto/rethinkdb,victorbriz/rethinkdb,sebadiaz/rethinkdb,wujf/rethinkdb,niieani/rethinkdb,sontek/rethinkdb,scripni/rethinkdb,losywee/rethinkdb,yakovenkodenis/rethinkdb,elkingtonmcb/rethinkdb,Wilbeibi/rethinkdb,tempbottle/rethinkdb,scripni/rethinkdb,urandu/rethinkdb,KSanthanam/rethinkdb,sontek/rethinkdb,nviennot/rethinkdb,yakovenkodenis/rethinkdb,matthaywardwebdesign/rethinkdb,matthaywardwebdesign/rethinkdb,victorbriz/rethinkdb,urandu/rethinkdb,sbusso/rethinkdb,alash3al/rethinkdb,jesseditson/rethinkdb,bpradipt/rethinkdb,mcanthony/rethinkdb,jfriedly/rethinkdb,ajose01/rethinkdb,niieani/rethinkdb,urandu/rethinkdb,jmptrader/rethinkdb,gavioto/rethinkdb,JackieXie168/rethinkdb,jmptrader/rethinkdb,mbroadst/rethinkdb,jfriedly/rethinkdb,mcanthony/rethinkdb,elkingtonmcb/rethinkdb,Qinusty/rethinkdb,sontek/rethinkdb,urandu/rethinkdb,RubenKelevra/rethinkdb,ayumilong/rethink
db,rrampage/rethinkdb,mquandalle/rethinkdb,nviennot/rethinkdb,jesseditson/rethinkdb,lenstr/rethinkdb,nviennot/rethinkdb,sbusso/rethinkdb,losywee/rethinkdb,mquandalle/rethinkdb,alash3al/rethinkdb,jesseditson/rethinkdb,gdi2290/rethinkdb,ajose01/rethinkdb,lenstr/rethinkdb,ayumilong/rethinkdb,catroot/rethinkdb,AtnNn/rethinkdb,KSanthanam/rethinkdb,spblightadv/rethinkdb,bpradipt/rethinkdb,eliangidoni/rethinkdb,matthaywardwebdesign/rethinkdb,wkennington/rethinkdb,eliangidoni/rethinkdb,wojons/rethinkdb,RubenKelevra/rethinkdb,AntouanK/rethinkdb,lenstr/rethinkdb,greyhwndz/rethinkdb,yakovenkodenis/rethinkdb,urandu/rethinkdb,dparnell/rethinkdb,nviennot/rethinkdb,grandquista/rethinkdb,sebadiaz/rethinkdb,grandquista/rethinkdb,AtnNn/rethinkdb,greyhwndz/rethinkdb,pap/rethinkdb,ayumilong/rethinkdb,scripni/rethinkdb,KSanthanam/rethinkdb,jmptrader/rethinkdb,scripni/rethinkdb,robertjpayne/rethinkdb,yaolinz/rethinkdb,Wilbeibi/rethinkdb,matthaywardwebdesign/rethinkdb,eliangidoni/rethinkdb,nviennot/rethinkdb,Wilbeibi/rethinkdb,sebadiaz/rethinkdb,marshall007/rethinkdb,wkennington/rethinkdb,yaolinz/rethinkdb,sebadiaz/rethinkdb,4talesa/rethinkdb,yakovenkodenis/rethinkdb,elkingtonmcb/rethinkdb,wojons/rethinkdb,rrampage/rethinkdb,victorbriz/rethinkdb,wkennington/rethinkdb,bchavez/rethinkdb,sontek/rethinkdb,urandu/rethinkdb,rrampage/rethinkdb,4talesa/rethinkdb,ajose01/rethinkdb,wojons/rethinkdb,wujf/rethinkdb,marshall007/rethinkdb,eliangidoni/rethinkdb,niieani/rethinkdb,catroot/rethinkdb,gavioto/rethinkdb,alash3al/rethinkdb,captainpete/rethinkdb,nviennot/rethinkdb,alash3al/rethinkdb,bpradipt/rethinkdb,lenstr/rethinkdb,greyhwndz/rethinkdb,wujf/rethinkdb,Qinusty/rethinkdb,rrampage/rethinkdb,mquandalle/rethinkdb,captainpete/rethinkdb,dparnell/rethinkdb,RubenKelevra/rethinkdb,4talesa/rethinkdb,JackieXie168/rethinkdb,catroot/rethinkdb,wujf/rethinkdb,pap/rethinkdb,4talesa/rethinkdb,gdi2290/rethinkdb,AtnNn/rethinkdb,bchavez/rethinkdb,Qinusty/rethinkdb,AtnNn/rethinkdb,elkingtonmcb/rethinkdb,dparnell/re
thinkdb,jfriedly/rethinkdb,gdi2290/rethinkdb,bpradipt/rethinkdb,mquandalle/rethinkdb,grandquista/rethinkdb,captainpete/rethinkdb,bpradipt/rethinkdb,sontek/rethinkdb,niieani/rethinkdb,AntouanK/rethinkdb,marshall007/rethinkdb,lenstr/rethinkdb,alash3al/rethinkdb,elkingtonmcb/rethinkdb,KSanthanam/rethinkdb,losywee/rethinkdb,niieani/rethinkdb,mquandalle/rethinkdb,urandu/rethinkdb,jfriedly/rethinkdb,ayumilong/rethinkdb,tempbottle/rethinkdb,wojons/rethinkdb,marshall007/rethinkdb,tempbottle/rethinkdb,bchavez/rethinkdb,robertjpayne/rethinkdb,JackieXie168/rethinkdb,ayumilong/rethinkdb,spblightadv/rethinkdb,grandquista/rethinkdb,greyhwndz/rethinkdb,robertjpayne/rethinkdb,catroot/rethinkdb,grandquista/rethinkdb,nviennot/rethinkdb,AtnNn/rethinkdb,rrampage/rethinkdb,Wilbeibi/rethinkdb,mcanthony/rethinkdb,jmptrader/rethinkdb,matthaywardwebdesign/rethinkdb,jfriedly/rethinkdb,AntouanK/rethinkdb,sbusso/rethinkdb,wujf/rethinkdb,rrampage/rethinkdb,grandquista/rethinkdb,scripni/rethinkdb,matthaywardwebdesign/rethinkdb,mbroadst/rethinkdb,jfriedly/rethinkdb,gavioto/rethinkdb,JackieXie168/rethinkdb,bchavez/rethinkdb,ayumilong/rethinkdb,Qinusty/rethinkdb,jesseditson/rethinkdb,wojons/rethinkdb,ayumilong/rethinkdb,yaolinz/rethinkdb,captainpete/rethinkdb,sbusso/rethinkdb,pap/rethinkdb,captainpete/rethinkdb,AtnNn/rethinkdb,RubenKelevra/rethinkdb,jmptrader/rethinkdb,wojons/rethinkdb,sbusso/rethinkdb,spblightadv/rethinkdb,losywee/rethinkdb,marshall007/rethinkdb,dparnell/rethinkdb,robertjpayne/rethinkdb,spblightadv/rethinkdb,victorbriz/rethinkdb,sbusso/rethinkdb,gdi2290/rethinkdb,wojons/rethinkdb,KSanthanam/rethinkdb,Qinusty/rethinkdb,captainpete/rethinkdb,tempbottle/rethinkdb,tempbottle/rethinkdb,RubenKelevra/rethinkdb,gavioto/rethinkdb,gavioto/rethinkdb,marshall007/rethinkdb,urandu/rethinkdb,lenstr/rethinkdb,sontek/rethinkdb,bpradipt/rethinkdb,spblightadv/rethinkdb,niieani/rethinkdb,elkingtonmcb/rethinkdb,jesseditson/rethinkdb,spblightadv/rethinkdb,alash3al/rethinkdb,ajose01/rethinkdb,AntouanK/r
ethinkdb,Qinusty/rethinkdb,mquandalle/rethinkdb,dparnell/rethinkdb,pap/rethinkdb,wujf/rethinkdb,yaolinz/rethinkdb,matthaywardwebdesign/rethinkdb,greyhwndz/rethinkdb,jfriedly/rethinkdb,ajose01/rethinkdb,mquandalle/rethinkdb,matthaywardwebdesign/rethinkdb,scripni/rethinkdb,robertjpayne/rethinkdb,rrampage/rethinkdb,bchavez/rethinkdb,Wilbeibi/rethinkdb,catroot/rethinkdb,sebadiaz/rethinkdb,grandquista/rethinkdb,niieani/rethinkdb,tempbottle/rethinkdb,RubenKelevra/rethinkdb,bpradipt/rethinkdb,AtnNn/rethinkdb,jfriedly/rethinkdb,pap/rethinkdb,wkennington/rethinkdb,gdi2290/rethinkdb,dparnell/rethinkdb,AntouanK/rethinkdb,victorbriz/rethinkdb,bpradipt/rethinkdb,wujf/rethinkdb,robertjpayne/rethinkdb,dparnell/rethinkdb,grandquista/rethinkdb,AtnNn/rethinkdb,victorbriz/rethinkdb,pap/rethinkdb,marshall007/rethinkdb,tempbottle/rethinkdb,4talesa/rethinkdb,Wilbeibi/rethinkdb,mbroadst/rethinkdb,yakovenkodenis/rethinkdb,gavioto/rethinkdb,mbroadst/rethinkdb,mbroadst/rethinkdb,mcanthony/rethinkdb,mbroadst/rethinkdb,RubenKelevra/rethinkdb,mquandalle/rethinkdb,rrampage/rethinkdb,wkennington/rethinkdb,sontek/rethinkdb,dparnell/rethinkdb,catroot/rethinkdb,ajose01/rethinkdb,ajose01/rethinkdb,eliangidoni/rethinkdb,jmptrader/rethinkdb,JackieXie168/rethinkdb,yakovenkodenis/rethinkdb,scripni/rethinkdb,grandquista/rethinkdb,losywee/rethinkdb,KSanthanam/rethinkdb,Qinusty/rethinkdb,mcanthony/rethinkdb,jmptrader/rethinkdb,sebadiaz/rethinkdb,yaolinz/rethinkdb,eliangidoni/rethinkdb,yaolinz/rethinkdb,ajose01/rethinkdb,elkingtonmcb/rethinkdb,elkingtonmcb/rethinkdb,alash3al/rethinkdb,lenstr/rethinkdb,bchavez/rethinkdb,ayumilong/rethinkdb,Wilbeibi/rethinkdb,dparnell/rethinkdb,eliangidoni/rethinkdb,sbusso/rethinkdb,yaolinz/rethinkdb,pap/rethinkdb,bchavez/rethinkdb,Qinusty/rethinkdb,bpradipt/rethinkdb,mcanthony/rethinkdb,sbusso/rethinkdb,JackieXie168/rethinkdb,gdi2290/rethinkdb,JackieXie168/rethinkdb,mbroadst/rethinkdb,spblightadv/rethinkdb,marshall007/rethinkdb,victorbriz/rethinkdb,robertjpayne/rethinkdb,jmpt
rader/rethinkdb,KSanthanam/rethinkdb,niieani/rethinkdb,wojons/rethinkdb,mcanthony/rethinkdb | test/interface/conflict.py | test/interface/conflict.py | #!/usr/bin/env python
import sys, os, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, http_admin, scenario_common
from vcoptparse import *
op = OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
with driver.Metacluster() as metacluster:
cluster1 = driver.Cluster(metacluster)
executable_path, command_prefix, serve_options = scenario_common.parse_mode_flags(opts)
print "Spinning up two processes..."
files1 = driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix)
proc1 = driver.Process(cluster1, files1,
executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
files2 = driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix)
proc2 = driver.Process(cluster1, files2,
executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
proc1.wait_until_started_up()
proc2.wait_until_started_up()
cluster1.check()
access1 = http_admin.ClusterAccess([("localhost", proc1.http_port)])
access2 = http_admin.ClusterAccess([("localhost", proc2.http_port)])
dc = access1.add_datacenter("new_dc")
db = access1.add_database('new_db')
table = access1.add_namespace("rdb", "new_table", None, None, {}, {}, None, db)
access2.update_cluster_data(10)
assert len(access1.get_directory()) == len(access2.get_directory()) == 2
print "Splitting cluster, then waiting 20s..."
cluster2 = driver.Cluster(metacluster)
metacluster.move_processes(cluster1, cluster2, [proc2])
time.sleep(20)
print "Conflicting datacenter name..."
access1.rename(dc, "Buzz")
access2.rename(access2.find_datacenter(dc.uuid), "Fizz")
access1.rename(table, "other_name")
access2.rename(access2.find_namespace(table.uuid), "another_name")
print "Joining cluster, then waiting 10s..."
metacluster.move_processes(cluster2, cluster1, [proc2])
time.sleep(10)
cluster1.check()
cluster2.check()
issues = access1.get_issues()
assert issues[0]["type"] == "VCLOCK_CONFLICT"
assert len(access1.get_directory()) == len(access2.get_directory()) == 2
time.sleep(1000000)
print "Done."
| #!/usr/bin/env python
import sys, os, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, http_admin, scenario_common
from vcoptparse import *
op = OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
with driver.Metacluster() as metacluster:
cluster1 = driver.Cluster(metacluster)
executable_path, command_prefix, serve_options = scenario_common.parse_mode_flags(opts)
print "Spinning up two processes..."
files1 = driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix)
proc1 = driver.Process(cluster1, files1,
executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
files2 = driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix)
proc2 = driver.Process(cluster1, files2,
executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
proc1.wait_until_started_up()
proc2.wait_until_started_up()
cluster1.check()
access1 = http_admin.ClusterAccess([("localhost", proc1.http_port)])
access2 = http_admin.ClusterAccess([("localhost", proc2.http_port)])
dc = access1.add_datacenter("Fizz")
table = access1.add_namespace("rdb", "new_table")
access2.update_cluster_data(10)
assert len(access1.get_directory()) == len(access2.get_directory()) == 2
print "Splitting cluster, then waiting 20s..."
cluster2 = driver.Cluster(metacluster)
metacluster.move_processes(cluster1, cluster2, [proc2])
time.sleep(20)
print "Conflicting datacenter name..."
access1.rename(dc, "Buzz")
access2.rename(access2.find_datacenter(dc.uuid), "Fizz")
access1.rename(table, "new_name")
access2.rename(access2.find_namespace(table.uuid), "othe_name")
print "Joining cluster, then waiting 10s..."
metacluster.move_processes(cluster2, cluster1, [proc2])
time.sleep(10)
cluster1.check()
cluster2.check()
issues = access1.get_issues()
assert issues[0]["type"] == "VCLOCK_CONFLICT"
assert len(access1.get_directory()) == len(access2.get_directory()) == 2
time.sleep(1000000)
print "Done."
| apache-2.0 | Python |
934f5d9060516bd8866fe217cd30666efba66fbf | Fix ORF TVthek plugin (#113) | melmorabity/streamlink,fishscene/streamlink,bastimeyer/streamlink,gravyboat/streamlink,back-to/streamlink,melmorabity/streamlink,ethanhlc/streamlink,beardypig/streamlink,mmetak/streamlink,sbstp/streamlink,wlerin/streamlink,gravyboat/streamlink,bastimeyer/streamlink,chhe/streamlink,mmetak/streamlink,back-to/streamlink,fishscene/streamlink,streamlink/streamlink,beardypig/streamlink,streamlink/streamlink,javiercantero/streamlink,ethanhlc/streamlink,javiercantero/streamlink,chhe/streamlink,sbstp/streamlink,wlerin/streamlink | src/streamlink/plugins/orf_tvthek.py | src/streamlink/plugins/orf_tvthek.py | import re, json
from streamlink.plugin import Plugin, PluginError
from streamlink.plugin.api import http
from streamlink.stream import HLSStream
_stream_url_re = re.compile(r'https?://tvthek\.orf\.at/(index\.php/)?live/(?P<title>[^/]+)/(?P<id>[0-9]+)')
_vod_url_re = re.compile(r'https?://tvthek\.orf\.at/pro(gram|file)/(?P<showtitle>[^/]+)/(?P<showid>[0-9]+)/(?P<episodetitle>[^/]+)/(?P<epsiodeid>[0-9]+)(/(?P<segmenttitle>[^/]+)/(?P<segmentid>[0-9]+))?')
_json_re = re.compile(r'<div class="jsb_ jsb_VideoPlaylist" data-jsb="(?P<json>[^"]+)">')
MODE_STREAM, MODE_VOD = 0, 1
class ORFTVThek(Plugin):
@classmethod
def can_handle_url(self, url):
return _stream_url_re.match(url) or _vod_url_re.match(url)
def _get_streams(self):
if _stream_url_re.match(self.url):
mode = MODE_STREAM
else:
mode = MODE_VOD
res = http.get(self.url)
match = _json_re.search(res.text)
if match:
data = json.loads(_json_re.search(res.text).group('json').replace('"', '"'))
else:
raise PluginError("Could not extract JSON metadata")
streams = {}
try:
if mode == MODE_STREAM:
sources = data['playlist']['videos'][0]['sources']
elif mode == MODE_VOD:
sources = data['selected_video']['sources']
except (KeyError, IndexError):
raise PluginError("Could not extract sources")
for source in sources:
try:
if source['delivery'] != 'hls':
continue
url = source['src'].replace('\/', '/')
except KeyError:
continue
stream = HLSStream.parse_variant_playlist(self.session, url)
streams.update(stream)
return streams
__plugin__ = ORFTVThek
| import re, json
from streamlink.plugin import Plugin, PluginError
from streamlink.plugin.api import http
from streamlink.stream import HLSStream
_stream_url_re = re.compile(r'https?://tvthek\.orf\.at/live/(?P<title>[^/]+)/(?P<id>[0-9]+)')
_vod_url_re = re.compile(r'https?://tvthek\.orf\.at/program/(?P<showtitle>[^/]+)/(?P<showid>[0-9]+)/(?P<episodetitle>[^/]+)/(?P<epsiodeid>[0-9]+)(/(?P<segmenttitle>[^/]+)/(?P<segmentid>[0-9]+))?')
_json_re = re.compile(r'initializeAdworx\(\[(?P<json>.+)\]\);')
MODE_STREAM, MODE_VOD = 0, 1
class ORFTVThek(Plugin):
@classmethod
def can_handle_url(self, url):
return _stream_url_re.match(url) or _vod_url_re.match(url)
def _get_streams(self):
if _stream_url_re.match(self.url):
mode = MODE_STREAM
else:
mode = MODE_VOD
res = http.get(self.url)
match = _json_re.search(res.text)
if match:
data = json.loads(_json_re.search(res.text).group('json'))
else:
raise PluginError("Could not extract JSON metadata")
streams = {}
try:
if mode == MODE_STREAM:
sources = data['values']['episode']['livestream_playlist_data']['videos'][0]['sources']
elif mode == MODE_VOD:
sources = data['values']['segment']['playlist_item_array']['sources']
except (KeyError, IndexError):
raise PluginError("Could not extract sources")
for source in sources:
try:
if source['delivery'] != 'hls':
continue
url = source['src'].replace('\/', '/')
except KeyError:
continue
stream = HLSStream.parse_variant_playlist(self.session, url)
streams.update(stream)
return streams
__plugin__ = ORFTVThek
| bsd-2-clause | Python |
4ca292e53710dd4ef481e7fa5965e22d3f94e65b | Index and code improve cnab.return.move.code | akretion/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil | l10n_br_account_payment_order/models/cnab_return_move_code.py | l10n_br_account_payment_order/models/cnab_return_move_code.py | # Copyright 2020 Akretion
# @author Magno Costa <magno.costa@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
name = fields.Char(
string='Name',
index=True,
)
code = fields.Char(
string='Code',
index=True,
)
bank_id = fields.Many2one(
string='Bank',
comodel_name='res.bank',
index=True,
)
payment_method_id = fields.Many2one(
comodel_name='account.payment.method',
string='Payment Method',
index=True,
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
store=True,
)
payment_method_code = fields.Char(
related='payment_method_id.code',
store=True,
)
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
| # Copyright 2020 Akretion
# @author Magno Costa <magno.costa@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, api, fields
class CNABReturnMoveCode(models.Model):
"""
CNAB return code, each Bank can has a list of Codes
"""
_name = 'cnab.return.move.code'
_description = 'CNAB Return Move Code'
name = fields.Char(string='Name')
code = fields.Char(string='Code')
bank_id = fields.Many2one(
string='Bank', comodel_name='res.bank'
)
payment_method_id = fields.Many2one(
'account.payment.method', string='Payment Method'
)
# Fields used to create domain
bank_code_bc = fields.Char(
related='bank_id.code_bc',
)
payment_method_code = fields.Char(related='payment_method_id.code')
@api.multi
def name_get(self):
result = []
for record in self:
result.append((
record.id, '%s - %s' % (
record.code, record.name)
))
return result
| agpl-3.0 | Python |
379966b284c7273c0039689521d0e8f40463ad10 | fix progress.py | wujf/rethinkdb,yaolinz/rethinkdb,KSanthanam/rethinkdb,elkingtonmcb/rethinkdb,alash3al/rethinkdb,Wilbeibi/rethinkdb,sontek/rethinkdb,wkennington/rethinkdb,mbroadst/rethinkdb,tempbottle/rethinkdb,yakovenkodenis/rethinkdb,sbusso/rethinkdb,dparnell/rethinkdb,JackieXie168/rethinkdb,mcanthony/rethinkdb,wujf/rethinkdb,jesseditson/rethinkdb,lenstr/rethinkdb,sbusso/rethinkdb,rrampage/rethinkdb,losywee/rethinkdb,Wilbeibi/rethinkdb,eliangidoni/rethinkdb,yaolinz/rethinkdb,sbusso/rethinkdb,lenstr/rethinkdb,RubenKelevra/rethinkdb,scripni/rethinkdb,sbusso/rethinkdb,nviennot/rethinkdb,robertjpayne/rethinkdb,dparnell/rethinkdb,Qinusty/rethinkdb,gavioto/rethinkdb,yakovenkodenis/rethinkdb,alash3al/rethinkdb,gdi2290/rethinkdb,4talesa/rethinkdb,dparnell/rethinkdb,losywee/rethinkdb,jesseditson/rethinkdb,captainpete/rethinkdb,AtnNn/rethinkdb,nviennot/rethinkdb,lenstr/rethinkdb,spblightadv/rethinkdb,RubenKelevra/rethinkdb,pap/rethinkdb,robertjpayne/rethinkdb,gdi2290/rethinkdb,bpradipt/rethinkdb,grandquista/rethinkdb,yakovenkodenis/rethinkdb,sbusso/rethinkdb,victorbriz/rethinkdb,AtnNn/rethinkdb,eliangidoni/rethinkdb,nviennot/rethinkdb,jfriedly/rethinkdb,sebadiaz/rethinkdb,sebadiaz/rethinkdb,grandquista/rethinkdb,ayumilong/rethinkdb,bpradipt/rethinkdb,ayumilong/rethinkdb,pap/rethinkdb,yaolinz/rethinkdb,AtnNn/rethinkdb,jesseditson/rethinkdb,wkennington/rethinkdb,bpradipt/rethinkdb,yaolinz/rethinkdb,yakovenkodenis/rethinkdb,jfriedly/rethinkdb,RubenKelevra/rethinkdb,matthaywardwebdesign/rethinkdb,spblightadv/rethinkdb,mbroadst/rethinkdb,marshall007/rethinkdb,Qinusty/rethinkdb,AtnNn/rethinkdb,bpradipt/rethinkdb,bchavez/rethinkdb,lenstr/rethinkdb,spblightadv/rethinkdb,RubenKelevra/rethinkdb,mcanthony/rethinkdb,yakovenkodenis/rethinkdb,jfriedly/rethinkdb,grandquista/rethinkdb,wujf/rethinkdb,bchavez/rethinkdb,wojons/rethinkdb,robertjpayne/rethinkdb,niieani/rethinkdb,alash3al/rethinkdb,greyhwndz/rethinkdb,tempbottle/rethinkdb,victorbriz/re
thinkdb,niieani/rethinkdb,bpradipt/rethinkdb,wojons/rethinkdb,mquandalle/rethinkdb,matthaywardwebdesign/rethinkdb,urandu/rethinkdb,rrampage/rethinkdb,gavioto/rethinkdb,bchavez/rethinkdb,rrampage/rethinkdb,mcanthony/rethinkdb,pap/rethinkdb,losywee/rethinkdb,nviennot/rethinkdb,grandquista/rethinkdb,niieani/rethinkdb,niieani/rethinkdb,jfriedly/rethinkdb,tempbottle/rethinkdb,rrampage/rethinkdb,niieani/rethinkdb,sebadiaz/rethinkdb,jesseditson/rethinkdb,gavioto/rethinkdb,matthaywardwebdesign/rethinkdb,4talesa/rethinkdb,alash3al/rethinkdb,ajose01/rethinkdb,wojons/rethinkdb,ajose01/rethinkdb,niieani/rethinkdb,Qinusty/rethinkdb,tempbottle/rethinkdb,losywee/rethinkdb,urandu/rethinkdb,grandquista/rethinkdb,eliangidoni/rethinkdb,greyhwndz/rethinkdb,bchavez/rethinkdb,pap/rethinkdb,bchavez/rethinkdb,urandu/rethinkdb,niieani/rethinkdb,pap/rethinkdb,mbroadst/rethinkdb,AntouanK/rethinkdb,sontek/rethinkdb,jmptrader/rethinkdb,robertjpayne/rethinkdb,bpradipt/rethinkdb,yaolinz/rethinkdb,mcanthony/rethinkdb,sebadiaz/rethinkdb,gavioto/rethinkdb,pap/rethinkdb,losywee/rethinkdb,gavioto/rethinkdb,captainpete/rethinkdb,lenstr/rethinkdb,ayumilong/rethinkdb,JackieXie168/rethinkdb,rrampage/rethinkdb,Wilbeibi/rethinkdb,scripni/rethinkdb,catroot/rethinkdb,yakovenkodenis/rethinkdb,bchavez/rethinkdb,losywee/rethinkdb,jmptrader/rethinkdb,RubenKelevra/rethinkdb,spblightadv/rethinkdb,elkingtonmcb/rethinkdb,mbroadst/rethinkdb,gdi2290/rethinkdb,mbroadst/rethinkdb,JackieXie168/rethinkdb,Qinusty/rethinkdb,elkingtonmcb/rethinkdb,eliangidoni/rethinkdb,robertjpayne/rethinkdb,lenstr/rethinkdb,eliangidoni/rethinkdb,AntouanK/rethinkdb,Wilbeibi/rethinkdb,wojons/rethinkdb,ayumilong/rethinkdb,grandquista/rethinkdb,wkennington/rethinkdb,4talesa/rethinkdb,catroot/rethinkdb,yaolinz/rethinkdb,grandquista/rethinkdb,niieani/rethinkdb,yaolinz/rethinkdb,grandquista/rethinkdb,wujf/rethinkdb,pap/rethinkdb,wojons/rethinkdb,matthaywardwebdesign/rethinkdb,gdi2290/rethinkdb,sontek/rethinkdb,eliangidoni/rethinkdb,JackieXie168/ret
hinkdb,elkingtonmcb/rethinkdb,ayumilong/rethinkdb,dparnell/rethinkdb,victorbriz/rethinkdb,yakovenkodenis/rethinkdb,gavioto/rethinkdb,urandu/rethinkdb,Qinusty/rethinkdb,wkennington/rethinkdb,eliangidoni/rethinkdb,jmptrader/rethinkdb,jmptrader/rethinkdb,lenstr/rethinkdb,dparnell/rethinkdb,4talesa/rethinkdb,scripni/rethinkdb,mquandalle/rethinkdb,robertjpayne/rethinkdb,RubenKelevra/rethinkdb,dparnell/rethinkdb,greyhwndz/rethinkdb,sontek/rethinkdb,Wilbeibi/rethinkdb,Qinusty/rethinkdb,ajose01/rethinkdb,jmptrader/rethinkdb,sontek/rethinkdb,gavioto/rethinkdb,scripni/rethinkdb,nviennot/rethinkdb,yakovenkodenis/rethinkdb,nviennot/rethinkdb,sbusso/rethinkdb,matthaywardwebdesign/rethinkdb,wujf/rethinkdb,greyhwndz/rethinkdb,jesseditson/rethinkdb,spblightadv/rethinkdb,mquandalle/rethinkdb,tempbottle/rethinkdb,captainpete/rethinkdb,elkingtonmcb/rethinkdb,sontek/rethinkdb,wujf/rethinkdb,Wilbeibi/rethinkdb,greyhwndz/rethinkdb,marshall007/rethinkdb,alash3al/rethinkdb,mbroadst/rethinkdb,Qinusty/rethinkdb,sebadiaz/rethinkdb,mbroadst/rethinkdb,marshall007/rethinkdb,alash3al/rethinkdb,yaolinz/rethinkdb,marshall007/rethinkdb,captainpete/rethinkdb,mquandalle/rethinkdb,mquandalle/rethinkdb,AtnNn/rethinkdb,jmptrader/rethinkdb,sebadiaz/rethinkdb,pap/rethinkdb,sbusso/rethinkdb,robertjpayne/rethinkdb,jmptrader/rethinkdb,ajose01/rethinkdb,captainpete/rethinkdb,wojons/rethinkdb,rrampage/rethinkdb,RubenKelevra/rethinkdb,eliangidoni/rethinkdb,jfriedly/rethinkdb,marshall007/rethinkdb,greyhwndz/rethinkdb,ajose01/rethinkdb,elkingtonmcb/rethinkdb,KSanthanam/rethinkdb,matthaywardwebdesign/rethinkdb,sontek/rethinkdb,greyhwndz/rethinkdb,4talesa/rethinkdb,mbroadst/rethinkdb,wkennington/rethinkdb,JackieXie168/rethinkdb,sebadiaz/rethinkdb,catroot/rethinkdb,scripni/rethinkdb,ajose01/rethinkdb,Wilbeibi/rethinkdb,losywee/rethinkdb,losywee/rethinkdb,dparnell/rethinkdb,victorbriz/rethinkdb,urandu/rethinkdb,AntouanK/rethinkdb,wujf/rethinkdb,tempbottle/rethinkdb,KSanthanam/rethinkdb,AtnNn/rethinkdb,victorbriz/rethi
nkdb,4talesa/rethinkdb,ayumilong/rethinkdb,bchavez/rethinkdb,alash3al/rethinkdb,scripni/rethinkdb,marshall007/rethinkdb,wkennington/rethinkdb,catroot/rethinkdb,Wilbeibi/rethinkdb,catroot/rethinkdb,scripni/rethinkdb,captainpete/rethinkdb,catroot/rethinkdb,sebadiaz/rethinkdb,nviennot/rethinkdb,nviennot/rethinkdb,AntouanK/rethinkdb,dparnell/rethinkdb,catroot/rethinkdb,4talesa/rethinkdb,KSanthanam/rethinkdb,robertjpayne/rethinkdb,jfriedly/rethinkdb,mquandalle/rethinkdb,urandu/rethinkdb,rrampage/rethinkdb,robertjpayne/rethinkdb,rrampage/rethinkdb,jfriedly/rethinkdb,JackieXie168/rethinkdb,AtnNn/rethinkdb,ajose01/rethinkdb,ayumilong/rethinkdb,scripni/rethinkdb,wkennington/rethinkdb,mcanthony/rethinkdb,catroot/rethinkdb,bchavez/rethinkdb,alash3al/rethinkdb,sontek/rethinkdb,KSanthanam/rethinkdb,victorbriz/rethinkdb,lenstr/rethinkdb,wkennington/rethinkdb,sbusso/rethinkdb,wojons/rethinkdb,JackieXie168/rethinkdb,victorbriz/rethinkdb,elkingtonmcb/rethinkdb,bpradipt/rethinkdb,mquandalle/rethinkdb,RubenKelevra/rethinkdb,gdi2290/rethinkdb,KSanthanam/rethinkdb,bchavez/rethinkdb,AntouanK/rethinkdb,captainpete/rethinkdb,jesseditson/rethinkdb,spblightadv/rethinkdb,matthaywardwebdesign/rethinkdb,greyhwndz/rethinkdb,captainpete/rethinkdb,jesseditson/rethinkdb,matthaywardwebdesign/rethinkdb,Qinusty/rethinkdb,dparnell/rethinkdb,marshall007/rethinkdb,grandquista/rethinkdb,mcanthony/rethinkdb,AtnNn/rethinkdb,tempbottle/rethinkdb,JackieXie168/rethinkdb,mquandalle/rethinkdb,jfriedly/rethinkdb,KSanthanam/rethinkdb,victorbriz/rethinkdb,spblightadv/rethinkdb,mcanthony/rethinkdb,gdi2290/rethinkdb,gdi2290/rethinkdb,jesseditson/rethinkdb,KSanthanam/rethinkdb,JackieXie168/rethinkdb,tempbottle/rethinkdb,AntouanK/rethinkdb,elkingtonmcb/rethinkdb,4talesa/rethinkdb,ayumilong/rethinkdb,AntouanK/rethinkdb,urandu/rethinkdb,wojons/rethinkdb,Qinusty/rethinkdb,gavioto/rethinkdb,ajose01/rethinkdb,bpradipt/rethinkdb,jmptrader/rethinkdb,bpradipt/rethinkdb,AntouanK/rethinkdb,spblightadv/rethinkdb,mcanthony/rethink
db,urandu/rethinkdb,eliangidoni/rethinkdb,mbroadst/rethinkdb,marshall007/rethinkdb | test/interface/progress.py | test/interface/progress.py | #!/usr/bin/env python
# Python 2 integration test: start a two-server cluster, load data through the
# memcached interface, change replica affinities to trigger a backfill, and
# check that every reported progress value is well-formed.
import sys, os, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, http_admin, scenario_common
from memcached_workload_common import MemcacheConnection
from vcoptparse import *
op = OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
with driver.Metacluster() as metacluster:
    cluster = driver.Cluster(metacluster)
    executable_path, command_prefix, serve_options = scenario_common.parse_mode_flags(opts)
    print "Starting cluster..."
    # Launch two server processes that join the same cluster.
    processes = [
        driver.Process(cluster, driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix),
            executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
        for i in xrange(2)]
    for process in processes:
        process.wait_until_started_up()
    print "Creating namespace..."
    http = http_admin.ClusterAccess([("localhost", p.http_port) for p in processes])
    dc = http.add_datacenter()
    for machine_id in http.machines:
        http.move_server_to_datacenter(machine_id, dc)
    ns = http.add_namespace(protocol = "memcached", primary = dc)
    time.sleep(10)  # fixed wait for the namespace to become ready -- TODO confirm no explicit readiness API exists
    host, port = driver.get_namespace_host(ns.port, processes)
    # Insert 10000 keys, echoing a running counter every 100 inserts.
    with MemcacheConnection(host, port) as mc:
        for i in range(10000):
            if (i + 1) % 100 == 0:
                print i + 1,
                sys.stdout.flush()
            mc.set(str(i) * 10, str(i)*20)
        print
    # Request one replica in the datacenter; presumably this starts a backfill
    # whose progress is then polled below -- confirm against http_admin docs.
    http.set_namespace_affinities(ns, {dc : 1})
    time.sleep(1)
    progress = http.get_progress()
    # progress nests machine -> namespace -> activity -> region -> value.
    for machine_id, temp1 in progress.iteritems():
        for namespace_id, temp2 in temp1.iteritems():
            for activity_id, temp3 in temp2.iteritems():
                for region, progress_val in temp3.iteritems():
                    assert(progress_val[0] != "Timeout")
                    # (done, total) pair: done must never exceed total.
                    assert(progress_val[0][0] <= progress_val[0][1])
    cluster.check_and_stop()
| #!/usr/bin/env python
import sys, os, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, http_admin, scenario_common
from workload_common import MemcacheConnection
from vcoptparse import *
op = OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
with driver.Metacluster() as metacluster:
cluster = driver.Cluster(metacluster)
executable_path, command_prefix, serve_options = scenario_common.parse_mode_flags(opts)
print "Starting cluster..."
processes = [
driver.Process(cluster, driver.Files(metacluster, executable_path = executable_path, command_prefix = command_prefix),
executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
for i in xrange(2)]
for process in processes:
process.wait_until_started_up()
print "Creating namespace..."
http = http_admin.ClusterAccess([("localhost", p.http_port) for p in processes])
dc = http.add_datacenter()
for machine_id in http.machines:
http.move_server_to_datacenter(machine_id, dc)
ns = http.add_namespace(protocol = "memcached", primary = dc)
time.sleep(10)
host, port = driver.get_namespace_host(ns.port, processes)
with MemcacheConnection(host, port) as mc:
for i in range(10000):
mc.set(str(i) * 10, str(i)*20)
http.set_namespace_affinities(ns, {dc : 1})
time.sleep(1)
progress = http.get_progress()
for machine_id, temp1 in progress.iteritems():
for namespace_id, temp2 in temp1.iteritems():
for activity_id, temp3 in temp2.iteritems():
for region, progress_val in temp3.iteritems():
assert(progress_val[0] != "Timeout")
assert(progress_val[0][0] <= progress_val[0][1])
cluster.check_and_stop()
| agpl-3.0 | Python |
7fab76d312e20f0419274836e93415f632f398e2 | clean up | yassersouri/omgh,yassersouri/omgh | src/storage.py | src/storage.py | import os
import scipy.io
import numpy as np
class datastore(object):
    """Filesystem-backed store for MATLAB ``.mat`` files.

    Files live in a two-level hierarchy
    (``<base_path>/<super_name>/<sub_name>/<instance_name>``).  Single
    arrays are stored under ``global_key`` inside each .mat file; "large"
    instances are split row-wise into numbered shard files.
    """

    # Naming pattern for the shards of a large instance: <path>_<index>.mat
    LARGE_FILE_FORMAT = '%s_%d.mat'

    def __init__(self, base_path, global_key='global_key'):
        self.base_path = base_path
        self.global_key = global_key

    @classmethod
    def ensure_dir(cls, path):
        """Create *path* (including parents) if it does not exist yet."""
        if not os.path.exists(path):
            os.makedirs(path)

    def get_super_folder(self, super_name):
        """Return the first-level folder for *super_name*."""
        return os.path.join(self.base_path, super_name)

    def get_sub_folder(self, super_name, sub_name):
        """Return the second-level folder for *super_name*/*sub_name*."""
        return os.path.join(self.base_path, super_name, sub_name)

    def get_instance_path(self, super_name, sub_name, instance_name):
        """Return the full path of an instance file, creating its folder."""
        folder = self.get_sub_folder(super_name, sub_name)
        self.ensure_dir(folder)
        return os.path.join(folder, instance_name)

    def get_model_path(self, super_name, model_name):
        """Return the path of a model file stored directly under *super_name*."""
        return os.path.join(self.get_super_folder(super_name), model_name)

    def check_exists(self, instance_path):
        """Return True when *instance_path* already exists on disk."""
        return os.path.exists(instance_path)

    def save_instance(self, instance_path, instance):
        """Save a single array under ``self.global_key`` (compressed)."""
        scipy.io.savemat(
            instance_path, {self.global_key: instance}, do_compression=True)

    def save_full_instance(self, instance_path, instance):
        """Save an arbitrary dict of variables as-is (compressed)."""
        scipy.io.savemat(instance_path, instance, do_compression=True)

    def load_instance(self, instance_path):
        """Load and return the array stored under ``self.global_key``."""
        return scipy.io.loadmat(instance_path)[self.global_key]

    def load_full_instance(self, instance_path):
        """Load and return the whole .mat file as a dict of variables."""
        return scipy.io.loadmat(instance_path)

    def save_large_instance(self, instance_path, instance, split_size):
        """Split *instance* row-wise into *split_size* shards and save each."""
        for index, shard in enumerate(np.vsplit(instance, split_size)):
            self.save_instance(self.LARGE_FILE_FORMAT % (instance_path, index), shard)

    def load_large_instance(self, instance_path, split_size):
        """Load *split_size* shards and stack them back into one array."""
        shards = [self.load_instance(self.LARGE_FILE_FORMAT % (instance_path, index))
                  for index in range(split_size)]
        return np.vstack(shards)
| import os
import scipy.io
import numpy as np
from glob import glob
class datastore(object):
LARGE_FILE_FORMAT = '%s_%d.mat'
def __init__(self, base_path, global_key='global_key'):
self.base_path = base_path
self.global_key = global_key
@classmethod
def ensure_dir(cls, path):
if not os.path.exists(path):
os.makedirs(path)
def get_super_folder(self, super_name):
super_folder = os.path.join(self.base_path, super_name)
return super_folder
def get_sub_folder(self, super_name, sub_name):
super_folder = os.path.join(self.base_path, super_name)
sub_folder = os.path.join(super_folder, sub_name)
return sub_folder
def get_instance_path(self, super_name, sub_name, instance_name):
sub_folder = self.get_sub_folder(super_name, sub_name)
self.ensure_dir(sub_folder)
return os.path.join(sub_folder, instance_name)
def get_model_path(self, super_name, model_name):
super_folder = self.get_super_folder(super_name)
return os.path.join(super_folder, model_name)
def check_exists(self, instance_path):
if os.path.exists(instance_path):
return True
else:
return False
def save_instance(self, instance_path, instance):
scipy.io.savemat(
instance_path, {self.global_key: instance}, do_compression=True)
def save_full_instance(self, instance_path, instance):
scipy.io.savemat(instance_path, instance, do_compression=True)
def load_instance(self, instance_path):
instance = scipy.io.loadmat(instance_path)
return instance[self.global_key]
def load_full_instance(self, instance_path):
instance = scipy.io.loadmat(instance_path)
return instance
def save_large_instance(self, instance_path, instance, split_size):
instance_arrays = np.vsplit(instance, split_size)
for i, inst in enumerate(instance_arrays):
self.save_instance(self.LARGE_FILE_FORMAT % (instance_path, i), inst)
def load_large_instance(self, instance_path, split_size):
instance_arrays = []
for i in range(split_size):
instance_arrays.append(self.load_instance(self.LARGE_FILE_FORMAT % (instance_path, i)))
return np.vstack(instance_arrays)
| mit | Python |
79d19d20cdabc7c139bc55704523f8a1ea050292 | sort commands properly | rgs1/xcmd | xcmd/tests/test_xcmd.py | xcmd/tests/test_xcmd.py | # -*- coding: utf-8 -*-
""" test xcmd proper """
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from xcmd.xcmd import (
ensure_params,
Optional,
Required,
XCmd
)
class XCmdTestCase(unittest.TestCase):
    """ Xcmd tests cases """
    @classmethod
    def setUpClass(cls):
        pass

    def setUp(self):
        pass

    def test_basic(self):
        # Minimal shell subclass exposing a single custom 'cat' command.
        class Shell(XCmd):
            @ensure_params(Required('path'))
            def do_cat(self, params):
                self.show_output('cat called with %s' % params.path)

        output = StringIO()
        shell = Shell(setup_readline=False, output_io=output)
        # 'help' and 'pipe' come built in; '!!' and '$?' are special commands.
        regular = ['cat', 'help', 'pipe']
        special = ['!!', '$?']
        self.assertEquals(regular, sorted(shell.commands))
        self.assertEquals(special, sorted(shell.special_commands))
        # all_commands is the union; ASCII sort puts the punctuation-named
        # special commands before the alphabetic ones.
        self.assertEquals(special + regular, sorted(shell.all_commands))
        shell.do_cat('/etc/passwd')
        self.assertEqual('cat called with /etc/passwd\n', output.getvalue())
        # test resolving paths
        self.assertEquals(shell.resolve_path(''), '/')
        self.assertEquals(shell.resolve_path('.'), '/')
        self.assertEquals(shell.resolve_path('..'), '/')
        self.assertEquals(shell.resolve_path('foo'), '/foo')

    def test_pipe(self):
        # Two commands chained via the built-in 'pipe': each line of ls
        # output is fed to upper.
        class Shell(XCmd):
            @ensure_params(Optional('path'))
            def do_ls(self, params):
                self.show_output('/aaa\n/bbb')

            @ensure_params(Required('line'))
            def do_upper(self, params):
                self.show_output(params.line.upper())

        output = StringIO()
        shell = Shell(setup_readline=False, output_io=output)
        shell.do_pipe('ls upper')
        self.assertEqual('/AAA\n/BBB\n', output.getvalue())
| # -*- coding: utf-8 -*-
""" test xcmd proper """
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from xcmd.xcmd import (
ensure_params,
Optional,
Required,
XCmd
)
class XCmdTestCase(unittest.TestCase):
""" Xcmd tests cases """
@classmethod
def setUpClass(cls):
pass
def setUp(self):
pass
def test_basic(self):
class Shell(XCmd):
@ensure_params(Required('path'))
def do_cat(self, params):
self.show_output('cat called with %s' % params.path)
output = StringIO()
shell = Shell(setup_readline=False, output_io=output)
regular = ['cat', 'help', 'pipe']
special = ['!!', '$?']
self.assertEquals(regular, sorted(shell.commands))
self.assertEquals(special, sorted(shell.special_commands))
self.assertEquals(regular + special, sorted(shell.all_commands))
shell.do_cat('/etc/passwd')
self.assertEqual('cat called with /etc/passwd\n', output.getvalue())
# test resolving paths
self.assertEquals(shell.resolve_path(''), '/')
self.assertEquals(shell.resolve_path('.'), '/')
self.assertEquals(shell.resolve_path('..'), '/')
self.assertEquals(shell.resolve_path('foo'), '/foo')
def test_pipe(self):
class Shell(XCmd):
@ensure_params(Optional('path'))
def do_ls(self, params):
self.show_output('/aaa\n/bbb')
@ensure_params(Required('line'))
def do_upper(self, params):
self.show_output(params.line.upper())
output = StringIO()
shell = Shell(setup_readline=False, output_io=output)
shell.do_pipe('ls upper')
self.assertEqual('/AAA\n/BBB\n', output.getvalue())
| apache-2.0 | Python |
23a1e1a9963412a33c2a4d91b63b29a76237644b | Set default feature value to 0.0 | fawcettc/planning-features,fawcettc/planning-features,fawcettc/planning-features,fawcettc/planning-features | extractors/feature_extractor.py | extractors/feature_extractor.py | #!/usr/bin/env python2.7
# encoding: utf-8
import os
import sys
from subprocess import Popen, PIPE
import tempfile
class FeatureExtractor(object):
    '''
    abstract feature extractor
    '''
    def __init__(self, args):
        # Resource limits and tool paths handed down from the CLI options.
        self.memory_limit = args.mem_limit
        self.runtime_limit = args.per_extraction_time_limit
        self.runsolver_path = args.runsolver
        self.abs_script_directory = os.path.abspath(os.path.dirname(sys.argv[0]))
        # for creating/maintaining a shared directory with the SAS+ representation
        self.creates_sas_representation = False
        self.requires_sas_representation = False
        # Value reported for a feature when extraction fails or is skipped.
        self.sentinel_value = "0.0"
        self.extractor_name = "BASE EXTRACTOR"
    '''
    '''
    def extract(self, domain_path, instance_path):
        # Abstract hook: subclasses must return (success, {feature: value}).
        print "ERROR: You should have written a new extract() for your feature extractor"
        return False,{}
    '''
    '''
    def default_features(self):
        # Abstract hook: subclasses must return their fallback feature dict.
        print "ERROR: You need to provide default_features for your feature extractor"
        return {}
    '''
    '''
    def execute_command_with_runsolver(self, command, temporary_directory=None, stdin_file=None, runtime_limit=None):
        # Run *command* under runsolver, which enforces the runtime (-C) and
        # memory (-M) limits and captures stdout into the work directory.
        # Returns the work directory so callers can inspect the output files.
        try:
            if temporary_directory == None:
                temporary_directory = tempfile.mkdtemp(prefix='pfeat.',suffix='.tmp')
            runsolver_stdout = "%s/runsolver.stdout" % (temporary_directory)
            cmd_stdout = "%s/cmd.stdout" % (temporary_directory)
            if runtime_limit == None:
                runtime_limit = self.runtime_limit
            modified_cmd = [self.runsolver_path, "-w", runsolver_stdout, "-o", cmd_stdout, "-C", runtime_limit, "-M", self.memory_limit, "-d", "2"]
            modified_cmd.extend(command)
            if stdin_file != None:
                # The parameter is rebound here from a path string to an open
                # file object; the finally block closes it.
                # NOTE(review): if open() itself raises, stdin_file is still a
                # string and the close() below would fail -- confirm intended.
                stdin_file = open(stdin_file, 'r')
            io = Popen(map(str, modified_cmd), shell=False, preexec_fn=os.setpgrp, cwd=temporary_directory, stdin=stdin_file)
            io.wait()
        except Exception as e:
            print "ERROR: Exception during feature extraction: %s" % (str(e))
        except:
            # Bare except also catches non-Exception errors (e.g. SystemExit).
            print "ERROR: Unknown exception during feature extraction!"
        finally:
            if stdin_file != None:
                stdin_file.close()
        return temporary_directory
| #!/usr/bin/env python2.7
# encoding: utf-8
import os
import sys
from subprocess import Popen, PIPE
import tempfile
class FeatureExtractor(object):
'''
abstract feature extractor
'''
def __init__(self, args):
self.memory_limit = args.mem_limit
self.runtime_limit = args.per_extraction_time_limit
self.runsolver_path = args.runsolver
self.abs_script_directory = os.path.abspath(os.path.dirname(sys.argv[0]))
# for creating/maintaining a shared directory with the SAS+ representation
self.creates_sas_representation = False
self.requires_sas_representation = False
self.sentinel_value = "-512.0"
self.extractor_name = "BASE EXTRACTOR"
'''
'''
def extract(self, domain_path, instance_path):
print "ERROR: You should have written a new extract() for your feature extractor"
return False,{}
'''
'''
def default_features(self):
print "ERROR: You need to provide default_features for your feature extractor"
return {}
'''
'''
def execute_command_with_runsolver(self, command, temporary_directory=None, stdin_file=None, runtime_limit=None):
try:
if temporary_directory == None:
temporary_directory = tempfile.mkdtemp(prefix='pfeat.',suffix='.tmp')
runsolver_stdout = "%s/runsolver.stdout" % (temporary_directory)
cmd_stdout = "%s/cmd.stdout" % (temporary_directory)
if runtime_limit == None:
runtime_limit = self.runtime_limit
modified_cmd = [self.runsolver_path, "-w", runsolver_stdout, "-o", cmd_stdout, "-C", runtime_limit, "-M", self.memory_limit, "-d", "2"]
modified_cmd.extend(command)
if stdin_file != None:
stdin_file = open(stdin_file, 'r')
io = Popen(map(str, modified_cmd), shell=False, preexec_fn=os.setpgrp, cwd=temporary_directory, stdin=stdin_file)
io.wait()
except Exception as e:
print "ERROR: Exception during feature extraction: %s" % (str(e))
except:
print "ERROR: Unknown exception during feature extraction!"
finally:
if stdin_file != None:
stdin_file.close()
return temporary_directory
| agpl-3.0 | Python |
6675586f061cf2cde25a36b0df04e0d22d7bfdee | Update version.py | istresearch/traptor,istresearch/traptor | traptor/version.py | traptor/version.py | __version__ = '4.0.9'
if __name__ == '__main__':
    # Allow `python version.py` to print the package version string.
    print(__version__)
| __version__ = '4.0.8'
if __name__ == '__main__':
print(__version__)
| mit | Python |
25e5157785ee9dd7a3bbb606b1f7642342936d18 | call drop # 2 | wfxiang08/sqlalchemy,olemis/sqlalchemy,monetate/sqlalchemy,dstufft/sqlalchemy,elelianghh/sqlalchemy,inspirehep/sqlalchemy,j5int/sqlalchemy,ThiefMaster/sqlalchemy,WinterNis/sqlalchemy,pdufour/sqlalchemy,bdupharm/sqlalchemy,davidjb/sqlalchemy,Akrog/sqlalchemy,Cito/sqlalchemy,Cito/sqlalchemy,sandan/sqlalchemy,276361270/sqlalchemy,WinterNis/sqlalchemy,brianv0/sqlalchemy,davidjb/sqlalchemy,pdufour/sqlalchemy,ThiefMaster/sqlalchemy,bootandy/sqlalchemy,dstufft/sqlalchemy,alex/sqlalchemy,Cito/sqlalchemy,hsum/sqlalchemy,halfcrazy/sqlalchemy,bootandy/sqlalchemy,sandan/sqlalchemy,halfcrazy/sqlalchemy,robin900/sqlalchemy,inspirehep/sqlalchemy,Akrog/sqlalchemy,EvaSDK/sqlalchemy,wujuguang/sqlalchemy,alex/sqlalchemy,hsum/sqlalchemy,epa/sqlalchemy,itkovian/sqlalchemy,monetate/sqlalchemy,wujuguang/sqlalchemy,olemis/sqlalchemy,epa/sqlalchemy,276361270/sqlalchemy,davidfraser/sqlalchemy,alex/sqlalchemy,j5int/sqlalchemy,zzzeek/sqlalchemy,graingert/sqlalchemy,itkovian/sqlalchemy,bdupharm/sqlalchemy,graingert/sqlalchemy,EvaSDK/sqlalchemy,elelianghh/sqlalchemy,wfxiang08/sqlalchemy,brianv0/sqlalchemy,robin900/sqlalchemy,davidfraser/sqlalchemy,sqlalchemy/sqlalchemy | test/profiling/compiler.py | test/profiling/compiler.py | import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
class CompileTest(TestBase, AssertsExecutionResults):
    """Profiling tests pinning the Python function-call counts of statement
    compilation, so accidental compiler overhead shows up as a failure."""

    def setUpAll(self):
        global t1, t2, metadata
        metadata = MetaData()
        t1 = Table('t1', metadata,
            Column('c1', Integer, primary_key=True),
            Column('c2', String(30)))
        t2 = Table('t2', metadata,
            Column('c1', Integer, primary_key=True),
            Column('c2', String(30)))

    # The decorator asserts the call count of the wrapped body; the extra
    # mapping supplies per-Python-version expected counts (here for 2.4).
    @profiling.function_call_count(72, {'2.4': 42})
    def test_insert(self):
        t1.insert().compile()

    @profiling.function_call_count(70, {'2.4': 42})
    def test_update(self):
        t1.update().compile()

    @profiling.function_call_count(202, versions={'2.4':133})
    def test_select(self):
        s = select([t1], t1.c.c2==t2.c.c1)
        s.compile()

if __name__ == '__main__':
    testenv.main()
| import testenv; testenv.configure_for_tests()
from sqlalchemy import *
from testlib import *
class CompileTest(TestBase, AssertsExecutionResults):
def setUpAll(self):
global t1, t2, metadata
metadata = MetaData()
t1 = Table('t1', metadata,
Column('c1', Integer, primary_key=True),
Column('c2', String(30)))
t2 = Table('t2', metadata,
Column('c1', Integer, primary_key=True),
Column('c2', String(30)))
@profiling.function_call_count(72, {'2.4': 42})
def test_insert(self):
t1.insert().compile()
@profiling.function_call_count(70, {'2.4': 42})
def test_update(self):
t1.update().compile()
@profiling.function_call_count(202, versions={'2.4':141})
def test_select(self):
s = select([t1], t1.c.c2==t2.c.c1)
s.compile()
if __name__ == '__main__':
testenv.main()
| mit | Python |
fb95fb39861e4924af729fd1512d6de89ebdef94 | Add parameter for change report path | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | sequana/report_mapping.py | sequana/report_mapping.py | # Import -----------------------------------------------------------------------
import os
from reports import HTMLTable
from sequana.report_main import BaseReport
# Class ------------------------------------------------------------------------
class MappingReport(BaseReport):
    """HTML report of mapping coverage.

    Renders the ``mapping/index.html`` Jinja template with two coverage
    plots plus tables of abnormally low/high coverage regions selected by
    z-score thresholds.
    """
    def __init__(self, low_threshold=-3, high_threshold=3,
                 directory="report", **kargs):
        """:param low_threshold: z-score below which coverage counts as low
        :param high_threshold: z-score above which coverage counts as high
        :param directory: output directory for the report and its images
        """
        super(MappingReport, self).__init__(
            jinja_filename="mapping/index.html",
            directory=directory,
            output_filename="mapping.html", **kargs)
        self.jinja['title'] = "Mapping Report"
        self.low_t = low_threshold
        self.high_t = high_threshold

    def set_data(self, data):
        # *data* must expose plot_coverage/plot_hist/get_low_coverage/
        # get_high_coverage/merge_region -- presumably a coverage object;
        # TODO confirm the concrete type with callers.
        self.mapping = data

    def parse(self):
        # Write the plots next to the HTML report.
        self.mapping.plot_coverage(filename=self.directory + os.sep +
                                   "coverage.png")
        self.mapping.plot_hist(filename=self.directory + os.sep +
                               "zscore_hist.png")
        # Table of merged low-coverage regions.
        low_cov_df = self.mapping.get_low_coverage(self.low_t)
        merge_low_cov = self.mapping.merge_region(low_cov_df)
        html = HTMLTable(merge_low_cov)
        html.add_bgcolor("size")
        self.jinja['low_coverage'] = html.to_html(index=False)
        # Table of merged high-coverage regions.
        high_cov_df = self.mapping.get_high_coverage(self.high_t)
        merge_high_cov = self.mapping.merge_region(high_cov_df)
        html = HTMLTable(merge_high_cov)
        html.add_bgcolor("size")
        self.jinja['high_coverage'] = html.to_html(index=False)
| # Import -----------------------------------------------------------------------
import os
from reports import HTMLTable
from sequana.report_main import BaseReport
# Class ------------------------------------------------------------------------
class MappingReport(BaseReport):
"""
"""
def __init__(self, low_threshold=-3, high_threshold=3, **kargs):
super(MappingReport, self).__init__(
jinja_filename="mapping/index.html",
directory="report",
output_filename="mapping.html", **kargs)
self.jinja['title'] = "Mapping Report"
self.low_t = low_threshold
self.high_t = high_threshold
def set_data(self, data):
self.mapping = data
def parse(self):
self.mapping.plot_coverage(filename=self.directory + os.sep +
"coverage.png")
self.mapping.plot_hist(filename=self.directory + os.sep +
"zscore_hist.png")
low_cov_df = self.mapping.get_low_coverage(self.low_t)
merge_low_cov = self.mapping.merge_region(low_cov_df)
html = HTMLTable(merge_low_cov)
html.add_bgcolor("size")
self.jinja['low_coverage'] = html.to_html(index=False)
high_cov_df = self.mapping.get_high_coverage(self.high_t)
merge_high_cov = self.mapping.merge_region(high_cov_df)
html = HTMLTable(merge_high_cov)
html.add_bgcolor("size")
self.jinja['high_coverage'] = html.to_html(index=False)
| bsd-3-clause | Python |
bb588a10240c53ebd5b161ae81352bbf0d5cd985 | fix league ordering | a2ultimate/ultimate-league-app,rdonnelly/ultimate-league-app,rdonnelly/ultimate-league-app,a2ultimate/ultimate-league-app,rdonnelly/ultimate-league-app,a2ultimate/ultimate-league-app,a2ultimate/ultimate-league-app,rdonnelly/ultimate-league-app | src/ultimate/templatetags/leagues.py | src/ultimate/templatetags/leagues.py | from django import template
from django.utils import timezone
from ultimate.leagues.models import League
register = template.Library()
@register.filter
def sort_by_league_start_date_weekday(league_divisions):
divisions = [d for d in league_divisions if d.type == League.LEAGUE_TYPE_LEAGUE]
divisions.sort(key=lambda d: (d.league_start_date.strftime('%w'), d.league_start_date))
other_divisions = [d for d in league_divisions if d.type != League.LEAGUE_TYPE_LEAGUE]
other_divisions.sort(key=lambda d: d.league_start_date)
return divisions + other_divisions
@register.filter
def is_visible(league, user):
return league.is_visible(user)
@register.filter
def is_open(league, user):
return league.is_open(user)
@register.filter
def is_waitlist(league, user):
return league.is_waitlist(user)
@register.filter
def is_past_deadline(league_date):
return bool(timezone.now() > league_date)
@register.filter
def is_free(league):
return bool(league.check_price == 0 and league.paypal_price == 0)
@register.filter
def is_accepting_registrations(league, user):
return league.is_accepting_registrations(user)
@register.filter
def is_waitlisting_registrations(league, user):
return league.is_waitlisting_registrations(user)
@register.filter
def has_user_registration(league, user):
return league.get_user_registration(user) is not None
@register.filter
def has_complete_user_registration(league, user):
user_registration = league.get_user_registration(user)
return user_registration and \
user_registration.is_complete
@register.filter
# returns league captains as user objects
def get_captains(league):
return league.get_captains()
@register.filter
# returns league captains as teammember objects
def get_captains_teammember(league):
return league.get_captains_teammember()
@register.filter
# returns whether a user has filled out a player survey for a league
def get_player_survey_status(league, user):
return league.player_survey_complete_for_user(user)
| from django import template
from django.utils import timezone
from ultimate.leagues.models import League
register = template.Library()
@register.filter
def sort_by_league_start_date_weekday(divisions):
leagues = filter(lambda k: k.type == League.LEAGUE_TYPE_LEAGUE, divisions)
leagues.sort(key=lambda k: k.league_start_date.strftime('%w'))
other_divisions = filter(lambda k: k.type != League.LEAGUE_TYPE_LEAGUE, divisions)
other_divisions.sort(key=lambda k: k.league_start_date)
return leagues + other_divisions
@register.filter
def is_visible(league, user):
return league.is_visible(user)
@register.filter
def is_open(league, user):
return league.is_open(user)
@register.filter
def is_waitlist(league, user):
return league.is_waitlist(user)
@register.filter
def is_past_deadline(league_date):
return bool(timezone.now() > league_date)
@register.filter
def is_free(league):
return bool(league.check_price == 0 and league.paypal_price == 0)
@register.filter
def is_accepting_registrations(league, user):
return league.is_accepting_registrations(user)
@register.filter
def is_waitlisting_registrations(league, user):
return league.is_waitlisting_registrations(user)
@register.filter
def has_user_registration(league, user):
return league.get_user_registration(user) is not None
@register.filter
def has_complete_user_registration(league, user):
user_registration = league.get_user_registration(user)
return user_registration and \
user_registration.is_complete
@register.filter
# returns league captains as user objects
def get_captains(league):
return league.get_captains()
@register.filter
# returns league captains as teammember objects
def get_captains_teammember(league):
return league.get_captains_teammember()
@register.filter
# returns whether a user has filled out a player survey for a league
def get_player_survey_status(league, user):
return league.player_survey_complete_for_user(user)
| bsd-3-clause | Python |
3b9e9d6ac573472aa68bc1d34e22fc27109045c0 | Update the build version | vlegoff/cocomud | src/version.py | src/version.py | BUILD = 11
| BUILD = 9
| bsd-3-clause | Python |
327a255a1ab7fd4b50898ebbeadf27235d6331e6 | Print auth url to console | commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot | tools/lib/auth.py | tools/lib/auth.py | #!/usr/bin/env python3
import json
import os
import sys
import webbrowser
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import urlencode, parse_qs
from common.file_helpers import mkdirs_exists_ok
from tools.lib.api import CommaApi, APIError
from tools.lib.auth_config import set_token
class ClientRedirectServer(HTTPServer):
    # Filled in by ClientRedirectHandler with the query parameters of the
    # OAuth redirect request (e.g. 'code' or 'error').
    query_params = {}
class ClientRedirectHandler(BaseHTTPRequestHandler):
    """Handles the local OAuth redirect and stashes its query string."""

    def do_GET(self):
        # Ignore anything but the OAuth redirect endpoint (e.g. favicon).
        if not self.path.startswith('/auth_redirect'):
            self.send_response(204)
            return

        # Parse the query string and publish it on the server object where
        # the login loop polls for it.
        query = self.path.split('?', 1)[-1]
        query = parse_qs(query, keep_blank_values=True)
        self.server.query_params = query

        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(b'Return to the CLI to continue')

    def log_message(self, format, *args):
        pass  # this prevent http server from dumping messages to stdout
def auth_redirect_link(port):
    """Build the OAuth endpoints for a local login on *port*.

    Returns ``(redirect_uri, oauth_uri)``: the local callback URL the
    browser will be redirected to, and the Google consent-page URL to open.
    """
    callback_url = f'http://localhost:{port}/auth_redirect'
    oauth_query = urlencode({
        'type': 'web_server',
        'client_id': '45471411055-ornt4svd2miog6dnopve7qtmh5mnu6id.apps.googleusercontent.com',
        'redirect_uri': callback_url,
        'response_type': 'code',
        'scope': 'https://www.googleapis.com/auth/userinfo.email',
        'prompt': 'select_account',
    })
    return (callback_url, 'https://accounts.google.com/o/oauth2/auth?' + oauth_query)
def login():
    """Interactive OAuth login: open the Google consent page, wait for the
    redirect on a local HTTP server, then exchange the code for a token."""
    port = 9090
    redirect_uri, oauth_uri = auth_redirect_link(port)

    web_server = ClientRedirectServer(('localhost', port), ClientRedirectHandler)
    print(f'To sign in, use your browser and navigate to {oauth_uri}')
    webbrowser.open(oauth_uri, new=2)

    while True:
        # Serve one request at a time until the handler captures either a
        # 'code' or an 'error' in query_params.
        web_server.handle_request()
        if 'code' in web_server.query_params:
            code = web_server.query_params['code']
            break
        elif 'error' in web_server.query_params:
            print('Authentication Error: "%s". Description: "%s" ' % (
                web_server.query_params['error'],
                web_server.query_params.get('error_description')), file=sys.stderr)
            # NOTE(review): after this break, `code` is unbound and the post
            # below raises NameError -- confirm whether that is intended.
            break

    try:
        # Exchange the authorization code for an access token and persist it.
        auth_resp = CommaApi().post('v2/auth/', data={'code': code, 'redirect_uri': redirect_uri})
        set_token(auth_resp['access_token'])
        print('Authenticated')
    except APIError as e:
        print(f'Authentication Error: {e}', file=sys.stderr)

if __name__ == '__main__':
    login()
| #!/usr/bin/env python3
import json
import os
import sys
import webbrowser
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import urlencode, parse_qs
from common.file_helpers import mkdirs_exists_ok
from tools.lib.api import CommaApi, APIError
from tools.lib.auth_config import set_token
class ClientRedirectServer(HTTPServer):
    """HTTPServer that exposes the query params of the last OAuth redirect."""
    # Class-level default; replaced per-request by ClientRedirectHandler.do_GET.
    query_params = {}
class ClientRedirectHandler(BaseHTTPRequestHandler):
    """Handles the OAuth redirect and stashes its query string on the server."""

    def do_GET(self):
        # Ignore anything but the redirect path (e.g. favicon requests).
        if not self.path.startswith('/auth_redirect'):
            self.send_response(204)
            return
        query = self.path.split('?', 1)[-1]
        query = parse_qs(query, keep_blank_values=True)
        # Make the parsed params visible to the polling loop in login().
        self.server.query_params = query
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(b'Return to the CLI to continue')

    def log_message(self, format, *args):
        pass  # this prevents the http server from dumping messages to stdout
def auth_redirect_link(port):
redirect_uri = f'http://localhost:{port}/auth_redirect'
params = {
'type': 'web_server',
'client_id': '45471411055-ornt4svd2miog6dnopve7qtmh5mnu6id.apps.googleusercontent.com',
'redirect_uri': redirect_uri,
'response_type': 'code',
'scope': 'https://www.googleapis.com/auth/userinfo.email',
'prompt': 'select_account',
}
return (redirect_uri, 'https://accounts.google.com/o/oauth2/auth?' + urlencode(params))
def login():
port = 9090
redirect_uri, oauth_uri = auth_redirect_link(port)
web_server = ClientRedirectServer(('localhost', port), ClientRedirectHandler)
webbrowser.open(oauth_uri, new=2)
while True:
web_server.handle_request()
if 'code' in web_server.query_params:
code = web_server.query_params['code']
break
elif 'error' in web_server.query_params:
print('Authentication Error: "%s". Description: "%s" ' % (
web_server.query_params['error'],
web_server.query_params.get('error_description')), file=sys.stderr)
break
try:
auth_resp = CommaApi().post('v2/auth/', data={'code': code, 'redirect_uri': redirect_uri})
set_token(auth_resp['access_token'])
print('Authenticated')
except APIError as e:
print(f'Authentication Error: {e}', file=sys.stderr)
if __name__ == '__main__':
login()
| mit | Python |
c93eff1b0d9e8a833829c2c697868a5fee5b33fd | Add JSON decoder for datetime timestamp | hoh/Billabong,hoh/Billabong | billabong/utils.py | billabong/utils.py | # Copyright (c) 2015 "Hugo Herter http://hugoherter.com"
#
# This file is part of Billabong.
#
# Intercom is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import json
def json_handler(obj):
    """Serialize objects the stock JSON encoder cannot handle.

    Supports datetimes/dates (rendered as ISO 8601 strings) and bytes
    (decoded as UTF-8). Anything else is passed to the default encoder,
    which raises TypeError.
    """
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    if isinstance(obj, bytes):
        return obj.decode()
    # Removed a stray debug `print([obj])` left over from development;
    # the default encoder raises TypeError for unknown types.
    return json.JSONEncoder().default(obj)
def dumps(obj, indent=2):
    """json.dumps wrapper using json_handler for datetimes and bytes."""
    return json.dumps(obj, default=json_handler, indent=indent)
def json_loader(dico):
    """Object hook reviving string 'timestamp' values into datetime objects.

    Unparseable timestamps are left as-is with a warning printed.
    """
    raw = dico.get('timestamp')
    if isinstance(raw, str):
        try:
            dico['timestamp'] = datetime.datetime.strptime(
                raw, "%Y-%m-%dT%H:%M:%S.%f")
        except ValueError as error:
            print("WARNING: ", error)
    return dico
def loads(string):
    """json.loads wrapper that revives 'timestamp' fields via json_loader."""
    return json.loads(string, object_hook=json_loader)
def read_in_chunks(file_object, chunk_size=1024):
    """Lazily yield successive chunks of *file_object*.

    Default chunk size: 1k. Stops at end of file (empty read).
    """
    while True:
        chunk = file_object.read(chunk_size)
        if not chunk:
            return
        yield chunk
| # Copyright (c) 2015 "Hugo Herter http://hugoherter.com"
#
# This file is part of Billabong.
#
# Intercom is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import json
def json_handler(obj):
if isinstance(obj, datetime.datetime) or isinstance(obj, datetime.date):
return obj.isoformat()
elif isinstance(obj, bytes):
return obj.decode()
else:
print([obj])
return json.JSONEncoder().default(obj)
def dumps(obj, indent=2):
return json.dumps(obj, default=json_handler, indent=indent)
loads = json.loads
def read_in_chunks(file_object, chunk_size=1024):
"""Lazy function (generator) to read a file piece by piece.
Default chunk size: 1k."""
while True:
data = file_object.read(chunk_size)
if not data:
break
yield data
| agpl-3.0 | Python |
3a8d78b024033e6cd74ebd787aab2e3cef719f82 | modify models to return human friendly repr | texastribune/tribwire,texastribune/tribwire,texastribune/tribwire,texastribune/tribwire | tribwire/models.py | tribwire/models.py | from django.db import models
from django.contrib import admin
from django.contrib.auth import get_user_model
class Link(models.Model):
    """A suggested external link, attributed to a user and a source site."""
    url = models.URLField(unique=True)
    headline = models.CharField(max_length=128)
    blurb = models.CharField(max_length=128)
    date_suggested = models.DateField()
    user = models.ForeignKey(get_user_model())  # who suggested the link
    source = models.ForeignKey('Source')
    wires = models.ManyToManyField('Wire')  # a link can appear on several wires

    def __unicode__(self):
        # Human-friendly repr: the URL is unique per Link.
        return self.url
class Source(models.Model):
    """A publication that links are drawn from."""
    name = models.CharField(max_length=128, unique=True)
    url = models.URLField()
    favicon = models.TextField()  # stored favicon data/markup for display

    def __unicode__(self):
        return self.name
class Wire(models.Model):
name = models.CharField(max_length=128, unique=True)
slug = models.SlugField()
def __unicode__(self):
return self.name | from django.db import models
from django.contrib import admin
from django.contrib.auth import get_user_model
class Link(models.Model):
url = models.URLField(unique=True)
headline = models.CharField(max_length=128)
blurb = models.CharField(max_length=128)
date_suggested = models.DateField()
user = models.ForeignKey(get_user_model())
source = models.ForeignKey('Source')
wires = models.ManyToManyField('Wire')
class Source(models.Model):
name = models.CharField(max_length=128, unique=True)
url = models.URLField()
favicon = models.TextField()
class Wire(models.Model):
name = models.CharField(max_length=128, unique=True)
slug = models.SlugField() | apache-2.0 | Python |
e7cc149ed3a338956dc5003e100890395f50c9cd | fix get_probe | 20c/vaping,20c/vaping | vaping/__init__.py | vaping/__init__.py | from __future__ import absolute_import
# import to namespace
from .config import Config # noqa
from pluginmgr.config import ConfigPluginManager
class PluginManager(ConfigPluginManager):
    """ConfigPluginManager that validates instantiated plugins by interface."""

    def get_probe(self, node, pctx):
        """Instantiate *node* and ensure it is a probe plugin (has probe())."""
        obj = self.get_instance(node, pctx)
        if not hasattr(obj, 'probe'):
            # Fall back to the node repr when the plugin config has no name.
            name = obj.pluginmgr_config.get('name', str(node))
            raise TypeError('%s is not a probe plugin, missing ::probe()' % (name))
        return obj

    def get_output(self, node, pctx):
        """Instantiate *node* and ensure it is an output plugin (has emit())."""
        obj = self.get_instance(node, pctx)
        if not hasattr(obj, 'emit'):
            name = obj.pluginmgr_config.get('name', str(node))
            raise TypeError('%s is not an output plugin, missing ::emit()' % (name))
        return obj
plugin = PluginManager('vaping.plugins')
| from __future__ import absolute_import
# import to namespace
from .config import Config # noqa
from pluginmgr.config import ConfigPluginManager
class PluginManager(ConfigPluginManager):
def get_probe(self, node, pctx):
obj = self.get_instance(node, pctx)
if not hasattr(obj, 'probe'):
raise TypeError('%s is not a probe plugin, missing ::probe()' % (obj.name))
return obj
def get_output(self, node, pctx):
obj = self.get_instance(node, pctx)
if not hasattr(obj, 'emit'):
name = obj.pluginmgr_config.get('name', str(node))
raise TypeError('%s is not an output plugin, missing ::emit()' % (name))
return obj
plugin = PluginManager('vaping.plugins')
| apache-2.0 | Python |
b6a20a1743cf6a4aaa31ebd6c946e63c997612ee | add read method to service | mylokin/servy | servy/client.py | servy/client.py | from __future__ import absolute_import
import urllib2
import urlparse
import servy.proto as proto
import servy.exc as exc
class Service(object):
    """Remote service endpoint addressed as http://<host>/<name> (Python 2)."""

    def __init__(self, name, host):
        self.name = name
        self.host = host

    @property
    def url(self):
        """Full URL of the service, assembled with urlparse."""
        url = {
            'scheme': 'http',
            'netloc': self.host,
            'path': self.name,
            'params': '',
            'query': '',
            'fragment': '',
        }
        return urlparse.urlunparse(urlparse.ParseResult(**{k: v or '' for k, v in url.iteritems()}))

    def read(self, message):
        """POST *message* to the service and return the raw response body."""
        return urllib2.urlopen(self.url, message).read()
class Client(object):
    """RPC proxy: attribute access builds a dotted procedure path, calling
    the proxy POSTs the encoded request, e.g. client.foo.bar(1)."""

    def __init__(self, service, proc=None):
        if isinstance(service, dict):
            # Accept {'name': ..., 'host': ...} in place of a Service object.
            service = Service(**service)
        self.__service = service
        self.__proc = proc

    def __getattr__(self, name):
        # Each attribute hop returns a new proxy with the path extended.
        if self.__proc:
            proc = '{}.{}'.format(self.__proc, name)
        else:
            proc = name
        return Client(self.__service, proc)

    def __call__(self, *args, **kw):
        """Invoke the accumulated procedure remotely; decode or map errors."""
        message = proto.Request.encode(self.__proc, args, kw)
        try:
            content = self.__service.read(message)
        except urllib2.HTTPError as e:
            # Translate transport-level status codes into domain exceptions.
            if e.code == 404:
                raise exc.ServiceNotFound(self.__service.name)
            elif e.code == 501:
                raise exc.ProcedureNotFound(self.__proc)
            elif e.code == 503:
                # Body carries the remote traceback.
                message = e.read()
                tb = proto.RemoteException.decode(message)
                raise exc.RemoteException(tb)
            else:
                raise
        return proto.Response.decode(content)
| from __future__ import absolute_import
import urllib2
import urlparse
import servy.proto as proto
import servy.exc as exc
class Service(object):
def __init__(self, name, host):
self.name = name
self.host = host
@property
def url(self):
url = {
'scheme': 'http',
'netloc': self.host,
'path': self.name,
'params': '',
'query': '',
'fragment': '',
}
return urlparse.urlunparse(urlparse.ParseResult(**{k: v or '' for k, v in url.iteritems()}))
class Client(object):
def __init__(self, service, proc=None):
if isinstance(service, dict):
service = Service(**service)
self.__service = service
self.__proc = proc
def __getattr__(self, name):
if self.__proc:
proc = '{}.{}'.format(self.__proc, name)
else:
proc = name
return Client(self.__service, proc)
def __call__(self, *args, **kw):
message = proto.Request.encode(self.__proc, args, kw)
try:
content = urllib2.urlopen(self.__service.url, message).read()
except urllib2.HTTPError as e:
if e.code == 404:
raise exc.ServiceNotFound(self.__service.name)
elif e.code == 501:
raise exc.ProcedureNotFound(self.__proc)
elif e.code == 503:
message = e.read()
tb = proto.RemoteException.decode(message)
raise exc.RemoteException(tb)
else:
raise
return proto.Response.decode(content)
| mit | Python |
b8c2a8713b5b6e95f671adfbf446c1030507f117 | add concat and mapcat to top level imports | jcrist/toolz,simudream/toolz,quantopian/toolz,jdmcbr/toolz,berrytj/toolz,JNRowe/toolz,machinelearningdeveloper/toolz,llllllllll/toolz,whilo/toolz,quantopian/toolz,machinelearningdeveloper/toolz,Julian-O/toolz,karansag/toolz,bartvm/toolz,llllllllll/toolz,Julian-O/toolz,jdmcbr/toolz,pombredanne/toolz,obmarg/toolz,karansag/toolz,JNRowe/toolz,bartvm/toolz,pombredanne/toolz,simudream/toolz,jcrist/toolz,cpcloud/toolz,berrytj/toolz,cpcloud/toolz,whilo/toolz,obmarg/toolz | toolz/__init__.py | toolz/__init__.py | from .itertoolz import (groupby, countby, frequencies, reduceby,
first, second, nth, take, drop, rest, last, get,
merge_sorted, concat, mapcat,
interleave, unique, intersection, iterable, distinct)
from .functoolz import (remove, iterate, accumulate,
memoize, curry, comp,
thread_first, thread_last)
from .dicttoolz import merge, keymap, valmap, assoc, update_in
| from .itertoolz import (groupby, countby, frequencies, reduceby,
first, second, nth, take, drop, rest, last, get,
merge_sorted,
interleave, unique, intersection, iterable, distinct)
from .functoolz import (remove, iterate, accumulate,
memoize, curry, comp,
thread_first, thread_last)
from .dicttoolz import merge, keymap, valmap, assoc, update_in
| bsd-3-clause | Python |
c6942fd8ad26d4d31e07fa6ef1554b6d83501255 | Support for ValuesQuerySet / ValuesListQuerySet | acdha/django-queryset-transform | queryset_transform/__init__.py | queryset_transform/__init__.py | from django.db import models
class TransformQuerySetMixin(object):
    """QuerySet mixin that runs registered transform callables over results."""

    def __init__(self, *args, **kwargs):
        super(TransformQuerySetMixin, self).__init__(*args, **kwargs)
        self._transform_fns = []

    def _clone(self, klass=None, setup=False, **kwargs):
        c = super(TransformQuerySetMixin, self)._clone(klass, setup, **kwargs)
        # Propagate extra clone kwargs and copy the transform list so the
        # clone's transforms are independent of this queryset's.
        c.__dict__.update(kwargs)
        c._transform_fns = self._transform_fns[:]
        return c

    def transform(self, fn):
        """Return a clone that will call fn(results) after evaluation."""
        c = self._clone()
        c._transform_fns.append(fn)
        return c

    def iterator(self):
        result_iter = super(TransformQuerySetMixin, self).iterator()
        if self._transform_fns:
            # Transforms operate on the materialized list (may mutate in place).
            results = list(result_iter)
            for fn in self._transform_fns:
                fn(results)
            return iter(results)
        return result_iter

    def values(self, *fields):
        """Like QuerySet.values(), returning a transform-aware ValuesQuerySet."""
        new_qs = self._clone(klass=TransformValuesQuerySet, setup=True, _fields=fields)
        # We have to clear any existing transforms as they will expect a different result type
        # but want to allow adding new ones:
        del new_qs._transform_fns[:]
        return new_qs

    def values_list(self, *fields, **kwargs):
        """Like QuerySet.values_list(), supporting the flat= keyword."""
        flat = kwargs.pop('flat', False)
        if kwargs:
            raise TypeError('Unexpected keyword arguments to values_list: %s' % (list(kwargs),))
        if flat and len(fields) > 1:
            raise TypeError("'flat' is not valid when values_list is called with more than one field.")
        new_qs = self._clone(klass=TransformValuesListQuerySet, setup=True, _fields=fields, flat=flat)
        # We have to clear any existing transforms as they will expect a different result type
        # but want to allow adding new ones:
        del new_qs._transform_fns[:]
        return new_qs
class TransformQuerySet(TransformQuerySetMixin, models.query.QuerySet):
pass
class TransformValuesQuerySet(TransformQuerySetMixin, models.query.ValuesQuerySet):
pass
class TransformValuesListQuerySet(TransformQuerySetMixin, models.query.ValuesListQuerySet):
pass
class TransformManager(models.Manager):
    """Manager whose default queryset supports .transform()."""
    def get_query_set(self):
        return TransformQuerySet(self.model)
| from django.db import models
class TransformQuerySetMixin(object):
def __init__(self, *args, **kwargs):
super(TransformQuerySetMixin, self).__init__(*args, **kwargs)
self._transform_fns = []
def _clone(self, klass=None, setup=False, **kw):
c = super(TransformQuerySetMixin, self)._clone(klass, setup, **kw)
c._transform_fns = self._transform_fns[:]
return c
def transform(self, fn):
c = self._clone()
c._transform_fns.append(fn)
return c
def iterator(self):
result_iter = super(TransformQuerySetMixin, self).iterator()
if self._transform_fns:
results = list(result_iter)
for fn in self._transform_fns:
fn(results)
return iter(results)
return result_iter
class TransformQuerySet(TransformQuerySetMixin, models.query.QuerySet):
pass
class TransformManager(models.Manager):
def get_query_set(self):
return TransformQuerySet(self.model)
| bsd-3-clause | Python |
8db28d6d27b63535d21a44e6f9a20f991ec2556a | Update AWS::EFS::FileSystem per 2020-06-16 changes | cloudtools/troposphere,cloudtools/troposphere | troposphere/efs.py | troposphere/efs.py | from . import AWSObject, AWSProperty, Tags
from .validators import boolean
Bursting = 'bursting'
Provisioned = 'provisioned'
def throughput_mode_validator(mode):
    """Validate an EFS ThroughputMode value; return it unchanged if valid."""
    valid_modes = (Bursting, Provisioned)
    if mode in valid_modes:
        return mode
    raise ValueError(
        'ThroughputMode must be one of: "%s"' % (', '.join(valid_modes))
    )
def provisioned_throughput_validator(throughput):
    """Validate an EFS ProvisionedThroughputInMibps value (must not be < 0)."""
    # `not x < 0.0` (rather than `x >= 0.0`) keeps the original NaN behavior.
    if not throughput < 0.0:
        return throughput
    raise ValueError(
        'ProvisionedThroughputInMibps must be greater than or equal to 0.0'
    )
class PosixUser(AWSProperty):
props = {
'Gid': (basestring, True),
'SecondaryGids': ([basestring], False),
'Uid': (basestring, True),
}
class CreationInfo(AWSProperty):
props = {
'OwnerGid': (basestring, True),
'OwnerUid': (basestring, True),
'Permissions': (basestring, True),
}
class RootDirectory(AWSProperty):
props = {
'CreationInfo': (CreationInfo, False),
'Path': (basestring, False),
}
class AccessPoint(AWSObject):
resource_type = "AWS::EFS::AccessPoint"
props = {
'AccessPointTags': (Tags, False),
'ClientToken': (basestring, False),
'FileSystemId': (basestring, True),
'PosixUser': (PosixUser, False),
'RootDirectory': (RootDirectory, False),
}
class LifecyclePolicy(AWSProperty):
props = {
'TransitionToIA': (basestring, True),
}
class FileSystem(AWSObject):
    """AWS::EFS::FileSystem resource (adds FileSystemPolicy per the
    2020-06-16 CloudFormation update)."""
    resource_type = "AWS::EFS::FileSystem"

    props = {
        'Encrypted': (boolean, False),
        'FileSystemPolicy': (dict, False),  # IAM resource policy document
        'FileSystemTags': (Tags, False),
        'KmsKeyId': (basestring, False),  # basestring => Python 2 codebase
        'LifecyclePolicies': ([LifecyclePolicy], False),
        'PerformanceMode': (basestring, False),
        'ProvisionedThroughputInMibps': (float, False),
        'ThroughputMode': (throughput_mode_validator, False),
    }
class MountTarget(AWSObject):
resource_type = "AWS::EFS::MountTarget"
props = {
'FileSystemId': (basestring, True),
'IpAddress': (basestring, False),
'SecurityGroups': ([basestring], True),
'SubnetId': (basestring, True),
}
| from . import AWSObject, AWSProperty, Tags
from .validators import boolean
Bursting = 'bursting'
Provisioned = 'provisioned'
def throughput_mode_validator(mode):
valid_modes = [Bursting, Provisioned]
if mode not in valid_modes:
raise ValueError(
'ThroughputMode must be one of: "%s"' % (', '.join(valid_modes))
)
return mode
def provisioned_throughput_validator(throughput):
if throughput < 0.0:
raise ValueError(
'ProvisionedThroughputInMibps must be greater than or equal to 0.0'
)
return throughput
class PosixUser(AWSProperty):
props = {
'Gid': (basestring, True),
'SecondaryGids': ([basestring], False),
'Uid': (basestring, True),
}
class CreationInfo(AWSProperty):
props = {
'OwnerGid': (basestring, True),
'OwnerUid': (basestring, True),
'Permissions': (basestring, True),
}
class RootDirectory(AWSProperty):
props = {
'CreationInfo': (CreationInfo, False),
'Path': (basestring, False),
}
class AccessPoint(AWSObject):
resource_type = "AWS::EFS::AccessPoint"
props = {
'AccessPointTags': (Tags, False),
'ClientToken': (basestring, False),
'FileSystemId': (basestring, True),
'PosixUser': (PosixUser, False),
'RootDirectory': (RootDirectory, False),
}
class LifecyclePolicy(AWSProperty):
props = {
'TransitionToIA': (basestring, True),
}
class FileSystem(AWSObject):
resource_type = "AWS::EFS::FileSystem"
props = {
'Encrypted': (boolean, False),
'FileSystemTags': (Tags, False),
'KmsKeyId': (basestring, False),
'LifecyclePolicies': ([LifecyclePolicy], False),
'PerformanceMode': (basestring, False),
'ProvisionedThroughputInMibps': (float, False),
'ThroughputMode': (throughput_mode_validator, False),
}
class MountTarget(AWSObject):
resource_type = "AWS::EFS::MountTarget"
props = {
'FileSystemId': (basestring, True),
'IpAddress': (basestring, False),
'SecurityGroups': ([basestring], True),
'SubnetId': (basestring, True),
}
| bsd-2-clause | Python |
d21676fca075009ee4ff8d1bb979989ea4460c7c | compress output addin files. | genegis/genegis,genegis/genegis,genegis/genegis | makeaddin.py | makeaddin.py | import os
import re
import zipfile
current_path = os.path.dirname(os.path.abspath(__file__))
out_zip_name = os.path.join(current_path,
os.path.basename(current_path) + ".esriaddin")
backup_patterns = {
'PLUGIN_BACKUP_PATTERN': re.compile(".*_addin_[0-9]+[.]py$", re.IGNORECASE),
'VIM_SWAP_PATTERN': re.compile(".*\.sw[op]$", re.IGNORECASE),
'COMPLIED_PYTHON_PATTERN': re.compile(".*\.pyc$", re.IGNORECASE),
'TODO_PATTERN': re.compile('todo.txt')
}
skip_base = ['Install\\toolbox\\arcobjects']
skip_paths = [os.path.join(current_path, s) for s in skip_base]
def looks_like_a_backup(filename):
    """Return True when *filename* matches any known backup/junk pattern."""
    return any(bool(pattern.match(filename))
               for pattern in backup_patterns.values())
zip_file = zipfile.ZipFile(out_zip_name, 'w', zipfile.ZIP_DEFLATED)
for filename in ('config.xml', 'README.md', 'makeaddin.py'):
zip_file.write(os.path.join(current_path, filename), filename)
dirs_to_add = ['Images', 'Install']
for directory in dirs_to_add:
for (path, dirs, files) in os.walk(os.path.join(current_path, directory)):
skip = False
for skip_path in skip_paths:
if path.find(skip_path) != -1:
skip = True
if skip:
# skip this directory
continue
archive_path = os.path.relpath(path, current_path)
found_file = False
for file in (f for f in files if not looks_like_a_backup(f)):
archive_file = os.path.join(archive_path, file)
print archive_file
zip_file.write(os.path.join(path, file), archive_file)
found_file = True
if not found_file:
zip_file.writestr(os.path.join(archive_path, 'placeholder.txt'),
"(Empty directory)")
zip_file.close()
| import os
import re
import zipfile
current_path = os.path.dirname(os.path.abspath(__file__))
out_zip_name = os.path.join(current_path,
os.path.basename(current_path) + ".esriaddin")
backup_patterns = {
'PLUGIN_BACKUP_PATTERN': re.compile(".*_addin_[0-9]+[.]py$", re.IGNORECASE),
'VIM_SWAP_PATTERN': re.compile(".*\.sw[op]$", re.IGNORECASE),
'COMPLIED_PYTHON_PATTERN': re.compile(".*\.pyc$", re.IGNORECASE),
'TODO_PATTERN': re.compile('todo.txt')
}
skip_base = ['Install\\toolbox\\arcobjects']
skip_paths = [os.path.join(current_path, s) for s in skip_base]
def looks_like_a_backup(filename):
is_backup = False
for name, pattern in backup_patterns.items():
if bool(pattern.match(filename)):
is_backup = True
return is_backup
zip_file = zipfile.ZipFile(out_zip_name, 'w')
for filename in ('config.xml', 'README.md', 'makeaddin.py'):
zip_file.write(os.path.join(current_path, filename), filename)
dirs_to_add = ['Images', 'Install']
for directory in dirs_to_add:
for (path, dirs, files) in os.walk(os.path.join(current_path, directory)):
skip = False
for skip_path in skip_paths:
if path.find(skip_path) != -1:
skip = True
if skip:
# skip this directory
continue
archive_path = os.path.relpath(path, current_path)
found_file = False
for file in (f for f in files if not looks_like_a_backup(f)):
archive_file = os.path.join(archive_path, file)
print archive_file
zip_file.write(os.path.join(path, file), archive_file)
found_file = True
if not found_file:
zip_file.writestr(os.path.join(archive_path, 'placeholder.txt'),
"(Empty directory)")
zip_file.close()
| mpl-2.0 | Python |
a1d9e6452e055cb0d5a597b62f8a2c8ba8afd148 | Bump version to 9.1.1 | hhursev/recipe-scraper | recipe_scrapers/__version__.py | recipe_scrapers/__version__.py | __version__ = "9.1.1"
| __version__ = "9.1.0"
| mit | Python |
25090769e50426d61c56ca86d6970ea49c61f040 | test convolutions for many bands. Lower wavelengths take longer! hench no VIS here | jason-neal/eniric,jason-neal/eniric | bin/nIR_testing.py | bin/nIR_testing.py | #!/usr/bin/python
# Testing script for nIR analysis
# Run new and old code to test output.S
# Jason Neal
# December 2016
from __future__ import division, print_function
from eniric.nIRanalysis import convolution, resample_allfiles
from eniric.original_code.nIRanalysis import convolution as old_convolution
from eniric.original_code.nIRanalysis import resample_allfiles as old_resample_allfiles
import matplotlib.pyplot as plt
import datetime
spectrum_name = "lte03900-4.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes_wave.dat"
data_rep = "../data/PHOENIX-ACES_spectra/"
results_dir = "../data/results/"
resampled_dir = "../data/resampled/"
spectrum_path = data_rep + spectrum_name
# Some test parameters
band = "GAP"
R = 100000
vsini = 1
epsilon = 0.6
FWHM_lim = 5
plot = False
numProcs = 4
do_old = True
#for band in ["GAP", "Y", "J", "K"]:
for band in ["K"]:
# New version
start_time = datetime.datetime.now()
print("Time at start of new code", start_time)
wav_band, flux_conv_res = convolution(spectrum_path, band, vsini, R, epsilon, FWHM_lim, plot, numProcs=numProcs)
end_time = datetime.datetime.now()
print("Time at end of new code", end_time)
print("Time to run new convolution = {}".format((end_time-start_time)))
resample_allfiles()
#for band in ["GAP", "Y", "J", "K"]:
for band in ["K"]:
# The unchanged version
if do_old:
old_start_time = datetime.datetime.now()
print("Time at start of old code", old_start_time)
old_wav_band, old_flux_conv_res = old_convolution(spectrum_path, band, vsini, R, epsilon, FWHM_lim, plot) # takes a very long time. good progress indicator though
old_end_time = datetime.datetime.now()
print("Time at end of old code", old_end_time)
print("Time to run old convolution = {}".format((end_time-start_time)))
old_resample_allfiles()
if plot:
# Plot results together
plt.plot(old_wav_band, old_flux_conv_res, label='Old code')
plt.plot(wav_band, flux_conv_res, label='New code')
plt.legend(loc=0)
plt.show()
| #!/usr/bin/python
# Testing script for nIR analysis
# Run new and old code to test output.S
# Jason Neal
# December 2016
from __future__ import division, print_function
from eniric.nIRanalysis import convolution, resample_allfiles
from eniric.original_code.nIRanalysis import convolution as old_convolution
from eniric.original_code.nIRanalysis import resample_allfiles as old_resample_allfiles
import matplotlib.pyplot as plt
import datetime
spectrum_name = "lte03900-4.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes_wave.dat"
data_rep = "../data/PHOENIX-ACES_spectra/"
results_dir = "../data/results/"
resampled_dir = "../data/resampled/"
spectrum_path = data_rep + spectrum_name
# Some test parameters
band = "GAP"
R = 100000
vsini = 1
epsilon = 0.6
FWHM_lim = 5
plot = False
numProcs = 4
do_old = True
# print("readin =", read_spectrum(spectrum)) # takes a bit of time
# New version
start_time = datetime.datetime.now()
print("Time at start of new code", start_time)
wav_band, flux_conv_res = convolution(spectrum_path, band, vsini, R, epsilon, FWHM_lim, plot, numProcs=numProcs)
end_time = datetime.datetime.now()
print("Time at end of new code", end_time)
print("Time to run new convolution = {}".format((end_time-start_time)))
resample_allfiles()
# The unchanged version
if do_old:
old_start_time = datetime.datetime.now()
print("Time at start of old code", old_start_time)
old_wav_band, old_flux_conv_res = old_convolution(spectrum_path, band, vsini, R, epsilon, FWHM_lim, plot) # takes a very long time. good progress indicator though
old_end_time = datetime.datetime.now()
print("Time at end of old code", old_end_time)
print("Time to run old convolution = {}".format((end_time-start_time)))
old_resample_allfiles()
# Plot results together
plt.plot(old_wav_band, old_flux_conv_res, label='Old code')
plt.plot(wav_band, flux_conv_res, label='New code')
plt.legend(loc=0)
plt.show()
| mit | Python |
1e9999ee1292809565dc9fc39f8ed86b2b06eebe | Fix test | robinedwards/neomodel,robinedwards/neomodel | test/test_label_install.py | test/test_label_install.py | from six import StringIO
import pytest
from neo4j.exceptions import DatabaseError
from neomodel import (
config, StructuredNode, StringProperty, install_all_labels, install_labels,
UniqueIdProperty)
from neomodel.core import db
config.AUTO_INSTALL_LABELS = False
class NoConstraintsSetup(StructuredNode):
name = StringProperty(unique_index=True)
class AbstractNode(StructuredNode):
__abstract_node__ = True
name = StringProperty(unique_index=True)
config.AUTO_INSTALL_LABELS = True
def test_labels_were_not_installed():
bob = NoConstraintsSetup(name='bob').save()
bob2 = NoConstraintsSetup(name='bob').save()
assert bob.id != bob2.id
for n in NoConstraintsSetup.nodes.all():
n.delete()
def test_install_all():
install_labels(AbstractNode)
# run install all labels
install_all_labels()
assert True
# remove constraint for above test
db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
def test_install_labels_db_property():
class SomeNode(StructuredNode):
id_ = UniqueIdProperty(db_property='id')
stdout = StringIO()
install_labels(SomeNode, quiet=False, stdout=stdout)
assert 'id' in stdout.getvalue()
# make sure that the id_ constraint doesn't exist
with pytest.raises(DatabaseError) as exc_info:
db.cypher_query(
'DROP CONSTRAINT on (n:SomeNode) ASSERT n.id_ IS UNIQUE')
assert 'No such constraint' in exc_info.exconly()
# make sure the id constraint exists and can be removed
db.cypher_query('DROP CONSTRAINT on (n:SomeNode) ASSERT n.id IS UNIQUE')
| from six import StringIO
import pytest
from neo4j.exceptions import DatabaseError
from neomodel import (
config, StructuredNode, StringProperty, install_all_labels, install_labels,
UniqueIdProperty)
from neomodel.core import db
config.AUTO_INSTALL_LABELS = False
class NoConstraintsSetup(StructuredNode):
name = StringProperty(unique_index=True)
class AbstractNode(StructuredNode):
__abstract_node__ = True
name = StringProperty(unique_index=True)
config.AUTO_INSTALL_LABELS = True
def test_labels_were_not_installed():
bob = NoConstraintsSetup(name='bob').save()
bob2 = NoConstraintsSetup(name='bob').save()
assert bob.id != bob2.id
for n in NoConstraintsSetup.nodes.all():
n.delete()
def test_install_all():
install_labels(AbstractNode)
# run install all labels
install_all_labels()
assert True
# remove constraint for above test
db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
def test_install_labels_db_property():
class SomeNode(StructuredNode):
id_ = UniqueIdProperty(db_property='id')
stdout = StringIO()
install_labels(SomeNode, quiet=False, stdout=stdout)
assert 'id_' in stdout.getvalue()
# make sure that the id_ constraint doesn't exist
with pytest.raises(DatabaseError) as exc_info:
db.cypher_query(
'DROP CONSTRAINT on (n:SomeNode) ASSERT n.id_ IS UNIQUE')
assert 'No such constraint' in exc_info.exconly()
# make sure the id constraint exists and can be removed
db.cypher_query('DROP CONSTRAINT on (n:SomeNode) ASSERT n.id IS UNIQUE')
| mit | Python |
9953fab4a88e18bb95b03626c39a88d50cd1c671 | Update issue 49 | Letractively/rdflib,Letractively/rdflib,Letractively/rdflib | test/test_sparql/leaves.py | test/test_sparql/leaves.py | import unittest
import doctest
data = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix : <tag:example.org,2007;stuff/> .
:a foaf:knows :b .
:a foaf:knows :c .
:a foaf:knows :d .
:b foaf:knows :a .
:b foaf:knows :c .
:c foaf:knows :a .
"""
query = """
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
select distinct ?person
where {
?person foaf:knows ?a .
?person foaf:knows ?b .
filter (?a != ?b) .
}
"""
#g = CG()
from StringIO import StringIO
#g.parse(StringIO(data), format='n3')
#print g.query(q).serialize('json')
from test import test_sparql
def test_leaves():
    """Return a doctest suite running test_sparql/leaves.txt against rdflib."""
    return doctest.DocFileSuite("../test/test_sparql/leaves.txt",
                                package="rdflib",
                                optionflags = doctest.ELLIPSIS,
                                globs=locals())
if __name__ == "__main__":
doctest.testfile("leaves.txt", globs=globals(),
optionflags = doctest.ELLIPSIS)
| import unittest
import doctest
data = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix : <tag:example.org,2007;stuff/> .
:a foaf:knows :b .
:a foaf:knows :c .
:a foaf:knows :d .
:b foaf:knows :a .
:b foaf:knows :c .
:c foaf:knows :a .
"""
query = """
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
select distinct ?person
where {
?person foaf:knows ?a .
?person foaf:knows ?b .
filter (?a != ?b) .
}
"""
#g = CG()
from StringIO import StringIO
#g.parse(StringIO(data), format='n3')
#print g.query(q).serialize('json')
def test_leaves():
return DocFileSuite("leaves.txt",
package="rdflib",
optionflags = doctest.ELLIPSIS,
globs=locals())
if __name__ == "__main__":
doctest.testfile("leaves.txt", globs=globals(),
optionflags = doctest.ELLIPSIS)
| bsd-3-clause | Python |
24b689510ad66019097cd8d22f3eae5504dbe6fa | add b-flag for windows | cheery/lever,cheery/lever,cheery/lever,cheery/lever | bincode/decoder.py | bincode/decoder.py | from space import *
from rpython.rlib import rfile
from rpython.rlib.rstruct import ieee
from rpython.rtyper.lltypesystem import rffi
import os
import struct
class Stream(object):
    """In-memory byte stream with little-endian integer/float readers (RPython)."""

    def __init__(self, data, index=0):
        self.data = data
        assert index >= 0
        self.index = index

    def read(self, count):
        """Return the next *count* bytes, raising Error at end of data."""
        assert count >= 0
        if self.index + count > len(self.data):
            raise Error(u"Read error: End of file")
        data = self.data[self.index:self.index+count]
        self.index += count
        return data

    def read_ubyte(self):
        # Single unsigned byte.
        return ord(self.read(1)[0])

    def read_uint(self):
        # 32-bit little-endian unsigned integer.
        return (self.read_ubyte() << 0 |
                self.read_ubyte() << 8 |
                self.read_ubyte() << 16 |
                self.read_ubyte() << 24)

    def read_integer(self):
        "http://en.wikipedia.org/wiki/Variable-length_quantity"
        # Continuation bit 0x80 set means 7 more payload bits follow.
        output = 0
        ubyte = self.read_ubyte()
        while ubyte & 0x80:
            output |= ubyte & 0x7F
            output <<= 7
            ubyte = self.read_ubyte()
        output |= ubyte
        return output

    def read_string(self):
        # Length-prefixed (u16) UTF-8 string.
        return self.read(self.read_u16()).decode('utf-8')

    def read_double(self):
        # 8-byte IEEE 754 double, little-endian (unpack_float, bigendian=False).
        data = self.read(8)
        return ieee.unpack_float(data, False)

    def read_u16(self):
        # 16-bit little-endian unsigned integer.
        return (self.read_ubyte() << 0 |
                self.read_ubyte() << 8)

    def read_u64(self):
        # Two 32-bit halves; left-to-right evaluation reads the low word first.
        return rffi.r_ulong(self.read_uint() << 0 | self.read_uint() << 32)

    def read_i64(self):
        # Reinterpret the 64-bit pattern as signed; requires a 64-bit build.
        assert rffi.sizeof(rffi.LONG) == 8
        return rffi.cast(rffi.LONG, self.read_u64())
sign_mask = 1L << 63
def open_file(pathname):
    """Read the whole file at *pathname* (binary mode) into a Stream.

    Any IOError -- whether from open, read or close -- is converted into
    a space-level Error carrying the decoded pathname and errno message.
    """
    try:
        # 'rb' is required: the decoder consumes raw bytes, and text mode
        # would mangle them on Windows.
        fd = rfile.create_file(pathname, 'rb')
        try:
            return Stream(fd.read())
        finally:
            # Close even if read() fails; a failing close() is still
            # converted by the outer handler.
            fd.close()
    except IOError as error:
        message = os.strerror(error.errno).decode('utf-8')
        raise Error(u"%s: %s" % (pathname.decode('utf-8'), message))
| from space import *
from rpython.rlib import rfile
from rpython.rlib.rstruct import ieee
from rpython.rtyper.lltypesystem import rffi
import os
import struct
class Stream(object):
    """Sequential little-endian reader over an in-memory byte string."""
    def __init__(self, data, index=0):
        self.data = data
        assert index >= 0
        self.index = index
    def read(self, count):
        # Bounds-checked read of `count` bytes; advances the cursor.
        assert count >= 0
        if self.index + count > len(self.data):
            raise Error(u"Read error: End of file")
        data = self.data[self.index:self.index+count]
        self.index += count
        return data
    def read_ubyte(self):
        # One byte as an unsigned int.
        return ord(self.read(1)[0])
    def read_uint(self):
        # 32-bit little-endian unsigned int.
        return (self.read_ubyte() << 0 |
                self.read_ubyte() << 8 |
                self.read_ubyte() << 16 |
                self.read_ubyte() << 24)
    def read_integer(self):
        "http://en.wikipedia.org/wiki/Variable-length_quantity"
        # 7 payload bits per byte; the high bit marks a continuation byte.
        output = 0
        ubyte = self.read_ubyte()
        while ubyte & 0x80:
            output |= ubyte & 0x7F
            output <<= 7
            ubyte = self.read_ubyte()
        output |= ubyte
        return output
    def read_string(self):
        # 16-bit length prefix followed by that many UTF-8 bytes.
        return self.read(self.read_u16()).decode('utf-8')
    def read_double(self):
        # 8 raw bytes decoded as an IEEE-754 double via rstruct.ieee
        # (second argument presumably selects byte order -- verify).
        data = self.read(8)
        return ieee.unpack_float(data, False)
    def read_u16(self):
        # 16-bit little-endian unsigned int.
        return (self.read_ubyte() << 0 |
                self.read_ubyte() << 8)
    def read_u64(self):
        # 64-bit little-endian unsigned int; the left operand evaluates
        # first, so the low word is consumed before the high word.
        return rffi.r_ulong(self.read_uint() << 0 | self.read_uint() << 32)
    def read_i64(self):
        # Reinterpret the u64 bit pattern as a signed machine long.
        assert rffi.sizeof(rffi.LONG) == 8
        return rffi.cast(rffi.LONG, self.read_u64())
sign_mask = 1L << 63
def open_file(pathname):
    """Read the whole file at *pathname* into a Stream.

    Bug fix: the file must be opened in binary mode ('rb').  In text
    mode ('r') Windows translates line endings, corrupting the binary
    payload handed to Stream.
    """
    try:
        fd = rfile.create_file(pathname, 'rb')
        try:
            return Stream(fd.read())
        finally:
            fd.close()
    except IOError as error:
        # Convert any IOError (open/read/close) into a space-level Error
        # with a human-readable, UTF-8 decoded message.
        message = os.strerror(error.errno).decode('utf-8')
        raise Error(u"%s: %s" % (pathname.decode('utf-8'), message))
| mit | Python |
9d30163302486b572fea985034675e94735f26b8 | bump version to 0.4.1 dev | tao12345666333/app-turbo,wecatch/app-turbo,tao12345666333/app-turbo,tao12345666333/app-turbo | turbo/__init__.py | turbo/__init__.py | #!/usr/bin/env python
#
# Copyright 2014 Wecatch
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The App turbo server and tools."""
from __future__ import absolute_import, division, print_function, with_statement
from .conf import app_config
# version is a human-readable version number.
# version_info is a four-tuple for programmatic comparison. The first
# three numbers are the components of the version number. The fourth
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "0.4.1"
version_info = (0, 4, 1, 100)
| #!/usr/bin/env python
#
# Copyright 2014 Wecatch
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The App turbo server and tools."""
from __future__ import absolute_import, division, print_function, with_statement
from .conf import app_config
# version is a human-readable version number.
# version_info is a four-tuple for programmatic comparison. The first
# three numbers are the components of the version number. The fourth
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "0.4.0"
version_info = (0, 4, 0, 0)
| apache-2.0 | Python |
dfcd9cf114aea683371c7a5d59ec8c4e2c570c2f | Update enrichment class interface | jeffakolb/Gnip-Analysis-Pipeline,jeffakolb/Gnip-Analysis-Pipeline | tweet_enricher.py | tweet_enricher.py | #!/usr/bin/env python
import importlib
import argparse
import os
import sys
# Optional speedup: ujson is API-compatible for loads/dumps.
try:
    import ujson as json
except ImportError:
    import json
# Default to no enrichments; the configuration module below may override
# this.  (Previously a stray `class_list = []` was assigned instead, so
# running without -c, or with a config lacking the attribute, crashed
# with NameError where `enrichment_class_list` is used further down.)
enrichment_class_list = []
parser = argparse.ArgumentParser()
parser.add_argument('-c','-configuration-file',dest='config_file',default=None,help='python file defining "enrichment_class_list"')
args = parser.parse_args()
prefilters = []
if args.config_file is None:
    sys.stderr.write('No configuration file specified; no enrichments will be run.\n')
else:
    # if config file not in local directory, temporarily extend path to its location
    config_file_full_path = args.config_file.split('/')
    if len(config_file_full_path) > 1:
        path = '/'.join( config_file_full_path[:-1] )
        sys.path.append( os.path.join(os.getcwd(),path) )
    else:
        sys.path.append(os.getcwd())
    # Strip a trailing '.py' extension to get the module name.  (The old
    # rstrip('.py') stripped a *character set*, mangling names such as
    # "copy.py" -> "co".)
    module_name = config_file_full_path[-1]
    if module_name.endswith('.py'):
        module_name = module_name[:-3]
    config_module = importlib.import_module(module_name)
    sys.path.pop()
    if hasattr(config_module,'enrichment_class_list'):
        enrichment_class_list = config_module.enrichment_class_list
    else:
        sys.stderr.write(args.config_file + ' does not define "enrichment_class_list"; no enrichments will be run.\n')
    if hasattr(config_module,'prefilters'):
        prefilters = config_module.prefilters
# create instances of all configured classes
class_instance_list = [class_definition() for class_definition in enrichment_class_list]
## main loop over tweets
for line in sys.stdin:
    try:
        tweet = json.loads(line)
    except ValueError:
        # Skip lines that are not valid JSON.
        continue
    # skip Tweets without body
    if 'body' not in tweet:
        continue
    # all() over an empty prefilter list is True, so unfiltered runs pass.
    if not all([prefilter(tweet) for prefilter in prefilters]):
        continue
    for cls_instance in class_instance_list:
        enriched_tweet = cls_instance.enrich(tweet)
        # the old-style enrichment base class returned None
        if enriched_tweet is not None:
            tweet = enriched_tweet
    try:
        sys.stdout.write(json.dumps(tweet) + '\n')
    except IOError:
        # account for closed output pipe
        break
| #!/usr/bin/env python
import importlib
import argparse
import os
import sys
# Optional speedup: ujson is API-compatible for loads/dumps.
try:
    import ujson as json
except ImportError:
    import json
# NOTE(review): `class_list` is never read again; it appears this was
# meant to initialise `enrichment_class_list`, which is otherwise
# undefined (raising NameError at the list comprehension below) when no
# configuration file is given or it lacks the attribute -- confirm.
class_list = []
parser = argparse.ArgumentParser()
parser.add_argument('-c','-configuration-file',dest='config_file',default=None,help='python file defining "enrichment_class_list"')
args = parser.parse_args()
prefilters = []
if args.config_file is None:
    sys.stderr.write('No configuration file specified; no enrichments will be run.\n')
else:
    # if config file not in local directory, temporarily extend path to its location
    config_file_full_path = args.config_file.split('/')
    if len(config_file_full_path) > 1:
        path = '/'.join( config_file_full_path[:-1] )
        sys.path.append( os.path.join(os.getcwd(),path) )
    else:
        sys.path.append(os.getcwd())
    # NOTE(review): rstrip('.py') strips a *character set*, not a suffix;
    # e.g. "copy.py" becomes "co".  Should be [:-3] after an endswith check.
    config_module = importlib.import_module( config_file_full_path[-1].rstrip('.py') )
    sys.path.pop()
    if hasattr(config_module,'enrichment_class_list'):
        enrichment_class_list = config_module.enrichment_class_list
    else:
        sys.stderr.write(args.config_file + ' does not define "enrichment_class_list"; no enrichments will be run.\n')
    if hasattr(config_module,'prefilters'):
        prefilters = config_module.prefilters
# create instances of all configured classes
class_instance_list = [class_definition() for class_definition in enrichment_class_list]
## main loop over tweets
for line in sys.stdin:
    try:
        tweet = json.loads(line)
    except ValueError:
        # Skip lines that are not valid JSON.
        continue
    # skip Tweets without body
    if 'body' not in tweet:
        continue
    # all() over an empty prefilter list is True, so unfiltered runs pass.
    if not all([prefilter(tweet) for prefilter in prefilters]):
        continue
    for cls_instance in class_instance_list:
        # Enrichers presumably mutate `tweet` in place (return value
        # ignored here) -- confirm against the enrichment base class.
        cls_instance.enrich(tweet)
    try:
        sys.stdout.write(json.dumps(tweet) + '\n')
    except IOError:
        # account for closed output pipe
        break
| mit | Python |
fc2cc22ad853764358bbd1209e4411a1608d0c45 | Bump version for release | twilio/twilio-python,tysonholub/twilio-python | twilio/__init__.py | twilio/__init__.py | __version_info__ = ('6', '0rc6')
__version__ = '.'.join(__version_info__)
| __version_info__ = ('6', '0rc5')
__version__ = '.'.join(__version_info__)
| mit | Python |
b1dab10e80119a358bb291e96a420911a9f634b4 | Change formatting of problem.py | jackstanek/BotBot,jackstanek/BotBot | botbot/problems.py | botbot/problems.py | """Problems a file can have"""
class Problem:
    """A detectable file problem: numeric code, description, suggested fix."""

    def __init__(self, code, message, fix):
        # Single tuple assignment keeps the three attributes together.
        self.code, self.message, self.fix = code, message, fix
every_problem = {
'PROB_DIR_NOT_WRITABLE': Problem(1,
'Directory not writable.',
'Run \'chmod 0655\' on this directory.'),
'PROB_FILE_NOT_GRPRD': Problem(2,
'File not group readable.',
'Run \'chmod 0644\' on this file.'),
'PROB_FILE_IS_FASTQ': Problem(3,
'File is a fastq file.',
'Don\'t copy fastq files, instead use \'ln -s\''),
'PROB_BROKEN_LINK': Problem(4,
'Symbolic link points to nonexistant file.',
'Delete this link.'),
'PROB_SAM_SHOULD_COMPRESS': Problem(5,
'*.sam files are large.',
'Consider compressing.'),
'PROB_SAM_AND_BAM_EXIST': Problem(6,
'The *.sam file has been compressed, but it still exists.',
'Delete the uncompressed copy.'),
'PROB_FILE_NOT_GRPEXEC': Problem(7,
'File is not group executable.',
'Run chmod 0755 on this file.'),
'PROB_DIR_NOT_ACCESSIBLE': Problem(8,
'Directory is not accessible to the group.',
'Run chmod 0755 on this directory.'),
'PROB_UNKNOWN_ERROR': Problem(9,
'Unknown error occurred.',
'Not sure what to do.'),
'PROB_OLD_LARGE_PLAINTEXT': Problem(10,
'File is an old large plaintext file.',
'Consider compressing.'),
'PROB_PATH_NOT_COMPLETE': Problem(11,
'The PATH environment variable does not contain all necessary paths.',
'Add the path to the PATH environment variable.')
}
| """Problems a file can have"""
class Problem:
    """Defines a problem that a file could have."""
    def __init__(self, code, message, fix):
        # code: numeric identifier; message: human-readable description;
        # fix: suggested remedy.
        self.code = code
        self.message = message
        self.fix = fix
every_problem = {
'PROB_DIR_NOT_WRITABLE': Problem(1, 'Directory not writable.', 'Run \'chmod 0655\' on this directory.'),
'PROB_FILE_NOT_GRPRD': Problem(2, 'File not group readable.', 'Run \'chmod 0644\' on this file.'),
'PROB_FILE_IS_FASTQ': Problem(3, 'File is a fastq file.', 'Don\'t copy fastq files, instead use \'ln -s\''),
'PROB_BROKEN_LINK': Problem(4, 'Symbolic link points to nonexistant file.', 'Delete this link.'),
'PROB_SAM_SHOULD_COMPRESS': Problem(5, '*.sam files are large.', 'Consider compressing.'),
'PROB_SAM_AND_BAM_EXIST': Problem(6, 'The *.sam file has been compressed, but it still exists.', 'Delete the uncompressed copy.'),
'PROB_FILE_NOT_GRPEXEC': Problem(7, 'File is not group executable.', 'Run chmod 0755 on this file.'),
'PROB_DIR_NOT_ACCESSIBLE': Problem(8, 'Directory is not accessible to the group.', 'Run chmod 0755 on this directory.'),
'PROB_UNKNOWN_ERROR': Problem(9, 'Unknown error occurred.', 'Not sure what to do.'),
'PROB_OLD_LARGE_PLAINTEXT': Problem(10, 'File is an old large plaintext file.', 'Consider compressing.'),
'PROB_PATH_NOT_COMPLETE': Problem(11, 'The PATH environment variable does not contain all necessary paths.', 'Add the path to the PATH environment variable.')
}
| mit | Python |
f0766ff22a7ee7c3e9a48c468f7e1f41e9d7e92c | Update repo version to v0.0.5.dev | google/vizier,google/vizier | vizier/__init__.py | vizier/__init__.py | """Init file."""
import os
import sys
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
PROTO_ROOT = os.path.realpath(os.path.join(THIS_DIR, "service"))
sys.path.append(PROTO_ROOT)
__version__ = "0.0.5.dev"
| """Init file."""
import os
import sys
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
PROTO_ROOT = os.path.realpath(os.path.join(THIS_DIR, "service"))
sys.path.append(PROTO_ROOT)
__version__ = "0.0.3.alpha"
| apache-2.0 | Python |
ccf1b6762e7f395aab9540941f232a272f2b6d22 | Update __init__.py | google/vizier,google/vizier | vizier/__init__.py | vizier/__init__.py | """Init file."""
import os
import sys
# Directory containing this __init__.py, with symlinks resolved.
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
# The service/ subpackage is appended to sys.path -- presumably so that
# generated protobuf modules there can be imported by their bare names
# (PROTO_ROOT) -- confirm against the build setup.
PROTO_ROOT = os.path.realpath(os.path.join(THIS_DIR, "service"))
sys.path.append(PROTO_ROOT)
__version__ = "0.0.7a0"
| """Init file."""
import os
import sys
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
PROTO_ROOT = os.path.realpath(os.path.join(THIS_DIR, "service"))
sys.path.append(PROTO_ROOT)
__version__ = "0.0.7"
| apache-2.0 | Python |
ad7b950bd0ea091fa5c6079fc6cfbb6300664c9c | Add new method getBindCredentials method to CR script to allow dynamically change AD password oxTrust #1197 | GluuFederation/oxExternal | cache_refresh/sample/SampleScript.py | cache_refresh/sample/SampleScript.py | # oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
# Copyright (c) 2016, Gluu
#
# Author: Yuriy Movchan
#
from org.xdi.model.custom.script.type.user import CacheRefreshType
from org.xdi.util import StringHelper, ArrayHelper
from java.util import Arrays, ArrayList
from org.gluu.oxtrust.model import GluuCustomAttribute
import java
class CacheRefresh(CacheRefreshType):
    # Jython 2 plugin for Gluu oxTrust cache refresh: the server invokes
    # the hook methods below through the CacheRefreshType interface, so
    # their names and signatures must not change.
    def __init__(self, currentTimeMillis):
        self.currentTimeMillis = currentTimeMillis
    def init(self, configurationAttributes):
        # Called once when the script is loaded; True signals success.
        print "Cache refresh. Initialization"
        print "Cache refresh. Initialized successfully"
        return True
    def destroy(self, configurationAttributes):
        # Called when the script is unloaded; True signals success.
        print "Cache refresh. Destroy"
        print "Cache refresh. Destroyed successfully"
        return True
    # Get bind credentials required to access source server
    # configId is the source server
    # configurationAttributes is java.util.Map<String, SimpleCustomProperty>
    def getBindCredentials(self, configId):
        print "Cache refresh. GetBindCredentials method"
        # NOTE(review): returning None presumably makes the server fall
        # back to the statically configured credentials -- confirm
        # against the CacheRefreshType interface docs.
        return None
    # Update user entry before persist it
    # user is org.gluu.oxtrust.model.GluuCustomPerson
    # configurationAttributes is java.util.Map<String, SimpleCustomProperty>
    def updateUser(self, user, configurationAttributes):
        print "Cache refresh. UpdateUser method"
        attributes = user.getCustomAttributes()
        # Add new attribute preferredLanguage
        attrPrefferedLanguage = GluuCustomAttribute("preferredLanguage", "en-us")
        attributes.add(attrPrefferedLanguage)
        # Add new attribute userPassword
        attrUserPassword = GluuCustomAttribute("userPassword", "test")
        attributes.add(attrUserPassword)
        # Update givenName attribute
        for attribute in attributes:
            attrName = attribute.getName()
            if (("givenname" == StringHelper.toLowerCase(attrName)) and StringHelper.isNotEmpty(attribute.getValue())):
                attribute.setValue(StringHelper.removeMultipleSpaces(attribute.getValue()) + " (updated)")
        return True
    def getApiVersion(self):
        # Script API version; version 2 is the level at which the
        # getBindCredentials hook above was introduced.
        return 2
| # oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
# Copyright (c) 2016, Gluu
#
# Author: Yuriy Movchan
#
from org.xdi.model.custom.script.type.user import CacheRefreshType
from org.xdi.util import StringHelper, ArrayHelper
from java.util import Arrays, ArrayList
from org.gluu.oxtrust.model import GluuCustomAttribute
import java
class CacheRefresh(CacheRefreshType):
def __init__(self, currentTimeMillis):
self.currentTimeMillis = currentTimeMillis
def init(self, configurationAttributes):
print "Cache refresh. Initialization"
print "Cache refresh. Initialized successfully"
return True
def destroy(self, configurationAttributes):
print "Cache refresh. Destroy"
print "Cache refresh. Destroyed successfully"
return True
# Update user entry before persist it
# user is org.gluu.oxtrust.model.GluuCustomPerson
# configurationAttributes is java.util.Map<String, SimpleCustomProperty>
def updateUser(self, user, configurationAttributes):
print "Cache refresh. UpdateUser method"
attributes = user.getCustomAttributes()
# Add new attribute preferredLanguage
attrPrefferedLanguage = GluuCustomAttribute("preferredLanguage", "en-us")
attributes.add(attrPrefferedLanguage)
# Add new attribute userPassword
attrUserPassword = GluuCustomAttribute("userPassword", "test")
attributes.add(attrUserPassword)
# Update givenName attribute
for attribute in attributes:
attrName = attribute.getName()
if (("givenname" == StringHelper.toLowerCase(attrName)) and StringHelper.isNotEmpty(attribute.getValue())):
attribute.setValue(StringHelper.removeMultipleSpaces(attribute.getValue()) + " (updated)")
return True
def getApiVersion(self):
return 1
| mit | Python |
aa1ba8b4ba437552d9431eac023add95fa717c83 | Check if fallback UnixIPParser is actually available and warn if not | ftao/python-ifcfg | src/ifcfg/__init__.py | src/ifcfg/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import platform
from . import parser, tools
__version__ = "0.19"
Log = tools.minimal_logger(__name__)
#: Module instance properties, can be mocked for testing
distro = platform.system()
def get_parser_class():
    """Pick and return the parser class matching the running platform."""
    global distro
    if distro == 'Linux':
        cls = parser.LinuxParser
        if not os.path.exists(cls.get_command()[0]):
            # `ifconfig` is missing; fall back to the `ip`-based parser.
            cls = parser.UnixIPParser
            if not os.path.exists(cls.get_command()[0]):
                Log.warning("Neither `ifconfig` nor `ip` commands are available, getting interfaces is likely to fail")
    elif distro in ('Darwin', 'MacOSX'):
        cls = parser.MacOSXParser
    elif distro == 'Windows':
        # For some strange reason, Windows will always be win32, see:
        # https://stackoverflow.com/a/2145582/405682
        cls = parser.WindowsParser
    else:
        cls = parser.NullParser
        Log.error("Unknown distro type '%s'." % distro)
    Log.debug("Distro detected as '%s'" % distro)
    Log.debug("Using '%s'" % cls)
    return cls
#: Module instance properties, can be mocked for testing
Parser = get_parser_class()
def get_parser(ifconfig=None):
    """
    Detect the proper parser class, and return it instantiated.
    Optional Arguments:
        ifconfig
            The ifconfig (stdout) to pass to the parser (used for testing).
    """
    # `Parser` is resolved at module level so tests can monkeypatch it.
    global Parser
    return Parser(ifconfig=ifconfig)
def interfaces(ifconfig=None):
    """
    Return just the parsed interfaces dictionary from the proper parser.
    """
    # Delegates to the module-level Parser (patchable in tests).
    global Parser
    return Parser(ifconfig=ifconfig).interfaces
def default_interface(ifconfig=None, route_output=None):
    """
    Return just the default interface device dictionary.
    :param ifconfig: For mocking actual command output
    :param route_output: For mocking actual command output
    """
    # Delegates to the parser's private default-interface lookup;
    # route_output is forwarded so tests can mock the route table.
    global Parser
    return Parser(ifconfig=ifconfig)._default_interface(route_output=route_output)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import platform
from . import parser, tools
__version__ = "0.19"
Log = tools.minimal_logger(__name__)
#: Module instance properties, can be mocked for testing
distro = platform.system()
def get_parser_class():
"""
Returns the parser according to the system platform
"""
global distro
if distro == 'Linux':
Parser = parser.LinuxParser
if not os.path.exists(Parser.get_command()[0]):
Parser = parser.UnixIPParser
elif distro in ['Darwin', 'MacOSX']:
Parser = parser.MacOSXParser
elif distro == 'Windows':
# For some strange reason, Windows will always be win32, see:
# https://stackoverflow.com/a/2145582/405682
Parser = parser.WindowsParser
else:
Parser = parser.NullParser
Log.error("Unknown distro type '%s'." % distro)
Log.debug("Distro detected as '%s'" % distro)
Log.debug("Using '%s'" % Parser)
return Parser
#: Module instance properties, can be mocked for testing
Parser = get_parser_class()
def get_parser(ifconfig=None):
"""
Detect the proper parser class, and return it instantiated.
Optional Arguments:
ifconfig
The ifconfig (stdout) to pass to the parser (used for testing).
"""
global Parser
return Parser(ifconfig=ifconfig)
def interfaces(ifconfig=None):
"""
Return just the parsed interfaces dictionary from the proper parser.
"""
global Parser
return Parser(ifconfig=ifconfig).interfaces
def default_interface(ifconfig=None, route_output=None):
"""
Return just the default interface device dictionary.
:param ifconfig: For mocking actual command output
:param route_output: For mocking actual command output
"""
global Parser
return Parser(ifconfig=ifconfig)._default_interface(route_output=route_output)
| bsd-3-clause | Python |
654d125a3b672f627d73b1ffade1bf5c5a850124 | Update error message | saurabh6790/frappe,adityahase/frappe,almeidapaulopt/frappe,saurabh6790/frappe,adityahase/frappe,mhbu50/frappe,yashodhank/frappe,yashodhank/frappe,frappe/frappe,frappe/frappe,saurabh6790/frappe,mhbu50/frappe,yashodhank/frappe,adityahase/frappe,vjFaLk/frappe,StrellaGroup/frappe,saurabh6790/frappe,frappe/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,mhbu50/frappe,mhbu50/frappe,yashodhank/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,vjFaLk/frappe,adityahase/frappe,vjFaLk/frappe,vjFaLk/frappe,StrellaGroup/frappe | frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.py | frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.py | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
class DashboardChartSource(Document):
def validate(self):
if frappe.session.user != "Administrator":
frappe.throw(_("Only Administrator is allowed to create Dashboard Chart Sources"))
def on_update(self):
export_to_files(record_list=[[self.doctype, self.name]], record_module=self.module, create_init=True)
| # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
class DashboardChartSource(Document):
def validate(self):
if frappe.session.user != "Administrator":
frappe.throw(_("Only Administrator allowed to create Dashboard Chart Sources"))
def on_update(self):
export_to_files(record_list=[[self.doctype, self.name]], record_module=self.module, create_init=True)
| mit | Python |
17440f58ccaef82988131a42dfc487c6e4d6129f | create userseen upon user creation | johnstcn/whatsnew,johnstcn/whatsnew | whatsnew/models.py | whatsnew/models.py | from __future__ import unicode_literals
from datetime import timedelta, datetime, date
from pytz import utc
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.postgres.fields import JSONField
from django.contrib.auth.models import User
from django.db.models import signals
class Site(models.Model):
    # A monitored website plus its checking configuration.
    name = models.CharField(max_length=200)
    base_url = models.URLField()
    url_template = models.URLField(null=True, blank=True)
    navigation = models.CharField(max_length=200, null=True, blank=True)
    # ref_xpath/ref_filter presumably locate and filter the update
    # reference on the page -- confirm against the scraper code.
    ref_xpath = models.CharField(max_length=200)
    ref_filter = models.CharField(max_length=200)
    # Minutes between checks (see next_check below).
    check_period = models.IntegerField(default=60)
    # NOTE(review): the epoch default is a naive datetime while
    # needs_check compares against the aware datetime.now(utc); with
    # USE_TZ disabled that comparison raises TypeError -- confirm settings.
    last_checked = models.DateTimeField(default=datetime.fromtimestamp(0))
    broken = models.BooleanField(default=False)
    def __str__(self):
        return self.name
    @property
    def updates(self):
        # All updates for this site, newest first.
        return self.siteupdate_set.all().order_by('-date')
    @property
    def latest_update(self):
        # Newest update, or None when the site has none yet.
        try:
            return self.updates[:1].get()
        except ObjectDoesNotExist:
            return None
    @property
    def next_check(self):
        # Timestamp at which this site becomes due for a re-check.
        return self.last_checked + timedelta(minutes=self.check_period)
    @property
    def needs_check(self):
        # True when the re-check deadline has passed.
        return self.next_check < datetime.now(utc)
class SiteUpdate(models.Model):
    # One observed update of a Site: when it was seen, at which URL, and
    # the extracted reference string.
    site = models.ForeignKey(Site, on_delete=models.CASCADE)
    date = models.DateTimeField()
    url = models.URLField()
    ref = models.CharField(max_length=200)
    def __str__(self):
        return "%s:%s" % (self.site.name, self.ref)
class Tag(models.Model):
    # Free-form label; a tag may apply to many sites and vice versa.
    site = models.ManyToManyField(Site, blank=True, related_name='site_tags')
    name = models.CharField(max_length=200)
    def __str__(self):
        return self.name
class UserSeen(models.Model):
    """Per-user JSON record of which site updates have been seen."""
    user = models.OneToOneField(User, null=False, blank=False)
    # Bug fix: a model field's first positional argument is its
    # verbose_name, so JSONField(dict) set the *class* ``dict`` as the
    # verbose name and gave the field no default at all.  ``default=dict``
    # gives each row its own fresh empty dict.
    seen = JSONField(default=dict)
def create_userseen(sender, instance, created, **kwargs):
    # post_save hook: give every newly created User an empty UserSeen row.
    # `created` is False on plain saves, so updates add no duplicates.
    if created:
        UserSeen.objects.create(user=instance, seen={})
# Register the hook for User saves only.
signals.post_save.connect(create_userseen, sender=User)
| from __future__ import unicode_literals
from datetime import timedelta, datetime, date
from pytz import utc
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.postgres.fields import JSONField
from django.contrib.auth.models import User
class Site(models.Model):
name = models.CharField(max_length=200)
base_url = models.URLField()
url_template = models.URLField(null=True, blank=True)
navigation = models.CharField(max_length=200, null=True, blank=True)
ref_xpath = models.CharField(max_length=200)
ref_filter = models.CharField(max_length=200)
check_period = models.IntegerField(default=60)
last_checked = models.DateTimeField(default=datetime.fromtimestamp(0))
broken = models.BooleanField(default=False)
def __str__(self):
return self.name
@property
def updates(self):
return self.siteupdate_set.all().order_by('-date')
@property
def latest_update(self):
try:
return self.updates[:1].get()
except ObjectDoesNotExist:
return None
@property
def next_check(self):
return self.last_checked + timedelta(minutes=self.check_period)
@property
def needs_check(self):
return self.next_check < datetime.now(utc)
class SiteUpdate(models.Model):
site = models.ForeignKey(Site, on_delete=models.CASCADE)
date = models.DateTimeField()
url = models.URLField()
ref = models.CharField(max_length=200)
def __str__(self):
return "%s:%s" % (self.site.name, self.ref)
class Tag(models.Model):
site = models.ManyToManyField(Site, blank=True, related_name='site_tags')
name = models.CharField(max_length=200)
def __str__(self):
return self.name
class UserSeen(models.Model):
user = models.OneToOneField(User, null=False, blank=False)
seen = JSONField(dict)
| mit | Python |
b8587ed7d95f50d510e671a13ed5dc32dbeff6ab | Update setuptools.py | vadimkantorov/wigwam | wigs/setuptools.py | wigs/setuptools.py | class setuptools(PythonWig):
git_uri = 'https://github.com/pypa/setuptools'
tarball_uri = 'https://github.com/pypa/setuptools/archive/v$RELEASE_VERSION$.tar.gz'
dependencies = ['pypa', 'pyparsing', 'appdirs']
last_release_version = 'v34.3.1'
| class setuptools(PythonWig):
git_uri = 'https://github.com/pypa/setuptools'
tarball_uri = 'https://github.com/pypa/setuptools/archive/v$RELEASE_VERSION$.tar.gz'
dependencies = ['pypa', 'pyparsing']
last_release_version = 'v34.3.1'
| mit | Python |
958dffb2b345bde93d6129fd0d2f58b6ccb84972 | Add logElementTree to meta_line prefixes | JBarberU/strawberry_py | util/meta_line.py | util/meta_line.py | from colors import Colors
from line import Line
import re
# Known log-line prefixes as (prefix, body-terminator, colour) triples.
# An empty terminator means "take the rest of the line"; a non-empty one
# (e.g. "s;" for Duration) truncates the body at its first occurrence
# (see MetaLine.__init__ below).
prefixes = [
    ("Start", "", Colors.GREEN),
    ("Pass", "", Colors.GREEN),
    ("Debug", "", Colors.BLUE),
    ("Error", "", Colors.RED),
    ("Fail", "", Colors.RED),
    ("Duration", "s;", Colors.BLUE),
    ("logElementTree", "", Colors.BLUE),
    ]
class MetaLine(Line):
    """A log line that carries a recognised, colourised prefix."""
    color = Colors.NORMAL
    prefix = ""
    body = ""

    def __init__(self, line):
        # Default: no prefix recognised, keep the raw line as the body.
        self.body = line
        for (name, terminator, color) in prefixes:
            match = re.compile(".*%s ?: ?" % name).match(line)
            if not match:
                continue
            self.color = color
            self.prefix = name
            # Body starts where the matched "prefix:" part ends.
            (_, start) = match.span()
            remainder = line[start:]
            if terminator != "":
                # Truncate at the terminator (if present) and normalise
                # the truncated body with a trailing newline.
                cut = remainder.find(terminator)
                if cut != -1:
                    remainder = remainder[:cut]
                remainder += "\n"
            self.body = remainder
            break

    def str(self):
        return "%s%s%s: %s" % (self.color, self.prefix, Colors.NORMAL, self.body)
| from colors import Colors
from line import Line
import re
prefixes = [
("Start", "", Colors.GREEN),
("Pass", "", Colors.GREEN),
("Debug", "", Colors.BLUE),
("Error", "", Colors.RED),
("Fail", "", Colors.RED),
("Duration", "s;", Colors.BLUE)
]
class MetaLine(Line):
color = Colors.NORMAL
prefix = ""
body = ""
def __init__(self, line):
self.body = line
for (p, end, c) in prefixes:
match = re.compile(".*%s ?: ?" % p).match(line)
if match:
self.color = c
self.prefix = p
(_, index) = match.span()
body = line[index:]
if end != "":
index = body.find(end)
if index != -1:
body = body[:index]
body += "\n"
self.body = body
break
def str(self):
return "%s%s%s: %s" % (self.color, self.prefix, Colors.NORMAL, self.body)
| mit | Python |
09cf7a81eb57dfb39634788b81a565fda3c3d377 | Use new 'test_user/admin' fixtures | RBE-Avionik/skylines,Turbo87/skylines,TobiasLohner/SkyLines,snip/skylines,RBE-Avionik/skylines,kerel-fs/skylines,Harry-R/skylines,skylines-project/skylines,kerel-fs/skylines,shadowoneau/skylines,snip/skylines,skylines-project/skylines,RBE-Avionik/skylines,Harry-R/skylines,shadowoneau/skylines,TobiasLohner/SkyLines,Harry-R/skylines,shadowoneau/skylines,RBE-Avionik/skylines,shadowoneau/skylines,skylines-project/skylines,skylines-project/skylines,Turbo87/skylines,Harry-R/skylines,kerel-fs/skylines,TobiasLohner/SkyLines,snip/skylines,Turbo87/skylines,Turbo87/skylines | tests/model/test_search.py | tests/model/test_search.py | from skylines.model import User, Club, Airport
from skylines.model.search import (
combined_search_query, escape_tokens, text_to_tokens
)
MODELS = [User, Club, Airport]
def search(text):
    # Build a combined full-text query over the User, Club and Airport
    # models for the given free-form search text.
    # Split the search text into tokens and escape them properly
    tokens = text_to_tokens(text)
    tokens = escape_tokens(tokens)
    # Create combined search query
    return combined_search_query(MODELS, tokens)
def test_tokenizer():
    """text_to_tokens splits on whitespace but keeps quoted phrases whole."""
    # Check that this does not throw exceptions
    text_to_tokens('\\')
    text_to_tokens('blabla \\')
    text_to_tokens('"')
    text_to_tokens('"blabla \\')
    # Check that the tokenizer returns expected results
    assert text_to_tokens('a b c') == ['a', 'b', 'c']
    assert text_to_tokens('a \'b c\'') == ['a', 'b c']
    assert text_to_tokens('a "b c" d') == ['a', 'b c', 'd']
    assert text_to_tokens('old "mac donald" has a FARM') == \
        ['old', 'mac donald', 'has', 'a', 'FARM']
def test_escaping():
    """'*' becomes the SQL LIKE wildcard '%'; literal '%'/'_' are escaped."""
    assert escape_tokens(['hello!']) == ['hello!']
    assert escape_tokens(['hello *!']) == ['hello %!']
    assert escape_tokens(['hello %!']) == ['hello \\%!']
    assert escape_tokens(['hello _!']) == ['hello \\_!']
def test_search(test_user, test_admin):
    """End-to-end search over the two bootstrapped fixture users."""
    # Both fixture users match 'example'; '*' acts as a wildcard inside
    # or at either end of a token.
    assert search('example').count() == 2
    assert search('user').count() == 1
    assert search('man').count() == 1
    assert search('man*er').count() == 1
    assert search('*er').count() == 2
    assert search('exa*er').count() == 2
    assert search('exp*er').count() == 0
    assert search('xyz').count() == 0
| from skylines.model import User, Club, Airport
from skylines.model.search import (
combined_search_query, escape_tokens, text_to_tokens
)
MODELS = [User, Club, Airport]
def search(text):
# Split the search text into tokens and escape them properly
tokens = text_to_tokens(text)
tokens = escape_tokens(tokens)
# Create combined search query
return combined_search_query(MODELS, tokens)
def test_tokenizer():
# Check that this does not throw exceptions
text_to_tokens('\\')
text_to_tokens('blabla \\')
text_to_tokens('"')
text_to_tokens('"blabla \\')
# Check that the tokenizer returns expected results
assert text_to_tokens('a b c') == ['a', 'b', 'c']
assert text_to_tokens('a \'b c\'') == ['a', 'b c']
assert text_to_tokens('a "b c" d') == ['a', 'b c', 'd']
assert text_to_tokens('old "mac donald" has a FARM') == \
['old', 'mac donald', 'has', 'a', 'FARM']
def test_escaping():
    """'*' becomes the SQL wildcard '%'; literal '%' and '_' are escaped."""
    assert escape_tokens(['hello!']) == ['hello!']
    assert escape_tokens(['hello *!']) == ['hello %!']
    assert escape_tokens(['hello %!']) == ['hello \\%!']
    assert escape_tokens(['hello _!']) == ['hello \\_!']
def test_search(bootstrapped_db):
    """Combined search over the bootstrapped database; '*' is a wildcard.

    NOTE(review): counts assume the bootstrap data contains exactly two
    records matching 'example' -- verify against the fixture.
    """
    assert search('example').count() == 2
    assert search('user').count() == 1
    assert search('man').count() == 1
    assert search('man*er').count() == 1
    assert search('*er').count() == 2
    assert search('exa*er').count() == 2
    assert search('exp*er').count() == 0
    assert search('xyz').count() == 0
| agpl-3.0 | Python |
1891469e5d3eb34efbe3c5feed1f7770e680120e | Fix incorrect docstring for NFA class | caleb531/automata | automata/nfa.py | automata/nfa.py | #!/usr/bin/env python3
import automata.automaton as automaton
class NFA(automaton.Automaton):
    """a nondeterministic finite automaton"""

    def validate_automaton(self):
        """returns True if this NFA is internally consistent;
        raises the appropriate exception if this NFA is invalid"""
        # Every state needs an entry in the transition function.
        undefined = [s for s in self.states if s not in self.transitions]
        if undefined:
            raise automaton.MissingStateError(
                'state {} is missing from transition function'.format(
                    undefined[0]))
        # Every transition target must itself be a known state.
        for source, paths in self.transitions.items():
            targets = set().union(*paths.values())
            invalid_states = targets.difference(self.states)
            if invalid_states:
                raise automaton.InvalidStateError(
                    'states are not valid ({})'.format(
                        ', '.join(invalid_states)))
        # The initial state must be a known state.
        if self.initial_state not in self.states:
            raise automaton.InvalidStateError(
                '{} is not a valid state'.format(self.initial_state))
        # Every final state must be a known state.
        unknown_finals = [s for s in self.final_states
                          if s not in self.states]
        if unknown_finals:
            raise automaton.InvalidStateError(
                '{} is not a valid state'.format(unknown_finals[0]))
        return True

    # TODO
    def validate_input(self, input_str):
        """returns True if the given string is accepted by this NFA;
        raises the appropriate exception if the string is not accepted"""
        return True
| #!/usr/bin/env python3
import automata.automaton as automaton
class NFA(automaton.Automaton):
    """a nondeterministic finite automaton"""
    # Fixed docstring: this class is an NFA, but it was documented as
    # "a deterministic finite automaton" (copy-paste from the DFA class).

    def validate_automaton(self):
        """returns True if this NFA is internally consistent;
        raises the appropriate exception if this NFA is invalid"""
        # Every state must appear in the transition function.
        for state in self.states:
            if state not in self.transitions:
                raise automaton.MissingStateError(
                    'state {} is missing from transition function'.format(
                        state))
        # Every transition target must itself be a known state.
        for start_state, paths in self.transitions.items():
            invalid_states = set().union(*paths.values()).difference(
                self.states)
            if invalid_states:
                raise automaton.InvalidStateError(
                    'states are not valid ({})'.format(
                        ', '.join(invalid_states)))
        # The initial state and all final states must be known states.
        if self.initial_state not in self.states:
            raise automaton.InvalidStateError(
                '{} is not a valid state'.format(self.initial_state))
        for state in self.final_states:
            if state not in self.states:
                raise automaton.InvalidStateError(
                    '{} is not a valid state'.format(state))
        return True

    # TODO
    def validate_input(self, input_str):
        """returns True if the given string is accepted by this NFA;
        raises the appropriate exception if the string is not accepted"""
        return True
| mit | Python |
440fcebdcc06c0fbb26341764a0df529cec6587d | Support for angular + API added. | gcavalcante8808/flask-wiki,gcavalcante8808/flask-wiki,gcavalcante8808/flask-wiki | flask_wiki/frontend/frontend.py | flask_wiki/frontend/frontend.py | from flask import Flask, render_template, abort, redirect, url_for
from flask.ext.script import Manager
from jinja2 import TemplateNotFound
app = Flask(__name__)
# TESTING propagates exceptions instead of rendering error pages.
app.config['TESTING'] = True
manager = Manager(app)
@app.route('/', endpoint='frontend-index')
def root():
    """Redirect the bare base URL to the wiki index page."""
    index_url = url_for('frontend-pages', page='index')
    return redirect(index_url)
@app.route('/<page>', endpoint='frontend-pages')
def show(page='index'):
    """
    Try to Deliver a page.
    :param page: name of the page
    :return: template.
    """
    # NOTE(review): every route renders the same pages/index.html shell and
    # ignores ``page`` -- presumably client-side (Angular) routing takes over
    # from there. Confirm this is intentional.
    try:
        return render_template('pages/index.html')
    except (TemplateNotFound,):
        abort(404)
if __name__ == '__main__':
    # Run the Flask-Script manager (e.g. ``python frontend.py runserver``).
    manager.run()
| from flask import Flask, render_template, abort, redirect, url_for
from flask.ext.script import Manager
from jinja2 import TemplateNotFound
app = Flask(__name__)
# TESTING propagates exceptions instead of rendering error pages.
app.config['TESTING'] = True
manager = Manager(app)
@app.route('/', endpoint='frontend-index')
def root():
    """Redirect the bare base URL to the index page route."""
    # Redirect Base URL for the real Index Page.
    return redirect(url_for('frontend-pages', page='index'))
@app.route('/<page>', endpoint='frontend-pages')
def show(page='index'):
    """
    Deliver the wiki page named in the URL, or 404 when its template
    does not exist.

    :param page: name of the page
    :return: rendered template.
    """
    template_name = 'pages/%s.html' % page
    try:
        return render_template(template_name)
    except (TemplateNotFound,):
        abort(404)
if __name__ == '__main__':
    # Run the Flask-Script manager (e.g. ``python frontend.py runserver``).
    manager.run()
| bsd-2-clause | Python |
2e324cf7f847cfc8f6c4da6aa0bc1f133405fa5d | Add test of object with a __wrapped__ attribute | etgalloway/fullqualname | tests/test_fullqualname.py | tests/test_fullqualname.py | """Tests for fullqualname."""
import decorator
import inspect
import nose
import sys
from fullqualname import fullqualname
def decorator_(f_):
    """Signature-preserving decorator built on the ``decorator`` package;
    the wrapped function exposes a ``__wrapped__`` attribute (asserted in
    test_function_wrapped_attribute below)."""
    def wrapper_(f_, *args, **kw):
        return f_(*args, **kw)
    return decorator.decorator(wrapper_, f_)
class C_(object):
    """Fixture class whose method is wrapped by ``decorator_``."""
    @decorator_
    def decorated_method_(self):
        """decorated method"""
def test_builtin_function():
    """fullqualname() of a built-in function includes the builtins module."""
    fn = len

    # Sanity-check the fixture: a genuine built-in function (not a method).
    assert type(fn).__name__ == 'builtin_function_or_method'
    assert 'built-in function' in repr(fn)

    expected = 'builtins.len' if sys.version_info >= (3, ) else '__builtin__.len'
    nose.tools.assert_equals(fullqualname(fn), expected)
def test_builtin_method():
    """A bound built-in method (non-class __self__) qualifies via its type."""
    # Test built-in method object.
    obj = [1, 2, 3].append
    # Object type is 'builtin_function_or_method'.
    assert type(obj).__name__ == 'builtin_function_or_method'
    # Object is a method.
    assert 'built-in method' in repr(obj)
    # Object __self__ attribute is not a class.
    assert not inspect.isclass(obj.__self__)
    if sys.version_info >= (3, ):
        expected = 'builtins.list.append'
    else:
        expected = '__builtin__.list.append'
    nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_classmethod():
    """A built-in method whose __self__ is a class qualifies via that class."""
    # Test built-in class method object.
    obj = object.mro
    # Object type is 'builtin_function_or_method'.
    assert type(obj).__name__ == 'builtin_function_or_method'
    # Object is a method.
    assert 'built-in method' in repr(obj)
    # Object __self__ attribute is a class.
    assert inspect.isclass(obj.__self__)
    if sys.version_info >= (3, ):
        expected = 'builtins.object.mro'
    else:
        expected = '__builtin__.object.mro'
    nose.tools.assert_equals(fullqualname(obj), expected)
# Module-level fixture used by test_function below.
def func_():
    """function"""
def test_function():
    """A plain module-level function qualifies as '<module>.<name>'."""
    # Test function object.
    obj = func_
    assert type(obj).__name__ == 'function'
    expected = __name__ + '.func_'
    nose.tools.assert_equals(fullqualname(obj), expected)
def test_function_wrapped_attribute():
    """fullqualname() must see through a __wrapped__ attribute and report
    the original method's qualified name."""
    # Test function object that has a __wrapped__ attribute.
    obj = C_.decorated_method_
    assert hasattr(obj, '__wrapped__')
    # In Python 3, object type is 'function'.
    assert type(obj).__name__ == 'function' or sys.version_info[0] == 2
    # In Python 2, object is an 'instancemethod'.
    assert type(obj).__name__ == 'instancemethod' or sys.version_info[0] == 3
    expected = __name__ + '.C_.decorated_method_'
    nose.tools.assert_equals(fullqualname(obj), expected)
| """Tests for fullqualname."""
import inspect
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
    """fullqualname() of a built-in function includes the builtins module."""
    # Test built-in function object.
    obj = len
    # Type is 'builtin_function_or_method'.
    assert type(obj).__name__ == 'builtin_function_or_method'
    # Object is a function.
    assert 'built-in function' in repr(obj)
    if sys.version_info >= (3, ):
        expected = 'builtins.len'
    else:
        expected = '__builtin__.len'
    nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_method():
    """A bound built-in method (non-class __self__) qualifies via its type."""
    # Test built-in method object.
    obj = [1, 2, 3].append
    # Object type is 'builtin_function_or_method'.
    assert type(obj).__name__ == 'builtin_function_or_method'
    # Object is a method.
    assert 'built-in method' in repr(obj)
    # Object __self__ attribute is not a class.
    assert not inspect.isclass(obj.__self__)
    if sys.version_info >= (3, ):
        expected = 'builtins.list.append'
    else:
        expected = '__builtin__.list.append'
    nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_classmethod():
    """A built-in method whose __self__ is a class qualifies via that class."""
    # Test built-in class method object.
    obj = object.mro
    # Object type is 'builtin_function_or_method'.
    assert type(obj).__name__ == 'builtin_function_or_method'
    # Object is a method.
    assert 'built-in method' in repr(obj)
    # Object __self__ attribute is a class.
    assert inspect.isclass(obj.__self__)
    if sys.version_info >= (3, ):
        expected = 'builtins.object.mro'
    else:
        expected = '__builtin__.object.mro'
    nose.tools.assert_equals(fullqualname(obj), expected)
# Module-level fixture used by test_function below.
def func_():
    """function"""
def test_function():
    """A plain module-level function qualifies as '<module>.<name>'."""
    # Test function object.
    obj = func_
    assert type(obj).__name__ == 'function'
    expected = __name__ + '.func_'
    nose.tools.assert_equals(fullqualname(obj), expected)
| bsd-3-clause | Python |
fb6e5b11492675b7a7c94424737c91acbb541d69 | REFACTOR Change order of tests. | matatk/tdd-bdd-commit,matatk/tdd-bdd-commit | tests/test_message_body.py | tests/test_message_body.py | from tddcommitmessage.messagebody import MessageBody
from tddcommitmessage import Kind
def test_message_is_wrapped_in_quotes():
    """A plain body is rendered inside double quotes with the kind prefix."""
    msg = MessageBody(Kind.red, 'Forty-two')
    assert str(msg) == '"RED Forty-two"'
def test_first_letter_capitalised():
    """The first letter of the message text is upper-cased."""
    msg = MessageBody(Kind.red, 'forty-two')
    assert str(msg) == '"RED Forty-two"'
def test_message_with_double_quote_is_wrapped_with_single():
    """Bodies containing '"' switch to single-quote wrapping."""
    msg = MessageBody(Kind.red, 'But what are "Birds"?')
    assert str(msg) == r"""'RED But what are "Birds"?'"""
| from tddcommitmessage.messagebody import MessageBody
from tddcommitmessage import Kind
def test_message_is_wrapped_in_quotes():
    """A plain body is rendered inside double quotes with the kind prefix."""
    msg = MessageBody(Kind.red, 'Forty-two')
    assert str(msg) == '"RED Forty-two"'
def test_message_with_double_quote_is_wrapped_with_single():
    """Bodies containing '"' switch to single-quote wrapping."""
    msg = MessageBody(Kind.red, 'But what are "Birds"?')
    assert str(msg) == r"""'RED But what are "Birds"?'"""
def test_first_letter_capitalised():
    """The first letter of the message text is upper-cased."""
    msg = MessageBody(Kind.red, 'forty-two')
    assert str(msg) == '"RED Forty-two"'
| mit | Python |
a1d4053365f434e2c950fab1de17a05bbf8ff7a2 | Check permission when listing messages | yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core | foodsaving/conversations/api.py | foodsaving/conversations/api.py | from django.utils.translation import ugettext_lazy as _
from rest_framework import mixins
from rest_framework.permissions import IsAuthenticated, BasePermission
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from foodsaving.conversations.models import Conversation, ConversationMessage
from foodsaving.conversations.serializers import ConversationSerializer, ConversationMessageSerializer, \
CreateConversationMessageSerializer
class IsConversationParticipant(BasePermission):
    """Allows access only when the requester participates in the
    conversation named by the ``conversation`` query parameter."""
    message = _('You are not in this conversation')

    def has_permission(self, request, view):
        # ``conversation`` arrives as a query parameter (list filtering).
        conversation_id = request.GET.get('conversation', None)

        # if they specify a conversation, check they are in it
        if conversation_id:
            conversation = Conversation.objects.filter(pk=conversation_id).first()  # Conversation or None
            if not conversation:
                return False
            return request.user in conversation.participants.all()

        # otherwise it is fine (messages will be filtered for the users conversations)
        # NOTE(review): POST bodies carry the conversation in request.data,
        # not request.GET, so creates pass through here -- confirm membership
        # is enforced in the create serializer.
        return True
class ConversationMessageViewSet(
    mixins.CreateModelMixin,
    mixins.ListModelMixin,
    GenericViewSet
):
    """
    List and create ConversationMessages; results are restricted to
    conversations the requesting user participates in.
    """

    # TODO: sort by newest first (reverse id)
    # TODO: limit to 50 or so
    # TODO: to load older messages add "before" that does a "where id < before"

    queryset = ConversationMessage.objects
    serializer_class = ConversationMessageSerializer
    permission_classes = (IsAuthenticated, IsConversationParticipant)
    filter_fields = ('conversation',)

    def get_serializer_class(self):
        # Creation uses a dedicated write-side serializer.
        if self.action == 'create':
            return CreateConversationMessageSerializer
        return self.serializer_class

    def get_queryset(self):
        # Only expose messages from the requester's own conversations.
        return self.queryset.filter(conversation__participants=self.request.user)
class RetrieveConversationMixin(object):
    """Retrieve a conversation instance."""

    def retrieve_conversation(self, request, *args, **kwargs):
        """Serialize the conversation attached to this view's target
        object, creating the conversation on first access."""
        conversation = Conversation.objects.get_or_create_for_target(
            self.get_object())
        return Response(ConversationSerializer(conversation).data)
| from rest_framework import mixins
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from foodsaving.conversations.models import Conversation, ConversationMessage
from foodsaving.conversations.serializers import ConversationSerializer, ConversationMessageSerializer, \
CreateConversationMessageSerializer
class ConversationMessageViewSet(
    mixins.CreateModelMixin,
    mixins.ListModelMixin,
    GenericViewSet
):
    """
    List and create ConversationMessages; results are restricted to
    conversations the requesting user participates in.
    """

    # TODO: sort by newest first (reverse id)
    # TODO: limit to 50 or so
    # TODO: to load older messages add "before" that does a "where id < before"

    queryset = ConversationMessage.objects
    serializer_class = ConversationMessageSerializer
    permission_classes = (IsAuthenticated,)
    filter_fields = ('conversation',)

    def get_serializer_class(self):
        # Creation uses a dedicated write-side serializer.
        if self.action == 'create':
            return CreateConversationMessageSerializer
        return self.serializer_class

    def get_queryset(self):
        # TODO: should return an error if the user is not in the conversation, not just filter messages
        return self.queryset.filter(conversation__participants=self.request.user)
class RetrieveConversationMixin(object):
    """Retrieve a conversation instance."""

    def retrieve_conversation(self, request, *args, **kwargs):
        # Lazily create the conversation for the view's target object.
        target = self.get_object()
        conversation = Conversation.objects.get_or_create_for_target(target)
        serializer = ConversationSerializer(conversation)
        return Response(serializer.data)
| agpl-3.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.