commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
c88314f935d9bf1e65c2a4f6d3eb6931fee5c4f5 | fix evaluate.py | Utils/py/BallDetection/RegressionNetwork/evaluate.py | Utils/py/BallDetection/RegressionNetwork/evaluate.py | #!/usr/bin/env python3
import argparse
import pickle
import tensorflow.keras as keras
import numpy as np
from pathlib import Path
DATA_DIR = Path(Path(__file__).parent.absolute() / "data").resolve()
MODEL_DIR = Path(Path(__file__).parent.absolute() / "models/best_models").resolve()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Evaluate the network given ')
parser.add_argument('-b', '--database-path', dest='imgdb_path', default=str(DATA_DIR / 'imgdb.pkl'),
help='Path to the image database containing test data.'
'Default is imgdb.pkl in the data folder.')
parser.add_argument('-m', '--model-path', dest='model_path', default=str(MODEL_DIR / 'fy1500_conf.h5'),
help='Store the trained model using this path. Default is fy1500_conf.h5.')
args = parser.parse_args()
res = {"x": 16, "y": 16}
with open(args.imgdb_path, "rb") as f:
mean = pickle.load(f)
print("mean=" + str(mean))
x = pickle.load(f)
y = pickle.load(f)
model = keras.models.load_model(args.model_path)
print(model.summary())
x = np.array(x)
y = np.array(y)
result = model.evaluate(x, y)
print("Evaluation result")
print("=================")
for idx in range(0, len(result)):
print(model.metrics_names[idx] + ":", result[idx])
| #!/usr/bin/env python3
import argparse
import pickle
import tensorflow.keras as keras
import numpy as np
parser = argparse.ArgumentParser(description='Train the network given ')
parser.add_argument('-b', '--database-path', dest='imgdb_path',
help='Path to the image database containing test data.'
'Default is img.db in current folder.')
parser.add_argument('-m', '--model-path', dest='model_path',
help='Store the trained model using this path. Default is model.h5.')
args = parser.parse_args()
imgdb_path = "img.db"
model_path = "model.h5"
res = {"x": 16, "y": 16}
if args.model_path is not None:
model_path = args.model_path
if args.imgdb_path is not None:
imgdb_path = args.imgdb_path
with open(imgdb_path, "rb") as f:
mean = pickle.load(f)
print("mean=" + str(mean))
x = pickle.load(f)
y = pickle.load(f)
model = keras.models.load_model(model_path)
print(model.summary())
x = np.array(x)
y = np.array(y)
result = model.evaluate(x, y)
print("Evaluation result")
print("=================")
for idx in range(0, len(result)):
print(model.metrics_names[idx] + ":", result[idx])
| Python | 0.000002 |
cc7e3e5ef9d9c59b6b1ac80826445839ede73092 | Revert mast dev host change | astroquery/mast/__init__.py | astroquery/mast/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mast.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| Python | 0 |
3ef82203daebd532af2f8effebe8fa31cec11e76 | fix error message encoding | atest/robot/tidy/TidyLib.py | atest/robot/tidy/TidyLib.py | from __future__ import with_statement
import os
from os.path import abspath, dirname, join
from subprocess import call, STDOUT
import tempfile
from robot.utils.asserts import assert_equals
ROBOT_SRC = join(dirname(abspath(__file__)), '..', '..', '..', 'src')
class TidyLib(object):
def __init__(self, interpreter):
self._cmd = [interpreter, '-m', 'robot.tidy']
self._interpreter = interpreter
def run_tidy_and_return_output(self, options, input, command=None):
"""Runs tidy in the operating system and returns output."""
options = options.split(' ') if options else []
with tempfile.TemporaryFile() as output:
rc = call(self._cmd + options + [self._path(input)], stdout=output,
stderr=STDOUT, cwd=ROBOT_SRC, shell=os.sep=='\\')
output.seek(0)
content = output.read()
if rc:
raise RuntimeError(content)
return content
def run_tidy_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
result = self.run_tidy_and_return_output(options, input)
self._assert_result(result, open(self._path(expected)).read())
def run_tidy_as_a_script_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
cmd = [self._interpreter, join(ROBOT_SRC, 'robot', 'tidy.py')]
result = self.run_tidy_and_return_output(options, input, cmd)
self._assert_result(result, open(self._path(expected)).read())
def _path(self, path):
return path.replace('/', os.sep)
def _assert_result(self, result, expected):
result = result.decode('UTF-8')
expected = expected.decode('UTF-8')
result_lines = result.splitlines()
expected_lines = expected.splitlines()
for line1, line2 in zip(result_lines, expected_lines):
msg = "\n%s\n!=\n%s\n" % (result, expected)
assert_equals(repr(unicode(line1)), repr(unicode(line2)), msg)
| from __future__ import with_statement
import os
from os.path import abspath, dirname, join
from subprocess import call, STDOUT
import tempfile
from robot.utils.asserts import assert_equals
ROBOT_SRC = join(dirname(abspath(__file__)), '..', '..', '..', 'src')
class TidyLib(object):
def __init__(self, interpreter):
self._cmd = [interpreter, '-m', 'robot.tidy']
self._interpreter = interpreter
def run_tidy_and_return_output(self, options, input, command=None):
"""Runs tidy in the operating system and returns output."""
options = options.split(' ') if options else []
with tempfile.TemporaryFile() as output:
rc = call(self._cmd + options + [self._path(input)], stdout=output,
stderr=STDOUT, cwd=ROBOT_SRC, shell=os.sep=='\\')
output.seek(0)
content = output.read()
if rc:
raise RuntimeError(content)
return content
def run_tidy_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
result = self.run_tidy_and_return_output(options, input)
self._assert_result(result, open(self._path(expected)).read())
def run_tidy_as_a_script_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
cmd = [self._interpreter, join(ROBOT_SRC, 'robot', 'tidy.py')]
result = self.run_tidy_and_return_output(options, input, cmd)
self._assert_result(result, open(self._path(expected)).read())
def _path(self, path):
return path.replace('/', os.sep)
def _assert_result(self, result, expected):
result = result.decode('UTF-8')
expected = expected.decode('UTF-8')
for line1, line2 in zip(result.splitlines(), expected.splitlines()):
msg = "\n%s\n!=\n%s\n" % (result, expected)
assert_equals(repr(unicode(line1)), repr(unicode(line2)), msg)
| Python | 0.000001 |
3904a7ac75318de08452fcea1a49b6d6e681da4e | remove debug. | lib/acli/output/route53.py | lib/acli/output/route53.py | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, print_function, unicode_literals)
from acli.output import (output_ascii_table, dash_if_none)
def output_route53_list(output_media=None, zones=None):
"""
@type output_media: unicode
@type zones: list | dict
"""
if isinstance(zones, dict):
zones = [zones]
for hosted_zone_dict in zones:
if output_media == 'console':
td = list()
td.append(['id', 'name',
'count', 'comment',
'private zone'])
for hosted_zone in hosted_zone_dict.get('HostedZones'):
zone_id = dash_if_none(hosted_zone.get('Id'))
zone_name = hosted_zone.get('Name')
record_count = str(hosted_zone.get('ResourceRecordSetCount'))
comment = hosted_zone.get('Config').get('Comment')
private_zone = str(hosted_zone.get('Config').get('PrivateZone'))
td.append([zone_id,
zone_name,
record_count,
comment,
private_zone])
output_ascii_table(table_title="Route53 Zones",
table_data=td,
inner_heading_row_border=True)
exit(0)
def get_record_set_values(resource_records):
"""
@type resource_records: list
"""
out = list()
for record in resource_records:
out.append(record.get('Value'))
return out
def output_route53_info(output_media=None, zone=None, record_sets=None):
"""
@type output_media: unicode
@type zone: zone
@type record_sets: ResourceRecordSets
"""
if output_media == 'console':
td = list()
td.append(['id', zone['HostedZone']['Id']])
td.append(['Name', zone['HostedZone']['Name']])
td.append(['Count', str(zone['HostedZone']['ResourceRecordSetCount'])])
td.append(['Comment', zone['HostedZone']['Config']['Comment']])
td.append(['Private', str(zone['HostedZone']['Config']['PrivateZone'])])
td.append(['Name Servers', "\n".join(zone['DelegationSet']['NameServers'])])
td.append(['Records', ' '])
td.append(['{0}'.format("-" * 12), '{0}'.format("-" * 20)])
for record_set in record_sets['ResourceRecordSets']:
td.append(['Name', record_set['Name']])
td.append([' Type', record_set['Type']])
td.append([' TTL', str(record_set['TTL'])])
td.append([' Values', "\n".join(get_record_set_values(record_set['ResourceRecords']))])
output_ascii_table(table_title="Zone Info",
table_data=td)
exit(0)
| # -*- coding: utf-8 -*-
from __future__ import (absolute_import, print_function, unicode_literals)
from acli.output import (output_ascii_table, dash_if_none)
def output_route53_list(output_media=None, zones=None):
"""
@type output_media: unicode
@type zones: list | dict
"""
if isinstance(zones, dict):
zones = [zones]
for hosted_zone_dict in zones:
if output_media == 'console':
td = list()
td.append(['id', 'name',
'count', 'comment',
'private zone'])
for hosted_zone in hosted_zone_dict.get('HostedZones'):
zone_id = dash_if_none(hosted_zone.get('Id'))
zone_name = hosted_zone.get('Name')
record_count = str(hosted_zone.get('ResourceRecordSetCount'))
comment = hosted_zone.get('Config').get('Comment')
private_zone = str(hosted_zone.get('Config').get('PrivateZone'))
td.append([zone_id,
zone_name,
record_count,
comment,
private_zone])
output_ascii_table(table_title="Route53 Zones",
table_data=td,
inner_heading_row_border=True)
exit(0)
def get_record_set_values(resource_records):
"""
@type resource_records: list
"""
print(resource_records.__class__.__name__)
out = list()
for record in resource_records:
out.append(record.get('Value'))
return out
def output_route53_info(output_media=None, zone=None, record_sets=None):
"""
@type output_media: unicode
@type zone: zone
@type record_sets: ResourceRecordSets
"""
if output_media == 'console':
td = list()
td.append(['id', zone['HostedZone']['Id']])
td.append(['Name', zone['HostedZone']['Name']])
td.append(['Count', str(zone['HostedZone']['ResourceRecordSetCount'])])
td.append(['Comment', zone['HostedZone']['Config']['Comment']])
td.append(['Private', str(zone['HostedZone']['Config']['PrivateZone'])])
td.append(['Name Servers', "\n".join(zone['DelegationSet']['NameServers'])])
td.append(['Records', ' '])
td.append(['{0}'.format("-" * 12), '{0}'.format("-" * 20)])
for record_set in record_sets['ResourceRecordSets']:
td.append(['Name', record_set['Name']])
td.append([' Type', record_set['Type']])
td.append([' TTL', str(record_set['TTL'])])
td.append([' Values', "\n".join(get_record_set_values(record_set['ResourceRecords']))])
output_ascii_table(table_title="Zone Info",
table_data=td)
exit(0)
| Python | 0 |
fe67796130854d83b3dfaa085d67d9eabe35a155 | Allow getdate for Energy Point Rule condition | frappe/social/doctype/energy_point_rule/energy_point_rule.py | frappe/social/doctype/energy_point_rule/energy_point_rule.py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.cache_manager
from frappe.model.document import Document
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import create_energy_points_log, revert
class EnergyPointRule(Document):
def on_update(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def on_trash(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def apply(self, doc):
whitelisted_globals = {
"getdate": getdate
}
if frappe.safe_eval(self.condition, whitelisted_globals, {'doc': doc.as_dict()}):
multiplier = 1
if self.multiplier_field:
multiplier = doc.get(self.multiplier_field) or 1
points = round(self.points * multiplier)
reference_doctype = doc.doctype
reference_name = doc.name
user = doc.get(self.user_field)
rule = self.name
# incase of zero as result after roundoff
if not points: return
# if user_field has no value
if not user or user == 'Administrator': return
try:
create_energy_points_log(reference_doctype, reference_name, {
'points': points,
'user': user,
'rule': rule
})
except Exception as e:
frappe.log_error(frappe.get_traceback(), 'apply_energy_point')
def process_energy_points(doc, state):
if (frappe.flags.in_patch
or frappe.flags.in_install
or not is_energy_point_enabled()):
return
old_doc = doc.get_doc_before_save()
# check if doc has been cancelled
if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
return revert_points_for_cancelled_doc(doc)
for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,
dict(reference_doctype = doc.doctype, enabled=1)):
frappe.get_doc('Energy Point Rule', d.get('name')).apply(doc)
def revert_points_for_cancelled_doc(doc):
energy_point_logs = frappe.get_all('Energy Point Log', {
'reference_doctype': doc.doctype,
'reference_name': doc.name,
'type': 'Auto'
})
for log in energy_point_logs:
revert(log.name, _('Reference document has been cancelled'))
def get_energy_point_doctypes():
return [
d.reference_doctype for d in frappe.get_all('Energy Point Rule',
['reference_doctype'], {'enabled': 1})
]
| # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.cache_manager
from frappe.model.document import Document
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import create_energy_points_log, revert
class EnergyPointRule(Document):
def on_update(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def on_trash(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def apply(self, doc):
if frappe.safe_eval(self.condition, None, {'doc': doc.as_dict()}):
multiplier = 1
if self.multiplier_field:
multiplier = doc.get(self.multiplier_field) or 1
points = round(self.points * multiplier)
reference_doctype = doc.doctype
reference_name = doc.name
user = doc.get(self.user_field)
rule = self.name
# incase of zero as result after roundoff
if not points: return
# if user_field has no value
if not user or user == 'Administrator': return
try:
create_energy_points_log(reference_doctype, reference_name, {
'points': points,
'user': user,
'rule': rule
})
except Exception as e:
frappe.log_error(frappe.get_traceback(), 'apply_energy_point')
def process_energy_points(doc, state):
if (frappe.flags.in_patch
or frappe.flags.in_install
or not is_energy_point_enabled()):
return
old_doc = doc.get_doc_before_save()
# check if doc has been cancelled
if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
return revert_points_for_cancelled_doc(doc)
for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,
dict(reference_doctype = doc.doctype, enabled=1)):
frappe.get_doc('Energy Point Rule', d.get('name')).apply(doc)
def revert_points_for_cancelled_doc(doc):
energy_point_logs = frappe.get_all('Energy Point Log', {
'reference_doctype': doc.doctype,
'reference_name': doc.name,
'type': 'Auto'
})
for log in energy_point_logs:
revert(log.name, _('Reference document has been cancelled'))
def get_energy_point_doctypes():
return [
d.reference_doctype for d in frappe.get_all('Energy Point Rule',
['reference_doctype'], {'enabled': 1})
]
| Python | 0 |
6236ee8344add06f6adbfef9df5ab224ea19b1fe | Remove unused import | moocng/courses/backends.py | moocng/courses/backends.py | # Copyright (C) 2010-2012 Yaco Sistemas (http://www.yaco.es)
# Copyright (C) 2009 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.contrib.auth.models import Group
from django.contrib.auth.models import SiteProfileNotAvailable
from django.core.exceptions import ObjectDoesNotExist
from djangosaml2.backends import Saml2Backend
from moocng.teacheradmin.models import Invitation
class Saml2BackendExtension(Saml2Backend):
# This function is called when a new user is created
# we will check here if a pending teacher invitation
# exists for this user
def configure_user(self, user, attributes, attribute_mapping):
"""Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
"""
user.set_unusable_password()
user = self.update_user(user, attributes, attribute_mapping,
force_save=True)
user_pendings = Invitation.objects.filter(email=user.email)
for user_pending in user_pendings:
user_pending.course.teachers.add(user)
user_pending.delete()
return user
def update_user(self, user, attributes, attribute_mapping,
force_save=False):
"""Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set, otherwise it will try to set
it in the profile object.
"""
if not attribute_mapping:
return user
try:
profile = user.get_profile()
except ObjectDoesNotExist:
profile = None
except SiteProfileNotAvailable:
profile = None
user_modified = False
profile_modified = False
for saml_attr, django_attrs in attribute_mapping.items():
try:
for attr in django_attrs:
if hasattr(user, attr):
if attr == 'groups':
user.groups = Group.objects.filter(name__in=attributes[saml_attr])
else:
setattr(user, attr, attributes[saml_attr][0])
user_modified = True
elif profile is not None and hasattr(profile, attr):
setattr(profile, attr, attributes[saml_attr][0])
profile_modified = True
except KeyError:
# the saml attribute is missing
pass
if user_modified or force_save:
user.save()
if profile is not None and (profile_modified or force_save):
profile.save()
return user
| # Copyright (C) 2010-2012 Yaco Sistemas (http://www.yaco.es)
# Copyright (C) 2009 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.contrib.auth.models import Group
from django.contrib.auth.models import SiteProfileNotAvailable
from django.core.exceptions import ObjectDoesNotExist
from djangosaml2.backends import Saml2Backend
from moocng.courses.models import Course
from moocng.teacheradmin.models import Invitation
class Saml2BackendExtension(Saml2Backend):
# This function is called when a new user is created
# we will check here if a pending teacher invitation
# exists for this user
def configure_user(self, user, attributes, attribute_mapping):
"""Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
"""
user.set_unusable_password()
user = self.update_user(user, attributes, attribute_mapping,
force_save=True)
user_pendings = Invitation.objects.filter(email=user.email)
for user_pending in user_pendings:
user_pending.course.teachers.add(user)
user_pending.delete()
return user
def update_user(self, user, attributes, attribute_mapping,
force_save=False):
"""Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set, otherwise it will try to set
it in the profile object.
"""
if not attribute_mapping:
return user
try:
profile = user.get_profile()
except ObjectDoesNotExist:
profile = None
except SiteProfileNotAvailable:
profile = None
user_modified = False
profile_modified = False
for saml_attr, django_attrs in attribute_mapping.items():
try:
for attr in django_attrs:
if hasattr(user, attr):
if attr == 'groups':
user.groups = Group.objects.filter(name__in=attributes[saml_attr])
else:
setattr(user, attr, attributes[saml_attr][0])
user_modified = True
elif profile is not None and hasattr(profile, attr):
setattr(profile, attr, attributes[saml_attr][0])
profile_modified = True
except KeyError:
# the saml attribute is missing
pass
if user_modified or force_save:
user.save()
if profile is not None and (profile_modified or force_save):
profile.save()
return user
| Python | 0.000001 |
6a2a0667179a78e2c56dff551b0d010db6ed0150 | fix imports | chainerrl/initializers/__init__.py | chainerrl/initializers/__init__.py | from chainerrl.initializers.constant import VarianceScalingConstant # NOQA
from chainerrl.initializers.normal import LeCunNormal # NOQA
from chainerrl.initializers.uniform import VarianceScalingUniform # NOQA
| from chainerrl.initializers.constant import VarianceScalingConstant # NOQA
from chainerrl.initializers.normal import LeCunNormal # NOQA
| Python | 0.000002 |
fb19411797ae7ac00e022a9409459c0f42969a91 | Remove unused code | backend/api/helpers/i18n.py | backend/api/helpers/i18n.py | from typing import Optional
from django.conf import settings
from django.utils import translation
def make_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
return getattr(root, field_name).localize(language)
return resolver
| from typing import Optional
from django.conf import settings
from django.utils import translation
def make_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
return getattr(root, field_name).localize(language)
return resolver
def make_dict_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
field = getattr(root, field_name)
return field.get(language, field["en"])
return resolver
| Python | 0.000006 |
2af841027c17256964ce92b0459d32a9c210e357 | remove unneeded check | mythril/analysis/solver.py | mythril/analysis/solver.py | from z3 import Solver, simplify, sat, unknown, FuncInterp, UGE
from mythril.exceptions import UnsatError
from mythril.laser.ethereum.transaction.transaction_models import (
ContractCreationTransaction,
)
import logging
def get_model(constraints):
s = Solver()
s.set("timeout", 100000)
for constraint in constraints:
s.add(constraint)
result = s.check()
if result == sat:
return s.model()
elif result == unknown:
logging.info("Timeout encountered while solving expression using z3")
raise UnsatError
def pretty_print_model(model):
ret = ""
for d in model.decls():
if type(model[d]) == FuncInterp:
condition = model[d].as_list()
ret += "%s: %s\n" % (d.name(), condition)
continue
try:
condition = "0x%x" % model[d].as_long()
except:
condition = str(simplify(model[d]))
ret += "%s: %s\n" % (d.name(), condition)
return ret
def get_transaction_sequence(global_state, constraints):
"""
Generate concrete transaction sequence
:param global_state: GlobalState to generate transaction sequence for
:param constraints: list of constraints used to generate transaction sequence
:param caller: address of caller
:param max_callvalue: maximum callvalue for a transaction
"""
transaction_sequence = global_state.world_state.transaction_sequence
# gaslimit & gasprice don't exist yet
tx_template = {
"calldata": None,
"call_value": None,
"caller": "0xCA11EDEADBEEF37E636E6CA11EDEADBEEFCA11ED",
}
txs = {}
creation_tx_ids = []
tx_constraints = constraints.copy()
for transaction in transaction_sequence:
tx_id = str(transaction.id)
if not isinstance(transaction, ContractCreationTransaction):
# Constrain calldatasize
max_calldatasize = 5000
if max_calldatasize != None:
tx_constraints.append(
UGE(max_calldatasize, transaction.call_data.calldatasize)
)
txs[tx_id] = tx_template.copy()
else:
creation_tx_ids.append(tx_id)
model = get_model(tx_constraints)
for transaction in transaction_sequence:
if not isinstance(transaction, ContractCreationTransaction):
tx_id = str(transaction.id)
txs[tx_id]["calldata"] = "0x" + "".join(
[
hex(b)[2:] if len(hex(b)) % 2 == 0 else "0" + hex(b)[2:]
for b in transaction.call_data.concretized(model)
]
)
for d in model.decls():
name = d.name()
logging.warn(d.name)
if "call_value" in name:
tx_id = name.replace("call_value", "")
if not tx_id in creation_tx_ids:
call_value = "0x%x" % model[d].as_long()
txs[tx_id]["call_value"] = call_value
if "caller" in name:
# caller is 'creator' for creation transactions
tx_id = name.replace("caller", "")
caller = "0x" + ("%x" % model[d].as_long()).zfill(64)
txs[tx_id]["caller"] = caller
return txs
| from z3 import Solver, simplify, sat, unknown, FuncInterp, UGE
from mythril.exceptions import UnsatError
from mythril.laser.ethereum.transaction.transaction_models import (
ContractCreationTransaction,
)
import logging
def get_model(constraints):
s = Solver()
s.set("timeout", 100000)
for constraint in constraints:
s.add(constraint)
result = s.check()
if result == sat:
return s.model()
elif result == unknown:
logging.info("Timeout encountered while solving expression using z3")
raise UnsatError
def pretty_print_model(model):
ret = ""
for d in model.decls():
if type(model[d]) == FuncInterp:
condition = model[d].as_list()
ret += "%s: %s\n" % (d.name(), condition)
continue
try:
condition = "0x%x" % model[d].as_long()
except:
condition = str(simplify(model[d]))
ret += "%s: %s\n" % (d.name(), condition)
return ret
def get_transaction_sequence(global_state, constraints):
"""
Generate concrete transaction sequence
:param global_state: GlobalState to generate transaction sequence for
:param constraints: list of constraints used to generate transaction sequence
:param caller: address of caller
:param max_callvalue: maximum callvalue for a transaction
"""
transaction_sequence = global_state.world_state.transaction_sequence
# gaslimit & gasprice don't exist yet
tx_template = {
"calldata": None,
"call_value": None,
"caller": "0xCA11EDEADBEEF37E636E6CA11EDEADBEEFCA11ED",
}
txs = {}
creation_tx_ids = []
tx_constraints = constraints.copy()
for transaction in transaction_sequence:
tx_id = str(transaction.id)
if not isinstance(transaction, ContractCreationTransaction):
# Constrain calldatasize
max_calldatasize = 5000
if max_calldatasize != None:
tx_constraints.append(
UGE(max_calldatasize, transaction.call_data.calldatasize)
)
txs[tx_id] = tx_template.copy()
else:
creation_tx_ids.append(tx_id)
model = get_model(tx_constraints)
for transaction in transaction_sequence:
if not isinstance(transaction, ContractCreationTransaction):
tx_id = str(transaction.id)
txs[tx_id]["calldata"] = "0x" + "".join(
[
hex(b)[2:] if len(hex(b)) % 2 == 0 else "0" + hex(b)[2:]
for b in transaction.call_data.concretized(model)
]
)
for d in model.decls():
name = d.name()
if "call_value" in name:
tx_id = name.replace("call_value", "")
if not tx_id in creation_tx_ids:
call_value = "0x%x" % model[d].as_long()
txs[tx_id]["call_value"] = call_value
if "caller" in name:
tx_id = name.replace("caller", "")
if not tx_id in creation_tx_ids:
caller = "0x" + ("%x" % model[d].as_long()).zfill(64)
txs[tx_id]["caller"] = caller
return txs
| Python | 0.000001 |
4563e383962690cc196f4551f217d488501b660e | support for mysql as well | bin/count_users_in_rooms.py | bin/count_users_in_rooms.py | import sys
import os
import yaml
import redis
dino_env = sys.argv[1]
dino_home = sys.argv[2]
if dino_home is None:
raise RuntimeError('need environment variable DINO_HOME')
if dino_env is None:
raise RuntimeError('need environment variable DINO_ENVIRONMENT')
def load_secrets_file(config_dict: dict) -> dict:
from string import Template
import ast
secrets_path = dino_home + '/secrets/%s.yaml' % dino_env
# first substitute environment variables, which holds precedence over the yaml config (if it exists)
template = Template(str(config_dict))
template = template.safe_substitute(os.environ)
if os.path.isfile(secrets_path):
try:
secrets = yaml.safe_load(open(secrets_path))
except Exception as e:
raise RuntimeError("Failed to open secrets configuration {0}: {1}".format(secrets_path, str(e)))
template = Template(template)
template = template.safe_substitute(secrets)
return ast.literal_eval(template)
config = yaml.safe_load(open(dino_home + '/dino.yaml'))[dino_env]
config = load_secrets_file(config)
dbtype = config['database']['type']
the_count = 0
if dbtype == 'rdbms':
dbdriver = config['database']['driver']
dbname = config['database']['db']
dbhost = config['database']['host']
dbport = config['database']['port']
dbuser = config['database']['user']
dbpass = config['database']['password']
if dbdriver.startswith('postgres'):
import psycopg2
conn = psycopg2.connect("dbname='%s' user='%s' host='%s' port='%s' password='%s'" % (
dbname, dbuser, dbhost, dbport, dbpass)
)
cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]
if dbtype == 'rdbms' and dbdriver.startswith('mysql'):
import MySQLdb
conn = MySQLdb.connect(passwd=dbpass, db=dbname, user=dbuser, host=dbhost, port=dbport)
cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]
r_host, r_port = config['cache']['host'].split(':')
r_db = config['cache']['db']
r_server = redis.Redis(host=r_host, port=r_port, db=r_db)
r_server.set('users:online:inrooms', the_count)
| import sys
import os
import yaml
import redis
import psycopg2
dino_env = sys.argv[1]
dino_home = sys.argv[2]
if dino_home is None:
raise RuntimeError('need environment variable DINO_HOME')
if dino_env is None:
raise RuntimeError('need environment variable DINO_ENVIRONMENT')
def load_secrets_file(config_dict: dict) -> dict:
from string import Template
import ast
secrets_path = dino_home + '/secrets/%s.yaml' % dino_env
# first substitute environment variables, which holds precedence over the yaml config (if it exists)
template = Template(str(config_dict))
template = template.safe_substitute(os.environ)
if os.path.isfile(secrets_path):
try:
secrets = yaml.safe_load(open(secrets_path))
except Exception as e:
raise RuntimeError("Failed to open secrets configuration {0}: {1}".format(secrets_path, str(e)))
template = Template(template)
template = template.safe_substitute(secrets)
return ast.literal_eval(template)
config = yaml.safe_load(open(dino_home + '/dino.yaml'))[dino_env]
config = load_secrets_file(config)
dbtype = config['database']['type']
if dbtype == 'rdbms':
dbname = config['database']['db']
dbhost = config['database']['host']
dbport = config['database']['port']
dbuser = config['database']['user']
dbpass = config['database']['password']
try:
conn = psycopg2.connect("dbname='%s' user='%s' host='%s' port='%s' password='%s'" % (
dbname, dbuser, dbhost, dbport, dbpass)
)
except:
raise RuntimeError('could not connect to db')
cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]
r_host, r_port = config['cache']['host'].split(':')
r_db = config['cache']['db']
r_server = redis.Redis(host=r_host, port=r_port, db=r_db)
r_server.set('users:online:inrooms', the_count)
| Python | 0 |
dbcbd5aed6abcc58a65d63653b7a3f41b429369b | clean up and format | bin/spritz.py | bin/spritz.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import sys
import time
import math
import fileinput
def to_unicode(text, encoding='utf-8'):
"""Convert ``text`` to unicode using ``encoding``.
:param text: string object to convert to ``unicode``
:type text: ``str`` or ``unicode``
:returns: string object as unicode object
:rytpe: ``unicode``
"""
if isinstance(text, basestring):
if not isinstance(text, unicode):
text = unicode(text, encoding)
return text
##################################################
# ORP Functions #
##################################################
def get_orp(integer):
"""Get Optimal Reading Position (ORP) given ``integer``.
ORP is slightly left of center.
:param integer: length of string object to calculate ORP
:type integer: ``integer``
:returns: value of ORP
:rytpe: ``integer``
"""
percentage = 0.45
return int(math.ceil(integer * percentage))
def calculate_spaces(word, max_length):
"""Determine buffer spaces for ``word`` given the ``max_length``.
:param word: string object for calculation
:type word: ``unicode``
:param max_length: value of longest word in full text
:type max_length: ``integer``
:returns: word's ORP, number of prefix spaces, and number of post spaces
:rytpe: ``tuple`` of ``integers``
"""
max_orp = get_orp(max_length)
orp = get_orp(len(word))
prefix_space = (max_orp - orp)
postfix_space = (max_length - len(word) - prefix_space)
return (orp, prefix_space, postfix_space)
def find_max(reading):
"""
Find longest word in ``reading``.
:param reading: the full string object to be spritzed
:type reading: ``unicode``
:returns: number of characters in the longest word
:rytpe: ``integer``
"""
reading = sorted(reading, key=len, reverse=True)
return len(reading[0])
##################################################
# Output Functions #
##################################################
def insert_color(word, orp):
"""Change color of the ORP letter in ``word`` to red.
:param word: the word to be color-coded
:type word: ``unicode``
:param orp: the index of the ORP letter
:type orp: ``integer``
:returns: word with ORP letter in red
:rytpe: ``unicode``
"""
color_red = "\033[91m"
color_restore = "\033[0m"
chars = list(word)
chars.insert(orp, color_red)
chars.insert((orp + 2), color_restore)
return ''.join(chars)
def print_word(word, orp_config):
"""Pretty print `word` with spritz color formatting
:param word: the word to be color-coded
:type word: ``unicode``
:param orp_config: formatting data for ``word``
:type orp_config: ``tuple`` of ``integers``
:returns: Nothing. Prints to console
:rytpe: ``None``
"""
(orp, prefix, postfix) = orp_config
orp = orp - 1 # change for Python list indexing
print_string = (" " * prefix) + insert_color(word, orp) + (" " * postfix)
print("\r{}".format(print_string))
sys.stdout.flush()
##################################################
# Key Functions #
##################################################
def parse_article(article):
"""
Clean up input ``article`` and insert appropriate pauses.
:param article: the full string object to be spritzed
:type article: ``unicode``
:returns: words in ``article``
:rytpe: ``list``
"""
remove = (',', '.', '!', '?', '-', ';')
for char in remove:
article = article.replace(char, " <pause> ")
article = article.strip()
article = article.replace("\n", " <pause> <pause> ")
return article.split()
def spritz(wpm, reading):
""""Spritz" the ``reading``.
:param wpm: words per minute
:type wpm: ``integer``
:param reading: the full string object to be spritzed
:type reading: ``unicode``
:returns: Nothing. Prints to console
:rytpe: ``None``
"""
sleep_interval = (60.0 / wpm)
max_length = find_max(reading)
for word in reading:
if word == "<pause>":
time.sleep(sleep_interval * 3)
continue
word_sleep_interval = 0.01 * len(word)
time.sleep(sleep_interval + word_sleep_interval)
orp_config = calculate_spaces(word, max_length)
#print(word)
print_word(word, orp_config)
##################################################
# Main Function #
##################################################
def main():
"""Parse command line args and spritz text.
"""
if len(sys.argv) >= 2 and sys.argv[1]:
try:
wpm = int(sys.argv[1])
except ValueError:
print ("<wpm> need to be an integer")
exit(1)
else:
wpm = 250
article = ""
for line in fileinput.input(sys.argv[2:]):
article += line
reading = parse_article(article)
spritz(wpm, reading)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import fileinput
import time
import math
def ORP(n):
percentage = 0.45
return int(math.ceil(n * 0.45))
def calculate_spaces(word, maxLength):
maxOrp = ORP(maxLength) # index + 1
orp = ORP(len(word)) # index + 1
prefixSpace = maxOrp - orp
postfixSpace = maxLength - len(word) - prefixSpace
return (orp, prefixSpace, postfixSpace)
def find_max(reading):
reading = sorted(reading, key=lambda x: len(x), reverse=True)
return len(reading[0])
def parse_article(article):
"""
argument: article::string
returns: [ word::string | sign::string ]
word :: single word
sign :: <pause>
"""
charToRemoved = ",.!\xad"
for eachChar in charToRemoved:
article = article.replace(eachChar, "")
article = article.strip()
article = article.replace("\n", " <pause> ")
return article.split()
def print_word(word, orpConfig):
def insert_color(word, orpIndex):
colorCode_red = "\033[91m"
colorCode_restore = "\033[0m"
return word[0:orpIndex] + colorCode_red + word[orpIndex] + colorCode_restore + word[orpIndex+1:]
orp, prefix, postfix = orpConfig
stringToPrint = " " * prefix + insert_color(word, orp-1) + " " * postfix
print ("\r%s" % stringToPrint, end='')
sys.stdout.flush()
def spritz(wpm, reading):
"""
function to perform "spritz"
"""
secondPerWord = 60.0 / wpm
sleepInterval = secondPerWord
maxLength = find_max(reading)
for word in reading:
if word == "<pause>":
time.sleep(sleepInterval * 10)
continue
wordSleepInterval = 0.01 * len(word)
time.sleep(sleepInterval + wordSleepInterval)
orpConfig = calculate_spaces(word, maxLength)
print_word(word, orpConfig)
def main(wpm, article):
"""
Main function
"""
reading = parse_article(article)
spritz(wpm, reading)
if __name__ == '__main__':
if len(sys.argv) >= 2 and sys.argv[1]:
try:
wpm = int(sys.argv[1])
except:
print ("<wpm> need to be an integer")
exit(1)
else:
wpm = 250
article = ""
for line in fileinput.input(sys.argv[2:]):
article += line
main(wpm, article)
| Python | 0.0002 |
952a342cc160f7b994e7a06c7836d1319414a30e | Fix bitcointxn.disassemble so it actually works | bitcointxn.py | bitcointxn.py | from bitcoinvarlen import varlenDecode, varlenEncode
from util import dblsha
from struct import pack, unpack
_nullprev = b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'
class Txn:
def __init__(self, data=None):
if data:
self.data = data
self.idhash()
@classmethod
def new(cls):
o = cls()
o.version = 1
o.inputs = []
o.outputs = []
o.locktime = 0
return o
def setCoinbase(self, sigScript, seqno = 0xffffffff):
self.inputs = ( ((_nullprev, 0xffffffff), sigScript, seqno), )
def addInput(self, prevout, sigScript, seqno = 0xffffffff):
self.inputs.append( (prevout, sigScript, seqno) )
def addOutput(self, amount, pkScript):
self.outputs.append( (amount, pkScript) )
def disassemble(self):
self.version = unpack('<L', self.data[:4])[0]
(inputCount, data) = varlenDecode(self.data[4:])
inputs = []
for i in range(inputCount):
prevout = (data[:32], unpack('<L', data[32:36])[0])
(sigScriptLen, data) = varlenDecode(data[36:])
sigScript = data[:sigScriptLen]
seqno = unpack('<L', data[sigScriptLen:sigScriptLen + 4])[0]
data = data[sigScriptLen + 4:]
inputs.append( (prevout, sigScript, seqno) )
self.inputs = inputs
(outputCount, data) = varlenDecode(data)
outputs = []
for i in range(outputCount):
amount = unpack('<Q', data[:8])[0]
(pkScriptLen, data) = varlenDecode(data[8:])
pkScript = data[:pkScriptLen]
data = data[pkScriptLen:]
outputs.append( (amount, pkScript) )
self.outputs = outputs
assert len(data) == 4
self.locktime = unpack('<L', data)[0]
def isCoinbase(self):
return len(self.inputs) == 1 and self.inputs[0][1] == 0xffffffff and self.input[0][0] == _nullprev
def assemble(self):
data = pack('<L', self.version)
inputs = self.inputs
data += varlenEncode(len(inputs))
for prevout, sigScript, seqno in inputs:
data += prevout[0] + pack('<L', prevout[1])
data += varlenEncode(len(sigScript)) + sigScript
data += pack('<L', seqno)
outputs = self.outputs
data += varlenEncode(len(outputs))
for amount, pkScript in inputs:
data += pack('<Q', amount)
data += varlenEncode(len(pkScript)) + pkScript
data += pack('<L', self.locktime)
self.data = data
self.idhash()
def idhash(self):
self.txid = dblsha(self.data)
| from bitcoinvarlen import varlenDecode, varlenEncode
from util import dblsha
from struct import pack, unpack
_nullprev = b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'
class Txn:
def __init__(self, data=None):
if data:
self.data = data
self.idhash()
@classmethod
def new(cls):
o = cls()
o.version = 1
o.inputs = []
o.outputs = []
o.locktime = 0
return o
def setCoinbase(self, sigScript, seqno = 0xffffffff):
self.inputs = ( ((_nullprev, 0xffffffff), sigScript, seqno), )
def addInput(self, prevout, sigScript, seqno = 0xffffffff):
self.inputs.append( (prevout, sigScript, seqno) )
def addOutput(self, amount, pkScript):
self.outputs.append( (amount, pkScript) )
def disassemble(self):
self.version = unpack('<L', self.data[:4])[0]
(inputCount, data) = varlenDecode(self.data[4:])
inputs = []
for i in range(inputCount):
prevout = (data[:32], unpack('<L', data[32:36])[0])
(sigScript, data) = varlenDecode(data[36:])
sigScript = data[:sigScript]
seqno = unpack('<L', data[sigScript:sigScript + 4])[0]
data = data[sigScript + 4:]
inputs.append( (prevout, sigScript, seqno) )
self.inputs = inputs
(outputCount, data) = varlenDecode(self.data[4:])
outputs = []
for i in range(outputCount):
amount = unpack('<Q', data[:8])[0]
(pkScript, data) = varlenDecode(data[8:])
pkScript = data[:pkScript]
data = data[pkScript:]
outputs.append( (amount, pkScript) )
self.outputs = outputs
assert len(data) == 4
self.locktime = unpack('<L', data)[0]
def isCoinbase(self):
return len(self.inputs) == 1 and self.inputs[0][1] == 0xffffffff and self.input[0][0] == _nullprev
def assemble(self):
data = pack('<L', self.version)
inputs = self.inputs
data += varlenEncode(len(inputs))
for prevout, sigScript, seqno in inputs:
data += prevout[0] + pack('<L', prevout[1])
data += varlenEncode(len(sigScript)) + sigScript
data += pack('<L', seqno)
outputs = self.outputs
data += varlenEncode(len(outputs))
for amount, pkScript in inputs:
data += pack('<Q', amount)
data += varlenEncode(len(pkScript)) + pkScript
data += pack('<L', self.locktime)
self.data = data
self.idhash()
def idhash(self):
self.txid = dblsha(self.data)
| Python | 0.000001 |
81de62d46d7daefb2e1eef0d0cc4f5ca5c8aef2f | Use GCBV queryset to get PostGetMixin obj. | blog/utils.py | blog/utils.py | from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
| from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
| Python | 0 |
2b419d499c37597094379f524d8347f35eeda57c | Fix tinycss css validator | src/checker/plugin/checkers/tinycss_css_validator_plugin.py | src/checker/plugin/checkers/tinycss_css_validator_plugin.py | from common import PluginType
import tinycss
from yapsy.IPlugin import IPlugin
import logging
class CssValidator(IPlugin):
category = PluginType.CHECKER
id = "tinycss"
def __init__(self):
self.journal = None
def setJournal(self, journal):
self.journal = journal
def check(self, transaction):
"""Pusti validator, ulozi chyby.
"""
try:
parser = tinycss.make_parser('page3')
c = transaction.getContent()
if type(c) == str:
data = c
else:
data = str(transaction.getContent(), 'utf-8')
stylesheet = parser.parse_stylesheet(data)
for error in stylesheet.errors:
self.journal.foundDefect(transaction.idno, "stylesheet", "Stylesheet error", [error.line, error.reason], 0.7)
except UnicodeDecodeError as e:
logging.getLogger(__name__).debug("Unicode decode error: "+format(e))
return
| from common import PluginType
import tinycss
from yapsy.IPlugin import IPlugin
import logging
class CssValidator(IPlugin):
category = PluginType.CHECKER
id = "tinycss"
def __init__(self):
self.journal = None
def setJournal(self, journal):
self.journal = journal
def check(self, transaction):
"""Pusti validator, ulozi chyby.
"""
try:
parser = tinycss.make_parser('page3')
data = str(transaction.getContent(), 'utf-8')
stylesheet = parser.parse_stylesheet(data)
for error in stylesheet.errors:
self.journal.foundDefect(transaction.idno, "stylesheet", "Stylesheet error", [error.line, error.reason], 0.7)
except UnicodeDecodeError as e:
logging.getLogger(__name__).debug("Unicode decode error: "+format(e))
return
| Python | 0.000209 |
08f9575a0de95432729cbf1a9649148998030e17 | fix error output on windows (#50) | cmake/legacy/wafstyleout.py | cmake/legacy/wafstyleout.py | #!/usr/bin/env python
import subprocess
import sys
import os
import argparse
import platform
def unicodeWrite(out, str):
try:
out.write(str)
except UnicodeEncodeError:
bytes = str.encode(out.encoding or 'ascii', 'replace')
if hasattr(sys.stdout, 'buffer'):
out.buffer.write(bytes)
else:
out.write(bytes.decode(out.encoding or 'ascii', 'replace'))
try:
p = subprocess.Popen(sys.argv[1:], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
except Exception as e:
print('Execution failure: %s' % str(e))
sys.exit(-1)
includes = ''
msg = ''
if out:
out = out.decode(sys.stdout.encoding or 'ascii', 'replace')
includes = '\n'.join([l for l in out.split(os.linesep) if l.startswith('Note: including file:')])
out = '\n'.join([l for l in out.split(os.linesep) if not l.startswith('Note: including file:')])
msg = msg + out
unicodeWrite(sys.stdout, includes) # Ninja relies on result of /showIncludes when compiling with cl
if err:
err = err.decode(sys.stderr.encoding or 'ascii', 'replace')
msg = msg + err
if msg:
parser = argparse.ArgumentParser()
parser.add_argument('-o')
parser.add_argument('-c')
(args, unparsed) = parser.parse_known_args(sys.argv[2:])
src_str = None
for opt in [args.c, args.o]:
if opt:
src_str = opt
break
if not src_str:
linkOutArg = '/out:'
for arg in unparsed:
if arg.startswith(linkOutArg):
src_str = arg[len(linkOutArg):]
break
if not src_str:
src_str = sys.argv[-1]
try:
src_str = os.path.basename(src_str)
except:
pass
# The Visual Studio compiler always prints name of the input source
# file when compiling and "Creating library <file>.lib and object
# <file>.exp" when linking an executable. We try to ignore those
# outputs using a heuristic.
if p.returncode == 0 and (
msg.strip() == src_str or
msg.strip().startswith('Creating library ')):
sys.exit(p.returncode)
if p.returncode == 0:
marker_str = 'WARNING'
else:
if 'bde_runtest' in sys.argv[2]:
marker_str = 'TEST'
else:
marker_str = 'ERROR'
# This logic handles unicode in the output.
status_str = u'{}[{} ({})] <<<<<<<<<<\n{}>>>>>>>>>>\n'.format('\n' if platform.system() == 'Windows' else '', src_str, marker_str, msg)
unicodeWrite(sys.stderr, status_str)
sys.exit(p.returncode)
| #!/usr/bin/env python
import subprocess
import sys
import os
import argparse
def unicodeWrite(out, str):
try:
out.write(str)
except UnicodeEncodeError:
bytes = str.encode(out.encoding or 'ascii', 'replace')
if hasattr(sys.stdout, 'buffer'):
out.buffer.write(bytes)
else:
out.write(bytes.decode(out.encoding or 'ascii', 'replace'))
try:
p = subprocess.Popen(sys.argv[1:], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
except Exception as e:
print('Execution failure: %s' % str(e))
sys.exit(-1)
includes = ''
msg = ''
if out:
out = out.decode(sys.stdout.encoding or 'ascii', 'replace')
includes = '\n'.join([l for l in out.split(os.linesep) if l.startswith('Note: including file:')])
out = '\n'.join([l for l in out.split(os.linesep) if not l.startswith('Note: including file:')])
msg = msg + out
unicodeWrite(sys.stdout, includes) # Ninja relies on result of /showIncludes when compiling with cl
if err:
err = err.decode(sys.stderr.encoding or 'ascii', 'replace')
msg = msg + err
if msg:
parser = argparse.ArgumentParser()
parser.add_argument('-o')
parser.add_argument('-c')
(args, unparsed) = parser.parse_known_args(sys.argv[2:])
src_str = None
for opt in [args.c, args.o]:
if opt:
src_str = opt
break
if not src_str:
linkOutArg = '/out:'
for arg in unparsed:
if arg.startswith(linkOutArg):
src_str = arg[len(linkOutArg):]
break
if not src_str:
src_str = sys.argv[-1]
try:
src_str = os.path.basename(src_str)
except:
pass
# The Visual Studio compiler always prints name of the input source
# file when compiling and "Creating library <file>.lib and object
# <file>.exp" when linking an executable. We try to ignore those
# outputs using a heuristic.
if p.returncode == 0 and (
msg.strip() == src_str or
msg.strip().startswith('Creating library ')):
sys.exit(p.returncode)
if p.returncode == 0:
marker_str = 'WARNING'
else:
if 'bde_runtest' in sys.argv[2]:
marker_str = 'TEST'
else:
marker_str = 'ERROR'
# This logic handles unicode in the output.
status_str = u'[{} ({})] <<<<<<<<<<\n{}>>>>>>>>>>\n'.format(src_str, marker_str, msg)
unicodeWrite(sys.stderr, status_str)
sys.exit(p.returncode)
| Python | 0 |
88dd48eab612e89b956dea5600a999c78c61d5fb | fix lpproj algorithm | lpproj/lpproj.py | lpproj/lpproj.py | import numpy as np
from scipy import linalg
from sklearn.neighbors import kneighbors_graph, NearestNeighbors
from sklearn.utils import check_array
from sklearn.base import BaseEstimator, TransformerMixin
class LocalityPreservingProjection(BaseEstimator, TransformerMixin):
def __init__(self, n_neighbors=5, n_components=2, eigen_solver='auto',
neighbors_algorithm='auto', kernel_width=None):
self.n_neighbors = n_neighbors
self.n_components = n_components
self.eigen_solver = eigen_solver
self.neighbors_algorithm = neighbors_algorithm
self.kernel_width = kernel_width
def fit(self, X, y=None):
X = check_array(X)
self.nbrs_ = NearestNeighbors(n_neighbors=self.n_neighbors,
algorithm=self.neighbors_algorithm)
self.nbrs_.fit(X)
self.training_data_ = self.nbrs_._fit_X
# TODO: make this more efficient
# TODO: make duplicates behave correctly
if self.kernel_width is None:
W = kneighbors_graph(self.nbrs_, self.n_neighbors,
mode='connectivity', include_self=True)
else:
W = kneighbors_graph(self.nbrs_, self.n_neighbors,
mode='distance')
W.data = np.exp(-W.data ** 2 / self.kernel_width ** 2)
W = W.toarray()
W = np.maximum(W, W.T)
D = np.diag(W.sum(1))
L = D - W
A = np.dot(X.T, np.dot(L, X))
B = np.dot(X.T, np.dot(D, X))
evals, evecs = linalg.eigh(A, B)
self.projection_ = evecs[:, :self.n_components]
return self
def transform(self, X):
X = check_array(X)
return np.dot(X, self.projection_)
| import numpy as np
from sklearn.neighbors import kneighbors_graph
from sklearn.utils import check_array
from sklearn.base import BaseEstimator, TransformerMixin
class LocalityPreservingProjection(BaseEstimator, TransformerMixin)::
def __init__(self, n_neighbors=5, n_components=2, eigen_solver='auto',
neighbors_algorithm='auto'):
self.n_neighbors = n_neighbors
self.n_components = n_components
self.eigen_solver = eigen_solver
self.neighbors_algorithm = neighbors_algorithm
def fit(self, X, y=None):
X = check_array(X)
self.nbrs_ = NearestNeighbors(n_neighbors=self.n_neighbors,
algorithm=self.neighbors_algorithm)
self.nbrs_.fit(X)
self.training_data_ = self.nbrs_._fit_X
# TODO: make this more efficient
# L = D - W
W = kneighbors_graph(self.nbrs_, self.n_neighbors,
mode='connectivity')
D = np.diag(W.sum(1))
L = D - W
evals, evecs = np.linalg.eigh(np.dot(X, np.dot(L, X.T)),
np.dot(X, np.dot(D, X.T)))
self.projection_ = evecs[:, :self.n_components]
return self
def transform(self, X):
X = check_array(X)
reutrn np.dot(self.projection_.T, X)
| Python | 0.000012 |
8e58d7cccb837254cc433c7533bff119cc19645d | Use json instead of django.utils.simplejson. | javascript_settings/templatetags/javascript_settings_tags.py | javascript_settings/templatetags/javascript_settings_tags.py | import json
from django import template
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| Python | 0 |
afafb47d77fd673abf8d8ce9baa9824b985a943a | Add create_class_wrapper and class_wrapper | undecorate.py | undecorate.py | """Allow your decorations to be un-decorated.
In some cases, such as when testing, it can be useful to access the
decorated class or function directly, so as to not to use the behavior
or interface that the decorator might introduce.
Example:
>>> from functools import wraps
>>> from undecorate import unwrap, unwrappable
>>>
>>> @unwrappable
... def pack(func):
... @wraps(func)
... def wrapper(args, kwargs):
... return func(*args, **kwargs)
... return wrapper
...
>>> @pack
... def myfunc(a, b, c=None, d=None):
... return (a, b, c, d)
...
>>> myfunc('a', 'b', c='c')
Traceback (most recent call last):
...
TypeError: wrapper() got an unexpected keyword argument 'c'
>>>
>>> unwrap(myfunc)('a', 'b', c='c')
('a', 'b', 'c', None)
"""
from functools import wraps, partial
def unwrappable(decorator):
"""Make a decorator able to be un-decorated.
This meta-decorator takes a decorator, and returns a new decorator
that allows the decoration to be used by unwrap().
"""
@wraps(decorator)
def wrapper(decoration):
decorated = decorator(decoration)
decorated.__decoration__ = decoration
return decorated
return wrapper
def unwrap(wrapped):
"""Remove all wrappers from this decorated object."""
while True:
decoration = getattr(wrapped, '__decoration__', None)
if decoration is None:
return wrapped
wrapped = decoration
CLASS_WRAPPER_DELETES = ('__dict__', '__doc__', '__weakref__')
CLASS_WRAPPER_ASSIGNMENTS = ('__module__',)
def create_class_wrapper(wrapper,
wrapped,
deleted=CLASS_WRAPPER_DELETES,
assigned=CLASS_WRAPPER_ASSIGNMENTS):
"""Create a wrapper class that looks like the wrapped class.
wrapper is the class used to override the wrapped class.
wrapped is the class has values overridden by the wrapper.
deleted is a tuple naming the __dict__ items to be removed from the
wrapper class (defaults to CLASS_WRAPPER_DELETES).
assigned is a tuple naming the __dict__ items to be copied directly
from the wrapped class (defaults to CLASS_WRAPPER_ASSIGNMENTS).
A notable difference from update_wrapper is that is creates a new class
that does not appear to be exactly the same as the wrapped class, but
rather mimics the name and the module, and inherits from the original
class, relying on class inheritance to mimic the behavior.
"""
__dict__ = dict(wrapper.__dict__)
for attr in deleted:
__dict__.pop(attr)
for attr in assigned:
__dict__[attr] = getattr(wrapped, attr)
__dict__['__wrapped__'] = wrapped
# Use the metaclass of the wrapped class
return wrapped.__class__(wrapped.__name__, (wrapped,), __dict__)
def class_wraps(wrapped,
deleted=CLASS_WRAPPER_DELETES,
assigned=CLASS_WRAPPER_ASSIGNMENTS):
"""Decorator factory to apply create_class_wrapper() to a wrapper class.
Return a decorator that invokes create_class_wrapper() with the decorated
class as the wrapper argument and the arguments to class_wraps() as the
remaining arguments. Default arguments are as for create_class_wrapper().
This is a convenience function to simplify applying partial() to
create_class_wrapper().
"""
return partial(create_class_wrapper, wrapped=wrapped,
deleted=deleted, assigned=assigned)
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
doctest.testfile('README.rst', optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
| """Allow your decorations to be un-decorated.
In some cases, such as when testing, it can be useful to access the
decorated class or function directly, so as to not to use the behavior
or interface that the decorator might introduce.
Example:
>>> from functools import wraps
>>> from undecorate import unwrap, unwrappable
>>>
>>> @unwrappable
... def pack(func):
... @wraps(func)
... def wrapper(args, kwargs):
... return func(*args, **kwargs)
... return wrapper
...
>>> @pack
... def myfunc(a, b, c=None, d=None):
... return (a, b, c, d)
...
>>> myfunc('a', 'b', c='c')
Traceback (most recent call last):
...
TypeError: wrapper() got an unexpected keyword argument 'c'
>>>
>>> unwrap(myfunc)('a', 'b', c='c')
('a', 'b', 'c', None)
"""
from functools import wraps
def unwrappable(decorator):
"""Make a decorator able to be un-decorated.
This meta-decorator takes a decorator, and returns a new decorator
that allows the decoration to be used by unwrap().
"""
@wraps(decorator)
def wrapper(decoration):
decorated = decorator(decoration)
decorated.__decoration__ = decoration
return decorated
return wrapper
def unwrap(wrapped):
"""Remove all wrappers from this decorated object."""
while True:
decoration = getattr(wrapped, '__decoration__', None)
if decoration is None:
return wrapped
wrapped = decoration
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
doctest.testfile('README.rst', optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
| Python | 0.000021 |
551c4b971f1d18e232ba193cf486300d3490224b | add log | api/photo2song.py | api/photo2song.py | import asyncio
from collections import Counter
from api.bluemix_vision_recognition import VisionRecognizer
from api.echonest import Echonest
from api.spotify import Spotify
from machines.machine_loader import MachineLoader
import machines.photo_mood
def convert(image_urls):
vr = VisionRecognizer()
ec = Echonest()
sp = Spotify()
photo_to_mood = MachineLoader.load(machines.photo_mood)
TARGET_LABELS = ['Boat', 'Human', 'Insect', 'Invertebrate', 'Mammal', 'Man Made Scene', 'Outdoors', 'People Activity', 'Placental Mammal', 'Vertebrate']
log = []
# analyze moode
log.append("begin vision recognition")
moods = Counter()
matrix = vr.recognize(image_urls).to_matrix(TARGET_LABELS)
for r in matrix:
mood = photo_to_mood.predict(r)[0]
moods[int(mood)] += 1
target_mood = moods.most_common(1)[0][0] # get top and its score
target_mood = Echonest.MOOD[target_mood]
# choose song from mood
log.append("begin search song by mood")
tracks = ec.search_songs(target_mood)
# load spotify info
@asyncio.coroutine
def load_spotify(t):
t.load_spotify(sp)
log.append("begin load song information")
tasks = [load_spotify(t) for t in tracks]
done, _ = asyncio.get_event_loop().run_until_complete(asyncio.wait(tasks))
result = {
"mood": target_mood,
"tracks": [t.__dict__ for t in tracks],
"log": log
}
return result
| import asyncio
from collections import Counter
from api.bluemix_vision_recognition import VisionRecognizer
from api.echonest import Echonest
from api.spotify import Spotify
from machines.machine_loader import MachineLoader
import machines.photo_mood
def convert(image_urls):
vr = VisionRecognizer()
ec = Echonest()
sp = Spotify()
photo_to_mood = MachineLoader.load(machines.photo_mood)
TARGET_LABELS = ['Boat', 'Human', 'Insect', 'Invertebrate', 'Mammal', 'Man Made Scene', 'Outdoors', 'People Activity', 'Placental Mammal', 'Vertebrate']
# analyze moode
moods = Counter()
matrix = vr.recognize(image_urls).to_matrix(TARGET_LABELS)
for r in matrix:
mood = photo_to_mood.predict(r)[0]
moods[int(mood)] += 1
target_mood = moods.most_common(1)[0][0] # get top and its score
target_mood = Echonest.MOOD[target_mood]
# choose song from mood
tracks = ec.search_songs(target_mood)
# load spotify info
@asyncio.coroutine
def load_spotify(t):
t.load_spotify(sp)
tasks = [load_spotify(t) for t in tracks]
done, _ = asyncio.get_event_loop().run_until_complete(asyncio.wait(tasks))
result = {
"mood": target_mood,
"tracks": [t.__dict__ for t in tracks]
}
return result
| Python | 0.000002 |
c59c0911c5022291b38774bf407ca83557c78cc5 | test login and logout views. | user/tests.py | user/tests.py | from django.test import TestCase
class ViewsTest(TestCase):
"""
TestCase to test all exposed views for anonymous users.
"""
def setUp(self):
pass
def testHome(self):
response = self.client.get('/user/')
self.assertEquals(response.status_code, 200)
def testLogin(self):
response = self.client.get('/user/login/')
self.assertEquals(response.status_code, 200)
def testLogout(self):
response = self.client.get('/user/logout/')
self.assertEquals(response.status_code, 200)
| from django.test import TestCase
class ViewsTest(TestCase):
"""
TestCase to test all exposed views for anonymous users.
"""
def setUp(self):
pass
def testHome(self):
response = self.client.get('/user/')
self.assertEquals(response.status_code, 200)
def testLogin(self):
response = self.client.get('/login/')
self.assertEquals(response.status_code, 200)
def testLogout(self):
response = self.client.get('/logout/')
self.assertEquals(response.status_code, 200)
| Python | 0 |
c6536da7fc1eda82922b286c096412e4371f6d4c | Bump version | graphysio/__init__.py | graphysio/__init__.py | """Graphical time series visualizer and analyzer."""
__version__ = '2021.07.14.1'
__all__ = [
'algorithms',
'dialogs',
'exporter',
'legend',
'mainui',
'puplot',
'tsplot',
'utils',
'types',
'ui',
'transformations',
]
| """Graphical time series visualizer and analyzer."""
__version__ = '2021.07.14'
__all__ = [
'algorithms',
'dialogs',
'exporter',
'legend',
'mainui',
'puplot',
'tsplot',
'utils',
'types',
'ui',
'transformations',
]
| Python | 0 |
23cdb0d62e44797f84aee61f1a4c2909df8221b0 | Fix settings import and add an option to DjangoAppEngineMiddleware to allow setting up of signals on init | main/__init__.py | main/__init__.py | import logging
import os
from django.utils.importlib import import_module
def validate_models():
"""
Since BaseRunserverCommand is only run once, we need to call
model valdidation here to ensure it is run every time the code
changes.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
logging.info("Validating models...")
s = StringIO()
num_errors = get_validation_errors(s, None)
if num_errors:
s.seek(0)
error_text = s.read()
logging.critical("One or more models did not validate:\n%s" %
error_text)
else:
logging.info("All models validated.")
from djangoappengine.utils import on_production_server
if not on_production_server:
validate_models()
class DjangoAppEngineMiddleware:
def __init__(self, app, setup_signals=False):
self.settings_module = os.environ['DJANGO_SETTINGS_MODULE']
from djangoappengine.boot import setup_env
setup_env()
from django.conf import settings
if setup_signals:
# Load all models.py to ensure signal handling installation or index
# loading of some apps.
for app_to_import in settings.INSTALLED_APPS:
try:
import_module('%s.models' % app_to_import)
except ImportError:
pass
## In vanilla Django, staticfiles overrides runserver to use StaticFilesHandler
## if necessary. As we can't do this in our runserver (because we handover to dev_appserver)
## this has to be done here
if (not on_production_server and settings.DEBUG) and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
from django.contrib.staticfiles.handlers import StaticFilesHandler
app = StaticFilesHandler(app)
if getattr(settings, 'ENABLE_APPSTATS', False):
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app = appstats_wsgi_middleware(app)
self.wrapped_app = app
def __call__(self, environ, start_response):
#Always make sure the settings module is set - AppEngine sometimes loses it!
os.environ['DJANGO_SETTINGS_MODULE'] = self.settings_module
return self.wrapped_app(environ, start_response)
| import logging
import os
def validate_models():
"""
Since BaseRunserverCommand is only run once, we need to call
model valdidation here to ensure it is run every time the code
changes.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
logging.info("Validating models...")
s = StringIO()
num_errors = get_validation_errors(s, None)
if num_errors:
s.seek(0)
error_text = s.read()
logging.critical("One or more models did not validate:\n%s" %
error_text)
else:
logging.info("All models validated.")
from djangoappengine.utils import on_production_server
if not on_production_server:
validate_models()
from django.conf import settings
class DjangoAppEngineMiddleware:
def __init__(self, app):
self.settings_module = os.environ['DJANGO_SETTINGS_MODULE']
from djangoappengine.boot import setup_env
setup_env()
## In vanilla Django, staticfiles overrides runserver to use StaticFilesHandler
## if necessary. As we can't do this in our runserver (because we handover to dev_appserver)
## this has to be done here
if (not on_production_server and settings.DEBUG) and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
from django.contrib.staticfiles.handlers import StaticFilesHandler
app = StaticFilesHandler(app)
if getattr(settings, 'ENABLE_APPSTATS', False):
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app = appstats_wsgi_middleware(app)
self.wrapped_app = app
def __call__(self, environ, start_response):
#Always make sure the settings module is set - AppEngine sometimes loses it!
os.environ['DJANGO_SETTINGS_MODULE'] = self.settings_module
return self.wrapped_app(environ, start_response)
| Python | 0 |
e4ad2863236cd36e5860f1d17a06ca05e30216d5 | Store more stuff about songs in the queue | make_database.py | make_database.py | import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0,
name TEXT,
artist_name TEXT,
artist_uri TEXT,
artist_image TEXT,
album_name TEXT,
album_uri TEXT,
album_image TEXT
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| Python | 0 |
939ba609e6e7a527ef3325c4dd5b0a51c97d1af9 | fix #29 | djangocms_reversion2/signals.py | djangocms_reversion2/signals.py | # -*- coding: utf-8 -*-
from cms.operations import REVERT_PAGE_TRANSLATION_TO_LIVE
from django.db.models import signals
def make_page_version_dirty(page, language):
pv = page.page_versions.filter(active=True, language=language)
if pv.count() > 0:
pv = pv.first()
if not pv.dirty:
pv.dirty = True
pv.save()
def mark_title_dirty(sender, instance, **kwargs):
page = instance.page
language = instance.language
make_page_version_dirty(page, language)
def handle_placeholder_change(**kwargs):
language = kwargs.get('language')
placeholder = kwargs.get('placeholder')
target_placeholder = kwargs.get('target_placeholder', None)
page = None
if placeholder:
page = placeholder.page
elif target_placeholder:
page = target_placeholder.page
if page:
make_page_version_dirty(page, language)
def handle_page_publish(**kwargs):
language = kwargs.get('language')
page = kwargs.get('instance')
# when the page is published create a backup automatically
from djangocms_reversion2.models import PageVersion
try:
PageVersion.create_version(page, language, version_parent=None,
comment='Auto before publish', title='auto')
make_page_version_dirty(page, language)
except AssertionError:
# AssertionError page is not dirty
pass
def handle_page_reverted_to_live(**kwargs):
page = kwargs.get('obj')
translation = kwargs.get('translation')
language = translation.language
operation = kwargs.get('operation')
if operation == REVERT_PAGE_TRANSLATION_TO_LIVE:
from djangocms_reversion2.models import PageVersion
# if a page draft is replaced by the currently published page, then we have to make a backup and also
# set the active flag correctly
try:
PageVersion.create_version(page, language, version_parent=None,
comment='Auto before revert to live', title='auto')
make_page_version_dirty(page, language)
except AssertionError:
# AssertionError page is not dirty
pass
def handle_page_delete(sender, instance, **kwargs):
# deleting a real page will delete all of its hidden versions
page = instance
for pv in page.page_versions.iterator():
pv.hidden_page.delete()
pv.delete()
def delete_hidden_page(sender, **kwargs):
# deleting a PageVersion deletes its hidden page in the PageTree
# This signal handler deletes the hidden page associated to a PageVersion
# (reverse to on_delete=models.CASCADE)
# Problem was that an infinite loop can be originated
# if kwargs['instance'] and kwargs['instance'].hidden_page:
# hidden_page = kwargs['instance'].hidden_page
# try:
# hidden_page.delete()
# except Exception as e:
# print(e)
pass
def connect_all_plugins():
from cms.signals import post_placeholder_operation, post_publish, pre_obj_operation
post_placeholder_operation.connect(handle_placeholder_change, dispatch_uid='reversion2_placeholder')
signals.post_save.connect(mark_title_dirty, sender='cms.Title', dispatch_uid='reversion2_title')
signals.pre_delete.connect(handle_page_delete, sender='cms.Page', dispatch_uid='reversion2_page')
signals.pre_delete.connect(delete_hidden_page, sender='djangocms_reversion2.PageVersion',
dispatch_uid='reversion2_page_version')
post_publish.connect(handle_page_publish, dispatch_uid='reversion2_page_publish')
pre_obj_operation.connect(handle_page_reverted_to_live,
dispatch_uid='reversion2_page_revert_to_live')
| # -*- coding: utf-8 -*-
from django.db.models import signals
def make_page_version_dirty(page, language):
pv = page.page_versions.filter(active=True, language=language)
if pv.count() > 0:
pv = pv.first()
if not pv.dirty:
pv.dirty = True
pv.save()
def mark_title_dirty(sender, instance, **kwargs):
page = instance.page
language = instance.language
make_page_version_dirty(page, language)
def handle_placeholder_change(**kwargs):
language = kwargs.get('language')
placeholder = kwargs.get('placeholder')
target_placeholder = kwargs.get('target_placeholder', None)
page = None
if placeholder:
page = placeholder.page
elif target_placeholder:
page = target_placeholder.page
if page:
make_page_version_dirty(page, language)
def handle_page_publish(**kwargs):
language = kwargs.get('language')
page = kwargs.get('instance')
# when the page is published create a backup automatically
from djangocms_reversion2.models import PageVersion
try:
PageVersion.create_version(page, language, version_parent=None,
comment='Auto before publish', title='auto')
make_page_version_dirty(page, language)
except AssertionError:
# AssertionError page is not dirty
pass
def handle_page_delete(sender, instance, **kwargs):
# deleting a real page will delete all of its hidden versions
page = instance
for pv in page.page_versions.iterator():
pv.hidden_page.delete()
pv.delete()
def delete_hidden_page(sender, **kwargs):
# deleting a PageVersion deletes its hidden page in the PageTree
# This signal handler deletes the hidden page associated to a PageVersion
# (reverse to on_delete=models.CASCADE)
# Problem was that an infinite loop can be originated
# if kwargs['instance'] and kwargs['instance'].hidden_page:
# hidden_page = kwargs['instance'].hidden_page
# try:
# hidden_page.delete()
# except Exception as e:
# print(e)
pass
def connect_all_plugins():
from cms.signals import post_placeholder_operation, post_publish
post_placeholder_operation.connect(handle_placeholder_change, dispatch_uid='reversion2_placeholder')
signals.post_save.connect(mark_title_dirty, sender='cms.Title', dispatch_uid='reversion2_title')
signals.pre_delete.connect(handle_page_delete, sender='cms.Page', dispatch_uid='reversion2_page')
signals.pre_delete.connect(delete_hidden_page, sender='djangocms_reversion2.PageVersion',
dispatch_uid='reversion2_page_version')
post_publish.connect(handle_page_publish, dispatch_uid='reversion2_page_publish')
| Python | 0.000001 |
70448c7f4ea132376a6d3547edb99ec616501171 | Implement Gref#parents in terms of Gref#direct_parents | groundstation/gref.py | groundstation/gref.py | import os
import groundstation.objects.object_factory as object_factory
from groundstation.objects.update_object import UpdateObject
from groundstation.objects.root_object import RootObject
import logger
log = logger.getLogger(__name__)
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
def tips(self):
return os.listdir(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip, silent=False):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
if not silent:
raise
def direct_parents(self, tip):
"""Return all parents of `tip` in the order they're written into the
object"""
obj = object_factory.hydrate_object(self.store[tip].data)
if isinstance(obj, RootObject):
# Roots can't have parents
return []
elif isinstance(obj, UpdateObject):
return obj.parents
else:
raise "Unknown object hydrated %s" % (str(type(obj)))
def parents(self, tips=None):
"""Return all ancestors of `tip`, in an undefined order"""
# XXX This will asplode the stack at some point
parents = set()
this_iter = (tips or self.tips())
while this_iter:
tip = this_iter.pop()
tips_parents = self.direct_parents(tip)
parents = parents.union(set(tips_parents))
this_iter.extend(tips_parents)
return parents
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
| import os
import groundstation.objects.object_factory as object_factory
from groundstation.objects.update_object import UpdateObject
from groundstation.objects.root_object import RootObject
import logger
log = logger.getLogger(__name__)
class Gref(object):
def __init__(self, store, channel, identifier):
self.store = store
self.channel = channel.replace("/", "_")
self.identifier = identifier
self._node_path = os.path.join(self.store.gref_path(),
self.channel,
self.identifier)
def __str__(self):
return "%s/%s" % (self.channel, self.identifier)
def exists(self):
return os.path.exists(self._node_path)
def tips(self):
return os.listdir(self._node_path)
def node_path(self):
if not self.exists():
os.makedirs(self._node_path)
return self._node_path
def write_tip(self, tip, signature):
tip_path = self.tip_path(tip)
open(tip_path, 'a').close()
fh = open(tip_path, 'r+')
fh.seek(0)
fh.write(signature)
fh.truncate()
fh.close()
def tip_path(self, tip):
return os.path.join(self.node_path(), tip)
def __iter__(self):
return os.listdir(self.node_path()).__iter__()
def remove_tip(self, tip, silent=False):
try:
os.unlink(os.path.join(self.tip_path(tip)))
except:
if not silent:
raise
def direct_parents(self, tip):
"""Return all parents of `tip` in the order they're written into the
object"""
obj = object_factory.hydrate_object(self.store[tip].data)
if isinstance(obj, RootObject):
# Roots can't have parents
return []
elif isinstance(obj, UpdateObject):
return obj.parents
else:
raise "Unknown object hydrated %s" % (str(type(obj)))
def parents(self, tips=None):
"""Return all ancestors of `tip`, in an undefined order"""
# XXX This will asplode the stack at some point
parents = []
for tip in (tips or self.tips()):
obj = object_factory.hydrate_object(self.store[tip].data)
if isinstance(obj, UpdateObject):
for tip in obj.parents:
parents.append(tip)
parents.extend(self.parents([tip]))
elif isinstance(obj, RootObject):
return []
else:
raise "Unknown object hydrated %s" % (str(type(obj)))
return parents
def as_dict(self):
return {
"channel": self.channel,
"identifier": self.identifier,
"node_path": self._node_path
}
| Python | 0.002283 |
f3195d0d41232c7655250dea15ba4ecbe1a7b036 | append http:// if protocol is missing, sanitize the return value | Commands/Slurp.py | Commands/Slurp.py | # -*- coding: utf-8 -*-
"""
Created on Aug 31, 2015
@author: Tyranic-Moron
"""
import HTMLParser
import re
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
from Utils import WebUtils
from bs4 import BeautifulSoup
class Slurp(CommandInterface):
triggers = ['slurp']
help = "slurp <attribute> <url> <css selector> - scrapes the given attribute from the tag selected at the given url"
htmlParser = HTMLParser.HTMLParser()
def execute(self, message):
"""
@type message: IRCMessage
"""
if len(message.ParameterList) < 3:
return IRCResponse(ResponseType.Say, u"Not enough parameters, usage: {}".format(self.help), message.ReplyTo)
prop, url, selector = (message.ParameterList[0], message.ParameterList[1], u" ".join(message.ParameterList[2:]))
if not re.match(ur'^\w+://', url):
url = u"http://{}".format(url)
page = WebUtils.fetchURL(url)
if page is None:
return IRCResponse(ResponseType.Say, u"Problem fetching {}".format(url), message.ReplyTo)
soup = BeautifulSoup(page.body)
tag = soup.select_one(selector)
if tag is None:
return IRCResponse(ResponseType.Say,
u"'{}' does not select a tag at {}".format(selector, url),
message.ReplyTo)
specials = {
'tagname': tag.name,
'text': tag.text
}
if prop in specials:
value = specials[prop]
elif prop in tag:
value = tag[prop]
else:
return IRCResponse(ResponseType.Say,
u"The tag selected by '{}' ({}) does not have attribute '{}'".format(selector,
tag.name,
prop),
message.ReplyTo)
if not isinstance(value, basestring):
value = u" ".join(value)
# sanitize the value
value = value.strip()
value = re.sub(ur'[\r\n]+', u' ', value)
value = re.sub(ur'\s+', u' ', value)
value = self.htmlParser.unescape(value)
return IRCResponse(ResponseType.Say, value, message.ReplyTo)
| # -*- coding: utf-8 -*-
"""
Created on Aug 31, 2015
@author: Tyranic-Moron
"""
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
from Utils import WebUtils
from bs4 import BeautifulSoup
class Slurp(CommandInterface):
triggers = ['slurp']
help = "slurp <attribute> <url> <css selector> - scrapes the given attribute from the tag selected at the given url"
def execute(self, message):
"""
@type message: IRCMessage
"""
if len(message.ParameterList) < 3:
return IRCResponse(ResponseType.Say, u"Not enough parameters, usage: {}".format(self.help), message.ReplyTo)
prop, url, selector = (message.ParameterList[0], message.ParameterList[1], u" ".join(message.ParameterList[2:]))
page = WebUtils.fetchURL(url)
if page is None:
return IRCResponse(ResponseType.Say, u"Problem fetching {}".format(url), message.ReplyTo)
soup = BeautifulSoup(page.body)
tag = soup.select_one(selector)
if tag is None:
return IRCResponse(ResponseType.Say,
u"'{}' does not select a tag at {}".format(selector, url),
message.ReplyTo)
specials = {
'name': tag.name,
'text': tag.text
}
if prop in specials:
value = specials[prop]
elif prop in tag:
value = tag[prop]
else:
return IRCResponse(ResponseType.Say,
u"The tag selected by '{}' ({}) does not have attribute '{}'".format(selector,
tag.name,
prop),
message.ReplyTo)
if not isinstance(value, basestring):
value = u" ".join(value)
return IRCResponse(ResponseType.Say, value, message.ReplyTo)
| Python | 0.000015 |
ed97f1cdbcc5a00c2bf597ad921b17da652b0b07 | add annotations to _pytesttester.py | bottleneck/_pytesttester.py | bottleneck/_pytesttester.py | """
Generic test utilities.
Based on scipy._libs._testutils
"""
import os
import sys
from typing import Optional, List
__all__ = ["PytestTester"]
class PytestTester(object):
"""
Pytest test runner entry point.
"""
def __init__(self, module_name: str) -> None:
self.module_name = module_name
def __call__(
self,
label: str = "fast",
verbose: int = 1,
extra_argv: Optional[List[str]] = None,
doctests: bool = False,
coverage: bool = False,
tests: Optional[List[str]] = None,
parallel: Optional[int] = None,
) -> bool:
import pytest
module = sys.modules[self.module_name]
module_path = os.path.abspath(module.__path__[0])
pytest_args = ["-l"]
if doctests:
raise ValueError("Doctests not supported")
if extra_argv:
pytest_args += list(extra_argv)
if verbose and int(verbose) > 1:
pytest_args += ["-" + "v" * (int(verbose) - 1)]
if coverage:
pytest_args += ["--cov=" + module_path]
if label == "fast":
pytest_args += ["-m", "not slow"]
elif label != "full":
pytest_args += ["-m", label]
if tests is None:
tests = [self.module_name]
if parallel is not None and parallel > 1:
if _pytest_has_xdist():
pytest_args += ["-n", str(parallel)]
else:
import warnings
warnings.warn(
"Could not run tests in parallel because "
"pytest-xdist plugin is not available."
)
pytest_args += ["--pyargs"] + list(tests)
try:
code = pytest.main(pytest_args)
except SystemExit as exc:
code = exc.code
return code == 0
def _pytest_has_xdist() -> bool:
"""
Check if the pytest-xdist plugin is installed, providing parallel tests
"""
# Check xdist exists without importing, otherwise pytests emits warnings
from importlib.util import find_spec
return find_spec("xdist") is not None
| """
Generic test utilities.
Based on scipy._libs._testutils
"""
from __future__ import division, print_function, absolute_import
import os
import sys
__all__ = ["PytestTester"]
class PytestTester(object):
"""
Pytest test runner entry point.
"""
def __init__(self, module_name):
self.module_name = module_name
def __call__(
self,
label="fast",
verbose=1,
extra_argv=None,
doctests=False,
coverage=False,
tests=None,
parallel=None,
):
import pytest
module = sys.modules[self.module_name]
module_path = os.path.abspath(module.__path__[0])
pytest_args = ["-l"]
if doctests:
raise ValueError("Doctests not supported")
if extra_argv:
pytest_args += list(extra_argv)
if verbose and int(verbose) > 1:
pytest_args += ["-" + "v" * (int(verbose) - 1)]
if coverage:
pytest_args += ["--cov=" + module_path]
if label == "fast":
pytest_args += ["-m", "not slow"]
elif label != "full":
pytest_args += ["-m", label]
if tests is None:
tests = [self.module_name]
if parallel is not None and parallel > 1:
if _pytest_has_xdist():
pytest_args += ["-n", str(parallel)]
else:
import warnings
warnings.warn(
"Could not run tests in parallel because "
"pytest-xdist plugin is not available."
)
pytest_args += ["--pyargs"] + list(tests)
try:
code = pytest.main(pytest_args)
except SystemExit as exc:
code = exc.code
return code == 0
def _pytest_has_xdist():
"""
Check if the pytest-xdist plugin is installed, providing parallel tests
"""
# Check xdist exists without importing, otherwise pytests emits warnings
from importlib.util import find_spec
return find_spec("xdist") is not None
| Python | 0.000002 |
075c06a6360d8b88745e3bffd4883beead36c59b | Add orders_script | config_example.py | config_example.py | CHROMEDRIVER_PATH = '/usr/lib/chromium-browser/chromedriver'
FACEBOOK = {
'email': '',
'password': '',
}
HIPMENU = {
'restaurant_url': 'https://www.hipmenu.ro/#p1/rg/cluj-prod/group/98254//',
}
SKYPE = {
'username': '',
'password': '',
'conversation_title': '',
}
NEXMO = {
'api_key': '',
'api_secret': '',
'phone_number': '40744444444',
}
TEST = True
orders_script = """
var orders = [];
var my_name = document.querySelector('#h-profilename').textContent;
var name_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .header-left p'));
var price_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .summary-total .value'));
if (name_tags.length > price_tags.length) {
name_tags.splice(0, 1);
}
for (var i = 0; i < name_tags.length; i++) {
orders.push({
name: name_tags[i].textContent.replace('Selecțiile mele', my_name).trim(),
price: price_tags[i].textContent.trim(),
});
}
return orders;
"""
| CHROMEDRIVER_PATH = '/usr/lib/chromium-browser/chromedriver'
FACEBOOK = {
'email': '',
'password': '',
}
HIPMENU = {
'restaurant_url': 'https://www.hipmenu.ro/#p1/rg/cluj-prod/group/98254//',
}
SKYPE = {
'username': '',
'password': '',
'conversation_title': '',
}
NEXMO = {
'api_key': '',
'api_secret': '',
'phone_number': '40744444444',
}
TEST = True
| Python | 0.000001 |
60890b614132a8cfd48be3e001114275752e9ac4 | fix typo | megnet/config.py | megnet/config.py | """Data types"""
import numpy as np
import tensorflow as tf
DTYPES = {'float32': {'numpy': np.float32, 'tf': tf.float32},
'float16': {'numpy': np.float16, 'tf': tf.float16},
'int32': {'numpy': np.int32, 'tf': tf.int32},
'int16': {'numpy': np.int16, 'tf': tf.int16}}
class DataType:
np_float = np.float32
np_int = np.int32
tf_float = tf.float32
tf_int = tf.int32
@classmethod
def set_dtype(cls, data_type: str) -> None:
"""
Class method to set the data types
Args:
data_type (str): '16' or '32'
"""
if data_type.endswith('32'):
float_key = 'float32'
int_key = 'int32'
elif data_type.endswith('16'):
float_key = 'float16'
int_key = 'int16'
else:
raise ValueError("Data type not known, choose '16' or '32'")
cls.np_float = DTYPES[float_key]['numpy']
cls.tf_float = DTYPES[float_key]['tf']
cls.np_int = DTYPES[int_key]['numpy']
cls.tf_int = DTYPES[int_key]['tf']
def set_global_dtypes(data_type) -> None:
"""
Function to set the data types
Args:
data_type (str): '16' or '32'
Returns:
"""
DataType.set_dtype(data_type)
| """Data types"""
import numpy as np
import tensorflow as tf
DTYPES = {'float32': {'numpy': np.float32, 'tf': tf.float32},
'float16': {'numpy': np.float16, 'tf': tf.float16},
'int32': {'numpy': np.int32, 'tf': tf.int32},
'int16': {'numpy': np.int32, 'tf': tf.int32}}
class DataType:
np_float = np.float32
np_int = np.int32
tf_float = tf.float32
tf_int = tf.int32
@classmethod
def set_dtype(cls, data_type: str) -> None:
"""
Class method to set the data types
Args:
data_type (str): '16' or '32'
"""
if data_type.endswith('32'):
float_key = 'float32'
int_key = 'int32'
elif data_type.endswith('16'):
float_key = 'float16'
int_key = 'int16'
else:
raise ValueError("Data type not known, choose '16' or '32'")
cls.np_float = DTYPES[float_key]['numpy']
cls.tf_float = DTYPES[float_key]['tf']
cls.np_int = DTYPES[int_key]['numpy']
cls.tf_int = DTYPES[int_key]['tf']
def set_global_dtypes(data_type) -> None:
"""
Function to set the data types
Args:
data_type (str): '16' or '32'
Returns:
"""
DataType.set_dtype(data_type)
| Python | 0.999991 |
5eb2c6f7e1bf0cc1b73b167a08085fccf77974fe | Tidy up and doc-comment AWSInstanceEnv class | app/config/aws.py | app/config/aws.py | # -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table=None, context=None, profile_name=None):
"""
Low level API for fetching secrets for the current instance
"""
if not table:
table = '{}-credentials'.format(self.env)
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
"""
Enable dict-like access
"""
return self.getSecret(key)
def get(self, key, default=None):
"""
Return the value, or the default if not found
"""
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| Python | 0 |
9642b8f3d2f14b3a61054f68f05f4ef8eaca0803 | add validation | molo/core/management/commands/add_translated_pages_to_pages.py | molo/core/management/commands/add_translated_pages_to_pages.py | from __future__ import absolute_import, unicode_literals
from django.core.management.base import BaseCommand
from molo.core.models import PageTranslation, SiteLanguage, Page
class Command(BaseCommand):
def handle(self, *args, **options):
# first add all the translations to the main language Page
# and add the main language page as a translated page
# to the translated pages
main_language = SiteLanguage.objects.get(is_main_language=True)
pages = Page.objects.all().exclude(depth__in=[1, 2, 3])
for page in pages:
if page.specific.language.pk == main_language.pk:
for translation in PageTranslation.objects.filter(page=page):
if translation.page and translation.translated_page:
page.specific.translated_pages.add(
translation.translated_page.specific)
translation.translated_page.specific.translated_pages\
.add(page.specific)
page.save()
translation.translated_page.save()
else:
self.stdout.write(self.style.NOTICE(
'Translation with pk "%s"'
'is missing page/translated_page'
% (translation.pk)))
# loop through all translated_pages on the main language page and
# add all the translations to the rest of the translated pages
# except the language that it is in
for page in Page.objects.all().exclude(depth__in=[1, 2, 3]):
if page.language:
if page.specific.language.pk == main_language.pk:
for translated_page in \
page.specific.translated_pages.all():
translations = page.specific.translated_pages.all().\
exclude(language__pk=translated_page.language.pk)
for translation in translations:
translated_page.translated_pages.add(translation)
translated_page.save()
| from __future__ import absolute_import, unicode_literals
from django.core.management.base import BaseCommand
from molo.core.models import PageTranslation, SiteLanguage, Page
class Command(BaseCommand):
def handle(self, *args, **options):
# first add all the translations to the main language Page
# and add the main language page as a translated page
# to the translated pages
main_language = SiteLanguage.objects.get(is_main_language=True)
pages = Page.objects.all().exclude(depth__in=[1, 2, 3])
for page in pages:
if page.specific.language.pk == main_language.pk:
for translation in PageTranslation.objects.filter(page=page):
if translation.page and translation.translated_page:
page.specific.translated_pages.add(
translation.translated_page.specific)
translation.translated_page.specific.translated_pages\
.add(page.specific)
page.save()
translation.translated_page.save()
else:
self.stdout.write(self.style.NOTICE(
'Translation with pk "%s"'
'is missing page/translated_page'
% (translation.pk)))
# loop through all translated_pages on the main language page and
# add all the translations to the rest of the translated pages
# except the language that it is in
for page in Page.objects.all().exclude(depth__in=[1, 2, 3]):
if page.specific.language.pk == main_language.pk:
for translated_page in page.specific.translated_pages.all():
translations = page.specific.translated_pages.all().\
exclude(language__pk=translated_page.language.pk)
for translation in translations:
translated_page.translated_pages.add(translation)
translated_page.save()
| Python | 0.000001 |
cb4c91e3d109c939236f9581691f837fa0709108 | Delete manage hackathon detail router | open-hackathon-client/src/client/views/route_manage.py | open-hackathon-client/src/client/views/route_manage.py | # -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
sys.path.append("..")
from client import app
from . import render
from flask_login import login_required
from client.functions import is_local
@app.route("/manage/create_event")
@login_required
def create_event():
return render("/create_event.html", islocal=is_local())
@app.route("/manage")
@login_required
def myhackathon():
return render("/manage/myhackathon.html", hackathon_name="")
@app.route("/manage/<hackathon_name>/user")
@login_required
def registerusers(hackathon_name):
return render("/manage/registerusers.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/azurecert")
@login_required
def azurecert(hackathon_name):
return render("/manage/azurecert.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/organizers")
@login_required
def organizers(hackathon_name):
return render("/manage/organizers.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/edit")
@login_required
def edithackathon(hackathon_name):
return render("/manage/edithackathon.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/template")
@login_required
def template(hackathon_name):
return render("/manage/template.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/adminmgr")
@login_required
def adminmgr(hackathon_name):
return render("/manage/adminmgr.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/experiment")
@login_required
def experiment(hackathon_name):
return render("/manage/experiment.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/team")
@login_required
def team(hackathon_name):
return render("/manage/team.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/team/<team_id>")
@login_required
def team_award(hackathon_name, team_id):
return render("/manage/team_award.html", hackathon_name=hackathon_name, team_id=team_id)
@app.route("/manage/<hackathon_name>/award")
@login_required
def award(hackathon_name):
return render("/manage/award.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/host_server")
@login_required
def host_server(hackathon_name):
return render("/manage/host_server.html", hackathon_name=hackathon_name)
| # -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
sys.path.append("..")
from client import app
from . import render
from flask_login import login_required
from client.functions import is_local
@app.route("/manage/create_event")
@login_required
def create_event():
return render("/create_event.html", islocal=is_local())
@app.route("/manage")
@login_required
def myhackathon():
return render("/manage/myhackathon.html", hackathon_name="")
# get registered user list of a hackathon
# @app.route("/manage/<hackathon_name>")
# @login_required
# def hackathon_manage_detail(hackathon_name):
# return render("/manage/detail.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/user")
@login_required
def registerusers(hackathon_name):
return render("/manage/registerusers.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/azurecert")
@login_required
def azurecert(hackathon_name):
return render("/manage/azurecert.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/organizers")
@login_required
def organizers(hackathon_name):
return render("/manage/organizers.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/edit")
@login_required
def edithackathon(hackathon_name):
return render("/manage/edithackathon.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/template")
@login_required
def template(hackathon_name):
return render("/manage/template.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/adminmgr")
@login_required
def adminmgr(hackathon_name):
return render("/manage/adminmgr.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/experiment")
@login_required
def experiment(hackathon_name):
return render("/manage/experiment.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/team")
@login_required
def team(hackathon_name):
return render("/manage/team.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/team/<team_id>")
@login_required
def team_award(hackathon_name, team_id):
return render("/manage/team_award.html", hackathon_name=hackathon_name, team_id=team_id)
@app.route("/manage/<hackathon_name>/award")
@login_required
def award(hackathon_name):
return render("/manage/award.html", hackathon_name=hackathon_name)
@app.route("/manage/<hackathon_name>/host_server")
@login_required
def host_server(hackathon_name):
return render("/manage/host_server.html", hackathon_name=hackathon_name)
| Python | 0 |
58d7592c603509f2bb625e4e2e5cb31ada4a8194 | Change test for make_kernel(kerneltype='airy') from class to function | astropy/nddata/convolution/tests/test_make_kernel.py | astropy/nddata/convolution/tests/test_make_kernel.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
| Python | 0.000001 |
e7647da318b2fc5f973080446882347a287aec3a | use same cache for custom domain redirect | openedx/core/djangoapps/appsembler/sites/middleware.py | openedx/core/djangoapps/appsembler/sites/middleware.py | from django.conf import settings
from django.core.cache import cache, caches
from django.contrib.redirects.models import Redirect
from django.shortcuts import redirect
from .models import AlternativeDomain
import logging
log = logging.getLogger(__name__)
class CustomDomainsRedirectMiddleware(object):
def process_request(self, request):
cache_general = caches['general']
hostname = request.get_host()
if hostname.endswith(settings.SITE_NAME):
cache_key = '{prefix}-{site}'.format(prefix=settings.CUSTOM_DOMAINS_REDIRECT_CACHE_KEY_PREFIX, site=hostname)
custom_domain = cache_general.get(cache_key)
if custom_domain is None:
try:
alternative_domain = AlternativeDomain.objects.select_related('site').get(domain=hostname)
custom_domain = alternative_domain.site.domain
except AlternativeDomain.DoesNotExist:
custom_domain = ""
cache_general.set(cache_key, custom_domain, settings.CUSTOM_DOMAINS_REDIRECT_CACHE_TIMEOUT)
if custom_domain:
return redirect("https://" + custom_domain)
return
class RedirectMiddleware(object):
"""
Redirects requests for URLs persisted using the django.contrib.redirects.models.Redirect model.
With the exception of the main site.
"""
def process_request(self, request):
"""
Redirects the current request if there is a matching Redirect model
with the current request URL as the old_path field.
"""
site = request.site
try:
in_whitelist = any(map(
lambda p: p in request.path,
settings.MAIN_SITE_REDIRECT_WHITELIST))
if (site.id == settings.SITE_ID) and not in_whitelist:
return redirect("https://appsembler.com/tahoe/")
except Exception:
# I'm not entirely sure this middleware get's called only in LMS or in other apps as well.
# Soooo just in case
pass
cache_key = '{prefix}-{site}'.format(prefix=settings.REDIRECT_CACHE_KEY_PREFIX, site=site.domain)
redirects = cache.get(cache_key)
if redirects is None:
redirects = {redirect.old_path: redirect.new_path for redirect in Redirect.objects.filter(site=site)}
cache.set(cache_key, redirects, settings.REDIRECT_CACHE_TIMEOUT)
redirect_to = redirects.get(request.path)
if redirect_to:
return redirect(redirect_to, permanent=True)
| from django.conf import settings
from django.core.cache import cache
from django.contrib.redirects.models import Redirect
from django.shortcuts import redirect
from .models import AlternativeDomain
import logging
log = logging.getLogger(__name__)
class CustomDomainsRedirectMiddleware(object):
def process_request(self, request):
hostname = request.get_host()
if hostname.endswith(settings.SITE_NAME):
cache_key = '{prefix}-{site}'.format(prefix=settings.CUSTOM_DOMAINS_REDIRECT_CACHE_KEY_PREFIX, site=hostname)
custom_domain = cache.get(cache_key)
if custom_domain is None:
try:
alternative_domain = AlternativeDomain.objects.select_related('site').get(domain=hostname)
custom_domain = alternative_domain.site.domain
except AlternativeDomain.DoesNotExist:
custom_domain = ""
cache.set(cache_key, custom_domain, settings.CUSTOM_DOMAINS_REDIRECT_CACHE_TIMEOUT)
if custom_domain:
return redirect("https://" + custom_domain)
return
class RedirectMiddleware(object):
"""
Redirects requests for URLs persisted using the django.contrib.redirects.models.Redirect model.
With the exception of the main site.
"""
def process_request(self, request):
"""
Redirects the current request if there is a matching Redirect model
with the current request URL as the old_path field.
"""
site = request.site
try:
in_whitelist = any(map(
lambda p: p in request.path,
settings.MAIN_SITE_REDIRECT_WHITELIST))
if (site.id == settings.SITE_ID) and not in_whitelist:
return redirect("https://appsembler.com/tahoe/")
except Exception:
# I'm not entirely sure this middleware get's called only in LMS or in other apps as well.
# Soooo just in case
pass
cache_key = '{prefix}-{site}'.format(prefix=settings.REDIRECT_CACHE_KEY_PREFIX, site=site.domain)
redirects = cache.get(cache_key)
if redirects is None:
redirects = {redirect.old_path: redirect.new_path for redirect in Redirect.objects.filter(site=site)}
cache.set(cache_key, redirects, settings.REDIRECT_CACHE_TIMEOUT)
redirect_to = redirects.get(request.path)
if redirect_to:
return redirect(redirect_to, permanent=True)
| Python | 0 |
21850d8ab44981b2bb02cb50386db717aacc730b | Fix poor coverage | paystackapi/tests/test_product.py | paystackapi/tests/test_product.py | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.product import Product
class TestProduct(BaseTestCase):
@httpretty.activate
def test_product_create(self):
"""Method defined to test product creation."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Product successfully created"}',
status=201,
)
response = Product.create(
name="Product pypaystack test", description="my test description",
price=500000, currency="NGN"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_product_list(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}',
status=201,
)
response = Product.list()
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product fetch method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.fetch(5499)
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_update(self):
"""Function defined to test Product update method."""
httpretty.register_uri(
httpretty.PUT,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.update(product_id=5499, name="Product pypaystack test",
description="my test description", price=500000000,
currency="USD"
)
self.assertEqual(response['status'], True)
| import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.product import Product
class TestProduct(BaseTestCase):
@httpretty.activate
def test_product_create(self):
"""Method defined to test product creation."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Product successfully created"}',
status=201,
)
response = Product.create(
name="Product pypaystack test", description="my test description",
price=500000, currency="NGN"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_product_list(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}',
status=201,
)
response = Product.list()
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.fetch(5499)
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.PUT,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.update(product_id=5499, name="Product pypaystack test",
description="my test description", price=500000000,
currency="USD"
)
self.assertEqual(response['status'], True)
| Python | 0.002037 |
69ac1eb2f125e93444c134346dca954d8c040d42 | Implement the API to get system running status | paradrop/src/paradrop/backend/system_status.py | paradrop/src/paradrop/backend/system_status.py | '''
Get system running status including CPU load, memory usage, network traffic.
'''
import psutil
import time
class SystemStatus(object):
def __init__(self):
self.timestamp = time.time()
self.cpu_load = []
self.mem = dict(total = 0,
available = 0,
free = 0,
cached = 0,
buffers = 0)
self.disk_partitions = {}
self.network = {}
partitions = psutil.disk_partitions()
for p in partitions:
if p.fstype == 'ext4':
usage = psutil.disk_usage(p.mountpoint)
self.disk_partitions[p.mountpoint] = {
'total': usage.total,
'used': usage.used
}
def getStatus(self):
timestamp = time.time()
if (timestamp > self.timestamp + 0.8):
self.timestamp = timestamp
self.refreshCpuLoad()
self.refreshMemoryInfo()
self.refreshDiskInfo()
self.refreshNetworkTraffic()
result = {
'cpu_load': self.cpu_load,
'mem': self.mem,
'disk': self.disk_partitions,
'network': self.network
}
return result
def refreshCpuLoad(self):
self.cpu_load = map(int, psutil.cpu_percent(percpu=True))
def refreshMemoryInfo(self):
mem = psutil.virtual_memory()
self.mem['total'] = mem.total
self.mem['available'] = mem.available
self.mem['free'] = mem.free
self.mem['cached'] = mem.cached
self.mem['buffers'] = mem.buffers
def refreshDiskInfo(self):
for key, value in self.disk_partitions.iteritems():
usage = psutil.disk_usage(key)
self.disk_partitions[key]['total'] = usage.total
self.disk_partitions[key]['used'] = usage.used
def refreshNetworkTraffic(self):
excluded_interfaces = set(["lo", 'br-lan', 'docker0', 'wlan0'])
interfaces = {}
stats = psutil.net_if_stats()
for key, value in stats.iteritems():
if key in excluded_interfaces:
continue
interfaces[key] = {
'isup': value.isup,
'speed': value.speed,
'mtu': value.mtu
}
addresses = psutil.net_if_addrs()
for key, value in addresses.iteritems():
if key in excluded_interfaces:
continue
for i in value:
if i.family == 2:
interfaces[key]['ipv4'] = i.address
interfaces[key]['netmask'] = i.netmask
elif i.family == 17:
interfaces[key]['mac'] = i.address
traffic = psutil.net_io_counters(pernic=True)
for key, value in traffic.iteritems():
if key in excluded_interfaces:
continue
interfaces[key]['bytes_sent'] = value.bytes_sent
interfaces[key]['bytes_recv'] = value.bytes_recv
interfaces[key]['packets_sent'] = value.packets_sent
interfaces[key]['packets_recv'] = value.packets_recv
interfaces[key]['errin'] = value.errin
interfaces[key]['errout'] = value.errout
interfaces[key]['dropin'] = value.dropin
interfaces[key]['dropout'] = value.dropout
self.network = interfaces
| '''
Get system running status including CPU load, memory usage, network traffic.
'''
class SystemStatus(object):
def __init__(self):
pass
def getStatus(self):
test = {
'cpuload': 10
}
return test
def refreshCpuLoad(self):
pass
def refreshMemoryInfo(self):
pass
def refreshDiskInfo(self):
pass
def refreshNetworkTraffic(self):
pass
| Python | 0.000001 |
c11b5b2181434651e1979c6db328ba81ed19566d | Add debug to see why test-meson-helloworld. | meson_install.py | meson_install.py | #!/usr/bin/env python3
# Copyright 2015 wink saville
#
# licensed under the apache license, version 2.0 (the "license");
# you may not use this file except in compliance with the license.
# you may obtain a copy of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis,
# without warranties or conditions of any kind, either express or implied.
# see the license for the specific language governing permissions and
# limitations under the license.
import utils
import parseinstallargs
import subprocess
import sys
import os
import traceback
import shutil
APP='meson'
URL='https://github.com/mesonbuild/meson.git'
DEFAULT_VER='0.27.0'
#CHECKOUT=DEFAULT_VER
CHECKOUT='master'
class Installer:
'''Installer for meson.'''
def __init__(self, defaultVer=DEFAULT_VER, defaultCodePrefixDir=None,
defaultInstallPrefixDir=None, defaultForceInstall=None):
'''See parseinstallargs for defaults prefixes'''
self.args = parseinstallargs.InstallArgs(APP, defaultVer, defaultCodePrefixDir,
defaultInstallPrefixDir, defaultForceInstall)
def install(self):
dst_dir = os.path.join(self.args.installPrefixDir, 'bin')
os.makedirs(dst_dir, exist_ok=True)
retval = 0
try:
print('dst_dir =', dst_dir)
dst = os.path.join(dst_dir, self.args.app)
print('dst =', dst)
output = subprocess.check_output([dst, '-v'],
stderr=subprocess.STDOUT)
print('output =', output)
if output is None:
print('output is None')
output = b''
except BaseException as err:
traceback.print_exc()
output = b''
print('forceInstall =', self.args.forceInstall)
print('self.args.ver =', self.args.ver)
print('output =', output)
if not self.args.forceInstall and bytes(self.args.ver, 'utf-8') in output:
print('{app} {ver} is already installed'
.format(app=self.args.app, ver=self.args.ver))
else:
print('compiling {app} {ver}'
.format(app=self.args.app, ver=self.args.ver))
code_dir = os.path.join(self.args.codePrefixDir, self.args.app)
if self.args.forceInstall:
shutil.rmtree(code_dir, ignore_errors=True)
os.makedirs(code_dir)
utils.git('clone', [URL, code_dir])
os.chdir(code_dir)
utils.git('checkout', [CHECKOUT])
# Not a list but a string
subprocess.check_call('./install_meson.py --prefix {}'
.format(self.args.installPrefixDir), shell=True)
return retval
if __name__ == '__main__':
if len(sys.argv) == 2 and sys.argv[1] == 'printVer':
print(DEFAULT_VER)
else:
installer = Installer()
installer.install()
| #!/usr/bin/env python3
# Copyright 2015 wink saville
#
# licensed under the apache license, version 2.0 (the "license");
# you may not use this file except in compliance with the license.
# you may obtain a copy of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis,
# without warranties or conditions of any kind, either express or implied.
# see the license for the specific language governing permissions and
# limitations under the license.
import utils
import parseinstallargs
import subprocess
import sys
import os
import traceback
import shutil
APP='meson'
URL='https://github.com/mesonbuild/meson.git'
DEFAULT_VER='0.27.0'
#CHECKOUT=DEFAULT_VER
CHECKOUT='master'
class Installer:
'''Installer for meson.'''
def __init__(self, defaultVer=DEFAULT_VER, defaultCodePrefixDir=None,
defaultInstallPrefixDir=None, defaultForceInstall=None):
'''See parseinstallargs for defaults prefixes'''
self.args = parseinstallargs.InstallArgs(APP, defaultVer, defaultCodePrefixDir,
defaultInstallPrefixDir, defaultForceInstall)
def install(self):
dst_dir = os.path.join(self.args.installPrefixDir, 'bin')
os.makedirs(dst_dir, exist_ok=True)
retval = 0
try:
dst = os.path.join(dst_dir, self.args.app)
output = subprocess.check_output([dst, '-v'],
stderr=subprocess.STDOUT)
if output is None:
output = b''
except BaseException as err:
output = b''
if not self.args.forceInstall and bytes(self.args.ver, 'utf-8') in output:
print('{app} {ver} is already installed'
.format(app=self.args.app, ver=self.args.ver))
else:
print('compiling {app} {ver}'
.format(app=self.args.app, ver=self.args.ver))
code_dir = os.path.join(self.args.codePrefixDir, self.args.app)
if self.args.forceInstall:
shutil.rmtree(code_dir, ignore_errors=True)
os.makedirs(code_dir)
utils.git('clone', [URL, code_dir])
os.chdir(code_dir)
utils.git('checkout', [CHECKOUT])
# Not a list but a string
subprocess.check_call('./install_meson.py --prefix {}'
.format(self.args.installPrefixDir), shell=True)
return retval
if __name__ == '__main__':
if len(sys.argv) == 2 and sys.argv[1] == 'printVer':
print(DEFAULT_VER)
else:
installer = Installer()
installer.install()
| Python | 0 |
396fbb31fdfe212da9c531e5aa6240c554f0d86f | Refactor logging to use recommended pylint format | xboxapi/client.py | xboxapi/client.py | #-*- coding: utf-8 -*-
import requests
import logging
import json
import os
# Local libraries
from .gamer import Gamer
import xboxapi
logging.basicConfig()
class Client(object):
def __init__(self, api_key=None, timeout=None, lang=None):
self.api_key = api_key
self.timeout = timeout
self.endpoint = 'https://xboxapi.com/v2/'
self.timeout = timeout if timeout is not None else 3 # Seconds
self.lang = lang
self.last_method_call = None
self.continuation_token = None
# Debug logging can be triggered from environment variable
# XBOXAPI_DEBUG=1
self.logger = logging.getLogger('xboxapi')
log_level = logging.DEBUG if os.getenv('XBOXAPI_DEBUG') else logging.INFO
self.logger.setLevel(log_level)
if self.api_key is None:
raise ValueError('Api key is missing')
def gamer(self, gamertag=None, xuid=None):
''' return a gamer object '''
if gamertag is None:
raise ValueError('No gamertag given!')
return Gamer(gamertag=gamertag, client=self, xuid=xuid)
def api_get(self, method):
''' GET wrapper on requests library '''
headers = {'X-Auth': self.api_key,
'User-Agent': 'Python/XboxApi ' + xboxapi.__version__}
if self.lang is not None:
headers['Accept-Language'] = self.lang
url = self.endpoint + method
# Check for continuation token and the method match the last call
if method == self.last_method_call and self.continuation_token is not None:
url = url + '?continuationToken=' + self.continuation_token
self.logger.debug('%s %s', 'GET', url)
self.logger.debug('Headers: %s', headers)
res = requests.get(self.endpoint + method,
headers=headers, timeout=self.timeout)
self.xboxapi_response_error(res)
self.logger.debug('Response: %s', res.json())
# Track method calls and peak for continuation token
self.last_method_call = method
self.continuation_token = None
if 'X-Continuation-Token' in res.headers:
self.continuation_token = res.headers['X-Continuation-Token']
return res
def api_post(self, method, body):
''' POST wrapper on requests library '''
headers = {
'X-AUTH': self.api_key,
'Content-Type': 'application/json'
}
url = '{}{}'.format(self.endpoint, method)
self.logger.debug('%s %s', 'POST', url)
self.logger.debug('Headers: %s', headers)
self.logger.debug('Body: %s', body)
res = requests.post(self.endpoint + method, headers=headers, data=json.dumps(body),
timeout=self.timeout)
self.xboxapi_response_error(res)
self.logger.debug('Response: %s', res.json())
return res
def calls_remaining(self):
''' Check on the limits from server '''
server_headers = self.api_get('accountxuid').headers
limit_headers = {}
limit_headers['X-RateLimit-Reset'] = server_headers['X-RateLimit-Reset']
limit_headers['X-RateLimit-Limit'] = server_headers['X-RateLimit-Limit']
limit_headers['X-RateLimit-Remaining'] = server_headers['X-RateLimit-Remaining']
| #-*- coding: utf-8 -*-
import requests
import logging
import json
import os
# Local libraries
from .gamer import Gamer
import xboxapi
logging.basicConfig()
class Client(object):
def __init__(self, api_key=None, timeout=None, lang=None):
self.api_key = api_key
self.timeout = timeout
self.endpoint = 'https://xboxapi.com/v2/'
self.timeout = timeout if timeout is not None else 3 # Seconds
self.lang = lang
self.last_method_call = None
self.continuation_token = None
# Debug logging can be triggered from environment variable
# XBOXAPI_DEBUG=1
self.logger = logging.getLogger('xboxapi')
log_level = logging.DEBUG if os.getenv(
'XBOXAPI_DEBUG') else logging.INFO
self.logger.setLevel(log_level)
if self.api_key is None:
raise ValueError('Api key is missing')
def gamer(self, gamertag=None, xuid=None):
''' return a gamer object '''
if gamertag is None:
raise ValueError('No gamertag given!')
return Gamer(gamertag=gamertag, client=self, xuid=xuid)
def api_get(self, method):
''' GET wrapper on requests library '''
headers = {'X-Auth': self.api_key,
'User-Agent': 'Python/XboxApi ' + xboxapi.__version__}
if self.lang is not None:
headers['Accept-Language'] = self.lang
url = self.endpoint + method
# Check for continuation token and the method match the last call
if method == self.last_method_call and self.continuation_token is not None:
url = url + '?continuationToken=' + self.continuation_token
self.logger.debug('{} {}'.format('GET', url))
self.logger.debug('Headers: {}'.format(headers))
res = requests.get(self.endpoint + method,
headers=headers, timeout=self.timeout)
self.logger.debug('Response: {}'.format(res.json()))
# Track method calls and peak for continuation token
self.last_method_call = method
self.continuation_token = None
if 'X-Continuation-Token' in res.headers:
self.continuation_token = res.headers['X-Continuation-Token']
return res
def api_post(self, method, body):
''' POST wrapper on requests library '''
headers = {
'X-AUTH': self.api_key,
'Content-Type': 'application/json'
}
url = '{}{}'.format(self.endpoint, method)
self.logger.debug('{} {}'.format('POST', url))
self.logger.debug('Headers: {}'.format(headers))
self.logger.debug('Body: {}'.format(body))
res = requests.post(self.endpoint + method, headers=headers, data=json.dumps(body),
timeout=self.timeout)
self.logger.debug('Response: {}'.format(res.json()))
return res
def calls_remaining(self):
''' Check on the limits from server '''
server_headers = self.api_get('accountxuid').headers
limit_headers = {}
limit_headers[
'X-RateLimit-Reset'] = server_headers['X-RateLimit-Reset']
limit_headers[
'X-RateLimit-Limit'] = server_headers['X-RateLimit-Limit']
limit_headers[
'X-RateLimit-Remaining'] = server_headers['X-RateLimit-Remaining']
return limit_headers
| Python | 0 |
2726ec1c400a212b1cac13f20d65c1b43eb042b0 | Fix formatting in download-google-smart-card-client-library.py | example_js_standalone_smart_card_client_app/download-google-smart-card-client-library.py | example_js_standalone_smart_card_client_app/download-google-smart-card-client-library.py | #!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downloads from GitHub the latest released version of the client library for
communicating to the Google Smart Card Connector app."""
import json
import os
import sys
import urllib2
GITHUB_REPO_OWNER = "GoogleChrome"
GITHUB_REPO = "chromeos_smart_card_connector"
CLIENT_LIBRARY_ASSET_NAME = "google-smart-card-client-library.js"
OUTPUT_FILE_NAME = "google-smart-card-client-library.js"
GITHUB_LATEST_RELEASE_URL_TEMPLATE = \
"https://api.github.com/repos/{owner}/{repo}/releases/latest"
def main():
sys.stderr.write('Accessing GitHub API...\n')
latest_release_url = GITHUB_LATEST_RELEASE_URL_TEMPLATE.format(
owner=GITHUB_REPO_OWNER, repo=GITHUB_REPO)
latest_release_info = json.load(urllib2.urlopen(latest_release_url))
client_library_download_url = None
for asset in latest_release_info.get("assets", []):
if asset["name"] == CLIENT_LIBRARY_ASSET_NAME:
client_library_download_url = asset["browser_download_url"]
if client_library_download_url is None:
raise RuntimeError("Asset with the client library not found in the latest "
"GitHub release")
sys.stderr.write('Downloading from "{0}"...\n'.format(
client_library_download_url))
client_library = urllib2.urlopen(client_library_download_url).read()
if os.path.dirname(__file__):
output_file_path = os.path.join(
os.path.relpath(os.path.dirname(__file__)), OUTPUT_FILE_NAME)
else:
output_file_path = OUTPUT_FILE_NAME
with open(output_file_path, "wt") as f:
f.write(client_library)
sys.stderr.write(
'Successfully finished. The library is stored at "{0}".\n'.format(
output_file_path))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downloads from GitHub the latest released version of the client library for
communicating to the Google Smart Card Connector app."""
import json
import os
import sys
import urllib2
GITHUB_REPO_OWNER = "GoogleChrome"
GITHUB_REPO = "chromeos_smart_card_connector"
CLIENT_LIBRARY_ASSET_NAME = "google-smart-card-client-library.js"
OUTPUT_FILE_NAME = "google-smart-card-client-library.js"
GITHUB_LATEST_RELEASE_URL_TEMPLATE = \
"https://api.github.com/repos/{owner}/{repo}/releases/latest"
def main():
sys.stderr.write('Accessing GitHub API...\n')
latest_release_url = GITHUB_LATEST_RELEASE_URL_TEMPLATE.format(
owner=GITHUB_REPO_OWNER, repo=GITHUB_REPO)
latest_release_info = json.load(urllib2.urlopen(latest_release_url))
client_library_download_url = None
for asset in latest_release_info.get("assets", []):
if asset["name"] == CLIENT_LIBRARY_ASSET_NAME:
client_library_download_url = asset["browser_download_url"]
if client_library_download_url is None:
raise RuntimeError("Asset with the client library not found in the latest "
"GitHub release")
sys.stderr.write('Downloading from "{0}"...\n'.format(
client_library_download_url))
client_library = urllib2.urlopen(client_library_download_url).read()
if os.path.dirname(__file__):
output_file_path = os.path.join(
os.path.relpath(os.path.dirname(__file__)), OUTPUT_FILE_NAME)
else:
output_file_path = OUTPUT_FILE_NAME
with open(output_file_path, "wt") as f:
f.write(client_library)
sys.stderr.write(
'Successfully finished. The library is stored at "{0}".\n'.format(
output_file_path))
if __name__ == '__main__':
main()
| Python | 0.999998 |
83549f9549a253fb7a86e8c051bd24fab91a0f5f | Make the output a little better | conda_build/main_inspect.py | conda_build/main_inspect.py | # (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import sys
import argparse
from collections import defaultdict
from conda.misc import which_package
from conda.lock import Locked
from conda_build.main_build import args_func
from conda_build.config import config
from conda_build.ldd import get_package_linkages
def main():
p = argparse.ArgumentParser(
description='tool for inspecting conda packages'
)
p.add_argument(
'packages',
action='store',
nargs='+',
help='conda packages to inspect',
)
p.add_argument(
'--linkages',
action="store_true",
help="inspect the linkages of the binary files in the package",
)
p.set_defaults(func=execute)
args = p.parse_args()
args_func(args, p)
def print_linkages(depmap):
# Print system and not found last
k = sorted(depmap.keys() - {'system', 'not found'})
for dep in k + ['system', 'not found']:
print("%s:" % dep)
for lib, path in sorted(depmap[dep]):
print(" %s => %s" % (lib, path))
print()
def execute(args, parser):
with Locked(config.croot):
for pkg in args.packages:
if args.linkages:
linkages = get_package_linkages(pkg)
depmap = defaultdict(set)
for binary in linkages:
for lib, path in linkages[binary]:
if path.startswith(config.test_prefix):
deps = list(which_package(path))
if len(deps) > 1:
print("Warning: %s comes from multiple packages: %s" % (path, ' and '.join(deps)), file=sys.stderr)
for d in deps:
depmap[d].add((lib, path))
elif path == 'not found':
depmap['not found'].add((lib, path))
else:
depmap['system'].add((lib, path))
print_linkages(depmap)
| # (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import sys
import argparse
from collections import defaultdict
from conda.misc import which_package
from conda.lock import Locked
from conda_build.main_build import args_func
from conda_build.config import config
from conda_build.ldd import get_package_linkages
def main():
p = argparse.ArgumentParser(
description='tool for inspecting conda packages'
)
p.add_argument(
'packages',
action='store',
nargs='+',
help='conda packages to inspect',
)
p.add_argument(
'--linkages',
action="store_true",
help="inspect the linkages of the binary files in the package",
)
p.set_defaults(func=execute)
args = p.parse_args()
args_func(args, p)
def print_linkages(depmap):
# Print system and not found last
k = sorted(depmap.keys() - {'system', 'not found'})
for dep in k + ['system', 'not found']:
print(dep)
for lib, path in depmap[dep]:
print(" %s => %s" % (lib, path))
print()
def execute(args, parser):
with Locked(config.croot):
for pkg in args.packages:
if args.linkages:
linkages = get_package_linkages(pkg)
depmap = defaultdict(set)
for binary in linkages:
for lib, path in linkages[binary]:
if path.startswith(config.test_prefix):
deps = list(which_package(path))
if len(deps) > 1:
print("Warning: %s comes from multiple packages: %s" % (path, ' and '.join(deps)), file=sys.stderr)
for d in deps:
depmap[d].add((lib, path))
elif path == 'not found':
depmap['not found'].add((lib, path))
else:
depmap['system'].add((lib, path))
print_linkages(depmap)
| Python | 0.999998 |
ff9e3c6ef604a47a616e111ee2a90fda77692977 | Bump version to 3.3.2 | src/jukeboxmaya/__init__.py | src/jukeboxmaya/__init__.py | __author__ = 'David Zuber'
__email__ = 'zuber.david@gmx.de'
__version__ = '3.3.2'
STANDALONE_INITIALIZED = None
"""After calling :func:`init` this is True, if maya standalone
has been initialized or False, if you are running
from within maya.
It is None, if initialized has not been called yet.
"""
| __author__ = 'David Zuber'
__email__ = 'zuber.david@gmx.de'
__version__ = '3.3.1'
STANDALONE_INITIALIZED = None
"""After calling :func:`init` this is True, if maya standalone
has been initialized or False, if you are running
from within maya.
It is None, if initialized has not been called yet.
"""
| Python | 0.000002 |
a9c7a6e441159bdf1fd13d70bcc91617dee93f03 | revert revert. | lib/kodi65/selectdialog.py | lib/kodi65/selectdialog.py | # -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <phil65@kodi.tv>
# This program is Free Software see LICENSE file for details
import xbmcgui
import xbmc
from kodi65 import addon
C_LIST_SIMPLE = 3
C_LIST_DETAIL = 6
C_BUTTON_GET_MORE = 5
C_LABEL_HEADER = 1
class SelectDialog(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
xbmcgui.WindowXMLDialog.__init__(self)
self.items = kwargs.get('listing')
self.header = kwargs.get('header')
self.detailed = kwargs.get('detailed')
self.extrabutton = kwargs.get('extrabutton')
self.listitems = [i.get_listitem() for i in self.items] if self.items else []
self.index = -1
def onInit(self):
if not self.listitems:
self.index == -1
self.close()
self.list = self.getControl(C_LIST_DETAIL)
self.getControl(C_LIST_DETAIL).setVisible(self.detailed)
self.getControl(C_LIST_SIMPLE).setVisible(not self.detailed)
self.getControl(C_BUTTON_GET_MORE).setVisible(bool(self.extrabutton))
if self.extrabutton:
self.getControl(C_BUTTON_GET_MORE).setLabel(self.extrabutton)
self.getControl(C_LABEL_HEADER).setLabel(self.header)
self.list.addItems(self.listitems)
self.setFocus(self.list)
def onClick(self, control_id):
if control_id in [C_LIST_SIMPLE, C_LIST_DETAIL]:
self.index = int(self.list.getSelectedPosition())
elif control_id == C_BUTTON_GET_MORE:
self.index = -2
self.close()
def onFocus(self, control_id):
pass
def open(listitems, header, detailed=True, extrabutton=False):
"""
open selectdialog, return index (-1 for closing, -2 for extra button)
"""
xbmc.executebuiltin("Dialog.Close(busydialog)")
w = SelectDialog('DialogSelect.xml', addon.PATH,
listing=listitems,
header=header,
detailed=detailed,
extrabutton=extrabutton)
w.doModal()
return w.index
| # -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <phil65@kodi.tv>
# This program is Free Software see LICENSE file for details
import xbmcgui
import xbmc
from kodi65 import addon
C_LIST_SIMPLE = 3
C_LIST_DETAIL = 6
C_BUTTON_GET_MORE = 5
C_LABEL_HEADER = 1
class SelectDialog(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
xbmcgui.WindowXMLDialog.__init__(self)
self.items = kwargs.get('listing')
self.header = kwargs.get('header')
self.detailed = kwargs.get('detailed')
self.extrabutton = kwargs.get('extrabutton')
self.listitems = [i.get_listitem() for i in self.items] if self.items else []
self.index = -1
def onInit(self):
if not self.listitems:
self.index == -1
self.close()
elif len(self.listitems) == 1:
self.index == 0
self.close()
self.list = self.getControl(C_LIST_DETAIL)
self.getControl(C_LIST_DETAIL).setVisible(self.detailed)
self.getControl(C_LIST_SIMPLE).setVisible(not self.detailed)
self.getControl(C_BUTTON_GET_MORE).setVisible(bool(self.extrabutton))
if self.extrabutton:
self.getControl(C_BUTTON_GET_MORE).setLabel(self.extrabutton)
self.getControl(C_LABEL_HEADER).setLabel(self.header)
self.list.addItems(self.listitems)
self.setFocus(self.list)
def onClick(self, control_id):
if control_id in [C_LIST_SIMPLE, C_LIST_DETAIL]:
self.index = int(self.list.getSelectedPosition())
elif control_id == C_BUTTON_GET_MORE:
self.index = -2
self.close()
def onFocus(self, control_id):
pass
def open(listitems, header, detailed=True, extrabutton=False):
"""
open selectdialog, return index (-1 for closing, -2 for extra button)
"""
xbmc.executebuiltin("Dialog.Close(busydialog)")
w = SelectDialog('DialogSelect.xml', addon.PATH,
listing=listitems,
header=header,
detailed=detailed,
extrabutton=extrabutton)
w.doModal()
return w.index
| Python | 0.00001 |
7a83a9be7e2a986979cc898c3fd3aa3bb49442cc | modify dx model | cea/technologies/direct_expansion_units.py | cea/technologies/direct_expansion_units.py | # -*- coding: utf-8 -*-
"""
direct expansion units
"""
from __future__ import division
from scipy.interpolate import interp1d
from math import log, ceil
import pandas as pd
import numpy as np
from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK
__author__ = "Shanshan Hsieh"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Shanshan Hsieh"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
# FIXME: this model is simplified, and required update
PRICE_DX_PER_W = 1.6 #USD FIXME: to be moved to database
# operation costs
def calc_cop_DX(Q_load_W):
cop = 2.3
return cop
def calc_DX(mdot_kgpers, T_sup_K, T_re_K):
if np.isclose(mdot_kgpers, 0.0):
wdot_W = 0
else:
q_chw_W = mdot_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK * (T_re_K - T_sup_K)
cop_DX = calc_cop_DX(q_chw_W)
wdot_W = q_chw_W/cop_DX
return wdot_W
# investment and maintenance costs
def calc_Cinv_DX(Q_design_W):
"""
Assume the same cost as gas boilers.
:type Q_design_W : float
:param Q_design_W: Design Load of Boiler in [W]
:param gV: globalvar.py
:rtype InvCa : float
:returns InvCa: Annualized investment costs in CHF/a including Maintenance Cost
"""
Capex_a = 0
Opex_fixed = 0
if Q_design_W > 0:
InvC = Q_design_W * PRICE_DX_PER_W
Inv_IR = 5 / 100
Inv_LT = 25
Inv_OM = 5 / 100
Capex_a = InvC * (Inv_IR) * (1 + Inv_IR) ** Inv_LT / ((1 + Inv_IR) ** Inv_LT - 1)
Opex_fixed = Capex_a * Inv_OM
return Capex_a, Opex_fixed | # -*- coding: utf-8 -*-
"""
direct expansion units
"""
from __future__ import division
from scipy.interpolate import interp1d
from math import log, ceil
import pandas as pd
from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK
__author__ = "Shanshan Hsieh"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Shanshan Hsieh"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
# FIXME: this model is simplified, and required update
PRICE_DX_PER_W = 1.373 #USD FIXME: to be moved to database
# operation costs
def calc_cop_DX(Q_load_W):
cop = 2.7
return cop
def calc_DX(mdot_kgpers, T_sup_K, T_re_K):
q_chw_W = mdot_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK * (T_re_K - T_sup_K)
cop_DX = calc_cop_DX(q_chw_W)
wdot_W = q_chw_W/cop_DX
return wdot_W
# investment and maintenance costs
def calc_Cinv_DX(Q_design_W):
"""
Assume the same cost as gas boilers.
:type Q_design_W : float
:param Q_design_W: Design Load of Boiler in [W]
:param gV: globalvar.py
:rtype InvCa : float
:returns InvCa: Annualized investment costs in CHF/a including Maintenance Cost
"""
Capex_a = 0
Opex_fixed = 0
if Q_design_W > 0:
InvC = Q_design_W * PRICE_DX_PER_W
Inv_IR = 5 / 100
Inv_LT = 25
Inv_OM = 5 / 100
Capex_a = InvC * (Inv_IR) * (1 + Inv_IR) ** Inv_LT / ((1 + Inv_IR) ** Inv_LT - 1)
Opex_fixed = Capex_a * Inv_OM
return Capex_a, Opex_fixed | Python | 0 |
8b359d97e59d759bfd7711c8aacf9abc657fe457 | fix demo | pipeline/demo/pipeline-homo-data-split-demo.py | pipeline/demo/pipeline-homo-data-split-demo.py | from pipeline.component.homo_data_split import HomoDataSplit
from pipeline.backend.config import Backend
from pipeline.backend.config import WorkMode
from pipeline.backend.pipeline import PipeLine
from pipeline.component.dataio import DataIO
from pipeline.component.input import Input
from pipeline.interface.data import Data
guest = 9999
host = 10000
arbiter = 10002
guest_train_data = {"name": "breast_homo_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_homo_host", "namespace": "experiment"}
input_0 = Input(name="train_data")
print ("get input_0's init name {}".format(input_0.name))
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
dataio_0 = DataIO(name="dataio_0")
dataio_0.get_party_instance(role='guest', party_id=guest).algorithm_param(with_label=True, output_format="dense")
dataio_0.get_party_instance(role='host', party_id=host).algorithm_param(with_label=True)
homo_data_split_0 = HomoDataSplit(name="homo_data_split_0", stratified=True, test_size=0.2, validate_size=0.1)
print ("get input_0's name {}".format(input_0.name))
pipeline.add_component(dataio_0, data=Data(data=input_0.data))
pipeline.add_component(homo_data_split_0, data=Data(data=dataio_0.output.data))
pipeline.compile()
pipeline.fit(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
print (pipeline.get_component("dataio_0").get_model_param())
print (pipeline.get_component("homo_data_split_0").summary())
| from pipeline.component.homo_data_split import HomoDataSplit
from pipeline.backend.config import Backend
from pipeline.backend.config import WorkMode
from pipeline.backend.pipeline import PipeLine
from pipeline.component.dataio import DataIO
from pipeline.component.input import Input
from pipeline.interface.data import Data
guest = 9999
host = 10000
arbiter = 10002
guest_train_data = {"name": "breast_homo_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_homo_host", "namespace": "experiment"}
input_0 = Input(name="train_data")
print ("get input_0's init name {}".format(input_0.name))
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
dataio_0 = DataIO(name="dataio_0")
dataio_0.get_party_instance(role='guest', party_id=guest).algorithm_param(with_label=True, output_format="dense")
dataio_0.get_party_instance(role='host', party_id=host).algorithm_param(with_label=True)
homo_data_split_0 = HomoDataSplit(name="homo_data_split_0", stratified=True, test_size=0.2, validate_size=0.1)
print ("get input_0's name {}".format(input_0.name))
pipeline.add_component(dataio_0, data=Data(data=input_0.data))
pipeline.add_component(homo_data_split_0, data=Data(data=dataio_0.output.data))
pipeline.compile()
pipeline.fit(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
# predict
pipeline.predict(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
print (pipeline.get_component("dataio_0").get_model_param())
print (pipeline.get_component("homo_data_split_0").summary())
| Python | 0 |
7ad707e722eabefc989cfa41fbf17c8315d948fd | Add optional parameters for Django fields. | oauth2client/django_orm.py | oauth2client/django_orm.py | # Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utilities for Django.
Utilities for using OAuth 2.0 in conjunction with
the Django datastore.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import oauth2client
import base64
import pickle
from django.db import models
from oauth2client.client import Storage as BaseStorage
class CredentialsField(models.Field):
__metaclass__ = models.SubfieldBase
def db_type(self, connection=None):
return 'VARCHAR'
def to_python(self, value):
if not value:
return None
if isinstance(value, oauth2client.client.Credentials):
return value
return pickle.loads(base64.b64decode(value))
def get_db_prep_value(self, value):
return base64.b64encode(pickle.dumps(value))
class FlowField(models.Field):
__metaclass__ = models.SubfieldBase
def db_type(self, connection=None):
return 'VARCHAR'
def to_python(self, value):
if value is None:
return None
if isinstance(value, oauth2client.client.Flow):
return value
return pickle.loads(base64.b64decode(value))
def get_db_prep_value(self, value):
return base64.b64encode(pickle.dumps(value))
class Storage(BaseStorage):
"""Store and retrieve a single credential to and from
the datastore.
This Storage helper presumes the Credentials
have been stored as a CredenialsField
on a db model class.
"""
def __init__(self, model_class, key_name, key_value, property_name):
"""Constructor for Storage.
Args:
model: db.Model, model class
key_name: string, key name for the entity that has the credentials
key_value: string, key value for the entity that has the credentials
property_name: string, name of the property that is an CredentialsProperty
"""
self.model_class = model_class
self.key_name = key_name
self.key_value = key_value
self.property_name = property_name
def get(self):
"""Retrieve Credential from datastore.
Returns:
oauth2client.Credentials
"""
credential = None
query = {self.key_name: self.key_value}
entities = self.model_class.objects.filter(**query)
if len(entities) > 0:
credential = getattr(entities[0], self.property_name)
if credential and hasattr(credential, 'set_store'):
credential.set_store(self.put)
return credential
def put(self, credentials):
"""Write a Credentials to the datastore.
Args:
credentials: Credentials, the credentials to store.
"""
args = {self.key_name: self.key_value}
entity = self.model_class(**args)
setattr(entity, self.property_name, credentials)
entity.save()
| # Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utilities for Django.
Utilities for using OAuth 2.0 in conjunction with
the Django datastore.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import oauth2client
import base64
import pickle
from django.db import models
from oauth2client.client import Storage as BaseStorage
class CredentialsField(models.Field):
__metaclass__ = models.SubfieldBase
def db_type(self):
return 'VARCHAR'
def to_python(self, value):
if not value:
return None
if isinstance(value, oauth2client.client.Credentials):
return value
return pickle.loads(base64.b64decode(value))
def get_db_prep_value(self, value):
return base64.b64encode(pickle.dumps(value))
class FlowField(models.Field):
__metaclass__ = models.SubfieldBase
def db_type(self):
return 'VARCHAR'
def to_python(self, value):
if value is None:
return None
if isinstance(value, oauth2client.client.Flow):
return value
return pickle.loads(base64.b64decode(value))
def get_db_prep_value(self, value):
return base64.b64encode(pickle.dumps(value))
class Storage(BaseStorage):
"""Store and retrieve a single credential to and from
the datastore.
This Storage helper presumes the Credentials
have been stored as a CredenialsField
on a db model class.
"""
def __init__(self, model_class, key_name, key_value, property_name):
"""Constructor for Storage.
Args:
model: db.Model, model class
key_name: string, key name for the entity that has the credentials
key_value: string, key value for the entity that has the credentials
property_name: string, name of the property that is an CredentialsProperty
"""
self.model_class = model_class
self.key_name = key_name
self.key_value = key_value
self.property_name = property_name
def get(self):
"""Retrieve Credential from datastore.
Returns:
oauth2client.Credentials
"""
credential = None
query = {self.key_name: self.key_value}
entities = self.model_class.objects.filter(**query)
if len(entities) > 0:
credential = getattr(entities[0], self.property_name)
if credential and hasattr(credential, 'set_store'):
credential.set_store(self.put)
return credential
def put(self, credentials):
"""Write a Credentials to the datastore.
Args:
credentials: Credentials, the credentials to store.
"""
args = {self.key_name: self.key_value}
entity = self.model_class(**args)
setattr(entity, self.property_name, credentials)
entity.save()
| Python | 0 |
ed48555984886ff5ade23aeb23ad5f85e77e5b69 | fix docs | chainercv/transforms/image/pca_lighting.py | chainercv/transforms/image/pca_lighting.py | import numpy
def pca_lighting(img, sigma, eigen_value=None, eigen_vector=None):
"""Alter the intensities of input image using PCA.
This is used in training of AlexNet [Krizhevsky]_.
.. [Krizhevsky] Alex Krizhevsky, Ilya Sutskever, Geoffrey E. Hinton. \
ImageNet Classification with Deep Convolutional Neural Networks. \
NIPS 2012.
Args:
image (numpy.ndarray): An image array to be augmented. This is in
CHW format.
sigma (float): Standard deviation of the Gaussian. In AlexNet
[Krizhevsky]_, this value is 10% of the range of intensity
(25.5 if the range is [0, 255]).
eigen_value: (numpy.ndarray): An array of eigen values. The shape
have to be (3,). If it is not specified, the values computed from
ImageNet are used.
eigen_vector: (numpy.ndarray): An array of eigen vectors. The shape
have to be (3, 3). If it is not specified, the vectors computed
from ImageNet are used.
Returns:
An image in CHW format.
"""
if sigma <= 0:
return img
# these values are copied from facebook/fb.resnet.torch
if eigen_value is None:
eigen_value = numpy.array((0.2175, 0.0188, 0.0045))
if eigen_vector is None:
eigen_vector = numpy.array((
(0.4009, -0.814, 0.4203),
(0.7192, -0.0045, -0.6948),
(-0.5675, -0.5808, -0.5836)))
alpha = numpy.random.normal(0, sigma, size=3)
img = img.copy()
img += eigen_vector.dot(eigen_value * alpha).reshape(-1, 1, 1)
return img
| import numpy
def pca_lighting(img, sigma, eigen_value=None, eigen_vector=None):
"""Alter the intensities of input image using PCA.
This is used in training of AlexNet [Krizhevsky]_.
.. [Krizhevsky] Alex Krizhevsky, Ilya Sutskever, Geoffrey E. Hinton. \
ImageNet Classification with Deep Convolutional Neural Networks. \
NIPS 2012.
Args:
image (numpy.ndarray): An image array to be augmented. This is in
CHW format.
sigma (float): Standard deviation of the Gaussian. In AlexNet
[Krizhevsky]_, this value is 10% of the range of intensity
(25.5 if the range is [0, 255]).
eigen_value: (numpy.ndarray): An array of eigen values. The shape
have to be (3,). If it is not specified, the values computed from
ImageNet is used.
eigen_vector: (numpy.ndarray): An array of eigen vectors. The shape
have to be (3, 3). If it is not specified, the vectors computed
from ImageNet is used.
Returns:
An image in CHW format.
"""
if sigma <= 0:
return img
# these values are copied from facebook/fb.resnet.torch
if eigen_value is None:
eigen_value = numpy.array((0.2175, 0.0188, 0.0045))
if eigen_vector is None:
eigen_vector = numpy.array((
(0.4009, -0.814, 0.4203),
(0.7192, -0.0045, -0.6948),
(-0.5675, -0.5808, -0.5836)))
alpha = numpy.random.normal(0, sigma, size=3)
img = img.copy()
img += eigen_vector.dot(eigen_value * alpha).reshape(-1, 1, 1)
return img
| Python | 0.000001 |
6f04f1ed35635c08836f1eee67983abf9735f5db | handle more exceptions | channelstream/wsgi_views/error_handlers.py | channelstream/wsgi_views/error_handlers.py | from pyramid.view import exception_view_config
@exception_view_config(context='marshmallow.ValidationError', renderer='json')
def marshmallow_invalid_data(context, request):
request.response.status = 422
return context.messages
@exception_view_config(context='itsdangerous.BadTimeSignature', renderer='json')
@exception_view_config(context='itsdangerous.BadSignature', renderer='json')
def itsdangerous_signer_error(context, request):
request.response.status = 401
return {'request': 'Bad Signature'}
| from pyramid.view import exception_view_config
@exception_view_config(context='marshmallow.ValidationError', renderer='json')
def marshmallow_invalid_data(context, request):
request.response.status = 422
return context.messages
@exception_view_config(context='itsdangerous.BadTimeSignature', renderer='json')
def itsdangerous_signer_error(context, request):
request.response.status = 401
return {'request': 'Bad Signature'}
| Python | 0.000002 |
155fd9ae952a4eba53521739589d5e3462108ed2 | remove default statement per Gunther's comment | chatterbot/ext/django_chatterbot/models.py | chatterbot/ext/django_chatterbot/models.py | from django.db import models
class Statement(models.Model):
"""A short (<255) chat message, tweet, forum post, etc"""
text = models.CharField(
unique=True,
blank=False,
null=False,
max_length=255
)
def __str__(self):
if len(self.text.strip()) > 60:
return '{}...'.format(self.text[:57])
elif len(self.text.strip()) > 0:
return self.text
return '<empty>'
class Response(models.Model):
"""Connection between a response and the statement that triggered it
Comparble to a ManyToMany "through" table, but without the M2M indexing/relations.
Only the text and number of times it has occurred are currently stored.
Might be useful to store additional features like language, location(s)/region(s),
first created datetime(s), username, user full name, user gender, etc.
A the very least occurrences should be an FK to a meta-data table with this info.
"""
statement = models.ForeignKey(
'Statement',
related_name='in_response_to'
)
response = models.ForeignKey(
'Statement',
related_name='+'
)
unique_together = (('statement', 'response'),)
occurrence = models.PositiveIntegerField(default=0)
def __str__(self):
s = self.statement.text if len(self.statement.text) <= 20 else self.statement.text[:17] + '...'
s += ' => '
s += self.response.text if len(self.response.text) <= 40 else self.response.text[:37] + '...'
return s
| from django.db import models
class Statement(models.Model):
"""A short (<255) chat message, tweet, forum post, etc"""
text = models.CharField(
unique=True,
blank=False,
null=False,
default='<empty>',
max_length=255
)
def __str__(self):
if len(self.text.strip()) > 60:
return '{}...'.format(self.text[:57])
elif len(self.text.strip()) > 0:
return self.text
return '<empty>'
class Response(models.Model):
"""Connection between a response and the statement that triggered it
Comparble to a ManyToMany "through" table, but without the M2M indexing/relations.
Only the text and number of times it has occurred are currently stored.
Might be useful to store additional features like language, location(s)/region(s),
first created datetime(s), username, user full name, user gender, etc.
A the very least occurrences should be an FK to a meta-data table with this info.
"""
statement = models.ForeignKey(
'Statement',
related_name='in_response_to'
)
response = models.ForeignKey(
'Statement',
related_name='+'
)
unique_together = (('statement', 'response'),)
occurrence = models.PositiveIntegerField(default=0)
def __str__(self):
s = self.statement.text if len(self.statement.text) <= 20 else self.statement.text[:17] + '...'
s += ' => '
s += self.response.text if len(self.response.text) <= 40 else self.response.text[:37] + '...'
return s
| Python | 0 |
4d81c88627b0f71c765112b9a814fe876239bcc5 | Print stats for constant points to. | src/main/copper/analysis.py | src/main/copper/analysis.py | import os
from .project import ProjectManager
from .analysis_steps import *
from .analysis_stats import AnalysisStatisticsBuilder as StatBuilder
class Analysis(object):
def __init__(self, config, projects=ProjectManager()):
self.logger = logging.getLogger(__name__)
self._config = config
self._stats = None
self._pipeline = [
CleaningStep(),
FactGenerationStep(),
DatabaseCreationStep(),
SanityCheckStep(projects.SCHEMA),
LoadProjectStep(projects.SYMBOL_LOOKUP),
LoadProjectStep(projects.CALLGRAPH),
LoadProjectStep(projects.POINTS_TO),
]
@property
def pipeline(self):
return [step.check() for step in self._pipeline]
@property
def stats(self):
# Compute stats if needed
if self._stats is None:
self.compute_stats()
return self._stats
@property
def input_files(self):
return [os.path.abspath(f) for f in self._config.input_files]
@property
def output_directory(self):
return os.path.abspath(self._config.output_directory)
@property
def facts_directory(self):
return os.path.join(self.output_directory, 'facts')
@property
def database_directory(self):
return os.path.join(self.output_directory, 'db')
def load_project(self, project):
LoadProjectStep(project).apply(self)
def run(self):
# Run each step of pipeline
for step in self.pipeline:
step.apply(self)
# Compute stats
self.compute_stats()
def compute_stats(self):
self._stats = (
StatBuilder(self)
.count('instruction')
.count('reachable_function')
.count('callgraph:fn_edge', 'call-graph edges')
.count('var_points_to', 'var-points-to')
.count('constant_points_to', 'constant-points-to')
.count('ptr_points_to', 'ptr-points-to')
.build()
)
| import os
from .project import ProjectManager
from .analysis_steps import *
from .analysis_stats import AnalysisStatisticsBuilder as StatBuilder
class Analysis(object):
def __init__(self, config, projects=ProjectManager()):
self.logger = logging.getLogger(__name__)
self._config = config
self._stats = None
self._pipeline = [
CleaningStep(),
FactGenerationStep(),
DatabaseCreationStep(),
SanityCheckStep(projects.SCHEMA),
LoadProjectStep(projects.SYMBOL_LOOKUP),
LoadProjectStep(projects.CALLGRAPH),
LoadProjectStep(projects.POINTS_TO),
]
@property
def pipeline(self):
return [step.check() for step in self._pipeline]
@property
def stats(self):
# Compute stats if needed
if self._stats is None:
self.compute_stats()
return self._stats
@property
def input_files(self):
return [os.path.abspath(f) for f in self._config.input_files]
@property
def output_directory(self):
return os.path.abspath(self._config.output_directory)
@property
def facts_directory(self):
return os.path.join(self.output_directory, 'facts')
@property
def database_directory(self):
return os.path.join(self.output_directory, 'db')
def load_project(self, project):
LoadProjectStep(project).apply(self)
def run(self):
# Run each step of pipeline
for step in self.pipeline:
step.apply(self)
# Compute stats
self.compute_stats()
def compute_stats(self):
self._stats = (
StatBuilder(self)
.count('instruction')
.count('reachable_function')
.count('callgraph:fn_edge', 'call-graph edges')
.count('var_points_to', 'var-points-to')
.count('ptr_points_to', 'ptr-points-to')
.build()
)
| Python | 0 |
4a1b670cd49f458c44bed638e2f9ecace211883a | fix and update first user as admin | websitemixer/plugins/Install/Setup.py | websitemixer/plugins/Install/Setup.py | import os
from flask import render_template, request, redirect
from websitemixer import app, db, models
@app.route('/setup/step1/')
def setup1():
return render_template("Install/step1.html")
@app.route('/setup/step2/',methods=['POST'])
def setup2():
secretkey = os.urandom(24).encode('hex')
appname = request.form['appname']
dbname = request.form['dbname']
dbuser = request.form['dbuser']
dbpwd = request.form['dbpwd']
dbsrv = request.form['dbsrv']
with open('config.py', 'w') as file:
file.seek(0)
file.truncate()
file.write("import os\n")
file.write("basedir = os.path.abspath(os.path.dirname(__file__))\n\n")
file.write("SECRET_KEY = '"+secretkey+"'\n")
file.write("UPLOAD_FOLDER = basedir+'/websitemixer/static/upload/'\n")
file.write("ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'zip'])\n\n")
if request.form['dbmeth'] == 'mysql':
file.write("SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://"+dbuser+":"+dbpwd+"@"+dbsrv+":3306/"+dbname+"'\n")
elif request.form['dbmeth'] == 'postgres':
file.write("SQLALCHEMY_DATABASE_URI = 'postgresql://"+dbuser+":"+dbpwd+"@"+dbsrv+":5432/"+dbname+"'\n")
else:
file.write("SQLALCHEMY_DATABASE_URI = 'sqlite:///'+os.path.join(basedir,'"+appname+".db')\n")
file.close()
return render_template("Install/step2.html")
@app.route('/setup/step3/',methods=['POST'])
def setup3():
db.drop_all()
db.create_all()
sitename = request.form['sitename']
sitedesc = request.form['sitedesc']
admuser = request.form['admuser']
admpwd1 = request.form['admpwd1']
admpwd2 = request.form['admpwd2']
admemail = request.form['admemail']
a = models.User(admuser, admpwd1, admemail)
db.session.add(a)
update = models.User.query.filter_by(username=admuser).update(dict(admin=1))
a = models.Setting('siteName',sitename)
db.session.add(a)
a = models.Setting('siteSubheading',sitedesc)
db.session.add(a)
a = models.Setting('theme','Base')
db.session.add(a)
a = models.Post(admuser, 'Hello World!', '/hello-world/', '<p>This is your first post! You can delete this and start posting!</p>', '', '', 'Hello World, Welcome')
db.session.add(a)
a = models.Page('About', '/about/', '<p>It\'s an about page!</p>', '', '')
db.session.add(a)
a = models.Page('Contact', '/contact/', '<p>It\'s a contact page!</p>', '', '')
db.session.add(a)
db.session.commit()
return redirect('/')
| import os
from flask import render_template, request, redirect
from websitemixer import app, db, models
@app.route('/setup/step1/')
def setup1():
return render_template("Install/step1.html")
@app.route('/setup/step2/',methods=['POST'])
def setup2():
secretkey = os.urandom(24).encode('hex')
appname = request.form['appname']
dbname = request.form['dbname']
dbuser = request.form['dbuser']
dbpwd = request.form['dbpwd']
dbsrv = request.form['dbsrv']
with open('config.py', 'w') as file:
file.seek(0)
file.truncate()
file.write("import os\n")
file.write("basedir = os.path.abspath(os.path.dirname(__file__))\n\n")
file.write("SECRET_KEY = '"+secretkey+"'\n")
file.write("UPLOAD_FOLDER = basedir+'/websitemixer/static/upload/'\n")
file.write("ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'zip'])\n\n")
if request.form['dbmeth'] == 'mysql':
file.write("SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://"+dbuser+":"+dbpwd+"@"+dbsrv+":3306/"+dbname+"'\n")
elif request.form['dbmeth'] == 'postgres':
file.write("SQLALCHEMY_DATABASE_URI = 'postgresql://"+dbuser+":"+dbpwd+"@"+dbsrv+":5432/"+dbname+"'\n")
else:
file.write("SQLALCHEMY_DATABASE_URI = 'sqlite:///'+os.path.join(basedir,'"+appname+".db')\n")
file.close()
return render_template("Install/step2.html")
@app.route('/setup/step3/',methods=['POST'])
def setup3():
db.drop_all()
db.create_all()
sitename = request.form['sitename']
sitedesc = request.form['sitedesc']
admuser = request.form['admuser']
admpwd1 = request.form['admpwd1']
admpwd2 = request.form['admpwd2']
admemail = request.form['admemail']
a = models.User(admuser, admpwd1, admemail)
db.session.add(a)
update = Setting.query.filter_by(username=admuser).update(dict(is_admin=1))
a = models.Setting('siteName',sitename)
db.session.add(a)
a = models.Setting('siteSubheading',sitedesc)
db.session.add(a)
a = models.Setting('theme','Base')
db.session.add(a)
a = models.Post(admuser, 'Hello World!', '/hello-world/', '<p>This is your first post! You can delete this and start posting!</p>', '', '', 'Hello World, Welcome')
db.session.add(a)
a = models.Page('About', '/about/', '<p>It\'s an about page!</p>', '', '')
db.session.add(a)
a = models.Page('Contact', '/contact/', '<p>It\'s a contact page!</p>', '', '')
db.session.add(a)
db.session.commit()
return redirect('/')
| Python | 0 |
9fec79f71f6dbf80d11989fbbfc2bed43668b75d | Use skimage for circle definition | python/thunder/extraction/feature/methods/localmax.py | python/thunder/extraction/feature/methods/localmax.py | from numpy import cos, sin, pi, array, sqrt
from thunder.extraction.feature.base import FeatureMethod, FeatureAlgorithm
from thunder.extraction.feature.creators import MeanFeatureCreator
from thunder.extraction.source import SourceModel, Source
class LocalMax(FeatureMethod):
def __init__(self, **kwargs):
algorithm = LocalMaxFeatureAlgorithm(**kwargs)
creator = MeanFeatureCreator()
super(self.__class__, self).__init__(algorithm, creator, **kwargs)
class LocalMaxFeatureAlgorithm(FeatureAlgorithm):
"""
Find sources by identifying local maxima in an array.
Will first find source centers, and then automatically define
a circle around each center using the specified radius and resolution
Parameters
----------
minDistance : int, optional, default = 10
Minimum distance between source centers
maxSources : int, optional, deafut = None
Maximum number of sources
radius : scalar, optional, default=5
Radius of circles defined around centers
res : scalar, optional, deafult=10
Number of points to use to define circles around centers
"""
def __init__(self, minDistance=10, maxSources=None, radius=5, res=10, **extra):
self.minDistance = minDistance
if self.minDistance < 1:
raise Exception("Cannot set minDistance less than 1, got %s" % minDistance)
self.maxSources = maxSources
self.radius = radius
self.res = res
def extract(self, im):
from numpy import ones, concatenate
from skimage.feature import peak_local_max
from skimage.draw import circle
# extract local peaks
if im.ndim == 2:
peaks = peak_local_max(im, min_distance=self.minDistance, num_peaks=self.maxSources).tolist()
else:
peaks = []
for i in range(0, im.shape[2]):
tmp = peak_local_max(im[:, :, i], min_distance=self.minDistance, num_peaks=self.maxSources)
peaks = peaks.append(concatenate((tmp, ones((len(tmp), 1)) * i), axis=1))
# construct circular regions from peak points
def pointToCircle(center, radius):
rr, cc = circle(center[0], center[1], radius)
return array(zip(rr, cc))
# return circles as sources
circles = [pointToCircle(p, self.radius) for p in peaks]
return SourceModel([Source(c) for c in circles]) | from numpy import cos, sin, pi, array, sqrt
from thunder.extraction.feature.base import FeatureMethod, FeatureAlgorithm
from thunder.extraction.feature.creators import MeanFeatureCreator
from thunder.extraction.source import SourceModel, Source
class LocalMax(FeatureMethod):
def __init__(self, **kwargs):
algorithm = LocalMaxFeatureAlgorithm(**kwargs)
creator = MeanFeatureCreator()
super(self.__class__, self).__init__(algorithm, creator, **kwargs)
class LocalMaxFeatureAlgorithm(FeatureAlgorithm):
"""
Find sources by identifying local maxima in an array.
Will first find source centers, and then automatically define
a circle around each center using the specified radius and resolution
Parameters
----------
minDistance : int, optional, default = 10
Minimum distance between source centers
maxSources : int, optional, deafut = None
Maximum number of sources
radius : scalar, optional, default=5
Radius of circles defined around centers
res : scalar, optional, deafult=10
Number of points to use to define circles around centers
"""
def __init__(self, minDistance=10, maxSources=None, radius=5, res=10, **extra):
self.minDistance = minDistance
if self.minDistance < 1:
raise Exception("Cannot set minDistance less than 1, got %s" % minDistance)
self.maxSources = maxSources
self.radius = radius
self.res = res
def extract(self, im):
"""
Extract sources from an image by finding local maxima.
Parameters
----------
im : ndarray
The image or volume
Returns
-------
A SourceModel with circular regions.
"""
from numpy import ones, concatenate
from skimage.feature import peak_local_max
# extract local peaks
if im.ndim == 2:
peaks = peak_local_max(im, min_distance=self.minDistance, num_peaks=self.maxSources).tolist()
else:
peaks = []
for i in range(0, im.shape[2]):
tmp = peak_local_max(im[:, :, i], min_distance=self.minDistance, num_peaks=self.maxSources)
peaks = peaks.append(concatenate((tmp, ones((len(tmp), 1)) * i), axis=1))
# estimate circular regions from peak points
def pointToCircle(center, radius):
ccol = center[0]
crow = center[1]
r2 = radius * radius
colrange = range(center[0] - radius + 1, center[0] + radius)
rowrange = range(center[1] - radius + 1, center[1] + radius)
pts = [[([c, r], radius - sqrt((c - ccol) ** 2 + (r - crow) ** 2))
for c in colrange if ((c - ccol) ** 2 + (r - crow) ** 2 < r2)] for r in rowrange]
pts = concatenate(array(pts))
k = map(lambda p: p[0], pts)
v = map(lambda p: p[1], pts)
return k, v
sources = [pointToCircle(p, self.radius) for p in peaks]
return SourceModel([Source(s[0], s[1]) for s in sources]) | Python | 0 |
40fe16d058d18d2384be464ecefed1028edace17 | Fix error on SASL PLAIN authentication | txircd/modules/ircv3_sasl_plain.py | txircd/modules/ircv3_sasl_plain.py | from txircd.modbase import Module
from base64 import b64decode
class SaslPlainMechanism(Module):
def authenticate(self, user, authentication):
try:
authenticationID, authorizationID, password = b64decode(authentication[0]).split("\0")
except TypeError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
except ValueError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
if "server_sasl_agent" not in self.ircd.servconfig or self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
return self.ircd.module_data_cache["sasl_agent"].authenticate(user, authenticationid=authenticationID, authorizationid=authorizationID, password=password)
# TODO: The rest of this doesn't really make sense until s2s, but we'll return false for now since it's failing
return False
def bindSaslResult(self, user, successFunction, failureFunction):
if "server_sasl_agent" not in self.ircd.servconfig or self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return
self.ircd.module_data_cache["sasl_agent"].bindSaslResult(user, successFunction, failureFunction)
# TODO: server_sasl_agent stuff when s2s
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "sasl_mechanisms" not in self.ircd.module_data_cache:
self.ircd.module_data_cache["sasl_mechanisms"] = {}
self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] = SaslPlainMechanism().hook(self.ircd)
return {}
def cleanup(self):
del self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] | from txircd.modbase import Module
from base64 import b64decode
class SaslPlainMechanism(Module):
def authenticate(self, user, authentication):
try:
authenticationID, authorizationID, password = b64decode(authentication[0]).split("\0")
except TypeError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
except ValueError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
if self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
return self.ircd.module_data_cache["sasl_agent"].authenticate(user, authenticationid=authenticationID, authorizationid=authorizationID, password=password)
# TODO: The rest of this doesn't really make sense until s2s, but we'll return false for now since it's failing
return False
def bindSaslResult(self, user, successFunction, failureFunction):
if self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return
self.ircd.module_data_cache["sasl_agent"].bindSaslResult(user, successFunction, failureFunction)
# TODO: server_sasl_agent stuff when s2s
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "sasl_mechanisms" not in self.ircd.module_data_cache:
self.ircd.module_data_cache["sasl_mechanisms"] = {}
self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] = SaslPlainMechanism().hook(self.ircd)
return {}
def cleanup(self):
del self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] | Python | 0.000003 |
026db0e635f0c82e1b24884cb768d53b7fadfc0c | use lots of connections for the pool | feedly/storage/cassandra/connection.py | feedly/storage/cassandra/connection.py | from pycassa.pool import ConnectionPool
def get_cassandra_connection(keyspace_name, hosts):
if get_cassandra_connection._connection is None:
get_cassandra_connection._connection = ConnectionPool(
keyspace_name, hosts, pool_size=len(hosts)*24,
prefill=False, timeout=10)
return get_cassandra_connection._connection
get_cassandra_connection._connection = None
| from pycassa.pool import ConnectionPool
def get_cassandra_connection(keyspace_name, hosts):
if get_cassandra_connection._connection is None:
get_cassandra_connection._connection = ConnectionPool(
keyspace_name, hosts)
return get_cassandra_connection._connection
get_cassandra_connection._connection = None
| Python | 0 |
4e2affde042fab083ec24ec8d6e04ba2f45d1f7d | add utcnow to if conditional evaluation | flexget/plugins/filter/if_condition.py | flexget/plugins/filter/if_condition.py | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.moves import builtins
import logging
import datetime
from copy import copy
from jinja2 import UndefinedError
from flexget import plugin
from flexget.event import event
from flexget.task import Task
from flexget.entry import Entry
from flexget.utils.template import evaluate_expression
log = logging.getLogger('if')
class FilterIf(object):
"""Can run actions on entries that satisfy a given condition.
Actions include accept, reject, and fail, as well as the ability to run other filter plugins on the entries."""
schema = {
'type': 'array',
'items': {
'type': 'object',
'additionalProperties': {
'anyOf': [
{'$ref': '/schema/plugins'},
{'enum': ['accept', 'reject', 'fail']}
]
}
}
}
def check_condition(self, condition, entry):
"""Checks if a given `entry` passes `condition`"""
# Make entry fields and other utilities available in the eval namespace
# We need our namespace to be an Entry instance for lazy loading to work
eval_locals = copy(entry)
eval_locals.update({'has_field': lambda f: f in entry,
'timedelta': datetime.timedelta,
'utcnow': datetime.datetime.utcnow(),
'now': datetime.datetime.now()})
try:
# Restrict eval namespace to have no globals and locals only from eval_locals
passed = evaluate_expression(condition, eval_locals)
if passed:
log.debug('%s matched requirement %s' % (entry['title'], condition))
return passed
except UndefinedError as e:
# Extract the name that did not exist
missing_field = e.args[0].split('\'')[1]
log.debug('%s does not contain the field %s' % (entry['title'], missing_field))
except Exception as e:
log.error('Error occurred while evaluating statement `%s`. (%s)' % (condition, e))
def __getattr__(self, item):
"""Provides handlers for all phases."""
for phase, method in plugin.phase_methods.items():
if item == method and phase not in ['accept', 'reject', 'fail', 'input']:
break
else:
raise AttributeError(item)
def handle_phase(task, config):
entry_actions = {
'accept': Entry.accept,
'reject': Entry.reject,
'fail': Entry.fail}
for item in config:
requirement, action = list(item.items())[0]
passed_entries = (e for e in task.entries if self.check_condition(requirement, e))
if isinstance(action, str):
if not phase == 'filter':
continue
# Simple entry action (accept, reject or fail) was specified as a string
for entry in passed_entries:
entry_actions[action](entry, 'Matched requirement: %s' % requirement)
else:
# Other plugins were specified to run on this entry
fake_task = Task(task.manager, task.name, config=action, options=task.options)
fake_task.session = task.session
# This entry still belongs to our feed, accept/reject etc. will carry through.
fake_task.all_entries[:] = passed_entries
methods = {}
for plugin_name, plugin_config in action.items():
p = plugin.get_plugin_by_name(plugin_name)
method = p.phase_handlers.get(phase)
if method:
methods[method] = (fake_task, plugin_config)
# Run the methods in priority order
for method in sorted(methods, reverse=True):
method(*methods[method])
handle_phase.priority = 80
return handle_phase
@event('plugin.register')
def register_plugin():
plugin.register(FilterIf, 'if', api_ver=2)
| from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.moves import builtins
import logging
import datetime
from copy import copy
from jinja2 import UndefinedError
from flexget import plugin
from flexget.event import event
from flexget.task import Task
from flexget.entry import Entry
from flexget.utils.template import evaluate_expression
log = logging.getLogger('if')
class FilterIf(object):
"""Can run actions on entries that satisfy a given condition.
Actions include accept, reject, and fail, as well as the ability to run other filter plugins on the entries."""
schema = {
'type': 'array',
'items': {
'type': 'object',
'additionalProperties': {
'anyOf': [
{'$ref': '/schema/plugins'},
{'enum': ['accept', 'reject', 'fail']}
]
}
}
}
def check_condition(self, condition, entry):
"""Checks if a given `entry` passes `condition`"""
# Make entry fields and other utilities available in the eval namespace
# We need our namespace to be an Entry instance for lazy loading to work
eval_locals = copy(entry)
eval_locals.update({'has_field': lambda f: f in entry,
'timedelta': datetime.timedelta,
'now': datetime.datetime.now()})
try:
# Restrict eval namespace to have no globals and locals only from eval_locals
passed = evaluate_expression(condition, eval_locals)
if passed:
log.debug('%s matched requirement %s' % (entry['title'], condition))
return passed
except UndefinedError as e:
# Extract the name that did not exist
missing_field = e.args[0].split('\'')[1]
log.debug('%s does not contain the field %s' % (entry['title'], missing_field))
except Exception as e:
log.error('Error occurred while evaluating statement `%s`. (%s)' % (condition, e))
def __getattr__(self, item):
"""Provides handlers for all phases."""
for phase, method in plugin.phase_methods.items():
if item == method and phase not in ['accept', 'reject', 'fail', 'input']:
break
else:
raise AttributeError(item)
def handle_phase(task, config):
entry_actions = {
'accept': Entry.accept,
'reject': Entry.reject,
'fail': Entry.fail}
for item in config:
requirement, action = list(item.items())[0]
passed_entries = (e for e in task.entries if self.check_condition(requirement, e))
if isinstance(action, str):
if not phase == 'filter':
continue
# Simple entry action (accept, reject or fail) was specified as a string
for entry in passed_entries:
entry_actions[action](entry, 'Matched requirement: %s' % requirement)
else:
# Other plugins were specified to run on this entry
fake_task = Task(task.manager, task.name, config=action, options=task.options)
fake_task.session = task.session
# This entry still belongs to our feed, accept/reject etc. will carry through.
fake_task.all_entries[:] = passed_entries
methods = {}
for plugin_name, plugin_config in action.items():
p = plugin.get_plugin_by_name(plugin_name)
method = p.phase_handlers.get(phase)
if method:
methods[method] = (fake_task, plugin_config)
# Run the methods in priority order
for method in sorted(methods, reverse=True):
method(*methods[method])
handle_phase.priority = 80
return handle_phase
@event('plugin.register')
def register_plugin():
plugin.register(FilterIf, 'if', api_ver=2)
| Python | 0.000001 |
fb4b9e4570c4053204304fc934d0fe816d4c056d | add new split dictionary and dependencies | tests/resources/dictionaries/transaction_dictionary.py | tests/resources/dictionaries/transaction_dictionary.py | # -*- coding: utf-8 -*-
from tests.resources.dictionaries import card_dictionary
from tests.resources.dictionaries import customer_dictionary
from tests.resources.dictionaries import recipient_dictionary
from tests.resources import pagarme_test
from pagarme import recipient
BOLETO_TRANSACTION = {'amount': '10000', 'payment_method': 'boleto'}
CALCULATE_INTALLMENTS_AMOUNT = {'amount': '10000', 'free_installments': "1", 'interest_rate': '13',
'max_installments': '12'}
PAY_BOLETO = {'status': 'paid'}
REFUNDED_OR_CAPTURE_TRANSACTION = {'amount': '10000'}
RECIPIENT = recipient.create(recipient_dictionary.RECIPIENT_DICTIONARY)
SPLIT_RULE_PERCENTAGE = {'recipient_id': RECIPIENT['id'], 'percentage': 100, 'liable': 'true',
'charge_processing_fee': 'true'}
BOLETO_TRANSACTION_SPLIT = {'amount': BOLETO_TRANSACTION['amount'], 'payment_method': BOLETO_TRANSACTION['payment_method'],
'split_rules':[SPLIT_RULE_PERCENTAGE]}
INVALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount': '10000',
'card_number': card_dictionary.INVALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.INVALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.INVALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.INVALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_CAPTURE_FALSE_DICTIONARY = {'amount': '10000', 'capture': 'false',
'card_number': card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount': '10000',
'card_number': card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION__WITH_POSTBACK_DICTIONARY = {'amount': '10000',
'card_number': card_dictionary.VALID_CARD_DICTIONARY['card_number'], 'postback_url': pagarme_test.create_postback_url(),
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
| # -*- coding: utf-8 -*-
from tests.resources.dictionaries import card_dictionary
from tests.resources.dictionaries import customer_dictionary
from tests.resources import pagarme_test
BOLETO_TRANSACTION = {'amount': '10000', 'payment_method': 'boleto'}
CALCULATE_INTALLMENTS_AMOUNT = {'amount': '10000', 'free_installments': "1", 'interest_rate': '13',
'max_installments': '12'}
PAY_BOLETO = {'status':'paid'}
REFUNDED_OR_CAPTURE_TRANSACTION = {'amount':'10000'}
INVALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount':'10000',
'card_number':card_dictionary.INVALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.INVALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.INVALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.INVALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_CAPTURE_FALSE_DICTIONARY = {'amount':'10000', 'capture':'false',
'card_number':card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name':card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount':'10000',
'card_number':card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION__WITH_POSTBACK_DICTIONARY = {'amount':'10000',
'card_number':card_dictionary.VALID_CARD_DICTIONARY['card_number'], 'postback_url':pagarme_test.create_postback_url(),
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY} | Python | 0 |
fa23d59a66cfc192bcfed6cdbb8426479487ccca | Add unit tests | tests/unit/synapseutils/unit_test_synapseutils_walk.py | tests/unit/synapseutils/unit_test_synapseutils_walk.py | import json
import uuid
import pytest
from unittest.mock import patch, call
import synapseclient
import synapseutils.walk_functions
def test_helpWalk_not_container(syn):
"""Test if entry entity isn't a container"""
entity = {"id": "syn123", "concreteType": "File"}
with patch.object(syn, "get", return_value=entity):
result = synapseutils.walk_functions._helpWalk(syn=syn, synId="syn123", includeTypes=["folder", "file"])
# Execute generator
gen_result = list(result)
assert gen_result == []
def test_helpWalk_one_child_file(syn):
"""Test if there is one file in parent directory"""
entity = {"id": "syn123", "concreteType": "org.sagebionetworks.repo.model.Project", "name": "parent_folder"}
child = [{"id": "syn2222", "conreteType": "File", "name": "test_file"}]
expected = [
(('parent_folder', 'syn123'), [], [('test_file', 'syn2222')])
]
with patch.object(syn, "get", return_value=entity) as mock_syn_get,\
patch.object(syn, "getChildren", return_value=child) as mock_get_child:
result = synapseutils.walk_functions._helpWalk(syn=syn, synId="syn123", includeTypes=["folder", "file"])
# Execute generator
gen_result = list(result)
mock_syn_get.assert_called_once_with("syn123", downloadFile=False)
mock_get_child.assert_called_once_with("syn123", ["folder", "file"])
assert gen_result == expected
def test_helpWalk_directory(syn):
"""Test recursive functionality"""
entity_list = [
{"id": "syn123", "concreteType": "org.sagebionetworks.repo.model.Project", "name": "parent_folder"},
{"id": "syn124", "concreteType": "org.sagebionetworks.repo.model.Folder", "name": "test_folder"}
]
child_list = [
[{"id": "syn2222", "concreteType": "File", "name": "test_file"},
{"id": "syn124", "concreteType": "org.sagebionetworks.repo.model.Folder", "name": "test_folder"}],
[{"id": "syn22223", "conreteType": "File", "name": "test_file_2"}]
]
expected = [
(('parent_folder', 'syn123'), [('test_folder', 'syn124')], [('test_file', 'syn2222')]),
(('parent_folder/test_folder', 'syn124'), [], [('test_file_2', 'syn22223')])
]
with patch.object(syn, "get", side_effect=entity_list),\
patch.object(syn, "getChildren", side_effect=child_list):
result = synapseutils.walk_functions._helpWalk(syn=syn, synId="syn123", includeTypes=["folder", "file"])
# Execute generator
gen_result = list(result)
assert gen_result == expected
| import json
import uuid
import pytest
from unittest.mock import patch, call
import synapseclient
import synapseutils.walk_functions
def test_helpWalk_not_container(syn):
"""Test if entry entity isn't a container"""
entity = {"id": "syn123", "concreteType": "File"}
with patch.object(syn, "get", return_value=entity):
result = synapseutils.walk_functions._helpWalk(syn=syn, synId="syn123", includeTypes=["folder", "file"])
# Execute generator
gen_result = list(result)
assert gen_result == []
def test_helpWalk_one_child_file(syn):
"""Test if there is one file in parent directory"""
entity = {"id": "syn123", "concreteType": "org.sagebionetworks.repo.model.Project", "name": "parent_folder"}
child = [{"id": "syn2222", "conreteType": "File", "name": "test_file"}]
expected = [
(('parent_folder', 'syn123'), [], [('test_file', 'syn2222')])
]
with patch.object(syn, "get", return_value=entity),\
patch.object(syn, "getChildren", return_value=child):
result = synapseutils.walk_functions._helpWalk(syn=syn, synId="syn123", includeTypes=["folder", "file"])
# Execute generator
gen_result = list(result)
assert gen_result == expected
def test_helpWalk_directory(syn):
"""Test recursive functionality"""
entity_list = [
{"id": "syn123", "concreteType": "org.sagebionetworks.repo.model.Project", "name": "parent_folder"},
{"id": "syn124", "concreteType": "org.sagebionetworks.repo.model.Folder", "name": "test_folder"}
]
child_list = [
[{"id": "syn2222", "concreteType": "File", "name": "test_file"},
{"id": "syn124", "concreteType": "org.sagebionetworks.repo.model.Folder", "name": "test_folder"}],
[{"id": "syn22223", "conreteType": "File", "name": "test_file_2"}]
]
expected = [
(('parent_folder', 'syn123'), [('test_folder', 'syn124')], [('test_file', 'syn2222')]),
(('parent_folder/test_folder', 'syn124'), [], [('test_file_2', 'syn22223')])
]
with patch.object(syn, "get", side_effect=entity_list),\
patch.object(syn, "getChildren", side_effect=child_list):
result = synapseutils.walk_functions._helpWalk(syn=syn, synId="syn123", includeTypes=["folder", "file"])
# Execute generator
gen_result = list(result)
assert gen_result == expected
# def test_helpWalk_not_container(syn):
# entity = {"id": "syn123", "concreteType": "File"}
# with patch.object(syn, "get", return_value=entity),\
# patch.object(syn, "getChildren", return_value=None):
# synapseutils.walk._helpWalk(syn, "syn123", "syn456", updateLinks=False)
| Python | 0.000001 |
d2fe267359feec48888469909bec3b432d1f4a93 | Fix `BundleIntegrationTest`. (#4953) | tests/python/pants_test/engine/legacy/test_bundle_integration.py | tests/python/pants_test/engine/legacy/test_bundle_integration.py | # coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from contextlib import contextmanager
from pants.base.deprecated import deprecated_conditional
from pants.util.contextutil import temporary_dir
from pants_test.pants_run_integration_test import PantsRunIntegrationTest, ensure_engine
class BundleIntegrationTest(PantsRunIntegrationTest):
TARGET_PATH = 'testprojects/src/java/org/pantsbuild/testproject/bundle'
def test_bundle_basic(self):
args = ['-q', 'bundle', self.TARGET_PATH]
self.do_command(*args, success=True, enable_v2_engine=True)
@contextmanager
def bundled(self, target_name):
with temporary_dir() as temp_distdir:
with self.pants_results(
['-q',
'--pants-distdir={}'.format(temp_distdir),
'bundle',
'{}:{}'.format(self.TARGET_PATH, target_name)]) as pants_run:
self.assert_success(pants_run)
yield os.path.join(temp_distdir,
'{}.{}-bundle'.format(self.TARGET_PATH.replace('/', '.'), target_name))
@ensure_engine
def test_bundle_mapper(self):
with self.bundled('mapper') as bundle_dir:
self.assertTrue(os.path.isfile(os.path.join(bundle_dir, 'bundle_files/file1.txt')))
@ensure_engine
def test_bundle_relative_to(self):
with self.bundled('relative_to') as bundle_dir:
self.assertTrue(os.path.isfile(os.path.join(bundle_dir, 'b/file1.txt')))
@ensure_engine
def test_bundle_rel_path(self):
with self.bundled('rel_path') as bundle_dir:
self.assertTrue(os.path.isfile(os.path.join(bundle_dir, 'b/file1.txt')))
@ensure_engine
def test_bundle_directory(self):
with self.bundled('directory') as bundle_dir:
root = os.path.join(bundle_dir, 'a/b')
self.assertTrue(os.path.isdir(root))
# NB: The behaviour of this test will change with the relevant deprecation
# in `pants.backend.jvm.tasks.bundle_create`, because the parent directory
# will not be symlinked.
deprecated_conditional(
lambda: os.path.isfile(os.path.join(root, 'file1.txt')),
'1.5.0.dev0',
'default recursive inclusion of files in directory',
'A non-recursive/literal glob should no longer include child paths.'
)
def test_bundle_explicit_recursion(self):
with self.bundled('explicit_recursion') as bundle_dir:
root = os.path.join(bundle_dir, 'a/b')
self.assertTrue(os.path.isdir(root))
self.assertTrue(os.path.isfile(os.path.join(root, 'file1.txt')))
@ensure_engine
def test_bundle_resource_ordering(self):
"""Ensures that `resources=` ordering is respected."""
pants_run = self.run_pants(
['-q',
'run',
'testprojects/src/java/org/pantsbuild/testproject/bundle:bundle-resource-ordering']
)
self.assert_success(pants_run)
self.assertEquals(pants_run.stdout_data.strip(), 'Hello world from Foo')
| # coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from contextlib import contextmanager
from pants.base.deprecated import deprecated_conditional
from pants.util.contextutil import temporary_dir
from pants_test.pants_run_integration_test import PantsRunIntegrationTest, ensure_engine
class BundleIntegrationTest(PantsRunIntegrationTest):
TARGET_PATH = 'testprojects/src/java/org/pantsbuild/testproject/bundle'
def test_bundle_basic(self):
args = ['-q', 'bundle', self.TARGET_PATH]
self.do_command(*args, success=True, enable_v2_engine=True)
@contextmanager
def bundled(self, target_name):
with temporary_dir() as temp_distdir:
with self.pants_results(
['-q',
'--pants-distdir={}'.format(temp_distdir),
'bundle',
'{}:{}'.format(self.TARGET_PATH, target_name)]) as pants_run:
self.assert_success(pants_run)
yield os.path.join(temp_distdir,
'{}.{}-bundle'.format(self.TARGET_PATH.replace('/', '.'), target_name))
@ensure_engine
def test_bundle_mapper(self):
with self.bundled('mapper') as bundle_dir:
self.assertTrue(os.path.isfile(os.path.join(bundle_dir, 'bundle_files/file1.txt')))
@ensure_engine
def test_bundle_relative_to(self):
with self.bundled('relative_to') as bundle_dir:
self.assertTrue(os.path.isfile(os.path.join(bundle_dir, 'b/file1.txt')))
@ensure_engine
def test_bundle_rel_path(self):
with self.bundled('rel_path') as bundle_dir:
self.assertTrue(os.path.isfile(os.path.join(bundle_dir, 'b/file1.txt')))
@ensure_engine
def test_bundle_directory(self):
with self.bundled('directory') as bundle_dir:
root = os.path.join(bundle_dir, 'a/b')
self.assertTrue(os.path.isdir(root))
# NB: The behaviour of this test will change with the relevant deprecation
# in `pants.backend.jvm.tasks.bundle_create`, because the parent directory
# will not be symlinked.
deprecated_conditional(
lambda: os.path.isfile(os.path.join(root, 'file1.txt')),
'1.5.0.dev0',
'default recursive inclusion of files in directory',
'A non-recursive/literal glob should no longer include child paths.'
)
def test_bundle_explicit_recursion(self):
with self.bundled('explicit_recursion') as bundle_dir:
root = os.path.join(bundle_dir, 'a/b')
self.assertTrue(os.path.isdir(root))
self.assertTrue(os.path.isfile(os.path.join(root, 'file1.txt')))
@ensure_engine
def test_bundle_resource_ordering(self):
"""Ensures that `resources=` ordering is respected."""
pants_run = self.run_pants(
['-q',
'run',
'testprojects/src/java/org/pantsbuild/testproject/bundle:bundle-resource-ordering']
)
self.assert_success(pants_run)
self.assertEquals(pants_run.stdout_data, 'Hello world from Foo\n\n')
| Python | 0 |
1bccf48e6e142e6c62374dd9d7dc94330f15c650 | Update ipc_lista1.3.py | lista1/ipc_lista1.3.py | lista1/ipc_lista1.3.py | #ipc_lista1.3
#Professor: Jucimar Junior
#Any Mendes Carvalho - 161531004
#
#
#
#
#Faça um programa que peça dois números e imprima a soma.
number1 = input("Digite o primeiro: ")
number2 = input("Digite o segundo número: ")
print(number1+number2)
| #ipc_lista1.3
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça dois números e imprima a soma.
number1 = input("Digite o primeiro: ")
number2 = input("Digite o segundo número: ")
print(number1+number2)
| Python | 0 |
4e6fc94fde8eace1b461eba59dc4a56611664877 | Update ipc_lista1.7.py | lista1/ipc_lista1.7.py | lista1/ipc_lista1.7.py | #ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
| #ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: "
| Python | 0 |
950e9f82be8b3a02ce96db47061cf828da231be9 | Update ipc_lista1.8.py | lista1/ipc_lista1.8.py | lista1/ipc_lista1.8.py | #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input("Entre
| #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input
| Python | 0 |
26c781807937038ec2c4fbfd4413ae2c60decd1b | add stdint.h for c++ default header include. | src/py/cpp_fragment_tmpl.py | src/py/cpp_fragment_tmpl.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
hpp_tmpl="""#ifndef __FRAGMENT_HPP__
#define __FRAGMENT_HPP__
#include <string>
#include <vector>
#include <map>
#include <list>
// linux int type define; should be remore/add by system dependent in the future version.
#include <stdint.h>
{includes}
void fragment_container();
#endif
"""
cpp_tmpl="""#include "{head_file}"
#include <iostream>
#include <stdio.h>
void fragment_container()
{{
// tmp code begin
{tmp_cpp}
// tmp code end
}}
"""
| #!/usr/bin/env python
# -*- coding:utf-8 -*-
hpp_tmpl="""#ifndef __FRAGMENT_HPP__
#define __FRAGMENT_HPP__
#include <string>
#include <vector>
#include <map>
#include <list>
{includes}
void fragment_container();
#endif
"""
cpp_tmpl="""#include "{head_file}"
#include <iostream>
#include <stdio.h>
void fragment_container()
{{
// tmp code begin
{tmp_cpp}
// tmp code end
}}
"""
| Python | 0 |
5e21c7d0fa46e2b290368533cc6dc741b1d366e2 | correct src path in settings | functional-tests/clickerft/settings.py | functional-tests/clickerft/settings.py | from os.path import dirname, realpath
BASEDIR = dirname(dirname(dirname(realpath(__file__))))
HOME = "file://" + BASEDIR + "/src/"
| import os
BASEDIR = os.path.dirname(os.getcwd())
HOME = "file://" + BASEDIR + "/src/"
| Python | 0.000001 |
54563933a265a7c70adce3996d0a31eb9c915203 | Use kwarg normally in piratepad.controllers.Form | addons/piratepad/controllers.py | addons/piratepad/controllers.py | from openobject.tools import expose
from openerp.controllers import form
from openerp.utils import rpc, TinyDict
import cherrypy
class Form(form.Form):
_cp_path = "/piratepad/form"
@expose('json', methods=('POST',))
def save(self, pad_name):
params, data = TinyDict.split(cherrypy.session['params'])
ctx = dict(rpc.session.context,
default_res_model=params.model, default_res_id=params.id,
active_id=False, active_ids=[])
pad_link = "http://piratepad.net/"+'-'.join(pad_name.split())
attachment_id = rpc.RPCProxy('ir.attachment').create({
'name': pad_name,
'url': pad_link,
}, ctx)
return {'id': attachment_id, 'name': pad_name, 'url': pad_link}
| from openobject.tools import expose
from openerp.controllers import form
from openerp.utils import rpc, common, TinyDict
import cherrypy
class Form(form.Form):
_cp_path = "/piratepad/form"
@expose('json', methods=('POST',))
def save(self, **kwargs):
params, data = TinyDict.split(cherrypy.session['params'])
pad_name=kwargs.get('pad_name')
ctx = dict(rpc.session.context,
default_res_model=params.model, default_res_id=params.id,
active_id=False, active_ids=[])
pad_link = "http://piratepad.net/"+'-'.join(pad_name.split())
attachment_id = rpc.RPCProxy('ir.attachment').create({
'name': pad_name,
'url': pad_link,
}, ctx)
return {'id': attachment_id, 'name': pad_name, 'url': pad_link}
| Python | 0 |
31aa44ef336c497be9f545c9bd4af64aac250748 | Fix remote coverage execution | python/helpers/coverage_runner/run_coverage.py | python/helpers/coverage_runner/run_coverage.py | """Coverage.py's main entrypoint."""
import os
import sys
bundled_coverage_path = os.getenv('BUNDLED_COVERAGE_PATH')
if bundled_coverage_path:
sys_path_backup = sys.path
sys.path = [p for p in sys.path if p != bundled_coverage_path]
from coverage.cmdline import main
sys.path = sys_path_backup
else:
from coverage.cmdline import main
coverage_file = os.getenv('PYCHARM_COVERAGE_FILE')
coverage_file = coverage_file[0:-len(".coverage")]
run_cov = os.getenv('PYCHARM_RUN_COVERAGE')
if os.getenv('CREATE_TEMP_COVERAGE_FILE'):
line = 'LOG: PyCharm: File mapping:%s\t%s\n'
import tempfile
(h, new_cov_file) = tempfile.mkstemp(prefix='pycharm-coverage')
print(line%(coverage_file + ".coverage", new_cov_file + ".coverage"))
print(line%(coverage_file + '.syspath.txt', new_cov_file + '.syspath.txt'))
print(line%(coverage_file + '.xml', new_cov_file + '.xml'))
coverage_file = new_cov_file
if coverage_file:
os.environ['COVERAGE_FILE'] = coverage_file + ".coverage"
if run_cov:
a_file = open(coverage_file + '.syspath.txt', mode='w')
a_file.write(os.getcwd()+"\n")
for path in sys.path: a_file.write(path + "\n")
a_file.close()
argv = []
for arg in sys.argv:
if arg.startswith('-m'):
argv.append('-m')
argv.append(arg[2:])
else:
argv.append(arg)
sys.argv = argv
cwd = os.getcwd()
try:
main()
finally:
if run_cov:
os.chdir(cwd)
main(["xml", "-o", coverage_file + ".xml", "--ignore-errors"]) | """Coverage.py's main entrypoint."""
import os
import sys
bundled_coverage_path = os.getenv('BUNDLED_COVERAGE_PATH')
if bundled_coverage_path:
sys_path_backup = sys.path
sys.path = [p for p in sys.path if p != bundled_coverage_path]
from coverage.cmdline import main
sys.path = sys_path_backup
else:
from coverage.cmdline import main
coverage_file = os.getenv('PYCHARM_COVERAGE_FILE')
coverage_file = coverage_file[0:-len(".coverage")]
run_cov = os.getenv('PYCHARM_RUN_COVERAGE')
if os.getenv('CREATE_TEMP_COVERAGE_FILE'):
line = 'LOG: PyCharm: File mapping:%s\t%s\n'
import tempfile
(h, new_cov_file) = tempfile.mkstemp(prefix='pycharm-coverage')
print(line%(coverage_file + ".coverage", new_cov_file + ".coverage"))
print(line%(coverage_file + '.syspath.txt', new_cov_file + '.syspath.txt'))
print(line%(coverage_file + '.xml', new_cov_file + '.xml'))
coverage_file = new_cov_file + ".cov"
if coverage_file:
os.environ['COVERAGE_FILE'] = coverage_file + ".coverage"
if run_cov:
a_file = open(coverage_file + '.syspath.txt', mode='w')
a_file.write(os.getcwd()+"\n")
for path in sys.path: a_file.write(path + "\n")
a_file.close()
argv = []
for arg in sys.argv:
if arg.startswith('-m'):
argv.append('-m')
argv.append(arg[2:])
else:
argv.append(arg)
sys.argv = argv
cwd = os.getcwd()
try:
main()
finally:
if run_cov:
os.chdir(cwd)
main(["xml", "-o", coverage_file + ".xml", "--ignore-errors"]) | Python | 0.000001 |
1f6a154967ecd74c538f9ddda3f4a83018a6eef7 | Attempt to fix iris_val_based_early_stopping test. Change: 127441610 | tensorflow/examples/skflow/iris_val_based_early_stopping.py | tensorflow/examples/skflow/iris_val_based_early_stopping.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example of DNNClassifier for Iris plant dataset, with early stopping."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
from sklearn import datasets
from sklearn import metrics
from sklearn.cross_validation import train_test_split
import tensorflow as tf
from tensorflow.contrib import learn
def clean_folder(folder):
"""Cleans the given folder if it exists."""
try:
shutil.rmtree(folder)
except OSError:
pass
def main(unused_argv):
iris = datasets.load_iris()
x_train, x_test, y_train, y_test = train_test_split(
iris.data, iris.target, test_size=0.2, random_state=42)
x_train, x_val, y_train, y_val = train_test_split(
x_train, y_train, test_size=0.2, random_state=42)
val_monitor = learn.monitors.ValidationMonitor(
x_val, y_val, early_stopping_rounds=200)
model_dir = '/tmp/iris_model'
clean_folder(model_dir)
# classifier with early stopping on training data
classifier1 = learn.DNNClassifier(
feature_columns=learn.infer_real_valued_columns_from_input(x_train),
hidden_units=[10, 20, 10], n_classes=3, model_dir=model_dir)
classifier1.fit(x=x_train, y=y_train, steps=2000)
score1 = metrics.accuracy_score(y_test, classifier1.predict(x_test))
model_dir = '/tmp/iris_model_val'
clean_folder(model_dir)
# classifier with early stopping on validation data, save frequently for
# monitor to pick up new checkpoints.
classifier2 = learn.DNNClassifier(
feature_columns=learn.infer_real_valued_columns_from_input(x_train),
hidden_units=[10, 20, 10], n_classes=3, model_dir=model_dir,
config=tf.contrib.learn.RunConfig(save_checkpoints_secs=1))
classifier2.fit(x=x_train, y=y_train, steps=2000, monitors=[val_monitor])
score2 = metrics.accuracy_score(y_test, classifier2.predict(x_test))
# In many applications, the score is improved by using early stopping
print('score1: ', score1)
print('score2: ', score2)
print('score2 > score1: ', score2 > score1)
if __name__ == '__main__':
tf.app.run()
| # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example of DNNClassifier for Iris plant dataset, with early stopping."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from sklearn import datasets
from sklearn import metrics
from sklearn.cross_validation import train_test_split
import tensorflow as tf
from tensorflow.contrib import learn
def main(unused_argv):
iris = datasets.load_iris()
x_train, x_test, y_train, y_test = train_test_split(
iris.data, iris.target, test_size=0.2, random_state=42)
x_train, x_val, y_train, y_val = train_test_split(
x_train, y_train, test_size=0.2, random_state=42)
val_monitor = learn.monitors.ValidationMonitor(
x_val, y_val, early_stopping_rounds=200)
# classifier with early stopping on training data
classifier1 = learn.DNNClassifier(
feature_columns=learn.infer_real_valued_columns_from_input(x_train),
hidden_units=[10, 20, 10], n_classes=3, model_dir='/tmp/iris_model/')
classifier1.fit(x=x_train, y=y_train, steps=2000)
score1 = metrics.accuracy_score(y_test, classifier1.predict(x_test))
# classifier with early stopping on validation data, save frequently for
# monitor to pick up new checkpoints.
classifier2 = learn.DNNClassifier(
feature_columns=learn.infer_real_valued_columns_from_input(x_train),
hidden_units=[10, 20, 10], n_classes=3, model_dir='/tmp/iris_model_val/',
config=tf.contrib.learn.RunConfig(save_checkpoints_secs=1))
classifier2.fit(x=x_train, y=y_train, steps=2000, monitors=[val_monitor])
score2 = metrics.accuracy_score(y_test, classifier2.predict(x_test))
# In many applications, the score is improved by using early stopping
print('score1: ', score1)
print('score2: ', score2)
print('score2 > score1: ', score2 > score1)
if __name__ == '__main__':
tf.app.run()
| Python | 0.999225 |
c109728986a3a583fe037780c88bdaa458e663c4 | Bump 2.1.1 | appium/version.py | appium/version.py | version = '2.1.1'
| version = '2.1.0'
| Python | 0.000011 |
05140304c1ef08e7e291eec92de4091320bdfc0e | Add acceleration to example | encoder/examples/encoder_lcd.py | encoder/examples/encoder_lcd.py | # -*- coding: utf-8 -*-
"""Read encoder and print position value to LCD."""
from machine import sleep_ms
from pyb_encoder import Encoder
from hd44780 import HD44780
class STM_LCDShield(HD44780):
_default_pins = ('PD2','PD1','PD6','PD5','PD4','PD3')
def main():
lcd.set_string("Value: ")
lastval = 0
while True:
val = enc.value
if lastval != val:
lastpos = val
lcd.set_cursor(6, 0)
for c in "%3i" % val:
lcd.send_byte(c)
enc.cur_accel = max(0, enc.cur_accel - enc.accel)
sleep_ms(50)
if __name__ == '__main__':
lcd = STM_LCDShield()
enc = Encoder('A0', 'A1', max_value=999, accel=5)
main()
| # -*- coding: utf-8 -*-
"""Read encoder and print position value to LCD."""
from machine import sleep_ms
from pyb_encoder import Encoder
from hd44780 import HD44780
class STM_LCDShield(HD44780):
_default_pins = ('PD2','PD1','PD6','PD5','PD4','PD3')
def main():
lcd.set_string("Value: ")
lastval = 0
while True:
val = enc.value
if lastval != val:
lastpos = val
lcd.set_cursor(6, 0)
for c in "%3i" % val:
lcd.send_byte(c)
sleep_ms(50)
if __name__ == '__main__':
lcd = STM_LCDShield()
enc = Encoder('A0', 'A1', max_value=999)
main()
| Python | 0 |
2268ebdc47b1d9221c06622a7b1992cae14013c2 | Test endpoint for the web server | web/server.py | web/server.py | import http.client
import os
from flask import Flask
from pymongo import MongoClient
MONGO_URL = os.environ.get('MONGO_URL', 'mongodb://mongo:27017/')
MONGO_DATABASE = os.environ.get('MONGO_DATABASE', 'whistleblower')
DATABASE = MongoClient(MONGO_URL)[MONGO_DATABASE]
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/facebook_webhook', methods=['POST'])
def facebook_webhook():
DATABASE.facebook_webhook.insert(request.form)
return ('', http.client.NO_CONTENT)
| import http.client
import os
from flask import Flask
from pymongo import MongoClient
MONGO_URL = os.environ.get('MONGO_URL', 'mongodb://mongo:27017/')
MONGO_DATABASE = os.environ.get('MONGO_DATABASE', 'whistleblower')
DATABASE = MongoClient(MONGO_URL)[MONGO_DATABASE]
app = Flask(__name__)
@app.route('/facebook_webhook', methods=['POST'])
def facebook_webhook():
DATABASE.facebook_webhook.insert(request.form)
return ('', http.client.NO_CONTENT)
| Python | 0 |
db40a42c2825b157017e6730a2b5c95371bbe598 | Allow user to adjust nyquist freq and freq spacing in cp_utils.py | arfit/cp_utils.py | arfit/cp_utils.py | import carmcmc as cm
from gatspy.periodic import LombScargleFast
import matplotlib.pyplot as plt
import numpy as np
def csample_from_files(datafile, chainfile, p, q):
data = np.loadtxt(datafile)
times, tind = np.unique(data[:,0], return_index=True)
data = data[tind, :]
chain = np.loadtxt(chainfile)
assert chain.shape[1] == p + q + 5, 'dimension mismatch'
return cm.CarmaSample(data[:,0], data[:,1], data[:,2], None, q=q, trace=chain[:,:-2], loglike=chain[:,-2], logpost=chain[:,-1])
def normalised_lombscargle(ts, ys, dys, oversampling=5, nyquist_factor=3):
model = LombScargleFast().fit(ts, ys, dys)
pers, pows = model.periodogram_auto(oversampling=oversampling, nyquist_factor=nyquist_factor)
fs = 1.0/pers
T = np.max(ts) - np.min(ts)
mu = 1/T*np.trapz(ys, ts)
s2 = 1/T*np.trapz(np.square(ys-mu), ts)
return fs, s2*pows/np.trapz(pows, fs)
def plot_psd_sample_data(sample, oversampling=5, nyquist_factor=3):
psd_low, psd_high, psd_med, fs = sample.plot_power_spectrum(doShow=False)
plt.clf()
plt.loglog(fs, psd_med, '-b', alpha=0.33)
plt.fill_between(fs, psd_low, psd_high, color='b', alpha=0.17)
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
bw = fs[-1] - fs[0]
T = sample.time[-1] - sample.time[0]
s2 = 1/T*np.trapz(np.square(sample.ysig), sample.time)
noise_level = s2/bw
levels = noise_level*np.sqrt(sample.get_samples('measerr_scale'))
plt.axhline(np.median(levels), color='g', alpha=0.33)
plt.fill_between(fs, np.percentile(levels, 84)+0*fs, np.percentile(levels, 16)+0*fs, color='g', alpha=0.17)
plt.loglog(fs, psd, '-r', alpha=0.33)
def plot_psd_sample_draw(sample, loc='upper left', oversampling=5, nyquist_factor=3):
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
ys_draw = sample.predict(sample.time, bestfit='random')[0]
fs, dpsd = normalised_lombscargle(sample.time, ys_draw, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
plt.loglog(fs, psd, '-k', label='Data', alpha=0.5)
plt.loglog(fs, dpsd, '-b', label='Prediction', alpha=0.5)
plt.legend(loc=loc)
| import carmcmc as cm
from gatspy.periodic import LombScargleFast
import matplotlib.pyplot as plt
import numpy as np
def csample_from_files(datafile, chainfile, p, q):
data = np.loadtxt(datafile)
times, tind = np.unique(data[:,0], return_index=True)
data = data[tind, :]
chain = np.loadtxt(chainfile)
assert chain.shape[1] == p + q + 5, 'dimension mismatch'
return cm.CarmaSample(data[:,0], data[:,1], data[:,2], None, q=q, trace=chain[:,:-2], loglike=chain[:,-2], logpost=chain[:,-1])
def normalised_lombscargle(ts, ys, dys):
model = LombScargleFast().fit(ts, ys, dys)
T = np.max(ts)-np.min(ts)
dts = np.diff(np.sort(ts))
fny = 1.0/(2.0*np.min(dts))
df = 1.0/T
N = fny/df
fs = np.linspace(df, fny, N)
pows = model.score_frequency_grid(df, df, N)
mu = 1.0/T*np.trapz(ys, ts)
s2 = 1.0/T*np.trapz(np.square(ys-mu), ts)
return fs, s2*pows/np.trapz(pows, fs)
def plot_psd_sample_data(sample):
psd_low, psd_high, psd_med, fs = sample.plot_power_spectrum(doShow=False)
plt.clf()
plt.loglog(fs, psd_med, '-b', alpha=0.33)
plt.fill_between(fs, psd_low, psd_high, color='b', alpha=0.17)
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig)
bw = fs[-1] - fs[0]
T = sample.time[-1] - sample.time[0]
s2 = 1/T*np.trapz(np.square(sample.ysig), sample.time)
noise_level = s2/bw
levels = noise_level*np.sqrt(sample.get_samples('measerr_scale'))
plt.axhline(np.median(levels), color='g', alpha=0.33)
plt.fill_between(fs, np.percentile(levels, 84)+0*fs, np.percentile(levels, 16)+0*fs, color='g', alpha=0.17)
plt.loglog(fs, psd, '-r', alpha=0.33)
def plot_psd_sample_draw(sample, loc='upper left'):
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig)
ys_draw = sample.predict(sample.time, bestfit='random')[0]
fs, dpsd = normalised_lombscargle(sample.time, ys_draw, sample.ysig)
plt.loglog(fs, psd, '-k', label='Data', alpha=0.5)
plt.loglog(fs, dpsd, '-b', label='Prediction', alpha=0.5)
plt.legend(loc=loc)
| Python | 0 |
4fd80a9a593a4f9100899e96a383782c68a41af1 | Fix to subtract USDT withdrawals from balance | poloniex_apis/api_models/deposit_withdrawal_history.py | poloniex_apis/api_models/deposit_withdrawal_history.py | from collections import defaultdict
from poloniex_apis.api_models.ticker_price import TickerData
class DWHistory:
def __init__(self, history):
self.withdrawals = defaultdict(float)
self.deposits = defaultdict(float)
self.history = history
def get_dw_history(self):
for deposit in self.history['deposits']:
if deposit['currency'] in self.deposits:
self.deposits[deposit['currency']] += float(deposit['amount'])
else:
self.deposits[deposit['currency']] = float(deposit['amount'])
for withdrawal in self.history['withdrawals']:
if withdrawal['currency'] in self.withdrawals:
self.withdrawals[withdrawal['currency']] += float(withdrawal['amount'])
else:
self.withdrawals[withdrawal['currency']] = float(withdrawal['amount'])
return self.deposits, self.withdrawals
def get_btc_balance(self, ticker):
balance = 0
for deposit_symbol, amount in self.deposits.items():
if deposit_symbol == u"USDT":
balance += amount * ticker.get_price("USDT_BTC")
if deposit_symbol != u'BTC':
balance += amount * ticker.get_price("BTC_" + deposit_symbol)
else:
balance += amount
for withdrawal_symbol, amount in self.withdrawals.items():
if withdrawal_symbol == u"USDT":
balance -= amount * ticker.get_price("USDT_BTC")
if withdrawal_symbol != u'BTC':
balance -= amount * ticker.get_price("BTC_" + withdrawal_symbol)
else:
balance -= amount
return balance
| from collections import defaultdict
from poloniex_apis.api_models.ticker_price import TickerData
class DWHistory:
def __init__(self, history):
self.withdrawals = defaultdict(float)
self.deposits = defaultdict(float)
self.history = history
def get_dw_history(self):
for deposit in self.history['deposits']:
if deposit['currency'] in self.deposits:
self.deposits[deposit['currency']] += float(deposit['amount'])
else:
self.deposits[deposit['currency']] = float(deposit['amount'])
for withdrawal in self.history['withdrawals']:
if withdrawal['currency'] in self.withdrawals:
self.withdrawals[withdrawal['currency']] += float(withdrawal['amount'])
else:
self.withdrawals[withdrawal['currency']] = float(withdrawal['amount'])
return self.deposits, self.withdrawals
def get_btc_balance(self, ticker):
balance = 0
for deposit_symbol, amount in self.deposits.items():
if deposit_symbol == u"USDT":
balance += amount * ticker.get_price("USDT_BTC")
if deposit_symbol != u'BTC':
balance += amount * ticker.get_price("BTC_" + deposit_symbol)
else:
balance += amount
for withdrawal_symbol, amount in self.withdrawals.items():
if withdrawal_symbol == u"USDT":
balance += amount * ticker.get_price("USDT_BTC")
if withdrawal_symbol != u'BTC':
balance -= amount * ticker.get_price("BTC_" + withdrawal_symbol)
else:
balance -= amount
return balance
| Python | 0.000007 |
1e16048c7ceb50377fdfdda3a39ef9910d2021bb | Bump version to 0.2 | wagtail_simple_gallery/__init__.py | wagtail_simple_gallery/__init__.py | __version__ = '0.2' | __version__ = '0.1' | Python | 0.000001 |
7e9d3b3d2c4e46c2b16595b7acc6aa670ece9e6e | use correct API to save to bucket. | astrobin/tasks.py | astrobin/tasks.py | from django.conf import settings
from celery.decorators import task
from celery.task.sets import subtask
from PIL import Image as PILImage
from subprocess import call
import StringIO
import os
import os.path
from image_utils import *
from s3 import *
from notifications import *
@task()
def solve_image(image, callback=None):
# Solve
path = settings.UPLOADS_DIRECTORY
uid = image.filename
original_ext = image.original_ext
solved = False
command = ['nice', '-n', '5', '/usr/local/astrometry/bin/solve-field', path + uid + original_ext]
call(command)
solved_filename = settings.UPLOADS_DIRECTORY + image.filename + '-ngc.png'
if os.path.exists(settings.UPLOADS_DIRECTORY + image.filename + '.solved'):
solved = True
solved_file = open(solved_filename)
solved_data = StringIO.StringIO(solved_file.read())
solved_image = PILImage.open(solved_data)
(w, h) = solved_image.size
(w, h) = scale_dimensions(w, h, settings.RESIZED_IMAGE_SIZE)
solved_resizedImage = solved_image.resize((w, h), PILImage.ANTIALIAS)
# Then save to bucket
solved_resizedFile = StringIO.StringIO()
solved_resizedImage.save(solved_resizedFile, 'PNG')
save_to_bucket(uid + '_solved.png', solved_resizedFile.getvalue())
if solved:
push_notification([image.user], 'image_solved',
{'object_url':image.get_absolute_url() + '?mod=solved'})
else:
push_notification([image.user], 'image_not_solved',
{'object_url':image.get_absolute_url()})
if callback:
callback(image, solved, '%s%s*' % (path, uid))
@task()
def store_image(image, solve, callback=None):
try:
store_image_in_s3(settings.UPLOADS_DIRECTORY, image.filename, image.original_ext)
except S3CreateError, exc:
store_image.retry(exc=exc)
push_notification([image.user], 'image_ready', {'object_url':image.get_absolute_url()})
if callback:
callback(image, True, solve)
@task
def delete_image(filename, ext):
delete_image_from_s3(filename, ext)
| from django.conf import settings
from celery.decorators import task
from celery.task.sets import subtask
from PIL import Image as PILImage
from subprocess import call
import StringIO
import os
import os.path
from image_utils import *
from s3 import *
from notifications import *
@task()
def solve_image(image, callback=None):
# Solve
path = settings.UPLOADS_DIRECTORY
uid = image.filename
original_ext = image.original_ext
solved = False
command = ['nice', '-n', '5', '/usr/local/astrometry/bin/solve-field', path + uid + original_ext]
call(command)
solved_filename = settings.UPLOADS_DIRECTORY + image.filename + '-ngc.png'
if os.path.exists(settings.UPLOADS_DIRECTORY + image.filename + '.solved'):
solved = True
solved_file = open(solved_filename)
solved_data = StringIO.StringIO(solved_file.read())
solved_image = PILImage.open(solved_data)
(w, h) = solved_image.size
(w, h) = scale_dimensions(w, h, settings.RESIZED_IMAGE_SIZE)
solved_resizedImage = solved_image.resize((w, h), PILImage.ANTIALIAS)
# Then save to bucket
solved_resizedFile = StringIO.StringIO()
solved_resizedImage.save(solved_resizedFile, 'PNG')
save_to_bucket(solved_resizedFile.getvalue(),
'image/png',
settings.S3_SOLVED_BUCKET,
uid,
'.png')
if solved:
push_notification([image.user], 'image_solved',
{'object_url':image.get_absolute_url() + '?mod=solved'})
else:
push_notification([image.user], 'image_not_solved',
{'object_url':image.get_absolute_url()})
if callback:
callback(image, solved, '%s%s*' % (path, uid))
@task()
def store_image(image, solve, callback=None):
try:
store_image_in_s3(settings.UPLOADS_DIRECTORY, image.filename, image.original_ext)
except S3CreateError, exc:
store_image.retry(exc=exc)
push_notification([image.user], 'image_ready', {'object_url':image.get_absolute_url()})
if callback:
callback(image, True, solve)
@task
def delete_image(filename, ext):
delete_image_from_s3(filename, ext)
| Python | 0 |
70f137998b2cc3b9c873a57e17a435c6ca181192 | improve code for getting the pricelist | product_supplier_intercompany/models/purchase_order.py | product_supplier_intercompany/models/purchase_order.py | # Copyright 2021 Akretion (https://www.akretion.com).
# @author Sébastien BEAU <sebastien.beau@akretion.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _, fields, models
from odoo.exceptions import UserError
class PurchaseOrder(models.Model):
_inherit = "purchase.order"
def _get_intercompany_pricelist(self, partner, dest_company):
if partner.property_product_pricelist.is_intercompany_supplier:
return partner.property_product_pricelist
else:
pricelist = self.env["product.pricelist"].search(
[
("company_id", "=", dest_company.id),
("is_intercompany_supplier", "=", True),
]
)
if len(pricelist) == 0:
raise UserError(
_(
(
"The Company {} do not have an intercompany pricelist "
"configured.\nPlease contact them and ask them to "
"active the option on the pricelist"
).format(dest_company.name)
)
)
else:
# Note in case that there is several pricelist that match we take
# the first one and the user will change it manually if needed
return fields.first(pricelist)
def _prepare_sale_order_data(
self, name, partner, dest_company, direct_delivery_address
):
res = super()._prepare_sale_order_data(
name, partner, dest_company, direct_delivery_address
)
res["pricelist_id"] = self._get_intercompany_pricelist(partner, dest_company).id
return res
| # Copyright 2021 Akretion (https://www.akretion.com).
# @author Sébastien BEAU <sebastien.beau@akretion.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _, models
from odoo.exceptions import UserError
class PurchaseOrder(models.Model):
_inherit = "purchase.order"
def _prepare_sale_order_data(
self, name, partner, dest_company, direct_delivery_address
):
res = super()._prepare_sale_order_data(
name, partner, dest_company, direct_delivery_address
)
pricelist = self.env["product.pricelist"].search(
[
("company_id", "=", dest_company.id),
("is_intercompany_supplier", "=", True),
]
)
if not len(pricelist) == 1:
raise UserError(
_("Company %s do not have an intercompany pricelist configured"),
dest_company.name,
)
else:
res["pricelist_id"] = pricelist.id
return res
| Python | 0 |
590ba3c9d645f6eac41687bee9f12f7c914858d6 | revert to http for loading clusters | progressivis/datasets/__init__.py | progressivis/datasets/__init__.py | import os
from progressivis import ProgressiveError
from .random import generate_random_csv
from .wget import wget_file
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../data'))
def get_dataset(name, **kwds):
if not os.path.isdir(DATA_DIR):
os.mkdir(DATA_DIR)
if name == 'bigfile':
return generate_random_csv('%s/bigfile.csv'%DATA_DIR, 1000000, 30)
if name == 'smallfile':
return generate_random_csv('%s/smallfile.csv'%DATA_DIR, 30000, 10)
if name == 'warlogs':
return wget_file(filename='%s/warlogs.vec.bz2'%DATA_DIR,
url='http://www.cs.ubc.ca/labs/imager/video/2014/QSNE/warlogs.vec.bz2',
**kwds)
if name.startswith('cluster:'):
fname = name[len('cluster:'):] + ".txt"
return wget_file(filename='%s/%s'%(DATA_DIR, fname),
url='http://cs.joensuu.fi/sipu/datasets/%s'%fname)
raise ProgressiveError('Unknow dataset %s'%name)
__all__ = ['get_dataset',
'generate_random_csv']
| import os
from progressivis import ProgressiveError
from .random import generate_random_csv
from .wget import wget_file
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../data'))
def get_dataset(name, **kwds):
if not os.path.isdir(DATA_DIR):
os.mkdir(DATA_DIR)
if name == 'bigfile':
return generate_random_csv('%s/bigfile.csv'%DATA_DIR, 1000000, 30)
if name == 'smallfile':
return generate_random_csv('%s/smallfile.csv'%DATA_DIR, 30000, 10)
if name == 'warlogs':
return wget_file(filename='%s/warlogs.vec.bz2'%DATA_DIR,
url='http://www.cs.ubc.ca/labs/imager/video/2014/QSNE/warlogs.vec.bz2',
**kwds)
if name.startswith('cluster:'):
fname = name[len('cluster:'):] + ".txt"
return wget_file(filename='%s/%s'%(DATA_DIR, fname),
url='https://cs.joensuu.fi/sipu/datasets/%s'%fname)
raise ProgressiveError('Unknow dataset %s'%name)
__all__ = ['get_dataset',
'generate_random_csv']
| Python | 0 |
0658934a7a7a1581c6f1d871c192f49b42144b09 | fix issue with ControlPlayer on mac | pyforms/gui/Controls/ControlPlayer/VideoQt5GLWidget.py | pyforms/gui/Controls/ControlPlayer/VideoQt5GLWidget.py | from pyforms.gui.Controls.ControlPlayer.AbstractGLWidget import AbstractGLWidget
from PyQt5 import QtGui
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5 import QtCore
class VideoQt5GLWidget(AbstractGLWidget, QOpenGLWidget):
def initializeGL(self):
self.gl = self.context().versionFunctions()
self.gl.initializeOpenGLFunctions()
'''
Sets up the OpenGL rendering context, defines display lists, etc.
Gets called once before the first time resizeGL() or paintGL() is called.
'''
self.gl.glClearDepth(1.0)
self.gl.glClearColor(0, 0, 0, 1.0)
self.gl.glEnable(self.gl.GL_DEPTH_TEST)
def perspective(self, fovy, aspect, zNear, zFar):
ymax = zNear * math.tan( fovy * math.pi / 360.0 );
ymin = -ymax;
xmin = ymin * aspect;
xmax = ymax * aspect;
self.gl.glFrustum( xmin, xmax, ymin, ymax, zNear, zFar )
def resizeGL(self, width, height):
self.setupViewport(width, height)
def setupViewport(self, width, height):
side = min(width, height)
self.gl.glViewport((width - side) // 2, (height - side) // 2, side,
side)
self.gl.glMatrixMode(self.gl.GL_PROJECTION)
self.gl.glLoadIdentity()
#self.gl.glOrtho(-0.5, +0.5, +0.5, -0.5, 4.0, 15.0)
self.perspective(40.0, float(width) / float(height), 0.01, 10.0)
self.gl.glMatrixMode(self.gl.GL_MODELVIEW) | from pyforms.gui.Controls.ControlPlayer.AbstractGLWidget import AbstractGLWidget
from PyQt5 import QtGui
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5 import QtCore
class VideoQt5GLWidget(AbstractGLWidget, QOpenGLWidget):
def initializeGL(self):
self.gl = self.context().versionFunctions()
self.gl.initializeOpenGLFunctions()
'''
Sets up the OpenGL rendering context, defines display lists, etc.
Gets called once before the first time resizeGL() or paintGL() is called.
'''
self.gl.glClearDepth(1.0)
self.gl.glClearColor(0, 0, 0, 1.0)
self.gl.glEnable(GL.GL_DEPTH_TEST)
def perspective(self, fovy, aspect, zNear, zFar):
ymax = zNear * math.tan( fovy * math.pi / 360.0 );
ymin = -ymax;
xmin = ymin * aspect;
xmax = ymax * aspect;
self.gl.glFrustum( xmin, xmax, ymin, ymax, zNear, zFar )
def resizeGL(self, width, height):
self.setupViewport(width, height)
def setupViewport(self, width, height):
side = min(width, height)
self.gl.glViewport((width - side) // 2, (height - side) // 2, side,
side)
self.gl.glMatrixMode(self.gl.GL_PROJECTION)
self.gl.glLoadIdentity()
#self.gl.glOrtho(-0.5, +0.5, +0.5, -0.5, 4.0, 15.0)
self.perspective(40.0, float(width) / float(height), 0.01, 10.0)
self.gl.glMatrixMode(self.gl.GL_MODELVIEW) | Python | 0 |
d0ed8aeb2126a4b14b8413bd8c6d54952451e890 | Update version number. | libcloud/__init__.py | libcloud/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
@var __version__: Current version of libcloud
"""
__all__ = ["__version__", "enable_debug"]
__version__ = "0.5.0-dev"
def enable_debug(fo):
"""
Enable library wide debugging to a file-like object.
@param fo: Where to append debugging information
@type fo: File like object, only write operations are used.
"""
from libcloud.base import ConnectionKey, LoggingHTTPConnection, LoggingHTTPSConnection
LoggingHTTPSConnection.log = fo
LoggingHTTPConnection.log = fo
ConnectionKey.conn_classes = (LoggingHTTPConnection, LoggingHTTPSConnection)
def _init_once():
"""
Utility function that is ran once on Library import.
This checks for the LIBCLOUD_DEBUG enviroment variable, which if it exists
is where we will log debug information about the provider transports.
If LIBCLOUD_DEBUG is not a path, C{/tmp/libcloud_debug.log} is used by
default.
"""
import os
d = os.getenv("LIBCLOUD_DEBUG")
if d:
if d.isdigit():
d = "/tmp/libcloud_debug.log"
fo = open(d, "a")
enable_debug(fo)
_init_once()
| # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
@var __version__: Current version of libcloud
"""
__all__ = ["__version__", "enable_debug"]
__version__ = "0.4.3-dev"
def enable_debug(fo):
"""
Enable library wide debugging to a file-like object.
@param fo: Where to append debugging information
@type fo: File like object, only write operations are used.
"""
from libcloud.base import ConnectionKey, LoggingHTTPConnection, LoggingHTTPSConnection
LoggingHTTPSConnection.log = fo
LoggingHTTPConnection.log = fo
ConnectionKey.conn_classes = (LoggingHTTPConnection, LoggingHTTPSConnection)
def _init_once():
"""
Utility function that is ran once on Library import.
This checks for the LIBCLOUD_DEBUG enviroment variable, which if it exists
is where we will log debug information about the provider transports.
If LIBCLOUD_DEBUG is not a path, C{/tmp/libcloud_debug.log} is used by
default.
"""
import os
d = os.getenv("LIBCLOUD_DEBUG")
if d:
if d.isdigit():
d = "/tmp/libcloud_debug.log"
fo = open(d, "a")
enable_debug(fo)
_init_once()
| Python | 0.000021 |
b999240903bb71e14818fb3f2d8eb12bda75ada2 | Bump tensorflow to 2.1.0 (#721) | tensorflow_io/core/python/ops/version_ops.py | tensorflow_io/core/python/ops/version_ops.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""version_ops"""
package = 'tensorflow==2.1.0'
version = '0.11.0'
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""version_ops"""
package = 'tensorflow>=2.1.0rc2'
version = '0.11.0'
| Python | 0 |
ce0a3a4b13b8257fa95c95376a043b02958e73f2 | Fix exception parameters | src/sentry_gitlab/plugin.py | src/sentry_gitlab/plugin.py | """
sentry_gitlab.plugin
~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import forms
from sentry.plugins.bases.issue import IssuePlugin
from django.utils.translation import ugettext_lazy as _
from gitlab import *
import sentry_gitlab
class GitLabOptionsForm(forms.Form):
gitlab_url = forms.CharField(
label=_('GitLab URL'),
widget=forms.TextInput(attrs={'placeholder': 'e.g. https://gitlab.example.com'}),
help_text=_('Enter the URL for your GitLab server'),
required=True)
gitlab_token = forms.CharField(
label=_('GitLab Private Token'),
widget=forms.TextInput(attrs={'placeholder': 'e.g. g5DWFtLzaztgYFrqhVfE'}),
help_text=_('Enter your GitLab API token'),
required=True)
gitlab_repo = forms.CharField(
label=_('Repository Name'),
widget=forms.TextInput(attrs={'placeholder': 'e.g. namespace/repo'}),
help_text=_('Enter your repository name, including namespace.'),
required=True)
class GitLabPlugin(IssuePlugin):
author = 'Pancentric Ltd'
author_url = 'https://github.com/pancentric/sentry-gitlab'
version = sentry_gitlab.VERSION
description = "Integrate GitLab issues by linking a repository to a project"
resource_links = [
('Bug Tracker', 'https://github.com/pancentric/sentry-gitlab/issues'),
('Source', 'https://github.com/pancentric/sentry-gitlab'),
]
slug = 'gitlab'
title = _('GitLab')
conf_title = title
conf_key = 'gitlab'
project_conf_form = GitLabOptionsForm
def is_configured(self, request, project, **kwargs):
return bool(self.get_option('gitlab_repo', project))
def get_new_issue_title(self, **kwargs):
return 'Create GitLab Issue'
def create_issue(self, request, group, form_data, **kwargs):
url = self.get_option('gitlab_url', group.project)
token = self.get_option('gitlab_token', group.project)
repo = self.get_option('gitlab_repo', group.project)
if repo.find('/') == -1:
repo_url = str(repo)
else:
repo_url = str(repo.replace('/', '%2F'))
gl = Gitlab(url, token)
try:
gl.auth()
except GitlabAuthenticationError as e:
raise forms.ValidationError(_('Unauthorized: Invalid Private Token: %s') % (e,))
except Exception as e:
raise forms.ValidationError(_('Error Communicating with GitLab: %s') % (e,))
data = {'title': form_data['title'], 'description': form_data['description']}
proj = gl.Project(id=repo_url)
issue = proj.Issue(data)
issue.save()
return issue.id
def get_issue_label(self, group, issue_id, **kwargs):
return 'GL-%s' % issue_id
def get_issue_url(self, group, issue_id, **kwargs):
url = self.get_option('gitlab_url', group.project)
repo = self.get_option('gitlab_repo', group.project)
return '%s/%s/issues/%s' % (url, repo, issue_id)
| """
sentry_gitlab.plugin
~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import forms
from sentry.plugins.bases.issue import IssuePlugin
from django.utils.translation import ugettext_lazy as _
from gitlab import *
import sentry_gitlab
class GitLabOptionsForm(forms.Form):
gitlab_url = forms.CharField(
label=_('GitLab URL'),
widget=forms.TextInput(attrs={'placeholder': 'e.g. https://gitlab.example.com'}),
help_text=_('Enter the URL for your GitLab server'),
required=True)
gitlab_token = forms.CharField(
label=_('GitLab Private Token'),
widget=forms.TextInput(attrs={'placeholder': 'e.g. g5DWFtLzaztgYFrqhVfE'}),
help_text=_('Enter your GitLab API token'),
required=True)
gitlab_repo = forms.CharField(
label=_('Repository Name'),
widget=forms.TextInput(attrs={'placeholder': 'e.g. namespace/repo'}),
help_text=_('Enter your repository name, including namespace.'),
required=True)
class GitLabPlugin(IssuePlugin):
author = 'Pancentric Ltd'
author_url = 'https://github.com/pancentric/sentry-gitlab'
version = sentry_gitlab.VERSION
description = "Integrate GitLab issues by linking a repository to a project"
resource_links = [
('Bug Tracker', 'https://github.com/pancentric/sentry-gitlab/issues'),
('Source', 'https://github.com/pancentric/sentry-gitlab'),
]
slug = 'gitlab'
title = _('GitLab')
conf_title = title
conf_key = 'gitlab'
project_conf_form = GitLabOptionsForm
def is_configured(self, request, project, **kwargs):
return bool(self.get_option('gitlab_repo', project))
def get_new_issue_title(self, **kwargs):
return 'Create GitLab Issue'
def create_issue(self, request, group, form_data, **kwargs):
url = self.get_option('gitlab_url', group.project)
token = self.get_option('gitlab_token', group.project)
repo = self.get_option('gitlab_repo', group.project)
if repo.find('/') == -1:
repo_url = str(repo)
else:
repo_url = str(repo.replace('/', '%2F'))
gl = Gitlab(url, token)
try:
gl.auth()
except GitlabAuthenticationError:
raise forms.ValidationError(_('Unauthorized: Invalid Private Token: %s') % (e,))
except Exception:
raise forms.ValidationError(_('Error Communicating with GitLab: %s') % (e,))
data = {'title': form_data['title'], 'description': form_data['description']}
proj = gl.Project(id=repo_url)
issue = proj.Issue(data)
issue.save()
return issue.id
def get_issue_label(self, group, issue_id, **kwargs):
return 'GL-%s' % issue_id
def get_issue_url(self, group, issue_id, **kwargs):
url = self.get_option('gitlab_url', group.project)
repo = self.get_option('gitlab_repo', group.project)
return '%s/%s/issues/%s' % (url, repo, issue_id)
| Python | 0.000011 |
f45cd2ff52cb672068e4bcf31b9c260cd43032ee | Use timeout decorator with use_signals=False | paasta_tools/remote_git.py | paasta_tools/remote_git.py | # Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import unicode_literals
import dulwich.client
import dulwich.errors
from paasta_tools.utils import timeout
def _make_determine_wants_func(ref_mutator):
"""Returns a safer version of ref_mutator, suitable for passing as the
determine_wants argument to dulwich's send_pack method. The returned
function will not delete or modify any existing refs."""
def determine_wants(old_refs):
refs = {k.decode('UTF-8'): v.decode('UTF-8') for k, v in old_refs.items()}
new_refs = ref_mutator(refs)
new_refs = {k.encode('UTF-8'): v.encode('UTF-8') for k, v in new_refs.items()}
new_refs.update(old_refs) # Make sure we don't delete/modify anything.
return new_refs
return determine_wants
def make_force_push_mutate_refs_func(targets, sha):
"""Create a 'force push' function that will inform send_pack that we want
to mark a certain list of target branches/tags to point to a particular
git_sha.
:param targets: List of branches/tags to point at the input sha
:param sha: The git sha to point the branches/tags at
:returns: A function to do the ref manipulation that a dulwich client can use"""
def mutate_refs(refs):
for target in targets:
refs[target.encode('UTF-8')] = sha.encode('UTF-8')
return refs
return mutate_refs
def create_remote_refs(git_url, ref_mutator, force=False):
"""Creates refs (tags, branches) on a remote git repo.
:param git_url: the URL or path to the remote git repo.
:param ref_mutator: A function that determines the new refs to create on
the remote repo. This gets passed a dictionary of the
remote server's refs in the format {name : hash, ...},
and should return a dictionary of the same format.
:param force: Bool, defaults to false. If true we will overwrite
refs even if they are already set.
:returns: The map of refs, with our changes applied.
"""
client, path = dulwich.client.get_transport_and_path(git_url)
if force is False:
determine_wants = _make_determine_wants_func(ref_mutator)
else:
determine_wants = ref_mutator
# We know we don't need to push any objects.
def generate_pack_contents(have, want):
return []
return client.send_pack(path, determine_wants, generate_pack_contents)
class LSRemoteException(Exception):
pass
@timeout(error_message="Timed out connecting to git server, is it reachable from where you are?", use_signals=False)
def list_remote_refs(git_url):
"""Get the refs from a remote git repo as a dictionary of name->hash."""
client, path = dulwich.client.get_transport_and_path(git_url)
try:
refs = client.fetch_pack(path, lambda refs: [], None, lambda data: None)
return {k.decode('UTF-8'): v.decode('UTF-8') for k, v in refs.items()}
except dulwich.errors.HangupException as e:
raise LSRemoteException("Unable to fetch remote refs: %s" % e)
| # Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import unicode_literals
import time
import dulwich.client
import dulwich.errors
from paasta_tools.utils import timeout
def _make_determine_wants_func(ref_mutator):
"""Returns a safer version of ref_mutator, suitable for passing as the
determine_wants argument to dulwich's send_pack method. The returned
function will not delete or modify any existing refs."""
def determine_wants(old_refs):
refs = {k.decode('UTF-8'): v.decode('UTF-8') for k, v in old_refs.items()}
new_refs = ref_mutator(refs)
new_refs = {k.encode('UTF-8'): v.encode('UTF-8') for k, v in new_refs.items()}
new_refs.update(old_refs) # Make sure we don't delete/modify anything.
return new_refs
return determine_wants
def make_force_push_mutate_refs_func(targets, sha):
"""Create a 'force push' function that will inform send_pack that we want
to mark a certain list of target branches/tags to point to a particular
git_sha.
:param targets: List of branches/tags to point at the input sha
:param sha: The git sha to point the branches/tags at
:returns: A function to do the ref manipulation that a dulwich client can use"""
def mutate_refs(refs):
for target in targets:
refs[target.encode('UTF-8')] = sha.encode('UTF-8')
return refs
return mutate_refs
def create_remote_refs(git_url, ref_mutator, force=False):
"""Creates refs (tags, branches) on a remote git repo.
:param git_url: the URL or path to the remote git repo.
:param ref_mutator: A function that determines the new refs to create on
the remote repo. This gets passed a dictionary of the
remote server's refs in the format {name : hash, ...},
and should return a dictionary of the same format.
:param force: Bool, defaults to false. If true we will overwrite
refs even if they are already set.
:returns: The map of refs, with our changes applied.
"""
client, path = dulwich.client.get_transport_and_path(git_url)
if force is False:
determine_wants = _make_determine_wants_func(ref_mutator)
else:
determine_wants = ref_mutator
# We know we don't need to push any objects.
def generate_pack_contents(have, want):
return []
return client.send_pack(path, determine_wants, generate_pack_contents)
class LSRemoteException(Exception):
pass
@timeout()
def list_remote_refs(git_url):
"""Get the refs from a remote git repo as a dictionary of name->hash."""
time.sleep(15)
client, path = dulwich.client.get_transport_and_path(git_url)
try:
refs = client.fetch_pack(path, lambda refs: [], None, lambda data: None)
return {k.decode('UTF-8'): v.decode('UTF-8') for k, v in refs.items()}
except dulwich.errors.HangupException as e:
raise LSRemoteException("Unable to fetch remote refs: %s" % e)
| Python | 0.023608 |
a9cd7d6eaa7ea70e962cf4d1c9e4aa53a2845968 | Bump version number | lillebror/version.py | lillebror/version.py | # Copyright 2012 Loop Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__title__ = 'lillebror'
__version__ = '0.2.0'
__author__ = 'Max Persson'
__license__ = 'Apache License 2.0'
__copyright__ = 'Copyright 2013 Max Persson'
__project_url__ = 'https://github.com/looplab/lillebror'
| # Copyright 2012 Loop Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__title__ = 'lillebror'
__version__ = '0.1.1'
__author__ = 'Max Persson'
__license__ = 'Apache License 2.0'
__copyright__ = 'Copyright 2013 Max Persson'
__project_url__ = 'https://github.com/looplab/lillebror'
| Python | 0.000002 |
d9f445796599bf1ecb48e21a53f3188925012053 | Correct order of synced/desynced subtitles in calc_correction | linear-correction.py | linear-correction.py | #!/usr/bin/env python
import srt
import datetime
import utils
def timedelta_to_milliseconds(delta):
return delta.days * 86400000 + \
delta.seconds * 1000 + \
delta.microseconds / 1000
def parse_args():
def srt_timestamp_to_milliseconds(parser, arg):
try:
delta = srt.srt_timestamp_to_timedelta(arg)
except ValueError:
parser.error('not a valid SRT timestamp: %s' % arg)
else:
return timedelta_to_milliseconds(delta)
parser = utils.basic_parser()
parser.add_argument(
'--desynced-start',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the first desynchronised timestamp',
)
parser.add_argument(
'--synced-start',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the first synchronised timestamp',
)
parser.add_argument(
'--desynced-end',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the second desynchronised timestamp',
)
parser.add_argument(
'--synced-end',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the second synchronised timestamp',
)
return parser.parse_args()
def calc_correction(synced_start, synced_end, desynced_start, desynced_end):
angular = (synced_end - synced_start) / (desynced_end - desynced_start)
linear = synced_end - angular * desynced_end
return angular, linear
def correct_time(current_msecs, angular, linear):
return round(current_msecs * angular + linear)
def correct_timedelta(bad_delta, angular, linear):
bad_msecs = timedelta_to_milliseconds(bad_delta)
good_msecs = correct_time(bad_msecs, angular, linear)
good_delta = datetime.timedelta(milliseconds=good_msecs)
return good_delta
def linear_correct_subs(subtitles, angular, linear):
for subtitle in subtitles:
subtitle.start = correct_timedelta(subtitle.start, angular, linear)
subtitle.end = correct_timedelta(subtitle.end, angular, linear)
yield subtitle
def main():
args = parse_args()
angular, linear = calc_correction(
args.synced_start, args.synced_end,
args.desynced_start, args.desynced_end,
)
subtitles_in = srt.parse(args.input.read())
corrected_subs = linear_correct_subs(subtitles_in, angular, linear)
output = srt.compose(corrected_subs, strict=args.strict)
args.output.write(output)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import srt
import datetime
import utils
def timedelta_to_milliseconds(delta):
return delta.days * 86400000 + \
delta.seconds * 1000 + \
delta.microseconds / 1000
def parse_args():
def srt_timestamp_to_milliseconds(parser, arg):
try:
delta = srt.srt_timestamp_to_timedelta(arg)
except ValueError:
parser.error('not a valid SRT timestamp: %s' % arg)
else:
return timedelta_to_milliseconds(delta)
parser = utils.basic_parser()
parser.add_argument(
'--desynced-start',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the first desynchronised timestamp',
)
parser.add_argument(
'--synced-start',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the first synchronised timestamp',
)
parser.add_argument(
'--desynced-end',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the second desynchronised timestamp',
)
parser.add_argument(
'--synced-end',
type=lambda arg: srt_timestamp_to_milliseconds(parser, arg),
required=True,
help='the second synchronised timestamp',
)
return parser.parse_args()
def calc_correction(synced_start, synced_end, desynced_start, desynced_end):
angular = (desynced_end - desynced_start) / (synced_end - synced_start)
linear = desynced_end - angular * synced_end
return angular, linear
def correct_time(current_msecs, angular, linear):
return round(current_msecs * angular + linear)
def correct_timedelta(bad_delta, angular, linear):
bad_msecs = timedelta_to_milliseconds(bad_delta)
good_msecs = correct_time(bad_msecs, angular, linear)
good_delta = datetime.timedelta(milliseconds=good_msecs)
return good_delta
def linear_correct_subs(subtitles, angular, linear):
for subtitle in subtitles:
subtitle.start = correct_timedelta(subtitle.start, angular, linear)
subtitle.end = correct_timedelta(subtitle.end, angular, linear)
yield subtitle
def main():
args = parse_args()
angular, linear = calc_correction(
args.synced_start, args.synced_end,
args.desynced_start, args.desynced_end,
)
subtitles_in = srt.parse(args.input.read())
corrected_subs = linear_correct_subs(subtitles_in, angular, linear)
output = srt.compose(corrected_subs, strict=args.strict)
args.output.write(output)
if __name__ == '__main__':
main()
| Python | 0.000001 |
3fea814461d2a51e0cc13c4981fa6f4cdfca75e9 | Correct broken import, this could never have worked. | providers/moviedata/filmtipset.py | providers/moviedata/filmtipset.py | from providers.moviedata.provider import MoviedataProvider
from urllib import urlencode
from access_keys import ACCESS_KEYS
from application import APPLICATION as APP
IDENTIFIER = "Filmtipset"
class Provider(MoviedataProvider):
def get_url(self, movie):
options = {
"action": "search",
"id": movie["name"],
"returntype": "json",
"accesskey": ACCESS_KEYS[IDENTIFIER]["ACCESS_KEY"],
"usernr": ACCESS_KEYS[IDENTIFIER]["USER_KEY"],
}
return "http://www.filmtipset.se/api/api.cgi?" + urlencode(options)
def get_movie_data(self, movie):
url = self.get_url(movie)
APP.debug("Fetching url: %s" % url)
data = self.parse_json(url, path="0.data.0.hits")
data = self.find_movie_matching_year(data, movie["year"])
if not data:
return None, {}
data = self.transform_data(data)
return data["id"], data
def find_movie_matching_year(self, data, year):
if not year:
return self.traverse_json(data, path="0.movie")
for i in range(5):
data = self.traverse_json(data, "%s.movie" % i)
if data.get("year", None) == year:
return data
return self.traverse_json(data, path="0.movie")
def get_data_mapping(self):
return {
"id": lambda data: "tt" + data["imdb"],
"title": "orgname",
"title_swe": "name",
"country": "country",
"director": "director",
"year": "year",
"filmtipset_my_grade": "grade.value",
"filmtipset_my_grade_type": "grade.type",
"filmtipset_avg_grade": "filmtipsetgrade.value",
"filmtipset_url": "url",
"filmtipset_id": "id",
}
| from providers.moviedata.provider import MoviedataProvider
from urllib import urlencode
from settings import ACCESS_KEYS
from application import APPLICATION as APP
IDENTIFIER = "Filmtipset"
class Provider(MoviedataProvider):
def get_url(self, movie):
options = {
"action": "search",
"id": movie["name"],
"returntype": "json",
"accesskey": ACCESS_KEYS[IDENTIFIER]["ACCESS_KEY"],
"usernr": ACCESS_KEYS[IDENTIFIER]["USER_KEY"],
}
return "http://www.filmtipset.se/api/api.cgi?" + urlencode(options)
def get_movie_data(self, movie):
url = self.get_url(movie)
APP.debug("Fetching url: %s" % url)
data = self.parse_json(url, path="0.data.0.hits")
data = self.find_movie_matching_year(data, movie["year"])
if not data:
return None, {}
data = self.transform_data(data)
return data["id"], data
def find_movie_matching_year(self, data, year):
if not year:
return self.traverse_json(data, path="0.movie")
for i in range(5):
data = self.traverse_json(data, "%s.movie" % i)
if data.get("year", None) == year:
return data
return self.traverse_json(data, path="0.movie")
def get_data_mapping(self):
return {
"id": lambda data: "tt" + data["imdb"],
"title": "orgname",
"title_swe": "name",
"country": "country",
"director": "director",
"year": "year",
"filmtipset_my_grade": "grade.value",
"filmtipset_my_grade_type": "grade.type",
"filmtipset_avg_grade": "filmtipsetgrade.value",
"filmtipset_url": "url",
"filmtipset_id": "id",
}
| Python | 0 |
ca49d4b27407812e2afcd5a20e546db85706d538 | refactor operator append | calculator.py | calculator.py | import re
from collections import deque
class Calculator:
supported_operators = {
"*": {"precedence": 3, "assoc": "left"},
"/": {"precedence": 3, "assoc": "left"},
"+": {"precedence": 2, "assoc": "left"},
"-": {"precedence": 2, "assoc": "left"}
}
def calculate(self, input_string):
output = deque([])
operators = []
elems = self.__find_exp_elems(input_string)
self.__parse_infix_notation(elems, output, operators)
return self.__eval_rpn(output)
def __find_exp_elems(self, input_string):
regexp = "\d+|[{}]".format("".join(self.supported_operators.keys()))
elems = re.findall(regexp, input_string)
if not elems:
raise RuntimeError("No operands or operators provided")
else: return elems
def __parse_infix_notation(self, elements, output, operators):
for e in elements:
if e.isdigit():
output.append(float(e))
else:
self.__append_operator(operators, output, e)
while len(operators) > 0:
output.append(operators.pop())
def __eval_rpn(self, output):
operands = []
while len(output) > 0:
token = output.popleft()
if self.__is_operand(token):
operands.append(token)
else: self.__eval_operator(token, operands)
if len(operands) == 1:
return operands.pop()
else: raise RuntimeError("Input has too many values")
def __is_operand(self, token):
return isinstance(token, float)
def __eval_operator(self, token, operands):
if token == "+": self.__add(operands)
elif token == "-": self.__subtract(operands)
elif token == "*": self.__multiply(operands)
elif token == "/": self.__divide(operands)
def __append_operator(self, operators, output, op1):
while len(operators) > 0:
op2 = operators[-1]
if self.__should_pop_op_off_stack(op1, op2):
output.append(operators.pop())
else: break
operators.append(op1)
def __should_pop_op_off_stack(self, op1, op2):
return (self.__is_left_associative(op1) and
self.__op_precedence(op1) <= self.__op_precedence(op2)) or \
(self.__is_right_associative(op1) and
self.__op_precedence(op1) < self.__op_precedence(op2))
def __is_left_associative(self, op):
return self.supported_operators[op]["assoc"] == "left"
def __is_right_associative(self, op):
return self.supported_operators[op]["assoc"] == "right"
def __op_precedence(self, op):
return self.supported_operators[op]["precedence"]
def __add(self, operands):
if len(operands) < 2:
raise RuntimeError("Addition requires 2 operands")
else:
result = operands.pop() + operands.pop()
operands.append(result)
def __subtract(self, operands):
if len(operands) < 2:
raise RuntimeError("Subtraction requires 2 operands")
else:
op2 = operands.pop()
op1 = operands.pop()
operands.append(op1 - op2)
def __multiply(self, operands):
if len(operands) < 2:
raise RuntimeError("Multiplication requires 2 operands")
else:
result = operands.pop() * operands.pop()
operands.append(result)
def __divide(self, operands):
if len(operands) < 2:
raise RuntimeError("Division requires 2 operands")
else:
op2 = operands.pop()
op1 = operands.pop()
operands.append(op1 / op2)
| import re
from collections import deque
class Calculator:
supported_operators = {
"*": {"precedence": 3, "assoc": "left"},
"/": {"precedence": 3, "assoc": "left"},
"+": {"precedence": 2, "assoc": "left"},
"-": {"precedence": 2, "assoc": "left"}
}
def calculate(self, input_string):
output = deque([])
operators = []
elems = self.__find_expression_elements(input_string)
self.__parse_infix_notation(elems, output, operators)
return self.__eval_rpn(output)
def __find_expression_elements(self, input_string):
regexp = "\d+|[{}]".format("".join(self.supported_operators.keys()))
elems = re.findall(regexp, input_string)
if not elems:
raise RuntimeError("No operands or operators provided")
else: return elems
def __parse_infix_notation(self, elements, output, operators):
for e in elements:
if e.isdigit():
output.append(float(e))
else:
self.__append_operator(operators, output, e)
while len(operators) > 0:
output.append(operators.pop())
def __eval_rpn(self, output):
operands = []
while len(output) > 0:
token = output.popleft()
if isinstance(token, float):
operands.append(token)
else:
if token == "+": self.__add(operands)
if token == "-": self.__subtract(operands)
if token == "*": self.__multiply(operands)
if token == "/": self.__divide(operands)
if len(operands) == 1:
return operands.pop()
else: raise RuntimeError("Input has too many values")
def __append_operator(self, operators, output, op1):
while len(operators) > 0:
op2 = operators[-1]
if ((self.__is_left_associative(op1) and self.__op_precedence(op1) <= self.__op_precedence(op2)) or
(self.__is_right_associative(op1) and self.__op_precedence(op1) <= self.__op_precedence(op2))):
output.append(operators.pop())
else:
break
operators.append(op1)
def __is_left_associative(self, op):
return self.supported_operators[op]["assoc"] == "left"
def __is_right_associative(self, op):
return self.supported_operators[op]["assoc"] == "right"
def __op_precedence(self, op):
return self.supported_operators[op]["precedence"]
def __add(self, operands):
if len(operands) < 2:
raise RuntimeError("Addition requires 2 operands")
else:
result = operands.pop() + operands.pop()
operands.append(result)
def __subtract(self, operands):
if len(operands) < 2:
raise RuntimeError("Subtraction requires 2 operands")
else:
op2 = operands.pop()
op1 = operands.pop()
operands.append(op1 - op2)
def __multiply(self, operands):
if len(operands) < 2:
raise RuntimeError("Multiplication requires 2 operands")
else:
result = operands.pop() * operands.pop()
operands.append(result)
def __divide(self, operands):
if len(operands) < 2:
raise RuntimeError("Division requires 2 operands")
else:
op2 = operands.pop()
op1 = operands.pop()
operands.append(op1 / op2)
| Python | 0.000014 |
00d9ee19790e0cd1bfdab0765e4c0e858d5f22bd | fix bug in use of 'media' | localcrawler/core.py | localcrawler/core.py | from urlparse import urlparse
from BeautifulSoup import BeautifulSoup
from django.conf import settings
from django.test.client import Client
import sys
__all__ = ['Crawler']
class Crawler(object):
def __init__(self, entry_point='/',
img=True, media=True, # Media is deprecated: Use img
media_dir=True, static_dir=True, css=True, js=True,
bad_soup=True, client=None, ignore=None,
return_results=False, return_response=False,
output=sys.stderr):
self.results = None
self.queue = [entry_point]
self.ignore = ignore or []
self.img = img
self.media = media # Deprecated. Use img
self.media_dir = media_dir
self.static_dir = static_dir
self.css = css
self.js = js
self.bad_soup = bad_soup
self.return_results = return_results
self.return_response = return_response
self.client = client or Client()
self.output = output
self.success = True
self.crawled = 0
self.failed = 0
self.succeeded = 0
def crawl(self):
self.results = []
while self.queue:
self.check(self.queue.pop(0))
return self.success
def check(self, url):
"""
Open a single URL and check it's status code.
If status is OK, run a scan if content type is html.
"""
response = self.client.get(url, follow=True)
if self.return_results:
if self.return_response:
result = (url, response.status_code, response)
else:
result = (url, response.status_code)
self.results.append(result)
self.ignore.append(url)
# check if we're a 200
if response.status_code != 200:
self.success = False
self.report(response.status_code, url, "URL Failed")
return
self.succeeded += 1
html = response.content
if response.get('Content-Type', '').startswith('text/html'):
self.scan(html, url)
def report(self, prefix, url, message):
self.failed += 1
print >>self.output, "[%s] %s (%s)" % (prefix, url, message)
def scan(self, html, url):
"""
Scan a HTML document for further links we might be interested in.
"""
try:
soup = BeautifulSoup(html)
except Exception, e:
if self.bad_soup:
self.success = False
self.report("SOUP", url, unicode(e))
return
# media is deprecated but currently setting either media or
# img to False will disable checking of images
if self.img and self.media:
for img in soup.findAll('img'):
src = img.get('src', '')
if self._relevant(src):
self.queue.append(src)
if self.js:
for js in soup.findAll('script', attrs={'type': 'text/javascript'}):
src = js.get('src', '')
if self._relevant(src):
self.queue.append(src)
if self.css:
for css in soup.findAll('link', attrs={'type': 'text/css'}):
href = css.get('href', '')
if self._relevant(href):
self.queue.append(href)
for a in soup.findAll('a'):
href = a.get('href', '')
if self._relevant(href):
self.queue.append(href)
def _relevant(self, url):
if not url:
return False
url_parts = urlparse(url)
conditions = [
url_parts.netloc == '',
url.startswith('/'),
not url in self.ignore,
]
if not self.media_dir:
conditions.append(not url.startswith(settings.MEDIA_URL))
if not self.static_dir:
conditions.append(not url.startswith(settings.STATIC_URL))
return all(conditions)
| from urlparse import urlparse
from BeautifulSoup import BeautifulSoup
from django.conf import settings
from django.test.client import Client
import sys
__all__ = ['Crawler']
class Crawler(object):
def __init__(self, entry_point='/',
img=True, media=True, # Media is deprecated: Use img
media_dir=True, static_dir=True, css=True, js=True,
bad_soup=True, client=None, ignore=None,
return_results=False, return_response=False,
output=sys.stderr):
self.results = None
self.queue = [entry_point]
self.ignore = ignore or []
self.img = img
self.media = img # Deprecated. Use img
self.media_dir = media_dir
self.static_dir = static_dir
self.css = css
self.js = js
self.bad_soup = bad_soup
self.return_results = return_results
self.return_response = return_response
self.client = client or Client()
self.output = output
self.success = True
self.crawled = 0
self.failed = 0
self.succeeded = 0
def crawl(self):
self.results = []
while self.queue:
self.check(self.queue.pop(0))
return self.success
def check(self, url):
"""
Open a single URL and check it's status code.
If status is OK, run a scan if content type is html.
"""
response = self.client.get(url, follow=True)
if self.return_results:
if self.return_response:
result = (url, response.status_code, response)
else:
result = (url, response.status_code)
self.results.append(result)
self.ignore.append(url)
# check if we're a 200
if response.status_code != 200:
self.success = False
self.report(response.status_code, url, "URL Failed")
return
self.succeeded += 1
html = response.content
if response.get('Content-Type', '').startswith('text/html'):
self.scan(html, url)
def report(self, prefix, url, message):
self.failed += 1
print >>self.output, "[%s] %s (%s)" % (prefix, url, message)
def scan(self, html, url):
"""
Scan a HTML document for further links we might be interested in.
"""
try:
soup = BeautifulSoup(html)
except Exception, e:
if self.bad_soup:
self.success = False
self.report("SOUP", url, unicode(e))
return
if self.img and self.media:
for img in soup.findAll('img'):
src = img.get('src', '')
if self._relevant(src):
self.queue.append(src)
if self.js:
for js in soup.findAll('script', attrs={'type': 'text/javascript'}):
src = js.get('src', '')
if self._relevant(src):
self.queue.append(src)
if self.css:
for css in soup.findAll('link', attrs={'type': 'text/css'}):
href = css.get('href', '')
if self._relevant(href):
self.queue.append(href)
for a in soup.findAll('a'):
href = a.get('href', '')
if self._relevant(href):
self.queue.append(href)
def _relevant(self, url):
if not url:
return False
url_parts = urlparse(url)
conditions = [
url_parts.netloc == '',
url.startswith('/'),
not url in self.ignore,
]
if not self.media_dir:
conditions.append(not url.startswith(settings.MEDIA_URL))
if not self.static_dir:
conditions.append(not url.startswith(settings.STATIC_URL))
return all(conditions)
| Python | 0 |
b8241c2ff0cff4a0bc96e6d229c80029cdbcb71c | Change contact email. | luminoth/__init__.py | luminoth/__init__.py | from .cli import cli # noqa
__version__ = '0.0.1.dev0'
__title__ = 'Luminoth'
__description__ = 'Computer vision toolkit based on TensorFlow'
__uri__ = 'http://luminoth.ai'
__doc__ = __description__ + ' <' + __uri__ + '>'
__author__ = 'Tryolabs'
__email__ = 'luminoth@tryolabs.com'
__license__ = 'BSD 3-Clause License'
__copyright__ = 'Copyright (c) 2017 Tryolabs S.A.'
| from .cli import cli # noqa
__version__ = '0.0.1.dev0'
__title__ = 'Luminoth'
__description__ = 'Computer vision toolkit based on TensorFlow'
__uri__ = 'http://luminoth.ai'
__doc__ = __description__ + ' <' + __uri__ + '>'
__author__ = 'Tryolabs'
__email__ = 'hello@tryolabs.com'
__license__ = 'BSD 3-Clause License'
__copyright__ = 'Copyright (c) 2017 Tryolabs S.A.'
| Python | 0 |
df3441a2c98fffbb18c11d3660acb86d2e31e5fa | Fix main run | src/ultros/core/__main__.py | src/ultros/core/__main__.py | # coding=utf-8
import argparse
import asyncio
from ultros.core.ultros import Ultros
"""
Ultros - Module runnable
"""
__author__ = "Gareth Coles"
__version__ = "0.0.1"
def start(arguments):
u = Ultros(arguments.config, arguments.data)
u.start()
def init(arguments):
pass
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog="ultros")
parser.add_argument(
"--version", action="version", version="Ultros {}".format(__version__)
)
parser.add_argument(
"--config", help="specify a directory containing configuration files",
default="./config"
)
parser.add_argument(
"--data", help="specify a directory to store data files",
default="./data"
)
subparsers = parser.add_subparsers()
parser_init = subparsers.add_parser(
"init", help="Create a default directory structure with example files"
)
parser_init.set_defaults(func=init)
parser_start = subparsers.add_parser("start", help="Start Ultros")
parser_start.set_defaults(func=start)
args = parser.parse_args()
if hasattr(args, "func"):
args.func(args)
else:
parser.print_usage()
| # coding=utf-8
import argparse
import asyncio
from ultros.core.ultros import Ultros
"""
Ultros - Module runnable
"""
__author__ = "Gareth Coles"
__version__ = "0.0.1"
def start(args):
u = Ultros(args.config, args.data)
# Gonna have to be a coroutine if we're AIO-based. Probably.
asyncio.get_event_loop().run_until_complete(u.start)
def init(args):
pass
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog="ultros")
parser.add_argument(
"--version", action="version", version="Ultros {}".format(__version__)
)
parser.add_argument(
"--config", help="specify a directory containing configuration files",
default="./config"
)
parser.add_argument(
"--data", help="specify a directory to store data files",
default="./data"
)
subparsers = parser.add_subparsers()
parser_init = subparsers.add_parser(
"init", help="Create a default directory structure with example files"
)
parser_init.set_defaults(func=init)
parser_start = subparsers.add_parser("start", help="Start Ultros")
parser_start.set_defaults(func=start)
args = parser.parse_args()
if hasattr(args, "func"):
args.func(args)
else:
parser.print_usage()
| Python | 0.000733 |
4300cf8c6e98081c429fcbed44ed387af7735aa4 | Add a link to python-markdown-math extension | markups/mdx_mathjax.py | markups/mdx_mathjax.py | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2015
# Maintained in https://github.com/mitya57/python-markdown-math
'''
Math extension for Python-Markdown
==================================
Adds support for displaying math formulas using [MathJax](http://www.mathjax.org/).
Author: 2015, Dmitry Shachnev <mitya57@gmail.com>.
'''
from __future__ import absolute_import
import markdown
class MathExtension(markdown.extensions.Extension):
def __init__(self, *args, **kwargs):
self.config = {
'enable_dollar_delimiter': [False, 'Enable single-dollar delimiter'],
}
super(MathExtension, self).__init__(*args, **kwargs)
def extendMarkdown(self, md, md_globals):
def handle_match_inline(m):
node = markdown.util.etree.Element('script')
node.set('type', 'math/tex')
node.text = markdown.util.AtomicString(m.group(3))
return node
def handle_match(m):
node = markdown.util.etree.Element('script')
node.set('type', 'math/tex; mode=display')
if '\\begin' in m.group(2):
node.text = markdown.util.AtomicString(m.group(2) + m.group(4) + m.group(5))
else:
node.text = markdown.util.AtomicString(m.group(3))
return node
configs = self.getConfigs()
inlinemathpatterns = (
markdown.inlinepatterns.Pattern(r'(?<!\\|\$)(\$)([^\$]+)(\$)'), # $...$
markdown.inlinepatterns.Pattern(r'(?<!\\)(\\\()(.+?)(\\\))') # \(...\)
)
mathpatterns = (
markdown.inlinepatterns.Pattern(r'(?<!\\)(\$\$)([^\$]+)(\$\$)'), # $$...$$
markdown.inlinepatterns.Pattern(r'(?<!\\)(\\\[)(.+?)(\\\])'), # \[...\]
markdown.inlinepatterns.Pattern(r'(?<!\\)(\\begin{([a-z]+?\*?)})(.+?)(\\end{\3})')
)
if not configs['enable_dollar_delimiter']:
inlinemathpatterns = inlinemathpatterns[1:]
for i, pattern in enumerate(inlinemathpatterns):
pattern.handleMatch = handle_match_inline
md.inlinePatterns.add('math-inline-%d' % i, pattern, '<escape')
for i, pattern in enumerate(mathpatterns):
pattern.handleMatch = handle_match
md.inlinePatterns.add('math-%d' % i, pattern, '<escape')
def makeExtension(*args, **kwargs):
return MathExtension(*args, **kwargs)
| # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2015
'''
Math extension for Python-Markdown
==================================
Adds support for displaying math formulas using [MathJax](http://www.mathjax.org/).
Author: 2015, Dmitry Shachnev <mitya57@gmail.com>.
'''
from __future__ import absolute_import
import markdown
class MathExtension(markdown.extensions.Extension):
def __init__(self, *args, **kwargs):
self.config = {
'enable_dollar_delimiter': [False, 'Enable single-dollar delimiter'],
}
super(MathExtension, self).__init__(*args, **kwargs)
def extendMarkdown(self, md, md_globals):
def handle_match_inline(m):
node = markdown.util.etree.Element('script')
node.set('type', 'math/tex')
node.text = markdown.util.AtomicString(m.group(3))
return node
def handle_match(m):
node = markdown.util.etree.Element('script')
node.set('type', 'math/tex; mode=display')
if '\\begin' in m.group(2):
node.text = markdown.util.AtomicString(m.group(2) + m.group(4) + m.group(5))
else:
node.text = markdown.util.AtomicString(m.group(3))
return node
configs = self.getConfigs()
inlinemathpatterns = (
markdown.inlinepatterns.Pattern(r'(?<!\\|\$)(\$)([^\$]+)(\$)'), # $...$
markdown.inlinepatterns.Pattern(r'(?<!\\)(\\\()(.+?)(\\\))') # \(...\)
)
mathpatterns = (
markdown.inlinepatterns.Pattern(r'(?<!\\)(\$\$)([^\$]+)(\$\$)'), # $$...$$
markdown.inlinepatterns.Pattern(r'(?<!\\)(\\\[)(.+?)(\\\])'), # \[...\]
markdown.inlinepatterns.Pattern(r'(?<!\\)(\\begin{([a-z]+?\*?)})(.+?)(\\end{\3})')
)
if not configs['enable_dollar_delimiter']:
inlinemathpatterns = inlinemathpatterns[1:]
for i, pattern in enumerate(inlinemathpatterns):
pattern.handleMatch = handle_match_inline
md.inlinePatterns.add('math-inline-%d' % i, pattern, '<escape')
for i, pattern in enumerate(mathpatterns):
pattern.handleMatch = handle_match
md.inlinePatterns.add('math-%d' % i, pattern, '<escape')
def makeExtension(*args, **kwargs):
return MathExtension(*args, **kwargs)
| Python | 0 |
5229bf4a16d468a3a337db65c478671409d6d898 | Update summery.py | metric-consumer/summary.py | metric-consumer/summary.py | #!/usr/bin/python
import os
import argparse
import re
def cumulative_moving_average(new_value, old_mean, total_items):
return old_mean + (new_value - old_mean) / total_items
def print_file_summary(path):
cma = 0
n = 0
with open(path, 'r') as csv_file:
all_lines = csv_file.readlines()
for line in all_lines[1:]:
try:
values = line.split(',')
#latency,1467792005016000000,3,False,338,False,256.0,1.467791983851e+12
receive_time = float(values[1])
send_time = float(values[7])
receive_time = receive_time/1000000 #convert from nanoseconds
travel_time = receive_time - send_time
cma = cumulative_moving_average(travel_time, cma, n+1)
n = n+1
except:
continue
print '{} = mean {}'.format(path, cma)
parser = argparse.ArgumentParser(description='Traverse all csv files in given dir and print mean travel time')
parser.add_argument('--dir', dest='dir', type=str, help='Root directory')
parser.set_defaults(dir='.')
args = parser.parse_args()
csv_pattern = re.compile(".*\.csv$")
for root, dirs, files in os.walk(args.dir):
for f in files:
if(csv_pattern.match(f)):
print_file_summary('{}/{}'.format(root, f))
| #!/usr/bin/python
import os
import argparse
import re
def cumulative_moving_average(new_value, old_mean, total_items):
return old_mean + (new_value - old_mean) / total_items
def print_file_summary(path):
cma = 0
n = 0
with open(path, 'r') as csv_file:
all_lines = csv_file.readlines()
for line in all_lines[1:]:
try:
values = line.split(',')
#latency,1467792005016000000,3,False,338,False,256.0,1.467791983851e+12
receive_time = int(values[1])
send_time = int(float(values[7]))
receive_time = receive_time/1000000 #convert from nanoseconds
travel_time = receive_time - send_time
cma = cumulative_moving_average(travel_time, cma, n+1)
n = n+1
except:
continue
print '{} = mean {}'.format(path, cma)
parser = argparse.ArgumentParser(description='Traverse all csv files in given dir and print mean travel time')
parser.add_argument('--dir', dest='dir', type=str, help='Root directory')
parser.set_defaults(dir='.')
args = parser.parse_args()
csv_pattern = re.compile(".*\.csv$")
for root, dirs, files in os.walk(args.dir):
for f in files:
if(csv_pattern.match(f)):
print_file_summary('{}/{}'.format(root, f))
| Python | 0.000001 |
4395fb9d6c1f7c4c48618a13681eae16e5e41ae6 | Fix docs | xpathwebdriver/default_settings.py | xpathwebdriver/default_settings.py | # -*- coding: utf-8 -*-
'''
Smoothtest
Copyright (c) 2014 Juju. Inc
Code Licensed under MIT License. See LICENSE file.
'''
import logging
import json
class Settings(object):
def __init__(self):
if self.webdriver_remote_credentials_path:
with open(self.webdriver_remote_credentials_path, 'r') as fp:
cred = json.load(fp)
self.webdriver_remote_command_executor = cred['webdriver_remote_command_executor']
self.webdriver_remote_session_id = cred['webdriver_remote_session_id']
@property
def base_url(self):
return self.web_server_url
# Server to be tested URL eg: http://www.example.com
web_server_url = ''
# Virtual display is useful to keep the webdriver browser contained
# avoiding the browser to pop-up abover other windows (with alerts for example)
virtual_display_enable = False # Use virtual display
virtual_display_visible = False # Show the virtual display or may be hidden (for headless testing)
virtual_display_backend = None # 'xvfb', 'xvnc' or 'xephyr', ignores ``virtual_display_visible``
virtual_display_size = (800, 600) # Dimensions of the virtual display
virtual_display_keep_open = False # If we want to check results (useful whe combined with webdriver_browser_keep_open)
webdriver_enabled = True # Whether or not automatically create the browser
webdriver_browser = 'Chrome' # Which browser we would like to use webdriver with: Firefox, Chrome, PhantomJs, etc...
webdriver_browser_keep_open = False # Keep browser open after python process is dead
webdriver_pool_size = 1
#Remote driver/reuse open driver
webdriver_remote_command_executor = '' # Manually provide the url for the driver eg: 'http://127.0.0.1:54551'
webdriver_remote_session_id = '' # Manually provide session id for reusage eg: '4aed25f4a5ce78bb7d57c19663110b3c'
webdriver_remote_credentials_path = '' # Path to json file containing previous 2 key above (eg:dumped by "xpathshell -d <path>.json")
#webdriver_browser_life DEPRECATED, never used in code
# Browsers profiles
# Eg: '/home/<user>/.mozilla/firefox/4iyhtofy.webdriver_autotest' on linux
# or: 'C:/Users/<user>/AppData/Roaming/Mozilla/Firefox/Profiles/c1r3g2wi.default' on windows
webdriver_firefox_profile = None
screenshot_level = 0 # Like a text logging level, but doing screenshots (WIP)
# Higher level-> more screenshots per action
screenshot_exceptions_dir = './' # Were to save logged screenshot
assert_screenshots_dir = '/tmp/'
assert_screenshots_learning = False
assert_screenshots_failed_dir = '/tmp/'
log_level_default = logging.INFO
log_level_root_handler = logging.DEBUG
log_color = False # Not working on Python 3
def smoke_test_module():
Settings()
if __name__ == "__main__":
smoke_test_module()
| # -*- coding: utf-8 -*-
'''
Smoothtest
Copyright (c) 2014 Juju. Inc
Code Licensed under MIT License. See LICENSE file.
'''
import logging
import json
class Settings(object):
def __init__(self):
if self.webdriver_remote_credentials_path:
with open(self.webdriver_remote_credentials_path, 'r') as fp:
cred = json.load(fp)
self.webdriver_remote_command_executor = cred['webdriver_remote_command_executor']
self.webdriver_remote_session_id = cred['webdriver_remote_session_id']
@property
def base_url(self):
return self.web_server_url
# Server to be tested URL eg: http://www.example.com
web_server_url = ''
# Virtual display is useful to keep the webdriver browser contained
# avoiding the browser to pop-up abover other windows (with alerts for example)
virtual_display_enable = False # Use virtual display
virtual_display_visible = False # Show the virtual display or may be hidden (for headless testing)
virtual_display_backend = None # 'xvfb', 'xvnc' or 'xephyr', ignores ``virtual_display_visible``
virtual_display_size = (800, 600) # Dimensions of the virtual display
virtual_display_keep_open = False # Keep the virtual display after a smoothtest
# process finished (useful when we also keep the browser open for debugging)
webdriver_enabled = True # Whether or not automatically create the browser
webdriver_browser = 'Chrome' #'PhantomJS' # Which browser we would like to use webdriver with: Firefox, Chrome, PhantomJs, etc...
webdriver_browser_keep_open = False # Keep browser open after python process is dead
webdriver_pool_size = 1
#Remote driver/reuse open driver
webdriver_remote_command_executor = '' # Manually provide the url for the driver eg: 'http://127.0.0.1:54551'
webdriver_remote_session_id = '' # Manually provide session id for reusage eg: '4aed25f4a5ce78bb7d57c19663110b3c'
webdriver_remote_credentials_path = '' # Path to json file containing previous 2 key above (eg:dumped by "xpathshell -d <path>.json")
#webdriver_browser_life DEPRECATED, never used in code
# Browsers profiles
# Eg: '/home/<user>/.mozilla/firefox/4iyhtofy.webdriver_autotest' on linux
# or: 'C:/Users/<user>/AppData/Roaming/Mozilla/Firefox/Profiles/c1r3g2wi.default' on windows
webdriver_firefox_profile = None
screenshot_level = 0 # Like a text logging level, but doing screenshots (WIP)
# Higher level-> more screenshots per action
screenshot_exceptions_dir = './' # Were to save logged screenshot
assert_screenshots_dir = '/tmp/'
assert_screenshots_learning = False
assert_screenshots_failed_dir = '/tmp/'
log_level_default = logging.INFO
log_level_root_handler = logging.DEBUG
log_color = False # Not working on Python 3
def smoke_test_module():
Settings()
if __name__ == "__main__":
smoke_test_module()
| Python | 0.000003 |
4eb71abf71823a5a065d1b593ca8b624d17a35c9 | prepare for 1.6 | src/pyckson/__init__.py | src/pyckson/__init__.py | from pyckson.decorators import *
from pyckson.json import *
from pyckson.parser import parse
from pyckson.parsers.base import Parser
from pyckson.serializer import serialize
from pyckson.serializers.base import Serializer
from pyckson.dates.helpers import configure_date_formatter
__version__ = '1.6'
| from pyckson.decorators import *
from pyckson.json import *
from pyckson.parser import parse
from pyckson.parsers.base import Parser
from pyckson.serializer import serialize
from pyckson.serializers.base import Serializer
from pyckson.dates.helpers import configure_date_formatter
__version__ = '1.5'
| Python | 0.000001 |
63ae7b4caf877cb043b2c2d4861e6ab5bb5f5390 | fix flake8 | memote/suite/runner.py | memote/suite/runner.py | # -*- coding: utf-8 -*-
# Copyright 2017 Novo Nordisk Foundation Center for Biosustainability,
# Technical University of Denmark.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Run the test suite on an instance of `cobra.Model`.
"""
from __future__ import absolute_import
import sys
import shlex
import os
from os.path import dirname
import click
import pytest
from click_configfile import (ConfigFileReader, Param, SectionSchema,
matches_section)
from .. import __version__
class ConfigSectionSchema(object):
"""Describes all sections of the memote configuration file."""
@matches_section("memote")
class Memote(SectionSchema):
"""Describes the memote configuration keys and values."""
collect = Param(type=bool, default=True)
addargs = Param(type=str, default="")
model = Param(type=click.Path(exists=True, dir_okay=False),
multiple=True)
class ConfigFileProcessor(ConfigFileReader):
config_files = ["memote.ini", "setup.cfg"]
config_section_schemas = [ConfigSectionSchema.Memote]
class MyPlugin:
def pytest_sessionfinish(self):
click.echo("Storing report data.")
def process_collect_flag(no_flag, context):
if no_flag is not None:
return not no_flag
elif "collect" in context.default_map:
return context.default_map["collect"]
else:
return True
def process_addargs(args, context):
if args is not None:
return shlex.split(args) + [dirname(__file__)]
elif "addargs" in context.default_map:
return shlex.split(context.default_map["addargs"]) +\
[dirname(__file__)]
else:
return [dirname(__file__)]
def process_model(model, context):
if len(model) > 0:
os.environ["MEMOTE_MODEL"] = os.pathsep.join(model)
elif "MEMOTE_MODEL" in os.environ:
return
elif "model" in context.default_map:
os.environ["MEMOTE_MODEL"] = os.pathsep.join(
context.default_map["model"]
)
else:
raise ValueError(
"No metabolic model found. Specify one as an argument, as an"
" environment variable MEMOTE_MODEL, or in a configuration file."
)
@click.command(context_settings=dict(
default_map=ConfigFileProcessor.read_config()
))
@click.help_option("--help", "-h")
@click.version_option(__version__, "--version", "-V")
@click.option("--no-collect", type=bool, is_flag=True,
help="Do *not* collect test data needed for generating a report.")
@click.option("--pytest-args", "-a",
help="Any additional arguments you want to pass to pytest as a"
" string.")
@click.argument("model", type=click.Path(exists=True, dir_okay=False), nargs=-1)
@click.pass_context
def cli(ctx, model, pytest_args, no_collect):
collect = process_collect_flag(no_collect, ctx)
args = process_addargs(pytest_args, ctx)
try:
process_model(model, ctx)
except ValueError as err:
click.echo(str(err))
sys.exit(1)
click.echo(os.environ["MEMOTE_MODEL"])
if collect:
errno = pytest.main(args, plugins=[MyPlugin()])
else:
errno = pytest.main(args)
sys.exit(errno)
| # -*- coding: utf-8 -*-
# Copyright 2017 Novo Nordisk Foundation Center for Biosustainability,
# Technical University of Denmark.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Run the test suite on an instance of `cobra.Model`.
"""
from __future__ import absolute_import
import sys
import shlex
import os
from os.path import dirname
import click
import pytest
from click_configfile import (ConfigFileReader, Param, SectionSchema,
matches_section)
from .. import __version__
class ConfigSectionSchema(object):
"""Describes all sections of the memote configuration file."""
@matches_section("memote")
class Memote(SectionSchema):
"""Describes the memote configuration keys and values."""
collect = Param(type=bool, default=True)
addargs = Param(type=str, default="")
model = Param(type=click.Path(exists=True, dir_okay=False),
multiple=True)
class ConfigFileProcessor(ConfigFileReader):
config_files = ["memote.ini", "setup.cfg"]
config_section_schemas = [ConfigSectionSchema.Memote]
class MyPlugin:
def pytest_sessionfinish(self):
click.echo("Storing report data.")
def process_collect_flag(no_flag, context):
if no_flag is not None:
return not no_flag
elif "collect" in context.default_map:
return context.default_map["collect"]
else:
return True
def process_addargs(args, context):
if args is not None:
return shlex.split(args) + [dirname(__file__)]
elif "addargs" in context.default_map:
return shlex.split(context.default_map["addargs"]) +\
[dirname(__file__)]
else:
return [dirname(__file__)]
def process_model(model, context):
if len(model) > 0:
os.environ["MEMOTE_MODEL"] = os.pathsep.join(model)
elif "MEMOTE_MODEL" in os.environ:
return
elif "model" in context.default_map:
os.environ["MEMOTE_MODEL"] = os.pathsep.join(
context.default_map["model"]
)
else:
raise ValueError(
"No metabolic model found. Specify one as an argument, as an"
" environment variable MEMOTE_MODEL, or in a configuration file."
)
@click.command(context_settings={"default_map": ConfigFileProcessor.read_config()})
@click.help_option("--help", "-h")
@click.version_option(__version__, "--version", "-V")
@click.option("--no-collect", type=bool, is_flag=True,
help="Do *not* collect test data needed for generating a report.")
@click.option("--pytest-args", "-a",
help="Any additional arguments you want to pass to pytest as a"
" string.")
@click.argument("model", type=click.Path(exists=True, dir_okay=False), nargs=-1)
@click.pass_context
def cli(ctx, model, pytest_args, no_collect):
collect = process_collect_flag(no_collect, ctx)
args = process_addargs(pytest_args, ctx)
try:
process_model(model, ctx)
except ValueError as err:
click.echo(str(err))
sys.exit(1)
click.echo(os.environ["MEMOTE_MODEL"])
if collect:
errno = pytest.main(args, plugins=[MyPlugin()])
else:
errno = pytest.main(args)
sys.exit(errno)
| Python | 0 |
ea2852a2a1219ecc23bcbb04c4120a6432f67d0d | Support for ffmpeg showifo filter and collecting frame details. | src/python/video_scripts/frames.py | src/python/video_scripts/frames.py | # (c) Patryk Czarnik
# Distributed under MIT License. See LICENCE file in the root directory for details.
# This module defines classes and functions to identify and search for individual frames of videos.
from subprocess import call
import re
import sys
from typing import List
pv = re.compile(r"\[Parsed_showinfo.*\]\s+n:\s*(\d+)\s+pts:\s*(\d+)\s+pts_time:\s*(\d+(?:\.\d+)?)\s+pos:\s*(\d+).+iskey:(\d)\s+type:(.)\s+checksum:([^\s]+).*")
pa = re.compile(r"\[Parsed_ashowinfo.*\]\s+n:\s*(\d+)\s+pts:\s*(\d+)\s+pts_time:\s*(\d+(?:\.\d+)?)\s+pos:\s*(\d+).+nb_samples:(\d+)\s+checksum:([^\s]+).*")
class Frame:
'''Abstract class representing a frame in a multimedia file.'''
def __init__(self, n, pts, time, pos, checksum=None):
self.n = int(n)
self.pts = int(pts)
self.time = float(time)
self.pos = int(pos)
self.checksum = checksum
def __str__(self):
return 'frame no %d, pts: %d, time: %f' % (self.n, self.pts , self.time)
def csv_fields(self):
raise NotImplementedError('This is abstract method, should be implemented in subclasses')
@staticmethod
def of_csv_fields(fields:List[str]):
'''
Creates a Frame object based on the list of CSV fields, as exported by these classes.
'''
if len(fields) > 0:
if fields[0] == 'A':
return AudioFrame.of_csv_fields(fields)
if fields[0] == 'V':
return VideoFrame.of_csv_fields(fields)
return None
@staticmethod
def of_showinfo_line(line:str):
ma = pa.search(line)
if(ma):
return AudioFrame(ma[1], ma[2], ma[3], ma[4], ma[5], ma[6])
mv = pv.search(line)
if(mv):
return VideoFrame(mv[1], mv[2], mv[3], mv[4], mv[5], mv[6], mv[7])
return None
class AudioFrame(Frame):
'''
An object of this class represents an audio frame from a multimedia file.
The meaning of this term and the details attached to it are taken from the ffmpeg tool.
'''
def __init__(self, n, pts, time, pos, samples, checksum=None):
super().__init__(n, pts, time, pos, checksum)
self.samples = int(samples)
def __str__(self):
return 'A frame no %d, pts: %d, time: %f, samples: %d' % (self.n, self.pts , self.time, self.samples)
def csv_fields(self):
return ['A', self.n, self.pts , self.time, self.pos, self.samples, self.checksum]
@staticmethod
def of_csv_fields(fields:List[str]):
return AudioFrame(*fields[1:])
class VideoFrame(Frame):
'''
An object of this class represents a video frame from a multimedia file.
The meaning of this term and the details attached to it are taken from the ffmpeg tool.
'''
def __init__(self, n, pts, time, pos, iskey, frame_type, checksum=None):
super().__init__(n, pts, time, pos, checksum)
if type(iskey) == str:
self.iskey = (iskey.strip() == '1')
else:
self.iskey = bool(iskey)
self.type = str(frame_type)
@property
def iskey_num(self):
return 1 if self.iskey else 0
def __str__(self):
return 'V frame no %d, pts: %d, time: %f, type: %d' % (self.n, self.pts , self.time, self.type)
def csv_fields(self):
return ['V', self.n, self.pts , self.time, self.pos, self.iskey_num, self.type, self.checksum]
@staticmethod
def of_csv_fields(fields:List[str]):
return VideoFrame(*fields[1:])
def run_showinfo(video_file_path:str, txt_out, do_audio:bool=True, do_video:bool=True) -> None:
'''
Runs ffmpeg program in showinfo mode, to collect information about inividual frames.
:param video_file_path: the video file
:param txt_out: output stream to which the ffmpeg output will be written; it ca be an opened (!) file
:param do_audio: should I process audio frames
:param do_video: should I process video frames
'''
filters = []
if do_audio:
filters.append('[a:0]ashowinfo')
if do_video:
filters.append('[v:0]showinfo')
if filters:
filters_str = ';'.join(filters)
call(['ffmpeg', '-y', '-i',video_file_path,'-filter_complex',filters_str,'-f','null','/dev/null'], stderr=txt_out)
def make_showinfo_file(video_file_path:str, txt_file_path:str, do_audio:bool=True, do_video:bool=True) -> None:
'''
Runs ffmpeg program in showinfo mode, to collect information about inividual frames.
The result is a text file in ffmpeg-dependent format.
:param video_file_path:
:param txt_file_path:
:param do_audio:
:param do_video:
'''
with open(txt_file_path, 'wb') as ffout:
run_showinfo(video_file_path, ffout, do_audio, do_video)
| # (c) Patryk Czarnik
# Distributed under MIT License. See LICENCE file in the root directory for details.
# This module defines classes and functions to identify and search for individual frames of videos.
from typing import List
class Frame:
'''Abstract class representing a frame in a multimedia file.'''
def __init__(self, n, pts, time, pos, checksum=None):
self.n = int(n)
self.pts = int(pts)
self.time = float(time)
self.pos = int(pos)
self.checksum = checksum
def __str__(self):
return 'frame no %d, pts: %d, time: %f' % (self.n, self.pts , self.time)
class AudioFrame(Frame):
'''
An object of this class represents an audio frame from a multimedia file.
The meaning of this term and the details attached to it are taken from the ffmpeg tool.
'''
def __init__(self, n, pts, time, pos, samples, checksum=None):
super().__init__(n, pts, time, pos, checksum)
self.samples = int(samples)
def __str__(self):
return 'A frame no %d, pts: %d, time: %f, samples: %d' % (self.n, self.pts , self.time, self.samples)
class VideoFrame(Frame):
'''
An object of this class represents a video frame from a multimedia file.
The meaning of this term and the details attached to it are taken from the ffmpeg tool.
'''
def __init__(self, n, pts, time, pos, iskey, frame_type, checksum=None):
super().__init__(n, pts, time, pos, checksum)
self.iskey = bool(iskey)
self.type = str(frame_type)
def __str__(self):
return 'V frame no %d, pts: %d, time: %f, type: %d' % (self.n, self.pts , self.time, self.type)
| Python | 0 |
3fc6a711146afa79794ec884f560f1ea43e4565a | Update the latest version | src/site/sphinx/conf.py | src/site/sphinx/conf.py | # -*- coding: utf-8 -*-
import sys, os, re
import xml.etree.ElementTree as etree
from datetime import date
from collections import defaultdict
def etree_to_dict(t):
t.tag = re.sub(r'\{[^\}]*\}', '', t.tag)
d = {t.tag: {} if t.attrib else None}
children = list(t)
if children:
dd = defaultdict(list)
for dc in map(etree_to_dict, children):
for k, v in dc.iteritems():
dd[k].append(v)
d = {t.tag: {k:v[0] if len(v) == 1 else v for k, v in dd.iteritems()}}
if t.attrib:
d[t.tag].update(('@' + k, v) for k, v in t.attrib.iteritems())
if t.text:
text = t.text.strip()
if children or t.attrib:
if text:
d[t.tag]['#text'] = text
else:
d[t.tag] = text
return d
# Parse the Maven pom.xml.
pom = etree_to_dict(etree.parse('../../../pom.xml').getroot())['project']
# Set the basic project information.
project = pom['name']
project_short = pom['name']
copyright = str(date.today().year) + ', ' + pom['organization']['name']
# Set the project version and release.
# Use the last known stable release if the current version ends with '-SNAPSHOT'.
if re.match(r'^.*-SNAPSHOT$', pom['version']):
release = '0.19.0.Final'
version = '0.19'
else:
release = pom['version']
version = re.match(r'^[0-9]+\.[0-9]+', pom['version']).group(0)
# Define some useful global substitutions.
rst_epilog = '\n'
rst_epilog += '.. |baseurl| replace:: http://line.github.io/armeria/\n'
rst_epilog += '.. |jetty_alpnAgent_version| replace:: ' + pom['properties']['jetty.alpnAgent.version'] + '\n'
rst_epilog += '.. |oss_parent_version| replace:: ' + pom['parent']['version'] + '\n'
rst_epilog += '.. |logback_version| replace:: ' + pom['properties']['logback.version'] + '\n'
rst_epilog += '.. |slf4j_version| replace:: ' + pom['properties']['slf4j.version'] + '\n'
rst_epilog += '.. |tomcat_version| replace:: ' + pom['properties']['tomcat.version'] + '\n'
rst_epilog += '\n'
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc']
templates_path = ['_templates']
source_suffix = '.rst'
source_encoding = 'utf-8-sig'
master_doc = 'index'
exclude_trees = ['.build']
add_function_parentheses = True
pygments_style = 'tango'
master_doc = 'index'
sys.path.append(os.path.abspath('_themes'))
html_theme = 'sphinx_rtd_theme'
html_theme_path = ['_themes']
html_short_title = project_short
html_static_path = ['_static']
html_use_smartypants = True
html_use_index = True
html_show_sourcelink = False
htmlhelp_basename = pom['artifactId']
| # -*- coding: utf-8 -*-
import sys, os, re
import xml.etree.ElementTree as etree
from datetime import date
from collections import defaultdict
def etree_to_dict(t):
t.tag = re.sub(r'\{[^\}]*\}', '', t.tag)
d = {t.tag: {} if t.attrib else None}
children = list(t)
if children:
dd = defaultdict(list)
for dc in map(etree_to_dict, children):
for k, v in dc.iteritems():
dd[k].append(v)
d = {t.tag: {k:v[0] if len(v) == 1 else v for k, v in dd.iteritems()}}
if t.attrib:
d[t.tag].update(('@' + k, v) for k, v in t.attrib.iteritems())
if t.text:
text = t.text.strip()
if children or t.attrib:
if text:
d[t.tag]['#text'] = text
else:
d[t.tag] = text
return d
# Parse the Maven pom.xml.
pom = etree_to_dict(etree.parse('../../../pom.xml').getroot())['project']
# Set the basic project information.
project = pom['name']
project_short = pom['name']
copyright = str(date.today().year) + ', ' + pom['organization']['name']
# Set the project version and release.
# Use the last known stable release if the current version ends with '-SNAPSHOT'.
if re.match(r'^.*-SNAPSHOT$', pom['version']):
release = '0.18.0.Final'
version = '0.18'
else:
release = pom['version']
version = re.match(r'^[0-9]+\.[0-9]+', pom['version']).group(0)
# Define some useful global substitutions.
rst_epilog = '\n'
rst_epilog += '.. |baseurl| replace:: http://line.github.io/armeria/\n'
rst_epilog += '.. |jetty_alpnAgent_version| replace:: ' + pom['properties']['jetty.alpnAgent.version'] + '\n'
rst_epilog += '.. |oss_parent_version| replace:: ' + pom['parent']['version'] + '\n'
rst_epilog += '.. |logback_version| replace:: ' + pom['properties']['logback.version'] + '\n'
rst_epilog += '.. |slf4j_version| replace:: ' + pom['properties']['slf4j.version'] + '\n'
rst_epilog += '.. |tomcat_version| replace:: ' + pom['properties']['tomcat.version'] + '\n'
rst_epilog += '\n'
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc']
templates_path = ['_templates']
source_suffix = '.rst'
source_encoding = 'utf-8-sig'
master_doc = 'index'
exclude_trees = ['.build']
add_function_parentheses = True
pygments_style = 'tango'
master_doc = 'index'
sys.path.append(os.path.abspath('_themes'))
html_theme = 'sphinx_rtd_theme'
html_theme_path = ['_themes']
html_short_title = project_short
html_static_path = ['_static']
html_use_smartypants = True
html_use_index = True
html_show_sourcelink = False
htmlhelp_basename = pom['artifactId']
| Python | 0 |
521c71c38d4e6edc242afb76daf330d9aec8e9ff | remove ipdb | scripts/dataverse/connect_external_accounts.py | scripts/dataverse/connect_external_accounts.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.dataverse.model import AddonDataverseNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in AddonDataverseNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
if not user_addon.external_accounts:
logger.warning('User {0} has no dataverse external account'.format(user_addon.owner._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.dataverse.model import AddonDataverseNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in AddonDataverseNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
# import ipdb; ipdb.set_trace()
if not user_addon.external_accounts:
logger.warning('User {0} has no dataverse external account'.format(user_addon.owner._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
| Python | 0.000016 |
9cdd86499013c1deac7caeb8320c34294789f716 | Add _kill_and_join to async actor stub | py/garage/garage/asyncs/actors.py | py/garage/garage/asyncs/actors.py | """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
async def _kill_and_join(self, graceful=True):
self._kill(graceful=graceful)
await self._get_future().result()
| """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
| Python | 0.000001 |
16381d4fafe743c3feb1de7ec27b6cbf95f617f1 | Add state and conf to interactive namespace by default | pyexperiment/utils/interactive.py | pyexperiment/utils/interactive.py | """Provides helper functions for interactive prompts
Written by Peter Duerr
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from pyexperiment import state
from pyexperiment import conf
def embed_interactive(**kwargs):
"""Embed an interactive terminal into a running python process
"""
if not 'state' in kwargs:
kwargs['state'] = state
if not 'conf' in kwargs:
kwargs['conf'] = conf
try:
import IPython
ipython_config = IPython.Config()
ipython_config.TerminalInteractiveShell.confirm_exit = False
if IPython.__version__ == '1.2.1':
IPython.embed(config=ipython_config,
banner1='',
user_ns=kwargs)
else:
IPython.embed(config=ipython_config,
banner1='',
local_ns=kwargs)
except ImportError:
import readline # pylint: disable=unused-variable
import code
code.InteractiveConsole(kwargs).interact()
| """Provides helper functions for interactive prompts
Written by Peter Duerr
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
def embed_interactive(**kwargs):
"""Embed an interactive terminal into a running python process
"""
try:
import IPython
ipython_config = IPython.Config()
ipython_config.TerminalInteractiveShell.confirm_exit = False
if IPython.__version__ == '1.2.1':
IPython.embed(config=ipython_config,
banner1='',
user_ns=kwargs)
else:
IPython.embed(config=ipython_config,
banner1='',
local_ns=kwargs)
except ImportError:
import readline # pylint: disable=unused-variable
import code
code.InteractiveConsole(kwargs).interact()
| Python | 0 |
6cd34697334ddd8ada1daeee9a2c8b9522257487 | Remove unused function | pyramda/iterable/for_each_test.py | pyramda/iterable/for_each_test.py | try:
# Python 3
from unittest import mock
except ImportError:
# Python 2
import mock
from .for_each import for_each
def test_for_each_nocurry_returns_the_original_iterable():
assert for_each(mock.MagicMock(), [1, 2, 3]) == [1, 2, 3]
def test_for_each_curry_returns_the_original_iterable():
assert for_each(mock.MagicMock())([1, 2, 3]) == [1, 2, 3]
def test_for_each_no_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
for_each(m, ([1, 2, 3])) == [1, 2, 3]
assert len(m.mock_calls) == 3
def test_for_each_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
for_each(m)([1, 2, 3]) == [1, 2, 3]
assert len(m.mock_calls) == 3
|
try:
# Python 3
from unittest import mock
except ImportError:
# Python 2
import mock
from .for_each import for_each
def print_x_plus_5(x):
print(x + 5)
def test_for_each_nocurry_returns_the_original_iterable():
assert for_each(mock.MagicMock(), [1, 2, 3]) == [1, 2, 3]
def test_for_each_curry_returns_the_original_iterable():
assert for_each(mock.MagicMock())([1, 2, 3]) == [1, 2, 3]
def test_for_each_no_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
for_each(m, ([1, 2, 3])) == [1, 2, 3]
assert len(m.mock_calls) == 3
def test_for_each_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
for_each(m)([1, 2, 3]) == [1, 2, 3]
assert len(m.mock_calls) == 3
| Python | 0.000004 |
4f2b7e5601e9f241868f86743eacb0e432be7495 | fix settings of cache in UT | source/jormungandr/tests/integration_tests_settings.py | source/jormungandr/tests/integration_tests_settings.py | # encoding: utf-8
START_MONITORING_THREAD = False
SAVE_STAT = True
# désactivation de l'authentification
PUBLIC = True
LOGGER = {
'version': 1,
'disable_existing_loggers': False,
'formatters':{
'default': {
'format': '[%(asctime)s] [%(levelname)5s] [%(process)5s] [%(name)10s] %(message)s',
},
},
'handlers': {
'default': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'default',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'INFO',
'propagate': True
},
}
}
CACHE_CONFIGURATION = {
'CACHE_TYPE': 'null'
}
| # encoding: utf-8
START_MONITORING_THREAD = False
SAVE_STAT = True
# désactivation de l'authentification
PUBLIC = True
LOGGER = {
'version': 1,
'disable_existing_loggers': False,
'formatters':{
'default': {
'format': '[%(asctime)s] [%(levelname)5s] [%(process)5s] [%(name)10s] %(message)s',
},
},
'handlers': {
'default': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'default',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'INFO',
'propagate': True
},
}
}
| Python | 0 |
26c4effd8741d2511bb0b3bd46cca12d37b0e01b | Add file magic | examples/python/scheme_timer.py | examples/python/scheme_timer.py | #! /usr/bin/env python
"""
Checks the execution time of repeated calls to the Scheme API from Python
Runs an empty Scheme command NUMBER_OF_ITERATIONS times and displays the
total execution time
"""
__author__ = 'Cosmo Harrigan'
NUMBER_OF_ITERATIONS = 100
from opencog.atomspace import AtomSpace, TruthValue, types, get_type_name
from opencog.scheme_wrapper import load_scm, scheme_eval, scheme_eval_h, __init__
atomspace = AtomSpace()
__init__(atomspace)
data = ["opencog/atomspace/core_types.scm",
"opencog/scm/utilities.scm"]
for item in data:
load_scm(atomspace, item)
def test_operation():
for i in range(NUMBER_OF_ITERATIONS):
scheme_eval_h(atomspace, '()')
import timeit
elapsed = timeit.timeit("test_operation()",
setup="from __main__ import test_operation",
number=1)
print "{0} seconds elapsed performing {1} repeated calls = {2} calls / sec".\
format(elapsed, NUMBER_OF_ITERATIONS, NUMBER_OF_ITERATIONS / elapsed)
| """
Checks the execution time of repeated calls to the Scheme API from Python
Runs an empty Scheme command NUMBER_OF_ITERATIONS times and displays the
total execution time
"""
__author__ = 'Cosmo Harrigan'
NUMBER_OF_ITERATIONS = 100
from opencog.atomspace import AtomSpace, TruthValue, types, get_type_name
from opencog.scheme_wrapper import load_scm, scheme_eval, scheme_eval_h, __init__
atomspace = AtomSpace()
__init__(atomspace)
data = ["opencog/atomspace/core_types.scm",
"opencog/scm/utilities.scm"]
for item in data:
load_scm(atomspace, item)
def test_operation():
for i in range(NUMBER_OF_ITERATIONS):
scheme_eval_h(atomspace, '()')
import timeit
elapsed = timeit.timeit("test_operation()",
setup="from __main__ import test_operation",
number=1)
print "{0} seconds elapsed performing {1} repeated calls = {2} calls / sec".\
format(elapsed, NUMBER_OF_ITERATIONS, NUMBER_OF_ITERATIONS / elapsed)
| Python | 0.000001 |
075e7ea4e6be57cb618fcc26484456bf24db99c9 | add button for pyjd to load slides | examples/slideshow/Slideshow.py | examples/slideshow/Slideshow.py | import pyjd
from pyjamas.ui.Button import Button
from pyjamas.ui.RootPanel import RootPanel
from pyjamas.ui.HTML import HTML
from pyjamas.ui.DockPanel import DockPanel
from pyjamas.ui import HasAlignment
from pyjamas.ui.Hyperlink import Hyperlink
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.ScrollPanel import ScrollPanel
from pyjamas import Window
from SinkList import SinkList
from pyjamas import History
import Slide
from pyjamas.HTTPRequest import HTTPRequest
from SlideLoader import SlideListLoader
from pyjamas.Timer import Timer
from pyjamas.ui.Button import Button
class Slideshow:
def onHistoryChanged(self, token):
info = self.sink_list.find(token)
if info:
self.show(info, False)
else:
self.showInfo()
def onModuleLoad(self):
self.curInfo=''
self.curSink=None
self.description=HTML()
self.sink_list=SinkList()
self.panel=DockPanel()
self.b=Button("load", self)
self.sinkContainer = DockPanel()
self.sinkContainer.setStyleName("ks-Sink")
height = Window.getClientHeight()
self.sp = ScrollPanel(self.sinkContainer)
self.sp.setWidth("100%")
self.sp.setHeight("%dpx" % (height-110))
vp=VerticalPanel()
vp.setWidth("100%")
vp.setHeight("100%")
vp.add(self.description)
vp.add(self.sp)
self.description.setStyleName("ks-Intro")
self.panel.add(self.sink_list, DockPanel.WEST)
self.panel.add(vp, DockPanel.CENTER)
self.panel.setCellVerticalAlignment(self.sink_list, HasAlignment.ALIGN_TOP)
self.panel.setCellWidth(vp, "100%")
self.panel.setCellHeight(vp, "100%")
Window.addWindowResizeListener(self)
History.addHistoryListener(self)
RootPanel().add(self.panel)
RootPanel().add(self.b)
# kludgy way to detect "real" pyjd / pyjs difference.
# there's a bug in XULRunner nsIXMLHttpRequest which
# stops it from working (open "NS_ERROR_NOT_INITIALISED")
if not hasattr(pyjd, "Browser"):
Timer(1, self)
def onClick(self, sender):
self.loadSinks()
def onTimer(self, tid):
self.loadSinks()
def onWindowResized(self, width, height):
self.sink_list.resize(width, height)
self.sp.setHeight("%dpx" % (height-110))
def show(self, info, affectHistory):
if info == self.curInfo: return
self.curInfo = info
#Logger.write("showing " + info.getName())
if self.curSink <> None:
self.curSink.onHide()
#Logger.write("removing " + self.curSink)
self.sinkContainer.remove(self.curSink)
self.curSink = info.getInstance()
self.sink_list.setSinkSelection(info.getName())
self.description.setHTML(info.getDescription())
if (affectHistory):
History.newItem(info.getName())
self.sinkContainer.add(self.curSink, DockPanel.CENTER)
self.sinkContainer.setCellWidth(self.curSink, "100%")
self.sinkContainer.setCellHeight(self.curSink, "100%")
self.sinkContainer.setCellVerticalAlignment(self.curSink, HasAlignment.ALIGN_TOP)
self.curSink.onShow()
def loadSinks(self):
HTTPRequest().asyncPost("slides.txt", "", SlideListLoader(self))
def setSlides(self, slides):
for l in slides:
name = l[0]
desc = l[1]
self.sink_list.addSink(Slide.init(name, desc))
#Show the initial screen.
initToken = History.getToken()
if len(initToken):
self.onHistoryChanged(initToken)
else:
self.showInfo()
def showInfo(self):
self.show(self.sink_list.sinks[0], False)
if __name__ == '__main__':
pyjd.setup("http://127.0.0.1/examples/slideshow/public/Slideshow.html")
app = Slideshow()
app.onModuleLoad()
pyjd.run()
| from pyjamas.ui.Button import Button
from pyjamas.ui.RootPanel import RootPanel
from pyjamas.ui.HTML import HTML
from pyjamas.ui.DockPanel import DockPanel
from pyjamas.ui import HasAlignment
from pyjamas.ui.Hyperlink import Hyperlink
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.ScrollPanel import ScrollPanel
from pyjamas import Window
from SinkList import SinkList
from pyjamas import History
import Slide
from pyjamas.HTTPRequest import HTTPRequest
from SlideLoader import SlideListLoader
class Slideshow:
def onHistoryChanged(self, token):
info = self.sink_list.find(token)
if info:
self.show(info, False)
else:
self.showInfo()
def onModuleLoad(self):
self.curInfo=''
self.curSink=None
self.description=HTML()
self.sink_list=SinkList()
self.panel=DockPanel()
self.loadSinks()
self.sinkContainer = DockPanel()
self.sinkContainer.setStyleName("ks-Sink")
height = Window.getClientHeight()
self.sp = ScrollPanel(self.sinkContainer)
self.sp.setWidth("100%")
self.sp.setHeight("%dpx" % (height-110))
vp=VerticalPanel()
vp.setWidth("100%")
vp.setHeight("100%")
vp.add(self.description)
vp.add(self.sp)
self.description.setStyleName("ks-Intro")
self.panel.add(self.sink_list, DockPanel.WEST)
self.panel.add(vp, DockPanel.CENTER)
self.panel.setCellVerticalAlignment(self.sink_list, HasAlignment.ALIGN_TOP)
self.panel.setCellWidth(vp, "100%")
self.panel.setCellHeight(vp, "100%")
Window.addWindowResizeListener(self)
History.addHistoryListener(self)
RootPanel().add(self.panel)
def onWindowResized(self, width, height):
self.sink_list.resize(width, height)
self.sp.setHeight("%dpx" % (height-110))
def show(self, info, affectHistory):
if info == self.curInfo: return
self.curInfo = info
#Logger.write("showing " + info.getName())
if self.curSink <> None:
self.curSink.onHide()
#Logger.write("removing " + self.curSink)
self.sinkContainer.remove(self.curSink)
self.curSink = info.getInstance()
self.sink_list.setSinkSelection(info.getName())
self.description.setHTML(info.getDescription())
if (affectHistory):
History.newItem(info.getName())
self.sinkContainer.add(self.curSink, DockPanel.CENTER)
self.sinkContainer.setCellWidth(self.curSink, "100%")
self.sinkContainer.setCellHeight(self.curSink, "100%")
self.sinkContainer.setCellVerticalAlignment(self.curSink, HasAlignment.ALIGN_TOP)
self.curSink.onShow()
def loadSinks(self):
HTTPRequest().asyncPost("slides.txt", "", SlideListLoader(self))
def setSlides(self, slides):
for l in slides:
name = l[0]
desc = l[1]
self.sink_list.addSink(Slide.init(name, desc))
#Show the initial screen.
initToken = History.getToken()
if len(initToken):
self.onHistoryChanged(initToken)
else:
self.showInfo()
def showInfo(self):
self.show(self.sink_list.sinks[0], False)
if __name__ == '__main__':
app = Slideshow()
app.onModuleLoad()
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.