| commit (stringlengths, 40 to 40) | subject (stringlengths, 1 to 3.25k) | old_file (stringlengths, 4 to 311) | new_file (stringlengths, 4 to 311) | old_contents (stringlengths, 0 to 26.3k) | lang (stringclasses, 3 values) | proba (float64, 0 to 1) | diff (stringlengths, 0 to 7.82k) |
|---|---|---|---|---|---|---|---|
1d24e23d3468d2ebfda8d8dbb966a6bf7ef8989e
|
Fix up UTs
|
tests/test_jira_issue_tracker.py
|
tests/test_jira_issue_tracker.py
|
# -*- coding: utf-8 -*-
import conftest
import jira
import jira.client
import pytest

import zazu.plugins.jira_issue_tracker

__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2016"


@pytest.fixture
def tracker_mock():
    return zazu.plugins.jira_issue_tracker.JiraIssueTracker('https://jira', 'ZZ', None)


@pytest.fixture
def mocked_jira_issue_tracker(mocker, tracker_mock):
    jira_mock = mocker.patch('jira.JIRA', autospec=True)
    tracker_mock._jira_handle = jira_mock
    return tracker_mock


mock_issue_dict = {
    'fields': {
        'summary': 'name',
        'status': {
            'name': 'Closed'
        },
        'resolution': {
            'name': 'Done'
        },
        'description': 'description',
        'issuetype': {
            'name': 'type'
        },
        'reporter': {
            'name': 'reporter'
        },
        'assignee': {
            'name': 'assignee'
        },
    },
    'key': 'ZZ-1'
}

mock_issue = conftest.dict_to_obj(mock_issue_dict)


def get_mock_issue_no_description(id):
    mock_issue_no_description = conftest.dict_to_obj(mock_issue_dict)
    mock_issue_no_description.fields.description = None
    return mock_issue_no_description


def test_jira_issue_tracker(mocker):
    mocker.patch('zazu.credential_helper.get_user_pass_credentials', return_value=('user', 'pass'))
    mocker.patch('jira.JIRA', autospec=True)
    uut = zazu.plugins.jira_issue_tracker.JiraIssueTracker('https://jira', 'ZZ', ['comp'])
    uut.connect()
    assert uut.default_project() == 'ZZ'
    assert uut.issue_components() == ['comp']
    assert uut.issue_types() == ['Task', 'Bug', 'Story']
    assert uut.issue('ZZ-1')


def test_jira_issue_tracker_no_description(mocker, mocked_jira_issue_tracker):
    mocked_jira_issue_tracker._jira_handle.issue = mocker.Mock(wraps=get_mock_issue_no_description)
    assert mocked_jira_issue_tracker.issue('ZZ-1').description == ''


def test_jira_issue_tracker_issue_error(mocker, mocked_jira_issue_tracker):
    mocked_jira_issue_tracker._jira_handle.issue = mocker.Mock(side_effect=jira.exceptions.JIRAError('foo'))
    with pytest.raises(zazu.issue_tracker.IssueTrackerError) as e:
        mocked_jira_issue_tracker.issue('ZZ-1')
    assert 'foo' in str(e.value)


def test_jira_issue_tracker_create_issue_error(mocker, mocked_jira_issue_tracker):
    mocked_jira_issue_tracker._jira_handle.create_issue = mocker.Mock(side_effect=jira.exceptions.JIRAError('foo'))
    with pytest.raises(zazu.issue_tracker.IssueTrackerError) as e:
        mocked_jira_issue_tracker.create_issue('', '', '', '', '')
    assert 'foo' in str(e.value)


def test_jira_issue_tracker_create_issue(mocker, mocked_jira_issue_tracker):
    mocked_jira_issue_tracker._jira_handle.create_issue = mocker.Mock(return_value=mock_issue)
    mocked_jira_issue_tracker.create_issue('project', 'issue_type', 'summary', 'description', 'component')
    jira_mock = mocked_jira_issue_tracker._jira_handle
    assert jira_mock.create_issue.call_count == 1
    assert jira_mock.assign_issue.call_count == 1
    jira_mock.assign_issue.assert_called_once_with(mock_issue, 'reporter')


def test_jira_issue_tracker_no_components(mocker):
    uut = zazu.plugins.jira_issue_tracker.JiraIssueTracker.from_config({'url': 'https://jira',
                                                                        'project': 'ZZ'})
    uut._jira_handle = mocker.Mock('jira.JIRA', autospec=True)
    assert uut.issue_components() == [None]


def test_from_config_no_project():
    with pytest.raises(zazu.ZazuException) as e:
        zazu.plugins.jira_issue_tracker.JiraIssueTracker.from_config({'url': 'https://jira'})
    assert str(e.value) == 'Jira config requires a "project" field'


def test_from_config_no_url():
    with pytest.raises(zazu.ZazuException) as e:
        zazu.plugins.jira_issue_tracker.JiraIssueTracker.from_config({'project': 'ZZ'})
    assert str(e.value) == 'Jira config requires a "url" field'


def test_jira_validate_id_format():
    uut = tracker_mock()
    uut.validate_id_format('LC-10')
    with pytest.raises(zazu.issue_tracker.IssueTrackerError) as e:
        uut.validate_id_format('lc-10')
    assert str(e.value) == 'issue id "lc-10" is not of the form PROJ-#'
    with pytest.raises(zazu.issue_tracker.IssueTrackerError):
        uut.validate_id_format('LC1-10')
    with pytest.raises(zazu.issue_tracker.IssueTrackerError):
        uut.validate_id_format('LC-10a')
    with pytest.raises(zazu.issue_tracker.IssueTrackerError):
        uut.validate_id_format('10')
    with pytest.raises(zazu.issue_tracker.IssueTrackerError):
        uut.validate_id_format('10a')


def test_jira_issue_adaptor(tracker_mock):
    uut = zazu.plugins.jira_issue_tracker.JiraIssueAdaptor(mock_issue, tracker_mock)
    assert uut.name == 'name'
    assert uut.status == 'Closed'
    assert uut.description == 'description'
    assert uut.assignee == 'assignee'
    assert uut.closed
    assert uut.type == 'type'
    assert uut.browse_url == 'https://jira/browse/ZZ-1'
    assert uut.id == 'ZZ-1'
    assert str(uut) == uut.id
|
Python
| 0
|
@@ -4035,24 +4035,96 @@
at('LC-10')%0A
+ uut.validate_id_format('Lc-10')%0A uut.validate_id_format('lc-10')%0A
with pyt
@@ -4202,34 +4202,33 @@
date_id_format('
-lc
+3
-10')%0A assert
@@ -4255,18 +4255,17 @@
sue id %22
-lc
+3
-10%22 is
|
e6a3e61ffce9bccfd0f112303e39d686c7d851b9
|
fix describe form test
|
tests/test_management_command.py
|
tests/test_management_command.py
|
# -*- coding: utf-8 -*-
import os
import sys
import shutil
import logging
import importlib

from django.core.management import call_command, find_commands, load_command_class
from django.test import TestCase
from django.utils.six import StringIO, PY3

from django_extensions.management.modelviz import use_model


class MockLoggingHandler(logging.Handler):
    """ Mock logging handler to check for expected logs. """

    def __init__(self, *args, **kwargs):
        self.reset()
        logging.Handler.__init__(self, *args, **kwargs)

    def emit(self, record):
        self.messages[record.levelname.lower()].append(record.getMessage())

    def reset(self):
        self.messages = {
            'debug': [],
            'info': [],
            'warning': [],
            'error': [],
            'critical': [],
        }


class CommandTest(TestCase):
    def test_error_logging(self):
        # Ensure command errors are properly logged and reraised
        from django_extensions.management.base import logger
        logger.addHandler(MockLoggingHandler())
        module_path = "tests.management.commands.error_raising_command"
        module = importlib.import_module(module_path)
        error_raising_command = module.Command()
        self.assertRaises(Exception, error_raising_command.execute)
        handler = logger.handlers[0]
        self.assertEqual(len(handler.messages['error']), 1)


class ShowTemplateTagsTests(TestCase):
    def test_some_output(self):
        out = StringIO()
        call_command('show_template_tags', stdout=out)
        output = out.getvalue()
        # Once django_extension is installed during tests it should appear with
        # its templatetags
        self.assertIn('django_extensions', output)
        # let's check at least one
        self.assertIn('truncate_letters', output)


class CreateAppTests(TestCase):
    def test_command(self):
        tmpname = "testapptest"
        tmpdir = "/tmp"
        tmppath = os.path.join(tmpdir, tmpname)
        self.assertFalse(os.path.isdir(tmppath))
        out = StringIO()
        try:
            call_command('create_app', tmpname, parent_path=tmpdir, stdout=out)
        finally:
            if os.path.isdir(tmppath):
                shutil.rmtree(tmppath)
        output = out.getvalue()
        self.assertIn("Application '%s' created." % tmpname, output)


class AdminGeneratorTests(TestCase):
    def test_command(self):
        out = StringIO()
        call_command('admin_generator', 'django_extensions', stdout=out)
        output = out.getvalue()
        self.assertIn("class SecretAdmin(admin.ModelAdmin):", output)
        if PY3:
            self.assertIn("list_display = ('id', 'name', 'text')", output)
            self.assertIn("search_fields = ('name',)", output)
        else:
            self.assertIn("list_display = (u'id', u'name', u'text')", output)
            self.assertIn("search_fields = (u'name',)", output)


class DescribeFormTests(TestCase):
    def test_command(self):
        out = StringIO()
        call_command('describe_form', 'django_extensions.Secret', stdout=out)
        output = out.getvalue()
        self.assertIn("class SecretForm(forms.Form):", output)
        self.assertRegexpMatches(output, "name = forms.CharField\(.*max_length=255")
        self.assertRegexpMatches(output, "name = forms.CharField\(.*required=False")
        self.assertRegexpMatches(output, "name = forms.CharField\(label=u?'Name'")
        self.assertRegexpMatches(output, "text = forms.CharField\(.*required=False")
        self.assertRegexpMatches(output, "text = forms.CharField\(label=u?'Text'")


class UpdatePermissionsTests(TestCase):
    def test_works(self):
        from django.db import models

        class PermModel(models.Model):
            class Meta:
                app_label = 'django_extensions'
                permissions = (('test_permission', 'test_permission'),)

        original_stdout = sys.stdout
        out = sys.stdout = StringIO()
        call_command('update_permissions', stdout=out, verbosity=3)
        sys.stdout = original_stdout
        self.assertIn("Can change perm model", out.getvalue())


class CommandSignalTests(TestCase):
    pre = None
    post = None

    def test_works(self):
        from django_extensions.management.signals import post_command, \
            pre_command
        from django_extensions.management.commands.show_template_tags import \
            Command

        def pre(sender, **kwargs):
            CommandSignalTests.pre = dict(**kwargs)

        def post(sender, **kwargs):
            CommandSignalTests.post = dict(**kwargs)

        pre_command.connect(pre, Command)
        post_command.connect(post, Command)
        out = StringIO()
        call_command('show_template_tags', stdout=out)
        self.assertIn('args', CommandSignalTests.pre)
        self.assertIn('kwargs', CommandSignalTests.pre)
        self.assertIn('args', CommandSignalTests.post)
        self.assertIn('kwargs', CommandSignalTests.post)
        self.assertIn('outcome', CommandSignalTests.post)


class CommandClassTests(TestCase):
    """Try to load every management command to catch exceptions."""

    def test_load_commands(self):
        try:
            management_dir = os.path.join('django_extensions', 'management')
            commands = find_commands(management_dir)
            for command in commands:
                load_command_class('django_extensions', command)
        except Exception as e:
            self.fail("Can't load command class of {0}\n{1}".format(command, e))


class GraphModelsTests(TestCase):
    """
    Tests for the `graph_models` management command.
    """

    def test_use_model(self):
        include_models = [
            'NoWildcardInclude',
            'Wildcard*InsideInclude',
            '*WildcardPrefixInclude',
            'WildcardSuffixInclude*',
            '*WildcardBothInclude*'
        ]
        exclude_models = [
            'NoWildcardExclude',
            'Wildcard*InsideExclude',
            '*WildcardPrefixExclude',
            'WildcardSuffixExclude*',
            '*WildcardBothExclude*'
        ]
        # Any model name should be used if neither include or exclude
        # are defined.
        self.assertTrue(use_model(
            'SomeModel',
            None,
            None
        ))
        # Any model name should be allowed if `*` is in `include_models`.
        self.assertTrue(use_model(
            'SomeModel',
            ['OtherModel', '*', 'Wildcard*Model'],
            None
        ))
        # No model name should be allowed if `*` is in `exclude_models`.
        self.assertFalse(use_model(
            'SomeModel',
            None,
            ['OtherModel', '*', 'Wildcard*Model']
        ))
        # Some tests with the `include_models` defined above.
        self.assertFalse(use_model(
            'SomeModel',
            include_models,
            None
        ))
        self.assertTrue(use_model(
            'NoWildcardInclude',
            include_models,
            None
        ))
        self.assertTrue(use_model(
            'WildcardSomewhereInsideInclude',
            include_models,
            None
        ))
        self.assertTrue(use_model(
            'MyWildcardPrefixInclude',
            include_models,
            None
        ))
        self.assertTrue(use_model(
            'WildcardSuffixIncludeModel',
            include_models,
            None
        ))
        self.assertTrue(use_model(
            'MyWildcardBothIncludeModel',
            include_models,
            None
        ))
        # Some tests with the `exclude_models` defined above.
        self.assertTrue(use_model(
            'SomeModel',
            None,
            exclude_models
        ))
        self.assertFalse(use_model(
            'NoWildcardExclude',
            None,
            exclude_models
        ))
        self.assertFalse(use_model(
            'WildcardSomewhereInsideExclude',
            None,
            exclude_models
        ))
        self.assertFalse(use_model(
            'MyWildcardPrefixExclude',
            None,
            exclude_models
        ))
        self.assertFalse(use_model(
            'WildcardSuffixExcludeModel',
            None,
            exclude_models
        ))
        self.assertFalse(use_model(
            'MyWildcardBothExcludeModel',
            None,
            exclude_models
        ))
|
Python
| 0.000008
|
@@ -3428,32 +3428,34 @@
orms.CharField%5C(
+.*
label=u?'Name'%22)
@@ -3606,16 +3606,18 @@
rField%5C(
+.*
label=u?
|
321adc1f8de9ee6df8dd0ff66be321f1e5feafcc
|
Rename API endpoint
|
httpobs/website/backend/api.py
|
httpobs/website/backend/api.py
|
from httpobs.scanner.grader import get_score_description, GRADES
from httpobs.scanner.tasks import scan
from httpobs.scanner.utils import valid_hostname
from httpobs.website import add_response_headers, sanitized_api_response

from flask import Blueprint, jsonify, request
from os import environ

import httpobs.database as database


api = Blueprint('api', __name__)

COOLDOWN = 15 if 'HTTPOBS_DEV' in environ else 300


# TODO: Implement API to write public and private headers to the database

@api.route('/api/v1/analyze', methods=['GET', 'OPTIONS', 'POST'])
@add_response_headers(cors=True)
@sanitized_api_response
def api_post_scan_hostname():
    # TODO: Allow people to accidentally use https://mozilla.org and convert to mozilla.org

    # Get the hostname
    hostname = request.args.get('host', '').lower()

    # Fail if it's not a valid hostname (not in DNS, not a real hostname, etc.)
    hostname = valid_hostname(hostname) or valid_hostname('www.' + hostname)  # prepend www. if necessary
    if not hostname:
        return {'error': '{hostname} is an invalid hostname'.format(hostname=request.args.get('host', ''))}

    # Get the site's id number
    try:
        site_id = database.select_site_id(hostname)
    except IOError:
        return {'error': 'Unable to connect to database'}

    # Next, let's see if there's a recent scan; if there was a recent scan, let's just return it
    # Setting rescan shortens what "recent" means
    rescan = True if request.form.get('rescan', 'false') == 'true' else False
    if rescan:
        row = database.select_scan_recent_scan(site_id, COOLDOWN)
    else:
        row = database.select_scan_recent_scan(site_id)

    # Otherwise, let's start up a scan
    if not row:
        hidden = True if request.form.get('hidden', 'false') == 'true' else False

        # Begin the dispatch process if it was a POST
        if request.method == 'POST':
            row = database.insert_scan(site_id, hidden=hidden)
            scan_id = row['id']
            scan.delay(hostname, site_id, scan_id)
        else:
            return {'error': 'recent-scan-not-found'}
    # If there was a rescan attempt and it returned a row, it's because the rescan was done within the cooldown window
    elif rescan and request.method == 'POST':
        return {'error': 'rescan-attempt-too-soon'}

    # Return the scan row
    return row


@api.route('/api/v1/getGradeTotals', methods=['GET', 'OPTIONS'])
@add_response_headers(cors=True)
def api_get_grade_totals():
    totals = database.select_scan_grade_totals()

    # If a grade isn't in the database, return it with quantity 0
    totals = {grade: totals.get(grade, 0) for grade in GRADES}

    return jsonify(totals)


@api.route('/api/v1/getRecentScans', methods=['GET', 'OPTIONS'])
@add_response_headers(cors=True)
def api_get_recent_scans():
    try:
        # Get the min and max scores, if they're there
        min_score = int(request.args.get('min-score', 0))
        max_score = int(request.args.get('max-score', 1000))

        min_score = max(0, min_score)
        max_score = min(1000, max_score)
    except ValueError:
        return {'error': 'invalid-parameters'}

    return jsonify(database.select_scan_recent_finished_scans(min_score=min_score, max_score=max_score))


@api.route('/api/v1/getScannerStats', methods=['GET', 'OPTIONS'])
@add_response_headers(cors=True)
def api_get_scanner_stats():
    return jsonify(database.select_scan_scanner_stats())


@api.route('/api/v1/getScanResults', methods=['GET', 'OPTIONS'])
@add_response_headers(cors=True)
@sanitized_api_response
def api_get_scan_results():
    scan_id = request.args.get('scan')

    if not scan_id:
        return {'error': 'scan-not-found'}

    # Get all the test results for the given scan id
    tests = dict(database.select_test_results(scan_id))

    # For each test, get the test score description and add that in
    for test in tests:
        tests[test]['score_description'] = get_score_description(tests[test]['result'])

    return tests
|
Python
| 0.000001
|
@@ -2409,14 +2409,20 @@
rade
-Totals
+Distribution
', m
|
0efb8c4347b944c692e3352382bf36de1c9f5ef4
|
Fix test_client with no webpack manifest
|
indico/testing/fixtures/app.py
|
indico/testing/fixtures/app.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.

import os

import pytest

from indico.web.flask.app import make_app
from indico.web.flask.wrappers import IndicoFlask


@pytest.fixture(scope='session')
def app(request, redis_proc):
    """Create the flask app."""
    config_override = {
        'BASE_URL': 'http://localhost',
        'SMTP_SERVER': ('localhost', 0),  # invalid port - just in case so we NEVER send emails!
        'TEMP_DIR': request.config.indico_temp_dir.strpath,
        'CACHE_DIR': request.config.indico_temp_dir.strpath,
        'REDIS_CACHE_URL': f'redis://{redis_proc.host}:{redis_proc.port}/0',
        'STORAGE_BACKENDS': {'default': 'mem:'},
        'PLUGINS': request.config.indico_plugins,
        'ENABLE_ROOMBOOKING': True,
        'SECRET_KEY': os.urandom(16),
        'SMTP_USE_CELERY': False,
    }
    return make_app(set_path=True, testing=True, config_override=config_override)


@pytest.fixture(autouse=True)
def app_context(app):
    """Create a flask app context."""
    with app.app_context():
        yield app


@pytest.fixture
def request_context(app_context):
    """Create a flask request context."""
    with app_context.test_request_context():
        yield


@pytest.fixture
def test_client(app, mocker):
    """Create a flask request context."""
    mocker.patch.object(IndicoFlask, 'manifest')
    with app.test_client() as c:
        yield c
|
Python
| 0
|
@@ -231,16 +231,71 @@
t pytest
+%0Afrom flask_webpackext.ext import _FlaskWebpackExtState
%0A%0Afrom i
@@ -1508,24 +1508,83 @@
context.%22%22%22%0A
+ mocker.patch.object(_FlaskWebpackExtState, 'manifest')%0A
mocker.p
|
2b72700dbd3b25c5a9b85663bd0a660da89322a4
|
Support template list in get_template_module
|
indico/web/flask/templating.py
|
indico/web/flask/templating.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.

import functools
import re

from flask import current_app as app
from jinja2.ext import Extension
from jinja2.lexer import Token
from markupsafe import Markup

from indico.util.string import render_markdown


indentation_re = re.compile(r'^ +', re.MULTILINE)


def underline(s, sep='-'):
    return u'{0}\n{1}'.format(s, sep * len(s))


def markdown(value):
    return Markup(EnsureUnicodeExtension.ensure_unicode(render_markdown(value, extensions=('nl2br',))))


def dedent(value):
    """Removes leading whitespace from each line"""
    return indentation_re.sub('', value)


def get_template_module(template_name, **context):
    """Returns the python module of a template.

    This allows you to call e.g. macros inside it from Python code."""
    app.update_template_context(context)
    tpl = app.jinja_env.get_template(template_name)
    return tpl.make_module(context)


class EnsureUnicodeExtension(Extension):
    """Ensures all strings in Jinja are unicode"""

    @classmethod
    def wrap_func(cls, f):
        """Wraps a function to make sure it returns unicode.

        Useful for custom filters."""
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            return cls.ensure_unicode(f(*args, **kwargs))
        return wrapper

    @staticmethod
    def ensure_unicode(s):
        """Converts a bytestring to unicode. Must be registered as a filter!"""
        if isinstance(s, str):
            return s.decode('utf-8')
        return s

    def filter_stream(self, stream):
        # The token stream looks like this:
        # ------------------------
        # variable_begin  {{
        # name            event
        # dot             .
        # name            getTitle
        # lparen          (
        # rparen          )
        # pipe            |
        # name            safe
        # variable_end    }}
        # ------------------------
        # Intercepting the end of the actual variable is hard but it's rather easy to get the end of
        # the variable tag or the start of the first filter. As filters are optional we need to check
        # both cases. If we inject the code before the first filter we *probably* don't need to run
        # it again later assuming our filters are nice and only return unicode. If that's not the
        # case we can simply remove the `variable_done` checks.
        # Due to the way Jinja works it is pretty much impossible to apply the filter to arguments
        # passed inside a {% trans foo=..., bar=... %} argument list - we have nothing to detect the
        # end of an argument as the 'comma' token might be inside a function call. So in that case#
        # people simply need to unicodify the strings manually. :(
        variable_done = False
        in_trans = False
        in_variable = False
        for token in stream:
            # Check if we are inside a trans block - we cannot use filters there!
            if token.type == 'block_begin':
                block_name = stream.current.value
                if block_name == 'trans':
                    in_trans = True
                elif block_name == 'endtrans':
                    in_trans = False
            elif token.type == 'variable_begin':
                in_variable = True

            if not in_trans and in_variable:
                if token.type == 'pipe':
                    # Inject our filter call before the first filter
                    yield Token(token.lineno, 'pipe', '|')
                    yield Token(token.lineno, 'name', 'ensure_unicode')
                    variable_done = True
                elif token.type == 'variable_end' or (token.type == 'name' and token.value == 'if'):
                    if not variable_done:
                        # Inject our filter call if we haven't injected it right after the variable
                        yield Token(token.lineno, 'pipe', '|')
                        yield Token(token.lineno, 'name', 'ensure_unicode')
                    variable_done = False
                    if token.type == 'variable_end':
                        in_variable = False

            # Original token
            yield token
|
Python
| 0
|
@@ -1336,16 +1336,24 @@
ate_name
+_or_list
, **cont
@@ -1543,24 +1543,34 @@
nja_env.get_
+or_select_
template(tem
@@ -1579,16 +1579,24 @@
ate_name
+_or_list
)%0A re
|
0dd9b060fd2f4f6f52f74199fd7c704f884f2e12
|
Check that 'run-tox' is used instead of 'tox'
|
tools/jenkins-projects-checks.py
|
tools/jenkins-projects-checks.py
|
#! /usr/bin/env python
# Copyright 2014 SUSE Linux Products GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import io
import glob
import sys

import voluptuous as v

# The files uses YAML extensions like !include, therefore use the
# jenkins-job-builder yaml parser for loading.
from jenkins_jobs import local_yaml


BUILDER = v.Schema({
    v.Required('name'): v.All(str),
    v.Required('builders'): v.All(list),
    'description': v.All(str)
}, extra=True)

JOB = v.Schema({
    v.Required('builders'): v.All(list),
    v.Required('name'): v.All(str),
    v.Required('node'): v.All(str),
    v.Required('publishers'): v.All(list),
    'description': v.All(str),
    'parameters': v.All(list),
    'wrappers': v.All(list)
})

JOB_GROUP = v.Schema({
    v.Required('name'): v.All(str),
    v.Required('jobs'): v.All(list),
    'description': v.All(str)
}, extra=True)

JOB_TEMPLATE = v.Schema({
    v.Required('builders'): v.All(list),
    v.Required('name'): v.All(str),
    v.Required('node'): v.All(str),
    v.Required('publishers'): v.All(list),
    'description': v.All(str),
    'wrappers': v.All(list)
})

PROJECT = v.Schema({
    v.Required('name'): v.All(str),
    v.Required('jobs'): v.All(list),
    'description': v.All(str)
}, extra=True)

PUBLISHER = v.Schema({
    v.Required('name'): v.All(str),
    v.Required('publishers'): v.All(list),
    'description': v.All(str)
})


def normalize(s):
    "Normalize string for comparison."
    return s.lower().replace("_", "-")


def check_alphabetical():
    """Check that the projects are in alphabetical order
    and that indenting looks correct"""
    print("Checking jenkins/jobs/projects.yaml")
    print("===================================")
    # Note that the file has different sections and we need to check
    # entries within these sections only
    errors = False
    last = ""
    count = 1
    for line in open('jenkins/jobs/projects.yaml', 'r'):
        if line.startswith(' name: '):
            i = line.find(' name: ')
            current = line[i + 7:].strip()
            if normalize(last) > normalize(current):
                print(" Wrong alphabetical order: %(last)s, %(current)s" %
                      {"last": last, "current": current})
                errors = True
            last = current
        if (len(line) - len(line.lstrip(' '))) % 2 != 0:
            print("Line %(count)s not indented by multiple of 2:\n\t%(line)s" %
                  {"count": count, "line": line})
            errors = True
        count = count+1
    if errors:
        print("Found errors in jenkins/jobs/projects.yaml!\n")
    else:
        print("No errors found in jenkins/jobs/projects.yaml!\n")
    return errors


def validate_jobs():
    """Minimal YAML file validation."""
    count = 0
    errors = False
    print("Validating YAML files")
    print("=====================")
    for job_file in glob.glob('jenkins/jobs/*.yaml'):
        jobs = local_yaml.load(io.open(job_file, 'r', encoding='utf-8'))
        for item in jobs:
            if 'builder' in item:
                schema = BUILDER
                entry = item['builder']
            elif 'job' in item:
                schema = JOB
                entry = item['job']
            elif 'job-group' in item:
                schema = JOB_GROUP
                entry = item['job-group']
            elif 'job-template' in item:
                schema = JOB_TEMPLATE
                entry = item['job-template']
            elif 'project' in item:
                schema = PROJECT
                entry = item['project']
            elif 'publisher' in item:
                schema = PUBLISHER
                entry = item['publisher']
            elif 'wrapper' in item:
                continue
            elif 'defaults' in item:
                continue
            else:
                print("Unknown entry in file %s" % job_file)
                print(item)
            try:
                schema(entry)
            except Exception as e:
                print("Failure: %s" % e)
                print("Failure in file %s" % job_file)
                print("Failure parsing item:")
                print(item)
                count += 1
                errors = True

            # NOTE(pabelanger): Make sure console-log is our last publisher
            # defined. We use the publisher to upload logs from zuul-launcher.
            result = _check_console_log_publisher(schema, entry)
            if result:
                print(job_file)
                count += result
                errors = True

    print("%d errors found validating YAML files in jenkins/jobs/*.yaml.\n" % count)
    return errors


def _check_console_log_publisher(schema, entry):
    count = 0
    if schema == JOB or schema == JOB_TEMPLATE:
        if 'publishers' in entry:
            if 'console-log' in entry['publishers'] and \
                    entry['publishers'][-1] != 'console-log':
                print("ERROR: The console-log publisher MUST be the last "
                      "publisher in '%s':" % entry['name'])
                count += 1
    return count


def check_all():
    errors = validate_jobs()
    errors = check_alphabetical() or errors

    if errors:
        print("Found errors in jenkins/jobs/*.yaml!")
    else:
        print("No errors found in jenkins/jobs/*.yaml!")
    return errors


if __name__ == "__main__":
    sys.exit(check_all())
|
Python
| 0.021828
|
@@ -4944,24 +4944,80 @@
ema, entry)%0A
+ result += _check_tox_builder(schema, entry)%0A
@@ -5673,16 +5673,778 @@
count%0A%0A%0A
+def _check_tox_builder(schema, entry):%0A count = 0%0A if schema == JOB or schema == JOB_TEMPLATE:%0A if 'builders' in entry:%0A for b in entry%5B'builders'%5D:%0A # Test for dict, coming from %22tox:%22%0A if isinstance(b, dict):%0A if 'tox' in b:%0A print(%22ERROR: Use 'run-tox' instead of 'tox' %22%0A %22builder in '%25s':%22 %25 entry%5B'name'%5D)%0A count += 1%0A # And test for %22tox%22 without arguments%0A elif isinstance(b, str) and b == 'tox':%0A print(%22ERROR: Use 'run-tox' instead of 'tox' %22%0A %22builder in '%25s':%22 %25 entry%5B'name'%5D)%0A count += 1%0A return count%0A%0A%0A
def chec
|
9398fa3f674dbed430cc5bd6178c07cc83c81c60
|
remove unnecessary print
|
treeano/sandbox/nodes/spp_net.py
|
treeano/sandbox/nodes/spp_net.py
|
"""
from
"Spatial Pyramid Pooling in Deep Convolutional Networks for Visual Recognition"
http://arxiv.org/abs/1406.4729
"""
from __future__ import division, absolute_import
from __future__ import print_function, unicode_literals
import numpy as np
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
from theano.tensor.signal import downsample
fX = theano.config.floatX
def spp_max_pool_axis_kwargs(in_shape, out_shape):
symbolic = (treeano.utils.is_variable(in_shape)
or treeano.utils.is_variable(out_shape))
if symbolic:
int_ceil = lambda x: T.ceil(x).astype("int32")
else:
int_ceil = lambda x: int(np.ceil(x))
# eg. if input is 5 and output is 2, each pool size should be 3
pool_size = int_ceil(in_shape / out_shape)
# stride should equal pool_size, since we want non-overlapping regions
stride = pool_size
# pad as much as possible, since ignore_border=True
padding = int_ceil((pool_size * out_shape - in_shape) / 2)
if not symbolic:
assert padding < pool_size
return dict(
ds=pool_size,
st=stride,
padding=padding,
)
def spp_max_pool_kwargs(in_shape, out_shape):
assert len(in_shape) == len(out_shape)
axis_res = []
for i, o in zip(in_shape, out_shape):
axis_res.append(spp_max_pool_axis_kwargs(i, o))
return dict(
ds=tuple([r["ds"] for r in axis_res]),
st=tuple([r["st"] for r in axis_res]),
padding=tuple([r["padding"] for r in axis_res]),
# must be set to true for padding to work
ignore_border=True,
)
@treeano.register_node("spatial_pyramid_pooling")
class SpatialPyramidPoolingNode(treeano.NodeImpl):
"""
eg.
SpatialPyramidPoolingNode("spp", spp_levels=[(1, 1), (2, 2), (4, 4)])
"""
hyperparameter_names = ("spp_levels",)
def compute_output(self, network, in_vw):
spp_levels = network.find_hyperparameter(["spp_levels"])
# FIXME generalize to other shape dimensions.
# assume this is of the form bc01 (batch, channel, width, height)
# shape calculation
in_shape = in_vw.symbolic_shape()
if in_vw.shape[1] is None:
out_shape1 = None
else:
out_shape1 = in_vw.shape[1] * sum(d1 * d2 for d1, d2 in spp_levels)
out_shape = (in_vw.shape[0], out_shape1)
# compute out
mp_kwargs_list = [spp_max_pool_kwargs(in_shape[2:], spp_level)
for spp_level in spp_levels]
print(mp_kwargs_list)
pooled = [downsample.max_pool_2d(in_vw.variable, **kwargs)
for kwargs in mp_kwargs_list]
out_var = T.concatenate([p.flatten(2) for p in pooled], axis=1)
network.create_variable(
"default",
variable=out_var,
shape=out_shape,
tags={"output"},
)
|
Python
| 0.000413
|
@@ -2541,38 +2541,8 @@
ls%5D%0A
- print(mp_kwargs_list)%0A
|
849ba5fe3d4ad8799fb6cf75a49380312fb3ebdc
|
Fix use of str instead of bytes when cloaking
|
txircd/modules/extra/cloaking.py
|
txircd/modules/extra/cloaking.py
|
from twisted.internet.abstract import isIPAddress, isIPv6Address
from twisted.plugin import IPlugin
from txircd.config import ConfigValidationError
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import expandIPv6Address, isValidHost, lenBytes, ModeType
from zope.interface import implementer
from hashlib import sha256
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

@implementer(IPlugin, IModuleData, IMode)
class HostCloaking(ModuleData, Mode):
	name = "HostCloaking"
	affectedActions = { "modechange-user-x": 10 }

	def userModes(self) -> List[Tuple[str, ModeType, Mode]]:
		return [ ("x", ModeType.NoParam, self) ]

	def actions(self) -> List[Tuple[str, int, Callable]]:
		return [ ("modeactioncheck-user-x-modechange-user-x", 1, self.modeChanged) ]

	def verifyConfig(self, config: Dict[str, Any]) -> None:
		if "cloaking_salt" in config:
			if not isinstance(config["cloaking_salt"], str):
				raise ConfigValidationError("cloaking_salt", "value must be a string")
			if not config["cloaking_salt"]:
				self.ircd.log.warn("No cloaking salt was found in the config. Host cloaks will be insecure!")
		else:
			self.ircd.log.warn("No cloaking salt was found in the config. Host cloaks will be insecure!")
		if "cloaking_prefix" in config and not isValidHost(config["cloaking_prefix"]): # Make sure the prefix will not make the cloak an invalid hostname
			raise ConfigValidationError("cloaking_prefix", "value must be a string and must not contain any invalid hostname characters")

	def modeChanged(self, user: "IRCUser", *params: Any) -> Union[str, bool, None]:
		if user.uuid[:3] == self.ircd.serverID:
			return True
		return None

	def apply(self, actionType: str, user: "IRCUser", param: str, settingUser: "IRCUser", sourceID: str, adding: bool, paramAgain: Optional[str]) -> None:
		if adding:
			userHost = user.realHost
			if isIPv6Address(userHost):
				user.changeHost("cloak", self.applyIPv6Cloak(userHost))
			elif isIPAddress(userHost):
				user.changeHost("cloak", self.applyIPv4Cloak(userHost))
			else:
				if "." in userHost:
					user.changeHost("cloak", self.applyHostCloak(userHost, user.ip))
				else:
					if isIPv6Address(user.ip):
						return self.applyIPv6Cloak(user.ip)
					else:
						return self.applyIPv4Cloak(user.ip)
		else:
			user.resetHost("cloak")

	def applyHostCloak(self, host: str, ip: str) -> str:
		# Find the last segments of the hostname.
		index = len(host[::-1].split(".", 3)[-1]) # Get the length of all segments except the last
		# Cloak the first part of the host and leave the last segments alone.
		hostHashText = "{}{}".format(self.ircd.config.get("cloaking_salt", ""), host[:index])
		hostHashBytes = hostHashText.encode("utf-8")
		hostmask = "{}-{}{}".format(self.ircd.config.get("cloaking_prefix", "txircd"), sha256(hostHashBytes).hexdigest()[:8], host[index:])
		# This is very rare since we only leave up to 3 segments uncloaked, but make sure the end result isn't too long.
		if lenBytes(hostmask) > self.ircd.config.get("hostname_length", 64):
			if isIPv6Address(ip):
				return self.applyIPv6Cloak(ip)
			return self.applyIPv4Cloak(ip)
		return hostmask

	def applyIPv4Cloak(self, ip: str) -> str:
		pieces = ip.split(".")
		hashedParts = []
		for i in range(len(pieces), 0, -1):
			piecesGroup = pieces[:i]
			piecesGroup.reverse()
			hashedParts.append(sha256(self.ircd.config.get("cloaking_salt", "") + "".join(piecesGroup)).hexdigest()[:8])
		return "{}.IP".format(".".join(hashedParts))

	def applyIPv6Cloak(self, ip: str) -> str:
		pieces = expandIPv6Address(ip).split(":")
		hashedParts = []
		pieces.reverse()
		for i in range(len(pieces), 0, -1):
			piecesGroup = pieces[:i]
			piecesGroup.reverse()
			hashedParts.append(sha256(self.ircd.config.get("cloaking_salt", "") + "".join(piecesGroup)).hexdigest()[:5])
		return "{}.IP".format(".".join(hashedParts))

hostCloaking = HostCloaking()
|
Python
| 0.000009
|
@@ -3366,33 +3366,34 @@
%0A%09%09%09
-hashedParts.append(sha256
+ipHashText = %22%7B%7D%7B%7D%22.format
(sel
@@ -3422,34 +3422,33 @@
aking_salt%22, %22%22)
- +
+,
%22%22.join(piecesG
@@ -3445,32 +3445,118 @@
in(piecesGroup))
+%0A%09%09%09ipHashBytes = ipHashText.encode(%22utf-8%22)%0A%09%09%09hashedParts.append(sha256(ipHashBytes)
.hexdigest()%5B:8%5D
@@ -3828,33 +3828,34 @@
%0A%09%09%09
-hashedParts.append(sha256
+ipHashText = %22%7B%7D%7B%7D%22.format
(sel
@@ -3896,10 +3896,9 @@
%22%22)
- +
+,
%22%22.
@@ -3915,16 +3915,102 @@
sGroup))
+%0A%09%09%09ipHashBytes = ipHashText.encode(%22utf-8%22)%0A%09%09%09hashedParts.append(sha256(ipHashBytes)
.hexdige
|
6102f840c68e98a6c09aeb30055d6e58fa9c5006
|
Put temporary files in system's tempdir
|
typhon/tests/files/test_utils.py
|
typhon/tests/files/test_utils.py
|
from tempfile import NamedTemporaryFile

from typhon.files import compress, decompress


class TestCompression:
    data = "ABCDEFGHIJKLMNOPQRSTUVWXYZ12345678910"

    def create_file(self, filename):
        with open(filename, "w") as file:
            file.write(self.data)

    def check_file(self, filename):
        with open(filename) as file:
            return self.data == file.readline()

    def test_compress_decompress_zip(self):
        with NamedTemporaryFile() as file:
            with compress(file.name+".zip") as compressed_file:
                self.create_file(compressed_file)
            with decompress(file.name+".zip") as uncompressed_file:
                assert self.check_file(uncompressed_file)

    def test_compress_decompress_gzip(self):
        with NamedTemporaryFile() as file:
            with compress(file.name+".gz") as compressed_file:
                self.create_file(compressed_file)
            with decompress(file.name+".gz") as uncompressed_file:
                assert self.check_file(uncompressed_file)

    def test_compress_decompress_bz2(self):
        with NamedTemporaryFile() as file:
            with compress(file.name+".bz2") as compressed_file:
                self.create_file(compressed_file)
            with decompress(file.name+".bz2") as uncompressed_file:
                assert self.check_file(uncompressed_file)

    def test_compress_decompress_lzma(self):
        with NamedTemporaryFile() as file:
            with compress(file.name+".xz") as compressed_file:
                self.create_file(compressed_file)
            with decompress(file.name+".xz") as uncompressed_file:
                assert self.check_file(uncompressed_file)
|
Python
| 0
|
@@ -13,16 +13,28 @@
e import
+ gettempdir,
NamedTe
@@ -45,16 +45,16 @@
aryFile%0A
-
%0Afrom ty
@@ -473,32 +473,48 @@
edTemporaryFile(
+dir=gettempdir()
) as file:%0A
@@ -819,32 +819,48 @@
edTemporaryFile(
+dir=gettempdir()
) as file:%0A
@@ -1162,32 +1162,48 @@
edTemporaryFile(
+dir=gettempdir()
) as file:%0A
@@ -1476,32 +1476,32 @@
ess_lzma(self):%0A
-
with Nam
@@ -1516,16 +1516,32 @@
aryFile(
+dir=gettempdir()
) as fil
|
0f4c91b55b5f6640954c4f3b459c286fc5f87b53
|
Clarify and improve error handling
|
data/CPNWH_Downloader.py
|
data/CPNWH_Downloader.py
|
"""Use this to directly download CPNWH data.
To prevent timeout errors with their server, the data is downloaded from each
herbaria separately.
"""
import pandas as pd
import requests
import argparse
# Parse arguments
parser = argparse.ArgumentParser(
description='Download CPNWH data into multiple files')
parser.add_argument(
"-r", "--region", type=str, choices=['OR', 'WA'], required=True,
help="Region to use."
)
args = parser.parse_args()
region = args.region
folder = 'CPNWH_' + region + '/raw_data/'
file_prefix = "all_species"
# Gather components of URL
main_url = (
"http://www.pnwherbaria.org/data/results.php?DisplayAs=Checklist"
"&DisplayOption=Tab&ExcludeCultivated=Y&GroupBy=ungrouped"
"&SortBy=ScientificName&SortOrder=ASC&Herbaria="
)
with open('CPNWH_' + region + '_polygon.txt', 'r') as text_file:
region_url = text_file.read().strip()
herbaria = [
"ALA", "BABY", "BBLM", "BLMMD", "BOIS", "CIC", "CRMO", "EVE", "EWU", "FHL",
"HJAEF", "HPSU", "HSC", "ID", "IDS", "LEA", "LINF", "MONT", "MONTU", "NY",
"OSC", "PLU", "PNNL", "PSM", "REED", "RM", "SOC", "SRP", "UAAH", "UBC", "V",
"VALE", "VIU", "WCW", "WS", "WSTC", "WTU", "WWB"
]
# Loop through each herbarium
timed_out_herbaria = []
erroneous_herbaria = []
for i, herbarium in enumerate(herbaria):
print(str(i) + ": \tRequesting herbarium: \t" + herbarium)
url = main_url + herbarium + region_url
response = requests.get(url)
if response.headers['Content-Type'] == 'text/html':
print("*** No CSV returned!")
erroneous_herbaria.append(herbarium)
continue
if (response.elapsed.seconds == 30 or
'<' in response.text or '>' in response.text):
print("*** HIGHLY LIKELY that this download timed out!")
if ("Fatal error" in response.text):
if ("Maximum execution time of 30 seconds exceeded" in response.text):
print("\t!!! DOWNLOAD DEFINITELY TIMED OUT !!!")
print("*** CSV not saved!")
timed_out_herbaria.append(herbarium)
continue
file_path = './' + folder + file_prefix + "_raw_data" + str(i) + ".txt"
with open(file_path, 'w') as output_file:
output_file.write(response.text)
if len(erroneous_herbaria) > 0:
print("\nSome herbaria did not return CSVs.")
print("Here is a url to see why:\n")
print(main_url + ",".join(erroneous_herbaria) + region_url)
if len(timed_out_herbaria) > 0:
print("\nSome herbaria timed out. Deal with these manually:")
print(timed_out_herbaria)
|
Python
| 0.000001
|
@@ -196,16 +196,30 @@
rgparse%0A
+import string%0A
%0A%0A# Pars
@@ -874,19 +874,21 @@
region_
-url
+query
= text_
@@ -908,16 +908,55 @@
trip()%0A%0A
+family_url = %22&QueryCount=1&Family1=%22%0A%0A
herbaria
@@ -1290,25 +1290,29 @@
rbarium%0A
-timed_out
+unknown_error
_herbari
@@ -1319,24 +1319,20 @@
a = %5B%5D%0Ae
-rroneous
+mpty
_herbari
@@ -1482,19 +1482,21 @@
region_
-url
+query
%0A res
@@ -1624,24 +1624,20 @@
e
-rroneous
+mpty
_herbari
@@ -1940,16 +1940,36 @@
xceeded%22
+%0A not
in resp
@@ -2009,124 +2009,734 @@
!!!
-DOWNLOAD DEFINITELY TIMED OUT !!!%22)%0A print(%22*** CSV not saved!%22)%0A timed_out_herbaria.append(herbarium)
+Unknown error; CSV not saved !!!%22)%0A unknown_error_herbaria.append(herbarium)%0A continue%0A%0A # TODO This code is getting messy, I gotta refactor it a little%0A # TODO It's getting confusing because I need to redo the checks above %0A # TODO for each of these new URLs%0A # TODO I think it might be easier to add an %22alphabet search mode%22%0A # TODO to the argparse and have the user re-run the script%0A print(%22*** Timeout confirmed.%22)%0A print(%22*** Splitting requests by first letter of family name...%22)%0A for letter in string.ascii_uppercase:%0A family_query = family_url + letter + '%25'%0A url = main_url + herbarium + family_query + region_query
%0A
@@ -2922,24 +2922,20 @@
if len(e
-rroneous
+mpty
_herbari
@@ -3068,16 +3068,12 @@
in(e
-rroneous
+mpty
_her
@@ -3088,19 +3088,21 @@
region_
-url
+query
)%0A%0Aif le
@@ -3103,25 +3103,29 @@
%0Aif len(
-timed_out
+unknown_error
_herbari
@@ -3159,25 +3159,34 @@
erbaria
-timed out
+had unknown errors
. Deal w
@@ -3221,17 +3221,21 @@
int(
-timed_out
+unknown_error
_her
|
9c2514fce4d8d6c46fddae8e79afe66631b468ae
|
add outer_width to RackTable (#7766)
|
netbox/dcim/tables/racks.py
|
netbox/dcim/tables/racks.py
|
import django_tables2 as tables
from django_tables2.utils import Accessor

from dcim.models import Rack, RackReservation, RackRole
from tenancy.tables import TenantColumn
from utilities.tables import (
    BaseTable, ButtonsColumn, ChoiceFieldColumn, ColorColumn, ColoredLabelColumn, LinkedCountColumn, MarkdownColumn,
    TagColumn, ToggleColumn, UtilizationColumn,
)

__all__ = (
    'RackTable',
    'RackReservationTable',
    'RackRoleTable',
)


#
# Rack roles
#

class RackRoleTable(BaseTable):
    pk = ToggleColumn()
    name = tables.Column(linkify=True)
    rack_count = tables.Column(verbose_name='Racks')
    color = ColorColumn()
    actions = ButtonsColumn(RackRole)

    class Meta(BaseTable.Meta):
        model = RackRole
        fields = ('pk', 'id', 'name', 'rack_count', 'color', 'description', 'slug', 'actions')
        default_columns = ('pk', 'name', 'rack_count', 'color', 'description', 'actions')


#
# Racks
#

class RackTable(BaseTable):
    pk = ToggleColumn()
    name = tables.Column(
        order_by=('_name',),
        linkify=True
    )
    location = tables.Column(
        linkify=True
    )
    site = tables.Column(
        linkify=True
    )
    tenant = TenantColumn()
    status = ChoiceFieldColumn()
    role = ColoredLabelColumn()
    u_height = tables.TemplateColumn(
        template_code="{{ record.u_height }}U",
        verbose_name='Height'
    )
    comments = MarkdownColumn()
    device_count = LinkedCountColumn(
        viewname='dcim:device_list',
        url_params={'rack_id': 'pk'},
        verbose_name='Devices'
    )
    get_utilization = UtilizationColumn(
        orderable=False,
        verbose_name='Space'
    )
    get_power_utilization = UtilizationColumn(
        orderable=False,
        verbose_name='Power'
    )
    tags = TagColumn(
        url_name='dcim:rack_list'
    )

    class Meta(BaseTable.Meta):
        model = Rack
        fields = (
            'pk', 'id', 'name', 'site', 'location', 'status', 'facility_id', 'tenant', 'role', 'serial', 'asset_tag', 'type',
            'width', 'u_height', 'comments', 'device_count', 'get_utilization', 'get_power_utilization', 'tags',
        )
        default_columns = (
            'pk', 'name', 'site', 'location', 'status', 'facility_id', 'tenant', 'role', 'u_height', 'device_count',
            'get_utilization', 'get_power_utilization',
        )


#
# Rack reservations
#

class RackReservationTable(BaseTable):
    pk = ToggleColumn()
    reservation = tables.Column(
        accessor='pk',
        linkify=True
    )
    site = tables.Column(
        accessor=Accessor('rack__site'),
        linkify=True
    )
    tenant = TenantColumn()
    rack = tables.Column(
        linkify=True
    )
    unit_list = tables.Column(
        orderable=False,
        verbose_name='Units'
    )
    tags = TagColumn(
        url_name='dcim:rackreservation_list'
    )
    actions = ButtonsColumn(RackReservation)

    class Meta(BaseTable.Meta):
        model = RackReservation
        fields = (
            'pk', 'id', 'reservation', 'site', 'rack', 'unit_list', 'user', 'created', 'tenant', 'description', 'tags',
            'actions',
        )
        default_columns = (
            'pk', 'reservation', 'site', 'rack', 'unit_list', 'user', 'description', 'actions',
        )
|
Python
| 0
|
@@ -1843,16 +1843,328 @@
t'%0A )
+%0A outer_width = tables.TemplateColumn(%0A template_code=%22%7B%7B record.outer_width %7D%7D %7B%7B record.outer_unit %7D%7D%22,%0A verbose_name='Outer Width'%0A )%0A outer_depth = tables.TemplateColumn(%0A template_code=%22%7B%7B record.outer_depth %7D%7D %7B%7B record.outer_unit %7D%7D%22,%0A verbose_name='Outer Depth'%0A )
%0A%0A cl
@@ -2375,16 +2375,46 @@
'width',
+ 'outer_width', 'outer_depth',
'u_heig
|
59706b4f3e45fbd3ea107e63f04181cbd89b9749
|
Remove conntrackd comment
|
neutron/conf/agent/l3/ha.py
|
neutron/conf/agent/l3/ha.py
|
# Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from neutron_lib.utils import host
from oslo_config import cfg

from neutron._i18n import _
from neutron.agent.linux import keepalived


OPTS = [
    cfg.StrOpt('ha_confs_path',
               default='$state_path/ha_confs',
               help=_('Location to store keepalived/conntrackd '
                      'config files')),
    cfg.StrOpt('ha_vrrp_auth_type',
               default='PASS',
               choices=keepalived.VALID_AUTH_TYPES,
               help=_('VRRP authentication type')),
    cfg.StrOpt('ha_vrrp_auth_password',
               help=_('VRRP authentication password'),
               secret=True),
    cfg.IntOpt('ha_vrrp_advert_int',
               default=2,
               help=_('The advertisement interval in seconds')),
    cfg.IntOpt('ha_keepalived_state_change_server_threads',
               default=(1 + host.cpu_count()) // 2,
               sample_default='(1 + <num_of_cpus>) / 2',
               min=1,
               help=_('Number of concurrent threads for '
                      'keepalived server connection requests. '
                      'More threads create a higher CPU load '
                      'on the agent node.')),
    cfg.IntOpt('ha_vrrp_health_check_interval',
               default=0,
               help=_('The VRRP health check interval in seconds. Values > 0 '
                      'enable VRRP health checks. Setting it to 0 disables '
                      'VRRP health checks. Recommended value is 5. '
                      'This will cause pings to be sent to the gateway '
                      'IP address(es) - requires ICMP_ECHO_REQUEST '
                      'to be enabled on the gateway. '
                      'If gateway fails, all routers will be reported '
                      'as master, and master election will be repeated '
                      'in round-robin fashion, until one of the router '
                      'restore the gateway connection.')),
]


def register_l3_agent_ha_opts(cfg=cfg.CONF):
    cfg.register_opts(OPTS)
|
Python
| 0.000006
|
@@ -914,45 +914,9 @@
ived
-/conntrackd '%0A '
+
conf
|
62763ded79e1afe1b37da0522f126e21ce18ab65
|
Rename ComparableResponse to TestResponse
|
flaskext/attest.py
|
flaskext/attest.py
|
from __future__ import absolute_import
from __future__ import with_statement

from contextlib import contextmanager
from flask import (Response, request, template_rendered as jinja_rendered)
from flask.signals import Namespace
from flask.testing import FlaskClient
from decorator import decorator


signals = Namespace()
template_rendered = signals.signal('template-rendered')


class ComparableResponse(Response):

    def __eq__(self, other):
        self.freeze()
        other.freeze()
        other.headers[:] = other.get_wsgi_headers(request.environ)
        return all(getattr(self, name) == getattr(other, name)
                   for name in ('status_code', 'headers', 'data'))

    def __ne__(self, other):
        return not self == other


def test_context(appfactory):
    @contextmanager
    def request_context():
        app = appfactory()
        templates = []

        def capture(sender, template, context):
            templates.append((template, context))

        @jinja_rendered.connect_via(app)
        def signal_jinja(sender, template, context):
            template_rendered.send(None, template=template.name,
                                   context=context)

        try:
            from flaskext.genshi import template_generated
        except ImportError:
            pass
        else:
            @template_generated.connect_via(app)
            def signal_genshi(sender, template, context):
                template_rendered.send(None, template=template.filename,
                                       context=context)

        with app_context(app) as client:
            with template_rendered.connected_to(capture):
                yield client, templates

    return request_context


@contextmanager
def app_context(app):
    with app.test_request_context():
        cls = getattr(app, 'test_client_class', FlaskClient)
        with cls(app, ComparableResponse) as client:
            yield client


def open(*args, **kwargs):
    @decorator
    def wrapper(func, client, *wrapperargs, **wrapperkwargs):
        response = client.open(*args, **kwargs)
        return func(response, *wrapperargs, **wrapperkwargs)
    return wrapper


def get(*args, **kwargs):
    kwargs['method'] = 'GET'
    return open(*args, **kwargs)


def post(*args, **kwargs):
    kwargs['method'] = 'POST'
    return open(*args, **kwargs)


def head(*args, **kwargs):
    kwargs['method'] = 'HEAD'
    return open(*args, **kwargs)


def put(*args, **kwargs):
    kwargs['method'] = 'PUT'
    return open(*args, **kwargs)


def delete(*args, **kwargs):
    kwargs['method'] = 'DELETE'
    return open(*args, **kwargs)
|
Python
| 0.999974
|
@@ -377,26 +377,20 @@
%0A%0Aclass
-Comparable
+Test
Response
@@ -1877,18 +1877,12 @@
pp,
-Comparable
+Test
Resp
|
9d9302bdd961f21f36bbfd61265aa7c1e7117401
|
don't perform the check if first_edit is None
|
follower/models.py
|
follower/models.py
|
from datetime import datetime

import pytz
from django.db import models
import feedparser

feedparser.USER_AGENT='OSMFollower/1.0 +http://mapexplorer.org'


# Create your models here.
class Mapper(models.Model):
    user=models.CharField(max_length=20)
    scan_date=models.DateTimeField('last_scan_date',null=True,blank=True)
    edit_date=models.DateTimeField('last_edit_date',null=True,blank=True)
    first_edit_date=models.DateTimeField('last_edit_date',null=True,blank=True)

    def check_edits(self):
        feed=feedparser.parse('http://www.openstreetmap.org/user/'
                              + self.user + '/edits/feed')
        if len(feed.entries) > 0:
            published_parsed=feed.entries[0].published_parsed
            self.edit_date=datetime(published_parsed.tm_year
                                    ,published_parsed.tm_mon
                                    ,published_parsed.tm_mday
                                    ,published_parsed.tm_hour
                                    ,published_parsed.tm_min
                                    ,published_parsed.tm_sec
                                    ,0
                                    ,pytz.utc)
            published_parsed=feed.entries[-1].published_parsed
            first_edit_date=datetime(published_parsed.tm_year
                                     ,published_parsed.tm_mon
                                     ,published_parsed.tm_mday
                                     ,published_parsed.tm_hour
                                     ,published_parsed.tm_min
                                     ,published_parsed.tm_sec
                                     ,0
                                     ,pytz.utc)
            is_older_edit=first_edit_date < self.first_edit_date
            if self.first_edit_date==None or is_older_edit:
                self.first_edit_date=first_edit_date
        self.scan_date=datetime.now(pytz.utc)
|
Python
| 0.999994
|
@@ -1712,39 +1712,23 @@
-is_older_edit=first_edit_date %3C
+%0A if
sel
@@ -1736,33 +1736,50 @@
.first_edit_date
-%0A
+==None or %5C%0A
if s
@@ -1770,32 +1770,26 @@
-if self.
+
first_edit_d
@@ -1795,31 +1795,31 @@
date
-==None or is_older_edit
+ %3C self.first_edit_date
:%0A
|
3ae39e06cd3695b5640a2a1c079bb32f4c7126b4
|
Switch from tuples to lists in Django's settings
|
antxetamedia/settings.py
|
antxetamedia/settings.py
|
import os

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'd2w#o#(!antcw5e%(#p5*pu(x=zhw60^byh$)ps+4#e8m#-fj!'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

INSTALLED_APPS = (
    'django.contrib.sites',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'grappelli.dashboard',
    'grappelli',
    'django.contrib.admin',
    'adminsortable2',
    'ckeditor',
    'ckeditor_uploader',
    'compressor',
    'recurrence',
    'kombu.transport.django',
    'watson',
    'sorl.thumbnail',
    'django_filters',
    'antxetamedia.frontpage',
    'antxetamedia.blobs.apps.BlobsConfig',
    'antxetamedia.shows',
    'antxetamedia.news.apps.NewsConfig',
    'antxetamedia.radio.apps.RadioConfig',
    'antxetamedia.projects.apps.ProjectsConfig',
    'antxetamedia.schedule',
    'antxetamedia.widgets',
    'antxetamedia.events.apps.EventsConfig',
    'antxetamedia.flatpages',
    'antxetamedia.archive',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)

SITE_ID = 1

ROOT_URLCONF = 'antxetamedia.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join('antxetamedia/templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'antxetamedia.flatpages.context_processors.menu_flatpage_list',
            ],
        },
    },
]

WSGI_APPLICATION = 'antxetamedia.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

LANGUAGE_CODE = 'eu'
LANGUAGES = [('eu', 'Euskara')]

TIME_ZONE = 'Europe/Paris'

USE_I18N = True
USE_L10N = True
USE_TZ = True

LOCALE_PATHS = [os.path.join(BASE_DIR, 'antxetamedia/locale')]


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/

MEDIA_ROOT = os.path.join(BASE_DIR, '.media')
MEDIA_URL = '/media/'

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, '.assets')
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'antxetamedia/static')]
STATICFILES_FINDERS = [
    'compressor.finders.CompressorFinder',
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]

BROKER_URL = 'django://'
CELERY_ALWAYS_EAGER = True

COMPRESS_PRECOMPILERS = (('text/x-sass', 'django_libsass.SassCompiler'),)

CKEDITOR_JQUERY_URL = os.path.join(STATIC_URL, 'bower_components/jquery/dist/jquery.min.js')
CKEDITOR_UPLOAD_PATH = 'ckeditor/'
CKEDITOR_IMAGE_BACKEND = 'pillow'
CKEDITOR_CONFIGS = {
    'default': {
        'toolbar': [
            ['Format', 'Bold', 'Italic', 'Underline', 'StrikeThrough', '-',
             'NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'JustifyLeft', 'JustifyCenter',
             'JustifyRight', 'JustifyBlock'],
            ['Image', 'Link', 'Source'],
            ['Undo', 'Redo', '-', 'Cut', 'Copy', 'Paste', 'Find', 'Replace', '-', 'Print'],
        ],
    }
}

GRAPPELLI_INDEX_DASHBOARD = 'antxetamedia.dashboard.AntxetamediaDashboard'
GRAPPELLI_ADMIN_TITLE = 'Antxetamedia'
GRAPPELLI_SWITCH_USER = True
GRAPPELLI_CLEAN_INPUT_TYPES = False

FRONTPAGE_NEWSPODCASTS = 10
FRONTPAGE_RADIOPODCASTS = 5
FRONTPAGE_EVENTS = 5

NEWSCATEGORIES_COOKIE = 'newscategories'
RADIOSHOWS_COOKIE = 'radioshows'
|
Python
| 0
|
@@ -322,25 +322,25 @@
LLED_APPS =
-(
+%5B
%0A 'django
@@ -1167,17 +1167,17 @@
chive',%0A
-)
+%5D
%0A%0AMIDDLE
@@ -1191,17 +1191,17 @@
ASSES =
-(
+%5B
%0A 'dj
@@ -1659,17 +1659,17 @@
eware',%0A
-)
+%5D
%0A%0ASITE_I
@@ -3432,17 +3432,17 @@
ILERS =
-(
+%5B
('text/x
@@ -3483,10 +3483,9 @@
er')
-,)
+%5D
%0A%0ACK
|
9da0110c6d36c099cefecb7d653159f16175a139
|
fix bug
|
dataset/research/job.py
|
dataset/research/job.py
|
""" Classes Job and Experiment. """
import os
from collections import OrderedDict
from copy import copy
import dill
from .. import Pipeline, Config, inbatch_parallel
class Job:
""" Contains one job. """
def __init__(self, executable_units, n_iters, repetition, configs, branches, name):
"""
Parameters
----------
config : dict or Config
config of experiment
"""
self.experiments = []
# self.config = config
self.executable_units = executable_units
self.n_iters = n_iters
self.configs = configs
self.repetition = repetition
self.branches = branches
self.name = name
def init(self, worker_config, gpu_configs):
""" Create experiments. """
self.worker_config = worker_config
for unit in self.executable_units.values():
unit.exec_for = self.get_iterations(unit.exec_for, self.n_iters)
unit.dump_for = self.get_iterations(unit.dump_for, self.n_iters)
for index, config in enumerate(self.configs):
if isinstance(self.branches, list):
branch_config = self.branches[index]
else:
branch_config = dict()
units = OrderedDict()
for name, unit in self.executable_units.items():
unit = unit.get_copy()
if unit.pipeline is not None:
import_config = {key: units[value].pipeline for key, value in unit.kwargs.items()}
else:
import_config = dict()
unit.set_config(config, {**branch_config, **gpu_configs[index]}, worker_config, import_config)
unit.repetition = self.repetition[index]
unit.index = index
unit.create_folder(self.name)
units[name] = unit
self.experiments.append(units)
def get_iterations(self, execute_for, n_iters=None):
""" Get indices of iterations from execute_for. """
if n_iters is not None:
if isinstance(execute_for, int):
if execute_for == -1:
execute_for = [n_iters - 1]
else:
execute_for = list(range(-1, n_iters, execute_for))
elif execute_for is None:
execute_for = list(range(n_iters))
return execute_for
def get_description(self):
""" Get description of job. """
if isinstance(self.branches, list):
description = '\n'.join([str({**config.alias(), **_config, **self.worker_config})
for config, _config in zip(self.configs, self.branches)])
else:
description = '\n'.join([str({**config.alias(), **self.worker_config})
for config in self.configs])
return description
def parallel_execute_for(self, iteration, name, run=False):
""" Parallel execution of pipeline 'name'. """
if run:
while True:
try:
batch = self.executable_units[name].next_batch_root()
self._parallel_run(batch, iteration, name)
except Exception:
break
else:
batch = self.executable_units[name].next_batch_root()
self._parallel_run(batch, iteration, name)
self.put_all_results(iteration, name)
@inbatch_parallel(init='_parallel_init')
def _parallel_run(self, item, iteration, name, batch):
_ = name
item[1].execute_for(batch, iteration)
def _parallel_init(self, iteration, name, *args):
_ = iteration, args
return [(i, experiment[name]) for i, experiment in enumerate(self.experiments)]
@inbatch_parallel(init='_parallel_init')
def parallel_call(self, item, iteration, name):
item[name](iteration, item, *item[name].args, **item[name].kwargs)
def put_all_results(self, iteration, name, result=None):
""" Add values of pipeline variables to results. """
for experiment in self.experiments:
experiment[name].put_result(iteration, result)
|
Python
| 0.000001
|
@@ -3182,39 +3182,32 @@
f._parallel_run(
-batch,
iteration, name)
@@ -3197,32 +3197,39 @@
(iteration, name
+, batch
)%0A
@@ -3385,23 +3385,16 @@
lel_run(
-batch,
iteratio
@@ -3396,24 +3396,31 @@
ration, name
+, batch
)%0A se
|
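Decoded, the fix moves batch from the first to the last positional argument at both call sites, matching the decorated signature _parallel_run(self, item, iteration, name, batch); a sketch of the corrected call, reconstructed from the old contents plus the diff:

# batch now follows iteration and name at both call sites.
batch = self.executable_units[name].next_batch_root()
self._parallel_run(iteration, name, batch)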
cfbe7778e441f5851dc0efbacdfebd5209c31742
|
bump version
|
cupy/_version.py
|
cupy/_version.py
|
__version__ = '11.0.0b3'
|
Python
| 0
|
@@ -18,8 +18,9 @@
.0.0
-b3
+rc1
'%0A
|
bb6f7d8a6e71c7b74697b9db46abdd8d0b831ec9
|
Remove unnecessary import and print statement
|
run_experiment.py
|
run_experiment.py
|
#!/usr/bin/env python
import argparse
from collections import namedtuple
from glob import glob
import itertools as it
import os
import os.path
import sys
import numpy as np
import pandas as pd
from otdet.detector import OOTDetector
from otdet.evaluation import TopListEvaluator
from otdet.feature_extraction import ReadabilityMeasures
from otdet.util import pick
def experiment(setting, niter):
"""Do experiment with the specified setting."""
# Obtain normal posts
norm_files = pick(glob(os.path.join(setting.norm_dir, '*.txt')),
k=setting.num_norm, randomized=False)
norm_docs = []
for file in norm_files:
with open(file) as f:
norm_docs.append(f.read())
res = []
for jj in range(niter):
# Obtain OOT posts
oot_files = pick(glob(os.path.join(setting.oot_dir, '*.txt')),
k=setting.num_oot)
oot_docs = []
for file in oot_files:
with open(file) as f:
oot_docs.append(f.read())
# Combine them both
documents = norm_docs + oot_docs
is_oot = [False]*setting.num_norm + [True]*setting.num_oot
# Apply OOT post detection methods
if setting.feature == 'unigram':
detector = OOTDetector()
else:
extractor = ReadabilityMeasures()
detector = OOTDetector(extractor=extractor)
func = getattr(detector, setting.method)
distances = func(documents, metric=setting.metric)
# Construct ranked list of OOT posts (1: most off-topic)
# In case of tie, prioritize normal post (worst case)
s = sorted(zip(distances, is_oot), key=lambda x: x[1])
subresult = sorted(s, reverse=True)
res.append(subresult)
return res
def evaluate(result, setting):
"""Evaluate an experiment result with the given setting."""
n = setting.num_oot
M = setting.num_norm + n
N = setting.num_top
evaluator = TopListEvaluator(result, M=M, n=n, N=N)
return (evaluator.baseline, evaluator.performance,
evaluator.min_sup, evaluator.max_sup)
def shorten(dirname):
"""Shorten a thread directory name."""
split_path = dirname.split(os.sep)
thread, post = (split_path[-2], split_path[-1]) if split_path[-1] != '' \
else (split_path[-3], split_path[-2])
post_id = post.split('__')[0]
return thread[:3] + post_id
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Run experiment with given '
'settings')
parser.add_argument('-nd', '--norm-dir', type=str, nargs='+',
required=True, help='Normal thread directory')
parser.add_argument('-od', '--oot-dir', type=str, nargs='+', required=True,
help='Thread directory from which '
'OOT post will be taken')
parser.add_argument('-m', '--num-norm', type=int, nargs='+', required=True,
help='Number of posts taken from '
'normal thread directory')
parser.add_argument('-n', '--num-oot', type=int, nargs='+', required=True,
help='Number of posts taken from '
'another thread directory to be OOT posts')
parser.add_argument('-a', '--method', type=str, nargs='+', required=True,
choices=['clust_dist', 'mean_comp', 'txt_comp_dist'],
help='OOT post detection method to use')
parser.add_argument('-d', '--metric', type=str, nargs='+', required=True,
choices=['euclidean', 'cityblock', 'cosine',
'correlation'],
help='Distance metric to use')
parser.add_argument('-f', '--feature', type=str, nargs='+', required=True,
choices=['unigram', 'readability'],
help='Text features to be used')
parser.add_argument('-t', '--num-top', type=int, nargs='+', required=True,
help='Number of posts in top N list')
parser.add_argument('--niter', type=int, default=1,
help='Number of iteration for each method')
parser.add_argument('-j', '--jobs', type=int, default=1,
help='Number of work processes')
parser.add_argument('--hdf-name', type=str, required=True,
help='Where to store the result in HDF5 format')
parser.add_argument('--hdf-key', type=str, default='df',
help='Identifier in the HDF5 store')
args = parser.parse_args()
# Experiment settings
names = ['method', 'feature', 'metric', 'norm_dir', 'oot_dir', 'num_norm',
'num_oot', 'num_top']
ExprSetting = namedtuple('ExprSetting', names)
settings = list(it.product(args.method, args.feature, args.metric,
args.norm_dir, args.oot_dir, args.num_norm,
args.num_oot, args.num_top))
settings = [ExprSetting(*sett) for sett in settings[:]]
# Do experiments
results = [experiment(setting, args.niter) for setting in settings]
index_tup, column_tup = [], []
data = np.array([])
for setting, result in zip(settings, results):
# Evaluate the result of each setting
baseline, performance, min_sup, max_sup = evaluate(result, setting)
# Prepare Pandas MultiIndex tuples
norm_dir = shorten(setting.norm_dir)
oot_dir = shorten(setting.oot_dir)
index_tup.append((setting.method, setting.feature, setting.metric,
norm_dir, oot_dir))
for res in ['base', 'perf']:
for k in range(min_sup, max_sup+1):
column_tup.append((setting.num_norm, setting.num_oot,
setting.num_top, res, k))
# Prepare Pandas DataFrame data
data = np.concatenate((data, baseline))
data = np.concatenate((data, performance))
# Create index tuples list
st = set()
index = []
for idx in index_tup:
if idx not in st:
index.append(idx)
st.add(idx)
# Create column tuples list
st = set()
columns = []
for col in column_tup:
if col not in st:
columns.append(col)
st.add(col)
# Prepare to store in HDF5 format
index_names = names[:5]
column_names = names[5:] + ['result', 'k']
index = pd.MultiIndex.from_tuples(index, names=index_names)
columns = pd.MultiIndex.from_tuples(columns, names=column_names)
df = pd.DataFrame(data.reshape((len(index), len(columns))),
index=index, columns=columns)
# Store in HDF5 format
df.to_hdf(args.hdf_name, args.hdf_key)
print("Stored in HDF5 format with the name '{}'".format(args.hdf_key))
print('Done', file=sys.stderr, flush=True)
|
Python
| 0.000005
|
@@ -140,19 +140,8 @@
path
-%0Aimport sys
%0A%0Aim
@@ -6878,52 +6878,4 @@
y))%0A
-%0A print('Done', file=sys.stderr, flush=True)%0A
|
eac2f296e855f92d040321edee943ad5f8a8fb39
|
Add filtering to view (nc-463)
|
nodeconductor/events/views.py
|
nodeconductor/events/views.py
|
from rest_framework import generics, response
from nodeconductor.events import elasticsearch_client
class EventListView(generics.GenericAPIView):
def list(self, request, *args, **kwargs):
order_by = request.GET.get('o', '-@timestamp')
elasticsearch_list = elasticsearch_client.ElasticsearchResultList(user=request.user, sort=order_by)
page = self.paginate_queryset(elasticsearch_list)
if page is not None:
return self.get_paginated_response(page)
return response.Response(elasticsearch_list)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
|
Python
| 0
|
@@ -244,16 +244,125 @@
stamp')%0A
+ event_types = request.GET.getlist('event_type')%0A search_text = request.GET.get('search_text')%0A
@@ -427,16 +427,29 @@
ultList(
+%0A
user=req
@@ -472,16 +472,66 @@
order_by
+, event_types=event_types, search_text=search_text
)%0A%0A
|
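Decoded, the view now reads two extra query parameters and forwards them to the Elasticsearch result list; a sketch of the resulting list() body, reconstructed from the old contents plus the diff:

def list(self, request, *args, **kwargs):
    order_by = request.GET.get('o', '-@timestamp')
    event_types = request.GET.getlist('event_type')
    search_text = request.GET.get('search_text')
    elasticsearch_list = elasticsearch_client.ElasticsearchResultList(
        user=request.user, sort=order_by, event_types=event_types, search_text=search_text)
    # pagination continues as before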
f975aa9aa03e4334be927f618e136205af055df0
|
Change the code because the returned data format changed
|
rundemo.py
|
rundemo.py
|
#!/usr/bin/env python
"""
author: Lu LIU
created at: 2014-07-25
Description:
a sample of how to use pymmrouting to find multimodal optimal paths in
transportation networks
"""
from pymmrouting.routeplanner import MultimodalRoutePlanner
from pymmrouting.inferenceengine import RoutingPlanInferer
from termcolor import colored
import datetime
import argparse
import json
import logging.config
import os
LOGGING_CONF_FILE = 'logging.json'
DEFAULT_LOGGING_LVL = logging.INFO
path = LOGGING_CONF_FILE
value = os.getenv('LOG_CFG', None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument("ROUTING_OPTIONS_FILE",
help="User-defined options about travelling")
parser.add_argument("-c", "--APP-CONFIG", default="config.json",
help="config for client application")
args = parser.parse_args()
ROUTING_OPTIONS_FILE = args.ROUTING_OPTIONS_FILE
CONFIG_FILE = args.APP_CONFIG
with open(ROUTING_OPTIONS_FILE) as f:
routing_options = json.load(f)
# For multimodal routing, a bunch of options are necessary other than routing
# origin and destination. The acceptable format of multimodal routing options
# are stored in a JSON file.
print "Generating multimodal routing plans... ",
inferer = RoutingPlanInferer()
inferer.load_routing_options(routing_options)
# Routing plans of multimodal paths calculation can be generated by the
# inference engine with the routing options as inputs
routing_plans = inferer.generate_routing_plan()
print colored("done!", "green")
print "Inferred plans: "
for i, p in enumerate(routing_plans):
print "== " + str(i+1) + ". " + p.description + " =="
print "modes: " + str(p.mode_list)
print "switch types: " + str(p.switch_type_list)
print "source: " + str(p.source)
print "target: " + str(p.target)
print "public transits: " + str(p.public_transit_set)
print "switch constraints: " + str(p.switch_constraint_list)
route_planner = MultimodalRoutePlanner()
# A multimodal network data model is necessary for multimodal path finding. It
# loads network dataset from external sources, e.g. PostgreSQL database, plain
# text file, etc. A multimodal network is assembled on-the-fly according to a
# concrete routing plan
print "Routing from " + \
colored(str(routing_options['source']['value']['x']) + ',' +
str(routing_options['source']['value']['y']), 'red') + " to " + \
colored(str(routing_options['target']['value']['x']) + ',' +
str(routing_options['target']['value']['y']), 'red')
rough_results = route_planner.batch_find_path(routing_plans)
final_results = route_planner.refine_results(rough_results)
print colored("Finish doing routing plan!", "green")
print "Final refined routing results are: "
for i, r in enumerate(final_results["result list"]):
print "== " + str(i + 1) + ". " + r["description"] + " =="
print "Does it exist? ",
if r["is existent"] is True:
print colored(str(r["is existent"]), "green")
else:
print colored(str(r["is existent"]), "red")
print "Total distance: ",
print colored(str(r["length"]), "red"),
print " meters"
print "Total time (estimated): ",
print colored(str(datetime.timedelta(minutes=float(r["time"]))), "red")
print "Total walking distance: ",
print colored(str(r["walking length"]), "red"),
print " meters"
print "Total walking time (estimated): ",
print colored(str(datetime.timedelta(minutes=float(r["walking time"]))), "red")
print "Multimodal path: "
for p in r["paths"]:
print colored((p["mode"] + ": "), "red")
print str(p["geojson"])
print "Switch Points along the path: "
for sp in r["switch points"]:
print colored((sp["type"] + ": "), "blue")
print str(sp["geojson"])
print str(sp["tags"])
with (open("tmp/multimodal_routing_results.json", 'w')) as result_file:
result_file.write(json.dumps(final_results))
route_planner.cleanup()
|
Python
| 0.000048
|
@@ -3054,18 +3054,13 @@
s%5B%22r
-esult list
+outes
%22%5D):
@@ -3101,27 +3101,23 @@
%22 + r%5B%22
-description
+summary
%22%5D + %22 =
@@ -3158,27 +3158,25 @@
if r%5B%22
-is
existen
-t
+ce
%22%5D is Tr
@@ -3204,35 +3204,33 @@
ored(str(r%5B%22
-is
existen
-t
+ce
%22%5D), %22green%22
@@ -3274,19 +3274,17 @@
(r%5B%22
-is
existen
-t
+ce
%22%5D),
@@ -3346,22 +3346,24 @@
(str(r%5B%22
-length
+distance
%22%5D), %22re
@@ -3483,20 +3483,24 @@
loat(r%5B%22
-time
+duration
%22%5D))), %22
@@ -3579,15 +3579,17 @@
king
- length
+_distance
%22%5D),
@@ -3728,21 +3728,25 @@
%22walking
- time
+_duration
%22%5D))), %22
@@ -3785,86 +3785,8 @@
: %22%0A
- for p in r%5B%22paths%22%5D:%0A print colored((p%5B%22mode%22%5D + %22: %22), %22red%22)%0A
@@ -3795,17 +3795,17 @@
int str(
-p
+r
%5B%22geojso
@@ -3875,17 +3875,17 @@
%5B%22switch
-
+_
points%22%5D
@@ -3916,14 +3916,35 @@
(sp%5B
-%22
+'properties'%5D%5B'switch_
type
-%22
+'
%5D +
@@ -3982,49 +3982,8 @@
r(sp
-%5B%22geojson%22%5D)%0A print str(sp%5B%22tags%22%5D
)%0Awi
|
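Decoded, the script follows a renamed result schema; the key mapping, reconstructed from the diff:

# "result list"    -> "routes"
# "description"    -> "summary"
# "is existent"    -> "existence"
# "length"         -> "distance"
# "time"           -> "duration"
# "walking length" -> "walking_distance"
# "walking time"   -> "walking_duration"
# "switch points"  -> "switch_points"
# sp["type"]       -> sp['properties']['switch_type']
# The per-path loop over r["paths"] is dropped: the geojson now comes from
# r["geojson"], and the switch-point prints collapse to print str(sp).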
d62f3bc97bd318ebaf68e97ccc2629d9f8f246b5
|
Correct the pyproj minimum version.
|
sources/mapnik/setup.py
|
sources/mapnik/setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
def prerelease_local_scheme(version):
"""
Return local scheme version unless building on master in CircleCI.
This function returns the local scheme version number
(e.g. 0.0.0.dev<N>+g<HASH>) unless building on CircleCI for a
pre-release in which case it ignores the hash and produces a
PEP440 compliant pre-release version number (e.g. 0.0.0.dev<N>).
"""
from setuptools_scm.version import get_local_node_and_date
if os.getenv('CIRCLE_BRANCH') in ('master', ):
return ''
else:
return get_local_node_and_date(version)
setup(
name='large-image-source-mapnik',
use_scm_version={'root': '../..', 'local_scheme': prerelease_local_scheme},
setup_requires=['setuptools-scm'],
description='A Mapnik/GDAL tilesource for large_image',
long_description='See the large-image package for more details.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
install_requires=[
'large-image>=1.0.0',
'gdal',
'mapnik',
'palettable',
'pyproj>=2.0.0',
],
extras_require={
'girder': 'girder-large-image>=1.0.0',
},
license='Apache Software License 2.0',
keywords='large_image, tile source',
packages=find_packages(exclude=['test', 'test.*']),
url='https://github.com/girder/large_image',
entry_points={
'large_image.source': [
'mapnik = large_image_source_mapnik:MapnikFileTileSource'
],
'girder_large_image.source': [
'mapnik = large_image_source_mapnik.girder_source:MapnikGirderTileSource'
]
},
)
|
Python
| 0
|
@@ -1594,17 +1594,17 @@
proj%3E=2.
-0
+2
.0',%0A
|
a44992f584e6e23ff683f80798f2c672626f9481
|
Bump rankabrand scraper to get new chocolate ratings
|
scraper.py
|
scraper.py
|
# -*- coding: utf-8 -*-
# Copyright 2014 SpendRight, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main loop for all brand scrapers.
If you want to run particular scrapers (for testing), you can put
their names on the command line (e.g. python scraper.py avon kraft).
"""
import logging
from argparse import ArgumentParser
from datetime import datetime
from datetime import timedelta
from os import environ
from srs.db import use_decimal_type_in_sqlite
from srs.harness import run_scrapers
log = logging.getLogger('scraper')
# scrape these campaigns no more often than this limit
DEFAULT_SCRAPE_FREQ = timedelta(days=6, hours=1) # run nightly, scrape weekly
DISABLED_CAMPAIGNS = set()
# scrape rankabrand even less often than that
CAMPAIGN_TO_SCRAPE_FREQ = {
# give Climate Counts a chance to update their rating system
'climate_counts': timedelta(days=80),
'rankabrand': timedelta(days=60),
}
# use this to force scrapers to re-run (e.g. because code has changed)
# this is supposed to be UTC time; if using a date, err toward the future
CAMPAIGN_CHANGED_SINCE = {
'b_corp': datetime(2015, 4, 30),
'bang_accord': datetime(2015, 4, 30),
'climate_counts': datetime(2015, 12, 8),
'cotton_snapshot': datetime(2015, 12, 7),
'free2work': datetime(2015, 10, 26),
'greenpeace_electronics': datetime(2015, 4, 30),
'hrc': datetime(2015, 10, 15),
'mining_the_disclosures': datetime(2015, 9, 24),
'rankabrand': datetime(2015, 5, 23),
'wwf_palm_oil': datetime(2015, 4, 30),
}
def main():
opts = parse_args()
level = logging.DEBUG if opts.verbose else logging.INFO
logging.basicConfig(format='%(name)s: %(message)s', level=level)
campaigns = opts.campaigns
if not campaigns and environ.get('MORPH_CAMPAIGNS'):
campaigns = environ['MORPH_CAMPAIGNS'].split(',')
skip_campaigns = set(DISABLED_CAMPAIGNS)
if environ.get('MORPH_SKIP_CAMPAIGNS'):
skip_campaigns.update(environ['MORPH_SKIP_CAMPAIGNS'].split(','))
use_decimal_type_in_sqlite()
run_scrapers(get_records_from_campaign_scraper,
scraper_ids=campaigns,
skip_scraper_ids=skip_campaigns,
default_freq=DEFAULT_SCRAPE_FREQ,
scraper_to_freq=CAMPAIGN_TO_SCRAPE_FREQ,
scraper_to_last_changed=CAMPAIGN_CHANGED_SINCE)
def parse_args(args=None):
parser = ArgumentParser()
parser.add_argument('campaigns', metavar='campaign_id', nargs='*',
help='whitelist of campaigns to scrape')
parser.add_argument(
'-v', '--verbose', dest='verbose', default=False, action='store_true',
help='Enable debug logging')
return parser.parse_args(args)
def get_records_from_campaign_scraper(scraper):
return scraper.scrape_campaign()
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -1995,16 +1995,16 @@
(201
-5, 5, 23
+6, 1, 14
),%0A
|
236e1cd509b0f06469e78caef5ac0b8e2abca0b6
|
Use the OSXApp class when packaging OS X applications.
|
sdk/env.py
|
sdk/env.py
|
import app
import osx_app
import linux_app
import os
import platform
import sys
import os.path as p
class PackagingEnvironment(object):
def __init__(self, version, target_os, product_name="kroll"):
self.target_os = target_os
if target_os is 'linux':
self.App = linux_app.LinuxApp
if target_os is 'osx':
self.App = osx_app.App
if target_os is 'win32':
self.App = app.Win32App
self.version = version
if (target_os is 'linux'):
pname = product_name.lower()
self.install_dirs = [
p.expanduser('~/.' + pname),
"/opt/" + pname,
"/usr/local/lib/" + pname,
"/usr/lib/" + pname
]
elif (target_os is 'osx'):
pname = product_name.capitalize()
self.install_dirs = [
p.expanduser('~/Library/Application Support/' + pname),
'/Library/Application Support/' + pname
]
elif (target_os is 'win32'):
pname = product_name.capitalize()
self.install_dirs = [
p.join(os.environ['APPDATA'], pname),
# TODO: Is there a better way to determine this directory?
'C:\\ProgramData\\' + pname
]
else:
raise Exception("Unknown environment!")
# If we are a packaging server, everything we need will
# be in the same directory as this script file.
script_dir = p.abspath(p.dirname(sys._getframe(0).f_code.co_filename))
# If we are in the build hierarchy, try to find runtimes and modules
# relative to this file's location.
build_subpath = p.join('build', self.target_os)
self.components_dir = None
if (p.exists(p.join(script_dir, '..', 'kroll')) and
p.exists(p.join(script_dir, '..', 'build', self.target_os, 'runtime')) and
p.exists(p.join(script_dir, '..', 'build', self.target_os, 'sdk'))):
self.components_dir = p.join(script_dir, '..', 'build', self.target_os)
elif p.exists(p.join(script_dir, '..', 'runtime')) and p.exists(p.join(script_dir, '..', 'sdk')):
self.components_dir = p.join(script_dir, '..')
# Couldn't find any build assets, so assume that we could be a
# packaging server, which has assets in the same directory as the
# script.
elif p.exists(p.join(script_dir, 'runtime')):
			self.components_dir = script_dir  # assets sit next to this script; 'cwd' was undefined
def log(self, text):
print u' -> %s' % text
def get_excludes(self):
return ['.pdb', '.exp', '.ilk', '.lib', '.svn',
'.git', '.gitignore', '.cvsignore']
def get_component(self, type, name, version):
# First try the build directory.
if self.components_dir:
target = p.join(self.components_dir, type)
if name: # Modules have names
target = p.join(target, name)
if p.exists(target):
return target
# Next try searching list of installed directories
for dir in self.install_dirs:
target = p.join(dir, type, self.target_os)
if name: target = p.join(target, name)
target = p.join(target, version)
if p.exists(target):
return target
return None
def get_sdk_dir(self, version):
c = self.get_component('sdk', None, version)
if not c:
raise Exception(u'Could not find SDK version %s' % version)
return c
def get_runtime_dir(self, version):
c = self.get_component('runtime', None, version)
if not c:
raise Exception(u'Could not find runtime version %s' % version)
return c
def get_module_dir(self, module):
c = self.get_component('modules', module[0], module[1])
if not c:
raise Exception(u'Could not find module %s-%s' % module)
return c
def run(self, command):
self.log(u'Launching: %s' % command)
os.system(command)
|
Python
| 0
|
@@ -329,16 +329,19 @@
osx_app.
+OSX
App%0A%09%09if
|
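Decoded, the one-line fix points the osx branch at osx_app.OSXApp; a sketch of the corrected branch (the == comparison here is an assumption for robustness — the original compares string literals with the fragile identity operator is):

if target_os == 'osx':  # assumed '=='; the original keeps 'is'
	self.App = osx_app.OSXApp  # was osx_app.App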
32a69f96821a543dc8efd1b11b2bf5fa129c4b87
|
Starting fraction is 1
|
EVLA_pipeline1.3.0/EVLA_pipe_fake_flagall.py
|
EVLA_pipeline1.3.0/EVLA_pipe_fake_flagall.py
|
'''
On mixed setups, flagall is already run. This defines the variables set
during that script so it doesn't need to be run multiple times.
'''
logprint("Starting EVLA_pipe_fake_flagall.py",
logfileout='logs/flagall.log')
time_list = runtiming('flagall', 'start')
QA2_flagall = 'Pass'
logprint("These value are fake! You should have run the actual flagall script"
" already in the initial pipeline run! Check those logs for actual"
" flagging fractions.")
start_total = 0.0
start_flagged = 0.0
init_on_source_vis = 1.0
afterzero_total = 1.0
afterzero_flagged = 0.0
zero_flagged = 0.0
aftershadow_total = 1.0
aftershadow_flagged = 0.0
shadow_flagged = 0.0
flagdata_list=[]
cmdreason_list=[]
frac_flagged_on_source1 = 0.0
logprint("Finished EVLA_pipe_fake_flagall.py", logfileout='logs/flagall.log')
logprint("QA2 score: "+QA2_flagall, logfileout='logs/flagall.log')
time_list = runtiming('flagall', 'end')
pipeline_save()
|
Python
| 0.999999
|
@@ -494,17 +494,17 @@
total =
-0
+1
.0%0Astart
|
f70421a0c3143648f7dd2491ad031e62ca92792a
|
Increment version for password reset admin form fix
|
accountsplus/__init__.py
|
accountsplus/__init__.py
|
__version__ = '1.3.1'
default_app_config = 'accountsplus.apps.AccountsConfig'
|
Python
| 0
|
@@ -12,17 +12,17 @@
= '1.3.
-1
+2
'%0A%0Adefau
|
bb165b4f8fc88ab3de26b0b52f07ada612e87f2b
|
Fix test cases related to getting tags and fields
|
tests/client_test.py
|
tests/client_test.py
|
""" InfluxAlchemy client tests. """
import mock
import influxdb
from influxalchemy.client import InfluxAlchemy
from influxalchemy.measurement import Measurement
from influxalchemy.query import InfluxDBQuery
@mock.patch("influxdb.InfluxDBClient")
def test_query(mock_flux):
db = influxdb.InfluxDBClient(database="fizz")
db.query.side_effect = influxdb.exceptions.InfluxDBClientError(None)
client = InfluxAlchemy(db)
query = client.query(Measurement.new("buzz"))
assert str(query) == "SELECT * FROM buzz;"
@mock.patch("influxdb.InfluxDBClient.query")
def test_measurements(mock_flux):
mock_res = mock.MagicMock()
mock_res.get_points.return_value = [{"name": "fizz"}]
mock_flux.return_value = mock_res
db = influxdb.InfluxDBClient(database="fizz")
client = InfluxAlchemy(db)
measurements = list(client.measurements())
mock_flux.assert_called_once_with("SHOW MEASUREMENTS;")
@mock.patch("influxdb.InfluxDBClient.query")
def test_tags(mock_flux):
mock_res = mock.MagicMock()
mock_res.get_points.return_value = [{"name": "fizz"}]
mock_flux.return_value = mock_res
db = influxdb.InfluxDBClient(database="fizz")
client = InfluxAlchemy(db)
assert client.tags(Measurement.new("foo")) == ["fizz"]
@mock.patch("influxdb.InfluxDBClient.query")
def test_fields(mock_flux):
mock_res = mock.MagicMock()
mock_res.get_points.return_value = [{"name": "fizz"}]
mock_flux.return_value = mock_res
db = influxdb.InfluxDBClient(database="fizz")
client = InfluxAlchemy(db)
assert client.fields(Measurement.new("foo")) == ["fizz"]
|
Python
| 0
|
@@ -1059,38 +1059,45 @@
n_value = %5B%7B
-%22name%22: %22fizz%22
+'tagKey': 'sensor_id'
%7D%5D%0A mock_
@@ -1246,32 +1246,45 @@
nt.new(%22
-foo%22)) == %5B%22fizz
+environment%22)) == %5B%22sensor_id
%22%5D%0A%0A%0A@mo
@@ -1425,32 +1425,135 @@
alue = %5B
-%7B%22name%22: %22fizz%22%7D
+%0A %7B'fieldKey': 'humidity', 'fieldType': 'float'%7D,%0A %7B'fieldKey': 'temperature', 'fieldType': 'float'%7D%0A
%5D%0A mo
@@ -1711,23 +1711,50 @@
ew(%22
-foo%22)) == %5B%22fizz
+environment%22)) == %5B%22humidity%22, %22temperature
%22%5D%0A
|
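Decoded, the fixed tests mock the point shape that SHOW TAG KEYS and SHOW FIELD KEYS actually return ('tagKey' / 'fieldKey' entries) instead of measurement-style 'name' entries; the resulting assertions, reconstructed from the old contents plus the diff:

# test_tags
mock_res.get_points.return_value = [{'tagKey': 'sensor_id'}]
assert client.tags(Measurement.new("environment")) == ["sensor_id"]

# test_fields
mock_res.get_points.return_value = [
    {'fieldKey': 'humidity', 'fieldType': 'float'},
    {'fieldKey': 'temperature', 'fieldType': 'float'}
]
assert client.fields(Measurement.new("environment")) == ["humidity", "temperature"]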
499a74ff3256b3c6fb6a0ca4e2fd9578f2948cc8
|
correct variable names
|
tests/eguene_test.py
|
tests/eguene_test.py
|
"""
eugene_test.py
"""
import os
import sys
import numpy as np
import pandas as pd
sys.path.append('~/GitHub/eugene')
import eugene.Config
from eugene.Population import Population
# Setup up variable and truth configuration
eugene.Config.var['x'] = np.linspace(0, 8.0 * np.pi, 1024)
eugene.Config.truth = eugene.Config.var['x'] * np.sin(eugene.Config.var['x']) + eugene.Config.var['x']/2.0 + 1.61
@profile
def error_and_complexity(gene_expression, scale):
"""user fitness function, weighted combination of error and complexity"""
weights = np.array([0.95, 0.025, 0.025])
scaled_gene_expression = 1.0 / (gene_expression / scale)
return np.dot(scaled_gene_expression, weights)
# Setup Population
P = Population(
init_population_size=1000,
objective_function=error_and_complexity,
max_generations=100,
init_tree_size=2,
target=eugene.Config.truth,
pruning=False
)
# Initialize Population
P.initialize()
# Run the Population
P.run(20)
|
Python
| 0.84164
|
@@ -94,16 +94,35 @@
.append(
+os.path.expanduser(
'~/GitHu
@@ -131,16 +131,17 @@
eugene')
+)
%0A%0Aimport
@@ -251,27 +251,27 @@
gene.Config.
-var
+VAR
%5B'x'%5D = np.l
@@ -314,21 +314,21 @@
.Config.
-truth
+TRUTH
= eugen
@@ -332,27 +332,27 @@
gene.Config.
-var
+VAR
%5B'x'%5D * np.s
@@ -364,27 +364,27 @@
gene.Config.
-var
+VAR
%5B'x'%5D) + eug
@@ -394,19 +394,19 @@
.Config.
-var
+VAR
%5B'x'%5D/2.
@@ -415,16 +415,18 @@
+ 1.61%0A%0A
+#
@profile
@@ -897,21 +897,21 @@
.Config.
-truth
+TRUTH
,%0A pr
|
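Decoded, the corrections expand the user path portably, switch the Config attributes to their upper-case names, and comment out the @profile decorator; the corrected lines, reconstructed from the old contents plus the diff (wrapped here for readability):

sys.path.append(os.path.expanduser('~/GitHub/eugene'))
eugene.Config.VAR['x'] = np.linspace(0, 8.0 * np.pi, 1024)
eugene.Config.TRUTH = eugene.Config.VAR['x'] * np.sin(eugene.Config.VAR['x']) \
    + eugene.Config.VAR['x'] / 2.0 + 1.61
# @profile
target=eugene.Config.TRUTH,  # inside the Population(...) call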
b4439ef76148f73581e6df0bf593504ae796578a
|
Correct a bug in the geo-to-country-code lookup.
|
dbpedia/geoToCountry.py
|
dbpedia/geoToCountry.py
|
from urllib2 import urlopen
def getCountry(lat, lng):
url = "http://ws.geonames.org/countryCode?lng=" + str(lng) + "&lat=" + str(lat)
country = urlopen(url).read().strip()
return country
|
Python
| 0
|
@@ -175,16 +175,58 @@
strip()%0A
+%09if len(country) != 2:%0A%09%09return %22Unknown%22%0A
retu
|
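Decoded, the fix guards against non-country responses from the geonames endpoint (anything other than a two-letter code); the corrected function, reconstructed from the old contents plus the diff:

def getCountry(lat, lng):
	url = "http://ws.geonames.org/countryCode?lng=" + str(lng) + "&lat=" + str(lat)
	country = urlopen(url).read().strip()
	if len(country) != 2:  # error pages are longer than an ISO 3166 alpha-2 code
		return "Unknown"
	return country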
9c8bfff17254cf88e11517a278bb60ad4c83e41b
|
Add revised alg_strongly_connected_components.py
|
alg_strongly_connected_components.py
|
alg_strongly_connected_components.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur(adj_dict, start_vertex, visited_set,
discover_ls, finish_ls):
visited_set.add(start_vertex)
discover_ls.append(start_vertex)
for neighbor_vertex in adj_dict[start_vertex]:
if neighbor_vertex not in visited_set:
dfs_recur(adj_dict, neighbor_vertex, visited_set,
discover_ls, finish_ls)
finish_ls.insert(0, start_vertex)
def traverse_dfs_recur(adj_dict):
visited_set = set()
discover_ls = []
finish_ls = []
for vertex in adj_dict:
if vertex not in visited_set:
dfs_recur(adj_dict, vertex, visited_set,
discover_ls, finish_ls)
return discover_ls, finish_ls
def transpose_graph(adj_dict):
tr_adj_dict = {}
for vertex in adj_dict:
tr_adj_dict[vertex] = []
for vertex in adj_dict:
for neighbor_vertex in adj_dict[vertex]:
tr_adj_dict[neighbor_vertex].append(vertex)
return tr_adj_dict
def strongly_connected_components(adj_dict):
"""Find strongly connected graphs by Kosaraju's Algorithm."""
discover_ls, finish_ls = traverse_dfs_recur(adj_dict)
print('discover_ls for G: {}'.format(discover_ls))
print('finish_ls for G: {}'.format(finish_ls))
tr_adj_dict = transpose_graph(adj_dict)
print('G^T: {}'.format(tr_adj_dict))
print('strongly connected components:')
scc_visited_set = set()
for vertex in finish_ls:
scc_discover_ls = []
scc_finish_ls = []
if vertex not in scc_visited_set:
dfs_recur(tr_adj_dict, vertex, scc_visited_set,
scc_discover_ls, scc_finish_ls)
print('scc_discover_ls: {}'.format(scc_discover_ls))
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -112,1724 +112,49 @@
def
-dfs_recur(adj_dict, start_vertex, visited_set, %0A discover_ls, finish_ls):%0A visited_set.add(start_vertex)%0A discover_ls.append(start_vertex)%0A for neighbor_vertex in adj_dict%5Bstart_vertex%5D:%0A if neighbor_vertex not in visited_set:%0A dfs_recur(adj_dict, neighbor_vertex, visited_set, %0A discover_ls, finish_ls)%0A finish_ls.insert(0, start_vertex)%0A%0Adef traverse_dfs_recur(adj_dict):%0A visited_set = set()%0A discover_ls = %5B%5D%0A finish_ls = %5B%5D%0A for vertex in adj_dict:%0A if vertex not in visited_set:%0A dfs_recur(adj_dict, vertex, visited_set, %0A discover_ls, finish_ls)%0A return discover_ls, finish_ls%0A%0Adef transpose_graph(adj_dict):%0A tr_adj_dict = %7B%7D%0A%0A for vertex in adj_dict:%0A tr_adj_dict%5Bvertex%5D = %5B%5D%0A%0A for vertex in adj_dict:%0A for neighbor_vertex in adj_dict%5Bvertex%5D:%0A tr_adj_dict%5Bneighbor_vertex%5D.append(vertex)%0A%0A return tr_adj_dict%0A%0Adef strongly_connected_components(adj_dict):%0A %22%22%22Find strongly connected graphs by Kosaraju's Algorithm.%22%22%22%0A discover_ls, finish_ls = traverse_dfs_recur(adj_dict)%0A print('discover_ls for G: %7B%7D'.format(discover_ls))%0A print('finish_ls for G: %7B%7D'.format(finish_ls))%0A%0A tr_adj_dict = transpose_graph(adj_dict)%0A print('G%5ET: %7B%7D'.format(tr_adj_dict))%0A%0A print('strongly connected components:')%0A scc_visited_set = set()%0A for vertex in finish_ls:%0A scc_discover_ls = %5B%5D%0A scc_finish_ls = %5B%5D%0A if vertex not in scc_visited_set:%0A dfs_recur(tr_adj_dict, vertex, scc_visited_set, %0A scc_discover_ls, scc_finish_ls)%0A print('scc_discover_ls: %7B%7D'.format(scc_discover_ls))
+strongly_connected_components():%0A pass
%0A%0A%0Ad
|
580f6d4164d2cbdc9ee8594c3e1f71a1055483cd
|
Remove superfluous log message in authz realms code
|
anchore_engine/subsys/auth/realms.py
|
anchore_engine/subsys/auth/realms.py
|
from yosai.core.realm.realm import AccountStoreRealm
from yosai.core.authz.authz import WildcardPermission, DefaultPermission
import rapidjson
from anchore_engine.db import AccountTypes
from anchore_engine.plugins.authorization.client import AuthzPluginHttpClient, Action
from anchore_engine.subsys import logger
class CaseSensitivePermission(DefaultPermission):
def __init__(self, wildcard_string=None, parts=None, case_sensitive=True):
# Replace constructor with code from the WildcardPermission constructor directly, but with parts init from DefaultPermission
# This is necessary to get the case-sensitivity to init properly since the Default->Wildcard path messes it up
self.case_sensitive = case_sensitive
self.parts = {'domain': {'*'}, 'action': {'*'}, 'target': {'*'}}
if wildcard_string:
self.setparts(wildcard_string, case_sensitive)
else:
self.parts = {'domain': set([parts.get('domain', '*')]),
'action': set(parts.get('action', '*')),
'target': set(parts.get('target', '*'))}
class AnchoreNativeRealm(AccountStoreRealm):
"""
    Customized version of the default AccountStoreRealm.
This is required to get case-sensitive permission behavior, which is not supported by default.
"""
def is_permitted(self, identifiers, permission_s):
"""
If the authorization info cannot be obtained from the accountstore,
permission check tuple yields False.
:type identifiers: subject_abcs.IdentifierCollection
:param permission_s: a collection of one or more permissions, represented
as string-based permissions or Permission objects
and NEVER comingled types
:type permission_s: list of string(s)
:yields: tuple(Permission, Boolean)
"""
identifier = identifiers.primary_identifier
for required_perm in permission_s:
required_permission = CaseSensitivePermission(wildcard_string=required_perm)
logger.info("Requires: {} w/case {}".format(required_permission, required_permission.case_sensitive))
# get_authzd_permissions returns a list of DefaultPermission instances,
# requesting from cache using '*' and permission.domain as hash keys:
domain = next(iter(required_permission.domain))
assigned_permission_s = self.get_authzd_permissions(identifier, domain)
is_permitted = False
for authorized_permission in assigned_permission_s:
if authorized_permission.implies(required_permission):
is_permitted = True
break
yield (required_perm, is_permitted)
def get_authzd_permissions(self, identifier, perm_domain):
"""
:type identifier: str
:type domain: str
:returns: a list of relevant DefaultPermission instances (permission_s)
"""
permission_s = []
related_perms = []
keys = ['*', perm_domain]
def query_permissions(self):
msg = ("Could not obtain cached permissions for [{0}]. "
"Will try to acquire permissions from account store."
.format(identifier))
logger.debug(msg)
permissions = self.account_store.get_authz_permissions(identifier)
if not permissions:
msg = "Could not get permissions from account_store for {0}". \
format(identifier)
raise ValueError(msg)
return permissions
try:
msg2 = ("Attempting to get cached authz_info for [{0}]"
.format(identifier))
logger.debug(msg2)
domain = 'authorization:permissions:' + self.name
related_perms = self.cache_handler. \
hmget_or_create(domain=domain,
identifier=identifier,
keys=keys,
creator_func=query_permissions,
creator=self)
except ValueError:
msg3 = ("No permissions found for identifiers [{0}]. "
"Returning None.".format(identifier))
logger.warning(msg3)
except AttributeError:
# this means the cache_handler isn't configured
queried_permissions = query_permissions(self)
related_perms = [queried_permissions.get('*'),
queried_permissions.get(perm_domain)]
for perms in related_perms:
# must account for None values:
try:
for parts in rapidjson.loads(perms):
permission_s.append(CaseSensitivePermission(parts=parts))
except (TypeError, ValueError):
pass
return permission_s
class ExternalAuthzRealm(AnchoreNativeRealm):
"""
A realm for doing external authz and internal authc
    __client__ is the initialized HTTP client for requesting authorization
__account_type_provider__ is a callable that takes a single parameter: username and returns the account type
"""
__client__ = None
__account_type_provider__ = None
@classmethod
def init_realm(cls, config, account_lookup_fn):
logger.debug('Configuring realm with config: {}'.format(config))
cls.__client__ = AuthzPluginHttpClient(url=config.get('endpoint'), verify_ssl=config.get('verify_ssl'))
cls.__account_type_provider__ = account_lookup_fn
def is_permitted(self, identifiers, permission_s):
"""
:type identifiers: SimpleRealmCollection
"""
# If a service account or admin account user, use the default handler, not external calls
if ExternalAuthzRealm.__account_type_provider__ and callable(ExternalAuthzRealm.__account_type_provider__) and \
ExternalAuthzRealm.__account_type_provider__(identifiers.primary_identifier) in [AccountTypes.service, AccountTypes.admin]:
logger.debug('Detected admin or service account, using internal authz')
return super().is_permitted(identifiers, permission_s)
result_list = [] # List of tuples (required_perm, is_permitted)
identifier = identifiers.primary_identifier
actions = {}
for required_perm in permission_s:
required_permission = CaseSensitivePermission(wildcard_string=required_perm)
actions[Action(domain=','.join(required_permission.domain), action=','.join(required_permission.action), target=','.join(required_permission.target))] = required_perm
if actions:
try:
resp = self.__client__.authorize(principal=identifier, action_s=list(actions.keys()))
for i in resp.allowed:
result_list.append((actions[i], True))
for i in resp.denied:
result_list.append((actions[i], False))
except Exception as e:
logger.exception('Unexpected error invoking authorization plugin via client: {}'.format(e))
logger.error('Authorization plugin invocation error. Could not perform a proper authz check. Please check configuration and/or authz service status: {}'.format(self.__client__.url))
raise e
return result_list
|
Python
| 0.000003
|
@@ -2093,122 +2093,8 @@
erm)
-%0A logger.info(%22Requires: %7B%7D w/case %7B%7D%22.format(required_permission, required_permission.case_sensitive))
%0A%0A
|
d41f30419e6acf6d51b14ee7efc2260f6fc0485f
|
Return exit code 1 on docker build image failure
|
scripts/build-images.py
|
scripts/build-images.py
|
import os, re
from argparse import ArgumentParser
from glob import glob
from subprocess import call
class DockerImage:
def __init__(self, dockerfile, versioned=False):
self.dockerfile = dockerfile
self.dockerfile_folder = os.path.dirname(self.dockerfile)
self.versioned = versioned
self._set_name()
self._set_parent_name()
def _set_parent_name(self):
df = open(self.dockerfile).read()
m = re.search('FROM\s+([a-z0-9\/\-\:]+)', df)
self.parent_name = (m.group(1)).strip()
def _set_name(self):
df_location = os.path.dirname(self.dockerfile)
self.name = os.path.split(df_location)[1]
self.full_name = "apiaryio/" + self.name
if self.versioned:
self.tag = self.name
self.name = os.path.split(os.path.dirname(df_location))[1]
self.full_name = "apiaryio/{0}:{1}".format(self.name, self.tag)
def update_images_to_rebuild(rebuild_all, changed_files=None):
if rebuild_all or not changed_files:
print('Rebuilding all images')
images_to_rebuild.update([image.full_name for image in all_images.values()])
else:
print('Selecting images for rebuild')
for cf in changed_files:
if 'scripts/' in cf:
print('A core script has changes, rebuilding all images...')
update_images_to_rebuild(True, all_images)
break
else:
for image in all_images.values():
if image.name in cf:
print('Adding {0} because {1} has changed'.format(image.full_name, cf))
images_to_rebuild.add(image.full_name)
if len(images_to_rebuild) != len(all_images):
for image in all_images.values():
if image.parent_name in images_to_rebuild:
images_to_rebuild.add(image.full_name)
def add_image_ordered(image_name):
image = all_images[image_name]
if image.parent_name in images_to_rebuild:
parent_image = all_images[image.parent_name]
add_image_ordered(parent_image.full_name)
if image not in sorted_images_to_rebuild:
sorted_images_to_rebuild.append(image)
all_images = {}
images_to_rebuild = set()
sorted_images_to_rebuild = []
parser = ArgumentParser()
parser.add_argument('-a', '--rebuild-all')
parser.add_argument('-f', '--changed-files')
args = parser.parse_args()
rebuild_all = args.rebuild_all == '1'
changed_files = []
if args.changed_files:
changed_files = args.changed_files.split('\n')
dockerfiles = glob("./*/Dockerfile")
dockerfiles_versioned = glob("./*/*/Dockerfile")
for df in dockerfiles:
image = DockerImage(df)
all_images[image.full_name] = image
for df in dockerfiles_versioned:
image = DockerImage(df, versioned=True)
all_images[image.full_name] = image
update_images_to_rebuild(rebuild_all, changed_files)
for image_name in images_to_rebuild:
add_image_ordered(image_name)
if not sorted_images_to_rebuild:
print('No images need to be rebuilt')
else:
print('These images will be rebuilt (in this order): ' + ', '.join([image.full_name for image in sorted_images_to_rebuild]))
for image in sorted_images_to_rebuild:
print('Building ' + image.full_name)
return_code = call("docker build -t {0} -f {1} {2}".format(image.full_name, image.dockerfile, image.dockerfile_folder), shell=True)
if return_code != 0:
print('Error building {0}')
break
print("Squashing {0}...".format(image.full_name))
call("docker save {0} > \"/tmp/{1}.tar\"".format(image.full_name, image.name), shell=True)
call("sudo docker-squash -i \"/tmp/{0}.tar\" -o \"/tmp/{0}-squashed.tar\"".format(image.name), shell=True)
call("cat \"/tmp/{0}-squashed.tar\" | docker load".format(image.name), shell=True)
print("Squashed {0}".format(image.full_name))
tmp_image_file = open("/tmp/images", 'w')
tmp_image_file.write('{0}'.format(" ".join([image.full_name for image in sorted_images_to_rebuild])))
|
Python
| 0.000056
|
@@ -6,16 +6,21 @@
t os, re
+, sys
%0A%0Afrom a
@@ -2606,16 +2606,37 @@
it('%5Cn')
+%0Aprint(changed_files)
%0A%0Adocker
@@ -3557,16 +3557,40 @@
ing %7B0%7D'
+.format(image.full_name)
)%0A
@@ -3587,37 +3587,43 @@
e))%0A
-break
+sys.exit(1)
%0A print(%22
|
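Decoded, the change imports sys, echoes the changed-files list, completes the error message, and replaces break with a hard failure so the caller sees a non-zero exit; the key lines, reconstructed from the old contents plus the diff:

import os, re, sys

print(changed_files)  # right after args.changed_files is split

if return_code != 0:
    print('Error building {0}'.format(image.full_name))
    sys.exit(1)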
5b5081ca6bba90f2e15022d2b6e3e293e3a9cc11
|
test if _search_final_redirect function finds last_redirects.txt
|
tests/simple_test.py
|
tests/simple_test.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from har2tree import CrawledTree
from pathlib import Path
import datetime
import os
import uuid
class SimpleTest(unittest.TestCase):
http_redirect_ct: CrawledTree
@classmethod
def setUpClass(cls) -> None:
test_dir = Path(os.path.abspath(os.path.dirname(__file__))) / 'capture_samples' / 'http_redirect'
har_to_process = [test_dir / '0.har']
# ct means CrawledTree
cls.http_redirect_ct = CrawledTree(har_to_process, str(uuid.uuid4()))
def test_root_url(self) -> None:
self.assertEqual(self.http_redirect_ct.root_url, 'https://lookyloo-testing.herokuapp.com/redirect_http')
def test_user_agent(self) -> None:
self.assertEqual(self.http_redirect_ct.user_agent, "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534+ (KHTML, like Gecko) BingPreview/1.0b")
def test_redirects(self) -> None:
self.assertEqual(self.http_redirect_ct.redirects[1], "https://www.youtube.com/watch?v=iwGFalTRHDA")
def test_start_time(self) -> None:
self.assertEqual(self.http_redirect_ct.start_time, datetime.datetime(2021, 4, 22, 15, 57, 51, 686108, tzinfo=datetime.timezone.utc))
def test_root_referer(self) -> None:
self.assertEqual(self.http_redirect_ct.root_hartree.root_referer, '')
def test_stats(self) -> None:
self.assertEqual(self.http_redirect_ct.root_hartree.stats, {'total_hostnames': 5, 'total_urls': 7, 'total_cookies_sent': 1, 'total_cookies_received': 1})
def test_root_after_redirect(self) -> None:
self.assertEqual(self.http_redirect_ct.root_hartree.root_after_redirect, "https://consent.youtube.com/ml?continue=https://www.youtube.com/watch?v=iwGFalTRHDA&gl=LU&hl=en&pc=yt&uxe=23983172&src=1")
def test_rendered_node_name_equals_last_redirect(self) -> None:
self.assertEqual(self.http_redirect_ct.root_hartree.rendered_node.name, self.http_redirect_ct.root_hartree.har.final_redirect)
def test_tree_start_time_equals_har_start_time(self) -> None:
        # Need some formatting: ct.start_time is natively a datetime object, and
        # the har start_time contains a 'T' before the time that would mess up the comparison
tree_start_time = self.http_redirect_ct.start_time.strftime("%Y-%m-%d %H:%M:%S")
har_start_time = self.http_redirect_ct.root_hartree.har.initial_start_time[0:19].replace('T', ' ')
self.assertEqual(tree_start_time, har_start_time)
def test_initial_redirect_equals_final_redirect(self) -> None:
# That's normally the case in this capture
self.assertEqual(self.http_redirect_ct.root_hartree.har.initial_redirects[0], self.http_redirect_ct.root_hartree.har.final_redirect)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000265
|
@@ -1832,24 +1832,355 @@
&src=1%22)%0D%0A%0D%0A
+ def test_search_final_redirect(self) -%3E None:%0D%0A self.http_redirect_ct.root_hartree.har._search_final_redirect()%0D%0A self.assertEqual(self.http_redirect_ct.root_hartree.har.final_redirect, %22https://consent.youtube.com/ml?continue=https://www.youtube.com/watch?v=iwGFalTRHDA&gl=LU&hl=en&pc=yt&uxe=23983172&src=1%22)%0D%0A%0D%0A
def test
@@ -3147,22 +3147,16 @@
ect)%0D%0A%0D%0A
- %0D%0A
if __nam
|
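Decoded, the new test exercises _search_final_redirect() directly and checks that it lands on the same consent URL asserted above; the added test, reconstructed from the diff:

def test_search_final_redirect(self) -> None:
    self.http_redirect_ct.root_hartree.har._search_final_redirect()
    self.assertEqual(self.http_redirect_ct.root_hartree.har.final_redirect,
                     "https://consent.youtube.com/ml?continue=https://www.youtube.com/watch?v=iwGFalTRHDA&gl=LU&hl=en&pc=yt&uxe=23983172&src=1")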
687bb616deca1372d69ba0781c61a8ea62112426
|
Allow RO commands in oq workers
|
openquake/commands/workers.py
|
openquake/commands/workers.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017-2019 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import sys
import getpass
from openquake.baselib import sap, config, workerpool
@sap.script
def workers(cmd):
"""
start/stop/restart the workers, or return their status
"""
if config.dbserver.multi_user and getpass.getuser() != 'openquake':
sys.exit('oq workers only works in single user mode')
master = workerpool.WorkerMaster(config.dbserver.host,
**config.zworkers)
print(getattr(master, cmd)())
workers.arg('cmd', 'command',
choices='start stop status restart inspect'.split())
|
Python
| 0.000026
|
@@ -834,16 +834,54 @@
rpool%0A%0A%0A
+ro_commands = ('status', 'inspect')%0A%0A%0A
@sap.scr
@@ -984,16 +984,44 @@
%0A if
+(cmd not in ro_commands and
config.d
@@ -1042,16 +1042,28 @@
user and
+%0A
getpass
@@ -1087,16 +1087,17 @@
enquake'
+)
:%0A
|
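Decoded, read-only commands now bypass the single-user guard; the resulting module-level tuple and guard, reconstructed from the old contents plus the diff:

ro_commands = ('status', 'inspect')

@sap.script
def workers(cmd):
    """
    start/stop/restart the workers, or return their status
    """
    if (cmd not in ro_commands and config.dbserver.multi_user and
            getpass.getuser() != 'openquake'):
        sys.exit('oq workers only works in single user mode')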
a4b84eb95d35fefa2dde356cb31eb888c4110e00
|
Fix a flaky test in PyPy.
|
tests/test__patch.py
|
tests/test__patch.py
|
import time
import unittest
import mongomock
try:
import pymongo
_HAVE_PYMONGO = True
except ImportError:
_HAVE_PYMONGO = False
try:
from unittest import mock
except ImportError:
import mock
@unittest.skipIf(not _HAVE_PYMONGO, 'pymongo not installed')
class PatchTest(unittest.TestCase):
"""Test the use of the patch function.
Test functions in this test are embedded in inner function so that the
patch decorator are only called at testing time.
"""
@mongomock.patch()
def test__decorator(self):
client1 = pymongo.MongoClient()
client1.db.coll.insert_one({'name': 'Pascal'})
client2 = pymongo.MongoClient()
self.assertEqual('Pascal', client2.db.coll.find_one()['name'])
client2.db.coll.drop()
self.assertEqual(None, client1.db.coll.find_one())
@mongomock.patch(on_new='create')
def test__create_new(self):
client1 = pymongo.MongoClient('myserver.example.com', port=12345)
client1.db.coll.insert_one({'name': 'Pascal'})
client2 = pymongo.MongoClient(host='myserver.example.com', port=12345)
self.assertEqual('Pascal', client2.db.coll.find_one()['name'])
@mongomock.patch()
def test__error_new(self):
        # Valid because it uses the default server, which is whitelisted by default.
pymongo.MongoClient()
with self.assertRaises(ValueError):
pymongo.MongoClient('myserver.example.com', port=12345)
@mongomock.patch((
'mongodb://myserver.example.com:12345',
'mongodb://otherserver.example.com:27017/default-db',
'mongodb://[2001:67c:2e8:22::c100:68b]',
'mongodb://[2001:67c:2e8:22::c100:68b]:1234',
'mongodb://r1.example.net:27017,r2.example.net:27017/'))
def test__create_servers(self):
pymongo.MongoClient('myserver.example.com', port=12345)
pymongo.MongoClient('otherserver.example.com')
pymongo.MongoClient('[2001:67c:2e8:22::c100:68b]')
pymongo.MongoClient('mongodb://[2001:67c:2e8:22::c100:68b]:27017/base')
pymongo.MongoClient('[2001:67c:2e8:22::c100:68b]', port=1234)
pymongo.MongoClient('r1.example.net')
with self.assertRaises(ValueError):
pymongo.MongoClient()
@mongomock.patch(on_new='timeout')
@mock.patch(time.__name__ + '.sleep')
def test__create_timeout(self, mock_sleep):
pymongo.MongoClient()
with self.assertRaises(pymongo.errors.ServerSelectionTimeoutError):
client = pymongo.MongoClient('myserver.example.com', port=12345)
client.db.coll.insert_one({'name': 'Pascal'})
mock_sleep.assert_called_once_with(30000)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000003
|
@@ -2415,32 +2415,65 @@
.MongoClient()%0A%0A
+ mock_sleep.reset_mock()%0A%0A
with sel
|
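Decoded, the flakiness fix resets the sleep mock after the whitelisted default client is created, so the final assertion only counts the sleep from the timing-out connection; the resulting body, reconstructed from the old contents plus the diff:

pymongo.MongoClient()

mock_sleep.reset_mock()  # drop sleeps recorded while creating the default client

with self.assertRaises(pymongo.errors.ServerSelectionTimeoutError):
    client = pymongo.MongoClient('myserver.example.com', port=12345)
    client.db.coll.insert_one({'name': 'Pascal'})
mock_sleep.assert_called_once_with(30000)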
19b0b1ed7e94ae4bb05f57baf3163850a64df8f9
|
Test export commands
|
opensfm/test/test_commands.py
|
opensfm/test/test_commands.py
|
import argparse
from opensfm import commands
from opensfm.test import data_generation
def run_command(command, args):
parser = argparse.ArgumentParser()
command.add_arguments(parser)
parsed_args = parser.parse_args(args)
command.run(parsed_args)
def test_run_all(tmpdir):
data = data_generation.create_berlin_test_folder(tmpdir)
run_all_commands = [
commands.extract_metadata,
commands.detect_features,
commands.match_features,
commands.create_tracks,
commands.reconstruct,
commands.bundle,
commands.mesh,
commands.undistort,
commands.compute_depthmaps,
commands.export_ply,
commands.export_visualsfm,
]
for module in run_all_commands:
command = module.Command()
run_command(command, [data.data_path])
reconstruction = data.load_reconstruction()
assert len(reconstruction[0].shots) == 3
assert len(reconstruction[0].points) > 1000
|
Python
| 0.000003
|
@@ -713,16 +713,143 @@
ualsfm,%0A
+ commands.export_openmvs,%0A commands.export_pmvs,%0A commands.export_bundler,%0A commands.export_colmap%0A
%5D%0A%0A
|
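Decoded, the test now also runs the remaining exporters; the extended tail of run_all_commands, reconstructed from the old contents plus the diff:

commands.export_ply,
commands.export_visualsfm,
commands.export_openmvs,
commands.export_pmvs,
commands.export_bundler,
commands.export_colmap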
dcf07a4e538e0d97f1c04dc11d12f7dee9a91f11
|
add docs test
|
tests/test_client.py
|
tests/test_client.py
|
import json
from functools import partial
from nose.tools import ok_, eq_, nottest
from solnado import SolrClient
from tornado import gen
from tornado.testing import AsyncTestCase, gen_test
class ClientTestCase(AsyncTestCase):
def setUp(self):
super(ClientTestCase, self).setUp()
self.client = SolrClient()
def test_mk_req(self):
self.assertEquals(self.client.base_url, self.client.mk_req('').url)
self.assertEquals('GET', self.client.mk_req('').method)
def test_mk_url(self):
url = self.client.mk_url(*['a','b','c'], **{'key':'value'})
self.assertEquals('/a/b/c?key=value', url)
@gen_test(timeout=10)
def test_create_collection(self):
p = partial(self.client.create_collection, 'fox', **{'collection_kwargs':{'numShards':1}})
res = yield gen.Task(p)
eq_(200, res.code)
p = partial(self.client.delete_collection, 'fox')
yield gen.Task(p)
@gen_test(timeout=10)
def test_core_status(self):
res = yield gen.Task(partial(self.client.core_status))
ok_(json.loads(res.body.decode('utf8')))
eq_(200, res.code)
@gen_test(timeout=10)
def test_core_create(self):
res = yield gen.Task(partial(self.client.core_create, 'test'))
ok_(json.loads(res.body.decode('utf8')))
eq_(200, res.code)
yield gen.Task(partial(self.client.core_unload, 'test'))
yield gen.Task(partial(self.client.core_reload, 'test'))
@gen_test(timeout=15)
def test_core_reload(self):
yield gen.Task(partial(self.client.core_create, 't'))
res = yield gen.Task(partial(self.client.core_reload, 't'))
ok_(json.loads(res.body.decode('utf8')))
eq_(200, res.code)
yield gen.Task(partial(self.client.core_unload, 't'))
yield gen.Task(partial(self.client.core_reload, 't'))
#@gen_test(timeout=25)
#def test_core_rename(self):
# yield gen.Task(partial(self.client.core_create, 'baz'))
# yield gen.Task(partial(self.client.core_reload, 'baz'))
# res = yield gen.Task(partial(self.client.core_rename, 'baz', 'qux'))
# eq_(200, res.code)
# print(res.body)
# yield gen.Task(partial(self.client.core_reload, 'baz'))
# yield gen.Task(partial(self.client.core_reload, 'qux'))
# yield gen.Task(partial(self.client.core_unload, 'qux'))
# yield gen.Task(partial(self.client.core_reload, 'qux'))
@gen_test
def test_add_json_document(self):
d = {"id":"123", "title":"test_add"}
yield gen.Task(partial(self.client.core_create, 'add_j'))
yield gen.Task(partial(self.client.core_reload, 'add_j'))
res = yield gen.Task(partial(self.client.add_json_document, 'add_j', d))
ok_(json.loads(res.body.decode('utf8')))
eq_(200, res.code)
|
Python
| 0
|
@@ -2823,28 +2823,497 @@
%0A eq_(200, res.code)%0A
+%0A @gen_test%0A def test_add_json_documents(self):%0A d = %5B%0A %7B%22id%22:%22123%22, %22title%22:%22test_add%22%7D,%0A %7B%22id%22:%22456%22, %22title%22:%22bar_baz%22%7D,%0A %5D%0A yield gen.Task(partial(self.client.core_create, 'add_docs'))%0A yield gen.Task(partial(self.client.core_reload, 'add_docs'))%0A%0A res = yield gen.Task(partial(self.client.add_json_document, 'add_docs', d))%0A%0A ok_(json.loads(res.body.decode('utf8')))%0A eq_(200, res.code)%0A
|
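Decoded, the added test posts a list of documents through add_json_document against a fresh core; the new test, reconstructed from the diff:

@gen_test
def test_add_json_documents(self):
    d = [
        {"id":"123", "title":"test_add"},
        {"id":"456", "title":"bar_baz"},
    ]
    yield gen.Task(partial(self.client.core_create, 'add_docs'))
    yield gen.Task(partial(self.client.core_reload, 'add_docs'))

    res = yield gen.Task(partial(self.client.add_json_document, 'add_docs', d))

    ok_(json.loads(res.body.decode('utf8')))
    eq_(200, res.code)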
823189201f00ceefcd55ebf2c2eb20e7ac8aeee5
|
Fix cloner test
|
tests/test_cloner.py
|
tests/test_cloner.py
|
#!/usr/bin/env python
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2014 Wikimedia Foundation Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import shutil
import git
import zuul.lib.cloner
from tests.base import ZuulTestCase
from tests.base import FIXTURE_DIR
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-32s '
'%(levelname)-8s %(message)s')
class TestCloner(ZuulTestCase):
log = logging.getLogger("zuul.test.cloner")
workspace_root = None
def setUp(self):
super(TestCloner, self).setUp()
self.workspace_root = os.path.join(self.test_root, 'workspace')
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-gating.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
def test_cloner(self):
self.worker.hold_jobs_in_build = True
A = self.fake_gerrit.addFakeChange('org/project1', 'master', 'A')
B = self.fake_gerrit.addFakeChange('org/project2', 'master', 'B')
A.addPatchset(['project_one.txt'])
B.addPatchset(['project_two.txt'])
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
A.addApproval('CRVW', 2)
B.addApproval('CRVW', 2)
self.fake_gerrit.addEvent(A.addApproval('APRV', 1))
self.fake_gerrit.addEvent(B.addApproval('APRV', 1))
self.waitUntilSettled()
self.assertEquals(2, len(self.builds), "Two builds are running")
a_zuul_ref = b_zuul_ref = None
for build in self.builds:
self.log.debug("Build parameters: %s", build.parameters)
if build.parameters['ZUUL_CHANGE'] == '1':
a_zuul_ref = build.parameters['ZUUL_REF']
a_zuul_commit = build.parameters['ZUUL_COMMIT']
if build.parameters['ZUUL_CHANGE'] == '2':
b_zuul_ref = build.parameters['ZUUL_REF']
b_zuul_commit = build.parameters['ZUUL_COMMIT']
self.worker.hold_jobs_in_build = False
self.worker.release()
self.waitUntilSettled()
# Repos setup, now test the cloner
for zuul_ref in [a_zuul_ref, b_zuul_ref]:
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=['org/project1', 'org/project2'],
workspace=self.workspace_root,
zuul_branch='master',
zuul_ref=zuul_ref,
zuul_url=self.git_root,
branch='master',
clone_map_file=os.path.join(FIXTURE_DIR, 'clonemap.yaml')
)
cloner.execute()
work_repo1 = git.Repo(os.path.join(self.workspace_root,
'org/project1'))
self.assertEquals(a_zuul_commit, str(work_repo1.commit('HEAD')))
work_repo2 = git.Repo(os.path.join(self.workspace_root,
'org/project2'))
self.assertEquals(b_zuul_commit, str(work_repo2.commit('HEAD')))
shutil.rmtree(self.workspace_root)
|
Python
| 0.999985
|
@@ -1613,224 +1613,8 @@
B')%0A
-%0A A.addPatchset(%5B'project_one.txt'%5D)%0A B.addPatchset(%5B'project_two.txt'%5D)%0A self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))%0A self.fake_gerrit.addEvent(B.getPatchsetCreatedEvent(1))%0A%0A
@@ -1907,47 +1907,8 @@
%22)%0A%0A
- a_zuul_ref = b_zuul_ref = None%0A
@@ -2022,553 +2022,60 @@
-if build.parameters%5B'ZUUL_CHANGE'%5D == '1':%0A a_zuul_ref = build.parameters%5B'ZUUL_REF'%5D%0A a_zuul_commit = build.parameters%5B'ZUUL_COMMIT'%5D%0A if build.parameters%5B'ZUUL_CHANGE'%5D == '2':%0A b_zuul_ref = build.parameters%5B'ZUUL_REF'%5D%0A b_zuul_commit = build.parameters%5B'ZUUL_COMMIT'%5D%0A%0A self.worker.hold_jobs_in_build = False%0A self.worker.release()%0A self.waitUntilSettled()%0A%0A # Repos setup, now test the cloner%0A for zuul_ref in %5Ba_zuul_ref, b_zuul_ref%5D:
+change_number = int(build.parameters%5B'ZUUL_CHANGE'%5D)
%0A
@@ -2334,24 +2334,44 @@
uul_ref=
-zuul_ref
+build.parameters%5B'ZUUL_REF'%5D
,%0A
@@ -2441,82 +2441,8 @@
r',%0A
- clone_map_file=os.path.join(FIXTURE_DIR, 'clonemap.yaml')%0A
@@ -2616,86 +2616,8 @@
'))%0A
- self.assertEquals(a_zuul_commit, str(work_repo1.commit('HEAD')))%0A%0A
@@ -2760,40 +2760,483 @@
-self.assertEquals(b_zuul_commit,
+if change_number %3E= 1:%0A self.assertEquals(%0A self.builds%5B0%5D.parameters%5B'ZUUL_COMMIT'%5D,%0A str(work_repo1.commit('HEAD')))%0A if change_number %3E= 2:%0A self.assertEquals(%0A self.builds%5B1%5D.parameters%5B'ZUUL_COMMIT'%5D,%0A str(work_repo2.commit('HEAD')))%0A else:%0A self.assertEquals(str(work_repo2.commit('master')),%0A
str
@@ -3264,17 +3264,16 @@
EAD')))%0A
-%0A
@@ -3311,8 +3311,118 @@
e_root)%0A
+%0A self.worker.hold_jobs_in_build = False%0A self.worker.release()%0A self.waitUntilSettled()%0A
|
a047ffe078d0e383512e269123a22e6da8ece074
|
use unittest.mock if available.
|
tests/test_colors.py
|
tests/test_colors.py
|
# Copyright © 2015 Jakub Wilk <jwilk@jwilk.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import mock
import io
from nose.tools import (
assert_equal,
)
from lib import colors as M
def with_stdout(encoding):
stdout = io.TextIOWrapper(
io.StringIO(),
encoding=encoding,
)
return mock.patch('sys.stdout', stdout)
@with_stdout('UTF-8')
def test_control_characters():
def t(s, x):
r = M.escape(s)
assert_equal(r, '\x1b[7m' + x + '\x1b[27m')
t('\x00', '^@')
t('\x01', '^A')
t('\x02', '^B')
t('\x03', '^C')
t('\x04', '^D')
t('\x05', '^E')
t('\x06', '^F')
t('\x07', '^G')
t('\x08', '^H')
t('\x09', '^I')
t('\x0A', '^J')
t('\x0B', '^K')
t('\x0C', '^L')
t('\x0D', '^M')
t('\x0E', '^N')
t('\x0F', '^O')
t('\x10', '^P')
t('\x11', '^Q')
t('\x12', '^R')
t('\x13', '^S')
t('\x14', '^T')
t('\x15', '^U')
t('\x16', '^V')
t('\x17', '^W')
t('\x18', '^X')
t('\x19', '^Y')
t('\x1A', '^Z')
t('\x1B', '^[')
t('\x1C', '^\\')
t('\x1D', '^]')
t('\x1E', '^^')
t('\x1F', '^_')
t('\x7F', '^?')
t('\x80', '<U+0080>')
t('\x81', '<U+0081>')
t('\x82', '<U+0082>')
t('\x83', '<U+0083>')
t('\x84', '<U+0084>')
t('\x85', '<U+0085>')
t('\x86', '<U+0086>')
t('\x87', '<U+0087>')
t('\x88', '<U+0088>')
t('\x89', '<U+0089>')
t('\x8A', '<U+008A>')
t('\x8B', '<U+008B>')
t('\x8C', '<U+008C>')
t('\x8D', '<U+008D>')
t('\x8E', '<U+008E>')
t('\x8F', '<U+008F>')
t('\x90', '<U+0090>')
t('\x91', '<U+0091>')
t('\x92', '<U+0092>')
t('\x93', '<U+0093>')
t('\x94', '<U+0094>')
t('\x95', '<U+0095>')
t('\x96', '<U+0096>')
t('\x97', '<U+0097>')
t('\x98', '<U+0098>')
t('\x99', '<U+0099>')
t('\x9A', '<U+009A>')
t('\x9B', '<U+009B>')
t('\x9C', '<U+009C>')
t('\x9D', '<U+009D>')
t('\x9E', '<U+009E>')
t('\x9F', '<U+009F>')
@with_stdout('UTF-8')
def test_escape_safe():
def t(s):
r = M.escape(s)
assert_equal(r, s)
t('A')
t('Á')
# vim:ts=4 sts=4 sw=4 et
|
Python
| 0
|
@@ -11,16 +11,21 @@
t %C2%A9 2015
+-2016
Jakub W
@@ -1104,16 +1104,91 @@
TWARE.%0A%0A
+try:%0A import unittest.mock as mock%0Aexcept ImportError:%0A import mock%0A%0A
import m
|
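The diff above adds the standard compatibility shim for the mock library: mock was absorbed into the standard library as unittest.mock in Python 3.3, so the stdlib module is preferred and the third-party backport is only imported when it is missing. A minimal sketch of the idiom:

# Prefer the stdlib module (Python >= 3.3); fall back to the
# third-party backport on older interpreters.
try:
    import unittest.mock as mock
except ImportError:
    import mock

Either way, the rest of the module can keep calling mock.patch(...) unchanged.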
a4afaf066e2c90ea86f3462a13bc3e3045369024
|
change to doc
|
signapp.py
|
signapp.py
|
#!/usr/bin/env python
"""
signapp.py
created by Rolly Maulana Awangga
"""
import config
import pymongo
import urllib
import random
import time
from Crypto.Cipher import AES
class Signapp(object):
def __init__(self):
self.key = config.key
self.iv = config.iv
self.opendb()
def opendb(self):
self.conn = pymongo.MongoClient(config.mongohost, config.mongoport)
self.db = self.conn.signapp
def tokenUri(self):
return config.tokenuri
def random(self,ln):
ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
chars=[]
for i in range(ln):
chars.append(random.choice(ALPHABET))
return "".join(chars)
def urlEncode16(self,uri):
ln = len(uri)
sp = 16 - ln - len(str(ln))
if ln>9:
dt = str(ln)+uri+self.random(sp)
else:
dt = "0"+str(ln)+uri+self.random(sp-1)
return self.encodeData16(dt)
def urlDecode16(self,uri):
if len(uri)%16 == 0:
dt = self.decodeData16(uri)
try:
int(dt[:2])
ln = int(dt[:2])
ret = dt[2:2+ln]
except ValueError:
ret = dt
else:
ret = uri
return ret
def getAllSign(self,NPM):
self.db.sign
return self.db.sign.find({"NPM":NPM})
def getLastSign(self,NPM):
self.db.sign
return self.db.sign.find_one({"NPM":NPM})
def insertSign(self,NPM,Nilai,Pembimbing):
self.db.sign
data = {"NPM":NPM,"Nilai":Nilai,"waktu":time.strftime("%d/%m/%Y"),"Pembimbing":Pembimbing}
idProcess = self.db.sign.insert_one(doc).inserted_id
return str(idProcess)
def encodeData(self,msg):
obj=AES.new(self.key,AES.MODE_CFB,self.iv)
cp = obj.encrypt(msg)
return cp.encode("hex")
def decodeData(self,msg):
obj=AES.new(self.key,AES.MODE_CFB,self.iv)
dec = msg.decode("hex")
return obj.decrypt(dec)
def encodeData16(self,msg):
obj=AES.new(self.key,AES.MODE_CBC,self.iv)
cp = obj.encrypt(msg)
return cp.encode("hex")
def decodeData16(self,msg):
obj=AES.new(self.key,AES.MODE_CBC,self.iv)
dec = msg.decode("hex")
return obj.decrypt(dec)
def getHtmlBegin(self):
return config.html_begin
def getHtmlEnd(self):
return config.html_end
def getHtmlForm(self):
return config.html_form
def getMenu(self,uri):
if uri == config.keyuri:
opsi = "key"
elif uri == config.tokenuri:
opsi = "token"
else:
opsi = "other"
return opsi
def getTokenData(self,token):
url = config.tokenurl+token
response = urllib.urlopen(url)
html = response.read()
return html
def tokenValidation(self,token):
html = self.getTokenData(token)
if (html.find(config.aud)>0) and (html.find(config.iss)>0):
ret = "valid"
else:
ret = "invalid"
return ret
def getJsonData(self,name,json):
lookup = '"%s": "'%name
b = json.find(lookup)
c = json[b:].find(':')
c+=1
b = b+c
c = json[b:].find(',')
c = b+c
data = json[b:c].strip().strip('"')
return data
|
Python
| 0.000002
|
@@ -1379,19 +1379,18 @@
sign%0A%09%09d
-ata
+oc
= %7B%22NPM
|
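The one-word diff above fixes a NameError in insertSign: the document dict was bound to the name data while insert_one referenced doc. A sketch of the method after the fix, assuming the surrounding class from the record (the stray self.db.sign attribute access, a no-op, is dropped here):

def insertSign(self, NPM, Nilai, Pembimbing):
    doc = {"NPM": NPM, "Nilai": Nilai,
           "waktu": time.strftime("%d/%m/%Y"),
           "Pembimbing": Pembimbing}
    # inserted_id is the ObjectId MongoDB assigned to the new document
    return str(self.db.sign.insert_one(doc).inserted_id)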
0c30226cf6037ce6a3938cfb1e8b98fe5ef4d767
|
Test for misconfigured skeletor cfg
|
tests/test_config.py
|
tests/test_config.py
|
import sys
from skeletor.config import Config
from .base import BaseTestCase
from .helpers import nostdout
class ConfigTests(BaseTestCase):
""" Argument Passing & Config Tests. """
base_args = ['-n', 'test_skeleton']
def _set_cli_args(self, args):
with nostdout():
sys.argv = sys.argv + args
self.config = Config()
def should_exit_with_no_arguments(self):
try:
with nostdout():
Config()
except SystemExit:
assert True
def ensure_valid_project_name(self):
self._set_cli_args(['-n', 'this_is_valid'])
self.assertEquals(self.config.project_name, 'this_is_valid')
self._set_cli_args(['-n', 'Thisisvalid'])
self.assertEquals(self.config.project_name, 'Thisisvalid')
def should_exit_on_invalid_name(self):
try:
self._set_cli_args(['-n', 'this_is_not-valid'])
except SystemExit:
assert True
try:
self._set_cli_args(['-n', 'this_is not_valid'])
except SystemExit:
assert True
try:
self._set_cli_args(['-n', '*this_is_not_valid'])
except SystemExit:
assert True
def ensure_template_var_is_set_from_cli(self):
self._set_cli_args(self.base_args + ['--template', self.test_tpl_path])
self.assertEquals(self.config.template, self.test_tpl_path)
def should_raise_exit_if_template_section_is_not_list(self):
self._set_cli_args(self.base_args)
try:
self.config.set_template_options('this is not a list')
except SystemExit:
assert True
|
Python
| 0
|
@@ -1477,32 +1477,49 @@
not_list(self):%0A
+ try:%0A
self._se
@@ -1545,29 +1545,16 @@
e_args)%0A
- try:%0A
@@ -1631,32 +1631,32 @@
ept SystemExit:%0A
-
asse
@@ -1643,28 +1643,387 @@
it:%0A assert True%0A
+ else:%0A assert False%0A%0A def should_exit_if_skeletor_cfg_is_miss_configured(self):%0A try:%0A with nostdout():%0A self._set_cli_args(self.base_args)%0A self.config.set_attributes('not valid', %7B'not': 'valid'%7D)%0A except SystemExit:%0A assert True%0A else:%0A assert False%0A
|
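The diff above adds an else clause so the tests fail when SystemExit is never raised; without it, a silently succeeding call would still pass. The same intent can be written more compactly with unittest's context manager - a sketch, not the record's actual code, assuming the same BaseTestCase helpers:

def should_exit_if_skeletor_cfg_is_misconfigured(self):
    with nostdout():
        self._set_cli_args(self.base_args)
    # assertRaises fails the test on its own if SystemExit never occurs
    with self.assertRaises(SystemExit):
        self.config.set_attributes('not valid', {'not': 'valid'})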
b3e7bfab5920c45a19ba0ca67a8c0119714579ad
|
Update dtruss() tests
|
tests/test_dtrace.py
|
tests/test_dtrace.py
|
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
import os
import sys
import unittest
import subprocess
from dtrace.dtruss import *
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
class TestDtrace(unittest.TestCase):
def setUp(self):
build_target(self.current_target())
def tearDown(self):
cleanup_target(self.current_target())
def current_target(self):
return self._testMethodName
def test_dtruss_helloworld(self):
# given
print_hello_world_syscall = ('write_nocancel', ['0x1', 'Hello, world!\\n\\0', '0xE'], 14, 0)
# when
output = dtruss("./tests/assets/"+self.current_target())
#then
self.assertIn(print_hello_world_syscall, output)
def build_target(target):
# clang -arch x86_64 -o $target_name $target_name.c
output = executable_name_for_target(target)
source = sourcefile_name_for_target(target)
subprocess.check_call(["clang", "-arch", "x86_64", "-o", output, source])
def cleanup_target(target):
os.remove(executable_name_for_target(target))
def sourcefile_name_for_target(target):
return "%s/assets/%s.c" % (TESTS_DIR, target)
def executable_name_for_target(target):
return "%s/assets/%s" % (TESTS_DIR, target)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000018
|
@@ -890,16 +890,88 @@
output)%0A
+%09%09self.assertEqual(sum(x.name == %22write_nocancel%22 for x in output), 1)%0A%0A
%0Adef bui
|
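The added assertion counts matching syscalls with sum() over a generator expression. This works because bool is a subclass of int in Python, so each True comparison contributes 1 to the total. A self-contained illustration:

# Each comparison yields True (== 1) or False (== 0), so sum()
# counts how many items satisfy the predicate.
events = ['open', 'write_nocancel', 'close', 'write_nocancel']
assert sum(name == 'write_nocancel' for name in events) == 2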
83ef747830be964d43612bb081fba40e13c79b9f
|
fix test on Travis CI
|
tests/test_engine.py
|
tests/test_engine.py
|
#!/usr/bin/env python
"""
Tests for `plumbery` module.
"""
import io
import socket
import unittest
from libcloud.common.types import InvalidCredsError
from plumbery.__main__ import main
from plumbery.engine import PlumberyEngine
myPlan = """
---
safeMode: True
---
# Frankfurt in Europe
locationId: EU6
regionId: dd-eu
blueprints:
- myBlueprint:
domain:
name: myDC
ethernet:
name: myVLAN
subnet: 10.1.10.0
nodes:
- myServer
"""
myFacility = {
'regionId': 'dd-eu',
'locationId': 'EU7',
'blueprints': [{
'fake': {
'domain': {
'name': 'VDC1',
'service': 'ADVANCED',
'description': 'fake'},
'ethernet': {
'name': 'vlan1',
'subnet': '10.0.10.0',
'description': 'fake'},
'nodes': [{
'stackstorm': {
'description': 'fake',
'appliance': 'RedHat 6 64-bit 4 CPU'
}
}]
}
}]
}
class FakeLocation:
id = 'EU7'
name = 'data centre in Amsterdam'
country = 'Netherlands'
class TestPlumberyEngine(unittest.TestCase):
def test_configure(self):
settings = {
'safeMode': False,
'polishers': [
{'ansible': {}},
{'spit': {}},
]
}
self.engine = PlumberyEngine()
self.engine.set_shared_secret('fake_secret')
self.assertEqual(self.engine.get_shared_secret(), 'fake_secret')
self.engine.set_user_name('fake_name')
self.assertEqual(self.engine.get_user_name(), 'fake_name')
self.engine.set_user_password('fake_password')
self.assertEqual(self.engine.get_user_password(), 'fake_password')
self.engine.configure(settings)
self.assertEqual(self.engine.safeMode, False)
try:
self.engine.setup(io.TextIOWrapper(io.BytesIO(myPlan)))
self.engine.add_facility(myFacility)
self.assertEqual(len(self.engine.facilities), 2)
except socket.gaierror:
pass
except InvalidCredsError:
pass
def test_lifecycle(self):
self.engine = PlumberyEngine()
self.engine.set_shared_secret('fake_secret')
self.assertEqual(self.engine.get_shared_secret(), 'fake_secret')
self.engine.set_user_name('fake_name')
self.assertEqual(self.engine.get_user_name(), 'fake_name')
self.engine.set_user_password('fake_password')
self.assertEqual(self.engine.get_user_password(), 'fake_password')
try:
self.engine.build_all_blueprints()
self.engine.build_blueprint('myBlueprint')
self.engine.start_all_nodes()
self.engine.start_nodes('myBlueprint')
self.engine.polish_all_blueprints()
self.engine.polish_blueprint('myBlueprint')
self.engine.stop_all_nodes()
self.engine.stop_nodes('myBlueprint')
self.engine.destroy_all_nodes()
self.engine.destroy_nodes('myBlueprint')
self.engine.destroy_all_blueprints()
self.engine.destroy_blueprint('myBlueprint')
except socket.gaierror:
pass
except InvalidCredsError:
pass
def test_main(self):
try:
engine = PlumberyEngine()
engine.setup(io.TextIOWrapper(io.BytesIO(myPlan)))
main(['fittings.yaml', 'build', 'web'], engine)
main(['fittings.yaml', 'start', 'web'], engine)
main(['fittings.yaml', 'polish', 'web'], engine)
main(['fittings.yaml', 'rub', 'web'], engine)
main(['fittings.yaml', 'stop', 'web'], engine)
main(['fittings.yaml', 'destroy', 'web'], engine)
main(['fittings.yaml', 'build'], engine)
main(['fittings.yaml', 'start'], engine)
main(['fittings.yaml', 'polish'], engine)
main(['fittings.yaml', 'rub'], engine)
main(['fittings.yaml', 'stop'], engine)
main(['fittings.yaml', 'destroy'], engine)
except IOError:
print("Missing fittings plan")
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
Python
| 0
|
@@ -3716,32 +3716,142 @@
tesIO(myPlan)))%0A
+ self.engine.set_user_name('fake_name')%0A self.engine.set_user_password('fake_password')%0A
main
|
9c07e657bc4cd64adbb7aa6f04afc0dfa4960d3e
|
Update sepal/datasets/tasks.py
|
sepal/datasets/tasks.py
|
sepal/datasets/tasks.py
|
import os
from django.conf import settings
#import yaafelib as yf
import wave
import contextlib
from celery import task
from sepal.datasets.models import *
from sepal.datasets.utils import filter_by_key, find_dict_by_item
@task()
def handle_uploaded_file(f):
'''Saves an uploaded data source to MEDIA_ROOT/data_sources
'''
with open(os.path.join(settings.MEDIA_ROOT, 'data_sources', f.name), 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
return destination
@task()
def extract_features(dataset_id, instance_id, audiofile_path):
dataset = Dataset.objects.get(pk=dataset_id)
inst = Instance.objects.get(pk=instance_id)
n_frames, sample_rate, duration = 0, 0, 0
# Calculate the sample rate and duration
with contextlib.closing(wave.open(audiofile_path, 'r')) as audiofile:
n_frames = audiofile.getnframes()
sample_rate = audiofile.getframerate()
duration = n_frames / float(sample_rate)
# Format - {'Display name': 'name: Definition'}
FEATURES = [
{'display_name': 'Spectral Shape Characteristics',
'yaafe_name': 'sss',
'yaafe_definition': 'SpectralShapeStatistics',
'subfeatures': ['Spectral centroid', 'Spectral spread', 'Spectral kurtosis', 'Spectral skewness']
},
{'display_name': 'Temporal Shape Characteristics',
'yaafe_name': 'tss',
'yaafe_definition': 'TemporalShapeStatistics',
'subfeatures': ['Temporal centroid', 'Temporal spread', 'Temporal kurtosis', 'Temporal skewness']
},
{'display_name': 'ZCR',
'yaafe_name': 'zcr',
'yaafe_definition': 'ZCR',
'unit': 'Hz'
},
{'display_name': 'Energy',
'yaafe_name': 'energy',
'yaafe_definition': 'Energy',
},
{'display_name': 'Loudness',
'yaafe_name': 'loudness',
'yaafe_definition': 'Loudness',
},
{'display_name': 'Spectral rolloff',
'yaafe_name': 'spectral_rolloff',
'yaafe_definition': 'SpectralRolloff',
},
{'display_name': 'Perceptual sharpness',
'yaafe_name': 'perceptual_sharpness',
'yaafe_definition': 'PerceptualSharpness',
},
{'display_name': 'Perceptual spread',
'yaafe_name': 'perceptual_spread',
'yaafe_definition': 'PerceptualSpread',
},
{'display_name': 'Duration',
'unit': 's',
},
{'display_name': 'Sample rate',
'unit': 'Hz',
},
{'display_name': 'Spectral decrease',
'yaafe_name': 'spectral_decrease',
'yaafe_definition': 'SpectralDecrease',
},
{'display_name': "Spectral flatness",
'yaafe_name': 'spectral_flatness',
'yaafe_definition': 'SpectralFlatness',
},
# {'display_name': "Spectral flux",
# 'yaafe_name': 'spectral_flux',
# 'yaafe_definition': 'SpectralFlux',
# },
{'display_name': "Spectral slope",
'yaafe_name': 'spectral_slope',
'yaafe_definition': 'SpectralSlope',
},
# {'display_name': "Spectral variation",
# 'yaafe_name': 'spectral_variation',
# 'yaafe_definition': 'SpectralVariation',
# }
]
# Add features to extract
feature_plan = yf.FeaturePlan(sample_rate=sample_rate, resample=False)
for feature in FEATURES:
if 'yaafe_definition' in feature:
# YAAFE feature plans take definitions of the form: 'zcr: ZCR'
full_definition = feature['yaafe_name'] + ': ' + feature['yaafe_definition']
# Add the feature to the feature plan to be extracted
feature_plan.addFeature(full_definition)
# Configure an Engine
engine = yf.Engine()
engine.load(feature_plan.getDataFlow())
# Extract features
afp = yf.AudioFileProcessor()
afp.processFile(engine, audiofile_path)
# outputs dict format - {'Spectral centroid': [[2.33], [4.34],...[2.55]]}
outputs = {}
# Read and store output arrays to outputs dict
for feature in FEATURES:
if 'yaafe_definition' in feature: # Exclude duration and sample rate
output_name = feature['yaafe_name']
# If the feature has subfeatures, e.g. Spec shape stats
if 'subfeatures' in feature:
full_output = engine.readOutput(output_name)
for i, subfeature_display_name in enumerate(feature['subfeatures']):
outputs[subfeature_display_name] = full_output[:, i]
# If the feature has only 1 dimension(1 X T array)
else:
display_name = feature['display_name']
a = engine.readOutput(output_name) # 2D array
# Transpose data to make it a 1D array
outputs[display_name] = a.transpose()[0]
# Create YAAFE feature objects
feature_obj_list = []
for display_name in outputs.keys():
feature = find_dict_by_item(('display_name', display_name), FEATURES)
f, created = Feature.objects.get_or_create(
name=display_name.lower(),
display_name=display_name
)
if feature and ('unit' in feature):
f.unit = feature['unit']
f.save()
feature_obj_list.append(f)
# Create Sample rate and Duration objects
rate_obj, created = Feature.objects.get_or_create(name='sample rate')
if not rate_obj.unit:
rate_obj.unit = 'Hz'
rate_obj.save()
feature_obj_list.append(rate_obj)
duration_obj, created = Feature.objects.get_or_create(name='duration')
if not duration_obj.unit:
duration_obj.unit = 's'
duration_obj.save()
feature_obj_list.append(duration_obj)
# Associate features with instance
# for feature in feature_obj_list:
# inst.features.add(feature)
# If dataset has labels
if dataset.labels():
# NOTE: This assumes there's only one label name per dataset.
# Just indexes the first label name
label_name = dataset.labels()[0]
else:
# attach a placeholder LabelName called 'variable'
filtered = LabelName.objects.filter(name='variable')
# make sure that 'get' doesn't return an error if there are more than 1
# LabelName called 'variable'
if len(filtered) <= 1:
label_name, c = LabelName.objects.get_or_create(name='variable')
else:
label_name = filtered[0]
# Add a placeholder label value called "none" to instance
# This is necessary in order for plotting to work
filtered = LabelValue.objects.filter(value="none", label_name=label_name)
if len(filtered) <= 1:
no_label, c = LabelValue.objects.get_or_create(value="none",
label_name=label_name)
else:
no_label = filtered[0]
inst.label_values.add(no_label)
inst.save()
# Save output data and associate it with inst
for display_name, output in outputs.iteritems():
if output.size > 0: # Avoid empty data
for i in range(output[0].size):
output_mean = output[i].mean()
FeatureValue.objects.create(value=output_mean,
feature=Feature.objects.get(name__iexact=display_name.lower()),
instance=inst)
# Save sample_rate and duration data
FeatureValue.objects.create(value=sample_rate,
feature=Feature.objects.get(name='sample rate'),
instance=inst)
FeatureValue.objects.create(value=duration,
feature=Feature.objects.get(name='duration'),
instance=inst)
|
Python
| 0
|
@@ -37,17 +37,16 @@
ttings%0A%0A
-#
import y
|
52d8442068af3fbd848c32334327e48e623769c2
|
Change test class name
|
tests/test_helper.py
|
tests/test_helper.py
|
# -*- coding: utf-8 -*-
# from python_utils.helper import _Helper
#from python_utils import helper
import unittest
import python_utils
class TestPprint(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
|
Python
| 0.000004
|
@@ -16,16 +16,47 @@
tf-8 -*-
+%0A%22%22%22Tests for _Helper class.%22%22%22
%0A%0A# from
@@ -92,16 +92,17 @@
Helper%0A#
+
from pyt
@@ -142,22 +142,20 @@
nittest%0A
-import
+from
python_
@@ -159,16 +159,39 @@
on_utils
+.helper import _Helper%0A
%0A%0Aclass
@@ -199,13 +199,18 @@
estP
-print
+ythonUtils
(uni
@@ -230,78 +230,368 @@
e):%0A
-%0A def setUp(self):%0A pass%0A%0A def tearDown(self):%0A pass%0A
+ %22%22%22Add documentation here.%22%22%22%0A%0A def setUp(self):%0A %22%22%22Add documentation here.%22%22%22%0A self.helper = _Helper()%0A%0A def tearDown(self):%0A %22%22%22Add documentation here.%22%22%22%0A del self.helper%0A%0A def test__1_add_section(self):%0A %22%22%22Add documentation here.%22%22%22%0A self.helper.add_section_name(%22TESTE%22)%0A print self.helper.name
%0A
|
0a2951103ba70dc94f685b6fa3261ff371a78205
|
Revert ganesha integration - update needed due to intermediate code changes between the initial and revert actions
|
ovs/extensions/fs/exportfs.py
|
ovs/extensions/fs/exportfs.py
|
# Copyright 2014 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import subprocess
from ovs.log.logHandler import LogHandler
logger = LogHandler('extensions', name='exportfs')
class Nfsexports(object):
"""
Basic management for /etc/exports
"""
def __init__(self):
self._exportsFile = '/etc/exports'
self._cmd = ['/usr/bin/sudo', '-u', 'root', '/usr/sbin/exportfs']
def _slurp(self):
"""
Read from /etc/exports
"""
f = open(self._exportsFile, 'r')
dlist = []
for line in f:
if not re.match('^\s*$', line):
dlist.append(line)
f.close()
dlist = [i.strip() for i in dlist if not i.startswith('#')]
dlist = [re.split('\s+|\(|\)', i) for i in dlist]
keys = ['dir', 'network', 'params']
ldict = [dict(zip(keys, line)) for line in dlist]
return ldict
def add(self, directory, network, params):
"""
Add entry to /etc/exports
@param directory: directory to export
@param network: network range allowed
@param params: params for export (eg, 'ro,async,no_root_squash,no_subtree_check')
"""
l = self._slurp()
for i in l:
if i['dir'] == directory:
logger.info('Directory already exported, to export with different params please first remove')
return
f = open(self._exportsFile, 'a')
f.write('%s %s(%s)\n' % (directory, network, params))
f.close()
def remove(self, directory):
"""
Remove entry from /etc/exports
"""
l = self._slurp()
for i in l:
if i['dir'] == directory:
l.remove(i)
f = open(self._exportsFile, 'w')
for i in l:
f.write("%s %s(%s) \n" % (i['dir'], i['network'], i['params']))
f.close()
return
def list_exported(self):
"""
List the current exported filesystems
"""
exports = {}
output = subprocess.check_output(self._cmd)
for export in re.finditer('(\S+?)[\s\n]+(\S+)\n?', output):
exports[export.group(1)] = export.group(2)
return exports
def unexport(self, directory):
"""
Unexport a filesystem
"""
cmd = list(self._cmd)
exports = self.list_exported()
if not directory in exports.keys():
logger.info('Directory %s currently not exported' % directory)
return
logger.info('Unexporting {}:{}'.format(exports[directory] if exports[directory] != '<world>' else '*', directory))
cmd.extend(['-u', '{}:{}'.format(exports[directory] if exports[directory] != '<world>' else '*', directory)])
subprocess.call(cmd)
def export(self, directory, network='*'):
"""
Export a filesystem
"""
cmd = list(self._cmd)
exports = self.list_exported()
if directory in exports.keys():
logger.info('Directory already exported with options %s' % exports[directory])
return
logger.info('Exporting {}:{}'.format(network, directory))
cmd.extend(['-v', '{}:{}'.format(network, directory)])
subprocess.call(cmd)
|
Python
| 0
|
@@ -918,16 +918,116 @@
portfs'%5D
+%0A self._restart = %5B'/usr/bin/sudo', '-u', 'root', '/etc/init.d/nfs-kernel-server', 'restart'%5D
%0A%0A de
@@ -3920,28 +3920,67 @@
subprocess.call(cmd)%0A
+ subprocess.call(self._restart)%0A
|
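The revert re-adds a restart of the NFS kernel server after each export change so the new configuration takes effect. Note that subprocess.call, as used here, returns the command's exit status rather than raising on failure; a small sketch of the distinction, with the command path taken from the record:

import subprocess

restart = ['/usr/bin/sudo', '-u', 'root',
           '/etc/init.d/nfs-kernel-server', 'restart']
status = subprocess.call(restart)   # returns the exit code, never raises on non-zero
if status != 0:
    print('restart failed with exit code %d' % status)
# subprocess.check_call(restart) would raise CalledProcessError instead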
6b2f403ce33205ec681ba1a511c2d52db02f6a36
|
Use pipelines for cache busting scans for better performance
|
oz/plugins/aws_cdn/actions.py
|
oz/plugins/aws_cdn/actions.py
|
from __future__ import absolute_import, division, print_function, with_statement, unicode_literals
# Module for generating hashes for files that match a glob, and putting that
# hash in redis to allow us to generate cache-busting URLs later
import os
import oz
import oz.app
import oz.plugins.redis
import oz.plugins.aws_cdn
@oz.action
def cache_busting_scan(*prefixes):
"""
(Re-)generates the cache buster values for all files with the specified
prefixes.
"""
settings = oz.app.settings
redis = oz.plugins.redis.create_connection()
# Get all items that match any of the patterns. Put it in a set to
# prevent duplicates.
if settings["s3_bucket"]:
bucket = oz.plugins.aws_cdn.get_bucket()
matches = set([oz.plugins.aws_cdn.S3File(key) for prefix in prefixes for key in bucket.list(prefix)])
else:
matches = set([])
static_path = settings["static_path"]
for root, _, filenames in os.walk(static_path):
for filename in filenames:
path = os.path.relpath(os.path.join(root, filename), static_path)
for prefix in prefixes:
if path.startswith(prefix):
matches.add(oz.plugins.aws_cdn.LocalFile(static_path, path))
break
# Set the cache busters
for f in matches:
file_hash = f.hash(override=settings.get("hash_override", ""))
print(file_hash, f.path())
oz.plugins.aws_cdn.set_cache_buster(redis, f.path(), file_hash)
|
Python
| 0
|
@@ -553,16 +553,44 @@
ection()
+%0A pipe = redis.pipeline()
%0A%0A #
@@ -1542,21 +1542,20 @@
_buster(
-redis
+pipe
, f.path
@@ -1569,8 +1569,28 @@
e_hash)%0A
+%0A pipe.execute()%0A
|
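The change batches all the set_cache_buster writes through a redis-py pipeline: commands queue client-side and are flushed to the server in a single round trip by execute(), which is where the performance win comes from. A minimal sketch against a local server (the connection details are an assumption):

import redis

r = redis.StrictRedis()          # assumes a reachable local Redis
pipe = r.pipeline()
for key, value in [('a', 1), ('b', 2), ('c', 3)]:
    pipe.set(key, value)         # buffered client-side, nothing sent yet
pipe.execute()                   # one round trip for all queued commands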
b587557ab27598d7b1d273fbc445f27b40613a29
|
Update production bucket name.
|
us_ignite/settings/production.py
|
us_ignite/settings/production.py
|
# Production settings for us_ignite
import datetime
import os
import urlparse
from us_ignite.settings import *
# Sensitive values are saved as env variables:
env = os.getenv
PROJECT_ROOT = os.path.dirname(os.path.realpath(__file__))
# settings is one directory up now
here = lambda *x: os.path.join(PROJECT_ROOT, '..', *x)
SITE_URL = 'https://us-ignite.herokuapp.com'
ALLOWED_HOSTS = [
'us-ignite.herokuapp.com',
]
# HTTPS configuration:
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 60 * 5
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
# Make this unique, and don't share it with anybody.
SECRET_KEY = env('SECRET_KEY')
# Remote storage settings:
STATICFILES_STORAGE = 'us_ignite.common.storage.StaticS3Storage'
DEFAULT_FILE_STORAGE = 'us_ignite.common.storage.MediaS3Storage'
THUMBNAIL_DEFAULT_STORAGE = DEFAULT_FILE_STORAGE
AWS_ACCESS_KEY_ID = env('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'local-us-ignite-dot-org'
expire_date = datetime.date.today() + datetime.timedelta(days=365)
expire_seconds = 30 * 24 * 60 * 60
AWS_HEADERS = {
'Expires': expire_date.strftime('%a, %d %b %Y 00:00:00 GMT'),
'Cache-Control': 'max-age=%s' % expire_seconds,
}
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
STATIC_URL = '//%s/static/' % AWS_S3_CUSTOM_DOMAIN
redis_url = urlparse.urlparse(env('REDISTOGO_URL'))
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '%s:%s' % (redis_url.hostname, redis_url.port),
'OPTIONS': {
'DB': 0,
'PASSWORD': redis_url.password,
}
}
}
# Email
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = env('EMAIL_HOST')
EMAIL_PORT = env('EMAIL_PORT')
EMAIL_HOST_USER = env('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = env('EMAIL_HOST_PASSWORD')
# Twitter API:
TWITTER_API_KEY = env('TWITTER_API_KEY')
TWITTER_API_SECRET = env('TWITTER_API_SECRET')
# WP email
WP_EMAIL = env('WP_EMAIL')
# Enable dummy content generation on this build:
ENABLE_DUMMY = True
if ENABLE_DUMMY:
INSTALLED_APPS += ('us_ignite.dummy', )
# List of words:
WORDS_PATH = here('..', 'words')
MAILCHIMP_API_KEY = env('MAILCHIMP_API_KEY')
MAILCHIMP_LIST = env('MAILCHIMP_LIST')
|
Python
| 0
|
@@ -986,14 +986,8 @@
= '
-local-
us-i
|
e509bb74406243829810d011af7c5d1a7a5368e7
|
add test case for client credential. #6
|
tests/test_oauth2.py
|
tests/test_oauth2.py
|
# coding: utf-8
import os
import tempfile
import unittest
from urlparse import urlparse
from flask import Flask
from .oauth2_server import create_server, db
from .oauth2_client import create_client
class BaseSuite(unittest.TestCase):
def setUp(self):
app = Flask(__name__)
app.debug = True
app.testing = True
app.secret_key = 'development'
self.db_fd, self.db_file = tempfile.mkstemp()
config = {
'SQLALCHEMY_DATABASE_URI': 'sqlite:///%s' % self.db_file
}
app.config.update(config)
app = create_server(app)
app = create_client(app)
self.app = app
self.client = app.test_client()
return app
def tearDown(self):
db.session.remove()
db.drop_all()
os.close(self.db_fd)
os.unlink(self.db_file)
authorize_url = (
'/oauth/authorize?response_type=code&client_id=dev'
'&redirect_uri=http%3A%2F%2Flocalhost%3A8000%2Fauthorized&scope=email'
)
class TestWebAuth(BaseSuite):
def test_login(self):
rv = self.client.get('/login')
assert 'response_type=code' in rv.location
def test_oauth_authorize_invalid_url(self):
rv = self.client.get('/oauth/authorize')
assert 'invalid_client_id' in rv.location
#rv = self.client.get('/oauth/authorize?client_id=dev')
#print rv.data
def test_oauth_authorize_valid_url(self):
rv = self.client.get(authorize_url)
# valid
assert '</form>' in rv.data
rv = self.client.post(authorize_url, data=dict(
confirm='no'
))
assert 'access_denied' in rv.location
rv = self.client.post(authorize_url, data=dict(
confirm='yes'
))
# success
assert 'code=' in rv.location
def test_get_access_token(self):
rv = self.client.post(authorize_url, data={'confirm': 'yes'})
rv = self.client.get(clean_url(rv.location))
assert 'access_token' in rv.data
def test_full_flow(self):
rv = self.client.post(authorize_url, data={'confirm': 'yes'})
rv = self.client.get(clean_url(rv.location))
assert 'access_token' in rv.data
rv = self.client.get('/')
assert 'username' in rv.data
class TestPasswordAuth(BaseSuite):
def test_get_access_token(self):
auth_code = 'confidential:confidential'.encode('base64').strip()
url = ('/oauth/access_token?grant_type=password'
'&scope=email+address&username=admin&password=admin')
rv = self.client.get(url, headers={
'HTTP_AUTHORIZATION': 'Basic %s' % auth_code,
}, data={'confirm': 'yes'})
assert 'access_token' in rv.data
def clean_url(location):
ret = urlparse(location)
return '%s?%s' % (ret.path, ret.query)
|
Python
| 0
|
@@ -2738,16 +2738,480 @@
.data%0A%0A%0A
+class TestCredentialAuth(BaseSuite):%0A def test_get_access_token(self):%0A auth_code = 'confidential:confidential'.encode('base64').strip()%0A url = ('/oauth/access_token?grant_type=client_credentials'%0A '&scope=email+address&username=admin&password=admin')%0A rv = self.client.get(url, headers=%7B%0A 'HTTP_AUTHORIZATION': 'Basic %25s' %25 auth_code,%0A %7D, data=%7B'confirm': 'yes'%7D)%0A assert 'access_token' in rv.data%0A%0A%0A
def clea
|
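The new TestCredentialAuth case drives the OAuth2 client_credentials grant. The HTTP Basic credential it sends is just 'client_id:client_secret' base64-encoded; the test uses the Python 2-only str.encode('base64'), and a version-agnostic sketch of the same header looks like:

import base64

client_id = client_secret = 'confidential'
raw = ('%s:%s' % (client_id, client_secret)).encode('utf-8')
auth_code = base64.b64encode(raw).decode('ascii')  # same value the test builds
headers = {'HTTP_AUTHORIZATION': 'Basic %s' % auth_code}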
9c61b0d27873c8c1ea2ba2311f547625a83bf7be
|
Add cached_function to API
|
tests/test_parser.py
|
tests/test_parser.py
|
import requests
import yaml
from unittest import TestCase
from mfnf.api import HTTPMediaWikiAPI
from mfnf.parser import HTML2JSONParser, ArticleContentParser
class TestParser(TestCase):
def setUp(self):
self.api = HTTPMediaWikiAPI(requests.Session())
self.title = "Mathe für Nicht-Freaks: Analysis 1"
self.maxDiff = None
def parse(self, text):
return ArticleContentParser(api=self.api, title=self.title)(text)
def test_html2json_parser(self):
with open("docs/html.spec.yml") as spec_file:
spec = yaml.load(spec_file)
for html, target_json in ((x["in"], x["out"]) for x in spec):
with self.subTest(html=html):
parser = HTML2JSONParser()
parser.feed(html)
self.assertListEqual(parser.content, target_json, msg=html)
def test_parsing_block_elements(self):
with open("docs/mfnf-block-elements.spec.yml") as spec_file:
spec = yaml.load(spec_file)
for text, target in ((x["in"], x["out"]) for x in spec):
with self.subTest(text=text):
self.assertListEqual(self.parse(text), target, msg=text)
def test_parsing_inline_elements(self):
with open("docs/mfnf-inline-elements.spec.yml") as spec_file:
spec = yaml.load(spec_file)
for text, target in ((x["in"], x["out"]) for x in spec):
with self.subTest(text=text):
target = [{"type": "paragraph", "content": [target]}]
self.assertListEqual(self.parse(text), target, msg=text)
|
Python
| 0.000002
|
@@ -4,28 +4,14 @@
ort
-requests%0Aimport yaml
+shelve
%0A%0Afr
@@ -38,16 +38,46 @@
estCase%0A
+%0Aimport requests%0Aimport yaml%0A%0A
from mfn
@@ -172,82 +172,577 @@
ser%0A
-%0Aclass TestParser(TestCase):%0A%0A def setUp(self):%0A self
+from mfnf.utils import CachedFunction%0A%0Aclass TestParser(TestCase):%0A%0A @classmethod%0A def setUpClass(cls):%0A cls.database = shelve.open(%22.cache.db%22, %22c%22)%0A cached_function = CachedFunction(cls.database)%0A%0A class CachedMediaWikiAPI(HTTPMediaWikiAPI):%0A @cached_function%0A def get_content(self, title):%0A return super().get_content(title)%0A%0A @cached_function%0A def convert_text_to_html(self, title, text):%0A return super().convert_text_to_html(title, text)%0A%0A cls
.api =
-HTTP
+Cached
Medi
@@ -769,16 +769,113 @@
ssion())
+%0A%0A @classmethod%0A def tearDownClass(cls):%0A cls.database.close()%0A%0A def setUp(self):
%0A
|
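The diff memoizes the two MediaWiki API calls in a shelve database through mfnf.utils.CachedFunction, which is not shown in this record. A decorator with that shape might look like the following sketch; the key scheme and the class internals are assumptions, not the library's actual implementation:

import shelve

class CachedFunction:
    # Hypothetical sketch: persist a method's results in a shelf so
    # repeated test runs skip the network round trips.
    def __init__(self, database):
        self.database = database

    def __call__(self, func):
        def wrapper(obj, *args):
            key = '%s:%r' % (func.__name__, args)  # assumed key scheme
            if key not in self.database:
                self.database[key] = func(obj, *args)
            return self.database[key]
        return wrapper

Used as in the diff: instantiate it with an open shelf in setUpClass and decorate the methods whose results should be cached.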
e4e24755dd6dc78e26b3653a5a01d3a1f3c2055f
|
Check that upload parameters are processed correctly
|
tests/test_photos.py
|
tests/test_photos.py
|
import unittest
import openphoto
import test_base
class TestPhotos(test_base.TestBase):
def test_delete_upload(self):
""" Test photo deletion and upload """
# Delete one photo using the OpenPhoto class, passing in the id
self.assertTrue(self.client.photo.delete(self.photos[0].id))
# Delete one photo using the OpenPhoto class, passing in the object
self.assertTrue(self.client.photo.delete(self.photos[1]))
# And another using the Photo object directly
self.assertTrue(self.photos[2].delete())
# Check that they're gone
self.assertEqual(self.client.photos.list(), [])
# Re-upload the photos, one of them using Bas64 encoding
ret_val = self.client.photo.upload("tests/test_photo1.jpg",
title=self.TEST_TITLE)
self.client.photo.upload("tests/test_photo2.jpg",
title=self.TEST_TITLE)
self.client.photo.upload_encoded("tests/test_photo3.jpg",
title=self.TEST_TITLE)
# Check there are now three photos
self.photos = self.client.photos.list()
self.assertEqual(len(self.photos), 3)
# Check that the upload return value was correct
pathOriginals = [photo.pathOriginal for photo in self.photos]
self.assertIn(ret_val.pathOriginal, pathOriginals)
# Delete all photos in one go
self.assertTrue(self.client.photos.delete(self.photos))
# Check they're gone
self.photos = self.client.photos.list()
self.assertEqual(len(self.photos), 0)
# Regenerate the original test photos
self._delete_all()
self._create_test_photos()
def test_edit(self):
""" Check that the edit request returns an HTML form """
# Test using the OpenPhoto class
html = self.client.photo.edit(self.photos[0])
self.assertIn("<form", html.lower())
# And the Photo object directly
html = self.photos[0].edit()
self.assertIn("<form", html.lower())
def test_upload_duplicate(self):
""" Ensure that duplicate photos are rejected """
# Attempt to upload a duplicate
with self.assertRaises(openphoto.OpenPhotoDuplicateError):
self.client.photo.upload("tests/test_photo1.jpg",
title=self.TEST_TITLE)
# Check there are still three photos
self.photos = self.client.photos.list()
self.assertEqual(len(self.photos), 3)
def test_update(self):
""" Update a photo by editing the title """
title = u"\xfcmlaut" # umlauted umlaut
# Get a photo and check that it doesn't have the magic title
photo = self.photos[0]
self.assertNotEqual(photo.title, title)
# Add the title to a photo using the OpenPhoto class
ret_val = self.client.photo.update(photo, title=title)
# Check that it's there
self.photos = self.client.photos.list()
photo = self.photos[0]
self.assertEqual(photo.title, title)
# Check that the return value was correct
self.assertEqual(ret_val.pathOriginal, photo.pathOriginal)
# Revert the title using the Photo object directly
photo.update(title=self.TEST_TITLE)
# Check that it's gone back
self.photos = self.client.photos.list()
self.assertEqual(self.photos[0].title, self.TEST_TITLE)
def test_update_multiple(self):
""" Update multiple photos by adding tags """
tag_id = "update_photo_tag"
# Get a couple of photos
photos = self.photos[:2]
# Add the tag using a list of photo objects
self.client.photos.update(photos, tagsAdd=tag_id)
# Check that it's there
for photo in self.client.photos.list()[:2]:
self.assertIn(tag_id, photo.tags)
# Remove the tags using a list of photo ids
self.client.photos.update([photo.id for photo in photos],
tagsRemove=tag_id)
def test_view(self):
""" Test photo view """
# Check that our magic sizes aren't present
photo = self.photos[0]
self.assertFalse(hasattr(photo, "path9x9"))
self.assertFalse(hasattr(photo, "path19x19"))
# View at a particular size using the OpenPhoto class
photo = self.client.photo.view(photo, returnSizes="9x9")
self.assertTrue(hasattr(photo, "path9x9"))
# View at a particular size using the Photo object directly
photo.view(returnSizes="19x19")
self.assertTrue(hasattr(photo, "path19x19"))
def test_next_previous(self):
""" Test the next/previous links of the middle photo """
next_prev = self.client.photo.next_previous(self.photos[1])
self.assertEqual(next_prev["previous"][0].id, self.photos[0].id)
self.assertEqual(next_prev["next"][0].id, self.photos[2].id)
# Do the same using the Photo object directly
next_prev = self.photos[1].next_previous()
self.assertEqual(next_prev["previous"][0].id, self.photos[0].id)
self.assertEqual(next_prev["next"][0].id, self.photos[2].id)
def test_replace(self):
""" If photo.replace gets implemented, write a test! """
with self.assertRaises(openphoto.NotImplementedError):
self.client.photo.replace(None, None)
def test_replace_encoded(self):
""" If photo.replace_encoded gets implemented, write a test! """
with self.assertRaises(openphoto.NotImplementedError):
self.client.photo.replace_encoded(None, None)
def test_dynamic_url(self):
""" If photo.dynamic_url gets implemented, write a test! """
with self.assertRaises(openphoto.NotImplementedError):
self.client.photo.dynamic_url(None)
def test_transform(self):
""" If photo.transform gets implemented, write a test! """
with self.assertRaises(openphoto.NotImplementedError):
self.client.photo.transform(None)
|
Python
| 0
|
@@ -1119,32 +1119,56 @@
now three photos
+ with the correct titles
%0A self.ph
@@ -1241,24 +1241,117 @@
.photos), 3)
+%0A for photo in self.photos:%0A self.assertEqual(photo.title, self.TEST_TITLE)
%0A%0A #
|
6ddaf77adb3a3d1ad42eee06aae657fe15f77fa7
|
revert to assertions
|
tests/test_readme.py
|
tests/test_readme.py
|
import doctest
def test_readme():
errs, _ = doctest.testfile('../README.rst', report=True)
if errs > 0:
raise ValueError(
'{} errors encountered in README.rst'.format(
errs))
|
Python
| 0.999542
|
@@ -98,124 +98,20 @@
-if errs %3E 0:%0A raise ValueError(%0A '%7B%7D errors encountered in README.rst'.format(%0A
+assert not
errs
-))
%0A
|
55e6f07a804bb857fab63dfe82dcb228ce1de12e
|
Replace os.path with ntpath in tests
|
tests/test_rename.py
|
tests/test_rename.py
|
from unittest import TestCase
from mock import mock
from exifread import IfdTag
from pictures.rename import rename
from tests import helpers
def ifd_tag_from(date_time_original):
return IfdTag(None, None, None, date_time_original, None, None)
class MockFile(object):
def __init__(self, filename, mode):
self.filename = filename
self.mode = mode
def __enter__(self):
return self
def __exit__(self, *args):
pass
def create_mock_process_file(files):
return lambda f_mock: files[f_mock.filename]
def create_mock_isfile(files):
return lambda f: f in files
def create_mock_join():
return lambda path, *paths: r'{}\{}'.format(path, '\\'.join(paths))
class TestRename(TestCase):
FILES = {
r'C:\dir\no_exif_tags.jpeg': {},
r'C:\dir\timestamp_does_not_exist.jpeg': {'EXIF DateTimeOriginal': ifd_tag_from('2016:10:29 15:43:56')}, # 1 check
r'C:\dir\timestamp_does_exist.jpeg': {'EXIF DateTimeOriginal': ifd_tag_from('2016:02:04 12:03:35')}, # 2 checks
r'C:\dir\20160204_120335.jpeg': {'EXIF DateTimeOriginal': ifd_tag_from('2016:02:04 12:03:35')},
r'C:\dir\timestamp_does_exist_multiple.jpeg': {'EXIF DateTimeOriginal': ifd_tag_from('2017:01:03 14:23:45')}, # 4 checks
r'C:\dir\20170103_142345.jpeg': {'EXIF DateTimeOriginal': ifd_tag_from('2017:01:03 14:23:45')},
r'C:\dir\20170103_142345_1.jpeg': {'EXIF DateTimeOriginal': ifd_tag_from('2017:01:03 14:23:45')},
r'C:\dir\20170103_142345_2.jpeg': {'EXIF DateTimeOriginal': ifd_tag_from('2017:01:03 14:23:45')}
}
@mock.patch('os.rename')
@mock.patch('os.path.isfile', side_effect=create_mock_isfile(FILES))
@mock.patch('os.path.join', side_effect=create_mock_join())
@mock.patch('exifread.process_file', side_effect=create_mock_process_file(FILES))
@mock.patch('builtins.open', side_effect=MockFile)
def test_rename(self, mock_open, mock_process_file, mock_join, mock_isfile, mock_rename):
rename(self.FILES)
self.assertEquals(mock_open.mock_calls, helpers.calls_from(zip(self.FILES.keys(), ['rb'] * len(self.FILES))))
self.assertEquals(mock_process_file.call_count, len(self.FILES))
self.assertEquals(mock_join.call_count, 7) # number of checks
self.assertEquals(mock_isfile.call_count, 7) # number of checks
self.assertEquals(mock_rename.mock_calls, helpers.calls_from([
(r'C:\dir\timestamp_does_not_exist.jpeg', r'C:\dir\20161029_154356.jpeg'),
(r'C:\dir\timestamp_does_exist.jpeg', r'C:\dir\20160204_120335_1.jpeg'),
(r'C:\dir\timestamp_does_exist_multiple.jpeg', r'C:\dir\20170103_142345_3.jpeg')
]))
|
Python
| 0
|
@@ -1,12 +1,26 @@
+import ntpath%0A
from unittes
@@ -40,30 +40,8 @@
Case
-%0Afrom mock import mock
%0A%0Afr
@@ -65,16 +65,38 @@
t IfdTag
+%0Afrom mock import mock
%0A%0Afrom p
@@ -632,106 +632,8 @@
s%0A%0A%0A
-def create_mock_join():%0A return lambda path, *paths: r'%7B%7D%5C%7B%7D'.format(path, '%5C%5C'.join(paths))%0A%0A%0A
clas
@@ -1548,145 +1548,8 @@
e')%0A
- @mock.patch('os.path.isfile', side_effect=create_mock_isfile(FILES))%0A @mock.patch('os.path.join', side_effect=create_mock_join())%0A
@@ -1685,16 +1685,112 @@
ckFile)%0A
+ @mock.patch('os.path.isfile', create_mock_isfile(FILES))%0A @mock.patch('os.path', ntpath)%0A
def
@@ -1840,32 +1840,8 @@
ile,
- mock_join, mock_isfile,
moc
@@ -2100,151 +2100,14 @@
als(
-mock_join.call_count, 7) # number of checks%0A self.assertEquals(mock_isfile.call_count, 7) # number of checks%0A self.assertEquals
+sorted
(moc
@@ -2117,34 +2117,42 @@
ename.mock_calls
+)
,
+sorted(
helpers.calls_fr
@@ -2421,16 +2421,16 @@
.jpeg')%0A
-
@@ -2432,9 +2432,10 @@
%5D))
+)
%0A
|
7ef0fe9f1a2b91c72c2709ed025780547e329403
|
Update test
|
tests/test_ricker.py
|
tests/test_ricker.py
|
import pytest
from ricker.ricker import ricker
class TestRicker:
def test_output_number(self):
assert len(ricker()) == 2
def test_default_output(self):
t, s = ricker()
assert len(t) == len(s)
def test_error(self):
with pytest.raises(ValueError):
ricker(f=0)
|
Python
| 0.000001
|
@@ -78,21 +78,22 @@
est_
-output_number
+default_output
(sel
@@ -108,33 +108,117 @@
-assert len(ricker()) == 2
+dt = 0.002%0A length = 1%0A s = ricker(len=length, dt=dt)%0A assert len(s) == int(length / dt)
%0A%0A
@@ -224,38 +224,37 @@
def test_
-default_output
+input_check_f
(self):%0A
@@ -261,54 +261,165 @@
-t, s = ricker()%0A assert len(t) == len(s
+with pytest.raises(ValueError):%0A ricker(f=0)%0A%0A def test_input_check_len(self):%0A with pytest.raises(ValueError):%0A ricker(len=0
)%0A%0A
@@ -434,13 +434,22 @@
est_
-error
+input_check_dt
(sel
@@ -495,29 +495,148 @@
or):%0A ricker(
-f
+dt
=0)%0A
+%0A def test_input_len_peak_loc(self):%0A with pytest.warns(UserWarning):%0A ricker(len=1, peak_loc=2)%0A
|
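The rewritten tests split the old generic test_error into per-parameter checks and add pytest.warns, which passes only if the block under it emits a matching warning. A self-contained sketch with a hypothetical stand-in for the generator's validation (the len keyword mirrors the record's API):

import warnings
import pytest

def ricker_stub(len=1, peak_loc=0):   # hypothetical stand-in, not the real ricker()
    if peak_loc > len:
        warnings.warn('peak_loc exceeds the signal length', UserWarning)

def test_peak_loc_warns():
    with pytest.warns(UserWarning):
        ricker_stub(len=1, peak_loc=2)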
298b85a7c36e536a985b7ccffc8fefa135baa187
|
Fix TestRunner test case
|
tests/test_runner.py
|
tests/test_runner.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from multiprocessing import Lock
import git
import mock
import pytest
from badwolf.runner import TestContext, TestRunner
@pytest.fixture(scope='function')
def push_context():
return TestContext(
'deepanalyzer/badwolf',
'git@bitbucket.org:deepanalyzer/badwolf.git',
{},
'commit',
'Update',
{
'branch': {'name': 'master'},
'commit': {'hash': '2cedc1af762'},
}
)
@pytest.fixture(scope='function')
def push_runner(push_context):
return TestRunner(push_context, Lock())
def test_clone_repo_failed(app, push_runner):
with mock.patch.object(push_runner, 'update_build_status') as status, \
mock.patch.object(push_runner, 'clone_repository') as clone_repo, \
mock.patch.object(push_runner, 'validate_settings') as validate_settings:
status.return_value = None
clone_repo.side_effect = git.GitCommandError('git clone', 1)
push_runner.run()
validate_settings.assert_not_called()
|
Python
| 0.000001
|
@@ -74,16 +74,42 @@
iterals%0A
+import os%0Aimport tempfile%0A
from mul
@@ -223,16 +223,70 @@
tRunner%0A
+from badwolf.bitbucket import PullRequest, Changesets%0A
%0A%0A@pytes
@@ -674,29 +674,31 @@
text):%0A r
-eturn
+unner =
TestRunner(
@@ -719,16 +719,177 @@
Lock())%0A
+ runner.clone_path = os.path.join(%0A tempfile.gettempdir(),%0A 'badwolf',%0A runner.task_id,%0A runner.repo_name%0A )%0A return runner%0A
%0A%0Adef te
@@ -1168,16 +1168,157 @@
settings
+, %5C%0A mock.patch.object(PullRequest, 'comment') as pr_comment, %5C%0A mock.patch.object(Changesets, 'comment') as cs_comment
:%0A
@@ -1415,16 +1415,95 @@
ne', 1)%0A
+ pr_comment.return_value = None%0A cs_comment.return_value = None%0A%0A
|
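The fix stacks additional mock.patch.object context managers so the Bitbucket comment calls are stubbed out during the run. As a reminder of the API, patch.object replaces an attribute for the duration of the with-block and restores it afterwards; a small self-contained example:

try:
    from unittest import mock
except ImportError:
    import mock

class Greeter(object):
    def hello(self):
        return 'hi'

def test_patch_object():
    g = Greeter()
    with mock.patch.object(Greeter, 'hello', return_value='stubbed') as m:
        assert g.hello() == 'stubbed'   # the mock answers inside the block
    m.assert_called_once_with()
    assert g.hello() == 'hi'            # original method restored on exit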
050e6ee000e89fb0ebeff5dcb2b6d79b10e92069
|
Fix monkeypatching for older scipy versions
|
tests/test_things.py
|
tests/test_things.py
|
from __future__ import division
import stft
import numpy
import pytest
@pytest.fixture(params=[1, 2])
def channels(request):
return request.param
@pytest.fixture(params=[0, 1, 4])
def padding(request):
return request.param
@pytest.fixture(params=[2048])
def length(request):
return request.param
@pytest.fixture
def signal(channels, length):
return numpy.squeeze(numpy.random.random((length, channels)))
@pytest.fixture(params=[512])
def framelength(request):
return request.param
def test_shape(length, framelength):
a = numpy.squeeze(numpy.random.random((length, 1)))
x = stft.spectrogram(a, framelength=framelength, halved=True)
assert x.shape[0] == framelength / 2 + 1
x_2 = stft.spectrogram(a, framelength=framelength, halved=False)
assert x_2.shape[0] == framelength
def test_windowlength_errors():
"""
Test if way too short signals can be transformed
"""
siglen = 512
framelen = 2048
stft.spectrogram(numpy.random.random(siglen), framelength=framelen)
def test_precision(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.allclose(a, y)
def test_rms(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.sqrt(numpy.mean((a - y) ** 2)) < 1e-8
def test_maxdim():
a = numpy.random.random((512, 2, 2))
with pytest.raises(ValueError):
stft.spectrogram(a)
b = numpy.random.random((512, 2, 2, 3))
with pytest.raises(ValueError):
stft.ispectrogram(b)
def test_issue1():
a = numpy.random.random((512, 1))
b = stft.spectrogram(a)
assert b.ndim == 2
def raiser(*args):
raise AttributeError
def test_fallback(monkeypatch):
import scipy.signal
monkeypatch.setattr("scipy.signal.cosine", raiser)
return test_windowlength_errors()
|
Python
| 0
|
@@ -2168,24 +2168,159 @@
nkeypatch):%0A
+ # Try monkeypatching signal.cosine away.%0A # Ignore AttributeErrors during monkeypatching, for older scipy versions%0A try:%0A
import s
@@ -2331,16 +2331,20 @@
.signal%0A
+
monk
@@ -2390,16 +2390,56 @@
raiser)%0A
+ except AttributeError:%0A pass%0A
retu
|
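The guard added above is needed because scipy.signal.cosine only exists in newer scipy releases, and monkeypatch.setattr raises AttributeError when asked to replace a missing attribute. On pytest versions that support it, the raising=False flag expresses the same tolerance more directly - a sketch reusing the module's raiser helper:

def test_fallback(monkeypatch):
    import scipy.signal
    # raising=False skips the AttributeError if the attribute is absent
    monkeypatch.setattr('scipy.signal.cosine', raiser, raising=False)
    return test_windowlength_errors()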
893e09b14eabff3a6ec2ff87db0499bc3fd2a213
|
fix tests to use forced aligner
|
tests/transcriber.py
|
tests/transcriber.py
|
import os
import unittest
class Transcriber(unittest.TestCase):
audio = 'examples/data/lucier.mp3'
def test_resources(self):
from gentle import Resources
from gentle.util.paths import get_binary
resources = Resources()
k3 = get_binary("ext/k3")
self.assertEqual(os.path.exists(resources.full_hclg_path), True)
self.assertEqual(os.path.exists(self.audio), True)
self.assertEqual(os.path.exists(k3), True)
def test_transcriber(self):
import subprocess
from gentle import resampled, kaldi_queue, standard_kaldi, Resources
from gentle.transcriber import MultiThreadedTranscriber
standard_kaldi.STDERR = subprocess.STDOUT
resources = Resources()
k_queue = kaldi_queue.build(resources, 1)
trans = MultiThreadedTranscriber(k_queue)
with resampled(self.audio, 10.5, 2.5) as filename:
words, duration = trans.transcribe(filename)
self.assertEqual(words[0].word, "different")
|
Python
| 0
|
@@ -26,25 +26,21 @@
%0A%0Aclass
-Transcrib
+Align
er(unitt
@@ -92,16 +92,58 @@
ier.mp3'
+%0A transcript = %22i am sitting in a room%22
%0A%0A de
@@ -319,25 +319,24 @@
xt/k3%22)%0A
-%0A
self.ass
@@ -331,72 +331,73 @@
-self.assertEqual(os.path.exists(resources.full_hclg_path), True)
+model = get_binary(%22exp/tdnn_7b_chain_online/final.mdl%22 ) %0A
%0A
@@ -502,16 +502,70 @@
), True)
+%0A self.assertEqual(os.path.exists(model), True)
%0A%0A de
@@ -571,25 +571,21 @@
ef test_
-transcrib
+align
er(self)
@@ -653,21 +653,8 @@
led,
- kaldi_queue,
sta
@@ -700,51 +700,89 @@
tle.
-transcriber import MultiThreadedTranscriber
+forced_aligner import ForcedAligner%0A from gentle.transcription import Word
%0A%0A
@@ -874,98 +874,68 @@
-k_queue = kaldi_queue.build(resources, 1)%0A trans = MultiThreadedTranscriber(k_queue
+align = ForcedAligner(resources, self.transcript, nthreads=1
)%0A%0A
@@ -972,17 +972,16 @@
io,
-10.5, 2.5
+5.0, 5.0
) as
@@ -1007,31 +1007,29 @@
-words, dura
+transcrip
tion =
-trans
+align
.tra
@@ -1058,49 +1058,189 @@
-self.assertEqual(words%5B0%5D.word, %22different%22)
+ words = transcription.words%0A self.assertEqual(words%5B0%5D.word, %22i%22)%0A self.assertEqual(words%5B1%5D.word, %22am%22)%0A self.assertEqual(words%5B1%5D.case, Word.SUCCESS)
%0A
|
72e948719145579eb7dfb9385b921f8eb6ea1384
|
Add more exemplar primitive generators
|
tests/v4/conftest.py
|
tests/v4/conftest.py
|
from .context import tohu
from tohu.v4.primitive_generators import *
from tohu.v4.derived_generators import *
__all__ = ['EXEMPLAR_GENERATORS', 'EXEMPLAR_PRIMITIVE_GENERATORS', 'EXEMPLAR_DERIVED_GENERATORS']
def add(x, y):
return x + y
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=6),
FakerGenerator(method="name"),
IterateOver('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcdefghijklmnopqrstuvwxyz'),
SelectOne('abcde', p=[0.1, 0.05, 0.7, 0.03, 0.12]),
Timestamp(date='2018-01-01'),
]
EXEMPLAR_DERIVED_GENERATORS = [
Apply(add, Integer(100, 200), Integer(300, 400)),
Apply(add, Apply(add, Integer(100, 200), Integer(300, 400)), Apply(add, Integer(500, 600), Integer(700, 800))),
]
EXEMPLAR_CUSTOM_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS + EXEMPLAR_CUSTOM_GENERATORS
|
Python
| 0
|
@@ -267,24 +267,44 @@
ERATORS = %5B%0A
+ Boolean(p=0.3),%0A
Constant
@@ -317,32 +317,69 @@
%22),%0A
-Integer(100, 200
+FakerGenerator(method=%22name%22),%0A Float(12.34, 56.78
),%0A H
@@ -407,36 +407,24 @@
-FakerGenerator(method=%22name%22
+Integer(100, 200
),%0A
|
e6305725a57bd6daca24e66699a8e3b0ead8d866
|
Split long line
|
utils/ci/topology_integration.py
|
utils/ci/topology_integration.py
|
#!/usr/bin/env python2
# pylint: disable=missing-docstring
import time
import signal
import threading
from emuvim.dcemulator.net import DCNetwork
from mininet.node import RemoteController
from emuvim.api.sonata import SonataDummyGatekeeperEndpoint
class SigTermCatcher:
def __init__(self, net):
self.net = net
signal.signal(signal.SIGTERM, self.stop_containernet)
signal.signal(signal.SIGINT, self.stop_containernet)
def stop_containernet(self, signum, frame):
self.net.stop()
time.sleep(2)
exit(1)
def _in_separate_thread(net):
net.start()
def setup_topology(net):
dc = net.addDatacenter("dc") # pylint: disable=invalid-name
# add the SONATA dummy gatekeeper to each DC
sdkg1 = SonataDummyGatekeeperEndpoint("0.0.0.0", 5000, deploy_sap=True)
sdkg1.connectDatacenter(dc)
# run the dummy gatekeeper (in another thread, don't block)
sdkg1.start()
def main():
net = DCNetwork(controller=RemoteController, monitor=True, enable_learning=True)
SigTermCatcher(net)
setup_topology(net)
sub_thread = threading.Thread(target=_in_separate_thread, args=(net,))
sub_thread.start()
while True:
time.sleep(120)
exit(2)
if __name__ == "__main__":
main()
|
Python
| 0.000995
|
@@ -997,22 +997,62 @@
ler,
- monitor=True,
+%0A monitor=True,%0A
ena
|
0efe12805e67fd08d7d1a63274faf993e2514b88
|
remove debug code
|
vagrant/tournament/tournament.py
|
vagrant/tournament/tournament.py
|
#!/usr/bin/env python
#
# tournament.py -- implementation of a Swiss-system tournament
#
# Allows recording of tied matches.
# Matches opponents of relative standings.
# Pairs players in unique matches.
#
# TODO: implement match byes
# TODO: implement pairing for odd number of players
# TODO: implement Opponent match win tie breaker algorithm
# TODO: implement tournament tracking
#
import psycopg2
import pprint
pp = pprint.PrettyPrinter(indent=4)
def connect():
"""Connect to the PostgreSQL database. Returns a database connection."""
return psycopg2.connect("dbname=tournament")
def deleteMatches():
"""Remove all the match records from the database."""
dbh = connect()
sth = dbh.cursor()
sth.execute("TRUNCATE TABLE matches")
dbh.commit()
dbh.close()
def deletePlayers():
"""Remove all the player records from the database."""
dbh = connect()
sth = dbh.cursor()
sth.execute("TRUNCATE TABLE players CASCADE")
dbh.commit()
dbh.close()
def countPlayers():
"""Returns the number of players currently registered."""
dbh = connect()
sth = dbh.cursor()
sth.execute("SELECT count(players) FROM players")
result = sth.fetchone()
dbh.commit()
dbh.close()
return result[0]
def registerPlayer(name):
"""Adds a player to the tournament database.
The database assigns a unique serial id number for the player. (This
should be handled by your SQL database schema, not in your Python code.)
Args:
name: the player's full name (need not be unique).
"""
dbh = connect()
sth = dbh.cursor()
query = "INSERT INTO players (name) VALUES (%s)"
values = [name]
sth.execute(query, values)
dbh.commit()
dbh.close()
def playerStandings():
"""Returns a list of the players and their win records, sorted by wins.
The first entry in the list should be the player in first place, or a player
tied for first place if there is currently a tie.
Returns:
A list of tuples, each of which contains (id, name, wins, matches):
id: the player's unique id (assigned by the database)
name: the player's full name (as registered)
wins: the number of matches the player has won
matches: the number of matches the player has played
"""
dbh = connect()
sth = dbh.cursor()
query = '''
SELECT * FROM standings
'''
sth.execute(query)
result = sth.fetchall()
dbh.commit()
dbh.close()
return result
def reportMatch(winner, challenger, tied=None):
"""Records the outcome of a single match between two players.
Args:
winner: the id number of the player who won
challenger: the id number of the player who lost
"""
dbh = connect()
sth = dbh.cursor()
query = "INSERT INTO matches (winner_id, challenger_id, tie) VALUES (%s, %s, %s)"
values = [winner, challenger, tied]
sth.execute(query, values)
dbh.commit()
dbh.close()
def getPlayerOpponents():
"""Returns list of opponents for all players
Returns:
A list of tuples, each of which contains (id, list)
id: player's unique id
list: list of opponent id
"""
dbh = connect()
sth = dbh.cursor()
query = '''
SELECT
opponents.id,
array_agg(challenger_id) AS challenger_id_list
FROM opponents
GROUP BY opponents.id
'''
sth.execute(query)
result = sth.fetchall()
dbh.commit()
dbh.close()
return result
def getStandingGroups():
"""Returns a list of standings grouped by win, tie, loss
Assuming standings are provided ordered by (win, match, tie), each standings
group contains players with equivalent standings
Returns:
A list of sets of tuples, each of which contains (id, name)
id: player's unique ID
name: player's name
"""
standings = playerStandings()
standings_groups = []
group = set()
# set initial standings
(win, match, tie) = standings[0][2:5]
for player in standings:
# test if player standings does not match current standings
if ((win, match, tie) != player[2:5]):
# append current player group to the standings group
standings_groups.append(group.copy())
# set new standings
(win, match, tie) = player[2:5]
# reset group
group.clear()
# add (player id, player name) to group of players
group.add(player[0:2])
# add last group to standings_groups
standings_groups.append(group.copy())
return standings_groups
def swissPairings():
"""Returns a list of pairs of players for the next round of a match.
Assuming that there are an even number of players registered, each player
appears exactly once in the pairings. Each player is paired with another
player with an equal or nearly-equal win record, that is, a player adjacent
to him or her in the standings.
Returns:
A list of tuples, each of which contains (id1, name1, id2, name2)
id1: the first player's unique id
name1: the first player's name
id2: the second player's unique id
name2: the second player's name
"""
# reduce opponents to a dictionary of player_id and the set of their
# previously played opponent_id
opponents = {}
for (id, cid_list) in getPlayerOpponents():
opponents[id] = set(cid_list)
standings_groups = getStandingGroups()
pending_players = set()
pending_players.update(set(standings_groups.pop(0)))
pairs = []
player = None
challenger = None
while len(pending_players) > 0:
player = pending_players.pop()
# if no more pending players add players from next group
if len(pending_players) == 0 and len(standings_groups) > 0:
pending_players.update(set(standings_groups.pop(0)))
challenger = pending_players.pop()
if len(pending_players) == 0 and len(standings_groups) > 0:
pending_players.update(set(standings_groups.pop(0)))
if challenger[0] in opponents[player[0]]:
new_challenger = pending_players.pop()
pending_players.add(challenger)
challenger = new_challenger
pairs.append((player[0], player[1], challenger[0], challenger[1]))
return pairs
|
Python
| 0.000492
|
@@ -399,59 +399,8 @@
pg2%0A
-import pprint%0A%0App = pprint.PrettyPrinter(indent=4)%0A
%0A%0Ade
|
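The commit strips a module-level `pprint` helper left over from debugging. A common alternative that avoids this kind of cleanup — sketched here without assuming anything about the tournament schema — is a level-gated logger, which can stay in the code permanently:

import logging

log = logging.getLogger(__name__)

def report_standings(rows):
    # Emitted only when the DEBUG level is enabled, so no cleanup needed later.
    log.debug("current standings: %r", rows)
    return rows

logging.basicConfig(level=logging.DEBUG)
report_standings([(1, "Alice", 2, 3)])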
d49997058c54bfeabe21a7284bdf3cf07c76075b
|
add doc
|
usr/sbin/local_fs_job_manager.py
|
usr/sbin/local_fs_job_manager.py
|
#!/usr/bin/env python
###############################################################################
# Copyright (c) 2015 Tencent Inc.
# Distributed under the MIT license
# (See accompanying file LICENSE or copy at http://opensource.org/licenses/MIT)
#
# Project: Cloud Image Migration Tool
# Filename: local_fs_job_manager.py
# Version: 2.0
# Author: Jamis Hoo
# E-mail: hoojamis@gmail.com
# Date: Sep 7, 2015
# Time: 14:29:44
# Description: derived job manager for local FS
###############################################################################
from base_job_manager import BaseJobManager
import os
class LocalFSJobManager(BaseJobManager):
mandatory_options = [
("local", "local.image_root_path"),
]
def __init__(self, config):
super(LocalFSJobManager, self).__init__(config)
@staticmethod
def check_config(config):
for section, option in LocalFSJobManager.mandatory_options:
if section not in config or option not in config[section]:
return "Error: Option %s.%s is required. " % (section, option)
if not os.path.isabs(config["local"]["local.image_root_path"]):
return "Error: Image root path %s is not absolute path. " % config["local"]["local.image_root_path"]
if not os.path.isdir(config["local"]["local.image_root_path"]):
return "Error: Image root path %s is not directory. " % config["local"]["local.image_root_path"]
# implementation of abstract method
def do(self):
image_root_path = self.config["local"]["local.image_root_path"]
for dirpath, dirs, files in os.walk(image_root_path, followlinks = True):
for filename in files:
full_name = os.path.join(dirpath, filename)
fileid = os.path.relpath(full_name, image_root_path)
self.submit(fileid, "file://%s" % full_name)
|
Python
| 0
|
@@ -451,58 +451,8 @@
:44%0A
- # Description: derived job manager for local FS%0A
####
@@ -624,16 +624,350 @@
anager):
+%0A %22%22%22%0A Derived class of BaseJobManager.%0A Traverse local files and submit.%0A%0A Attributes:%0A mandatory_options: Configuration options required by this class. This is%0A a list of tuples each of which contains two strings, section name and %0A property name, both of which are case-insensitive.%0A %22%22%22
%0A%0A ma
@@ -1081,32 +1081,87 @@
(self, config):%0A
+ %22%22%22%0A Initialize base class.%0A %22%22%22%0A
super(Lo
@@ -1201,17 +1201,16 @@
onfig)%0A%0A
-%0A
@sta
@@ -1241,32 +1241,349 @@
config(config):%0A
+ %22%22%22%0A Check whether all required options are provided. %0A Also check the validity of some options.%0A%0A Args:%0A config: configuration dict%0A%0A Returns:%0A Returns string containing error message if there are some errors.%0A Returns none otherwise.%0A %22%22%22%0A
for sect
@@ -2162,11 +2162,43 @@
-# i
+def do(self):%0A %22%22%22%0A I
mple
@@ -2225,34 +2225,153 @@
t method
+.
%0A
-def do(self):
+ Traverse a directory and submit each file, with relative path as its%0A file id and absolute path as its src.%0A %22%22%22%0A
%0A
|
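The diff adds Google-style docstrings (summary line, then `Attributes:`, `Args:`, `Returns:` sections). A minimal skeleton of that convention, independent of the migration-tool code:

class JobManager(object):
    """Traverse a source and submit work items.

    Attributes:
        mandatory_options: Options required by this class, as a list of
            (section, option) string tuples.
    """

    mandatory_options = [("local", "local.image_root_path")]

    def check_config(self, config):
        """Validate the configuration.

        Args:
            config: Configuration dict.

        Returns:
            An error message string if something is wrong, None otherwise.
        """
        for section, option in self.mandatory_options:
            if section not in config or option not in config[section]:
                return "Error: Option %s.%s is required." % (section, option)
        return None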
4be23d19f25c0b6f0231339608301558d477a0f6
|
add program details
|
vagrant/tournament/tournament.py
|
vagrant/tournament/tournament.py
|
#!/usr/bin/env python
#
# tournament.py -- implementation of a Swiss-system tournament
#
import psycopg2
import pprint
pp = pprint.PrettyPrinter(indent=4)
def connect():
"""Connect to the PostgreSQL database. Returns a database connection."""
return psycopg2.connect("dbname=tournament")
def deleteMatches():
"""Remove all the match records from the database."""
dbh = connect()
sth = dbh.cursor()
sth.execute("TRUNCATE TABLE matches")
dbh.commit()
dbh.close()
def deletePlayers():
"""Remove all the player records from the database."""
dbh = connect()
sth = dbh.cursor()
sth.execute("TRUNCATE TABLE players CASCADE")
dbh.commit()
dbh.close()
def countPlayers():
"""Returns the number of players currently registered."""
dbh = connect()
sth = dbh.cursor()
sth.execute("SELECT count(players) FROM players")
result = sth.fetchone()
dbh.commit()
dbh.close()
return result[0]
def registerPlayer(name):
"""Adds a player to the tournament database.
The database assigns a unique serial id number for the player. (This
should be handled by your SQL database schema, not in your Python code.)
Args:
name: the player's full name (need not be unique).
"""
dbh = connect()
sth = dbh.cursor()
query = "INSERT INTO players (name) VALUES (%s)"
values = [name]
sth.execute(query, values)
dbh.commit()
dbh.close()
def playerStandings():
"""Returns a list of the players and their win records, sorted by wins.
The first entry in the list should be the player in first place, or a player
tied for first place if there is currently a tie.
Returns:
A list of tuples, each of which contains (id, name, wins, matches):
id: the player's unique id (assigned by the database)
name: the player's full name (as registered)
wins: the number of matches the player has won
matches: the number of matches the player has played
"""
dbh = connect()
sth = dbh.cursor()
query = '''
SELECT * FROM standings
'''
sth.execute(query)
result = sth.fetchall()
dbh.commit()
dbh.close()
return result
def reportMatch(winner, challenger, tied=None):
"""Records the outcome of a single match between two players.
Args:
winner: the id number of the player who won
challenger: the id number of the player who lost
"""
dbh = connect()
sth = dbh.cursor()
query = "INSERT INTO matches (winner_id, challenger_id, tie) VALUES (%s, %s, %s)"
values = [winner, challenger, tied]
sth.execute(query, values)
dbh.commit()
dbh.close()
def getPlayerOpponents():
"""Returns list of opponents for all players
Returns:
A list of tuples, each of which contains (id, list)
id: player's unique id
list: list of opponent id
"""
dbh = connect()
sth = dbh.cursor()
query = '''
SELECT
opponents.id,
array_agg(challenger_id) AS challenger_id_list
FROM opponents
GROUP BY opponents.id
'''
sth.execute(query)
result = sth.fetchall()
dbh.commit()
dbh.close()
return result
def getStandingGroups():
"""Returns a list of standings grouped by win, tie, loss
Assuming standings are provided ordered by (win, match, tie), each standings
group contains players with equivalent standings
Returns:
A list of sets of tuples, each of which contains (id, name)
id: player's unique ID
name: player's name
"""
standings = playerStandings()
standings_groups = []
group = set()
# set initial standings
(win, match, tie) = standings[0][2:5]
for player in standings:
# test if player standings does not match current standings
if ((win, match, tie) != player[2:5]):
# append current player group to the standings group
standings_groups.append(group.copy())
# set new standings
(win, match, tie) = player[2:5]
# reset group
group.clear()
# add (player id, player name) to group of players
group.add(player[0:2])
# add last group to standings_groups
standings_groups.append(group.copy())
return standings_groups
def swissPairings():
"""Returns a list of pairs of players for the next round of a match.
Assuming that there are an even number of players registered, each player
appears exactly once in the pairings. Each player is paired with another
player with an equal or nearly-equal win record, that is, a player adjacent
to him or her in the standings.
Returns:
A list of tuples, each of which contains (id1, name1, id2, name2)
id1: the first player's unique id
name1: the first player's name
id2: the second player's unique id
name2: the second player's name
"""
# reduce opponents to a dictionary of player_id and the set of their
# previously played opponent_id
opponents = {}
for (id, cid_list) in getPlayerOpponents():
opponents[id] = set(cid_list)
standings_groups = getStandingGroups()
pending_players = set()
pending_players.update(set(standings_groups.pop(0)))
pairs = []
player = None
challenger = None
while len(pending_players) > 0:
player = pending_players.pop()
# if no more pending players add players from next group
if len(pending_players) == 0 and len(standings_groups) > 0:
pending_players.update(set(standings_groups.pop(0)))
challenger = pending_players.pop()
if len(pending_players) == 0 and len(standings_groups) > 0:
pending_players.update(set(standings_groups.pop(0)))
if challenger[0] in opponents[player[0]]:
new_challenger = pending_players.pop()
pending_players.add(challenger)
challenger = new_challenger
pairs.append((player[0], player[1], challenger[0], challenger[1]))
return pairs
|
Python
| 0
|
@@ -81,16 +81,312 @@
nament%0A#
+%0A# Allows recording of tied matches.%0A# Matches opponents of relative standings.%0A# Pairs players in unique matches.%0A#%0A# TODO: implement match byes%0A# TODO: implement pairing for odd number of players%0A# TODO: implement Opponent match win tie breaker algorithm%0A# TODO: implement tournament tracking%0A#
%0A%0Aimport
|
12d22221df5786caee510cc167c9ef29f9155488
|
Correct name of output file
|
var/www/cgi-bin/abundanceConf.py
|
var/www/cgi-bin/abundanceConf.py
|
#!/home/daniel/Software/anaconda3/bin/python
# Import modules for CGI handling
import cgi, cgitb
from abundanceDriver import abundancedriver
from emailSender import sendEmail
def cgi2dict(form):
"""Convert the form from cgi.FieldStorage to a python dictionary"""
params = {}
for key in form.keys():
params[key] = form[key].value
return params
def abundance(form):
"""Create the configuration file for running the abundance driver"""
# Make the StarMe_ares.cfg
fout = '/tmp/linelist.moog {Teff} {logg} {feh} {vt}'.format(**form)
with open('/tmp/StarMe_abundance.cfg', 'w') as f:
f.writelines(fout+'\n')
abundancedriver('/tmp/StarMe_abundance.cfg')
if __name__ == '__main__':
# Enable debugging
import os
os.system('touch /tmp/test1')
cgitb.enable()
form = cgi.FieldStorage()
# Run ARES for one or several line lists
formDict = cgi2dict(form)
abundance(formDict)
sendEmail(to=formDict['email'], driver='abundances', data='/tmp/abundances.dat')
# Show the finished html page
print "Content-type: text/html\n\n"
with open('../html/finish.html', 'r') as lines:
for line in lines:
print line
|
Python
| 0.000365
|
@@ -755,56 +755,8 @@
ing%0A
- import os%0A os.system('touch /tmp/test1')%0A
@@ -974,20 +974,22 @@
mp/abund
-ance
+result
s.dat')%0A
|
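The config line in this script relies on `str.format(**form)` unpacking a dict into named placeholders. A quick illustration with made-up stellar parameters:

form = {"Teff": 5777, "logg": 4.44, "feh": 0.0, "vt": 1.0}
line = "/tmp/linelist.moog {Teff} {logg} {feh} {vt}".format(**form)
print(line)  # /tmp/linelist.moog 5777 4.44 0.0 1.0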
ced3fd5fc8945fbb0ac79b3e90833173b1c72e93
|
disable pylint not-callable warning
|
pages/tasks.py
|
pages/tasks.py
|
from celery import task
from pages.models import UploadedImage
from pages.settings import IMG_PATH
@task()
def upload_to_s3(img, account, tags, filename):
img_obj = UploadedImage(
img=img,
account=account,
tags=tags
)
img_obj.save()
print filename
full_path = IMG_PATH + account.slug + '/' + filename
UploadedImage.objects.filter(
img=full_path).order_by('-created_at')[0].delete()
|
Python
| 0.00012
|
@@ -1,12 +1,13 @@
+%0A
from celery
@@ -97,15 +97,78 @@
ATH%0A
-@task()
+%0A# XXX - not callable on pylint!%0A@task()#pylint: disable=not-callable
%0Adef
|
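The fix silences pylint's not-callable check at the decoration site with an inline pragma. A self-contained sketch of the pattern — the `task` factory below is a hypothetical stand-in for celery's decorator, which pylint can misread as not callable:

from functools import wraps

def task():  # stand-in for celery's @task() decorator factory
    def deco(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapper
    return deco

@task()  # pylint: disable=not-callable
def upload_to_s3(filename):
    return filename

print(upload_to_s3("cat.png"))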
438d21cc81355c3cf0768d8d8a84834252e5d56d
|
Add fix to get rid of random file closings and CRC-32 errors
|
parse/utils.py
|
parse/utils.py
|
import bz2
import zipfile
import tarfile
import re
from chemtools import fileparser
from project.utils import StringIO
def parse_file_list(files):
for f in files:
if f.name.endswith(".zip"):
with zipfile.ZipFile(f, "r") as zfile:
for name in [x for x in zfile.namelist() if not x.endswith("/")]:
yield zfile.open(name)
elif f.name.endswith(".tar.bz2") or f.name.endswith(".tar.gz"):
end = f.name.split(".")[-1]
with tarfile.open(fileobj=f, mode='r:' + end) as tfile:
for name in tfile.getnames():
if tfile.getmember(name).isfile():
yield tfile.extractfile(name)
else:
yield f
def find_sets(files):
logs = []
datasets = []
for f in files:
if f.name.endswith(".log"):
logs.append(f)
else:
datasets.append(f)
logsets = {}
for f in logs:
nums = re.findall(r'n(\d+)', f.name)
if not nums:
continue
num = nums[-1]
name = f.name.replace(".log", '').replace("n%s" % num, '')
if name in logsets.keys():
logsets[name].append((num, f))
else:
logsets[name] = [(num, f)]
return logsets, datasets
def convert_logs(logsets):
converted = []
for key in logsets:
nvals = []
homovals = []
lumovals = []
gapvals = []
for num, log in logsets[key]:
parser = fileparser.Log(log)
nvals.append(num)
homovals.append(parser["HOMO"])
lumovals.append(parser["LUMO"])
gapvals.append(parser["BandGap"])
f = StringIO(name=key)
f.write(', '.join(nvals) + '\n')
f.write(', '.join(homovals) + '\n')
f.write(', '.join(lumovals) + '\n')
f.write(', '.join(gapvals) + '\n')
f.seek(0)
converted.append(f)
return converted
|
Python
| 0
|
@@ -355,30 +355,96 @@
-yield zfile.open(name)
+newfile = StringIO(zfile.open(name).read(), name=name)%0A yield newfile
%0A
@@ -745,22 +745,35 @@
-yield
+newfile = StringIO(
tfile.ex
@@ -783,24 +783,81 @@
ctfile(name)
+.read(), name=name)%0A yield newfile
%0A els
@@ -880,16 +880,17 @@
ield f%0A%0A
+%0A
def find
|
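The fix reads each archive member fully into an in-memory buffer before yielding it, so the yielded object no longer depends on the soon-closed archive handle. The same idea using only the standard library (`io.BytesIO` instead of the project's custom `StringIO`):

import io
import zipfile

def iter_zip_members(path):
    """Yield zip members as independent in-memory files.

    Each member is read eagerly, so callers may consume the buffers after
    the ZipFile is closed (avoiding 'file closed' and CRC-check errors).
    """
    with zipfile.ZipFile(path) as zf:
        for name in zf.namelist():
            if name.endswith("/"):
                continue  # skip directory entries
            buf = io.BytesIO(zf.read(name))
            buf.name = name  # BytesIO accepts ad-hoc attributes in CPython
            yield buf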
1742665f867233b32a49bfb47d354a3d7b9c034a
|
Add uninstalled versions of lcmtypes to the PYTHONPATH. This way, #2250 doesn't bite us as we edit the code.
|
drake/examples/Cars/steering_command_driver.py
|
drake/examples/Cars/steering_command_driver.py
|
#!/usr/bin/env python
"""Publishes steering commands over LCM.
"""
# Adapted from drake/examples/Cars/SteeringCommandDriver.py.
import argparse
import copy
import math
import os
import sys
import pygame
THIS_FILE = os.path.abspath(__file__)
THIS_DIR = os.path.dirname(THIS_FILE)
DRAKE_DIR = os.path.dirname(os.path.dirname(THIS_DIR))
DRAKE_DIST_DIR = os.path.dirname(DRAKE_DIR)
DRAKE_PYTHON_DIR = os.path.join(DRAKE_DIST_DIR, "build/lib/python2.7")
sys.path.extend([os.path.join(DRAKE_PYTHON_DIR, "dist-packages"),
os.path.join(DRAKE_PYTHON_DIR, "site-packages")])
import lcm
from drake.lcmt_driving_command_t import lcmt_driving_command_t as lcm_msg
STEERING_AXIS = 0
ACCEL_AXIS = 1
BRAKE_AXIS = 2
MAX_STEERING_ANGLE = math.radians(45)
THROTTLE_MULTIPLIER = 1.0
BRAKE_MULTIPLIER = 1.0
STEERING_BUTTON_STEP_FACTOR = 100
class KeyboardEventProcessor:
def __init__(self):
pygame.event.set_allowed(None)
pygame.event.set_allowed([pygame.QUIT, pygame.KEYUP, pygame.KEYDOWN])
pygame.key.set_repeat(100, 10)
def processEvent(self, event, last_msg):
new_msg = copy.copy(last_msg)
if event.key == pygame.K_UP:
new_msg.throttle = (
(event.type == pygame.KEYDOWN) * THROTTLE_MULTIPLIER)
elif event.key == pygame.K_DOWN:
new_msg.brake = (
(event.type == pygame.KEYDOWN) * BRAKE_MULTIPLIER)
elif event.key == pygame.K_RIGHT:
new_msg.steering_angle = min(
MAX_STEERING_ANGLE,
(MAX_STEERING_ANGLE/STEERING_BUTTON_STEP_FACTOR) *
(event.type == pygame.KEYDOWN) + last_msg.steering_angle)
elif event.key == pygame.K_LEFT:
new_msg.steering_angle = max(
MAX_STEERING_ANGLE * -1,
(MAX_STEERING_ANGLE / STEERING_BUTTON_STEP_FACTOR) * -1 *
(event.type == pygame.KEYDOWN) + last_msg.steering_angle)
return new_msg
class JoystickEventProcessor:
def __init__(self, joy_name):
pygame.event.set_allowed(None)
pygame.event.set_allowed([pygame.QUIT, pygame.JOYAXISMOTION])
if pygame.joystick.get_count() == 0:
pygame.quit()
sys.exit('ERROR: No joysticks detected')
joysticks = [pygame.joystick.Joystick(x)
for x in xrange(pygame.joystick.get_count())]
self.joystick = None
for joystick in joysticks:
if joystick.get_name() == joy_name:
self.joystick = joystick
break
if self.joystick is None:
pygame.quit()
sys.exit('ERROR: Joystick with system name "%s" not detected' %
(joy_name))
self.joystick.init()
def processEvent(self, event, last_msg):
new_msg = copy.copy(last_msg)
if event.axis == STEERING_AXIS:
new_msg.steering_angle = -1 * event.value * MAX_STEERING_ANGLE
elif event.axis == ACCEL_AXIS:
new_msg.throttle = -0.5 * event.value + 0.5
elif event.axis == BRAKE_AXIS:
new_msg.brake = -0.5 * event.value + 0.5
return new_msg
class SteeringCommandPublisher:
def __init__(self, input_method, lcm_tag, joy_name):
print 'Initializing...'
pygame.init()
self.screen = pygame.display.set_mode((300, 70))
pygame.display.set_caption('Steering Command Driver')
self.font = pygame.font.SysFont('Courier', 20)
if input_method == 'keyboard':
self.event_processor = KeyboardEventProcessor()
else:
self.event_processor = JoystickEventProcessor(joy_name)
self.last_msg = lcm_msg()
self.lc = lcm.LCM()
self.lcm_tag = lcm_tag
print 'Ready'
def printLCMValues(self):
self.screen.fill(5)
surface = self.font.render(
'Steering Angle: %f' % (self.last_msg.steering_angle),
True, (250, 250, 250))
self.screen.blit(surface, (2, 0))
surface = self.font.render(
'Throttle Value: %f' % (self.last_msg.throttle),
True, (250, 250, 250))
self.screen.blit(surface, (2, 22))
surface = self.font.render(
'Brake Value : %f' % (self.last_msg.brake),
True, (250, 250, 250))
self.screen.blit(surface, (2, 44))
pygame.display.flip()
def start(self):
self.printLCMValues()
while True:
event = pygame.event.wait()
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
else:
self.last_msg = self.event_processor.processEvent(
event, self.last_msg)
self.lc.publish(self.lcm_tag, self.last_msg.encode())
self.printLCMValues()
def main():
parser = argparse.ArgumentParser(
description=__doc__)
parser.add_argument(
'--input_method', choices=['joystick', 'keyboard'], default='keyboard',
help='the input method to use for publishing LCM steering commands'
' (default keyboard)')
parser.add_argument(
'--lcm_tag', default='DRIVING_COMMAND',
help='tag to publish the LCM messages with (default STEERING_DRIVER)')
parser.add_argument(
'--joy_name', default='Driving Force GT',
help='system name of the joystick (default Driving Force GT)')
args = parser.parse_args()
publisher = SteeringCommandPublisher(
args.input_method, args.lcm_tag, args.joy_name)
publisher.start()
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -375,16 +375,95 @@
KE_DIR)%0A
+DRAKE_LCMTYPES_DIR = os.path.join(%0A DRAKE_DIR, %22pod-build/lcmgen/lcmtypes%22)%0A
DRAKE_PY
@@ -542,16 +542,92 @@
extend(%5B
+%0A DRAKE_LCMTYPES_DIR, # First (to pick up local edits to messages).%0A
os.path.
@@ -667,29 +667,16 @@
ages%22),%0A
-
os.p
|
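The change puts a locally built lcmtypes directory onto `sys.path` ahead of installed copies, so uncommitted message edits win over stale installs. The general pattern, with the build-tree layout taken from the record's diff:

import os
import sys

THIS_DIR = os.path.dirname(os.path.abspath(__file__))  # works when run as a script
LOCAL_LCMTYPES = os.path.join(THIS_DIR, "pod-build/lcmgen/lcmtypes")

# Entries earlier in sys.path shadow later ones, so local edits are found first.
sys.path.insert(0, LOCAL_LCMTYPES)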
56181811197ad7e7b2d2d92f39f118ae0195afe5
|
use dictionary comprehension instead of explicitly building dict iteratively
|
web_frontend/osmaxx/countries/utils.py
|
web_frontend/osmaxx/countries/utils.py
|
import os
from django.contrib.gis.geos import MultiPolygon, Polygon, GEOSGeometry
from osmaxx.countries._settings import POLYFILE_LOCATION
POLYFILE_ENDING = '.poly'
def get_polyfile_name_to_file_mapping():
polyfile_mapping = {}
for possible_polyfile in os.listdir(POLYFILE_LOCATION):
if possible_polyfile.endswith(POLYFILE_ENDING):
name, _ = possible_polyfile.split(POLYFILE_ENDING)
polyfile_mapping[name] = possible_polyfile
return polyfile_mapping
def polyfile_to_geos_geometry(relative_polygon_file, simplify_tolerance=None):
with open(os.path.join(POLYFILE_LOCATION, relative_polygon_file)) as poly_file:
poly = parse_poly(poly_file.readlines())
if simplify_tolerance:
poly = poly.simplify(tolerance=simplify_tolerance, preserve_topology=True)
# simplifying can lead to a polygon. Assure, it stays a multipolygon.
if poly and isinstance(poly, Polygon):
poly = MultiPolygon(poly)
return GEOSGeometry(poly)
def parse_poly_string(poly_string):
return parse_poly(poly_string.splitlines())
def parse_poly(lines):
"""
Parse an Osmosis polygon filter file.
Accept a sequence of lines from a polygon file, return a django.contrib.gis.geos.MultiPolygon object.
Adapted Python 2 code from
http://wiki.openstreetmap.org/wiki/Osmosis/Polygon_Filter_File_Python_Parsing_Geodjango for Python 3
"""
in_ring = False
coords = []
for (index, line) in enumerate(lines):
if index == 0:
# first line is junk.
continue
elif in_ring and line.strip() == 'END':
# we are at the end of a ring, perhaps with more to come.
in_ring = False
elif in_ring:
# we are in a ring and picking up new coordinates.
ring.append([val for val in map(float, line.split())]) # noqa: this is too complicated for flake to understand that ring will be defined if we reach this point
elif not in_ring and line.strip() == 'END':
# we are at the end of the whole polygon.
break
elif not in_ring and line.startswith('!'):
# we are at the start of a polygon part hole.
coords[-1].append([])
ring = coords[-1][-1]
in_ring = True
elif not in_ring:
# we are at the start of a polygon part.
coords.append([[]])
ring = coords[-1][0] # noqa: it is in fact used in the next iteration.
in_ring = True
return MultiPolygon(*(Polygon(*polycoords) for polycoords in coords))
|
Python
| 0.000001
|
@@ -212,58 +212,19 @@
-polyfile_mapping = %7B%7D%0A for possible_polyfile in
+filenames =
os.
@@ -253,111 +253,215 @@
ION)
-:
%0A
- if possible_polyfile.endswith(POLYFILE_ENDING):%0A name, _ = possible_polyfile.spl
+return %7B%0A _extract_country_name_from_polyfile_name(filename): filename%0A for filename in filenames if _is_polyfile(filename)%0A %7D%0A%0A%0Adef _is_polyfile(filename):%0A return filename.endsw
it
+h
(POL
@@ -478,90 +478,127 @@
NG)%0A
- polyfile_mapping%5Bname%5D = possible_polyfile%0A return polyfile_mapping
+%0A%0Adef _extract_country_name_from_polyfile_name(filename):%0A name, _ = filename.split(POLYFILE_ENDING)%0A return name
%0A%0A%0Ad
|
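The refactor replaces the accumulate-into-a-dict loop with a comprehension plus two small named helpers. The same shape on toy data:

POLYFILE_ENDING = ".poly"

def _is_polyfile(filename):
    return filename.endswith(POLYFILE_ENDING)

def _country_name(filename):
    name, _ = filename.split(POLYFILE_ENDING)
    return name

filenames = ["france.poly", "README.txt", "ghana.poly"]
mapping = {_country_name(f): f for f in filenames if _is_polyfile(f)}
print(mapping)  # {'france': 'france.poly', 'ghana': 'ghana.poly'}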
08b5ccc5ff94ced8d582d1f023901d2ea25aca53
|
Disable timeout on reindex
|
udata/core/search/commands.py
|
udata/core/search/commands.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from udata.commands import manager
from udata.search import es, adapter_catalog
log = logging.getLogger(__name__)
@manager.option('-t', '--type', dest='doc_type', default=None, help='Only reindex a given type')
def reindex(doc_type=None):
'''Reindex models'''
for model, adapter in adapter_catalog.items():
if not doc_type or doc_type == adapter.doc_type():
print 'Reindexing {0} objects'.format(model.__name__)
if es.indices.exists_type(index=es.index_name, doc_type=adapter.doc_type()):
es.indices.delete_mapping(index=es.index_name, doc_type=adapter.doc_type())
es.indices.put_mapping(index=es.index_name, doc_type=adapter.doc_type(), body=adapter.mapping)
qs = model.objects.visible() if hasattr(model.objects, 'visible') else model.objects
for obj in qs:
es.index(index=es.index_name, doc_type=adapter.doc_type(), id=obj.id, body=adapter.serialize(obj))
es.indices.refresh(index=es.index_name)
|
Python
| 0.000001
|
@@ -927,16 +927,31 @@
bj in qs
+.timeout(False)
:%0A
|
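The one-line fix chains `.timeout(False)` onto the queryset so MongoDB does not reap the server-side cursor while a long reindex iterates over it. A toy queryset showing the chaining shape — mongoengine's real implementation sets `no_cursor_timeout` on the underlying cursor; this class is only an illustration:

class ToyQuerySet(object):
    """Minimal stand-in for a mongoengine queryset."""
    def __init__(self, items):
        self.items = list(items)
        self.cursor_timeout = True

    def timeout(self, enabled=True):
        self.cursor_timeout = enabled  # timeout(False) keeps the cursor alive
        return self                    # chainable, like the real API

    def __iter__(self):
        return iter(self.items)

for obj in ToyQuerySet(["doc1", "doc2"]).timeout(False):
    print(obj)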
a54ab99145cdf986705d02877bf8ff13834e3f70
|
Use abspath for virtualenv
|
km3pipe/utils/qrunprocessor.py
|
km3pipe/utils/qrunprocessor.py
|
#!/usr/bin/env python
# Filename: qrunprocessor.py
# Author: Tamas Gal <tgal@km3net.de>
"""
=========================================
Create a summary for a given list of runs
=========================================
Use this batch runner to process a given list of run numbers with a
script that takes a `-o` option to create a summary file, named after
the processed file plus a given suffix.
The actual call will look like this, with a bit of copy safeness:
/abs/path/to/SCRIPT run.root -o /abs/path/to/OUTPUT_PATH+SUFFIX
Before constructing the job scripts, the ``OUTPUT_PATH`` will be traversed
to find files which have already been converted to avoid multiple conversions.
.. code-block:: console
Usage:
qrunprocessor [options] DET_ID RUN_LIST OUTPUT_PATH SCRIPT
qrunprocessor (-h | --help)
Options:
DET_ID Detector ID (e.g. 29).
RUN_LIST Path to the file containing the space separated run IDs.
OUTPUT_PATH Path to store the individual summary files.
SCRIPT The script to fire up.
-s SUFFIX The suffix, appended by SCRIPT [default: .summary.h5].
-n N_FILES Number of files to process per job [default: 10].
-e ET Estimated walltime per file in minutes [default: 15].
-f FSIZE Estimated filesystem size for a job [default: 12G].
-m VMEM Estimated vmem for a job [default: 8G].
-j JOBNAME The name of the submitted jobs [default: qrunprocessor].
-l LOG_PATH Path of the job log files [default: qlogs].
-v PYTHONVENV Path to the Python virtual env.
-c CLUSTER Cluster to run on (in2p3, woody, ...) [default: in2p3].
-q Dryrun: don't submit jobs, just print the job script.
-h --help Show this screen.
"""
__author__ = "Tamas Gal"
__email__ = "tgal@km3net.de"
__version__ = "1.0"
def main():
from docopt import docopt
args = docopt(__doc__, version=__version__)
from glob import glob
import os
from os.path import basename, join, abspath
import pathlib
import time
import km3pipe as kp
from km3pipe.tools import chunks, iexists
try:
from tqdm import tqdm
except ImportError:
def tqdm(x):
return x
cprint = kp.logger.get_printer('qrunprocessor')
log = kp.logger.get_logger('qrunprocessor')
RUN_LIST = os.path.abspath(args['RUN_LIST'])
OUTPUT_PATH = os.path.abspath(args['OUTPUT_PATH'])
SCRIPT = os.path.abspath(args['SCRIPT'])
SUFFIX = args['-s']
DET_ID = int(args['DET_ID'])
ET_PER_FILE = int(args['-e']) * 60 # [s]
FILES_PER_JOB = int(args['-n'])
FSIZE = args['-f']
VMEM = args['-m']
LOG_PATH = os.path.abspath(args['-l'])
JOB_NAME = args['-j']
DRYRUN = args['-q']
PYTHONVENV = args['-v']
CLUSTER = args['-c']
pathlib.Path(OUTPUT_PATH).mkdir(parents=True, exist_ok=True)
with open(RUN_LIST, 'r') as fobj:
run_numbers = [int(run) for run in fobj.read().split()]
irods_files = []
for run in run_numbers:
irods_path = kp.tools.irods_filepath(DET_ID, run)
irods_files.append(irods_path)
processed_files = [
basename(f) for f in glob(join(OUTPUT_PATH, '*{}'.format(SUFFIX)))
]
rem_files = []
for irods_file in irods_files:
if basename(irods_file) + SUFFIX not in processed_files:
rem_files.append(irods_file)
cprint("Checking if files are accessible on iRODS")
missing_files_on_irods = 0
for rem_file in tqdm(rem_files):
if not iexists(rem_file):
log.error(
"Skipping file, since not found on iRODS: {}".format(rem_file)
)
rem_files.remove(rem_file)
missing_files_on_irods += 1
cprint(
"{} runs in total, {} already processed ({} missing on iRODS).".format(
len(irods_files), len(processed_files), missing_files_on_irods
)
)
cprint("Proceeding with the remaining {} files.".format(len(rem_files)))
s = kp.shell.Script()
for job_id, file_chunk in enumerate(chunks(rem_files, FILES_PER_JOB)):
n_files = len(file_chunk)
s.add("echo Creating run summary for {} files".format(n_files))
s.add("cd $TMPDIR; mkdir -p $USER; cd $USER")
if PYTHONVENV is not None:
s.add('. {}/bin/activate'.format(PYTHONVENV))
s.add("echo")
for ipath in file_chunk:
fname = basename(ipath)
s.separator(' ')
s.separator('=')
s.echo("Processing {}:".format(fname))
s.add('pwd')
s.iget(ipath)
s.add('ls -al {}'.format(fname))
s.add('km3pipe --version')
s.add('KPrintTree -f {}'.format(fname))
out_fname = fname + SUFFIX
out_fpath = join(OUTPUT_PATH, out_fname)
tmp_fname = out_fname + '.copying'
tmp_fpath = join(OUTPUT_PATH, tmp_fname)
s.add("{} {} -o {}".format(SCRIPT, fname, out_fname))
s.cp(out_fname, tmp_fpath)
s.add("rm {}".format(out_fname))
s.mv(tmp_fpath, out_fpath)
s.echo("File '{}' processed.".format(fname))
s.separator('-')
walltime = time.strftime(
'%H:%M:%S', time.gmtime(ET_PER_FILE * n_files)
)
kp.shell.qsub(
s,
'{}_{}'.format(JOB_NAME, job_id),
walltime=walltime,
fsize=FSIZE,
vmem=VMEM,
log_path=LOG_PATH,
irods=True,
cluster=CLUSTER,
dryrun=DRYRUN
)
if DRYRUN:
break
s.clear()
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -2860,16 +2860,32 @@
NVENV =
+os.path.abspath(
args%5B'-v
@@ -2886,16 +2886,17 @@
gs%5B'-v'%5D
+)
%0A CLU
|
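The patch wraps the `-v` argument in `os.path.abspath` so a relative virtualenv path survives the later `cd $TMPDIR` in the job script. Note that docopt yields None when an option is absent and `abspath(None)` raises TypeError, so a guarded version of the same idea looks like:

import os

args = {"-v": "my-venv"}  # e.g. parsed by docopt; the value may be None if omitted

PYTHONVENV = os.path.abspath(args["-v"]) if args["-v"] else None
print(PYTHONVENV)  # absolute path, safe to use after changing directories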
a7fc707a9bf34547fca3c4cc70f8b834d0e1f9ac
|
Fix docstring formatting (blocks doc build)
|
numpy/testing/decorators.py
|
numpy/testing/decorators.py
|
"""Decorators for labeling test objects
Decorators that merely return a modified version of the original
function object are straightforward. Decorators that return a new
function object need to use
nose.tools.make_decorator(original_function)(decorator) in returning
the decorator, in order to preserve metadata such as function name,
setup and teardown functions and so on - see nose.tools for more
information.
"""
def slow(t):
"""Labels a test as 'slow'.
The exact definition of a slow test is obviously both subjective and
hardware-dependent, but in general any individual test that requires more
than a second or two should be labeled as slow (the whole suite consists of
thousands of tests, so even a second is significant)."""
t.slow = True
return t
def setastest(tf=True):
''' Signals to nose that this function is or is not a test
Parameters
----------
tf : bool
If True specifies this is a test, not a test otherwise
e.g
>>> from numpy.testing.decorators import setastest
>>> @setastest(False)
... def func_with_test_in_name(arg1, arg2): pass
...
>>>
This decorator cannot use the nose namespace, because it can be
called from a non-test module. See also istest and nottest in
nose.tools
'''
def set_test(t):
t.__test__ = tf
return t
return set_test
def skipif(skip_condition, msg=None):
''' Make function raise SkipTest exception if skip_condition is true
Parameters
    ----------
skip_condition : bool or callable.
Flag to determine whether to skip test. If the condition is a
callable, it is used at runtime to dynamically make the decision. This
is useful for tests that may require costly imports, to delay the cost
until the test suite is actually executed.
msg : string
Message to give on raising a SkipTest exception
Returns
-------
decorator : function
Decorator, which, when applied to a function, causes SkipTest
to be raised when the skip_condition was True, and the function
to be called normally otherwise.
Notes
-----
You will see from the code that we had to further decorate the
decorator with the nose.tools.make_decorator function in order to
transmit function name, and various other metadata.
'''
def skip_decorator(f):
# Local import to avoid a hard nose dependency and only incur the
# import time overhead at actual test-time.
import nose
# Allow for both boolean or callable skip conditions.
if callable(skip_condition):
skip_val = lambda : skip_condition()
else:
skip_val = lambda : skip_condition
def get_msg(func,msg=None):
"""Skip message with information about function being skipped."""
if msg is None:
out = 'Test skipped due to test condition'
else:
out = '\n'+msg
return "Skipping test: %s%s" % (func.__name__,out)
# We need to define *two* skippers because Python doesn't allow both
# return with value and yield inside the same function.
def skipper_func(*args, **kwargs):
"""Skipper for normal test functions."""
if skip_val():
raise nose.SkipTest(get_msg(f,msg))
else:
return f(*args, **kwargs)
def skipper_gen(*args, **kwargs):
"""Skipper for test generators."""
if skip_val():
raise nose.SkipTest(get_msg(f,msg))
else:
for x in f(*args, **kwargs):
yield x
# Choose the right skipper to use when building the actual decorator.
if nose.util.isgenerator(f):
skipper = skipper_gen
else:
skipper = skipper_func
return nose.tools.make_decorator(f)(skipper)
return skip_decorator
def knownfailureif(fail_condition, msg=None):
''' Make function raise KnownFailureTest exception if fail_condition is true
Parameters
---------
fail_condition : bool or callable.
Flag to determine whether to mark test as known failure (True)
or not (False). If the condition is a callable, it is used at
runtime to dynamically make the decision. This is useful for
tests that may require costly imports, to delay the cost
until the test suite is actually executed.
msg : string
Message to give on raising a KnownFailureTest exception
Returns
-------
decorator : function
Decorator, which, when applied to a function, causes SkipTest
to be raised when the skip_condition was True, and the function
to be called normally otherwise.
Notes
-----
You will see from the code that we had to further decorate the
decorator with the nose.tools.make_decorator function in order to
transmit function name, and various other metadata.
'''
if msg is None:
msg = 'Test skipped due to known failure'
# Allow for both boolean or callable known failure conditions.
if callable(fail_condition):
fail_val = lambda : fail_condition()
else:
fail_val = lambda : fail_condition
def knownfail_decorator(f):
# Local import to avoid a hard nose dependency and only incur the
# import time overhead at actual test-time.
import nose
from noseclasses import KnownFailureTest
def knownfailer(*args, **kwargs):
if fail_val():
raise KnownFailureTest, msg
else:
return f(*args, **kwargs)
return nose.tools.make_decorator(f)(knownfailer)
return knownfail_decorator
|
Python
| 0.000156
|
@@ -4148,24 +4148,25 @@
---------
+-
%0A fail_co
|
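The one-character fix lengthens the `Parameters` underline to ten dashes; numpydoc/Sphinx only recognize a section heading when its underline is at least as long as the title, which is why the short underline blocked the doc build. The well-formed shape:

def skipif_example(condition, msg=None):
    """Raise SkipTest when condition is true.

    Parameters
    ----------
    condition : bool
        Flag deciding whether to skip.
    msg : str, optional
        Message attached to the skip.

    Returns
    -------
    None
    """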
bd0745f9434686907a2951e2056701ae4db53978
|
allow numeric values for SUGAR_LOGGER_LEVEL (silbe) #1038
|
src/sugar/logger.py
|
src/sugar/logger.py
|
# Copyright (C) 2007 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""Logging service setup.
STABLE.
"""
import errno
import sys
import os
import logging
# Let's keep this self contained so that it can be easily
# pasted in external sugar service like the datastore.
def get_logs_dir():
profile = os.environ.get('SUGAR_PROFILE', 'default')
logs_dir = os.environ.get('SUGAR_LOGS_DIR',
os.path.join(os.path.expanduser('~'),
'.sugar', profile, 'logs'))
return logs_dir
def set_level(level):
levels = { 'error' : logging.ERROR,
'warning' : logging.WARNING,
'debug' : logging.DEBUG,
'info' : logging.INFO }
if levels.has_key(level):
logging.getLogger('').setLevel(levels[level])
# pylint: disable-msg=E1101,F0401
def _except_hook(exctype, value, traceback):
# Attempt to provide verbose IPython tracebacks.
# Importing IPython is slow, so we import it lazily.
try:
from IPython.ultraTB import AutoFormattedTB
sys.excepthook = AutoFormattedTB(mode='Verbose', color_scheme='NoColor')
except ImportError:
sys.excepthook = sys.__excepthook__
sys.excepthook(exctype, value, traceback)
def start(log_filename=None):
    # remove existing handlers, or logging.basicConfig() won't have any effect.
root_logger = logging.getLogger('')
for handler in root_logger.handlers:
root_logger.removeHandler(handler)
class SafeLogWrapper(object):
"""Small file-like wrapper to gracefully handle ENOSPC errors when
logging."""
def __init__(self, stream):
self._stream = stream
def write(self, s):
try:
self._stream.write(s)
except IOError, e:
# gracefully deal w/ disk full
if e.errno != errno.ENOSPC:
raise e
def flush(self):
try:
self._stream.flush()
except IOError, e:
# gracefully deal w/ disk full
if e.errno != errno.ENOSPC:
raise e
logging.basicConfig(level=logging.WARNING,
format="%(created)f %(levelname)s %(name)s: %(message)s",
stream=SafeLogWrapper(sys.stderr))
if os.environ.has_key('SUGAR_LOGGER_LEVEL'):
set_level(os.environ['SUGAR_LOGGER_LEVEL'])
if log_filename:
try:
log_path = os.path.join(get_logs_dir(), log_filename + '.log')
log_fd = os.open(log_path, os.O_WRONLY | os.O_CREAT)
os.dup2(log_fd, sys.stdout.fileno())
os.dup2(log_fd, sys.stderr.fileno())
os.close(log_fd)
sys.stdout = SafeLogWrapper(sys.stdout)
sys.stderr = SafeLogWrapper(sys.stderr)
except OSError, e:
# if we're out of space, just continue
if e.errno != errno.ENOSPC:
raise e
sys.excepthook = _except_hook
|
Python
| 0.000092
|
@@ -1248,40 +1248,15 @@
ir%0A%0A
-def set_level(level):%0A levels
+_LEVELS
= %7B
@@ -1283,19 +1283,16 @@
.ERROR,%0A
-
@@ -1336,19 +1336,16 @@
-
'debug'
@@ -1363,19 +1363,16 @@
.DEBUG,%0A
-
@@ -1406,36 +1406,133 @@
O %7D%0A
- if levels.has_key(level)
+def set_level(level):%0A if level in _LEVELS:%0A logging.getLogger('').setLevel(_LEVELS%5Blevel%5D)%0A return%0A%0A try
:%0A
@@ -1572,22 +1572,113 @@
vel(
+int(
level
-s%5Blevel%5D)
+))%0A except ValueError:%0A logging.warning('Invalid log level: %25r' %25 level)%0A pass%0A
%0A%0A#
|
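The patch hoists the level table to module scope and falls back to `int()` so numeric values like `SUGAR_LOGGER_LEVEL=10` work alongside names. The same logic in Python 3 form:

import logging

_LEVELS = {"error": logging.ERROR, "warning": logging.WARNING,
           "info": logging.INFO, "debug": logging.DEBUG}

def set_level(level):
    if level in _LEVELS:
        logging.getLogger().setLevel(_LEVELS[level])
        return
    try:
        logging.getLogger().setLevel(int(level))   # accept "10", "20", ...
    except ValueError:
        logging.warning("Invalid log level: %r", level)

set_level("debug")
set_level("15")     # numeric levels pass through
set_level("bogus")  # logged as a warning instead of raising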
bec88bc52a0eb410353b68c10c946c58360dabf2
|
exists returns a tuple, not a string
|
erpnext/crm/doctype/appointment/test_appointment.py
|
erpnext/crm/doctype/appointment/test_appointment.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
import datetime
def create_test_lead():
test_lead = frappe.db.exists({'doctype':'Lead','lead_name':'Test Lead'})
if test_lead:
return frappe.get_doc('Lead',test_lead)
test_lead = frappe.get_doc({
'doctype':'Lead',
'lead_name':'Test Lead',
'email_id':'test@example.com'
})
test_lead.insert(ignore_permissions=True)
return test_lead
def create_test_appointments():
test_appointment = frappe.db.exists({ 'doctype':'Appointment', 'email':'test@example.com' })
if test_appointment:
return frappe.get_doc('Appointment',test_appointment)
test_appointment = frappe.get_doc({
'doctype':'Appointment',
'email':'test@example.com',
'status':'Open',
'customer_name':'Test Lead',
'customer_phone_number':'666',
'customer_skype':'test',
'customer_email':'test@example.com',
'scheduled_time':datetime.datetime.now()
})
test_appointment.insert()
return test_appointment
class TestAppointment(unittest.TestCase):
test_appointment = test_lead = None
def setUp(self):
self.test_lead = create_test_lead()
self.test_appointment = create_test_appointments()
def test_calendar_event_created(self):
cal_event = frappe.get_doc('Event',self.test_appointment.calendar_event)
self.assertEqual(cal_event.starts_on ,self.test_appointment.scheduled_time)
def test_lead_linked(self):
        lead = frappe.get_doc('Lead', self.test_lead.name)
self.assertIsNotNone(lead)
|
Python
| 0.999999
|
@@ -353,24 +353,30 @@
d',test_lead
+%5B0%5D%5B0%5D
)%0A test_l
@@ -784,16 +784,22 @@
ointment
+%5B0%5D%5B0%5D
)%0A te
|
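The fix indexes into the value returned by `frappe.db.exists` when it is called with a filters dict: it hands back a tuple of rows rather than a bare name. Illustrated with a literal of that shape (the exact return shape is frappe's, inferred here from the record's diff):

result = (("LEAD-0001",),)  # tuple of rows, each row a tuple of columns

if result:
    name = result[0][0]  # first row, first column -> the document name
    print(name)          # LEAD-0001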
6a2d047d857d6b40a43c826b23b49921a7c8fc3b
|
use issubset
|
onadata/libs/permissions.py
|
onadata/libs/permissions.py
|
from collections import defaultdict
from django.contrib.contenttypes.models import ContentType
from guardian.shortcuts import (
assign_perm,
remove_perm,
get_users_with_perms)
from onadata.apps.api.models import OrganizationProfile
from onadata.apps.main.models.user_profile import UserProfile
from onadata.apps.logger.models import XForm
from onadata.apps.api.models import Project
CAN_ADD_XFORM_TO_PROFILE = 'can_add_xform'
CAN_VIEW_ORGANIZATION_PROFILE = 'view_organizationprofile'
CAN_VIEW_PROFILE = 'view_profile'
CAN_CHANGE_XFORM = 'change_xform'
CAN_ADD_XFORM = 'add_xform'
CAN_DELETE_XFORM = 'delete_xform'
CAN_VIEW_XFORM = 'view_xform'
CAN_ADD_SUBMISSIONS = 'report_xform'
CAN_TRANSFER_OWNERSHIP = 'transfer_xform'
CAN_MOVE_TO_FOLDER = 'move_xform'
# Project Permissions
CAN_VIEW_PROJECT = 'view_project'
CAN_CHANGE_PROJECT = 'change_project'
CAN_TRANSFER_PROJECT_OWNERSHIP = 'transfer_project'
CAN_DELETE_PROJECT = 'delete_project'
CAN_ADD_DATADICTIONARY = 'add_datadictionary'
CAN_CHANGE_DATADICTIONARY = 'change_datadictionary'
CAN_DELETE_DATADICTIONARY = 'delete_datadictionary'
class Role(object):
class_to_permissions = None
permissions = None
name = None
@classmethod
def _remove_obj_permissions(self, user, obj):
content_type = ContentType.objects.get(
model=obj.__class__.__name__.lower(),
app_label=obj.__class__._meta.app_label
)
object_permissions = user.userobjectpermission_set.filter(
object_pk=obj.pk, content_type=content_type)
for perm in object_permissions:
remove_perm(perm.permission.codename, user, obj)
@classmethod
def add(cls, user, obj):
cls._remove_obj_permissions(user, obj)
for codename, klass in cls.permissions:
if isinstance(obj, klass):
assign_perm(codename, user, obj)
@classmethod
def has_role(cls, permissions, obj):
"""Check that permission correspond to this role for this object.
:param permissions: A list of permissions.
:param obj: An object to get the permissions of.
"""
return all([l in set(permissions)
for l in set(cls.class_to_permissions[type(obj)])])
@classmethod
def user_has_role(cls, user, obj):
"""Check that a user has this role.
:param user: A user object.
:param obj: An object to get the permissions of.
"""
return user.has_perms(cls.class_to_permissions[type(obj)], obj)
class ReadOnlyRole(Role):
name = 'readonly'
permissions = (
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_XFORM, XForm),
(CAN_VIEW_PROJECT, Project),
)
class DataEntryRole(Role):
name = 'dataentry'
permissions = (
(CAN_ADD_SUBMISSIONS, XForm),
(CAN_ADD_XFORM, Project),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
)
class EditorRole(Role):
name = 'editor'
permissions = (
(CAN_ADD_SUBMISSIONS, XForm),
(CAN_ADD_XFORM, Project),
(CAN_CHANGE_PROJECT, Project),
(CAN_CHANGE_XFORM, XForm),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
)
class ManagerRole(Role):
name = 'manager'
permissions = (
(CAN_ADD_XFORM, XForm),
(CAN_ADD_XFORM_TO_PROFILE, OrganizationProfile),
(CAN_ADD_XFORM_TO_PROFILE, UserProfile),
(CAN_CHANGE_PROJECT, Project),
(CAN_CHANGE_XFORM, XForm),
(CAN_DELETE_PROJECT, Project),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROFILE, UserProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
)
class MemberRole(Role):
"""This is a role for a member of an organization.
"""
name = 'member'
class OwnerRole(Role):
"""This is a role for an owner of a dataset, organization, or project.
"""
name = 'owner'
permissions = (
(CAN_ADD_XFORM, Project),
(CAN_ADD_XFORM, XForm),
(CAN_ADD_XFORM_TO_PROFILE, OrganizationProfile),
(CAN_ADD_XFORM_TO_PROFILE, UserProfile),
(CAN_CHANGE_PROJECT, Project),
(CAN_CHANGE_XFORM, XForm),
(CAN_DELETE_PROJECT, Project),
(CAN_DELETE_XFORM, XForm),
(CAN_MOVE_TO_FOLDER, XForm),
(CAN_TRANSFER_OWNERSHIP, XForm),
(CAN_TRANSFER_PROJECT_OWNERSHIP, Project),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROFILE, UserProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
(CAN_ADD_DATADICTIONARY, XForm),
(CAN_CHANGE_DATADICTIONARY, XForm),
(CAN_DELETE_DATADICTIONARY, XForm),
(CAN_ADD_SUBMISSIONS, XForm),
)
ROLES = {role.name: role for role in [ReadOnlyRole,
DataEntryRole,
EditorRole,
ManagerRole,
OwnerRole]}
ROLES_ORDERED = [ReadOnlyRole,
DataEntryRole,
EditorRole,
ManagerRole,
OwnerRole]
# Memoize a class to permissions dict.
for role in ROLES.values():
role.class_to_permissions = defaultdict(list)
[role.class_to_permissions[k].append(p) for p, k in role.permissions]
def get_role(permissions, obj):
for role in reversed(ROLES_ORDERED):
if role.has_role(permissions, obj):
return role.name
def get_object_users_with_permissions(obj):
"""Returns users, roles and permissions for a object.
"""
users_with_perms = []
if obj:
users_with_perms = [{
'user': user,
'role': get_role(permissions, obj),
'permissions': permissions} for user, permissions in
get_users_with_perms(obj,
attach_perms=True,
with_group_users=False).items()]
return users_with_perms
|
Python
| 0.000008
|
@@ -2144,112 +2144,122 @@
-return all(%5Bl in set(permissions)%0A for l in set(cls.class_to_permissions%5Btype(obj)%5D)%5D
+perms_for_role = set(cls.class_to_permissions%5Btype(obj)%5D)%0A%0A return perms_for_role.issubset(set(permissions)
)%0A%0A
|
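The rewrite replaces an `all(...)` membership loop with `set.issubset`, which states the intent directly. The two are equivalent:

required = {"view_xform", "change_xform"}
granted = {"view_xform", "change_xform", "delete_xform"}

print(required.issubset(granted))           # True: every required perm is granted
print(all(p in granted for p in required))  # same result, more ceremony
print(required <= granted)                  # operator spelling of issubset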
56e9a075ce238cdb9a4d8664a8886d20bf41d476
|
make role list DRY
|
onadata/libs/permissions.py
|
onadata/libs/permissions.py
|
from collections import defaultdict
from django.contrib.contenttypes.models import ContentType
from guardian.shortcuts import (
assign_perm,
remove_perm,
get_users_with_perms)
from onadata.apps.api.models import OrganizationProfile
from onadata.apps.main.models.user_profile import UserProfile
from onadata.apps.logger.models import XForm
from onadata.apps.api.models import Project
CAN_ADD_XFORM_TO_PROFILE = 'can_add_xform'
CAN_VIEW_ORGANIZATION_PROFILE = 'view_organizationprofile'
CAN_VIEW_PROFILE = 'view_profile'
CAN_CHANGE_XFORM = 'change_xform'
CAN_ADD_XFORM = 'add_xform'
CAN_DELETE_XFORM = 'delete_xform'
CAN_VIEW_XFORM = 'view_xform'
CAN_ADD_SUBMISSIONS = 'report_xform'
CAN_TRANSFER_OWNERSHIP = 'transfer_xform'
CAN_MOVE_TO_FOLDER = 'move_xform'
# Project Permissions
CAN_VIEW_PROJECT = 'view_project'
CAN_CHANGE_PROJECT = 'change_project'
CAN_TRANSFER_PROJECT_OWNERSHIP = 'transfer_project'
CAN_DELETE_PROJECT = 'delete_project'
CAN_ADD_DATADICTIONARY = 'add_datadictionary'
CAN_CHANGE_DATADICTIONARY = 'change_datadictionary'
CAN_DELETE_DATADICTIONARY = 'delete_datadictionary'
class Role(object):
class_to_permissions = None
permissions = None
name = None
@classmethod
def _remove_obj_permissions(self, user, obj):
content_type = ContentType.objects.get(
model=obj.__class__.__name__.lower(),
app_label=obj.__class__._meta.app_label
)
object_permissions = user.userobjectpermission_set.filter(
object_pk=obj.pk, content_type=content_type)
for perm in object_permissions:
remove_perm(perm.permission.codename, user, obj)
@classmethod
def add(cls, user, obj):
cls._remove_obj_permissions(user, obj)
for codename, klass in cls.permissions:
if isinstance(obj, klass):
assign_perm(codename, user, obj)
@classmethod
def has_role(cls, permissions, obj):
"""Check that permission correspond to this role for this object.
:param permissions: A list of permissions.
:param obj: An object to get the permissions of.
"""
perms_for_role = set(cls.class_to_permissions[type(obj)])
return perms_for_role.issubset(set(permissions))
@classmethod
def user_has_role(cls, user, obj):
"""Check that a user has this role.
:param user: A user object.
:param obj: An object to get the permissions of.
"""
return user.has_perms(cls.class_to_permissions[type(obj)], obj)
class ReadOnlyRole(Role):
name = 'readonly'
permissions = (
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_XFORM, XForm),
(CAN_VIEW_PROJECT, Project),
)
class DataEntryRole(Role):
name = 'dataentry'
permissions = (
(CAN_ADD_SUBMISSIONS, XForm),
(CAN_ADD_XFORM, Project),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
)
class EditorRole(Role):
name = 'editor'
permissions = (
(CAN_ADD_SUBMISSIONS, XForm),
(CAN_ADD_XFORM, Project),
(CAN_CHANGE_PROJECT, Project),
(CAN_CHANGE_XFORM, XForm),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
)
class ManagerRole(Role):
name = 'manager'
permissions = (
(CAN_ADD_XFORM, XForm),
(CAN_ADD_XFORM_TO_PROFILE, OrganizationProfile),
(CAN_ADD_XFORM_TO_PROFILE, UserProfile),
(CAN_CHANGE_PROJECT, Project),
(CAN_CHANGE_XFORM, XForm),
(CAN_DELETE_PROJECT, Project),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROFILE, UserProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
)
class MemberRole(Role):
"""This is a role for a member of an organization.
"""
name = 'member'
class OwnerRole(Role):
"""This is a role for an owner of a dataset, organization, or project.
"""
name = 'owner'
permissions = (
(CAN_ADD_XFORM, Project),
(CAN_ADD_XFORM, XForm),
(CAN_ADD_XFORM_TO_PROFILE, OrganizationProfile),
(CAN_ADD_XFORM_TO_PROFILE, UserProfile),
(CAN_CHANGE_PROJECT, Project),
(CAN_CHANGE_XFORM, XForm),
(CAN_DELETE_PROJECT, Project),
(CAN_DELETE_XFORM, XForm),
(CAN_MOVE_TO_FOLDER, XForm),
(CAN_TRANSFER_OWNERSHIP, XForm),
(CAN_TRANSFER_PROJECT_OWNERSHIP, Project),
(CAN_VIEW_ORGANIZATION_PROFILE, OrganizationProfile),
(CAN_VIEW_PROFILE, UserProfile),
(CAN_VIEW_PROJECT, Project),
(CAN_VIEW_XFORM, XForm),
(CAN_ADD_DATADICTIONARY, XForm),
(CAN_CHANGE_DATADICTIONARY, XForm),
(CAN_DELETE_DATADICTIONARY, XForm),
(CAN_ADD_SUBMISSIONS, XForm),
)
ROLES = {role.name: role for role in [ReadOnlyRole,
DataEntryRole,
EditorRole,
ManagerRole,
OwnerRole]}
ROLES_ORDERED = [ReadOnlyRole,
DataEntryRole,
EditorRole,
ManagerRole,
OwnerRole]
# Memoize a class to permissions dict.
for role in ROLES.values():
role.class_to_permissions = defaultdict(list)
[role.class_to_permissions[k].append(p) for p, k in role.permissions]
def get_role(permissions, obj):
for role in reversed(ROLES_ORDERED):
if role.has_role(permissions, obj):
return role.name
def get_object_users_with_permissions(obj):
"""Returns users, roles and permissions for a object.
"""
users_with_perms = []
if obj:
users_with_perms = [{
'user': user,
'role': get_role(permissions, obj),
'permissions': permissions} for user, permissions in
get_users_with_perms(obj,
attach_perms=True,
with_group_users=False).items()]
return users_with_perms
|
Python
| 0.000022
|
@@ -4929,264 +4929,8 @@
)%0A%0A
-ROLES = %7Brole.name: role for role in %5BReadOnlyRole,%0A DataEntryRole,%0A EditorRole,%0A ManagerRole,%0A OwnerRole%5D%7D%0A
ROLE
@@ -4952,24 +4952,24 @@
adOnlyRole,%0A
+
@@ -5043,24 +5043,24 @@
anagerRole,%0A
-
@@ -5076,16 +5076,69 @@
rRole%5D%0A%0A
+ROLES = %7Brole.name: role for role in ROLES_ORDERED%7D%0A%0A
# Memoiz
|
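The cleanup keeps `ROLES_ORDERED` as the single authoritative list and derives the name lookup from it, so adding a role means touching one place. The shape in miniature:

class ReadOnlyRole(object):
    name = "readonly"

class OwnerRole(object):
    name = "owner"

ROLES_ORDERED = [ReadOnlyRole, OwnerRole]

# Derived, not hand-maintained: stays in sync with the list by construction.
ROLES = {role.name: role for role in ROLES_ORDERED}
print(ROLES["owner"] is OwnerRole)  # True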
a705a96be4f8616c5fcd6bca33300b0536a143d8
|
Set the login url in settings.
|
onlineweb4/settings/base.py
|
onlineweb4/settings/base.py
|
import os
import sys
from django.contrib.messages import constants as messages
# Directory that contains this file.
PROJECT_SETTINGS_DIRECTORY = os.path.dirname(globals()['__file__'])
# Root directory. Contains manage.py
PROJECT_ROOT_DIRECTORY = os.path.join(PROJECT_SETTINGS_DIRECTORY, '../..')
#PROJECT_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(__file__))
TEST_RUNNER = "django_nose.NoseTestSuiteRunner"
NOSE_ARGS = ['--with-coverage', '--cover-package=apps']
DEBUG = False
TEMPLATE_DEBUG = DEBUG
TEST_RUNNER = "django_nose.NoseTestSuiteRunner"
ADMINS = (
('dotKom', 'dotkom@online.ntnu.no'),
)
MANAGERS = ADMINS
# Email settings
DEFAULT_FROM_EMAIL = 'online@online.ntnu.no'
EMAIL_ARRKOM = 'arrkom@online.ntnu.no'
EMAIL_BEDKOM = 'bedkom@online.ntnu.no'
EMAIL_DOTKOM = 'dotkom@online.ntnu.no'
EMAIL_FAGKOM = 'fagkom@online.ntnu.no'
EMAIL_PROKOM = 'prokom@online.ntnu.no'
EMAIL_TRIKOM = 'trikom@online.ntnu.no'
# We will receive errors and other django messages from this email
SERVER_EMAIL = 'onlineweb4-error@online.ntnu.no'
TIME_ZONE = 'Europe/Oslo'
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'nb'
LANGUAGES = (
('nb', 'Norwegian'),
('en_US', 'English'),
)
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = False
DATETIME_FORMAT = 'N j, Y, H:i'
SECRET_KEY = 'q#wy0df(7&$ucfrxa1j72%do7ko*-6(g!8f$tc2$3x@3cq5@6c'
AUTH_USER_MODEL = 'authentication.OnlineUser'
MEDIA_ROOT = os.path.join(PROJECT_ROOT_DIRECTORY, 'uploaded_media') # Override this in local.py in prod.
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PROJECT_ROOT_DIRECTORY, 'static')
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT_DIRECTORY, 'files/static'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
'compressor.finders.CompressorFinder',
)
COMPRESS_OUTPUT_DIR = 'cache'
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc {infile} {outfile}'),
)
COMPRESS_CSS_FILTERS = [
'compressor.filters.css_default.CssAbsoluteFilter',
    'compressor.filters.cssmin.CSSMinFilter',
]
COMPRESS_JS_FILTERS = [
    'compressor.filters.jsmin.JSMinFilter',
]
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc -x {infile} {outfile}'),
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'middleware.http.Http403Middleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'onlineweb4.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'onlineweb4.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT_DIRECTORY, 'templates/')
)
# Grappelli settings
GRAPPELLI_ADMIN_TITLE = '<a href="/">Onlineweb</a>'
INSTALLED_APPS = (
# Third party dependencies
'django_nose',
'south',
'grappelli',
'filebrowser',
'chunks',
'crispy_forms',
'django_extensions',
'django_dynamic_fixture',
'captcha',
'compressor',
# Django apps
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.markup',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
# Onlineweb 4 apps
'apps.article',
'apps.authentication',
'apps.autoconfig',
'apps.careeropportunity',
'apps.companyprofile',
'apps.events',
'apps.marks',
'apps.offline',
'apps.feedback',
'apps.mommy',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'apscheduler.scheduler': {
'handlers': ['console'],
'level': 'INFO',
},
}
}
# crispy forms settings
CRISPY_TEMPLATE_PACK = 'bootstrap'
# bootstrap messages classes
MESSAGE_TAGS = {messages.DEBUG: 'alert-debug',
messages.INFO: 'alert-info',
messages.SUCCESS: 'alert-success',
messages.WARNING: 'alert-warning',
messages.ERROR: 'alert-error'}
# Not really sure what this does.
# Has something to do with django-dynamic-fixture; it was bumped from 1.6.4 to 1.6.5 in order to run a syncdb with mysql/postgres (OptimusCrime)
IMPORT_DDF_MODELS = False
# Remember to keep 'local' last, so it can override any setting.
for settings_module in ['filebrowser', 'local']: # local last
if not os.path.exists(os.path.join(PROJECT_SETTINGS_DIRECTORY,
settings_module + ".py")):
sys.stderr.write("Could not find settings module '%s'.\n" %
settings_module)
if settings_module == 'local':
sys.stderr.write("You need to copy the settings file "
"'onlineweb4/settings/example-local.py' to "
"'onlineweb4/settings/local.py'.\n")
sys.exit(1)
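# Star-import every name from the settings module into this namespace.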
try:
exec('from %s import *' % settings_module)
except ImportError, e:
print "Could not import settings for '%s' : %s" % (settings_module,
str(e))
|
Python
| 0
|
@@ -1456,16 +1456,43 @@
ineUser'
+%0ALOGIN_URL = '/auth/login/'
%0A%0AMEDIA_
|
c5984a34e3bef6d69ebffe41f690819e5693f1d5
|
fix post model
|
src/tours/models.py
|
src/tours/models.py
|
import authtools
from django.conf import settings
from django.db import models
from django.http import request
class Tour(models.Model):
title_pt = models.CharField(max_length=100, blank=True, null=False)
title_gb = models.CharField(max_length=100, blank=True, null=False)
title_de = models.CharField(max_length=100, blank=True, null=False)
description_pt = models.TextField(max_length=1000, blank=True, null=False)
description_gb = models.TextField(max_length=1000, blank=True, null=False)
description_de = models.TextField(max_length=1000, blank=True, null=False)
price = models.CharField(max_length=100, blank=True, null=False)
img = models.FileField(null=True, blank=True)
url = models.URLField(max_length=200, blank=True, null=False)
created_on = models.DateTimeField(auto_now_add=True, auto_created=False)
def __str__(self):
return self.title_pt
def __unicode__(self):
return self.title_pt
class Offer(models.Model):
title = models.CharField(max_length=100, blank=True, null=False)
text = models.TextField(max_length=1000, blank=True, null=False)
created_on = models.DateTimeField(auto_now_add=True, auto_created=False)
img = models.FileField(null=True, blank=True)
def __str__(self):
return self.title
def __unicode__(self):
return self.title
class Gallery(models.Model):
img = models.FileField(null=True, blank=True)
title = models.TextField(max_length=100, blank=True, null=False)
video = models.URLField(max_length=1000, blank=True, null=False)
def __str__(self):
return self.title
def __unicode__(self):
return self.title
class Contact(models.Model):
first_name = models.CharField(max_length=100, blank=True, null=False)
last_name = models.CharField(max_length=100, blank=True, null=False)
img = models.FileField(null=True, blank=True)
category = models.CharField(max_length=100, blank=True, null=False)
mobile = models.CharField(max_length=100, blank=True, null=False)
email = models.CharField(max_length=100, blank=True, null=False)
whatsapp = models.CharField(max_length=100, blank=True, null=False)
viber = models.CharField(max_length=100, blank=True, null=False)
telegram = models.CharField(max_length=100, blank=True, null=False)
skype = models.CharField(max_length=100, blank=True, null=False)
facebook = models.URLField(max_length=100, blank=True, null=False)
twitter = models.URLField(max_length=100, blank=True, null=False)
pinterest = models.URLField(max_length=100, blank=True, null=False)
google = models.URLField(max_length=100, blank=True, null=False)
instagram = models.URLField(max_length=100, blank=True, null=False)
def __str__(self):
return self.last_name
def __unicode__(self):
return self.last_name # Contact has no 'name' field; mirror __str__
class Post(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
title = models.CharField(max_length=500)
publish = models.DateField(auto_now=False, auto_now_add=False)
updated = models.DateTimeField(auto_now=True, auto_now_add=False)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
def __unicode__(self):
return self.title
def __str__(self):
return self.title
|
Python
| 0
|
@@ -2992,75 +2992,8 @@
00)%0A
- publish = models.DateField(auto_now=False, auto_now_add=False)%0A
|
ea87db99ff5e662ddda4ea6bcf3369c75969fd3c
|
Refactor user-creation parameters
|
src/users/models.py
|
src/users/models.py
|
import datetime
from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, BaseUserManager, PermissionsMixin,
)
from django.core import signing
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.db import models
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.translation import ugettext, ugettext_lazy as _
class EmailUserManager(BaseUserManager):
"""Custom manager for EmailUser."""
def _create_user(
self, email, password, is_staff, is_superuser, **extra_fields):
"""Create and save an EmailUser with the given email and password.
:param str email: user email
:param str password: user password
:param bool is_staff: whether user staff or not
:param bool is_superuser: whether user admin or not
:return custom_user.models.EmailUser user: user
:raise ValueError: email is not set
"""
now = timezone.now()
if not email:
raise ValueError('The given email must be set')
email = self.normalize_email(email)
is_active = extra_fields.pop('is_active', False)
user = self.model(
email=email, is_staff=is_staff, is_active=is_active,
is_superuser=is_superuser, last_login=now, date_joined=now,
**extra_fields
)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, email, password=None, **extra_fields):
"""Create and save an EmailUser with the given email and password.
:param str email: user email
:param str password: user password
:return custom_user.models.EmailUser user: regular user
"""
is_staff = extra_fields.pop("is_staff", False)
return self._create_user(
email, password, is_staff, False, **extra_fields
)
def create_superuser(self, email, password, **extra_fields):
"""Create and save an EmailUser with the given email and password.
:param str email: user email
:param str password: user password
:return custom_user.models.EmailUser user: admin user
"""
return self._create_user(
email, password, True, True, **extra_fields
)
def get_with_activation_key(self, activation_key):
"""Get a user from activation key.
"""
try:
username = signing.loads(
activation_key,
salt=settings.USER_ACTIVATION_KEY_SALT,
max_age=settings.USER_ACTIVATION_EXPIRE_SECONDS,
)
except signing.BadSignature:
raise self.model.DoesNotExist
return self.get(**{self.model.USERNAME_FIELD: username})
def photo_upload_to(instance, filename):
return 'avatars/{pk}/{date}-{filename}'.format(
pk=instance.pk,
date=str(datetime.date.today()),
filename=filename,
)
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
verbose_name=_('email address'),
max_length=255, unique=True, db_index=True,
)
speaker_name = models.CharField(
verbose_name=_('speaker name'),
max_length=100,
)
bio = models.TextField(
verbose_name=_('biography'),
max_length=140,
help_text=_("About you. There will be no formatting."),
)
photo = models.ImageField(
verbose_name=_('photo'),
blank=True, default='', upload_to=photo_upload_to,
)
facebook_id = models.CharField(
verbose_name=_('facebook'),
blank=True, max_length=100,
)
twitter_id = models.CharField(
verbose_name=_('twitter'),
blank=True, max_length=100,
)
github_id = models.CharField(
verbose_name=_('github'),
blank=True, max_length=100,
)
is_staff = models.BooleanField(
verbose_name=_('staff status'),
default=False,
help_text=_(
"Designates whether the user can log into this admin site."
),
)
is_active = models.BooleanField(
verbose_name=_('active'),
default=False,
help_text=_(
"Designates whether this user should be treated as "
"active. Unselect this instead of deleting accounts."
),
)
date_joined = models.DateTimeField(
verbose_name=_('date joined'),
default=timezone.now,
)
objects = EmailUserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = []
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
swappable = 'AUTH_USER_MODEL'
def __str__(self):
return self.email
def get_full_name(self):
return self.speaker_name
def get_short_name(self):
return self.speaker_name
@property
def profile_filled(self):
return self.is_active and self.speaker_name and self.bio
def get_activation_key(self):
key = signing.dumps(
obj=getattr(self, self.USERNAME_FIELD),
salt=settings.USER_ACTIVATION_KEY_SALT,
)
return key
def email_user(self, subject, message, from_email=None, **kwargs):
"""Send an email to this user.
"""
send_mail(subject, message, from_email, [self.email], **kwargs)
def send_activation_email(self, request):
activation_key = self.get_activation_key()
activation_url = request.build_absolute_uri(
reverse('user_activate', kwargs={
'activation_key': activation_key,
}),
)
context = {
'user': self,
'activation_key': activation_key,
'activation_url': activation_url,
}
text_message = render_to_string(
'registration/activation_email.txt', context,
)
html_message = render_to_string(
'registration/activation_email.html', context,
)
self.email_user(
subject=ugettext('Complete your registration on tw.pycon.org'),
message=text_message, html_message=html_message,
fail_silently=False,
)
|
Python
| 0
|
@@ -545,29 +545,16 @@
te_user(
-%0A
self, em
@@ -571,32 +571,8 @@
ord,
- is_staff, is_superuser,
**e
@@ -1132,25 +1132,84 @@
-is_active
+last_login = extra_fields.pop('last_login', now)%0A date_joined
= extra
@@ -1225,25 +1225,25 @@
op('
-is_active', False
+date_joined', now
)%0A
@@ -1296,87 +1296,19 @@
il,
-is_staff=is_staff, is_active=is_active,%0A is_superuser=is_superuser,
+last_login=
last
@@ -1313,20 +1313,16 @@
st_login
-=now
, date_j
@@ -1327,19 +1327,27 @@
_joined=
-now
+date_joined
,%0A
@@ -1769,63 +1769,8 @@
%22%22%22%0A
- is_staff = extra_fields.pop(%22is_staff%22, False)%0A
@@ -1794,37 +1794,24 @@
create_user(
-%0A
email, passw
@@ -1818,25 +1818,8 @@
ord,
- is_staff, False,
**e
@@ -1825,33 +1825,24 @@
extra_fields
-%0A
)%0A%0A def c
@@ -2193,20 +2193,82 @@
ord,
- True, True,
+%0A is_active=True, is_staff=True, is_superuser=True,%0A
**e
|
eb76abed3402a403637ae7486f692df8bba72ea3
|
reorder code for better readability
|
src/vimpdb/proxy.py
|
src/vimpdb/proxy.py
|
import os
import sys
import socket
from subprocess import call
from subprocess import Popen
from subprocess import PIPE
PROGRAM = os.environ.get("VIMPDB_VIMSCRIPT", "vimpdb")
SERVERNAME = os.environ.get("VIMPDB_SERVERNAME", "VIMPDB")
def getPackagePath(instance):
module = sys.modules[instance.__module__]
return os.path.dirname(module.__file__)
class ProxyToVim(object):
def setupRemote(self):
if not self.isRemoteSetup():
filename = os.path.join(getPackagePath(self), "vimpdb.vim")
command = "<C-\><C-N>:source %s<CR>" % filename
self._send(command)
def getText(self, prompt):
self.setupRemote()
command = self._expr('PDB_get_command("%s")' % prompt)
return command
def showFeedback(self, feedback):
if not feedback:
return
feedback_list = feedback.splitlines()
self.setupRemote()
self._send(':call PDB_show_feedback(%s)<CR>' % repr(feedback_list))
def showFileAtLine(self, filename, lineno):
if os.path.exists(filename):
self._showFileAtLine(filename, lineno)
def _send(self, command):
return_code = call([PROGRAM, '--servername', SERVERNAME,
'--remote-send', command])
if return_code:
raise RemoteUnavailable()
print "sent:", command
def _showFileAtLine(self, filename, lineno):
# Windows compatibility:
# Command-line does not play well with backslash in filename.
# So turn backslash to slash; Vim knows how to translate them back.
filename = filename.replace('\\', '/')
self.setupRemote()
self._send(':call PDB_show_file_at_line("%s", "%d")<CR>'
% (filename, lineno))
def _expr(self, expr):
print "expr:", expr
result = self._remote_expr(expr)
print "result:", result
return result
def _remote_expr(self, expr):
p = Popen([PROGRAM, '--servername',
SERVERNAME, "--remote-expr", expr],
stdin=PIPE, stdout=PIPE)
return_code = p.wait()
if return_code:
raise RemoteUnavailable()
child_stdout = p.stdout
output = child_stdout.read()
return output.strip()
def isRemoteSetup(self):
status = self._remote_expr("exists('*PDB_init')")
return status == '1'
class ProxyFromVim(object):
PORT = 6666
BUFLEN = 512
def __init__(self):
self.socket_inactive = True
def bindSocket(self):
if self.socket_inactive:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.IPPROTO_UDP)
self.socket.bind(('', self.PORT))
self.socket_inactive = False
def closeSocket(self):
self.socket.close()
self.socket_inactive = True
def waitFor(self, pdb):
self.bindSocket()
(message, address) = self.socket.recvfrom(self.BUFLEN)
print "command:", message
return message
class RemoteUnavailable(Exception):
pass
def eat_stdin(self):
sys.stdout.write('-- Type Ctrl-D to continue --\n')
sys.stdout.flush()
sys.stdin.readlines()
|
Python
| 0.000323
|
@@ -373,24 +373,773 @@
Vim(object):
+%0A %22%22%22%0A use subprocess to launch Vim instance that use clientserver mode%0A to communicate with Vim instance used for debugging.%0A %22%22%22%0A%0A def _remote_expr(self, expr):%0A p = Popen(%5BPROGRAM, '--servername',%0A SERVERNAME, %22--remote-expr%22, expr%5D,%0A stdin=PIPE, stdout=PIPE)%0A return_code = p.wait()%0A if return_code:%0A raise RemoteUnavailable()%0A child_stdout = p.stdout%0A output = child_stdout.read()%0A return output.strip()%0A%0A def _send(self, command):%0A return_code = call(%5BPROGRAM, '--servername', SERVERNAME,%0A '--remote-send', command%5D)%0A if return_code:%0A raise RemoteUnavailable()%0A print %22sent:%22, command
%0A%0A def se
@@ -1369,119 +1369,85 @@
def
-getText(self, prompt):%0A self.setupRemote()%0A command = self._expr('PDB_get_command(%22%25s%22)' %25 prompt
+isRemoteSetup(self):%0A status = self._remote_expr(%22exists('*PDB_init')%22
)%0A
@@ -1459,23 +1459,29 @@
return
-command
+status == '1'
%0A%0A de
@@ -1847,252 +1847,8 @@
o)%0A%0A
- def _send(self, command):%0A return_code = call(%5BPROGRAM, '--servername', SERVERNAME,%0A '--remote-send', command%5D)%0A if return_code:%0A raise RemoteUnavailable()%0A print %22sent:%22, command%0A%0A
@@ -1935,17 +1935,25 @@
#
-C
+Windows c
ommand-l
@@ -2261,131 +2261,157 @@
-def _expr(self, expr):%0A print %22expr:%22, expr%0A result = self._remote_expr(expr)%0A print %22result:%22, result
+# code leftover from hacking%0A def getText(self, prompt):%0A self.setupRemote()%0A command = self._expr('PDB_get_command(%22%25s%22)' %25 prompt)
%0A
@@ -2426,14 +2426,15 @@
urn
-result
+command
%0A%0A
@@ -2431,39 +2431,32 @@
ommand%0A%0A def
-_remote
_expr(self, expr
@@ -2471,414 +2471,99 @@
p
- = Popen(%5BPROGRAM, '--servername',%0A SERVERNAME, %22--remote-
+rint %22
expr
+:
%22, expr
-%5D,%0A stdin=PIPE, stdout=PIPE)%0A return_code = p.wait()%0A if return_code:%0A raise RemoteUnavailable()%0A child_stdout = p.stdout%0A output = child_stdout.read()%0A return output.strip()%0A%0A def isRemoteSetup(self):%0A status = self._remote_expr(%22exists('*PDB_init')%22)
+%0A result = self._remote_expr(expr)%0A print %22result:%22, result
%0A
@@ -2574,29 +2574,22 @@
return
-status == '1'
+result
%0A%0A%0Aclass
@@ -3280,16 +3280,45 @@
pass%0A%0A%0A
+# code leftover from hacking%0A
def eat_
|
148da580d7b15c51aa147b3472168b816842310f
|
change arg format
|
ssh_auth_methods.py
|
ssh_auth_methods.py
|
import subprocess, sys, threading
from queue import Queue
from math import ceil
def get_auth_methods(hostname, port=22, timeout=5.0, verbose=False):
try:
if sys.version_info.minor < 3:
success_output = subprocess.check_output([
'ssh',
# prevents ominous error on changed host key
'-o', 'StrictHostKeyChecking=no',
# the point - prevents attempted authentication
'-o', 'PreferredAuthentications=none',
# prevents warning associated with unrecognized host key
'-o', 'LogLevel=ERROR',
# maximum time per connections
# NOTE: there can be multiple connections if a domain
# resolves to multiple IPs
'-o', 'ConnectTimeout=%d' % ceil(timeout),
'-p', str(port),
'root@' + hostname, # use root user to prevent leaking username
'exit'], # the command to be executed upon successful auth
stderr=subprocess.STDOUT)
else:
success_output = subprocess.check_output([
'ssh',
'-o', 'StrictHostKeyChecking=no',
'-o', 'PreferredAuthentications=none',
'-o', 'LogLevel=ERROR',
'-p', str(port),
'root@' + hostname,
'exit'],
stderr=subprocess.STDOUT,
# only available in Python 3.3+ (reason for the version check above)
timeout=timeout)
# If we make it here, the server allowed us shell access without
# authentication. Thankfully, the 'exit' command should have
# left immediately.
if verbose:
print('Eek! Server allowed unauthenticated login! Exiting.')
return ['none']
# This is in fact the expected case, as we expect the SSH server to
# reject the unauthenticated connection, and therefore expect exit code
# 255, OpenSSH's sole error code.
except subprocess.CalledProcessError as e:
# ssh's result to stderr
result = str(e.output.strip(), 'utf-8')
if e.returncode != 255:
if verbose:
print('Eek! Server allowed unauthenticated login! '
'Also, the command passed had a non-zero exit status.')
return ['none']
elif result.startswith('ssh: Could not resolve hostname'):
if verbose:
print('hostname resolution failed - '
'maybe the server is down, '
'the SSH server is on another port, '
'or your IP is blacklisted?')
raise Exception('resolution of hostname ' + hostname + ' failed')
elif result.endswith('Connection timed out'):
raise Exception('connection to ' + hostname + ' timed out')
elif result.startswith('Permission denied (') \
and result.endswith(').'):
# assume the format specified in the above condition with
# comma-delimited auth methods
return result[19:-2].split(',')
else:
raise Exception('unexpected SSH error response: ' + result)
# we leave subprocess.TimeoutExpired uncaught, so it will propagate
def _ssh_worker(host_queue, response_queue, ssh_args):
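# Worker: pull one hostname off the queue, probe it, and report (hostname, methods-or-None).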
hostname = host_queue.get()
try:
resp = get_auth_methods(hostname, **ssh_args)
except:
resp = None
response_queue.put((hostname, resp))
host_queue.task_done()
def _threaded_auth_methods(host_file, delay=0.1, timeout=5.0, verbose=False):
# All get_auth_methods() args aside from hostname are optional,
# and are the same across all calls.
# We therefore use a dict of args that is unpacked in calls.
# TODO: add port
ssh_args = {'verbose': verbose, timeout=timeout}
host_queue, response_queue = Queue(), Queue()
num_hosts = 0
for line in host_file:
num_hosts += 1
host_queue.put(line.strip())
t = threading.Thread(
target=_ssh_worker,
args=[host_queue, response_queue, ssh_args])
t.start()
host_queue.join()
return [response_queue.get() for _ in range(num_hosts)]
def main():
if sys.version_info.major != 3:
print('this script only runs on Python 3, which should be '
'available on your platform',
file=sys.stderr)
sys.exit(1)
# the only two currently acceptable argument situations
# a more complex argument system (using argparse, for example) may
# be added later if needed.
if len(sys.argv) == 1 or \
len(sys.argv) == 2 and sys.argv[1] == '--verbose':
verbose = len(sys.argv) == 2
'''
# loop through newline-delimited hostnames
for line in sys.stdin:
hostname = line.strip()
try:
auth_methods = get_auth_methods(hostname, verbose=verbose)
except:
# could probably use a verbose print option there
auth_methods = []
print('\t'.join([hostname] + auth_methods))
'''
response_tups = _threaded_auth_methods(sys.stdin, verbose=verbose)
for hostname, methods in response_tups:
print('\t'.join([hostname] + methods))
else:
print('ERROR: input must be line-delimited hostnames from stdin',
file=sys.stderr)
print('usage: python3 ssh_auth_methods.py [--verbose]',
file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
main()
|
Python
| 0.001303
|
@@ -3853,23 +3853,25 @@
erbose,
+'
timeout
+'
=timeout
|
f577ff84c7231dff4fe01e7e9b73f1b497993b41
|
remove commented out code and debug statements.
|
openxc/tools/obd2scanner.py
|
openxc/tools/obd2scanner.py
|
"""
This module contains the methods for the ``openxc-obd2scanner`` command line
program.
`main` is executed when ``openxc-obd2scanner`` is run, and all other callables in this
module are internal only.
"""
import argparse
from .common import device_options, configure_logging, select_device
import json
def scan(controller, bus=None):
# TODO could read the response from the "PIDs supported" requests to see
# what the vehicle reports that it *should* support.
print("Beginning sequential scan of all OBD-II PIDs")
for pid in range(0, 0x88):
response = controller.create_diagnostic_request(0x7df, mode=0x1, bus=bus,
wait_for_first_response=True, pid=pid)
if response is not None:
no_response = True
for item in response[1]:
if 'success' in item:
no_response = False
print(("PID 0x%x responded with: %s" % (pid, item)))
# if item['success']:
# if 'name' in item:
# print('found success true response at ' + item['name'])
# elif 'id' in item:
# print('found success true response at id ' + str(item['id']))
# else:
# # print('idk')
# print(("PID 0x%x responded with: %s" % (pid, response)))
if no_response:
print(("PID 0x%x did not respond" % pid))
# else:
# print(("PID 0x%x did not respond" % pid))
def parse_options():
parser = argparse.ArgumentParser(description="Send requests for all "
"OBD-II PIDs sequentially to see what actually responds",
parents=[device_options()])
parser.add_argument("--bus")
return parser.parse_args()
def main():
configure_logging()
arguments = parse_options()
controller_class, controller_kwargs = select_device(arguments)
controller = controller_class(**controller_kwargs)
controller.start()
scan(controller, arguments.bus)
|
Python
| 0
|
@@ -553,14 +553,21 @@
ge(0
-, 0x88
+xd0ff, 0xd101
):%0A
@@ -625,17 +625,17 @@
est(0x7d
-f
+0
, mode=0
@@ -635,17 +635,18 @@
mode=0x
-1
+22
, bus=bu
@@ -959,466 +959,9 @@
)))%0A
- # if item%5B'success'%5D:%0A # if 'name' in item:%0A # print('found success true response at ' + item%5B'name'%5D)%0A # elif 'id' in item:%0A # print('found success true response at id ' + str(item%5B'id'%5D))%0A # else:%0A # # print('idk')%0A # print((%22PID 0x%25x responded with: %25s%22 %25 (pid, response)))
%0A
+
@@ -1056,79 +1056,8 @@
d))%0A
- # else:%0A # print((%22PID 0x%25x did not respond%22 %25 pid))
%0A%0Ade
|
24b8e2f7440926d6d1c384a7289dfb5d1124e82f
|
Add article on core admin
|
opps/core/admin/__init__.py
|
opps/core/admin/__init__.py
|
# -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
from opps.core.admin.source import *
|
Python
| 0
|
@@ -17,16 +17,54 @@
f-8 -*-%0A
+from opps.core.admin.article import *%0A
from opp
|
b1903d4cca8e9fb46468b52250610a9ecf2f3c90
|
Make some functions private
|
atamaTrack.py
|
atamaTrack.py
|
"""atamaTrack
"""
import os.path
import sys
import cv2
import numpy
from modules import getXY, piv
# constants
TIME_STEP = 0.1 # time step in second
TOTAL_FRAMES = 20 # number of frames to track
D_RANGE = 15 # ???: search-range parameter for the pattern finding
def main(file_path):
# load a movie file
file_name = os.path.basename(file_path)
capture = cv2.cv.CreateFileCapture(file_path)
# open a window
cv2.cv.NamedWindow(file_name, cv2.cv.CV_WINDOW_AUTOSIZE)
# click heads' positions on the first frame
image = load_image(capture, 0.0)
initial_jjii = getXY.getXY(image).astype(numpy.int)
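# getXY returns clicked (x, y) pairs; column 1 is the row index (ii), column 0 the column index (jj).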
ii = initial_jjii[:, 1]
jj = initial_jjii[:, 0]
# output
for idx, (x, y) in enumerate(zip(jj, ii)):
dump_result(0.0, idx, x, y)
# process each frame
for time in numpy.arange(TOTAL_FRAMES) * TIME_STEP:
image = load_image(capture, time)
gray_image = to_grayscale(image)
next_image = load_image(capture, time + TIME_STEP)
gray_next_image = to_grayscale(next_image)
# find similar patterns around points of the present frame from
# the next frame
di, dj, ccmax = piv.find_flow(gray_image, gray_next_image, ii, jj,
kernel_size=(25, 25),
di_range=(-D_RANGE, D_RANGE),
dj_range=(-D_RANGE, D_RANGE))
# translate positions
ii += di
jj += dj
# output
for idx, (x, y) in enumerate(zip(jj, ii)):
draw_marker(image, x, y)
dump_result(time + TIME_STEP, idx, x, y)
cv2.cv.ShowImage(file_name, image)
cv2.waitKey(0)
cv2.destroyAllWindows()
def dump_result(time, idx, x, y):
"""Print result to the standard output.
Arguments:
time -- [float] time in second
idx -- [int] index number of person
x -- [int] x coordinate
y -- [int] y coordinate
"""
print("{} {} {} {}".format(time, idx, y, x))
def draw_marker(image, x, y, radius=10, color=(255, 0, 0), stroke=2):
""" Draw a circle at the desired coordinate on the image."""
cv2.cv.Circle(image, (x, y), radius, color, stroke)
def load_image(capture, time_sec):
"""Load image at the desired time."""
cv2.cv.SetCaptureProperty(capture, cv2.cv.CV_CAP_PROP_POS_MSEC,
time_sec * 1000)
return cv2.cv.QueryFrame(capture)
def to_grayscale(image):
"""Convert given image to grayscale."""
return numpy.asarray(cv2.cv.GetMat(image)).astype(numpy.double)[:, :, 0]
if __name__ == "__main__":
main(sys.argv[1])
|
Python
| 0.000024
|
@@ -536,32 +536,33 @@
ame%0A image =
+_
load_image(captu
@@ -743,32 +743,33 @@
, ii)):%0A
+_
dump_result(0.0,
@@ -870,32 +870,33 @@
image =
+_
load_image(captu
@@ -918,32 +918,33 @@
gray_image =
+_
to_grayscale(ima
@@ -977,16 +977,17 @@
image =
+_
load_ima
@@ -1042,16 +1042,17 @@
image =
+_
to_grays
@@ -1587,16 +1587,17 @@
+_
draw_mar
@@ -1625,16 +1625,17 @@
+_
dump_res
@@ -1769,16 +1769,17 @@
)%0A%0A%0Adef
+_
dump_res
@@ -2054,16 +2054,17 @@
)%0A%0A%0Adef
+_
draw_mar
@@ -2248,16 +2248,17 @@
)%0A%0A%0Adef
+_
load_ima
@@ -2482,16 +2482,17 @@
)%0A%0A%0Adef
+_
to_grays
|
6dd8294b414c6b997171a36c3f28a029dc0dab78
|
Update rest_framework_simplejwt.authentication
|
rest_framework_simplejwt/authentication.py
|
rest_framework_simplejwt/authentication.py
|
from __future__ import unicode_literals
from django.contrib.auth import get_user_model
from django.utils.six import text_type
from django.utils.translation import ugettext_lazy as _
from jose import jwt
from rest_framework import HTTP_HEADER_ENCODING, authentication
from rest_framework.exceptions import AuthenticationFailed
from .settings import api_settings
AUTH_HEADER_TYPE_BYTES = api_settings.AUTH_HEADER_TYPE.encode('utf-8')
User = get_user_model()
class JWTAuthentication(authentication.BaseAuthentication):
"""
An authentication plugin that authenticates requests through a JSON web
token provided in a request header.
"""
www_authenticate_realm = 'api'
def authenticate(self, request):
header = self.get_header(request)
if header is None:
return None
token = self.get_token(header)
if token is None:
return None
payload = self.get_payload(token)
user_id = self.get_user_id(payload)
return (self.get_user(user_id), None)
def authenticate_header(self, request):
return '{0} realm="{1}"'.format(
api_settings.AUTH_HEADER_TYPE,
self.www_authenticate_realm,
)
def get_header(self, request):
"""
Extracts a header containing a JSON web token from the given request.
"""
header = request.META.get('HTTP_AUTHORIZATION')
if isinstance(header, text_type):
# Work around django test client oddness
header = header.encode(HTTP_HEADER_ENCODING)
return header
def get_token(self, header):
"""
Extracts a JSON web token from the given header.
"""
parts = header.split()
if parts[0] != AUTH_HEADER_TYPE_BYTES:
# Assume the header does not contain a JSON web token
return None
if len(parts) != 2:
raise AuthenticationFailed(
_('Authorization header must contain two space-delimited values.'),
)
return parts[1]
def get_payload(self, token):
"""
Extracts a data payload from the given JSON web token.
"""
try:
return jwt.decode(token, api_settings.SECRET_KEY, algorithms=['HS256'])
except TypeError:
raise AuthenticationFailed(_('Token is invalid.'))
def get_user_id(self, payload):
"""
Extracts a recognizable user identifier from the given data payload
object.
"""
try:
return payload[api_settings.PAYLOAD_ID_FIELD]
except KeyError:
raise AuthenticationFailed(_('Token contained no recognizable user identification.'))
def get_user(self, user_id):
"""
Attempts to find and return a user record with the given user
identifier.
"""
try:
user = User.objects.get(**{api_settings.USER_ID_FIELD: user_id})
except User.DoesNotExist:
raise AuthenticationFailed(_('User not found.'))
if not user.is_active:
raise AuthenticationFailed(_('User is inactive.'))
return user
|
Python
| 0.000001
|
@@ -197,16 +197,54 @@
ort jwt%0A
+from jose.exceptions import JOSEError%0A
from res
@@ -461,15 +461,28 @@
ode(
-'utf-8'
+HTTP_HEADER_ENCODING
)%0A%0AU
@@ -2352,12 +2352,12 @@
ept
-Type
+JOSE
Erro
|
4d8cd2d206a85b31737ecad62f7c756d9aecce3d
|
Add the target name to the GwtIncompatible strip rule.
|
build_defs/internal_do_not_use/j2cl_java_library.bzl
|
build_defs/internal_do_not_use/j2cl_java_library.bzl
|
"""Common utilities for creating J2CL targets and providers."""
load(":j2cl_transpile.bzl", "J2CL_TRANSPILE_ATTRS", "j2cl_transpile")
load(":j2cl_js_common.bzl", "J2CL_JS_ATTRS", "JS_PROVIDER_NAME", "j2cl_js_provider")
# Constructor for the Bazel provider for J2CL.
_J2clInfo = provider(fields = ["_J2clJavaInfo"])
def _impl_j2cl_library(ctx):
# Categorize the sources.
js_srcs = []
java_srcs = []
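# Route each src by extension: .js and .zip go to the JS pile, everything else is compiled as Java.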
for src in ctx.files.srcs:
(js_srcs if src.extension in ["js", "zip"] else java_srcs).append(src)
# Validate the attributes.
if not java_srcs:
if ctx.files.deps:
fail("deps not allowed without java srcs")
if js_srcs:
fail("js sources not allowed without java srcs")
java_provider = _java_compile(ctx, java_srcs)
js_zip = j2cl_transpile(ctx, java_provider, js_srcs)
js_outputs = [js_zip] if java_srcs else []
# This is a workaround to b/35847804 to make sure the zip ends up in the runfiles.
js_runfiles = _collect_runfiles(ctx, js_outputs, ctx.attr.deps + ctx.attr.exports)
# Write an empty .jslib output (work around b/38349075 and maybe others).
ctx.actions.write(ctx.outputs.dummy_jslib, "")
return struct(
providers = [
DefaultInfo(
files = depset(js_outputs + [ctx.outputs.jar, ctx.outputs.dummy_jslib]),
runfiles = js_runfiles,
),
_J2clInfo(_J2clJavaInfo = java_provider),
],
**j2cl_js_provider(ctx, srcs = js_outputs, deps = ctx.attr.deps, exports = ctx.attr.exports)
)
def _collect_runfiles(ctx, files, deps):
transitive_runfiles = [d[DefaultInfo].default_runfiles.files for d in deps]
return ctx.runfiles(
files = files,
transitive_files = depset(transitive = transitive_runfiles),
)
def _java_compile(ctx, java_srcs):
stripped_java_srcs = [_strip_gwt_incompatible(ctx, java_srcs)] if java_srcs else []
java_deps = [d[_J2clInfo]._J2clJavaInfo for d in ctx.attr.deps if _J2clInfo in d]
java_exports = [d[_J2clInfo]._J2clJavaInfo for d in ctx.attr.exports if _J2clInfo in d]
plugins = [p[JavaInfo] for p in ctx.attr.plugins]
exported_plugins = [p[JavaInfo] for p in ctx.attr.exported_plugins]
return java_common.compile(
ctx,
source_files = ctx.files.srcs_hack,
source_jars = stripped_java_srcs,
deps = java_deps,
exports = java_exports,
plugins = plugins,
exported_plugins = exported_plugins,
output = ctx.outputs.jar,
java_toolchain = ctx.attr._java_toolchain,
host_javabase = ctx.attr._host_javabase,
javac_opts = java_common.default_javac_opts(ctx, java_toolchain_attr = "_java_toolchain"),
)
def _strip_gwt_incompatible(ctx, java_srcs):
output_file = ctx.actions.declare_file(ctx.label.name + "_stripped-src.jar")
args = ctx.actions.args()
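# Always spill args to a multiline params file; the persistent worker reads its arguments from it.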
args.use_param_file("@%s", use_always = True)
args.set_param_file_format("multiline")
args.add("-d", output_file)
args.add_all(java_srcs)
ctx.actions.run(
progress_message = "Stripping @GwtIncompatible",
inputs = java_srcs,
outputs = [output_file],
executable = ctx.executable._stripper,
arguments = [args],
env = dict(LANG = "en_US.UTF-8"),
execution_requirements = {"supports-workers": "1"},
mnemonic = "J2cl",
)
return output_file
_J2CL_LIB_ATTRS = {
# TODO(goktug): Try to limit this further.
"srcs": attr.label_list(allow_files = [".java", ".js", ".srcjar", ".jar", ".zip"]),
"srcs_hack": attr.label_list(allow_files = True),
"deps": attr.label_list(providers = [JS_PROVIDER_NAME]),
"exports": attr.label_list(providers = [JS_PROVIDER_NAME]),
"plugins": attr.label_list(providers = [JavaInfo]),
"exported_plugins": attr.label_list(providers = [JavaInfo]),
"javacopts": attr.string_list(),
"licenses": attr.license(),
"_java_toolchain": attr.label(
default = Label("@bazel_tools//tools/jdk:toolchain"),
),
"_host_javabase": attr.label(
default = Label("@bazel_tools//tools/jdk:current_host_java_runtime"),
cfg = "host",
),
"_stripper": attr.label(
default = Label("//build_defs/internal_do_not_use:GwtIncompatibleStripper", relative_to_caller_repository = False),
cfg = "host",
executable = True,
),
}
_J2CL_LIB_ATTRS.update(J2CL_TRANSPILE_ATTRS)
_J2CL_LIB_ATTRS.update(J2CL_JS_ATTRS)
j2cl_library = rule(
implementation = _impl_j2cl_library,
attrs = _J2CL_LIB_ATTRS,
fragments = ["java", "js"],
outputs = {
"jar": "lib%{name}.jar",
"srcjar": "lib%{name}-src.jar",
"zip_file": "%{name}.js.zip",
"dummy_jslib": "%{name}.jslib",
},
)
def _impl_java_import(ctx):
return struct(
providers = [_J2clInfo(_J2clJavaInfo = ctx.attr.jar[JavaInfo])],
**j2cl_js_provider(ctx)
)
# helper rule to convert a Java target to a J2CL target.
j2cl_java_import = rule(
implementation = _impl_java_import,
attrs = dict(J2CL_JS_ATTRS, **{
"jar": attr.label(providers = [JavaInfo]),
"licenses": attr.license(),
}),
fragments = ["java", "js"],
)
|
Python
| 0.999998
|
@@ -3139,17 +3139,42 @@
mpatible
-%22
+ from %25s%22 %25 ctx.label.name
,%0A
|
38a0a51ec72733b92f58f2d0208e195ccd8523c7
|
Set authserver's default user/pass to be the default set in settings.py
|
authserver.py
|
authserver.py
|
#!/usr/bin/env python
# ##### BEGIN AGPL LICENSE BLOCK #####
# This file is part of SimpleMMO.
#
# Copyright (C) 2011, 2012 Charles Nelson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ##### END AGPL LICENSE BLOCK #####
'''AuthServer
A server providing authentication, and allows a user to get a list of their characters.
'''
# TODO: Write a function to pull in the docstrings from defined classes here and
# append them to the module docstring
import json
import tornado
import settings
from baseserver import BaseServer, SimpleHandler, BaseHandler
from elixir_models import User, session
class PingHandler(BaseHandler):
def get(self):
self.write("pong")
# TODO: Make an SQLUserController
# TODO: Make an SQLCharacterController
class AuthHandler(BaseHandler):
'''AuthHandler authenticates a user and sets a session in the database.'''
def post(self):
username = self.get_argument("username", "")
password = self.get_argument("password", "")
auth = self.authenticate(username, password)
if auth:
self.set_current_user(username)
self.set_admin(username)
self.write('Login successful.')
else:
raise tornado.web.HTTPError(401, 'Login Failed, username and/or password incorrect.')
def authenticate(self, username, password):
'''Compares a username/password pair against that in the database.
If they match, return True.
Else, return False.'''
# Do some database stuff here to verify the user.
user = User.query.filter_by(username=username, password=password).first()
return True if user else False
def set_admin(self, user):
# Look up username in admins list in database
# if present, set secure cookie for admin
if user in settings.ADMINISTRATORS:
self.set_secure_cookie("admin", 'true')
else:
self.clear_cookie("admin")
def set_current_user(self, user):
if user:
self.set_secure_cookie("user", user)
else:
self.clear_cookie("user")
class LogoutHandler(BaseHandler):
'''Unsets the user's cookie.'''
def get(self):
self.clear_cookie("user")
class CharacterHandler(BaseHandler):
'''CharacterHandler gets a list of characters for the given user account.'''
@tornado.web.authenticated
def get(self):
self.write(json.dumps(self.get_characters(self.get_current_user())))
def get_characters(self, username):
'''Queries the database for all characters owned by a particular username.'''
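# Hard-coded placeholder until a real character lookup is implemented.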
return ['Graxnor', 'Rumtiddlykins']
if __name__ == "__main__":
handlers = []
handlers.append((r"/", lambda x, y: SimpleHandler(__doc__, x, y)))
handlers.append((r"/ping", PingHandler))
handlers.append((r"/login", AuthHandler))
handlers.append((r"/logout", LogoutHandler))
handlers.append((r"/characters", CharacterHandler))
server = BaseServer(handlers)
server.listen(settings.AUTHSERVERPORT)
# Connect to the elixir db
from elixir_models import setup
setup()
user = User.query.filter_by(username=settings.DEFAULT_USERNAME, password=settings.DEFAULT_PASSWORD).first()
if not user:
User(username="Username", password="Password")
session.commit()
print "Starting up Authserver..."
server.start()
|
Python
| 0.000001
|
@@ -3887,39 +3887,69 @@
ame=
-%22Username%22, password=%22Password%22
+settings.DEFAULT_USERNAME, password=settings.DEFAULT_PASSWORD
)%0A
|
cf096184562d723d321f179732aa25f03be35c6d
|
build graphs without saving the dents
|
auto_fetch.py
|
auto_fetch.py
|
#!/usr/bin/env python
###############################################################################
##
## digger - Digging into some data mines
## Copyright (C) 2010 Thammi
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
###############################################################################
from microblogging import *
def search(service, query):
urls = {
'identica' : "http://identi.ca/api/search.json",
'twitter' : "http://search.twitter.com/search.json",
}
if service not in urls:
raise UnknownServiceException(service)
url_parts = {
'query': urllib.urlencode({'q': query}),
'url': urls[service],
}
res = urllib.urlopen("{url}?{query}".format(**url_parts))
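# Any HTTP status below 300 counts as success.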
if res.getcode() < 300:
return json.load(res)
else:
msg = "Unable to fetch: %i" % res.getcode()
raise ServiceFailedException(msg)
def main(argv):
service = argv[0]
tags = argv[1:]
updates = search(service, ' '.join('#' + tag for tag in tags))['results']
users = set(update['from_user'] for update in updates)
save_users(service, users)
if __name__ == '__main__':
import sys
main(sys.argv[1:])
|
Python
| 0.000001
|
@@ -935,21 +935,74 @@
rom
-microblogging
+os.path import join, exists%0A%0Afrom microblogging import *%0Afrom test
imp
@@ -1613,16 +1613,163 @@
(msg)%0A %0A
+def user_exists(service, user):%0A return exists(user_path(service, user))%0A%0Adef user_path(service, user):%0A return join('auto', service, user)%0A%0A
def main
@@ -1961,39 +1961,421 @@
es)%0A
-%0A
-save_users(service, users)
+users = filter(lambda u: not user_exists(service, u), users)%0A%0A print %22Fetching: %22 + ', '.join(users)%0A%0A for user in users:%0A try:%0A print %22==%3E Fetching '%25s'%22 %25 user%0A updates = get_statuses(service, user, 1000)%0A%0A blob_graph(updates, user_path(service, user), microblogging_date)%0A except Exception as e:%0A # the show must go on ...%0A print e
%0A%0Aif
|
faf222f62ae748b7ebd964cd04a4e579022032ca
|
Fix #154 (#155)
|
dftimewolf/lib/state.py
|
dftimewolf/lib/state.py
|
"""This class maintains the internal dfTimewolf state.
Use it to track errors, abort on global failures, cleanup after modules, etc.
"""
# TODO(tomchop): Make sure docstrings here follow the same type hinting as the
# rest of the codebase
from __future__ import print_function
from __future__ import unicode_literals
import sys
import threading
import traceback
from dftimewolf.lib import utils
from dftimewolf.lib.errors import DFTimewolfError
class DFTimewolfState(object):
"""The main State class.
Attributes:
errors: [(str, bool)] The errors generated by a module. These should be
cleaned up after each module run using the cleanup() method.
global_errors: [(str, bool)] the cleanup() method moves non critical errors
to this attribute for later reporting.
input: list, The data that the current module will use as input.
output: list, The data that the current module generates.
store: dict, store of arbitrary data for modules.
"""
def __init__(self, config):
self.errors = []
self.global_errors = []
self.input = []
self.output = []
self.store = {}
self._store_lock = threading.Lock()
self._module_pool = {}
self.config = config
self.recipe = None
self.events = {}
def load_recipe(self, recipe):
"""Populates the internal module pool with modules declared in a recipe.
Args:
recipe: Dict, recipe declaring modules to load.
"""
self.recipe = recipe
for module_description in recipe['modules']:
# Combine CLI args with args from the recipe description
module_name = module_description['name']
module = self.config.get_module(module_name)(self)
self._module_pool[module_name] = module
def store_container(self, container):
"""Thread-safe method to store data in the state's store.
Args:
container (containers.interface.AttributeContainer): The data to store.
"""
with self._store_lock:
self.store.setdefault(container.CONTAINER_TYPE, []).append(container)
def get_containers(self, container_class):
"""Thread-safe method to retrieve data from the state's store.
Args:
container_class: AttributeContainer class used to filter data.
Returns:
A list of AttributeContainer objects of matching CONTAINER_TYPE.
"""
with self._store_lock:
return self.store.get(container_class.CONTAINER_TYPE, [])
def setup_modules(self, args):
"""Performs setup tasks for each module in the module pool.
Threads declared modules' setup() functions. Takes CLI arguments into
account when replacing recipe parameters for each module.
Args:
args: Command line arguments that will be used to replace the parameters
declared in the recipe.
"""
def _setup_module_thread(module_description):
"""Calls the module's setup() function and sets an Event object for it.
Args:
module_description (dict): Corresponding recipe module description.
"""
new_args = utils.import_args_from_dict(
module_description['args'], vars(args), self.config)
module = self._module_pool[module_description['name']]
try:
module.setup(**new_args)
except Exception as error: # pylint: disable=broad-except
self.add_error(
'An unknown error occurred: {0!s}\nFull traceback:\n{1:s}'.format(
error, traceback.format_exc()),
critical=True)
self.events[module_description['name']] = threading.Event()
self.cleanup()
threads = []
for module_description in self.recipe['modules']:
t = threading.Thread(
target=_setup_module_thread,
args=(module_description, )
)
threads.append(t)
t.start()
for t in threads:
t.join()
self.check_errors(is_global=True)
def run_modules(self):
"""Performs the actual processing for each module in the module pool."""
def _run_module_thread(module_description):
"""Runs the module's process() function.
Waits for any blockers to have finished before running process(), then
sets an Event flag declaring the module has completed.
"""
for blocker in module_description['wants']:
self.events[blocker].wait()
module = self._module_pool[module_description['name']]
try:
module.process()
except DFTimewolfError as error:
self.add_error(error.message, critical=True)
except Exception as error: # pylint: disable=broad-except
self.add_error(
'An unknown error occurred: {0!s}\nFull traceback:\n{1:s}'.format(
error, traceback.format_exc()),
critical=True)
print('Module {0:s} completed'.format(module_description['name']))
self.events[module_description['name']].set()
self.cleanup()
threads = []
for module_description in self.recipe['modules']:
t = threading.Thread(
target=_run_module_thread,
args=(module_description, )
)
threads.append(t)
t.start()
for t in threads:
t.join()
self.check_errors(is_global=True)
def add_error(self, error, critical=False):
"""Adds an error to the state.
Args:
error: The text that will be added to the error list.
critical: If set to True and the error is checked with check_errors,
dfTimewolf will abort.
"""
self.errors.append((error, critical))
def cleanup(self):
"""Basic cleanup after modules.
The state's output becomes the input for the next stage. Any errors are
moved to the global_errors attribute so that they can be reported at a
later stage.
"""
# Move any existing errors to global errors
self.global_errors.extend(self.errors)
self.errors = []
# Make the previous module's output available to the next module
self.input = self.output
self.output = []
def check_errors(self, is_global=False):
"""Checks for errors and exits if any of them are critical.
Args:
is_global: If True, check the global_errors attribute. If false, check the
error attribute.
"""
errors = self.global_errors if is_global else self.errors
if errors:
print('dfTimewolf encountered one or more errors:')
for error, critical in errors:
print('{0:s} {1:s}'.format('CRITICAL: ' if critical else '', error))
if critical:
print('Critical error found. Aborting.')
sys.exit(-1)
|
Python
| 0.000002
|
@@ -6373,17 +6373,17 @@
0:s%7D %7B1
-:
+!
s%7D'.form
|
8b504c7913a23d77d3dfb36e3d77722e5a1243aa
|
Add example manager_url
|
autosnatch.py
|
autosnatch.py
|
#!/usr/bin/env python3
import requests
import json
import time
from re import compile
from requests.auth import HTTPBasicAuth
from time import sleep
import pydle
#edit this
_what_username = ''
_what_password = ''
_what_irc_token = ''
_manager_url = '' # also accepts the transcode add url http://seedbox/transcode/request
_manager_username = ''
_manager_password = ''
headers = {
'Connection': 'keep-alive',
'Cache-Control': 'max-age=0',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3)'\
'AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.79'\
'Safari/535.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9'\
',*/*;q=0.8',
'Accept-Encoding': 'gzip,deflate,sdch',
'Accept-Language': 'en-US,en;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'}
#regex = compile('(?P<artist>.+)\s-\s(?P<release>.+)\s\[(?P<year>\d+)\]\s\[(?P<type>.+)\]\s-\s(?P<format>.+)\s/\s(?P<bitrate>.+)\s/\s(?P<media>.+)\s-\shttps://what\.cd/torrents\.php\?id=(?P<id>\d+)\s/\shttps://what\.cd/torrents\.php\?action=download&id=(?P<torrentid>\d+)\s-\s(?P<tags>.*)')
regex = compile('(.+?) - (.+) \[(\d+)\] \[([^\]]+)\] - (MP3|FLAC|Ogg|AAC|AC3|DTS|Ogg Vorbis) / ((?:24bit)?(?: ?Lossless)?(?:[\d|~|\.xVq|\s]*(?:AAC|APX|APS|Mixed|Auto|VBR)?(?: LC)?)?(?: ?(?:\(VBR\)|\(?ABR\)?|[K|k][b|p]{1,2}s)?)?)(?: / (?:Log))?(?: / (?:[-0-9\.]+)\%)?(?: / (?:Cue))?(?: / (CD|DVD|Vinyl|Soundboard|SACD|Cassette|DAT|WEB|Blu-ray))(?: / (Scene))?(?: / (?:Freeleech!))? - https://what\.cd/torrents\.php\?id=(\d+) / https://what\.cd/torrents\.php\?action=download&id=(\d+) - ?(.*)')
class MyOwnBot(pydle.Client):
def on_connect(self):
print("Authing with what.cd")
self.session = requests.Session()
self.session.headers.update(headers)
data = { 'username': _what_username, 'password': _what_password, 'keeplogged': 1, 'login': 'Login' }
r = self.session.post('https://what.cd/login.php', data=data)
if r.status_code != 200:
raise Exception("Can't log in")
self.last_request = time.time()
self.rate_limit = 2.1
self.authkey = None
self.passkey = None
accountinfo = self.request('index')
self.authkey = accountinfo['authkey']
self.passkey = accountinfo['passkey']
self.userid = accountinfo['id']
print('Authed as user id {}'.format(self.userid))
print("Poking drone")
self.message('Drone', 'ENTER #what.cd-announce {} {}'.format(_what_username, _what_irc_token))
def on_message(self, source, target, message):
print("{}: {}".format(source, message))
self.parse_line(message)
def parse_line(self, line):
match = regex.match(line)
if not match:
return False
artist, release, year, release_type, \
release_format, bitrate, media, _, id, \
torrent_id, tags = match.groups()
year = int(year)
tags = tags.split(', ')
if 'Freeleech!' in line:
return False # Removing this is not allowed by the golden rules
year += 3
bitrate = 'whatever'
if year < 2016:
print("Too old: {}".format(year))
return False
if bitrate.lower() not in ['lossless', 'v0 (vbr)', 'v0', '320', '24bit lossless', 'whatever']:
print("Wrong bitrate: {}".format(bitrate))
return False
if self.request('torrent', id=torrent_id)['torrent']['userId'] == self.userid:
print("Skipping because it's a torrent {} made...".format(_what_username))
return False
print("Fetching: {}".format(line))
sleep(2)
fetch_torrent(torrent_id)
def request(self, target, **params):
while time.time() - self.last_request < self.rate_limit:
sleep(0.1)
url = 'https://what.cd/ajax.php'
params['action'] = target
if self.authkey:
params['auth'] = self.authkey
r = self.session.get(url, params=params, allow_redirects=False)
self.last_request = time.time()
return r.json()['response']
def fetch_torrent(torrent_id):
return requests.post(_manager_url,
auth=HTTPBasicAuth(_manager_username, _manager_password),
data={'id':torrent_id}).json()['success']
if __name__ == '__main__':
client = MyOwnBot('{}-autosnatch'.format(_what_username), realname='bot')
client.connect('irc.what-network.net', 6697, tls=True, tls_verify=False)
client.handle_forever()
|
Python
| 0
|
@@ -245,16 +245,59 @@
_url = '
+http://seedbox.example.com/json/add_torrent
' # also
|
cf46e08ebcd8a5d855ef7f21554d2d8edfac04f1
|
Update the navboxes to be somewhat relevant.
|
avenue/web.py
|
avenue/web.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Michael Babich
# See LICENSE.txt or http://opensource.org/licenses/MIT
'''Acts as an interface between what Flask serves and what goes on in
the rest of the application.
'''
from avenue import app, api
from flask import render_template, make_response
from copy import copy
import yaml
from os import path
heading = 'Zombie Raptor'
navbar = []
navbar.append({'title' : 'Avenue',
'content' : 'Read about the Avenue platform that runs this website.',
'link' : '/about'})
navbar.append({'title' : 'Federation',
'content' : 'Form federations with your friends and plot to take over the galaxy!',
'link' : '/'})
navbar.append({'title' : 'Zombie Raptor Blog',
'content' : 'Read new updates from the Zombie Raptor team!',
'link' : '/'})
navbar.append({'title' : 'Forums',
'content' : 'Visit the forums!',
'link' : '/f'})
browser_upgrade = '<p><img src="static/dl/firefox-g.png"></img><img src="static/dl/chrome-g.png"></img><img src="static/dl/safari-g.png"></img><img src="static/dl/opera-g.png"></img></p>'
def button(text):
return '<div class="button">%s</div>' % text
buttons = '<p>%s%s%s%s </p>' % (button(u'↳'), button('+'), button('-'), button('#'))
@app.route('/')
def index():
'''The main page.
'''
words = '<h1>Zombie Raptor Launches... at some point.</h1><p>Expect stuff from Zombie Raptor in the near future.</p>'
page_title = heading
css = 'night'
return render_template('wiki.html',
style=css,
main_title=heading,
post=words,
title=page_title,
sidebar=navbar)
@app.route('/f/')
def f():
return ''
@app.route('/f/main/')
def main_forum():
test = [{'level' : 0, 'content' : '<h1><a href="/f/main/post/1">This is a Sample Thread</a></h1>', 'author' : 'John', 'date' : '1 day ago'},
{'level' : 0, 'content' : '<h1><a href="http://example.com/">test post please ignore</a></h1>', 'author' : 'obviously_original_content', 'date' : '3 years ago'},
{'level' : 0, 'content' : '<h1><a href="http://example.com/">Hey guys, I think I might have discovered a new continent!</a></h1>', 'author' : 'christopher', 'date' : '520 years ago'}]
return render_template('forum.html', style='night', main_title=heading, thread_title='Main Forum', posts=test, sidebar=navbar)
@app.route('/f/main/post/')
def post():
return ''
@app.route('/f/main/post/1')
def sample_post():
page_title = '%s :: %s' % ('Forums', heading)
sample = open(path.join(path.dirname(__file__), 'data', 'sample.yml'))
thread = yaml.load(sample)
sample.close()
return render_template('forum.html', style='night', main_title=heading, posts=thread['posts'], sidebar=navbar, title=page_title, thread_title=thread['title'], threaded=True)
@app.route('/night.css')
def night():
conf = open(path.join(path.dirname(__file__), 'data', 'style.yml'))
style = yaml.load(conf)
conf.close()
response = make_response(render_template('main.css',
text=style['text'],
background=style['background'],
post=style['post']))
response.mimetype = 'text/css'
return response
|
Python
| 0
|
@@ -387,346 +387,8 @@
%5B%5D%0A
-navbar.append(%7B'title' : 'Avenue',%0A 'content' : 'Read about the Avenue platform that runs this website.',%0A 'link' : '/about'%7D)%0A%0Anavbar.append(%7B'title' : 'Federation',%0A 'content' : 'Form federations with your friends and plot to take over the galaxy!',%0A 'link' : '/'%7D)%0A%0A
navb
@@ -570,22 +570,26 @@
e' : '
+Main
Forum
-s
',%0A
@@ -625,14 +625,18 @@
the
+main
forum
-s
!',%0A
@@ -665,16 +665,21 @@
: '/f
+/main
'%7D)%0A%0Abro
|
f21a53ccb0c354cf925d059f0ca007cf0fbba829
|
fix bug: rpc cup 100%
|
spider/rpc.py
|
spider/rpc.py
|
# -*- coding: utf-8 -*-
import logging
import sys
import json
from collections import deque
from time import sleep
from multiprocessing import Process
import pika
from scrapy.utils.project import get_project_settings
from task import crawl, gen_lxmlspider, gen_blogspider
SETTINGS = get_project_settings()
def task(callback, key):
logger = logging.getLogger(__name__)
consumers = deque()
url = '{}?heartbeat=600'.format(SETTINGS['BROKER_URL'])
connection = pika.BlockingConnection(pika.connection.URLParameters(url))
channel = connection.channel()
channel.exchange_declare(exchange='direct_logs', exchange_type='direct')
result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue
channel.queue_bind(
exchange='direct_logs',
queue=queue_name,
routing_key=key
)
channel.basic_qos(prefetch_count=1)
def consume(ch, method, properties, body):
logger.info('get job[%s] from rabbitmq', callback.__name__)
args = json.loads(body)
p = Process(target=callback, args=(args,))
p.daemon = True
consumers.append((p, ch, method))
channel.basic_consume(consume, queue=queue_name)
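# Reap jobs oldest-first: start a queued process, and once it exits, ack the message and drop it.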
while True:
connection.process_data_events()
try:
p, ch, method = consumers[0]
except IndexError:
pass
else:
if p.is_alive():
continue
status = p.exitcode
if status is None:
p.start()
else:
if status == 0:
logger.info('job[%s] finished', callback.__name__)
else:
logger.error(
'job[%s] exited with %d',
callback.__name__,
status
)
p.join()
ch.basic_ack(delivery_tag=method.delivery_tag)
consumers.popleft()
def main():
def init_logger():
root = logging.getLogger()
root.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(SETTINGS['LOG_LEVEL'])
handler.setFormatter(
logging.Formatter(
SETTINGS['LOG_FORMAT'],
SETTINGS['LOG_DATEFORMAT']
)
)
root.addHandler(handler)
init_logger()
logger = logging.getLogger(__name__)
TASKS = [
(crawl, SETTINGS['CRAWL_KEY']),
(gen_lxmlspider, SETTINGS['LXMLSPIDER_KEY']),
(gen_blogspider, SETTINGS['BLOGSPIDER_KEY'])
]
sleep(60)
tasks = [(Process(target=task, args=_), _) for _ in TASKS]
for p, _ in tasks:
p.start()
logger.info('rpc task running ...')
while True:
sleep(60)
for i, (p, args) in enumerate(tasks):
if p.is_alive():
continue
logger.error(
'Error in main: task %s quit unexpectedly',
TASKS[i][0].__name__
)
p.join()
np = Process(target=task, args=args)
np.start()
tasks[i] = (np, args)
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -1122,16 +1122,34 @@
= True%0A
+ p.start()%0A
@@ -1244,32 +1244,49 @@
while True:%0A
+ sleep(2)%0A
connecti
|
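The diff above attacks the pinned core from two sides: the worker process is started as soon as its job arrives, and the supervisor loop sleeps between polls instead of spinning through process_data_events() and continue with no pause. A self-contained sketch of the busy-wait problem and the fix; the names here are illustrative, not taken from spider/rpc.py.

# Sketch: a supervisor loop that polls worker state. Without the sleep,
# the while loop spins at 100% CPU while the job runs; with it (as the
# diff adds via sleep(2)), the loop stays responsive but cheap when idle.
from multiprocessing import Process
from time import sleep

def job():
    sleep(5)  # stand-in for a crawl task

if __name__ == '__main__':
    p = Process(target=job)
    p.daemon = True
    p.start()          # the diff likewise starts the process immediately
    while True:
        sleep(2)       # the crucial line: yield the CPU between polls
        if not p.is_alive():
            p.join()
            print('job exited with', p.exitcode)
            break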
53ff0431378003126199d33a1f70faffa43bf792
|
Add some simple view testing with django.test.Client
|
tic_tac_toe/tests.py
|
tic_tac_toe/tests.py
|
import unittest
from .game import Game, GameException
class TicTacToeTest(unittest.TestCase):
"Basic tests for Tic Tac Toe game"
def testEmptyGame(self):
"Empty game, computer vs computer, always ties"
game = Game()
while not game.is_over():
game.play()
# game should be over and tied
self.assertEqual(game.is_over(), True)
self.assertEqual(game.winner, None)
self.assertEqual(game.check_status(), game.STATUS_TIED)
def testBadBoard(self):
"Bad board raises error"
self.assertRaises(GameException, Game, initial_board=[1, 2, 3, 4])
def testPlayerTurns(self):
"Check player turns alternate properly"
game = Game(initial_board=[0,0,1, 0,0,0, 0,0,0])
self.assertEqual(game.current_player.marker, 2)
game.play()
self.assertEqual(game.current_player.marker, 1)
game.play()
self.assertEqual(game.current_player.marker, 2)
def testWin(self):
"Pass in a board one play away from winning, should take it"
game = Game(initial_board=[1,2,0, 1,2,0, 0,0,0])
self.assertEqual(game.current_player.marker, 1)
game.play()
self.assertEqual(game.check_status(), game.STATUS_X)
self.assertEqual(game.winner, game.player_x)
def testManualPlay(self):
"Test manually playing some squares, with invalid value"
game = Game()
game.play(0)
game.play(4)
game.play(1)
self.assertEqual(game.x_squares, {0, 1}) # x's squares
self.assertEqual(game.o_squares, {4}) # o's squares
self.assertRaises(GameException, game.play, 42) # invalid square
|
Python
| 0.000001
|
@@ -1,12 +1,24 @@
+import json%0A
import unitt
@@ -22,16 +22,109 @@
ittest%0A%0A
+from django.core.urlresolvers import reverse, NoReverseMatch%0Afrom django.test import Client%0A%0A
from .ga
@@ -154,16 +154,17 @@
eption%0A%0A
+%0A
class Ti
@@ -170,16 +170,20 @@
icTacToe
+Game
Test(uni
@@ -1797,8 +1797,1564 @@
square%0A
+%0A%0Adef url_for_board(board):%0A return reverse('tic_tac_toe_play', args=(board, ))%0A%0A%0Aclass TicTacToeAppTest(unittest.TestCase):%0A %22Tests for the tac_tac_toe django app%22%0A def setUp(self):%0A self.client = Client()%0A%0A def testIndexView(self):%0A %22%22%22%0A For now, just do a simple 200.%0A We'd need selenium or something to interact with this page.%0A %22%22%22%0A url = reverse('tic_tac_toe_index')%0A response = self.client.get(url)%0A self.assertEqual(response.status_code, 200)%0A%0A def testFirstPlay(self):%0A %22Try a first move, make sure the response is correct%22%0A response = self.client.get(url_for_board('100000000'))%0A self.assertEqual(response.status_code, 200)%0A data = json.loads(response.content)%0A self.assertEqual(data%5B'is_over'%5D, False)%0A self.assertEqual(data%5B'winner'%5D, None)%0A # for this initial board, the response should always be%0A # to take the center square:%0A self.assertEqual(data%5B'squares'%5D, %5B1,0,0, 0,2,0, 0,0,0%5D)%0A%0A def testWinningPlay(self):%0A %22Give the game a board it can win. Make sure it does%22%0A response = self.client.get(url_for_board('201210000'))%0A data = json.loads(response.content)%0A self.assertEqual(data%5B'is_over'%5D, True)%0A self.assertEqual(data%5B'winner'%5D, 1)%0A self.assertEqual(data%5B'squares'%5D, %5B2,0,1, 2,1,0, 1,0,0%5D)%0A%0A def testBadRequests(self):%0A self.assertRaises(NoReverseMatch, url_for_board, '00100200')%0A self.assertRaises(NoReverseMatch, url_for_board, 'aoeu')%0A
|
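The diff adds view tests built on django.test.Client. A trimmed sketch of the pattern follows, under the same assumptions as the diff: a configured Django project, a URL named tic_tac_toe_play that returns JSON, and the pre-2.0 import path django.core.urlresolvers (newer Django moves reverse to django.urls).

# Sketch: exercising a JSON view through the Django test client.
import json
import unittest

from django.core.urlresolvers import reverse
from django.test import Client

class PlayViewTest(unittest.TestCase):
    def setUp(self):
        self.client = Client()

    def test_first_play(self):
        url = reverse('tic_tac_toe_play', args=('100000000',))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertFalse(data['is_over'])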
93d3012b74dda05e058fc343d252173f33722bc1
|
change default crawl date
|
split_data.py
|
split_data.py
|
#!/usr/bin/env python
import os
import logging
import argparse
import random
import json
import time
import datetime
def load_data(filename):
with open(filename, 'r') as f:
for line in f:
yield json.loads(line.strip())
def read_date(date_str):
try:
return time.strptime(date_str, '%Y-%m-%d')
except ValueError:
return time.strptime(date_str, '%y/%m/%d')
def has_progressed(days, t1, t2):
t1 = datetime.datetime.fromtimestamp(time.mktime(t1))
t2 = datetime.datetime.fromtimestamp(time.mktime(t2))
return (t2 - t1).days > days
def main():
logging.basicConfig(level=logging.INFO, format='%(message)s')
parser = argparse.ArgumentParser(description='Split data into train, dev, and test')
parser.add_argument('json', help='Input data file')
parser.add_argument('output_dir')
parser.add_argument('--threshold', type=int, default=10)
parser.add_argument('--seed', type=int, default=0)
parser.add_argument('--crawl_date', type=read_date, default='2013-07-22')
args = parser.parse_args()
random.seed(args.seed)
data = load_data(args.json)
data = filter(lambda d: has_progressed(90,
read_date(d['published_date']),
args.crawl_date), data)
data = list(data) # convert generator into list
pos_data = filter(lambda d: d['report_count'] >= args.threshold, data)
neg_data = filter(lambda d: d['report_count'] < args.threshold, data)
logging.info('Size of pos data: {}'.format(len(pos_data)))
logging.info('Size of neg data: {}'.format(len(neg_data)))
for d in pos_data:
d['label'] = 1
for d in neg_data:
d['label'] = 0
random.shuffle(pos_data)
random.shuffle(neg_data)
train_ratio = 0.7
dev_ratio = 0.2
assert(train_ratio + dev_ratio < 1)
pos_num_data = len(pos_data)
neg_num_data = len(neg_data)
pos_split1 = int(pos_num_data * train_ratio)
pos_split2 = int(pos_num_data * (train_ratio + dev_ratio))
neg_split1 = int(neg_num_data * train_ratio)
neg_split2 = int(neg_num_data * (train_ratio + dev_ratio))
train_data = pos_data[:pos_split1] + neg_data[:neg_split1]
dev_data = pos_data[pos_split1:pos_split2] + neg_data[neg_split1:neg_split2]
test_data = pos_data[pos_split2:] + neg_data[neg_split2:]
def write_file(filename, data):
with open(filename, 'w') as f:
for d in data:
f.write('{}\n'.format(json.dumps(d)))
write_file(os.path.join(args.output_dir, 'train.json'), train_data)
write_file(os.path.join(args.output_dir, 'dev.json'), dev_data)
write_file(os.path.join(args.output_dir, 'test.json'), test_data)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -1025,12 +1025,12 @@
13-0
-7-22
+8-01
')%0A
|
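For context on what the new default changes: split_data.py keeps only entries whose publication date precedes the crawl date by more than 90 days, so moving the default crawl date from 2013-07-22 to 2013-08-01 admits items published up to ten days later. A runnable sketch of that filter, reusing the two helpers from the file above:

# Sketch: the 90-day progression filter from split_data.py.
import datetime
import time

def read_date(date_str):
    try:
        return time.strptime(date_str, '%Y-%m-%d')
    except ValueError:
        return time.strptime(date_str, '%y/%m/%d')

def has_progressed(days, t1, t2):
    t1 = datetime.datetime.fromtimestamp(time.mktime(t1))
    t2 = datetime.datetime.fromtimestamp(time.mktime(t2))
    return (t2 - t1).days > days

# 103 days elapsed, so this entry survives the filter
print(has_progressed(90, read_date('2013-04-20'), read_date('2013-08-01')))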
ea416504c287bc5a3716289b57ebfd15bb770b9d
|
Use a string instead of a file
|
sql/branch.py
|
sql/branch.py
|
from gratipay import wireup
env = wireup.env()
db = wireup.db(env)
participants = []
with open('./sql/emails.txt') as f:
emails = [line.rstrip() for line in f]
participants = db.all("""
SELECT p.*::participants
FROM participants p
WHERE email_address IN %s
""", (tuple(emails), ))
for p in participants:
p.queue_email('double_emails')
|
Python
| 0
|
@@ -1,8 +1,20 @@
+import sys%0A%0A
from gra
@@ -78,67 +78,134 @@
v)%0A%0A
-participants = %5B%5D%0A%0Awith open('./sql/
+# Temporary, will fill with actual values when running script%0A
email
-s.txt') as f:%0A
+_txt = %22%22%22%0A rohitpaulk@live.com%0A abcd@gmail.com%0A%22%22%22%0A%0A
emai
@@ -214,14 +214,14 @@
= %5B
-line.r
+email.
stri
@@ -232,23 +232,83 @@
for
-line in f%5D%0A
+email in email_txt.split()%5D%0A%0Aassert len(emails) == 176%0A%0Aparticipants = %5B%5D%0A%0A
part
@@ -329,20 +329,16 @@
all(%22%22%22%0A
-
SELE
@@ -364,20 +364,16 @@
s%0A
-
FROM par
@@ -384,20 +384,16 @@
pants p%0A
-
WHE
@@ -415,20 +415,16 @@
s IN %25s%0A
-
%22%22%22, (tu
@@ -498,8 +498,34 @@
mails')%0A
+%0Aprint(%22Done%22)%0Asys.exit()%0A
|
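A sketch of the pattern this commit applies: the address list is inlined as a triple-quoted string and split on whitespace, with an assert guarding the expected count before anything touches the database. The addresses below are placeholders, not values from the diff.

# Sketch: inline data instead of a sidecar file.
email_txt = """
alice@example.com
bob@example.com
"""

emails = [email.strip() for email in email_txt.split()]
assert len(emails) == 2  # the real script asserts the expected count (176)
print(emails)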
5949f025227f90f1c2d99b07e7da50aef0dfc94a
|
modify V3 Status return into V2 status return
|
src/geni/am/gram_am2.py
|
src/geni/am/gram_am2.py
|
# AM API V2 version of Gram aggregate manager
# For testing against tools (Flack, portal) that speak AM API V2
# Since Gram is written to support V3
from am2 import Slice, ReferenceAggregateManager
from am2 import AggregateManager, AggregateManagerServer
from am3 import ReferenceAggregateManager as ReferenceAggregateManager_V3, Slice as Slice_V3
class GramReferenceAggregateManager(ReferenceAggregateManager):
def __init__(self, root_cert, urn_authority, url, server):
ReferenceAggregateManager.__init__(self, root_cert, urn_authority, url)
self._v3_am = ReferenceAggregateManager_V3(root_cert, urn_authority, url)
self._am_type = "gram"
self._server = server
self._v3_am._server = server
def GetVersion(self, options):
return ReferenceAggregateManager.GetVersion(self, options)
def ListResources(self, credentials, options):
print "OPTIONS = " + str(options)
credentials = [self.transform_credential(c) for c in credentials]
if 'geni_slice_urn' in options:
slice_urn = options['geni_slice_urn']
slice_urns = [slice_urn]
ret_v3 = self._v3_am.Describe(slice_urns, credentials, options)
else:
ret_v3 = self._v3_am.ListResources(credentials, options)
return ret_v3
def CreateSliver(self, slice_urn, credentials, rspec, users, options):
credentials = [self.transform_credential(c) for c in credentials]
urns = [slice_urn]
# Allocate
ret_allocate_v3 = self._v3_am.Allocate(slice_urn, credentials, \
rspec, options)
# print "ALLOC_RET " + str(ret_allocate_v3)
if ret_allocate_v3['code']['geni_code'] != 0:
return ret_allocate_v3
manifest = ret_allocate_v3['value']['geni_rspec']
# Provision
ret_provision_v3 = self._v3_am.Provision(urns, credentials, options)
# print "PROV_RET " + str(ret_provision_v3)
if ret_provision_v3['code']['geni_code'] != 0:
return ret_provision_v3
# PerformOperationalAction(geni_start)
action = 'geni_start'
self._v3_am.PerformOperationalAction(urns, credentials, \
action, options)
return self.successResult(manifest)
def DeleteSliver(self, slice_urn, credentials, options):
credentials = [self.transform_credential(c) for c in credentials]
urns = [slice_urn]
ret_v3 = self._v3_am.Delete(urns, credentials, options)
return self.successResult(True)
def SliverStatus(self, slice_urn, credentials, options):
credentials = [self.transform_credential(c) for c in credentials]
urns = [slice_urn]
ret_v3 = self._v3_am.Status(urns, credentials, options)
# print "SS RET = " + str(ret_v3)
return ret_v3
def RenewSliver(self, slice_urn, credentials, expiration_time, options):
credentials = [self.transform_credential(c) for c in credentials]
urns = [slice_urn]
ret_v3 = self._v3_am.Renew(urns, credentials,
expiration_time, options)
return ret_v3
def Shutdown(self, slice_urn, credentials, options):
credentials = [self.transform_credential(c) for c in credentials]
ret_v3 = self._v3_am.Shutdown(slice_urn, credentials, options)
return ret_v3
def transform_credential(self, c):
# Make these acceptable for V3 AM
# Create a dictionary [geni_type='geni_sfa', geni_version=3, geni_value=c
if isinstance(c, dict) and c.has_key('geni_value'):
c = c['geni_value']
if isinstance(c, str):
return dict(geni_type='geni_sfa', geni_version=3, geni_value=c)
else:
msg = "Bad Arguments: Received illegal credential %s" % str(c)
raise Exception(msg)
def successResult(self, value):
code_dict = dict(geni_code = 0, am_type = self._am_type, am_code=0)
return dict(code=code_dict, value=value, output="")
class GramAggregateManagerServer(AggregateManagerServer):
def __init__(self, addr, keyfile=None, certfile=None,
trust_roots_dir=None,
ca_certs=None, base_name=None):
AggregateManagerServer.__init__(self, addr, \
keyfile = keyfile, \
certfile = certfile, \
trust_roots_dir = trust_roots_dir, \
ca_certs = ca_certs, \
base_name = base_name)
server_url = "https://%s:%d/" % addr
delegate=GramReferenceAggregateManager(trust_roots_dir, \
base_name, server_url, \
self._server)
self._server.register_instance(AggregateManager(delegate))
|
Python
| 0.000046
|
@@ -2826,33 +2826,551 @@
tials, options)%0A
-#
+ print %22RET_V3%22 + str(ret_v3)%0A ret_v2 = ret_v3%0A value = ret_v2%5B'value'%5D%0A value%5B'geni_resources'%5D = value%5B'geni_slivers'%5D%0A slice_state = 'ready'%0A for res_status in value%5B'geni_resources'%5D:%0A state = 'ready'%0A if res_status%5B'geni_operational_status'%5D != 'geni_ready':%0A state = 'pending'%0A slice_state = state%0A res_status%5B'geni_status'%5D = state%0A value%5B'geni_status'%5D = slice_state%0A ret_v2%5B'value'%5D = value%0A
print %22S
@@ -3382,33 +3382,33 @@
= %22 + str(ret_v
-3
+2
)%0A return
@@ -3405,33 +3405,33 @@
return ret_v
-3
+2
%0A%0A def RenewS
|
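The diff rewrites SliverStatus to down-convert the v3 Status payload for v2 callers: v2 expects per-resource geni_status plus a slice-level geni_status, which v3 reports as geni_operational_status on each sliver. A cleaned-up sketch of the inserted mapping, assuming the field names shown in the diff and marking the slice pending as soon as any sliver is not geni_ready:

# Sketch: translate an AM API v3 Status return into v2 shape.
def v3_status_to_v2(ret_v3):
    ret_v2 = ret_v3
    value = ret_v2['value']
    value['geni_resources'] = value['geni_slivers']
    slice_state = 'ready'
    for res_status in value['geni_resources']:
        state = 'ready'
        if res_status['geni_operational_status'] != 'geni_ready':
            state = 'pending'
            slice_state = state  # any non-ready sliver marks the slice
        res_status['geni_status'] = state
    value['geni_status'] = slice_state
    return ret_v2

example = {'value': {'geni_slivers': [
    {'geni_operational_status': 'geni_ready'},
    {'geni_operational_status': 'geni_configuring'},
]}}
print(v3_status_to_v2(example)['value']['geni_status'])  # pending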
36e3ee242098f1768e009fca320c5d94142529d1
|
set debug to false
|
ui/app.py
|
ui/app.py
|
from flask import Flask, redirect, url_for, flash, g, config, session
from flask_restful import Api
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager, UserMixin, current_user, login_user
from werkzeug.security import generate_password_hash, check_password_hash
from flask_restful import Resource
from flask import request
# views
from app.views.views import main as views_blueprints
# forms
from app.forms.forms import LoginForm
# api
from app.api.api import Prediction, Admin
# models
from app.models.models import User
# app
from app import app
# config
from app.config import Config
app.register_blueprint(views_blueprints)
login_manager = LoginManager(app)
login_manager.init_app(app)
@app.before_request
def inject_globals():
with app.app_context():
session['VERSION'] = Config.VERSION
session['MSG'] = Config.MSG
return None
@login_manager.user_loader
# @app.before_request
def load_user(user_id):
return User.query.get(user_id)
api = Api(app)
api.add_resource(Prediction, '/api/1.0/prediction/')
api.add_resource(Admin, '/api/1.0/user/')
if __name__ == '__main__':
app.run(debug=True)
|
Python
| 0.99987
|
@@ -1149,16 +1149,6 @@
run(
-debug=True
)%0A
|
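The change simply drops debug=True from app.run(). A common follow-on, sketched below with an illustrative FLASK_DEBUG variable (not taken from the file above), is to gate debug mode on the environment so production defaults to off:

# Sketch: opt in to debug mode via the environment.
import os

from flask import Flask

app = Flask(__name__)

if __name__ == '__main__':
    app.run(debug=os.environ.get('FLASK_DEBUG') == '1')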
e3d00d26d00875628f4e43873b5fed14b2474f75
|
Revert "FIX: bad logic in Pipeline."
|
scikits/learn/pipeline.py
|
scikits/learn/pipeline.py
|
"""
Pipeline: chain transforms and estimators to build a composite estimator.
"""
# Author: Edouard Duchesnay
# Gael Varoquaux
# Virgile Fritsch
# Alexandre Gramfort
# Licence: BSD
from .base import BaseEstimator
class Pipeline(BaseEstimator):
""" Pipeline of transforms with a final estimator
Sequentialy apply a list of transforms and a final estimator
Intermediate steps of the pipeline must be 'transforms', that
is that they must implements fit & transform methods
The final estimator need only implements fit.
The purpose of the pipeline is to assemble several steps that can
be cross-validated together while setting different parameters.
For this, it enables to setting parameters of the various steps
using their names and the parameter name separated by a '__',
as in the example below.
Attributes
===========
steps: list of (names, object)
List of the named object that compose the pipeline, in the
order that they are applied on the data.
Methods
=======
fit:
Fit all the transforms one after the other and transform the
data, then fit the transformed data using the final estimator
fit_transform:
Fit all the transforms one after the other and transform the
data, then use fit_transform on transformed data using the final
estimator. Valid only if the final estimator implements
fit_transform.
predict:
Applies transforms to the data, and the predict method of the
final estimator. Valid only if the final estimator implements
predict.
transform:
Applies transforms to the data, and the transform method of the
final estimator. Valid only if the final estimator implements
transform.
score:
Applies transforms to the data, and the score method of the
final estimator. Valid only if the final estimator implements
score.
Example
=======
>>> from scikits.learn import svm
>>> from scikits.learn.datasets import samples_generator
>>> from scikits.learn.feature_selection import SelectKBest, f_regression
>>> from scikits.learn.pipeline import Pipeline
>>> # generate some data to play with
>>> X, y = samples_generator.test_dataset_classif(k=5)
>>> # ANOVA SVM-C
>>> anova_filter = SelectKBest(f_regression, k=5)
>>> clf = svm.SVC(kernel='linear')
>>> anova_svm = Pipeline([('anova', anova_filter), ('svc', clf)])
>>> # You can set the parameters using the names issued
>>> # For instance, fit using a k of 10 in the SelectKBest
>>> # and a parameter 'C' of the svn
>>> anova_svm.fit(X, y, anova__k=10, svc__C=.1) #doctest: +ELLIPSIS
Pipeline(steps=[('anova', SelectKBest(k=10, score_func=<function f_regression at ...>)), ('svc', SVC(kernel='linear', C=0.1, probability=False, degree=3, coef0=0.0, tol=0.001,
shrinking=True, gamma=0.0))])
>>> prediction = anova_svm.predict(X)
>>> score = anova_svm.score(X)
"""
#--------------------------------------------------------------------------
# BaseEstimator interface
#--------------------------------------------------------------------------
def __init__(self, steps):
"""
Parameters
==========
steps: list
List of (name, transform) object (implementing
fit/transform) that are chained, in the order in which
they are chained, with the last object an estimator.
"""
self.named_steps = dict(steps)
names, estimators = zip(*steps)
self.steps = steps
assert len(self.named_steps) == len(steps), ("Names provided are "
"not unique: %s" % names)
transforms = estimators[:-1]
estimator = estimators[-1]
for t in transforms:
assert (hasattr(t, "fit") and hasattr(t, "transform")) or \
hasattr(t, "fit_transform"), ValueError(
"All intermediate steps of the chain should be transforms "
"and implement fit and transform or fit_transform",
"'%s' (type %s) doesn't)" % (t, type(t))
)
assert hasattr(estimator, "fit"), \
("Last step of chain should implement fit",
"'%s' (type %s) doesn't)" % (estimator, type(estimator))
)
def _get_params(self, deep=True):
if not deep:
return super(Pipeline, self)._get_params(deep=False)
else:
out = self.named_steps.copy()
for name, step in self.named_steps.iteritems():
for key, value in step._get_params(deep=True).iteritems():
out['%s__%s' % (name, key)] = value
return out
#--------------------------------------------------------------------------
# Estimator interface
#--------------------------------------------------------------------------
def _pre_transform(self, X, y=None, **params):
self._set_params(**params)
Xt = X
for name, transform in self.steps[:-1]:
if hasattr(transform, "fit_transform"):
Xt = transform.fit_transform(Xt, y)
else:
Xt = transform.fit(Xt, y).transform(Xt)
return Xt
def fit(self, X, y=None, **params):
Xt = self._pre_transform(X, y, **params)
self.steps[-1][-1].fit(Xt, y)
return self
def fit_transform(self, X, y=None, **params):
Xt = self._pre_transform(X, y, **params)
return self.steps[-1][-1].fit_transform(Xt, y)
def predict(self, X):
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict(Xt)
def predict_proba(self, X):
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict_proba(Xt)
def predict_log_proba(self, X):
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict_log_proba(Xt)
def transform(self, X):
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].transform(Xt)
def inverse_transform(self, X):
if X.ndim == 1:
X = X[None, :]
Xt = X
for name, step in self.steps[:-1][::-1]:
Xt = step.inverse_transform(Xt)
return Xt
def score(self, X, y=None):
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].score(Xt, y)
|
Python
| 0
|
@@ -4135,19 +4135,18 @@
%22fit%22)
-and
+or
hasattr
@@ -4150,16 +4150,20 @@
ttr(t, %22
+fit_
transfor
@@ -4167,18 +4167,19 @@
form%22))
-or
+and
%5C%0A
@@ -4197,36 +4197,32 @@
hasattr(t, %22
-fit_
transform%22), Val
@@ -4266,26 +4266,25 @@
diate steps
-of
+a
the chain s
@@ -4357,25 +4357,8 @@
form
- or fit_transform
%22,%0A
|
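The revert toggles the validation of intermediate pipeline steps between two predicates, and the distinction matters because predict, transform and score all call transform() on every intermediate step: a step offering only fit_transform() passes the pre-revert check yet fails at prediction time. A sketch contrasting the two rules:

# Sketch: the two validation rules the revert switches between.
def valid_after_revert(t):
    return (hasattr(t, 'fit') or hasattr(t, 'fit_transform')) \
        and hasattr(t, 'transform')

def valid_before_revert(t):
    return (hasattr(t, 'fit') and hasattr(t, 'transform')) \
        or hasattr(t, 'fit_transform')

class FitTransformOnly(object):
    def fit_transform(self, X, y=None):
        return X

step = FitTransformOnly()
print(valid_before_revert(step))  # True, but predict() would crash later
print(valid_after_revert(step))   # False, rejected up front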
1ab90959f3cedea320dce0c31bfee9a6775cef5c
|
Remove output_extension config
|
generate_resume.py
|
generate_resume.py
|
import argparse
import jinja2
import os
import yaml
import bnrg.filters
from debug.debug import dprint
class OutputFormat(object):
def __init__(self, arg_name, template_extension, output_suffix, output_extension):
self.arg_name = arg_name
self.template_extension = template_extension
self.output_suffix = output_suffix
self.output_extension = output_extension
# maps output format to template file extension
_OUTPUT_FORMATS = {
'pdf': OutputFormat('pdf', 'tex', None, 'pdf'),
'formatted_text': OutputFormat('formatted_text', 'txt', '_formatted', 'txt'),
'plain_text': OutputFormat('plain_text', 'txt', None, 'txt')
}
def load_templates(template_dir=os.path.join(os.getcwd(), 'template')):
loader = jinja2.FileSystemLoader(template_dir)
environment = jinja2.environment.Environment(loader=loader, trim_blocks=True, lstrip_blocks=True)
_register_filters(environment)
return environment
def _register_filters(environment):
environment.filters['right'] = bnrg.filters.do_right
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Generates multiple resume outputs from a singular YAML-formatted source")
parser.add_argument('--formats', '-f', help="output formats to generate. There must be a template of the same name in template/", nargs='+', choices=_OUTPUT_FORMATS.keys(), default=_OUTPUT_FORMATS.keys())
parser.add_argument('--destination', '-d', help="directory used to write generated documents", default="output")
parser.add_argument('--output-name', '-o', dest='output_name', help="base name used for generated files in 'destination'", default="document")
parser.add_argument('source_file', help="yaml-formatted containing the desired resume sections")
args = parser.parse_args()
environment = load_templates()
dprint("found templates {}".format(environment.list_templates()))
with open(args.source_file, 'r') as source_file:
# create an output directory if one doesn't yet exist
try:
os.mkdir(args.destination)
except (OSError):
pass
raw = yaml.load(source_file)
# generate all requested formats
for doc_format in args.formats:
output_ext = _OUTPUT_FORMATS[doc_format].output_extension
template_ext = _OUTPUT_FORMATS[doc_format].template_extension
suffix = _OUTPUT_FORMATS[doc_format].output_suffix
if suffix is None:
suffix = ""
output_file = os.path.join(args.destination, args.output_name + suffix + os.path.extsep + output_ext)
with open(output_file, 'w') as output:
try:
template_name = os.path.join(doc_format, 'base' + os.path.extsep + template_ext)
template = environment.get_template(template_name)
output.write(template.render(root=raw))
except (jinja2.TemplateNotFound):
print("Unable to find base template {}".format(template_name))
|
Python
| 0
|
@@ -196,29 +196,11 @@
ffix
-, output_extension
):%0A
+
@@ -328,57 +328,8 @@
fix%0A
- self.output_extension = output_extension%0A
%0A%0A#
@@ -399,19 +399,21 @@
%7B%0A '
-pdf
+latex
': Outpu
@@ -421,19 +421,21 @@
Format('
-pdf
+latex
', 'tex'
@@ -444,15 +444,8 @@
None
-, 'pdf'
),%0A
@@ -515,23 +515,16 @@
rmatted'
-, 'txt'
),%0A '
@@ -574,23 +574,16 @@
t', None
-, 'txt'
)%0A%7D%0A%0Adef
@@ -2177,22 +2177,24 @@
-output
+template
_ext = _
@@ -2220,22 +2220,24 @@
format%5D.
-output
+template
_extensi
@@ -2255,60 +2255,93 @@
-template_ext = _OUTPUT_FORMATS%5Bdoc_format%5D.template_
+output_ext = template_ext # all existing templates generate files with the same file
exte
|
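After the diff, the output extension is no longer configured per format: it is derived from the template extension (every existing template writes a file of the same type), and the pdf entry is renamed latex. A sketch of the simplified table:

# Sketch: OutputFormat without the redundant output_extension field.
class OutputFormat(object):
    def __init__(self, arg_name, template_extension, output_suffix):
        self.arg_name = arg_name
        self.template_extension = template_extension
        self.output_suffix = output_suffix

_OUTPUT_FORMATS = {
    'latex': OutputFormat('latex', 'tex', None),
    'formatted_text': OutputFormat('formatted_text', 'txt', '_formatted'),
    'plain_text': OutputFormat('plain_text', 'txt', None),
}

template_ext = _OUTPUT_FORMATS['latex'].template_extension
output_ext = template_ext  # derived, not configured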
3dc85ab93e9c295b878579ede6ee797064c71e84
|
update plos with settings days back
|
scrapi/harvesters/plos.py
|
scrapi/harvesters/plos.py
|
"""PLoS-API-harvester
=================
<p>To run "harvester.py" please follow the instructions:</p>
<ol>
<li>Create an account on <a href="http://register.plos.org/ambra-registration/register.action">PLOS API</a></li>
<li>Sign in <a href="http://alm.plos.org/">here</a> and click on your account name. Retrieve your API key.</li>
<li>Create a new file in the folder named "settings.py". In the file, put<br>
<code>API_KEY = (your API key)</code></li>
</ol>
Sample API query: http://api.plos.org/search?q=publication_date:[2015-01-30T00:00:00Z%20TO%202015-02-02T00:00:00Z]&api_key=ayourapikeyhere&rows=999&start=0
"""
from __future__ import unicode_literals
import logging
from datetime import date, timedelta
from lxml import etree
from dateutil.parser import *
from scrapi import requests
from scrapi.base import XMLHarvester
from scrapi.linter.document import RawDocument
from scrapi.base.helpers import default_name_parser, build_properties, compose, single_result
logger = logging.getLogger(__name__)
try:
from scrapi.settings import PLOS_API_KEY
except ImportError:
PLOS_API_KEY = None
logger.error('No PLOS_API_KEY found, PLoS will always return []')
class PlosHarvester(XMLHarvester):
short_name = 'plos'
long_name = 'Public Library of Science'
url = 'http://www.plos.org/'
namespaces = {}
MAX_ROWS_PER_REQUEST = 999
BASE_URL = 'http://api.plos.org/search'
def fetch_rows(self, start_date, end_date):
query = 'publication_date:[{}T00:00:00Z TO {}T00:00:00Z]'.format(start_date, end_date)
resp = requests.get(self.BASE_URL, params={
'q': query,
'rows': '0',
'api_key': PLOS_API_KEY,
})
total_rows = etree.XML(resp.content).xpath('//result/@numFound')
total_rows = int(total_rows[0]) if total_rows else 0
current_row = 0
while current_row < total_rows:
response = requests.get(self.BASE_URL, throttle=5, params={
'q': query,
'start': current_row,
'api_key': PLOS_API_KEY,
'rows': self.MAX_ROWS_PER_REQUEST,
})
for doc in etree.XML(response.content).xpath('//doc'):
yield doc
current_row += self.MAX_ROWS_PER_REQUEST
def harvest(self, start_date=None, end_date=None):
start_date = start_date or date.today() - timedelta(1)
end_date = end_date or date.today()
if not PLOS_API_KEY:
return []
return [
RawDocument({
'filetype': 'xml',
'source': self.short_name,
'doc': etree.tostring(row),
'docID': row.xpath("str[@name='id']")[0].text.decode('utf-8'),
})
for row in
self.fetch_rows(start_date.isoformat(), end_date.isoformat())
if row.xpath("arr[@name='abstract']")
or row.xpath("str[@name='author_display']")
]
schema = {
'uris': {
'canonicalUri': ('//str[@name="id"]/node()', compose('http://dx.doi.org/{}'.format, single_result)),
},
'contributors': ('//arr[@name="author_display"]/str/node()', default_name_parser),
'providerUpdatedDateTime': ('//date[@name="publication_data"]/node()', compose(lambda x: parse(x).date().isoformat().decode('utf-8'), single_result)),
'title': ('//str[@name="title_display"]/node()', single_result),
'description': ('//arr[@name="abstract"]/str/node()', single_result),
'publisher': {
'name': ('//str[@name="journal"]/node()', single_result)
},
'otherProperties': build_properties(
('eissn', '//str[@name="eissn"]/node()'),
('articleType', '//str[@name="article_type"]/node()'),
('score', '//float[@name="score"]/node()')
)
}
|
Python
| 0
|
@@ -794,16 +794,44 @@
equests%0A
+from scrapi import settings%0A
from scr
@@ -2442,17 +2442,34 @@
medelta(
-1
+settings.DAYS_BACK
)%0A
|
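The harvest window start moves from a hard-coded single day back to a project-level setting. A sketch of the resulting computation; DAYS_BACK stands in for settings.DAYS_BACK, which the diff implies is defined in scrapi.settings:

# Sketch: a settings-driven harvest window.
from datetime import date, timedelta

DAYS_BACK = 3  # stand-in for settings.DAYS_BACK

start_date = date.today() - timedelta(DAYS_BACK)
end_date = date.today()
print(start_date.isoformat(), end_date.isoformat())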
62fc93d2847f09d4f4b9f7ed7cf58a55e795d291
|
Switch to multi-process execution
|
utils/eliteprospects_utils.py
|
utils/eliteprospects_utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from urllib.parse import urlparse
from collections import namedtuple
from concurrent.futures import ThreadPoolExecutor, as_completed
import requests
from lxml import html
from utils import remove_non_ascii_chars
# base url for eliteprospects.com
BASE_URL = "http://www.eliteprospects.com"
# url template for draft overview pages at eliteprospects.com
DRAFT_URL_TEMPLATE = "draft.php?year=%d"
# maximum worker count
MAX_WORKERS = 8
# named tuple to contain basic player information
Player = namedtuple('Player', 'first_name last_name date_of_birth')
def retrieve_drafted_players_with_dobs(draft_year):
"""
Retrieves basic player data (first name, last name, date of birth) from all
player pages in the specified list.
"""
# retrieving links to pages of all drafted players first
player_urls = retrieve_drafted_player_links(draft_year)
# setting up target list
players_with_dobs = list()
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as threads:
future_tasks = {
threads.submit(
get_player_with_dob, url): url for url in player_urls[:20]}
for future in as_completed(future_tasks):
try:
# TODO: think of something to do with the result here
result = future.result()
if result is not None:
players_with_dobs.append(result)
except Exception as e:
print("Concurrent task generated an exception: %s" % e)
return players_with_dobs
def get_player_with_dob(url):
"""
Retrieves single player along with date of birth.
"""
req = requests.get(url)
print("+ Working on url %s" % url)
doc = html.fromstring(req.text)
# retrieving birthdate url that contains all necessary information in
# granular form, i.e. <a href="birthdate.php?Birthdate=1998-04-19&
# Firstname=Patrik&Lastname=Laine">1998-04-19</a>
dob_url = doc.xpath("//a[starts-with(@href, 'birthdate')]/@href")
if not dob_url:
return
dob_url = dob_url.pop(0)
# retrieving player information from retrieved url
dob, first_name, last_name = get_player_details_from_url(dob_url)
# adding current player to list dictionary of players w/ date of births
return Player(
remove_non_ascii_chars(first_name),
remove_non_ascii_chars(last_name),
dob)
def retrieve_drafted_player_links(draft_year):
"""
Retrieves links to player pages for all players drafted in the specified
draft year.
"""
url = "/".join((BASE_URL, DRAFT_URL_TEMPLATE % draft_year))
req = requests.get(url)
doc = html.fromstring(req.text)
# stub links to player pages are present at the specified position in
# the main table
return ["/".join((BASE_URL, link)) for link in doc.xpath(
"//tr[@bordercolor='#FFFFFF']/td[3]/a/@href")]
def get_player_details_from_url(dob_url):
"""
Gets player details, i.e. first name, last name and date of birth, from
specified url.
"""
# exploding url into its components
url_comps = urlparse(dob_url)
# retrieving player details by exploding each part of the url's
# query component
dob, first_name, last_name = [
comp.split("=")[-1] for comp in url_comps.query.split("&")]
return dob, first_name, last_name
|
Python
| 0.000004
|
@@ -140,22 +140,23 @@
import
-Thread
+Process
PoolExec
@@ -474,17 +474,17 @@
RKERS =
-8
+4
%0A# named
@@ -979,14 +979,15 @@
ith
-Thread
+Process
Pool
@@ -1023,22 +1023,24 @@
ERS) as
-thread
+processe
s:%0A
@@ -1075,14 +1075,16 @@
-thread
+processe
s.su
@@ -1164,9 +1164,9 @@
ls%5B:
-2
+6
0%5D%7D%0A
@@ -1742,22 +1742,42 @@
(%22+
-Working on url
+Retrieving player information from
%25s%22
@@ -2766,24 +2766,116 @@
(req.text)%0A%0A
+ print(%0A %22+ Retrieving urls to pages of each player drafted in %25d%22 %25 draft_year)%0A%0A
# stub l
|
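Swapping ThreadPoolExecutor for ProcessPoolExecutor keeps the same submit/as_completed interface but runs the work in separate interpreter processes, which sidesteps the GIL for CPU-bound parsing at the cost of requiring picklable callables and arguments (module-level functions, not lambdas or closures) and, here, a smaller worker count. A minimal runnable sketch with illustrative names:

# Sketch: the executor swap, with the __main__ guard that spawn-based
# platforms require.
from concurrent.futures import ProcessPoolExecutor, as_completed

MAX_WORKERS = 4

def fetch(url):
    return len(url)  # stand-in for the real page fetch and parse

if __name__ == '__main__':
    urls = ['http://example.com/%d' % i for i in range(10)]
    with ProcessPoolExecutor(max_workers=MAX_WORKERS) as processes:
        futures = {processes.submit(fetch, url): url for url in urls}
        for future in as_completed(futures):
            print(futures[future], future.result())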
bd1ae8fbcbcdfc649c765259f543f52a5a21c303
|
Reset root logger before setting up logging
|
src/hades/common/cli.py
|
src/hades/common/cli.py
|
import argparse
import logging
import os
import sys
from gettext import gettext as _
from hades import constants
class ArgumentParser(argparse.ArgumentParser):
"""
ArgumentParser subclass that exists with os.EX_USAGE exit code if parsing
fails.
"""
def error(self, message):
self.print_usage(sys.stderr)
args = {'prog': self.prog, 'message': message}
self.exit(os.EX_USAGE, _('%(prog)s: error: %(message)s\n') % args)
parser = ArgumentParser(add_help=False)
parser.add_argument('-c', '--config', default=None, help="Path to config file")
parser.add_argument('-v', '--verbose', dest='verbosity',
default=None, action='count', help='Be more verbose')
parser.add_argument('-q', '--quiet', dest='verbosity',
action='store_const', const=0, help='Be quiet')
parser.add_argument('-V', '--version', action='version',
version=constants.PACKAGE_VERSION)
VERBOSITY_LEVELS = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
DEFAULT_VERBOSITY = 1
def setup_cli_logging(program, args):
"""
Setup logging for CLI applications, that do not configure logging
themselves.
Flask and Celery are quite opinionated about logging, so this function
should probably not be called in their launchers.
:param program: The name of the program
:param args: The parsed arguments of the program
"""
if args.verbosity is None:
verbosity = os.environ.get('HADES_VERBOSITY', DEFAULT_VERBOSITY)
try:
verbosity = int(verbosity)
except ValueError:
verbosity = DEFAULT_VERBOSITY
else:
verbosity = args.verbosity
effective_verbosity = max(0, min(len(VERBOSITY_LEVELS) - 1, verbosity))
level = VERBOSITY_LEVELS[effective_verbosity]
if level <= logging.DEBUG:
fmt = ("[%(asctime)s] %(levelname)s in %(filename)s:%(lineno)d: "
"%(message)s")
else:
fmt = "%(message)s"
logging.basicConfig(level=level, style='%', format=fmt, stream=sys.stderr)
|
Python
| 0
|
@@ -1414,24 +1414,48 @@
ram%0A %22%22%22%0A
+ reset_cli_logging()%0A
if args.
@@ -2078,20 +2078,404 @@
stream=sys.stderr)%0A
+%0A%0Adef reset_cli_logging():%0A %22%22%22Reset root logger configuration%22%22%22%0A root = logging.root%0A for h in root.handlers:%0A try:%0A h.acquire()%0A h.flush()%0A h.close()%0A except (OSError, ValueError):%0A pass%0A finally:%0A h.release()%0A root.removeHandler(h)%0A for f in root.filters:%0A root.removeFilter(f)%0A
|
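Why the reset must run first: logging.basicConfig is a no-op once the root logger already has handlers, so handlers left over from an earlier configuration have to be flushed, closed and removed before the CLI installs its own. A compact sketch; note that it iterates over a copy of the handler list, since removing entries while iterating the live list would skip handlers.

# Sketch: clear stale root handlers so a later basicConfig takes effect.
import logging
import sys

logging.basicConfig(stream=sys.stdout)  # stale configuration
for h in list(logging.root.handlers):   # copy: safe to remove while looping
    h.flush()
    h.close()
    logging.root.removeHandler(h)
logging.basicConfig(level=logging.INFO, stream=sys.stderr)  # now effective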
79006657866832d02a8f8f50269246fe1ca78982
|
Add missing indentation
|
src/hades/common/cli.py
|
src/hades/common/cli.py
|
"""Functionality for the Hades command-line utilities in :mod:`hades.bin`."""
import argparse
import functools
import logging.handlers
import os
import sys
import textwrap
import typing
from gettext import gettext as _
from hades import constants
class ArgumentParser(argparse.ArgumentParser):
"""ArgumentParser subclass that exists with :data:`os.EX_USAGE` exit code if
parsing fails."""
def error(self, message):
self.print_usage(sys.stderr)
args = {'prog': self.prog, 'message': message}
self.exit(os.EX_USAGE, _('%(prog)s: error: %(message)s\n') % args)
class VersionAction(argparse.Action):
# noinspection PyShadowingBuiltins
def __init__(self,
option_strings,
version_info=None,
dest=argparse.SUPPRESS,
default=argparse.SUPPRESS,
help="show program's version number, configure options, copyright notice and exit"):
super(VersionAction, self).__init__(
option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help)
self.version_info = version_info
def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace, values, option_string=None):
version_info = self.version_info
print(version_info)
parser.exit()
VERBOSITY_LEVELS = (
logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG, logging.NOTSET
)
DEFAULT_VERBOSITY = 1
parser = ArgumentParser(add_help=False)
parser.add_argument('-c', '--config', default=None, help="Path to config file")
parser.add_argument(
"-v",
"--verbose",
dest="verbosity",
default=DEFAULT_VERBOSITY,
action="count",
help="Be more verbose",
)
parser.add_argument('-q', '--quiet', dest='verbosity',
action='store_const', const=0, help='Be quiet')
parser.add_argument(
'-V', '--version', action=VersionAction, version_info=textwrap.dedent(
"""\
{PACKAGE_NAME} version {PACKAGE_VERSION}
Configure Options: {CONFIGURE_ARGS}
Copyright (c) 2015-2020 {PACKAGE_AUTHOR}
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
).rstrip().format(
PACKAGE_NAME=constants.PACKAGE_NAME,
PACKAGE_VERSION=constants.PACKAGE_VERSION,
CONFIGURE_ARGS=constants.CONFIGURE_ARGS,
PACKAGE_AUTHOR=constants.PACKAGE_AUTHOR,
)
)
parser.add_argument('--syslog', nargs='?', const='/dev/log', metavar='SOCKET',
help="Log to syslog instead of stderr. A path to the log "
"socket may be provided, defaults to /dev/log "
"otherwise")
def setup_cli_logging(program, args):
"""
Setup logging for CLI applications, that do not configure logging
themselves.
Set log level using command-line options parsed with :data:`parser`, the
:std:envvar:`HADES_VERBOSITY` environment variable or finally the default
value :data:`DEFAULT_VERBOSITY`.
Messages are logged to stderr by default, but can also be logged to syslog.
The possible log level settings are:
- :data:`logging.ERROR` is the minimum log level.
- :data:`logging.CRITICAL` will always also be logged to STDERR even if
logging to syslog.
- :data:`logging.WARNING` is the default logging level, but can be
suppressed with ``-q``/``--quiet`` or ``HADES_VERBOSITY=0``.
- Each ``-v``/``--verbose`` increases the verbosity by one level.
When the log level is lower than or equal to :data:`logging.DEBUG` also the
time, the log level and the filename are logged in addition to log message.
Flask and Celery have their own opinionated logging mechanisms. Logging
should probably be reset via :func:`reset_cli_logging` before handing over
control to them.
:param program: The name of the program
:param args: The parsed arguments of the program with :data:`parser` or a
subparser.
"""
# Collect log messages until after we have finished setting up, so that we
# can log them properly
messages: list[typing.Callable[[], None]] = []
reset_cli_logging()
if args.verbosity is None:
verbosity = os.environ.get('HADES_VERBOSITY', DEFAULT_VERBOSITY)
try:
verbosity = int(verbosity)
except ValueError as e:
verbosity = DEFAULT_VERBOSITY
messages.append(
functools.partial(
logging.root.critical,
"Illegal logging level %s",
exc_info=e,
)
)
else:
verbosity = args.verbosity
if verbosity < 0:
messages.append(
functools.partial(
logging.root.critical,
"Verbosity may not be negative"
)
)
effective_verbosity = max(0, min(len(VERBOSITY_LEVELS) - 1, verbosity))
level = VERBOSITY_LEVELS[effective_verbosity]
if level <= logging.DEBUG:
fmt = ("[%(asctime)s] %(levelname)s in %(filename)s:%(lineno)d: "
"%(message)s")
else:
fmt = "%(message)s"
stderr_handler = logging.StreamHandler(stream=sys.stderr)
stderr_handler.name = "stderr"
if args.syslog is not None:
# Also log critical messages to stderr
stderr_handler.setLevel(logging.CRITICAL)
syslog_handler = logging.handlers.SysLogHandler(address=args.syslog)
syslog_handler.name = "syslog"
handlers = [syslog_handler, stderr_handler]
else:
handlers = [stderr_handler]
logging.basicConfig(level=level, style='%', format=fmt, handlers=handlers)
# Log collected messages
for message in messages:
message()
def reset_cli_logging():
"""Reset root logger configuration"""
root = logging.root
for h in root.handlers:
try:
h.acquire()
h.flush()
h.close()
except (OSError, ValueError):
pass
finally:
h.release()
root.removeHandler(h)
for f in root.filters:
root.removeFilter(f)
|
Python
| 0.000001
|
@@ -4458,16 +4458,17 @@
r a%0A
+
subparse
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.