| commit (stringlengths 40-40) | subject (stringlengths 1-3.25k) | old_file (stringlengths 4-311) | new_file (stringlengths 4-311) | old_contents (stringlengths 0-26.3k) | lang (stringclasses, 3 values) | proba (float64, 0-1) | diff (stringlengths 0-7.82k) |
|---|---|---|---|---|---|---|---|
11182c8ff3ef10e01ddda8a858708d0759d4f3ae
|
Remove useless --only option on ./manage.py migrate
|
south/management/commands/migrate.py
|
south/management/commands/migrate.py
|
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.conf import settings
from django.db import models
from optparse import make_option
from south import migration
import sys
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--all', action='store_true', dest='all_apps', default=False,
help='Run the specified migration for all apps.'),
make_option('--list', action='store_true', dest='list', default=False,
help='List migrations noting those that have been applied'),
make_option('--skip', action='store_true', dest='skip', default=False,
help='Will skip over out-of-order missing migrations'),
make_option('--merge', action='store_true', dest='merge', default=False,
help='Will run out-of-order missing migrations as they are - no rollbacks.'),
make_option('--no-initial-data', action='store_true', dest='no_initial_data', default=False,
help='Skips loading initial data if specified.'),
make_option('--only', action='store_true', dest='only', default=False,
help='Only runs or rolls back the migration specified, and none around it.'),
make_option('--fake', action='store_true', dest='fake', default=False,
help="Pretends to do the migrations, but doesn't actually execute them."),
make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False,
help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."),
)
if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
option_list += (
make_option('--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
)
help = "Runs migrations for all apps."
def handle(self, app=None, target=None, skip=False, merge=False, only=False, backwards=False, fake=False, db_dry_run=False, list=False, **options):
# Work out what the resolve mode is
resolve_mode = merge and "merge" or (skip and "skip" or None)
# Turn on db debugging
from south.db import db
db.debug = True
# NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb
# This code imports any module named 'management' in INSTALLED_APPS.
# The 'management' module is the preferred way of listening to post_syncdb
# signals, and since we're sending those out with create_table migrations,
# we need apps to behave correctly.
for app_name in settings.INSTALLED_APPS:
try:
__import__(app_name + '.management', {}, {}, [''])
except ImportError, exc:
msg = exc.args[0]
if not msg.startswith('No module named') or 'management' not in msg:
raise
# END DJANGO DUPE CODE
# if all_apps flag is set, shift app over to target
if options['all_apps']:
target = app
app = None
# Migrate each app
if app:
apps = [migration.get_app(app.split(".")[-1])]
else:
apps = migration.get_migrated_apps()
silent = options.get('verbosity', 0) == 0
if list and apps:
list_migrations(apps)
if not list:
for app in apps:
result = migration.migrate_app(
app,
resolve_mode = resolve_mode,
target_name = target,
fake = fake,
db_dry_run = db_dry_run,
silent = silent,
load_inital_data = not options['no_initial_data'],
)
if result is False:
return
def list_migrations(apps):
from south.models import MigrationHistory
apps = list(apps)
names = [migration.get_app_name(app) for app in apps]
applied_migrations = MigrationHistory.objects.filter(app_name__in=names)
applied_migrations = ['%s.%s' % (mi.app_name,mi.migration) for mi in applied_migrations]
print
for app in apps:
print migration.get_app_name(app)
all_migrations = migration.get_migration_names(app)
for migration_name in all_migrations:
long_form = '%s.%s' % (migration.get_app_name(app),migration_name)
if long_form in applied_migrations:
print format_migration_list_item(migration_name)
else:
print format_migration_list_item(migration_name, applied=False)
print
def format_migration_list_item(name, applied=True):
if applied:
return ' * %s' % name
return ' %s' % name
|
Python
| 0.000002
|
@@ -1086,177 +1086,8 @@
'),%0A
- make_option('--only', action='store_true', dest='only', default=False,%0A help='Only runs or rolls back the migration specified, and none around it.'),%0A
@@ -1988,20 +1988,8 @@
lse,
- only=False,
bac
|
49cb47d9d193ffb8aaedf30baa1df938184988b0
|
Allow image name to be empty for simple list test
|
tempest/lib/api_schema/response/compute/v2_1/images.py
|
tempest/lib/api_schema/response/compute/v2_1/images.py
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from tempest.lib.api_schema.response.compute.v2_1 import parameter_types
image_links = copy.deepcopy(parameter_types.links)
image_links['items']['properties'].update({'type': {'type': 'string'}})
common_image_schema = {
'type': 'object',
'properties': {
'id': {'type': 'string'},
'status': {'type': 'string'},
'updated': {'type': 'string'},
'links': image_links,
'name': {'type': ['string', 'null']},
'created': {'type': 'string'},
'minDisk': {'type': 'integer'},
'minRam': {'type': 'integer'},
'progress': {'type': 'integer'},
'metadata': {'type': 'object'},
'server': {
'type': 'object',
'properties': {
'id': {'type': 'string'},
'links': parameter_types.links
},
'additionalProperties': False,
'required': ['id', 'links']
},
'OS-EXT-IMG-SIZE:size': {'type': ['integer', 'null']},
'OS-DCF:diskConfig': {'type': 'string'}
},
'additionalProperties': False,
# 'server' attributes only comes in response body if image is
# associated with any server. 'OS-EXT-IMG-SIZE:size' & 'OS-DCF:diskConfig'
# are API extension, So those are not defined as 'required'.
'required': ['id', 'status', 'updated', 'links', 'name',
'created', 'minDisk', 'minRam', 'progress',
'metadata']
}
get_image = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'image': common_image_schema
},
'additionalProperties': False,
'required': ['image']
}
}
list_images = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'images': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'id': {'type': 'string'},
'links': image_links,
'name': {'type': 'string'}
},
'additionalProperties': False,
'required': ['id', 'links', 'name']
}
},
'images_links': parameter_types.links
},
'additionalProperties': False,
# NOTE(gmann): images_links attribute is not necessary to be
# present always So it is not 'required'.
'required': ['images']
}
}
create_image = {
'status_code': [202],
'response_header': {
'type': 'object',
'properties': parameter_types.response_header
}
}
create_image['response_header']['properties'].update(
{'location': {
'type': 'string',
'format': 'uri'}
}
)
create_image['response_header']['required'] = ['location']
delete = {
'status_code': [204]
}
image_metadata = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'metadata': {'type': 'object'}
},
'additionalProperties': False,
'required': ['metadata']
}
}
image_meta_item = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'meta': {'type': 'object'}
},
'additionalProperties': False,
'required': ['meta']
}
}
list_images_details = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'images': {
'type': 'array',
'items': common_image_schema
},
'images_links': parameter_types.links
},
'additionalProperties': False,
# NOTE(gmann): images_links attribute is not necessary to be
# present always So it is not 'required'.
'required': ['images']
}
}
|
Python
| 0.000019
|
@@ -2730,32 +2730,33 @@
': %7B'type':
+%5B
'string'
%7D%0A
@@ -2739,24 +2739,33 @@
': %5B'string'
+, 'null'%5D
%7D%0A
|
724b4c382015aa933659a24f7be3bd2cabbcb5eb
|
Add flag --exclusive setting whether to run as exclusive or not
|
sherlock.stanford.edu.run_gpaw.py
|
sherlock.stanford.edu.run_gpaw.py
|
"""This is the submission script for GPAW on Sherlock at Stanford"""
#!/usr/bin/env python
from sys import argv
import os
job = argv[1]
nodes = argv[2]
time = argv[3] + ":00"
if len(argv) > 4:
gpaw_options = ' '.join(argv[4:])
else:
gpaw_options = ' '
#options = '-l nodes=' + nodes +':ppn=2' + ' -l' +' walltime=' + time + ' -m abe'
#options = '-N ' + nodes +' -t ' + time + ' -J ' + job
options = ' -J ' + job
#dir = os.getcwd()
f = open('tmp.sh', 'w')
f.write("""\
#!/bin/bash
#SBATCH -n %s
#SBATCH -t %s
#SBATCH -p iric,normal
#SBATCH --exclusive
# Add nodes that always fail
#SBATCH -x gpu-14-1,sh-20-35
# send email about job status changes
##SBATCH --mail-type=ALL
#Set an open-mpi parameter to suppress "fork()" warnings
# GPAW is written to use fork calls
export OMPI_MCA_mpi_warn_on_fork=0
#This next line decides which version of gpaw will be used
source $HOME/environment_scripts/set_paths_gpaw-trunk_scalapack_libvdwxc.sh # Gpaw trunk with mBEEF-vdW fixed for libvdwxc
srun `which gpaw-python` %s %s
""" % (nodes,time,job,gpaw_options))
f.close()
os.system('sbatch ' + options + ' tmp.sh')
|
Python
| 0
|
@@ -1,73 +1,4 @@
-%22%22%22This is the submission script for GPAW on Sherlock at Stanford%22%22%22%0A
#!/u
@@ -101,16 +101,129 @@
%22:00%22%0A%0A
+if '--exclusive' in argv:%0A is_exclusive = True%0A argv.remove('--exclusive')%0Aelse:%0A is_exclusive = False%0A%0A
if len(a
@@ -531,16 +531,93 @@
bin/bash
+%5Cn%22%22%22)%0Aif is_exclusive:%0A f.write(%22%22%22#SBATCH --exclusive%5Cn%22%22%22)%0Af.write(%22%22%22%5C
%0A#SBATCH
@@ -664,28 +664,9 @@
mal%0A
-#SBATCH --exclusive
%0A
+
# Ad
@@ -913,16 +913,16 @@
fork=0%0A%0A
-
#This ne
@@ -972,16 +972,278 @@
be used%0A
+#source $HOME/environment_scripts/set_paths_gpaw_1.1.1b1_libxc-trunk.sh # gpaw version 1.1.1b %0A#source $HOME/environment_scripts/set_paths_gpaw_1.1.1b1_libxc-trunk_scalapack_libvdwxc.sh # gpaw version 1.1.1b with scalapack (does not work) and libvdwxc (works)%0A
source $
|
31cec1c5ab052f237445b8969088aba755ae73cf
|
Clean up now-unnecessary DummyStorage().
|
incuna_test_utils/testcases/integration.py
|
incuna_test_utils/testcases/integration.py
|
from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import render
from .request import BaseRequestTestCase
class BaseIntegrationTestCase(BaseRequestTestCase):
"""
A TestCase that operates similarly to a Selenium test.
Contains methods that access pages and render them to strings full of
HTML. Can be used to assert the contents of templates as well as doing
normal TestCase things.
Must be subclassed with the following attributes in order to work:
* user_factory
* view_class (class-based view) or view (method-based view)
"""
def get_view(self):
"""
Returns the class's attached view.
Checks self.view_class, then self.view. Throws an ImproperlyConfigured
exception if neither exist.
"""
try:
return self.view_class.as_view()
except AttributeError:
# Continue on to the next try/catch
pass
try:
return self.view
except AttributeError:
message = "This test must have a 'view_class' or 'view' attribute."
raise ImproperlyConfigured(message)
def access_view(self, *args, request=None, **kwargs):
"""
Helper method that accesses the test's view.
Accepts a request parameter, which can be None. If it is, this method
creates a basic request on your behalf.
Returns a HTTPResponse object with the request (created or otherwise)
attached.
"""
if request is None:
request = self.create_request()
request._messages = DummyStorage()
view = self.get_view()
response = view(request, *args, **kwargs)
# Add the request to the response.
# This is a weird-looking but compact way of ensuring we have access to
# the request everywhere we need it, without doing clunky things like
# returning tuples all the time.
response.request = request
return response
def render_to_str(self, response, request=None):
"""
Render a HTTPResponse into a string that holds the HTML content.
Accepts an optional request parameter, and looks for a request attached
to the response if the optional parameter isn't specified.
"""
if request is None:
request = response.request
response = render(request, response.template_name, response.context_data)
return str(response.content)
def access_view_and_render_response(self, *view_args, request=None, expected_status=200, **view_kwargs):
"""
Accesses the view and returns a string of HTML.
Combines access_view, an assertion on the returned status, and
render_to_str.
Accepts an optional request (but will create a simple one if the
parameter isn't supplied), an expected status code for the response
(which defaults to 200), and args and kwargs for the view method.
"""
response = self.access_view(*view_args, request=request, **view_kwargs)
# Assert that the response has the correct status code before we go
# any further. Throwing accurately descriptive failures when something
# goes wrong is better than trying to run assertions on the content
# of a HTML response for some random 404 page.
self.assertEqual(expected_status, response.status_code)
# Render the response and return it.
return self.render_to_str(response)
def assert_count(self, needle, haystack, count):
"""
Assert that 'needle' occurs exactly 'count' times in 'haystack'.
Used as a snazzier, stricter version of unittest.assertIn.
Outputs a verbose error message when it fails.
"""
actual_count = haystack.count(needle)
# Build a verbose error message in case we need it.
plural = '' if count == 1 else 's'
message = 'Expected {count} instance{plural} of {needle}, but found {actual_count}, in {haystack}'
message = message.format_map(locals())
# Make the assertion.
self.assertEqual(count, actual_count, message)
|
Python
| 0
|
@@ -1598,55 +1598,8 @@
st()
-%0A request._messages = DummyStorage()
%0A%0A
|
ec25f9c1b0212f1f23855eab22078d1563cd7165
|
Use int for survey_id and question_id
|
indico/modules/events/surveys/blueprint.py
|
indico/modules/events/surveys/blueprint.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.modules.events.surveys.controllers.management import (RHManageSurveys, RHCreateSurvey, RHManageSurvey,
RHEditSurvey, RHScheduleSurvey, RHStartSurvey,
RHEndSurvey, RHManageSurveyQuestionnaire,
RHAddSurveyQuestion, RHEditSurveyQuestion,
RHDeleteSurveyQuestion, RHChangeQuestionPosition)
from indico.web.flask.wrappers import IndicoBlueprint
_bp = IndicoBlueprint('survey', __name__, template_folder='templates', virtual_template_folder='events/surveys',
url_prefix='/event/<confId>', event_feature='surveys')
# surveys management
_bp.add_url_rule('/manage/surveys/', 'management', RHManageSurveys)
_bp.add_url_rule('/manage/surveys/create', 'create', RHCreateSurvey, methods=('GET', 'POST'))
# Single survey management
_bp.add_url_rule('/manage/surveys/<survey_id>/', 'manage_survey', RHManageSurvey)
_bp.add_url_rule('/manage/surveys/<survey_id>/edit', 'edit_survey', RHEditSurvey, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/surveys/<survey_id>/schedule', 'schedule_survey', RHScheduleSurvey, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/surveys/<survey_id>/start', 'start_survey', RHStartSurvey, methods=('POST',))
_bp.add_url_rule('/manage/surveys/<survey_id>/end', 'end_survey', RHEndSurvey, methods=('POST',))
# Survey question management
_bp.add_url_rule('/manage/surveys/<survey_id>/questionnaire/', 'manage_questionnaire', RHManageSurveyQuestionnaire)
_bp.add_url_rule('/manage/surveys/<survey_id>/questionnaire/add/<type>', 'add_question', RHAddSurveyQuestion,
methods=('GET', 'POST'))
_bp.add_url_rule('/manage/surveys/<survey_id>/questionnaire/<question_id>', 'edit_question', RHEditSurveyQuestion,
methods=('GET', 'POST'))
_bp.add_url_rule('/manage/surveys/<survey_id>/questionnaire/<question_id>/delete', 'remove_question',
RHDeleteSurveyQuestion, methods=('POST',))
_bp.add_url_rule('/manage/surveys/<survey_id>/questionnaire/change-positions', 'change_question_position',
RHChangeQuestionPosition, methods=('POST',))
|
Python
| 0.000003
|
@@ -1808,32 +1808,36 @@
manage/surveys/%3C
+int:
survey_id%3E/', 'm
@@ -1894,32 +1894,36 @@
manage/surveys/%3C
+int:
survey_id%3E/edit'
@@ -2005,32 +2005,36 @@
manage/surveys/%3C
+int:
survey_id%3E/sched
@@ -2067,32 +2067,49 @@
HScheduleSurvey,
+%0A
methods=('GET',
@@ -2145,32 +2145,36 @@
manage/surveys/%3C
+int:
survey_id%3E/start
@@ -2253,32 +2253,36 @@
manage/surveys/%3C
+int:
survey_id%3E/end',
@@ -2385,32 +2385,36 @@
manage/surveys/%3C
+int:
survey_id%3E/quest
@@ -2505,32 +2505,36 @@
manage/surveys/%3C
+int:
survey_id%3E/quest
@@ -2661,32 +2661,36 @@
manage/surveys/%3C
+int:
survey_id%3E/quest
@@ -2691,32 +2691,36 @@
/questionnaire/%3C
+int:
question_id%3E', '
@@ -2734,16 +2734,33 @@
estion',
+%0A
RHEditS
@@ -2773,33 +2773,16 @@
uestion,
-%0A
methods
@@ -2826,32 +2826,36 @@
manage/surveys/%3C
+int:
survey_id%3E/quest
@@ -2864,16 +2864,20 @@
nnaire/%3C
+int:
question
@@ -3004,16 +3004,20 @@
urveys/%3C
+int:
survey_i
|
98ba2e69daa3ebf460516f5b57f5340ae71fe830
|
Use alert_percentage for coloring the entry red
|
i3pystatus/battery.py
|
i3pystatus/battery.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import configparser
from i3pystatus import IntervalModule, formatp
from i3pystatus.core.util import lchop, TimeWrapper
from i3pystatus.core.desktop import DesktopNotification
class UEventParser(configparser.ConfigParser):
@staticmethod
def parse_file(file):
parser = UEventParser()
with open(file, "r") as file:
parser.read_string(file.read())
return dict(parser.items("id10t"))
def __init__(self):
super().__init__(default_section="id10t")
def optionxform(self, key):
return lchop(key, "POWER_SUPPLY_")
def read_string(self, string):
super().read_string("[id10t]\n" + string)
class Battery:
@staticmethod
def create(from_file):
battery_info = UEventParser.parse_file(from_file)
if "POWER_NOW" in battery_info:
return BatteryEnergy(battery_info)
else:
return BatteryCharge(battery_info)
def __init__(self, battery_info):
self.battery_info = battery_info
self.normalize_micro()
def normalize_micro(self):
for key, micro_value in self.battery_info.items():
if re.match(r"(VOLTAGE|CHARGE|CURRENT|POWER|ENERGY)_(NOW|FULL|MIN)(_DESIGN)?", key):
self.battery_info[key] = float(micro_value) / 1000000.0
def percentage(self, design=False):
return self._percentage("_DESIGN" if design else "") * 100
def status(self):
if self.consumption():
return "Discharging" if self.battery_info["STATUS"] == "Discharging" else "Charging"
else:
return "Full"
class BatteryCharge(Battery):
def consumption(self):
return self.battery_info["VOLTAGE_NOW"] * self.battery_info["CURRENT_NOW"] # V * A = W
def _percentage(self, design):
return self.battery_info["CHARGE_NOW"] / self.battery_info["CHARGE_FULL" + design]
def remaining(self):
if self.status() == "Discharging":
# Ah / A = h * 60 min = min
return self.battery_info["CHARGE_NOW"] / self.battery_info["CURRENT_NOW"] * 60
else:
return (self.battery_info["CHARGE_FULL"] - self.battery_info["CHARGE_NOW"]) / self.battery_info["CURRENT_NOW"] * 60
class BatteryEnergy(Battery):
def consumption(self):
return self.battery_info["POWER_NOW"]
def _percentage(self, design):
return self.battery_info["ENERGY_NOW"] / self.battery_info["ENERGY_FULL" + design]
def remaining(self):
if self.status() == "Discharging":
# Wh / W = h * 60 min = min
return self.battery_info["ENERGY_NOW"] / self.battery_info["POWER_NOW"] * 60
else:
return (self.battery_info["ENERGY_FULL"] - self.battery_info["ENERGY_NOW"]) / self.battery_info["POWER_NOW"] * 60
class BatteryChecker(IntervalModule):
"""
This class uses the /sys/class/power_supply/…/uevent interface to check for the
battery status
Available formatters:
* `{remaining}` — remaining time for charging or discharging, uses TimeWrapper formatting, default format is `%E%h:%M`
* `{percentage}` — battery percentage relative to the last full value
* `{percentage_design}` — absolute battery charge percentage
* `{consumption (Watts)}` — current power flowing into/out of the battery
* `{status}`
* `{battery_ident}` — the same as the setting
"""
settings = (
("battery_ident", "The name of your battery, usually BAT0 or BAT1"),
"format",
("alert", "Display a libnotify-notification on low battery"),
"alert_percentage",
("alert_format_title", "The title of the notification, all formatters can be used"),
("alert_format_body", "The body text of the notification, all formatters can be used"),
("path", "Override the default-generated path"),
("status", "A dictionary mapping ('DIS', 'CHR', 'FULL') to alternative names"),
)
battery_ident = "BAT0"
format = "{status} {remaining}"
status = {
"CHR": "CHR",
"DIS": "DIS",
"FULL": "FULL",
}
alert = False
alert_percentage = 10
alert_format_title = "Low battery"
alert_format_body = "Battery {battery_ident} has only {percentage:.2f}% ({remaining:%E%hh:%Mm}) remaining!"
path = None
def init(self):
if not self.path:
self.path = "/sys/class/power_supply/{0}/uevent".format(
self.battery_ident)
def run(self):
urgent = False
color = "#ffffff"
battery = Battery.create(self.path)
fdict = {
"battery_ident": self.battery_ident,
"percentage": battery.percentage(),
"percentage_design": battery.percentage(design=True),
"consumption": battery.consumption(),
"remaining": TimeWrapper(0, "%E%h:%M"),
}
status = battery.status()
if status in ["Discharging", "Charging"]:
remaining = battery.remaining()
fdict["remaining"] = TimeWrapper(remaining * 60, "%E%h:%M")
if status == "Discharging":
fdict["status"] = "DIS"
if remaining < 15:
urgent = True
color = "#ff0000"
else:
fdict["status"] = "CHR"
else:
fdict["status"] = "FULL"
if self.alert and fdict["status"] == "DIS" and fdict["percentage"] <= self.alert_percentage:
DesktopNotification(
title=formatp(self.alert_format_title, **fdict),
body=formatp(self.alert_format_body, **fdict),
icon="battery-caution",
urgency=2,
timeout=60,
).display()
fdict["status"] = self.status[fdict["status"]]
self.output = {
"full_text": formatp(self.format, **fdict).strip(),
"instance": self.battery_ident,
"urgent": urgent,
"color": color
}
|
Python
| 0.000001
|
@@ -4974,20 +4974,17 @@
us in %5B%22
-Disc
+C
harging%22
@@ -4986,17 +4986,20 @@
ging%22, %22
-C
+Disc
harging%22
@@ -5220,22 +5220,53 @@
if
-remaining %3C 15
+battery.percentage() %3C= self.alert_percentage
:%0A
|
76f19afd5cfb084327740de9346781e730d764f9
|
Add message method for create etc
|
iatiupdates/models.py
|
iatiupdates/models.py
|
# IATI Updates, IATI Registry API augmented
# by Mark Brough
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
from sqlalchemy import *
from iatiupdates import db
from datetime import datetime
class Package(db.Model):
__tablename__ = 'package'
id = Column(UnicodeText, primary_key=True)
packagegroup_id = Column(UnicodeText, ForeignKey('packagegroup.id', ondelete='CASCADE'))
metadata_created = Column(DateTime)
metadata_modified = Column(DateTime)
relationships = Column(UnicodeText)
author_email = Column(UnicodeText)
state = Column(UnicodeText)
license_id = Column(UnicodeText)
resources = Column(UnicodeText)
tags = Column(UnicodeText)
groups = Column(UnicodeText)
name = Column(UnicodeText)
isopen = Column(UnicodeText)
license = Column(UnicodeText)
notes_rendered = Column(UnicodeText)
ckan_url = Column(UnicodeText)
title = Column(UnicodeText)
extras = Column(UnicodeText)
ratings_count = Column(UnicodeText)
revision_id = Column(UnicodeText)
notes = Column(UnicodeText)
ratings_average = Column(UnicodeText)
author = Column(UnicodeText)
packagegroup_name = Column(UnicodeText)
issue_type = Column(UnicodeText, ForeignKey('issuetype.id', ondelete='CASCADE'))
issue_message = Column(UnicodeText)
issue_date = Column(UnicodeText)
hash = Column(UnicodeText)
url = Column(UnicodeText)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class PackageGroup(db.Model):
__tablename__ = 'packagegroup'
id = Column(UnicodeText, primary_key=True)
display_name = Column(UnicodeText)
description = Column(UnicodeText)
created = Column(DateTime)
title = Column(UnicodeText)
state = Column(UnicodeText)
extras = Column(UnicodeText)
revision_id = Column(UnicodeText)
packages = Column(UnicodeText)
name = Column(UnicodeText)
frequency = Column(Integer)
frequency_comment = Column(UnicodeText)
deleted = Column(Boolean)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class Revision(db.Model):
__tablename__ = 'revision'
id = Column(UnicodeText, primary_key=True)
timestamp = Column(DateTime)
package_id = Column(UnicodeText, ForeignKey('package.id', ondelete='CASCADE'))
message = Column(UnicodeText)
author = Column(UnicodeText)
group_id = Column(UnicodeText, ForeignKey('packagegroup.id', ondelete='CASCADE'))
message_type = Column(UnicodeText)
message_text = Column(UnicodeText)
date = Column(DateTime)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class IssueType(db.Model):
__tablename__ = 'issuetype'
id = Column(UnicodeText, primary_key=True)
name = Column(UnicodeText)
|
Python
| 0
|
@@ -2723,24 +2723,65 @@
nicodeText)%0A
+ message_method = Column(UnicodeText)%0A
date = C
|
502d2ca202101ae963f97c38e2c14049eec5848e
|
Add a unique identifier of the event to coalesce start <-> success/failed
|
idb/common/logging.py
|
idb/common/logging.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import functools
import inspect
import logging
import time
from concurrent.futures import CancelledError
from types import TracebackType
from typing import Any, AsyncContextManager, Collection, Optional, Tuple, Type
import idb.common.plugin as plugin
from idb.common.types import LoggingMetadata
from idb.utils.typing import none_throws
logger: logging.Logger = logging.getLogger("idb")
def _initial_info(
args: Collection[Any], metadata: Optional[LoggingMetadata] # pyre-ignore
) -> Tuple[LoggingMetadata, int]:
_metadata: LoggingMetadata = metadata or {}
if len(args):
# pyre-fixme[16]: `Collection` has no attribute `__getitem__`.
self_meta: Optional[LoggingMetadata] = getattr(args[0], "metadata", None)
if self_meta:
_metadata.update(self_meta)
start = int(time.time())
return (_metadata, start)
class log_call(AsyncContextManager[None]):
def __init__(
self, name: Optional[str] = None, metadata: Optional[LoggingMetadata] = None
) -> None:
self.name = name
self.metadata: LoggingMetadata = metadata or {}
self.start: Optional[int] = None
async def __aenter__(self) -> None:
name = none_throws(self.name)
logger.debug(f"{self.name} called")
self.start = int(time.time())
await plugin.before_invocation(name=name, metadata=self.metadata)
# pyre-fixme[14]: `__aexit__` overrides method defined in `AsyncContextManager`
# inconsistently.
# pyre-fixme[14]: `__aexit__` overrides method defined in `AsyncContextManager`
# inconsistently.
async def __aexit__(
self,
exc_type: Optional[Type[Exception]],
exception: Optional[Exception],
traceback: Optional[TracebackType],
) -> bool:
name = none_throws(self.name)
duration = int((time.time() - none_throws(self.start)) * 1000)
if exception:
logger.debug(f"{name} failed")
await plugin.failed_invocation(
name=name,
duration=duration,
exception=exception,
metadata=self.metadata,
)
else:
logger.debug(f"{name} succeeded")
await plugin.after_invocation(
name=name, duration=duration, metadata=self.metadata
)
return False
def __call__(self, function) -> Any: # pyre-ignore
_name = self.name or function.__name__
@functools.wraps(function)
async def _async_wrapper(*args: Any, **kwargs: Any) -> Any: # pyre-ignore
logger.debug(f"{_name} called")
(_metadata, start) = _initial_info(args, self.metadata)
await plugin.before_invocation(name=_name, metadata=_metadata)
try:
value = await function(*args, **kwargs)
logger.debug(f"{_name} succeeded")
await plugin.after_invocation(
name=_name,
duration=int((time.time() - start) * 1000),
metadata=_metadata,
)
return value
except CancelledError as ex:
logger.debug(f"{_name} cancelled")
_metadata["cancelled"] = True
await plugin.after_invocation(
name=_name,
duration=int((time.time() - start) * 1000),
metadata=_metadata,
)
raise ex
except Exception as ex:
logger.debug(f"{_name} failed")
await plugin.failed_invocation(
name=_name,
duration=int((time.time() - start) * 1000),
exception=ex,
metadata=_metadata,
)
raise ex
@functools.wraps(function)
async def _async_gen_wrapper(*args, **kwargs) -> Any: # pyre-ignore
logger.debug(f"{_name} started")
(_metadata, start) = _initial_info(args, self.metadata)
await plugin.before_invocation(name=_name, metadata=_metadata)
try:
async for value in function(*args, **kwargs):
yield value
logger.debug(f"{_name} finished")
await plugin.after_invocation(
name=_name,
duration=int((time.time() - start) * 1000),
metadata=_metadata,
)
except CancelledError as ex:
logger.debug(f"{_name} cancelled")
_metadata["cancelled"] = True
await plugin.after_invocation(
name=_name,
duration=int((time.time() - start) * 1000),
metadata=_metadata,
)
raise ex
except Exception as ex:
logger.debug(f"{_name} failed")
await plugin.failed_invocation(
name=_name,
duration=int((time.time() - start) * 1000),
exception=ex,
metadata=_metadata,
)
raise ex
if inspect.isasyncgenfunction(function):
return _async_gen_wrapper
else:
return _async_wrapper
|
Python
| 0.000024
|
@@ -409,16 +409,39 @@
le, Type
+%0Afrom uuid import uuid4
%0A%0Aimport
@@ -1017,24 +1017,67 @@
(self_meta)%0A
+ _metadata%5B%22event_uuid%22%5D = str(uuid4())%0A
start =
|
6863aaf12dddec37e01fc38f025db6f60dd274a0
|
Remove decorator for fixed test
|
test/functionalities/abbreviation/TestAbbreviations.py
|
test/functionalities/abbreviation/TestAbbreviations.py
|
"""
Test some lldb command abbreviations.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class AbbreviationsTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@expectedFailureFreeBSD('llvm.org/pr18805')
@expectedFailureLinux('llvm.org/pr18816')
def test_nonrunning_command_abbreviations (self):
self.expect("ap script",
startstr = "The following built-in commands may relate to 'script':",
substrs = ['breakpoint command add',
'breakpoint command list',
'breakpoint list',
'command alias',
'expression',
'script'])
self.runCmd("com a alias com al")
self.runCmd("alias gurp help")
self.expect("gurp target create",
substrs = ['Syntax: target create <cmd-options> <filename>'])
self.runCmd("com u gurp")
self.expect("gurp",
COMMAND_FAILED_AS_EXPECTED, error = True,
substrs = ["error: 'gurp' is not a valid command."])
# Only one matching command: execute it.
self.expect("h",
startstr = "The following is a list of built-in, permanent debugger commands:")
# Execute cleanup function during test tear down
def cleanup():
self.runCmd("command alias t thread select")
self.addTearDownHook(cleanup)
# Several matching commands: list them and error out.
self.runCmd("command unalias t")
self.expect("t",
COMMAND_FAILED_AS_EXPECTED, error = True,
substrs = ["Ambiguous command 't'. Possible matches:",
"target", "thread", "type"])
self.runCmd("com sou ./change_prompt.lldb")
self.expect("settings show prompt",
startstr = 'prompt (string) = "[with-three-trailing-spaces] "')
self.runCmd("settings clear prompt")
self.expect("settings show prompt",
startstr = 'prompt (string) = "(lldb) "')
self.expect("lo li",
startstr = "Logging categories for ")
self.runCmd("se se prompt 'Sycamore> '")
self.expect("se sh prompt",
startstr = 'prompt (string) = "Sycamore> "')
self.runCmd("se cl prompt")
self.expect("set sh prompt",
startstr = 'prompt (string) = "(lldb) "')
# We don't want to display the stdout if not in TraceOn() mode.
if not self.TraceOn():
self.HideStdout()
self.runCmd (r'''sc print "\n\n\tHello!\n"''')
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
@dsym_test
def test_with_dsym (self):
self.buildDsym ()
self.running_abbreviations ()
@dwarf_test
def test_with_dwarf (self):
self.buildDwarf ()
self.running_abbreviations ()
def running_abbreviations (self):
exe = os.path.join (os.getcwd(), "a.out")
# Use "file", i.e., no abbreviation. We're exactly matching the command
# verbatim when dealing with remote testsuite execution.
# For more details, see TestBase.runCmd().
self.expect("file " + exe,
patterns = [ "Current executable set to .*a.out.*" ])
# By default, the setting interpreter.expand-regex-aliases is false.
self.expect("_regexp-br product", matching=False,
substrs = [ "breakpoint set --name" ])
match_object = lldbutil.run_break_set_command (self, "br s -n sum")
lldbutil.check_breakpoint_result (self, match_object, symbol_name='sum', symbol_match_exact=False, num_locations=1)
match_object = lldbutil.run_break_set_command (self, "br s -f main.cpp -l 32")
lldbutil.check_breakpoint_result (self, match_object, file_name='main.cpp', line_number=32, num_locations=1)
self.runCmd("br co a -s python 1 -o 'print frame'")
self.expect("br co l 1",
substrs = [ "Breakpoint 1:",
"Breakpoint commands:",
"print frame" ])
self.runCmd("br co del 1")
self.expect("breakpoint command list 1",
startstr = "Breakpoint 1 does not have an associated command.")
self.expect("br di",
startstr = 'All breakpoints disabled. (3 breakpoints)')
self.expect("bre e",
startstr = "All breakpoints enabled. (3 breakpoints)")
self.expect("break list",
substrs = ["1: name = 'product', locations = 1",
"2: name = 'sum', locations = 1",
"3: file = 'main.cpp', line = 32, locations = 1"])
self.expect("br cl -l 32 -f main.cpp",
startstr = "1 breakpoints cleared:",
substrs = ["3: file = 'main.cpp', line = 32, locations = 1"])
# Add a future to terminate the current process being debugged.
#
# The test framework relies on detecting either "run" or "process launch"
# command to automatically kill the inferior upon tear down.
# But we'll be using "pro la" command to launch the inferior.
self.addTearDownHook(lambda: self.runCmd("process kill"))
self.expect("pro la",
patterns = [ "Process .* launched: "])
self.expect("pro st",
patterns = [ "Process .* stopped",
"thread #1:",
"a.out",
"sum\(a=1238, b=78392\)",
"at main.cpp\:25",
"stop reason = breakpoint 2.1" ])
# ARCH, if not specified, defaults to x86_64.
if self.getArchitecture() in ["", 'x86_64', 'i386']:
self.expect("dis -f",
startstr = "a.out`sum(int, int)",
substrs = [' mov',
' addl ',
'ret'])
self.expect("i d l main.cpp",
patterns = ["Line table for .*main.cpp in `a.out"])
self.expect("i d se",
patterns = ["Dumping sections for [0-9]+ modules."])
self.expect("i d symf",
patterns = ["Dumping debug symbols for [0-9]+ modules."])
self.expect("i d symt",
patterns = ["Dumping symbol table for [0-9]+ modules."])
if sys.platform.startswith("darwin"):
self.expect("i li",
substrs = [ 'a.out',
'/usr/lib/dyld',
'/usr/lib/libSystem.B.dylib'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
Python
| 0
|
@@ -215,56 +215,8 @@
_)%0A%0A
- @expectedFailureFreeBSD('llvm.org/pr18805')%0A
|
b5a526cfcd01e5b7b9bd0605abe7533a5b5902e7
|
Drop Py2 and six on tests/integration/netapi/rest_cherrypy/test_app_pam.py
|
tests/integration/netapi/rest_cherrypy/test_app_pam.py
|
tests/integration/netapi/rest_cherrypy/test_app_pam.py
|
"""
Integration Tests for restcherry salt-api with pam eauth
"""
import salt.utils.platform
import tests.support.cherrypy_testclasses as cptc
from salt.ext.six.moves.urllib.parse import ( # pylint: disable=no-name-in-module,import-error
urlencode,
)
from tests.support.case import ModuleCase
from tests.support.helpers import destructiveTest, skip_if_not_root, slowTest
from tests.support.unit import skipIf
if cptc.HAS_CHERRYPY:
import cherrypy
USERA = "saltdev-netapi"
USERA_PWD = "saltdev"
HASHED_USERA_PWD = "$6$SALTsalt$ZZFD90fKFWq8AGmmX0L3uBtS9fXL62SrTk5zcnQ6EkD6zoiM3kB88G1Zvs0xm/gZ7WXJRs5nsTBybUvGSqZkT."
AUTH_CREDS = {"username": USERA, "password": USERA_PWD, "eauth": "pam"}
@skipIf(cptc.HAS_CHERRYPY is False, "CherryPy not installed")
class TestAuthPAM(cptc.BaseRestCherryPyTest, ModuleCase):
"""
Test auth with pam using salt-api
"""
@destructiveTest
@skip_if_not_root
def setUp(self):
super().setUp()
try:
add_user = self.run_function("user.add", [USERA], createhome=False)
add_pwd = self.run_function(
"shadow.set_password",
[
USERA,
USERA_PWD if salt.utils.platform.is_darwin() else HASHED_USERA_PWD,
],
)
self.assertTrue(add_user)
self.assertTrue(add_pwd)
user_list = self.run_function("user.list_users")
self.assertIn(USERA, str(user_list))
except AssertionError:
self.run_function("user.delete", [USERA], remove=True)
self.skipTest("Could not add user or password, skipping test")
@slowTest
def test_bad_pwd_pam_chsh_service(self):
"""
Test login while specifying chsh service with bad passwd
This test ensures this PR is working correctly:
https://github.com/saltstack/salt/pull/31826
"""
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds["service"] = "chsh"
copyauth_creds["password"] = "wrong_password"
body = urlencode(copyauth_creds)
request, response = self.request(
"/login",
method="POST",
body=body,
headers={"content-type": "application/x-www-form-urlencoded"},
)
self.assertEqual(response.status, "401 Unauthorized")
@slowTest
def test_bad_pwd_pam_login_service(self):
"""
Test login while specifying login service with bad passwd
This test ensures this PR is working correctly:
https://github.com/saltstack/salt/pull/31826
"""
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds["service"] = "login"
copyauth_creds["password"] = "wrong_password"
body = urlencode(copyauth_creds)
request, response = self.request(
"/login",
method="POST",
body=body,
headers={"content-type": "application/x-www-form-urlencoded"},
)
self.assertEqual(response.status, "401 Unauthorized")
@slowTest
def test_good_pwd_pam_chsh_service(self):
"""
Test login while specifying chsh service with good passwd
This test ensures this PR is working correctly:
https://github.com/saltstack/salt/pull/31826
"""
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds["service"] = "chsh"
body = urlencode(copyauth_creds)
request, response = self.request(
"/login",
method="POST",
body=body,
headers={"content-type": "application/x-www-form-urlencoded"},
)
self.assertEqual(response.status, "200 OK")
@slowTest
def test_good_pwd_pam_login_service(self):
"""
Test login while specifying login service with good passwd
This test ensures this PR is working correctly:
https://github.com/saltstack/salt/pull/31826
"""
copyauth_creds = AUTH_CREDS.copy()
copyauth_creds["service"] = "login"
body = urlencode(copyauth_creds)
request, response = self.request(
"/login",
method="POST",
body=body,
headers={"content-type": "application/x-www-form-urlencoded"},
)
self.assertEqual(response.status, "200 OK")
@destructiveTest
@skip_if_not_root
def tearDown(self):
"""
Clean up after tests. Delete user
"""
super().tearDown()
user_list = self.run_function("user.list_users")
# Remove saltdev user
if USERA in user_list:
self.run_function("user.delete", [USERA], remove=True)
# need to exit cherypy engine
cherrypy.engine.exit()
|
Python
| 0
|
@@ -57,16 +57,36 @@
auth%0A%22%22%22
+%0Aimport urllib.parse
%0A%0Aimport
@@ -160,121 +160,8 @@
ptc%0A
-from salt.ext.six.moves.urllib.parse import ( # pylint: disable=no-name-in-module,import-error%0A urlencode,%0A)%0A
from
@@ -1965,32 +1965,45 @@
%0A body =
+urllib.parse.
urlencode(copyau
@@ -2680,32 +2680,45 @@
%0A body =
+urllib.parse.
urlencode(copyau
@@ -3341,32 +3341,45 @@
%0A body =
+urllib.parse.
urlencode(copyau
@@ -3980,32 +3980,32 @@
ice%22%5D = %22login%22%0A
-
body = u
@@ -4003,16 +4003,29 @@
body =
+urllib.parse.
urlencod
|
c019c337c8642006a7a851c40bbedbb2c32fc5b5
|
Add nuclear option to delete all available caches
|
wger/core/management/commands/clear-cache.py
|
wger/core/management/commands/clear-cache.py
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from wger.core.models import Language
from wger.manager.models import Workout, WorkoutLog
from wger.exercises.models import Exercise
from wger.utils.cache import (
reset_workout_canonical_form,
reset_workout_log,
delete_template_fragment_cache
)
class Command(BaseCommand):
'''
Clears caches (HTML, etc.)
'''
option_list = BaseCommand.option_list + (
make_option('--clear-template',
action='store_true',
dest='clear_template',
default=False,
help='Clear only template caches'),
make_option('--clear-workout-cache',
action='store_true',
dest='clear_workout',
default=False,
help='Clear only the workout canonical view'),
)
help = 'Clears the application cache. You *must* pass an option selecting ' \
'what exactly you want to clear. See available options.'
def handle(self, *args, **options):
'''
Process the options
'''
if not options['clear_template'] and not options['clear_workout']:
raise CommandError('Please select what cache you need to delete, see help')
# Exercises, cached template fragments
if options['clear_template']:
if int(options['verbosity']) >= 2:
self.stdout.write("*** Clearing templates")
for user in User.objects.all():
if int(options['verbosity']) >= 2:
self.stdout.write("* Processing user {0}".format(user.username))
for entry in WorkoutLog.objects.filter(user=user).dates('date', 'year'):
if int(options['verbosity']) >= 3:
self.stdout.write(" Year {0}".format(entry.year))
for month in WorkoutLog.objects.filter(user=user,
date__year=entry.year).dates('date',
'month'):
if int(options['verbosity']) >= 3:
self.stdout.write(" Month {0}".format(entry.month))
reset_workout_log(user.id, entry.year, entry.month)
for day in WorkoutLog.objects.filter(user=user,
date__year=entry.year,
date__month=month.month).dates('date',
'day'):
if int(options['verbosity']) >= 3:
self.stdout.write(" Day {0}".format(day.day))
reset_workout_log(user.id, entry.year, entry.month, day)
for language in Language.objects.all():
delete_template_fragment_cache('muscle-overview', language.id)
delete_template_fragment_cache('exercise-overview', language.id)
delete_template_fragment_cache('exercise-overview-mobile', language.id)
delete_template_fragment_cache('equipment-overview', language.id)
for language in Language.objects.all():
for exercise in Exercise.objects.all():
delete_template_fragment_cache('exercise-detail-header',
exercise.id,
language.id)
delete_template_fragment_cache('exercise-detail-muscles',
exercise.id,
language.id)
# Workout canonical form
if options['clear_workout']:
for w in Workout.objects.all():
reset_workout_canonical_form(w.pk)
|
Python
| 0
|
@@ -801,16 +801,52 @@
andError
+%0Afrom django.core.cache import cache
%0A%0Afrom w
@@ -1666,16 +1666,220 @@
view'),
+%0A%0A make_option('--clear-all',%0A action='store_true',%0A dest='clear_all',%0A default=False,%0A help='Clear ALL cached entries'),
%0A )%0A%0A
@@ -2133,16 +2133,17 @@
if
+(
not opti
@@ -2163,16 +2163,32 @@
mplate'%5D
+%0A
and not
@@ -2204,32 +2204,78 @@
'clear_workout'%5D
+%0A and not options%5B'clear_all'%5D)
:%0A ra
@@ -5080,8 +5080,104 @@
m(w.pk)%0A
+%0A # Nuclear option, clear all%0A if options%5B'clear_all'%5D:%0A cache.clear()%0A
|
4b08904ff8dcdd22ca36d862a14b1afb8f7d0f9d
|
Update igrf12fun.py
|
igrf12py/igrf12fun.py
|
igrf12py/igrf12fun.py
|
#!/usr/bin/env python3
"""
NOTE: The performance of this demo has not been checked at all.
Please do basic sanity checks of output.
Quick demo of calling IGRF12 and IGRF11 using f2py3 from Python
Michael Hirsch
"""
from __future__ import division,absolute_import
from numpy import empty, empty_like, atleast_1d,nditer
from matplotlib.pyplot import figure,subplots
from matplotlib.ticker import ScalarFormatter
#
from histutils.fortrandates import datetime2yeardec
#
import igrf12
#import igrf11
sfmt = ScalarFormatter(useMathText=True) #for 10^3 instead of 1e3
sfmt.set_powerlimits((-2, 2))
sfmt.set_scientific(True)
sfmt.set_useOffset(False)
def runigrf12(dtime,isv,itype,alt,glat,glon):
yeardec = datetime2yeardec(dtime)
colat,elon = latlon2colat(glat,glon)
x = empty(colat.size); y = empty_like(x); z = empty_like(x); f=empty_like(x)
for i,(clt,eln) in enumerate(nditer((colat,elon))):
x[i],y[i],z[i],f[i] = igrf12.igrf12syn(isv, yeardec, itype, alt, clt, eln)
return x.reshape(colat.shape), y.reshape(colat.shape), z.reshape(colat.shape),f.reshape(colat.shape), yeardec
def runigrf11(dtime,isv,itype,alt,glat,glon):
yeardec = datetime2yeardec(dtime)
colat,elon = latlon2colat(glat,glon)
x = empty(colat.size); y = empty_like(x); z = empty_like(x); f=empty_like(x)
for i,(clt,eln) in enumerate(nditer((colat,elon))):
x[i],y[i],z[i],f[i] = igrf11.igrf11syn(isv, yeardec, itype, alt, clt, eln)
return x.reshape(colat.shape), y.reshape(colat.shape), z.reshape(colat.shape),f.reshape(colat.shape)
def latlon2colat(glat,glon):
#atleast_1d for iteration later
colat = 90-atleast_1d(glat)
elon = (360 + atleast_1d(glon)) % 360
return colat,elon
def plotigrf(x,y,z,f,glat,glon,year,isv,mdl):
fg,ax = subplots(2,2,sharex=True)
ax = ax.ravel()
for a,i,j in zip(ax,(x,y,z),('x','y','z')):
hi = a.imshow(i,extent=(glon[0,0],glon[0,-1],glat[0,0],glat[-1,0]),
cmap='bwr',
vmin=-6e4,vmax=6e4) #symmetrix vmin,vmax centers white at zero for bwr cmap
fg.colorbar(hi,ax=a,format=sfmt)
a.set_title('IGRF{} $B_{}$-field on {:.3f}'.format(mdl,j,year))
for a in ax[[0,2]]:
a.set_ylabel('latitude (deg)')
for a in ax[[2,3]]:
a.set_xlabel('longitude (deg)')
if isv==0:
hi = a.imshow(f,extent=(glon[0,0],glon[0,-1],glat[0,0],glat[-1,0]))
fg.colorbar(hi,ax=a,format=sfmt)
a.set_title('IGRF{} $B$-field: total intensity [nT] on {:.2f}'.format(mdl,year))
def plotdiff1112(x,x11,y,y11,z,z11,f,f11,glat,glon,year,isv):
for i,j,k in zip((x,y,z),(x11,y11,z11),('x','y','z')):
fg = figure()
ax = fg.gca()
hi = ax.imshow(i-j,extent=(glon[0,0],glon[0,-1],glat[0,0],glat[-1,0]))
fg.colorbar(hi,format=sfmt)
ax.set_ylabel('latitude (deg)')
ax.set_xlabel('longitude (deg)')
ax.set_title('IGRF12-IGRF11 $B_{}$-field comparison on {:.2f}'.format(k,year))
if isv==0:
fg = figure()
ax = fg.gca()
hi = ax.imshow(f-f11,extent=(glon[0,0],glon[0,-1],glat[0,0],glat[-1,0]))
fg.colorbar(hi)
ax.set_xlabel('latitude (deg)')
ax.set_ylabel('longitude (deg)')
ax.set_title('IGRF12-IGRF11 $B$-field: comparison total intensity [nT] on {:.2f}'.format(year))
|
Python
| 0.000001
|
@@ -213,56 +213,8 @@
%22%22%22%0A
-from __future__ import division,absolute_import%0A
from
|
5952c372ae01672bfce450aec924628faecd3654
|
bump version for release
|
crossbar/crossbar/__init__.py
|
crossbar/crossbar/__init__.py
|
###############################################################################
##
## Copyright (C) 2011-2015 Tavendo GmbH
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License, version 3,
## as published by the Free Software Foundation.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
###############################################################################
__doc__ = """
Crossbar.io - Unified application router.
Crossbar.io is an open-source server software that allows developers to create
distributed systems, composed of application components which are loosely coupled,
communicate in (soft) real-time and can be implemented in different languages.
Crossbar.io features:
- application routing core (RPC+PubSub)
- full WAMP v2 AP implementation
- application component hosting
- multi-process architecture
- and more
For more information, please go to
* Homepage: http://crossbar.io/
* Documentation: https://github.com/crossbario/crossbar/wiki
* Source code: https://github.com/crossbario/crossbar
Open-source licensed under the GNU Affero General Public License version 3.
Created by Tavendo GmbH. Get in contact at http://tavendo.com
"""
__version__ = "0.10.0"
|
Python
| 0
|
@@ -1638,11 +1638,11 @@
= %220.10.
-0
+1
%22%0A
|
0b4fb3dd59ce0940026b1cf212adcf6d17bca7a0
|
Refactor build_update_query (2)
|
mongots/query.py
|
mongots/query.py
|
from datetime import datetime
AGGREGATION_KEYS = [
'',
'months.{month}.',
'months.{month}.days.{day}.',
'months.{month}.days.{day}.hours.{hour}.',
]
DATETIME_KEY = 'datetime'
def build_filter_query(timestamp, tags=None):
filters = tags or {}
filters[DATETIME_KEY] = datetime(timestamp.year, 1, 1)
return filters
def build_update_query(value, timestamp):
datetime_args = {
'month': str(timestamp.month - 1), # Array index: range from 0 to 11
'day': str(timestamp.day - 1), # Array index: range from 0 to 27 / 28 / 29 or 30
'hour': str(timestamp.hour), # range from 0 to 23
}
inc_keys = [
key.format(**datetime_args)
for key in AGGREGATION_KEYS
]
inc_update = {
'%s%s' % (inc_key, aggregate_type): value if aggregate_type is "sum" else 1
for inc_key in inc_keys
for aggregate_type in ['count', 'sum']
}
return {
'$inc': inc_update,
}
|
Python
| 0
|
@@ -382,16 +382,84 @@
stamp):%0A
+ inc_values = %7B%0A 'count': 1,%0A 'sum': value,%0A %7D%0A%0A
date
@@ -876,17 +876,19 @@
e):
+inc_
value
- if
+s%5B
aggr
@@ -901,24 +901,9 @@
type
- is %22sum%22 else 1
+%5D
%0A
@@ -965,24 +965,18 @@
in
-%5B'count', 'sum'%5D
+inc_values
%0A
|
51432aa92e233ba3c9db500e4e3d55b7067e906c
|
Add latest version of py-jinja2 (#13311)
|
var/spack/repos/builtin/packages/py-jinja2/package.py
|
var/spack/repos/builtin/packages/py-jinja2/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "http://jinja.pocoo.org/"
url = "https://pypi.io/packages/source/J/Jinja2/Jinja2-2.9.6.tar.gz"
import_modules = ['jinja2']
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('py-setuptools', type='build')
depends_on('py-markupsafe', type=('build', 'run'))
depends_on('py-babel@0.8:', type=('build', 'run')) # optional, required for i18n
|
Python
| 0
|
@@ -455,26 +455,39 @@
http
+s
://
-jinja.pocoo.org
+palletsprojects.com/p/jinja
/%22%0A
@@ -551,19 +551,20 @@
inja2-2.
-9.6
+10.3
.tar.gz%22
@@ -615,18 +615,117 @@
on('2.10
+.3
',
+ sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')%0A version('2.10',
sha256=
@@ -812,16 +812,17 @@
'2.9.6',
+
sha256=
@@ -907,16 +907,17 @@
n('2.8',
+
sha25
@@ -1002,24 +1002,25 @@
ion('2.7.3',
+
sha256='2e2
@@ -1103,16 +1103,17 @@
'2.7.2',
+
sha256=
@@ -1200,16 +1200,17 @@
'2.7.1',
+
sha256=
@@ -1295,16 +1295,17 @@
n('2.7',
+
sha25
@@ -1450,16 +1450,22 @@
rkupsafe
+@0.23:
', type=
|
77f155fec48c808724eff1b2631035d2526c170f
|
add version 2.11.3 (#23698)
|
var/spack/repos/builtin/packages/py-jinja2/package.py
|
var/spack/repos/builtin/packages/py-jinja2/package.py
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "https://palletsprojects.com/p/jinja/"
pypi = "Jinja2/Jinja2-2.10.3.tar.gz"
version('2.10.3', sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')
version('2.10.1', sha256='065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013')
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('py-setuptools', type='build')
depends_on('py-markupsafe@0.23:', type=('build', 'run'))
depends_on('py-babel@0.8:', type=('build', 'run')) # optional, required for i18n
|
Python
| 0.000009
|
@@ -527,16 +527,113 @@
ar.gz%22%0A%0A
+ version('2.11.3', sha256='a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6')%0A
vers
@@ -1498,16 +1498,77 @@
fa4d')%0A%0A
+ depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))%0A
depe
|
e705c2aceae7020781ac18e3352dad7b4e3d5b1e
|
Send currently edited object to template in edit_view
|
mooch/generic.py
|
mooch/generic.py
|
from django.contrib import messages
from django.core.exceptions import PermissionDenied, ValidationError
from django.forms.formsets import all_valid
from django.forms.models import modelform_factory
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, redirect, render_to_response
from django.template import RequestContext
from django.utils.datastructures import SortedDict
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext as _
class ModelView(object):
view_decorator = lambda self, f: f
template_object_list_name = 'object_list'
template_object_name = 'object'
def __init__(self, model, **kwargs):
self.model = model
for k, v in kwargs.items():
setattr(self, k, v)
def get_queryset(self, request):
return self.model.objects.all()
def get_template(self, request, action):
opts = self.model._meta
return '%s/%s_%s.html' % (opts.app_label, opts.module_name, action)
def get_urls(self):
from django.conf.urls.defaults import patterns, url
info = self.model._meta.app_label, self.model._meta.module_name
return patterns('',
url(r'^$', self.view_decorator(self.list_view),
name='%s_%s_list' % info),
url(r'^add/$', self.view_decorator(self.add_view),
name='%s_%s_add' % info),
url(r'^(.+)/edit/$', self.view_decorator(self.edit_view),
name='%s_%s_edit' % info),
url(r'^(.+)/delete/$', self.view_decorator(self.delete_view),
name='%s_%s_delete' % info),
url(r'^(.+)/$', self.view_decorator(self.detail_view),
name='%s_%s_detail' % info),
)
@property
def urls(self):
return self.get_urls()
# HELPERS
def get_object(self, request, **kwargs):
queryset = self.get_queryset(request)
model = queryset.model
try:
return queryset.get(**kwargs)
except (model.DoesNotExist, ValueError, ValidationError):
raise self.model.DoesNotExist
def get_object_or_404(self, request, **kwargs):
try:
return self.get_object(request, **kwargs)
except self.model.DoesNotExist:
raise Http404
def get_form(self, request, **kwargs):
return modelform_factory(self.model, **kwargs)
def get_formset_instances(self, request, instance=None, **kwargs):
return SortedDict()
def message(self, request, message):
messages.info(request, message)
def save_form(self, request, form, change):
return form.save(commit=False)
def save_model(self, request, obj, form, change):
obj.save()
def save_formset(self, request, form, formset, change):
formset.save()
# VIEW HELPERS
def render_list(self, request, context):
return render_to_response(
self.get_template(request, 'list'),
context, context_instance=RequestContext(request))
def render_detail(self, request, context):
return render_to_response(
self.get_template(request, 'detail'),
context, context_instance=RequestContext(request))
def render_form(self, request, context, change):
return render_to_response(
self.get_template(request, 'form'),
context, context_instance=RequestContext(request))
def response_add(self, request, instance, form, formsets):
self.message(request, _('The new object has been successfully created.'))
return redirect(instance)
def response_edit(self, request, instance, form, formsets):
self.message(request, _('The object has been successfully updated.'))
return redirect(instance)
# VIEWS
def list_view(self, request):
return self.render_list(request, {
self.template_object_list_name: self.get_queryset(request),
})
def detail_view(self, request, object_pk):
obj = self.get_object_or_404(request, pk=object_pk)
return self.render_detail(request, {
self.template_object_name: obj,
})
def add_view(self, request):
ModelForm = self.get_form(request)
opts = self.model._meta
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES)
if form.is_valid():
new_object = self.save_form(request, form, change=False)
form_validated = True
else:
new_object = self.model()
form_validated = False
formsets = self.get_formset_instances(request, instance=new_object)
if all_valid(formsets.itervalues()) and form_validated:
self.save_model(request, new_object, form, change=False)
form.save_m2m()
for formset in formsets.itervalues():
self.save_formset(request, form, formset, change=False)
return self.response_add(request, new_object, form, formsets)
else:
form = ModelForm()
context = {
'title': _('Add %s') % force_unicode(opts.verbose_name),
'form': form,
}
return self.render_form(request, context, change=False)
def edit_view(self, request, object_pk):
ModelForm = self.get_form(request)
obj = self.get_object_or_404(request, pk=object_pk)
opts = self.model._meta
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
formsets = self.get_formset_instances(request, instance=obj)
if form.is_valid() and all_valid(formsets):
new_object = self.save_form(request, form, change=True)
form_validated = True
else:
new_object = obj
form_validated = False
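            # Rebuild the formsets against the updated (possibly unsaved) instance before final validation.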
formsets = self.get_formset_instances(request, instance=new_object)
if all_valid(formsets.itervalues()) and form_validated:
self.save_model(request, new_object, form, change=True)
form.save_m2m()
for formset in formsets.itervalues():
                    self.save_formset(request, form, formset, change=True)
return self.response_edit(request, new_object, form, formsets)
else:
form = ModelForm(instance=obj)
context = {
'title': _('Change %s') % force_unicode(opts.verbose_name),
'form': form,
}
return self.render_form(request, context, change=True)
def delete_view(self, request, object_pk):
obj = self.get_object_or_404(request, pk=object_pk)
obj.delete()
self.message(request, _('The object has been successfully deleted.'))
info = self.model._meta.app_label, self.model._meta.module_name
return redirect('%s_%s_list' % info)
|
Python
| 0
|
@@ -6627,32 +6627,76 @@
'form': form,%0A
+ self.template_object_name: obj,%0A
%7D%0A%0A
|
5b6445e519fa9c03d703144462004ac27b9079ba
|
Add latest version of joblib (#11495)
|
var/spack/repos/builtin/packages/py-joblib/package.py
|
var/spack/repos/builtin/packages/py-joblib/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJoblib(PythonPackage):
"""Python function as pipeline jobs"""
homepage = "http://packages.python.org/joblib/"
url = "https://pypi.io/packages/source/j/joblib/joblib-0.10.3.tar.gz"
version('0.10.3', '455401ccfaf399538d8e5333086df2d3')
version('0.10.2', 'ebb42af4342c2445b175f86bd478d869')
version('0.10.0', '61e40322c4fed5c22905f67d7d1aa557')
# for testing
# depends_on('py-nose', type=('build', 'run'))
|
Python
| 0
|
@@ -407,19 +407,19 @@
blib-0.1
-0.3
+3.2
.tar.gz%22
@@ -420,16 +420,278 @@
ar.gz%22%0A%0A
+ import_modules = %5B%0A 'joblib', 'joblib.externals', 'joblib.externals.cloudpickle',%0A 'joblib.externals.loky', 'joblib.externals.loky.backend'%0A %5D%0A%0A version('0.13.2', sha256='315d6b19643ec4afd4c41c671f9f2d65ea9d787da093487a81ead7b0bac94524')%0A
vers
@@ -860,74 +860,4 @@
7')%0A
-%0A # for testing%0A # depends_on('py-nose', type=('build', 'run'))%0A
|
028391c0a3778d20d162882b6778a164984ceb2a
|
update dependencies and fix build (#9207)
|
var/spack/repos/builtin/packages/py-spyder/package.py
|
var/spack/repos/builtin/packages/py-spyder/package.py
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PySpyder(PythonPackage):
"""Scientific PYthon Development EnviRonment"""
homepage = "https://github.com/spyder-ide/spyder"
url = "https://pypi.io/packages/source/s/spyder/spyder-3.1.3.tar.gz"
version('3.1.3', '4b9b7c8c3e6dc00001e6e98473473c36')
version('2.3.9', 'dd01e07a77123c128ff79ba57b97c1d7')
depends_on('py-setuptools', type='build')
depends_on('py-rope@0.9.4:', type=('build', 'run'), when='^python@:3')
# depends_on('py-rope_py3k', type=('build', 'run'), when='^python@3:')
depends_on('py-jedi@0.9.0', type=('build', 'run'))
# otherwise collision with py-flake8
depends_on('py-pyflakes@1.2.3', type=('build', 'run'))
depends_on('py-pygments@2.0:', type=('build', 'run'))
depends_on('py-qtconsole@4.2.0:', type=('build', 'run'))
depends_on('py-nbconvert', type=('build', 'run'))
depends_on('py-sphinx', type=('build', 'run'))
depends_on('py-pycodestyle', type=('build', 'run'))
depends_on('py-pylint', type=('build', 'run'))
depends_on('py-psutil', type=('build', 'run'))
depends_on('py-qtawesome@0.4.1:', type=('build', 'run'))
depends_on('py-qtpy@1.1.0:', type=('build', 'run'))
depends_on('py-zmq', type=('build', 'run'))
depends_on('py-chardet@2:', type=('build', 'run'))
depends_on('py-pickleshare', type=('build', 'run'))
depends_on('py-numpydoc', type=('build', 'run'))
|
Python
| 0
|
@@ -1604,40 +1604,55 @@
('py
--setuptools',
+thon@2.7.0:2.8.0,3.3.0:',
type=
+(
'build'
+, 'run')
)%0A
@@ -2222,40 +2222,465 @@
-depends_on('py-pycodestyle',
+# The pycodestyle dependency is split in two, because internally it%0A # changes its name from pep8 to pycodestyle, and spyder does not cope%0A # with this change until @3.2.0%0A # https://github.com/PyCQA/pycodestyle/issues/466%0A # https://github.com/spyder-ide/spyder/blob/master/CHANGELOG.md#version-32-2017-07-24%0A depends_on('py-pycodestyle@:1.7.1', when='@:3.1.99', type=('build', 'run'))%0A depends_on('py-pycodestyle@2.1.0:', when='@3.2.0:',
ty
@@ -2952,31 +2952,171 @@
-depends_on('py-zmq',
+# technically this is a transitive dependency in order for py-pyqt%0A # to pick up webkit, but this is the easier solution (see #9207)%0A depends_on('qt+webkit',
@@ -3168,21 +3168,21 @@
'py-
-chardet@2:
+pickleshare
',
-
@@ -3225,32 +3225,93 @@
_on('py-
-pickleshare',
+zmq', type=('build', 'run'))%0A depends_on('py-chardet@2.0.0:',
type=
|
350a5422ed1f874e7b2780348663f320a1af6676
|
Update py-theano dependencies (#14015)
|
var/spack/repos/builtin/packages/py-theano/package.py
|
var/spack/repos/builtin/packages/py-theano/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyTheano(PythonPackage):
"""Optimizing compiler for evaluating mathematical expressions on CPUs
and GPUs."""
homepage = "http://deeplearning.net/software/theano/"
url = "https://pypi.io/packages/source/T/Theano/Theano-0.8.2.tar.gz"
git = "https://github.com/Theano/Theano.git"
version('master', branch='master')
version('1.0.4', sha256='35c9bbef56b61ffa299265a42a4e8f8cb5a07b2997dabaef0f8830b397086913')
version('1.0.2', sha256='6768e003d328a17011e6fca9126fbb8a6ffd3bb13cb21c450f3e724cca29abde')
version('1.0.1', sha256='88d8aba1fe2b6b75eacf455d01bc7e31e838c5a0fb8c13dde2d9472495ff4662')
version('0.8.2', sha256='7463c8f7ed1a787bf881f36d38a38607150186697e7ce7e78bfb94b7c6af8930')
variant('gpu', default=False,
description='Builds with support for GPUs via CUDA and cuDNN')
depends_on('python@2.6:2.8,3.3:')
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-scipy@0.11:', type=('build', 'run'))
depends_on('py-numpy@1.7.1:', type=('build', 'run'))
depends_on('py-six@1.9.0:', type=('build', 'run'))
depends_on('blas')
depends_on('cuda', when='+gpu')
depends_on('cudnn', when='+gpu')
depends_on('py-pygpu', when='+gpu', type=('build', 'run'))
depends_on('libgpuarray', when='+gpu')
depends_on('py-nose@1.3.0:', type='test')
depends_on('py-nose-parameterized@0.5.0:', type='test')
|
Python
| 0
|
@@ -1179,17 +1179,18 @@
'py-
-scipy@0.1
+numpy@1.9.
1:',
@@ -1236,19 +1236,18 @@
'py-
-numpy@1.7
+scipy@0
.1
+4
:',
@@ -1598,13 +1598,8 @@
'py-
-nose-
para
@@ -1611,15 +1611,49 @@
ized
-@0.5.0:
+', type='test')%0A depends_on('py-flake8
', t
|
38199ce9cfb69b21e45e679d3a6604a72da7cc5b
|
add version 0.5.0 to r-forcats (#20972)
|
var/spack/repos/builtin/packages/r-forcats/package.py
|
var/spack/repos/builtin/packages/r-forcats/package.py
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RForcats(RPackage):
"""Helpers for reordering factor levels (including moving specified levels
to front, ordering by first appearance, reversing, and randomly
shuffling), and tools for modifying factor levels (including collapsing
rare levels into other, 'anonymising', and manually 'recoding')."""
homepage = "http://forcats.tidyverse.org/"
url = "https://cloud.r-project.org/src/contrib/forcats_0.2.0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/forcats"
version('0.4.0', sha256='7c83cb576aa6fe1379d7506dcc332f7560068b2025f9e3ab5cd0a5f28780d2b2')
version('0.3.0', sha256='95814610ec18b8a8830eba63751954387f9d21400d6ab40394ed0ff22c0cb657')
version('0.2.0', sha256='b5bce370422d4c0ec9509249ae645373949bfbe9217cdf50dce2bfbdad9f7cd7')
depends_on('r@3.1:', type=('build', 'run'))
depends_on('r-tibble', type=('build', 'run'))
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-ellipsis', when='@0.4.0:', type=('build', 'run'))
depends_on('r-rlang', when='@0.4.0:', type=('build', 'run'))
|
Python
| 0
|
@@ -246,16 +246,76 @@
%0A %22%22%22
+Tools for Working with Categorical Variables (Factors)%0A%0A
Helpers
@@ -381,18 +381,15 @@
vels
+ to
%0A
- to
fro
@@ -445,23 +445,16 @@
randomly
-%0A
shuffli
@@ -457,16 +457,20 @@
ffling),
+%0A
and too
@@ -525,15 +525,8 @@
sing
-%0A
rar
@@ -533,16 +533,20 @@
e levels
+%0A
into ot
@@ -794,16 +794,112 @@
rcats%22%0A%0A
+ version('0.5.0', sha256='8f960e789333ec597ddf2d653a64e330f03b86f465e9b71f6779f227355d90c4')%0A
vers
@@ -1248,15 +1248,97 @@
n('r
--tibble
+@3.2:', when='@0.5.0:', type=('build', 'run'))%0A depends_on('r-ellipsis', when='@0.4.0:
', t
@@ -1425,32 +1425,29 @@
pends_on('r-
-ellipsis
+rlang
', when='@0.
@@ -1494,37 +1494,22 @@
s_on('r-
-rlang', when='@0.4.0:
+tibble
', type=
|
c0297fd4837a83c177a89656c5ef591d7b5430d2
|
add download remote file function
|
mp3Downloader.py
|
mp3Downloader.py
|
import urllib2
import subprocess
import os
import tempfile
import shutil
"""
http://feeds.gimletmedia.com/~r/hearstartup/~5/sqn8_rZ3xTM/GLT6849433183.mp3
"""
TEMP_DIR = './tmp'
OUTPUT_DIR = './output'
def cleanup():
shutil.rmtree(TEMP_DIR)
def create_ancillary_folders():
if not os.path.exists(OUTPUT_DIR):
print "Output directory absent. Creating output directory..."
os.makedirs(OUTPUT_DIR)
if not os.path.exists(TEMP_DIR):
print "Creating tmp directory..."
os.makedirs(TEMP_DIR)
def get_url_from_user():
"""
    Function that asks the user to enter a URL from the terminal.
    The URL should point to an mp3 file to be downloaded.
"""
url = raw_input(
"Please enter the URL of the podcast you'd like to transcribe. ")
print "You just entered: ", url
return url
def create_temporary_folder():
dirpath = tempfile.mkdtemp(dir=TEMP_DIR)
print "Just created tmp dir at ", dirpath
return dirpath
def create_temporary_file(directory, suffix):
fp = tempfile.NamedTemporaryFile(dir=directory, suffix=suffix)
print "Just created tmp file at ", fp.name
return fp
def download_mp3_from_url(url):
"""
    Once we have received the mp3 url from the user, we download it and write it
    to a file in binary mode. This function always writes to the same file.
"""
dirpath = create_temporary_folder()
mp3file = urllib2.urlopen(url)
mp3_uid = url.split('/')[-1:]
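    # split('/')[-1:] yields a one-element list containing the file name, hence mp3_uid[0] below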
print mp3_uid
filepath = create_temporary_file(dirpath, mp3_uid[0])
print filepath.name, "this is the filepath"
with open(filepath.name, 'wb') as output:
output.write(mp3file.read())
if not os.path.exists(filepath.name):
print "Failed to write mp3 in ", filepath
convert_to_wav(filepath.name)
return filepath.name
def convert_to_wav(filepath):
"""
Converts files to a format that pocketsphinx can deal with
(16khz mono 16bit wav)
"""
print filepath
new_file = filepath[:-4]
print new_file, "this the new filename without the .mp3 extension"
new_file = new_file + '.wav'
if os.path.exists(new_file + '.transcription.txt') is False:
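        # ffmpeg flags: -acodec pcm_s16le = 16-bit PCM, -ac 1 = mono, -ar 16000 = 16 kHz sample rate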
subprocess.call(['ffmpeg', '-y', '-i', filepath, '-acodec',
'pcm_s16le', '-ac', '1', '-ar', '16000', new_file])
def main():
create_ancillary_folders()
new_path = download_mp3_from_url(get_url_from_user())
# assuming here a function that does transcribe & write to output
print "I have transcribed the podcast here. "
print ""
print "Proceeding to cleanup"
print ""
cleanup()
# here I need to go and delete the temp files.
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -1180,191 +1180,1226 @@
oad_
-mp3_from_url(url):%0A %22%22%22%0A Once we have received the mp3 url from the user, we download and write it%0A in a file, in binary. This function writes always in the same file%0A %22%22%22
+remote_file(url, dest):%0A %22%22%22%0A Downloads a remote file to the specified location.%0A%0A Params:%0A url (string): The url of the remote file%0A dest (string): The download destination%0A %22%22%22%0A%0A remote_file = urllib2.urlopen(url)%0A meta_info = remote_file.info()%0A file_size = int(meta_info.getheaders(%22Content-Length%22)%5B0%5D)%0A%0A print %22Downloading: %25s %5Cn Bytes: %25s%22 %25 (url.split('/')%5B-1%5D, file_size)%0A%0A file_size_dl = 0%0A block_sz = 8192%0A%0A with open(dest, 'wb') as local_file:%0A while True:%0A buf = remote_file.read(block_sz)%0A if not buf:%0A break%0A%0A file_size_dl += len(buf)%0A local_file.write(buf)%0A%0A status = r%22%2510d %5B%253.2f%25%25%5D%22 %25 (%0A file_size_dl, file_size_dl * 100. / file_size)%0A status = status + chr(8) * (len(status) + 1)%0A print status,%0A%0A%0Adef get_podcast_file(url):%0A %22%22%22%0A Returns the podcast file to process in the right format. First, we download%0A the podcast from the remote location and then we convert the file to the%0A right format for the transcriber.%0A%0A Params:%0A url (string): The url of the remote file%0A %22%22%22%0A%0A # create the download destination
%0A
|
538ffaf6c71944aa4f0d77f1167c2cef570dabe1
|
Store self.action='metadata' for OPTIONS requests on viewsets. Closes #3115.
|
rest_framework/viewsets.py
|
rest_framework/viewsets.py
|
"""
ViewSets are essentially just a type of class based view, that doesn't provide
any method handlers, such as `get()`, `post()`, etc... but instead has actions,
such as `list()`, `retrieve()`, `create()`, etc...
Actions are only bound to methods at the point of instantiating the views.
user_list = UserViewSet.as_view({'get': 'list'})
user_detail = UserViewSet.as_view({'get': 'retrieve'})
Typically, rather than instantiate views from viewsets directly, you'll
register the viewset with a router and let the URL conf be determined
automatically.
router = DefaultRouter()
router.register(r'users', UserViewSet, 'user')
urlpatterns = router.urls
"""
from __future__ import unicode_literals
from functools import update_wrapper
from django.utils.decorators import classonlymethod
from django.views.decorators.csrf import csrf_exempt
from rest_framework import generics, mixins, views
class ViewSetMixin(object):
"""
This is the magic.
Overrides `.as_view()` so that it takes an `actions` keyword that performs
the binding of HTTP methods to actions on the Resource.
For example, to create a concrete view binding the 'GET' and 'POST' methods
to the 'list' and 'create' actions...
view = MyViewSet.as_view({'get': 'list', 'post': 'create'})
"""
@classonlymethod
def as_view(cls, actions=None, **initkwargs):
"""
Because of the way class based views create a closure around the
instantiated view, we need to totally reimplement `.as_view`,
and slightly modify the view function that is created and returned.
"""
# The suffix initkwarg is reserved for identifying the viewset type
# eg. 'List' or 'Instance'.
cls.suffix = None
# actions must not be empty
if not actions:
raise TypeError("The `actions` argument must be provided when "
"calling `.as_view()` on a ViewSet. For example "
"`.as_view({'get': 'list'})`")
# sanitize keyword arguments
for key in initkwargs:
if key in cls.http_method_names:
raise TypeError("You tried to pass in the %s method name as a "
"keyword argument to %s(). Don't do that."
% (key, cls.__name__))
if not hasattr(cls, key):
raise TypeError("%s() received an invalid keyword %r" % (
cls.__name__, key))
def view(request, *args, **kwargs):
self = cls(**initkwargs)
# We also store the mapping of request methods to actions,
# so that we can later set the action attribute.
# eg. `self.action = 'list'` on an incoming GET request.
self.action_map = actions
# Bind methods to actions
# This is the bit that's different to a standard view
for method, action in actions.items():
handler = getattr(self, action)
setattr(self, method, handler)
# Patch this in as it's otherwise only present from 1.5 onwards
if hasattr(self, 'get') and not hasattr(self, 'head'):
self.head = self.get
# And continue as usual
return self.dispatch(request, *args, **kwargs)
# take name and docstring from class
update_wrapper(view, cls, updated=())
# and possible attributes set by decorators
# like csrf_exempt from dispatch
update_wrapper(view, cls.dispatch, assigned=())
# We need to set these on the view function, so that breadcrumb
# generation can pick out these bits of information from a
# resolved URL.
view.cls = cls
view.suffix = initkwargs.get('suffix', None)
return csrf_exempt(view)
def initialize_request(self, request, *args, **kwargs):
"""
Set the `.action` attribute on the view,
depending on the request method.
"""
request = super(ViewSetMixin, self).initialize_request(request, *args, **kwargs)
self.action = self.action_map.get(request.method.lower())
return request
class ViewSet(ViewSetMixin, views.APIView):
"""
The base ViewSet class does not provide any actions by default.
"""
pass
class GenericViewSet(ViewSetMixin, generics.GenericAPIView):
"""
The GenericViewSet class does not provide any actions by default,
but does include the base set of generic view behavior, such as
the `get_object` and `get_queryset` methods.
"""
pass
class ReadOnlyModelViewSet(mixins.RetrieveModelMixin,
mixins.ListModelMixin,
GenericViewSet):
"""
A viewset that provides default `list()` and `retrieve()` actions.
"""
pass
class ModelViewSet(mixins.CreateModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
mixins.ListModelMixin,
GenericViewSet):
"""
A viewset that provides default `create()`, `retrieve()`, `update()`,
`partial_update()`, `destroy()` and `list()` actions.
"""
pass
|
Python
| 0
|
@@ -3259,16 +3259,340 @@
lf.get%0A%0A
+ # Explicitly map %60options%60 requests to an (implicit) action named%0A # 'metadata'. This action doesn't actually exist as a named method,%0A # because, unlike other methods, we always route to it.%0A if hasattr(self, 'options'):%0A self.action_map%5B'options'%5D = 'metadata'%0A%0A
|
de9f83d5c573112fabfd923d845e2092c64d0fc7
|
fix whitespace
|
legislature/templatetags/pagination.py
|
legislature/templatetags/pagination.py
|
# Copyright (C) 2012 Michael Mulley (michaelmulley.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# @see https://github.com/rhymeswithcycle/openparliament/blob/master/parliament/core/templatetags/pagination.py
# @see http://djangosnippets.org/snippets/2763/
from django import template
register = template.Library()
LEADING_PAGE_RANGE_DISPLAYED = TRAILING_PAGE_RANGE_DISPLAYED = 8
LEADING_PAGE_RANGE = TRAILING_PAGE_RANGE = 6
NUM_PAGES_OUTSIDE_RANGE = 2
ADJACENT_PAGES = 2
@register.assignment_tag(takes_context=True)
def long_paginator(context):
'''
To be used in conjunction with the object_list generic view.
Adds pagination context variables for use in displaying leading, adjacent and
trailing page links in addition to those created by the object_list generic
view.
'''
try:
page_obj = context['page_obj']
except KeyError:
page_obj = context['page']
try:
paginator = page_obj.paginator
except AttributeError:
return ''
pages = paginator.num_pages
if pages <= 1:
return ''
page = page_obj.number
in_leading_range = in_trailing_range = False
pages_outside_leading_range = pages_outside_trailing_range = range(0)
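    # Pick the page numbers to display: all pages when there are few, the leading or
    # trailing block near either end, or a small window around the current page otherwise.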
if pages <= LEADING_PAGE_RANGE_DISPLAYED + NUM_PAGES_OUTSIDE_RANGE + 1:
in_leading_range = in_trailing_range = True
page_range = [n for n in range(1, pages + 1)]
elif page <= LEADING_PAGE_RANGE:
in_leading_range = True
page_range = [n for n in range(1, LEADING_PAGE_RANGE_DISPLAYED + 1)]
pages_outside_leading_range = [n + pages for n in range(0, -NUM_PAGES_OUTSIDE_RANGE, -1)]
elif page > pages - TRAILING_PAGE_RANGE:
in_trailing_range = True
page_range = [n for n in range(pages - TRAILING_PAGE_RANGE_DISPLAYED + 1, pages + 1) if n > 0 and n <= pages]
pages_outside_trailing_range = [n + 1 for n in range(0, NUM_PAGES_OUTSIDE_RANGE)]
else:
page_range = [n for n in range(page - ADJACENT_PAGES, page + ADJACENT_PAGES + 1) if n > 0 and n <= pages]
pages_outside_leading_range = [n + pages for n in range(0, -NUM_PAGES_OUTSIDE_RANGE, -1)]
pages_outside_trailing_range = [n + 1 for n in range(0, NUM_PAGES_OUTSIDE_RANGE)]
# Now try to retain GET params, except for 'page'
# Add 'django.core.context_processors.request' to settings.TEMPLATE_CONTEXT_PROCESSORS beforehand
request = context['request']
params = request.GET.copy()
if 'page' in params:
del params['page']
if 'partial' in params:
del params['partial']
get_params = params.urlencode()
pagination_ctx = {
'pages': pages,
'page': page,
'previous': page_obj.previous_page_number() if page_obj.has_previous() else None,
'next': page_obj.next_page_number() if page_obj.has_next() else None,
'has_previous': page_obj.has_previous(),
'has_next': page_obj.has_next(),
'page_range': page_range,
'in_leading_range': in_leading_range,
'in_trailing_range': in_trailing_range,
'pages_outside_leading_range': pages_outside_leading_range,
'pages_outside_trailing_range': pages_outside_trailing_range,
'get_params': get_params,
}
return template.loader.get_template('long_paginator.html').render(template.Context(pagination_ctx))
|
Python
| 0.999999
|
@@ -308,17 +308,16 @@
rsion.%0A#
-
%0A# This
@@ -559,17 +559,16 @@
tails.%0A#
-
%0A# You s
@@ -2554,17 +2554,16 @@
else:
-
%0A
|
f18db26bdf82d9c921a765408d00203edf0db0c8
|
fix another empty-code bug
|
ELiDE/codeinput.py
|
ELiDE/codeinput.py
|
import re
from functools import partial
from string import ascii_letters, digits
from kivy.clock import Clock
from kivy.properties import (
AliasProperty,
ListProperty,
NumericProperty,
ObjectProperty,
StringProperty
)
from kivy.lang import Builder
from kivy.uix.textinput import TextInput
from kivy.uix.codeinput import CodeInput
from kivy.uix.boxlayout import BoxLayout
from pygments import styles
from pygments.lexers import Python3Lexer
class ELiDECodeInput(CodeInput):
lexer = ObjectProperty(Python3Lexer())
sig_ex = re.compile('^ *def .+?\((.+)\):$')
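# Matches a one-line 'def name(params):' signature and captures its parameter list (used in _set_source below).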
class FunctionInput(BoxLayout):
font_name = StringProperty('DroidSans')
font_size = NumericProperty(12)
style_name = StringProperty('default')
name = StringProperty()
params = ListProperty()
def _get_source(self):
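        # Reassemble the full function source: a signature line built from name/params,
        # plus the editor text re-indented by four spaces.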
code = 'def ' + self.name + '(' + ', '.join(self.params) + '):\n'
for line in self.ids.code.text.split('\n'):
code += (' ' * 4 + line + '\n')
return code.rstrip(' \n\t')
def _set_source(self, v, *args):
if 'code' not in self.ids:
Clock.schedule_once(partial(self._set_source, v), 0)
return
lines = v.split('\n')
firstline = lines[0].lstrip()
if firstline == '' or firstline[0] == '@':
del lines[0]
if lines == []:
self.ids.code.text = ''
return
# how indented is it?
spaces = 0
for ch in lines[0]:
if ch == ' ':
spaces += 1
elif ch == '\t':
spaces += 4
else:
break
# and another four because everything should be within the
# same function block
spaces += 4
self.params = [
parm.strip() for parm in
sig_ex.match(lines[0]).groups()[0].split(',')
]
del lines[0]
# hack to allow 'empty' functions
if lines[-1].strip() == 'pass':
del lines[-1]
self.ids.code.text = '\n'.join(line[spaces:] for line in lines)
source = AliasProperty(_get_source, _set_source)
def on_name(self, *args):
if 'funname' not in self.ids:
Clock.schedule_once(self.on_name, 0)
return
self.ids.funname.text = self.name
class FunctionNameInput(TextInput):
def insert_text(self, s, from_undo=False):
if self.text == '':
if s[0] not in (ascii_letters + '_'):
return
return super().insert_text(
''.join(c for c in s if c in (ascii_letters + digits + '_'))
)
kv = """
<FunctionInput>:
orientation: 'vertical'
BoxLayout:
orientation: 'horizontal'
size_hint_y: None
height: funname.height
ELiDECodeInput:
id: imafunction
text: 'def'
font_name: root.font_name
font_size: root.font_size
style_name: root.style_name
disabled: True
size_hint: (None, None)
height: self.line_height + self.font_size
width: self.font_size * 2.5
background_disabled_normal: ''
disabled_foreground_color: self.foreground_color
FunctionNameInput:
id: funname
font_name: root.font_name
font_size: root.font_size
size_hint_y: None
height: self.line_height + self.font_size
multiline: False
write_tab: False
ELiDECodeInput:
id: params
text: '(' + ', '.join(root.params) + '):'
font_name: root.font_name
font_size: root.font_size
style_name: root.style_name
disabled: True
size_hint_y: None
height: self.line_height + self.font_size
background_disabled_normal: ''
disabled_foreground_color: self.foreground_color
BoxLayout:
orientation: 'horizontal'
Label:
canvas:
Color:
rgba: params.background_color
Rectangle:
pos: self.pos
size: self.size
Color:
rgba: [1., 1., 1., 1.]
font_name: root.font_name
font_size: root.font_size
# PEP8 standard indentation width is 4 spaces
text: ' ' * 4
size_hint_x: None
width: self.texture_size[0]
ELiDECodeInput:
font_name: root.font_name
font_size: root.font_size
style_name: root.style_name
id: code
"""
Builder.load_string(kv)
if __name__ == '__main__':
from kivy.base import runTouchApp
runTouchApp(ELiDEFunctionInput(header='def foo(bar, bas):', style=styles.get_style_by_name('fruity')))
|
Python
| 0.000059
|
@@ -1892,32 +1892,111 @@
del lines%5B0%5D%0A
+ if lines == %5B%5D:%0A self.ids.code.text = ''%0A return%0A
# hack t
|
a529eb18e9d114672350853a48a16d6036ca0c76
|
split the former RulesView into three parts
|
ELiDE/rulesview.py
|
ELiDE/rulesview.py
|
# This file is part of LiSE, a framework for life simulation games.
# Copyright (C) 2013-2014 Zachary Spector, ZacharySpector@gmail.com
"""Widget to enable browsing rules and the functions that make them."""
from functools import partial
from kivy.clock import Clock
from kivy.logger import Logger
from kivy.adapters import ListAdapter
from kivy.uix.listview import ListView, ListItemButton
from kivy.uix.widget import Widget
from kivy.properties import ObjectProperty, DictProperty
class RulesView(Widget):
subject = ObjectProperty()
func_adapter = DictProperty({})
def get_func_data(self, store):
return list(
self.engine.function.db.func_table_name_plaincode(store)
)
def get_func_adapter(self, store):
if store not in self.func_adapter:
self.func_adapter[store] = ListAdapter(
data=self.get_func_data(store),
cls=ListItemButton,
args_converter=lambda i, (name, code): {
'text': name,
'on_press': lambda inst:
self.show_func_editor(
store,
name,
code
)
},
selection_mode='single',
allow_empty_selection=True
)
return self.func_adapter[store]
def refresh_func_adapter(self, store, *args):
self.get_func_adapter(store).data = self.get_func_data(store)
def on_engine(self):
if self.engine is None:
return
self._func_view_trigger = ListView(
adapter=self.get_func_adapter('trigger')
)
self._trigger_refresh_trigger = Clock.create_trigger(
partial(self.refresh_func_adapter, 'trigger')
)
self._func_view_prereq = ListView(
adapter=self.get_func_adapter('prereq')
)
self._trigger_refresh_prereq = Clock.create_trigger(
partial(self.refresh_func_adapter, 'prereq')
)
self._func_view_action = ListView(
adapter=self.get_func_adapter('action')
)
self._trigger_refresh_action = Clock.create_trigger(
partial(self.refresh_func_adapter, 'action')
)
|
Python
| 0
|
@@ -231,16 +231,46 @@
partial%0A
+from kivy.lang import Builder%0A
from kiv
@@ -290,16 +290,16 @@
t Clock%0A
-
from kiv
@@ -359,16 +359,57 @@
Adapter%0A
+from kivy.uix.boxlayout import BoxLayout%0A
from kiv
@@ -559,12 +559,12 @@
ass
-Rule
+Func
sVie
@@ -578,39 +578,8 @@
t):%0A
- subject = ObjectProperty()%0A
@@ -2295,28 +2295,327 @@
dapter, 'action')%0A )%0A
+%0A%0Aclass RulesView(Widget):%0A subject = ObjectProperty()%0A%0A%0Aclass RulesBench(BoxLayout):%0A subject = ObjectProperty()%0A%0A%0Akv = %22%22%22%0A%3CRulesBench%3E:%0A orientation: 'horizontal'%0A FuncsView:%0A engine: root.subject.engine%0A RulesView:%0A subject: root.subject%0A%22%22%22%0ABuilder.load_string(kv)%0A
|
2848c38519ee806acfe7c591cddb827dcb33c67f
|
check for r2 files for star
|
ehive/runnable/process/alignment/RunSTAR.py
|
ehive/runnable/process/alignment/RunSTAR.py
|
import os,json
from ehive.runnable.IGFBaseProcess import IGFBaseProcess
from igf_data.utils.tools.star_utils import Star_utils
from igf_data.utils.fileutils import get_datestamp_label
from igf_data.utils.tools.reference_genome_utils import Reference_genome_utils
class RunSTAR(IGFBaseProcess):
def param_defaults(self):
params_dict=super(RunSTAR,self).param_defaults()
params_dict.update({
'run_mode':'generate_aligned_bams',
'reference_type':'TRANSCRIPTOME_STAR',
'fasta_fai_reference_type':'GENOME_FAI',
'reference_gtf_type':'GENE_GTF',
'two_pass_mode':True,
'run_thread':4,
'r2_read_file':None,
'stranded':True,
'run_igf_id':None,
'star_patameters':'{"--outFilterMultimapNmax":20, \
"--alignSJoverhangMin":8, \
"--alignSJDBoverhangMin":1, \
"--outFilterMismatchNmax":999, \
"--outFilterMismatchNoverReadLmax":0.04, \
"--alignIntronMin":20, \
"--alignIntronMax":1000000, \
"--alignMatesGapMax":1000000, \
"--outSAMattributes":"NH HI AS NM MD", \
"--limitBAMsortRAM":12000000000 \
}'
})
return params_dict
def run(self):
'''
A method for running STAR alignment
'''
try:
project_igf_id=self.param_required('project_igf_id')
experiment_igf_id=self.param_required('experiment_igf_id')
sample_igf_id=self.param_required('sample_igf_id')
run_igf_id=self.param_required('run_igf_id')
star_exe=self.param_required('star_exe')
run_mode=self.param_required('run_mode')
output_prefix=self.param_required('output_prefix')
run_thread=self.param('run_thread')
igf_session_class=self.param_required('igf_session_class')
species_name=self.param('species_name')
reference_type=self.param('reference_type')
reference_gtf_type=self.param('reference_gtf_type')
fasta_fai_reference_type=self.param('fasta_fai_reference_type')
star_patameters=self.param('star_patameters')
two_pass_mode=self.param('two_pass_mode')
seed_date_stamp=self.param_required('date_stamp')
base_work_dir=self.param_required('base_work_dir')
seed_date_stamp=get_datestamp_label(seed_date_stamp)
work_dir_prefix=os.path.join(base_work_dir,
project_igf_id,
sample_igf_id,
experiment_igf_id)
if run_igf_id is not None:
work_dir_prefix=os.path.join(work_dir_prefix,
run_igf_id)
work_dir=self.get_job_work_dir(work_dir=work_dir_prefix) # get a run work dir
ref_genome=Reference_genome_utils(\
genome_tag=species_name,
dbsession_class=igf_session_class,
gene_gtf_type=reference_gtf_type,
fasta_fai_type=fasta_fai_reference_type,
star_ref_type=reference_type) # setup ref genome utils
star_ref=ref_genome.get_transcriptome_star() # get star ref
gene_gtf=ref_genome.get_gene_gtf() # get gtf file
genome_fai=ref_genome.get_genome_fasta_fai() # fetch genomic fasta fai index
if run_mode=='generate_aligned_bams':
if run_igf_id is None:
raise ValueError('No Run igf id found')
r1_read_file=self.param_required('r1_read_file')
r2_read_file=self.param('r2_read_file')
input_fastq_list=list()
input_fastq_list.append(r1_read_file[0]) # get the first input
if r2_read_file is not None:
input_fastq_list.append(r2_read_file[0]) # get the first input
star_obj=Star_utils(star_exe=star_exe,
input_files=input_fastq_list,
genome_dir=star_ref,
reference_gtf=gene_gtf,
output_dir=work_dir,
output_prefix=output_prefix,
threads=run_thread) # set up star for run
if two_pass_mode is None:
two_pass_mode=True
elif two_pass_mode==0:
          two_pass_mode=False # reset star two-pass mode
if isinstance(star_patameters, str):
star_patameters=json.loads(star_patameters) # convert string param to dict
genomic_bam,transcriptomic_bam,star_log_file,star_cmd=\
star_obj.generate_aligned_bams(two_pass_mode=two_pass_mode,
star_patameters=star_patameters) # run star cmd
self.param('dataflow_params',
{'star_genomic_bam':genomic_bam,
'star_transcriptomic_bam':transcriptomic_bam,
'star_log_file':star_log_file,
'seed_date_stamp':seed_date_stamp
})
elif run_mode=='generate_rna_bigwig':
input_bam=self.param_required('input_bam')
bedGraphToBigWig_path=self.param_required('bedGraphToBigWig_path')
chrom_length_file=genome_fai
stranded=self.param('stranded')
star_obj=Star_utils(star_exe=star_exe,
input_files=[input_bam],
genome_dir=star_ref,
reference_gtf=gene_gtf,
output_dir=work_dir,
output_prefix=output_prefix,
threads=run_thread) # set up star for run
output_paths,star_cmd=star_obj.\
generate_rna_bigwig(\
bedGraphToBigWig_path=bedGraphToBigWig_path,
chrom_length_file=chrom_length_file,
stranded=stranded,
) # generate bigwig signal tracks
self.param('dataflow_params',{'star_bigwigs':output_paths}) # passing bigwig paths to dataflow
message='finished star for {0} {1}'.\
format(project_igf_id,
run_igf_id)
self.post_message_to_slack(message,reaction='pass') # send log to slack
message='STAR {0} {1} command: {2}'.\
format(run_igf_id,
output_prefix,
star_cmd)
self.comment_asana_task(task_name=project_igf_id, comment=message) # send commandline to Asana
except Exception as e:
message='project: {2}, sample:{3}, Error in {0}: {1}'.format(self.__class__.__name__, \
e, \
project_igf_id,
sample_igf_id)
self.warning(message)
self.post_message_to_slack(message,reaction='fail') # post msg to slack for failed jobs
raise
|
Python
| 0
|
@@ -3938,32 +3938,69 @@
file is not None
+ and %5C%0A len(r2_read_file)%3E0
:%0A inpu
|
c4d809a3b8ccb24d684c489925dd6c9634dbdf55
|
Remove use of DesiredCapabilities object, use Options object instead (#981)
|
splinter/driver/webdriver/firefox.py
|
splinter/driver/webdriver/firefox.py
|
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from selenium.webdriver import DesiredCapabilities, Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import (
BaseWebDriver,
WebDriverElement as WebDriverElement,
)
from splinter.driver.webdriver.cookie_manager import CookieManager
from selenium.webdriver.firefox.options import Options
class WebDriver(BaseWebDriver):
driver_name = "Firefox"
def __init__(
self,
options=None,
profile=None,
extensions=None,
user_agent=None,
profile_preferences=None,
fullscreen=False,
wait_time=2,
capabilities=None,
headless=False,
incognito=False,
**kwargs
):
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference("extensions.logging.enabled", False)
firefox_profile.set_preference("network.dns.disableIPv6", False)
firefox_capabilities = DesiredCapabilities().FIREFOX
firefox_capabilities["marionette"] = True
options = options or Options()
if capabilities:
for key, value in capabilities.items():
# Selenium 3
firefox_capabilities[key] = value
# Selenium 4
options.set_capability(key, value)
if user_agent is not None:
firefox_profile.set_preference("general.useragent.override", user_agent)
if profile_preferences:
for key, value in profile_preferences.items():
firefox_profile.set_preference(key, value)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
if headless:
options.add_argument("--headless")
if incognito:
options.add_argument("-private")
self.driver = Firefox(
firefox_profile,
capabilities=firefox_capabilities,
options=options,
**kwargs
)
if fullscreen:
self.driver.fullscreen_window()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__(wait_time)
|
Python
| 0
|
@@ -211,29 +211,8 @@
port
- DesiredCapabilities,
Fir
@@ -216,16 +216,16 @@
Firefox%0A
+
from sel
@@ -1087,120 +1087,8 @@
e)%0A%0A
- firefox_capabilities = DesiredCapabilities().FIREFOX%0A firefox_capabilities%5B%22marionette%22%5D = True%0A%0A
@@ -1204,117 +1204,8 @@
():%0A
- # Selenium 3%0A firefox_capabilities%5Bkey%5D = value%0A%0A # Selenium 4%0A
@@ -1847,55 +1847,8 @@
le,%0A
- capabilities=firefox_capabilities,%0A
@@ -1872,16 +1872,16 @@
ptions,%0A
-
@@ -1888,24 +1888,25 @@
**kwargs
+,
%0A )%0A%0A
|
546b55248457055c4803d7ea65c21b92276309bd
|
Reformat and update copyright.
|
spotseeker_server/views/add_image.py
|
spotseeker_server/views/add_image.py
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
""" Copyright 2012, 2013 UW Information Technology, University of Washington
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Changes
=================================================================
    sbutler1@illinois.edu: adapt to a simpler RESTDispatch framework.
"""
from spotseeker_server.views.rest_dispatch import RESTDispatch, RESTException
from spotseeker_server.models import SpotImage, Spot
from django.http import HttpResponse
from spotseeker_server.require_auth import *
from PIL import Image
class AddImageView(RESTDispatch):
""" Saves a SpotImage for a particular Spot on POST to
/api/v1/spot/<spot id>/image.
"""
@user_auth_required
@admin_auth_required
def POST(self, request, spot_id):
spot = Spot.objects.get(pk=spot_id)
if "image" not in request.FILES:
raise RESTException("No image", 400)
args = {
'upload_application': request.META.get('SS_OAUTH_CONSUMER_NAME',
''),
'upload_user': request.META.get('SS_OAUTH_USER', ''),
'description': request.POST.get('description', ''),
'display_index': request.POST.get('display_index'),
'image': request.FILES['image']
}
if args['display_index'] is None:
# TODO: is there a better way?
# get display_indexes for all of the existing images
# and set the new one to the biggest + 1
indices = [img.display_index for img in spot.spotimage_set.all()]
if indices:
args['display_index'] = max(indices) + 1
else:
args['display_index'] = 0
image = spot.spotimage_set.create(**args)
response = HttpResponse(status=201)
response["Location"] = image.rest_url()
return response
|
Python
| 0
|
@@ -88,643 +88,8 @@
%0A%22%22%22
- Copyright 2012, 2013 UW Information Technology, University of Washington%0A%0A Licensed under the Apache License, Version 2.0 (the %22License%22);%0A you may not use this file except in compliance with the License.%0A You may obtain a copy of the License at%0A%0A http://www.apache.org/licenses/LICENSE-2.0%0A%0A Unless required by applicable law or agreed to in writing, software%0A distributed under the License is distributed on an %22AS IS%22 BASIS,%0A WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A See the License for the specific language governing permissions and%0A limitations under the License.%0A%0A
Cha
@@ -518,17 +518,16 @@
%0A %22%22%22
-
Saves a
@@ -569,20 +569,16 @@
POST to%0A
-
/api
@@ -611,16 +611,17 @@
%22%22%22%0A
+%0A
@use
@@ -860,25 +860,25 @@
-'
+%22
upload_appli
@@ -879,25 +879,25 @@
_application
-'
+%22
: request.ME
@@ -895,33 +895,50 @@
equest.META.get(
-'
+%0A %22
SS_OAUTH_CONSUME
@@ -947,48 +947,14 @@
NAME
-',%0A
+%22, %22%22%0A
@@ -965,12 +965,8 @@
- ''
),%0A
@@ -976,17 +976,17 @@
-'
+%22
upload_u
@@ -988,17 +988,17 @@
oad_user
-'
+%22
: reques
@@ -1008,17 +1008,17 @@
ETA.get(
-'
+%22
SS_OAUTH
@@ -1022,21 +1022,21 @@
UTH_USER
-', ''
+%22, %22%22
),%0A
@@ -1042,17 +1042,17 @@
-'
+%22
descript
@@ -1054,17 +1054,17 @@
cription
-'
+%22
: reques
@@ -1074,17 +1074,17 @@
OST.get(
-'
+%22
descript
@@ -1090,13 +1090,13 @@
tion
-', ''
+%22, %22%22
),%0A
@@ -1106,17 +1106,17 @@
-'
+%22
display_
@@ -1120,17 +1120,17 @@
ay_index
-'
+%22
: reques
@@ -1140,17 +1140,17 @@
OST.get(
-'
+%22
display_
@@ -1154,17 +1154,17 @@
ay_index
-'
+%22
),%0A
@@ -1170,23 +1170,23 @@
-'
+%22
image
-'
+%22
: reques
@@ -1197,16 +1197,17 @@
LES%5B
-'
+%22
image
-'%5D
+%22%5D,
%0A
@@ -1225,25 +1225,25 @@
if args%5B
-'
+%22
display_inde
@@ -1243,17 +1243,17 @@
ay_index
-'
+%22
%5D is Non
@@ -1531,33 +1531,33 @@
args%5B
-'
+%22
display_index'%5D
@@ -1549,25 +1549,25 @@
isplay_index
-'
+%22
%5D = max(indi
@@ -1614,17 +1614,17 @@
args%5B
-'
+%22
display_
@@ -1632,9 +1632,9 @@
ndex
-'
+%22
%5D =
|
913163a1acddc1d846eb269c04ae3dc60ecbc2bd
|
Update LongitudinalController.py
|
workspace/src/labs/src/lab5/LongitudinalController.py
|
workspace/src/labs/src/lab5/LongitudinalController.py
|
#!/usr/bin/env python
import rospy
import time
from barc.msg import ECU, Encoder
from numpy import pi
# from encoder
v_meas = 0.0
t0 = time.time()
ang_km1 = 0.0
ang_km2 = 0.0
n_FL = 0.0
n_FR = 0.0
n_BL = 0.0
n_BR = 0.0
r_tire = 0.05 # radius of the tire
servo_pwm = 1580.0
motor_pwm = 1500.0
motor_pwm_offset = 1500.0
# reference speed
v_ref = 0.5 # reference speed is 0.5 m/s
# ===================================PID longitudinal controller================================#
class PID():
def __init__(self, kp=1, ki=1, kd=1, integrator=0, derivator=0):
self.kp = kp
self.ki = ki
self.kd = kd
self.integrator = integrator
self.derivator = derivator
self.integrator_max = 10
self.integrator_min = -10
def acc_calculate(self, speed_reference, speed_current):
self.error = speed_reference - speed_current
# Propotional control
self.P_effect = self.kp*self.error
# Integral control
self.integrator = self.integrator + self.error
## Anti windup
if self.integrator >= self.integrator_max:
self.integrator = self.integrator_max
if self.integrator <= self.integrator_min:
self.integrator = self.integrator_min
self.I_effect = self.ki*self.integrator
# Derivative control
self.derivator = self.error - self.derivator
self.D_effect = self.kd*self.derivator
self.derivator = self.error
acc = self.P_effect + self.I_effect + self.D_effect
if acc <= 0:
acc = 20
return acc
# =====================================end of the controller====================================#
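# Illustrative usage of the PID class above (gains here are assumptions, not tuned values):
#   speed_pid = PID(kp=1.0, ki=0.5, kd=0.1)
#   acc = speed_pid.acc_calculate(v_ref, v_meas)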
# state estimation node
def controller():
global motor_pwm, servo_pwm, motor_pwm_offset
global v_ref, v_meas
# Initialize node:
rospy.init_node('simulationGain', anonymous=True)
# TODO: Add your necessary topic subscriptions / publications, depending on your preferred method of velocity estimation
ecu_pub = rospy.Publisher('ecu_pwm', ECU, queue_size = 10)
# Set node rate
loop_rate = 50
rate = rospy.Rate(loop_rate)
# TODO: Initialize your PID controller here
while not rospy.is_shutdown():
# acceleration calculated from PID controller.
motor_pwm = PID_control.acc_calculate(v_ref, v_meas) + motor_pwm_offset
rospy.logwarn("pwm = {}".format(motor_pwm))
# publish control command
ecu_pub.publish( ECU(motor_pwm, servo_pwm) )
# wait
rate.sleep()
if __name__ == '__main__':
try:
controller()
except rospy.ROSInterruptException:
pass
|
Python
| 0
|
@@ -160,116 +160,8 @@
e()%0A
-ang_km1 = 0.0%0Aang_km2 = 0.0%0An_FL = 0.0%0An_FR = 0.0%0An_BL = 0.0%0An_BR = 0.0%0A
r_ti
@@ -264,16 +264,17 @@
1500.0%0A
+%0A
# refere
@@ -284,16 +284,16 @@
speed %0A
-
v_ref =
@@ -297,16 +297,21 @@
= 0.5 #
+ give
referen
@@ -2198,15 +2198,87 @@
here
+, with your chosen PI gains%0A PID_control = PID(kp = 1, ki = 1, kd = 0)
%0A
-
%0A
-%0A
@@ -2317,16 +2317,26 @@
#
+ calculate
acceler
@@ -2345,19 +2345,8 @@
ion
-calculated
from
@@ -2446,59 +2446,9 @@
set%0A
+
- rospy.logwarn(%22pwm = %7B%7D%22.format(motor_pwm))
%0A
|
b6363044cac862dd5bef54bc210c4beceaa90bdd
|
refactor fixtures and add 2 more tests for old collections
|
test/test_collection.py
|
test/test_collection.py
|
import pytest
import json
from girder.models.collection import Collection
from pytest_girder.assertions import assertStatusOk
@pytest.fixture
def collections(db):
yield [
Collection().createCollection('private collection', public=False),
Collection().createCollection('public collection', public=True)
]
@pytest.fixture
def collection(db):
yield Collection().createCollection('public collection', public=True)
@pytest.fixture
def collectionWithMeta(db, collection, metadata):
def _collectionWithMeta(_metadata=None):
if _metadata is None:
_metadata = metadata
return Collection().setMetadata(collection, _metadata)
yield _collectionWithMeta
@pytest.fixture
def metadata():
return {
'key': 'value',
'apple': 'fruit'
}
@pytest.fixture
def users(admin, user):
yield [admin, user, None]
@pytest.mark.parametrize('userIdx,expected', [
(0, 2),
(1, 1),
(2, 1)
])
def testCollectionsCount(server, userIdx, expected, collections, users):
resp = server.request(path='/collection/details', user=users[userIdx])
assertStatusOk(resp)
assert resp.json['nCollections'] == expected
def testSingleCollectionMetaExists(server, collection, admin):
resp = server.request(path='/collection/%s' % collection['_id'], user=admin)
assertStatusOk(resp)
assert 'meta' in resp.json
def testListCollectionMetaExists(server, collection, admin):
resp = server.request(path='/collection', user=admin)
assertStatusOk(resp)
assert all(('meta' in x) for x in resp.json)
def testCollectionSetMetadata(server, collection, metadata, admin):
resp = server.request(
path='/collection/%s/metadata' % collection['_id'],
user=admin,
method='PUT',
body=json.dumps(metadata),
type='application/json')
assertStatusOk(resp)
assert resp.json['meta'] == metadata
# Check that fetching the object again yields the same result
newDoc = server.request(
path='/collection/%s' % collection['_id'],
user=admin,
method='GET')
assert newDoc.json['meta'] == metadata
def testCollectionDeleteMetadata(server, collectionWithMeta, metadata, admin):
collection = collectionWithMeta(metadata)
resp = server.request(
path='/collection/%s/metadata' % collection['_id'],
user=admin,
method='DELETE',
body=json.dumps(list(metadata.keys())),
type='application/json')
assertStatusOk(resp)
assert resp.json['meta'] != metadata
assert resp.json['meta'] == {}
newDoc = server.request(
path='/collection/%s' % collection['_id'],
user=admin,
method='GET')
assert newDoc.json['meta'] != metadata
assert newDoc.json['meta'] == {}
# Model Layer
def testCollectionModelSetMetadata(collection, metadata):
updatedCollection = Collection().setMetadata(collection, metadata)
assert updatedCollection['meta'] == metadata
# Model Layer
def testCollectionModelDeleteMetadata(collectionWithMeta, metadata):
collection = collectionWithMeta(metadata)
noMeta = Collection().deleteMetadata(collection, list(metadata.keys()))
assert noMeta['meta'] == {}
# Model Layer
def testCollectionLoad(collection, admin):
loadedCollection = Collection().load(collection['_id'], user=admin)
assert 'meta' in loadedCollection
# Model Layer
def testCollectionFilter(collection):
loadedCollection = Collection().filter(collection)
assert 'meta' in loadedCollection
|
Python
| 0
|
@@ -437,17 +437,16 @@
=True)%0A%0A
-%0A
@pytest.
@@ -449,33 +449,36 @@
est.fixture%0Adef
-c
+oldC
ollectionWithMet
@@ -470,24 +470,16 @@
llection
-WithMeta
(db, col
@@ -489,143 +489,301 @@
tion
-, metadata):%0A def _collectionWithMeta(_metadata=None):%0A if _metadata is None:%0A _metadata = metadata%0A return
+):%0A del collection%5B'meta'%5D%0A collection = Collection().save(collection)%0A assert 'meta' not in collection%0A yield collection%0A%0A%0A@pytest.fixture%0Adef oldCollections(db, collections):%0A for i, collection in enumerate(collections):%0A del collection%5B'meta'%5D%0A collections%5Bi%5D =
Col
@@ -789,34 +789,27 @@
llection().s
-etMetadata
+ave
(collection,
@@ -811,21 +811,53 @@
tion
-, _metadata)%0A
+)%0A assert 'meta' not in collections%5Bi%5D
%0A
@@ -863,17 +863,16 @@
yield
-_
collecti
@@ -873,24 +873,17 @@
llection
-WithMeta
+s
%0A%0A%0A@pyte
@@ -917,14 +917,13 @@
-return
+yield
%7B%0A
@@ -1573,51 +1573,465 @@
test
-ListCollectionMetaExists(server, collection
+SingleOldCollectionMetaExists(server, oldCollection, admin):%0A resp = server.request(path='/collection/%25s' %25 oldCollection%5B'_id'%5D, user=admin)%0A assertStatusOk(resp)%0A assert 'meta' in resp.json%0A%0A%0Adef testListCollectionMetaExists(server, collections, admin):%0A resp = server.request(path='/collection', user=admin)%0A assertStatusOk(resp)%0A assert all(('meta' in x) for x in resp.json)%0A%0A%0Adef testListOldCollectionMetaExists(server, oldCollections
, ad
@@ -2784,32 +2784,24 @@
, collection
-WithMeta
, metadata,
@@ -2817,33 +2817,33 @@
collection =
-c
+C
ollectionWithMet
@@ -2831,33 +2831,51 @@
= Collection
-WithMeta(
+().setMetadata(collection,
metadata)%0A
@@ -3647,24 +3647,16 @@
llection
-WithMeta
, metada
@@ -3677,17 +3677,17 @@
ction =
-c
+C
ollectio
@@ -3691,17 +3691,35 @@
tion
-WithMeta(
+().setMetadata(collection,
meta
|
9ac7be20f3b25ca768f7260900928b2c7224f470
|
Improve correlator test
|
test/test_correlator.py
|
test/test_correlator.py
|
# http://www.apache.org/licenses/LICENSE-2.0
import unittest
import time
import numpy as np
import auspex.config as config
config.auspex_dummy_mode = True
from auspex.experiment import Experiment
from auspex.stream import DataStream, DataAxis, DataStreamDescriptor, OutputConnector
from auspex.filters.debug import Print, Passthrough
from auspex.filters.correlator import Correlator
from auspex.filters.io import DataBuffer
from auspex.log import logger
class CorrelatorExperiment(Experiment):
# DataStreams
chan1 = OutputConnector()
chan2 = OutputConnector()
# Constants
samples = 100
idx_1 = 0
idx_2 = 0
# For correlator verification
vals = 2.0 + np.linspace(0, 10*np.pi, samples)
def init_streams(self):
self.chan1.add_axis(DataAxis("samples", list(range(self.samples))))
self.chan2.add_axis(DataAxis("samples", list(range(self.samples))))
def run(self):
logger.debug("Data taker running (inner loop)")
while self.idx_1 < self.samples or self.idx_2 < self.samples:
# Generate random number of samples:
new_1 = np.random.randint(1,5)
new_2 = np.random.randint(1,5)
if self.chan1.points_taken.value < self.chan1.num_points():
if self.chan1.points_taken.value + new_1 > self.chan1.num_points():
new_1 = self.chan1.num_points() - self.chan1.points_taken.value
self.chan1.push(self.vals[self.idx_1:self.idx_1+new_1])
self.idx_1 += new_1
if self.chan2.points_taken.value < self.chan2.num_points():
if self.chan2.points_taken.value + new_2 > self.chan2.num_points():
new_2 = self.chan2.num_points() - self.chan2.points_taken.value
self.chan2.push(self.vals[self.idx_2:self.idx_2+new_2])
self.idx_2 += new_2
time.sleep(0.002)
logger.debug("Idx_1: %d, Idx_2: %d", self.idx_1, self.idx_2)
class CorrelatorTestCase(unittest.TestCase):
def test_correlator(self):
exp = CorrelatorExperiment()
corr = Correlator(name='corr')
buff = DataBuffer()
edges = [(exp.chan1, corr.sink),
(exp.chan2, corr.sink),
(corr.source, buff.sink)]
exp.set_graph(edges)
exp.run_sweeps()
corr_data = buff.output_data['corr']
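        # The correlator multiplies its input streams elementwise; both channels push vals, so expect vals**2.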
expected_data = exp.vals*exp.vals
self.assertTrue(np.abs(np.sum(corr_data - expected_data)) <= 1e-4)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000001
|
@@ -2363,16 +2363,39 @@
weeps()%0A
+ time.sleep(0.1)
%0A
@@ -2554,9 +2554,9 @@
1e-
-4
+1
)%0A%0A%0A
|
5c064695d257c1dab055d36a5d1ab0a6d72fd854
|
add a test for using a Ducksboard service with no password
|
test/test_ducksboard.py
|
test/test_ducksboard.py
|
import json
import unittest
from libsaas.executors import test_executor
from libsaas.services import ducksboard
class DucksboardTestCase(unittest.TestCase):
def setUp(self):
self.executor = test_executor.use()
self.executor.set_response(b'{}', 200, {})
self.service = ducksboard.Ducksboard('apikey', 'pass')
def serialize(self, data):
return json.dumps(data)
def expect(self, uri, method=None, params=None, subdomain=None):
if not subdomain:
domain = 'app.ducksboard.com/api'
else:
domain = '{0}.ducksboard.com/values'.format(subdomain)
self.assertEqual(self.executor.request.uri,
'https://{0}/{1}'.format(domain, uri))
if method:
self.assertEqual(method, self.executor.request.method)
if params:
self.assertEqual(self.executor.request.params, params)
def test_dashboard(self):
self.service.dashboards().get()
self.expect('dashboards/', 'GET')
dashboard = {'name': 'x'}
self.service.dashboards().create(dashboard)
self.expect('dashboards/', 'POST', self.serialize(dashboard))
self.service.dashboard('slug').update(dashboard)
self.expect('dashboards/slug', 'PUT', self.serialize(dashboard))
self.service.dashboard('slug').delete()
self.expect('dashboards/slug', 'DELETE')
self.service.dashboard('slug').accessed()
self.expect('dashboards/slug/accessed', 'POST')
self.service.dashboard('slug').widgets()
self.expect('dashboards/slug/widgets/', 'GET')
self.service.dashboard('slug').tokens().get()
self.expect('dashboards/slug/tokens/', 'GET')
self.service.dashboard('slug').token('token').get()
self.expect('dashboards/slug/tokens/token', 'GET')
token = {'password': 'p'}
self.service.dashboard('slug').tokens().create(token)
self.expect('dashboards/slug/tokens/', 'POST', self.serialize(token))
self.service.dashboard('slug').token('token').delete()
self.expect('dashboards/slug/tokens/token', 'DELETE')
def test_widgets(self):
self.service.widgets().get()
self.expect('widgets/', 'GET')
widget = {'widget': 'x'}
self.service.widgets().create(widget)
self.expect('widgets/', 'POST', self.serialize(widget))
self.service.widget('id').update(widget)
self.expect('widgets/id', 'PUT', self.serialize(widget))
self.service.widget('id').delete()
self.expect('widgets/id', 'DELETE')
dashboard = {'dashboard': 'test'}
self.service.widget('id').copy('test')
self.expect('widgets/id/copy', 'POST', self.serialize(dashboard))
positions = {"7": {"row": 1, "column": 1}}
self.service.widgets().positions(positions)
self.expect('widgets/positions', 'POST', self.serialize(positions))
def test_accounts(self):
self.service.accounts().get()
self.expect('accounts/', 'GET')
account = {'account': 'x'}
self.service.accounts().create(account)
self.expect('accounts/', 'POST', self.serialize(account))
self.service.account('id').delete()
self.expect('accounts/id', 'DELETE')
self.service.account('id').get()
self.expect('accounts/id', 'GET')
def test_user(self):
self.service.user().get()
self.expect('user', 'GET')
user = {'name': 'x'}
self.service.user().update(user)
self.expect('user', 'PUT', self.serialize(user))
self.service.user().get_api_key()
self.expect('user/api_key', 'GET')
self.service.user().reset_api_key()
self.expect('user/api_key', 'POST')
def test_datasource(self):
ds = self.service.data_source('label')
value = {"value": 10}
ds.push(value)
self.expect('label', 'POST', self.serialize(value), 'push')
ds.delete()
self.expect('label', 'DELETE', subdomain='push')
ds.last(5)
self.expect('label/last', 'GET', {'count': 5}, subdomain='pull')
ds.since(500)
self.expect('label/since', 'GET', {'seconds': 500}, subdomain='pull')
ds.timespan('daily', 'UTC')
self.expect('label/timespan', 'GET',
{'timespan': 'daily', 'timezone': 'UTC'},
subdomain='pull')
# try binary and unicode labels
_lambda = b'\xce\xbb'
_ulambda = _lambda.decode('utf-8')
ds = self.service.data_source(_lambda)
ds.delete()
ds = self.service.data_source(_ulambda)
ds.delete()
|
Python
| 0
|
@@ -911,16 +911,242 @@
arams)%0A%0A
+ def test_default_password(self):%0A s = ducksboard.Ducksboard('apikey')%0A s.user().get()%0A%0A self.assertEqual(self.executor.request.headers%5B'Authorization'%5D,%0A 'Basic YXBpa2V5Og==')%0A%0A
def
|
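Decoded from the diff above, the new test added by this commit (exercising a Ducksboard client constructed without a password) reads roughly as follows:

    def test_default_password(self):
        s = ducksboard.Ducksboard('apikey')
        s.user().get()

        self.assertEqual(self.executor.request.headers['Authorization'],
                         'Basic YXBpa2V5Og==')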
0a40882c5b609522f12556d474954623483f6461
|
Add await_data in test_tail_gen
|
test/test_motor_tail.py
|
test/test_motor_tail.py
|
# Copyright 2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Motor, an asynchronous driver for MongoDB and Tornado."""
import datetime
import functools
import threading
import time
import unittest
from pymongo.errors import OperationFailure
from tornado import ioloop, gen
import motor
from test import host, port, MotorTest, async_test_engine, AssertRaises
class MotorTailTest(MotorTest):
def setUp(self):
super(MotorTailTest, self).setUp()
self.sync_db.capped.drop()
# autoIndexId catches test bugs that try to insert duplicate _id's
self.sync_db.create_collection(
'capped', capped=True, size=1000, autoIndexId=True)
self.sync_db.uncapped.drop()
self.sync_db.uncapped.insert({})
def test_tail_callback(self):
test_db = self.motor_connection(host, port).pymongo_test
capped = test_db.capped
self.check_required_callback(capped.find().tail)
def start_insertion_thread(self, pauses):
"""A thread that gradually inserts documents into a capped collection
"""
def add_docs():
i = 0
for pause in pauses:
if pause == 'drop':
self.sync_db.capped.drop()
else:
time.sleep(pause)
self.sync_db.capped.insert({'_id': i})
i += 1
t = threading.Thread(target=add_docs)
t.start()
return t
# Used by test_tail, test_tail_drop_collection, etc.
def each(self, results, n_expected, callback, result, error):
if error:
results.append(type(error))
elif result:
results.append(result)
if len(results) != n_expected:
# Continue
return
# Cancel iteration
callback()
return False
# Need at least one longish pause to ensure tail() recovers when cursor
# times out and returns None
tail_pauses = (
1, 0, 1, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0.1, 0.1, 0, 0)
@async_test_engine(timeout_sec=sum(tail_pauses) + 30)
def test_tail(self, done):
t = self.start_insertion_thread(self.tail_pauses)
results = []
each = functools.partial(
self.each, results, len(self.tail_pauses),
(yield gen.Callback('done')))
test_db = self.motor_connection(host, port).pymongo_test
capped = test_db.capped
# Note we do *not* pass tailable or await_data to find(), the
# convenience method handles it for us.
capped.find().tail(each)
yield gen.Wait('done')
self.assertEqual(
results,
[{'_id': i} for i in range(len(self.tail_pauses))])
t.join()
yield gen.Task(self.wait_for_cursors)
done()
@async_test_engine(timeout_sec=30)
def test_tail_empty(self, done):
pauses = (0, 1)
results = []
each = functools.partial(
self.each, results, len(pauses),
(yield gen.Callback('done')))
test_db = self.motor_connection(host, port).pymongo_test
capped = test_db.capped
capped.find().tail(each)
loop = ioloop.IOLoop.instance()
# Tail empty collection for a while before inserting
yield gen.Task(loop.add_timeout, datetime.timedelta(seconds=2))
t = self.start_insertion_thread(pauses)
yield gen.Wait('done')
self.assertEqual(
results,
[{'_id': i} for i in range(len(pauses))])
t.join()
yield gen.Task(self.wait_for_cursors)
done()
drop_collection_pauses = (0, 0, 1, 'drop', 1, 0, 0)
@async_test_engine(timeout_sec=30)
def test_tail_drop_collection(self, done):
# Ensure tail() throws error when its collection is dropped
t = self.start_insertion_thread(self.drop_collection_pauses)
results = []
each = functools.partial(
self.each, results, len(self.drop_collection_pauses),
(yield gen.Callback('done')))
test_db = self.motor_connection(host, port).pymongo_test
capped = test_db.capped
capped.find().tail(each)
yield gen.Wait('done')
# Don't assume that the first 3 results before the drop will be
# recorded -- dropping a collection kills the cursor even if not
# fully iterated.
self.assertTrue(OperationFailure in results)
self.assertFalse('cancelled' in results)
t.join()
yield gen.Task(self.wait_for_cursors)
done()
@async_test_engine()
def test_tail_uncapped_collection(self, done):
test_db = self.motor_connection(host, port).pymongo_test
uncapped = test_db.uncapped
yield AssertRaises(OperationFailure, uncapped.find().tail)
done()
@async_test_engine(timeout_sec=30)
def test_tail_nonempty_collection(self, done):
self.sync_db.capped.insert([{'_id': -2}, {'_id': -1}])
pauses = (0, 0, 1, 0, 0)
t = self.start_insertion_thread(pauses)
results = []
each = functools.partial(
self.each, results, len(pauses) + 2, (yield gen.Callback('done')))
test_db = self.motor_connection(host, port).pymongo_test
capped = test_db.capped
capped.find().tail(each)
yield gen.Wait('done')
self.assertEqual([{'_id': i} for i in range(-2, len(pauses))], results)
t.join()
yield gen.Task(self.wait_for_cursors)
done()
@async_test_engine(timeout_sec=30)
def test_tail_gen(self, done):
pauses = (1, 0.5, 1, 0, 0)
t = self.start_insertion_thread(pauses)
loop = ioloop.IOLoop.instance()
results = []
test_db = self.motor_connection(host, port).pymongo_test
capped = test_db.capped
cursor = capped.find(tailable=True, await_data=True)
while len(results) < len(pauses):
if not cursor.alive:
# While collection is empty, tailable cursor dies immediately
yield gen.Task(loop.add_timeout, time.time() + 0.1)
cursor = capped.find(tailable=True)
if (yield cursor.fetch_next):
results.append(cursor.next_object())
else:
yield gen.Task(loop.add_timeout, time.time() + 0.1)
t.join()
self.assertEqual([{'_id': i} for i in range(len(pauses))], results)
yield motor.Op(cursor.close)
yield gen.Task(self.wait_for_cursors)
done()
if __name__ == '__main__':
unittest.main()
|
Python
| 0.00005
|
@@ -6707,24 +6707,41 @@
ailable=True
+, await_data=True
)%0A%0A
|
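The patch adds await_data=True to a capped.find(tailable=True) call in test_tail_gen; judging by the offset it is the cursor that gets re-created when the tailable cursor dies on an empty collection (the initial find already passes await_data), so that block would read roughly:

            if not cursor.alive:
                # While collection is empty, tailable cursor dies immediately
                yield gen.Task(loop.add_timeout, time.time() + 0.1)
                cursor = capped.find(tailable=True, await_data=True)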
99241ab49a0a76472bb6f107a078248782af9626
|
fix string_types in _compat
|
myhdl/_compat.py
|
myhdl/_compat.py
|
import sys
PY2 = sys.version_info[0] == 2
if not PY2:
string_types = (str, unicode)
integer_types = (int,)
long = int
import builtins
else:
string_types = (str,)
integer_types = (int, long)
long = long
import __builtin__ as builtins
|
Python
| 0.999999
|
@@ -78,16 +78,8 @@
str,
- unicode
)%0A
@@ -160,32 +160,40 @@
ng_types = (str,
+ unicode
)%0A integer_ty
|
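After the fix, the two branches swap their string_types definitions so that unicode is only referenced on Python 2; the resulting _compat.py reads roughly (blank lines approximate):

import sys

PY2 = sys.version_info[0] == 2

if not PY2:
    string_types = (str,)
    integer_types = (int,)
    long = int
    import builtins
else:
    string_types = (str, unicode)
    integer_types = (int, long)
    long = long
    import __builtin__ as builtins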
52429b9300f30ae28b81604bbc0933b456c654f2
|
use PORT instead of VCAP_APP_PORT
|
extensions/varnish/extension.py
|
extensions/varnish/extension.py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import os.path
import logging
_log = logging.getLogger('varnish')
DEFAULTS = {
'VARNISH_HOST': 'raw.githubusercontent.com',
'VARNISH_VERSION': '3.0.7',
'VARNISH_PACKAGE': 'varnish-{VARNISH_VERSION}.tar.gz',
'VARNISH_DOWNLOAD_URL': 'https://gitlab.liip.ch/chregu/cf-varnish-binary/raw/master/vendor/varnish-{VARNISH_VERSION}.tar.gz',
'VARNISHNCSA': 'no'
}
class VarnishInstaller(object):
def __init__(self, ctx):
self._log = _log
self._ctx = ctx
self._merge_defaults()
def _merge_defaults(self):
for key, val in DEFAULTS.iteritems():
if key not in self._ctx:
self._ctx[key] = val
def should_install(self):
return self._ctx['CACHE_SERVER'] == 'varnish'
def install(self):
_log.info("Installing Varnish METHOD")
self._builder.install()._installer._install_binary_from_manifest(
self._ctx['VARNISH_DOWNLOAD_URL'],
os.path.join('app'),
extract=True)
def preprocess_commands(ctx):
return (('mkdir', '/home/vcap/tmp/varnish/'),
('$HOME/.bp/bin/rewrite', '"$HOME/varnish/etc/varnish"'))
def service_commands(ctx):
returnVal = {
'varnish': (
'$HOME/varnish/sbin/varnishd',
'-F',
'-f $HOME/varnish/etc/varnish/default.vcl',
'-a 0.0.0.0:$VCAP_APP_PORT',
'-t 120',
'-w 50,1000,120',
'-s malloc,$VARNISH_MEMORY_LIMIT',
'-T 127.0.0.1:6082',
'-p http_resp_hdr_len=32768'
'2>&1'
)
}
if ('VARNISHNCSA' in ctx and ctx['VARNISHNCSA'] == "yes"):
varnishncsa = ('sleep 5;', '$HOME/varnish/bin/varnishncsa')
if 'VARNISHNCSA_OPTIONS' in ctx:
varnishncsa += (ctx.get('VARNISHNCSA_OPTIONS', format=False),)
returnVal['varnishncsa'] = varnishncsa
return returnVal
def service_environment(ctx):
if 'VARNISH_MEMORY_LIMIT' in ctx:
varnish_memory_limit = ctx['VARNISH_MEMORY_LIMIT']
else:
varnish_memory_limit = ctx['MEMORY_LIMIT'];
_log.info('Varnish memory limit is [%s]', varnish_memory_limit)
env = {
'LD_LIBRARY_PATH': '$LD_LIBRARY_PATH:$HOME/varnish/lib/varnish',
'VARNISH_MEMORY_LIMIT': varnish_memory_limit,
}
return env
def compile(install):
varnish = VarnishInstaller(install.builder._ctx)
if varnish.should_install():
_log.info("Installing Varnish")
(install
.package('VARNISH')
.config()
.from_application('.bp-config/varnish') # noqa
.or_from_build_pack('defaults/config/varnish/{VARNISH_VERSION}')
.to('varnish/etc/varnish')
.rewrite()
.done())
_log.info("Varnish Installed.")
return 0
|
Python
| 0.000001
|
@@ -2227,17 +2227,8 @@
.0:$
-VCAP_APP_
PORT
|
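The only change is the listen address passed to varnishd; after the patch the service command starts with (excerpt, remaining flags unchanged):

        'varnish': (
            '$HOME/varnish/sbin/varnishd',
            '-F',
            '-f $HOME/varnish/etc/varnish/default.vcl',
            '-a 0.0.0.0:$PORT',
            # ... remaining flags as in the original tuple ...
        )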
97fa2f41bb00d2ceb0726b9cffdaa7c4ea97bc45
|
Remove trailing whitespace from test/test_xml_parser.py
|
test/test_xml_parser.py
|
test/test_xml_parser.py
|
import unittest
from apel.db.loader.xml_parser import XMLParser, get_primary_ns
class XMLParserTest(unittest.TestCase):
'''
Test case for XMLParser
'''
data1 = '''<?xml version="1.0"?>
<ns:node xmlns:ns="http://fake.namespace.org" xmlns:ons="http://fake.othernamespace.org">
<ns:title>Some title</ns:title>
<ns:values>
<ns:value>data1</ns:value>
<ns:value>data2</ns:value>
</ns:values>
<ns:attributes ns:attr="value">
<ns:attribute ns:id="test1">attribute 1</ns:attribute>
<ns:attribute ns:id="test2">attribute 2</ns:attribute>
</ns:attributes>
<ns:mixednamespace>
<ons:attribute ons:type="test3">test4</ons:attribute>
<ons:attribute ons:type="nope">notthis</ons:attribute>
</ns:mixednamespace>
</ns:node>'''
def setUp(self):
self.parser = XMLParser(self.data1)
self.parser.NAMESPACE = 'http://fake.namespace.org'
self.parser.OTHERNAMESPACE = 'http://fake.othernamespace.org'
def test_get_text(self):
values = self.parser.doc.getElementsByTagNameNS(self.parser.NAMESPACE, 'value')
self.assertEqual('data1', self.parser.getText(values[0].childNodes))
self.assertEqual('data2', self.parser.getText(values[1].childNodes))
def test_get_tag_by_attr(self):
attributes = self.parser.doc.getElementsByTagNameNS(self.parser.NAMESPACE, 'attribute')
#print len(attributes)
self.assertEqual(len(self.parser.getTagByAttr(attributes, 'id', 'test1')), 1)
self.assertEqual(len(self.parser.getTagByAttr(attributes, 'id', 'test2')), 1)
def test_mixed_namespace(self):
"""Check that nested elements with a diff namespace can be retrieved."""
# Get all attribute nodes in 'ons' namespace
allnodes = self.parser.doc.getElementsByTagNameNS(self.parser.OTHERNAMESPACE, 'attribute')
# Get just the type=test3 nodes in 'ons' namespace
nodes = self.parser.getTagByAttr(allnodes, 'type', 'test3', self.parser.OTHERNAMESPACE)
# Check value in the first node is as expected
self.assertEqual(self.parser.getText(nodes[0].childNodes), 'test4')
def test_get_attr(self):
attributes_tag = self.parser.doc.getElementsByTagNameNS(self.parser.NAMESPACE, 'attributes')[0]
self.assertEqual(self.parser.getAttr(attributes_tag, 'attr'), 'value')
def test_get_primary_ns(self):
test_xml = '<?xml version="1.0" ?><ur:UsageRecord xmlns:ur="booboob"/>'
ns = get_primary_ns(test_xml)
self.assertEqual("booboob", ns)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.999959
|
@@ -160,20 +160,16 @@
'''%0A
-
%0A dat
@@ -418,19 +418,16 @@
values%3E%0A
-
%0A %3Cns:
@@ -975,20 +975,16 @@
ce.org'%0A
-
%0A def
@@ -1247,20 +1247,16 @@
Nodes))%0A
-
%0A def
|
37d20fe09aa19dde2ac50816958d9b1372bc76eb
|
indent (tabs!)
|
integration-test/1251-early-track-roads.py
|
integration-test/1251-early-track-roads.py
|
from . import FixtureTest
class EarlyUnclassifiedRoads(FixtureTest):
def test_early_track_road_z11_grade1_paved(self):
# asphalt, grade1, track (default zoom 11, no demotion)
self.load_fixtures([
'https://www.openstreetmap.org/way/329375413',
])
self.assert_has_feature(
11, 396, 781, 'roads',
{'kind': 'path',
'kind_detail': 'track'})
def test_early_track_road_z12_grade1_private(self):
# private, grade1, track (since private demoted from zoom 11)
self.load_fixtures([
'https://www.openstreetmap.org/way/10611894',
])
self.assert_no_matching_feature(
11, 330, 781, 'roads',
{ 'kind': 'path',
'kind_detail': 'track'})
self.assert_has_feature(
12, 661, 1562, 'roads',
{'kind': 'path',
'kind_detail': 'track'})
def test_early_track_road_z12_grade1_dirt(self):
# dirt, grade1, track (since dirt demoted from zoom 11)
self.load_fixtures([
'https://www.openstreetmap.org/way/286309045',
])
self.assert_no_matching_feature(
11, 399, 782, 'roads',
{ 'kind': 'path',
'kind_detail': 'track'})
self.assert_has_feature(
12, 799, 1565, 'roads',
{'kind': 'path',
'kind_detail': 'track'})
def test_early_track_road_z12_grade2_dirt(self):
# dirt, grade2, track (default zoom 12, no demotion)
self.load_fixtures([
'https://www.openstreetmap.org/way/330951783',
])
self.assert_has_feature(
12, 778, 1575, 'roads',
{'kind': 'path',
'kind_detail': 'track'})
def test_remain_z13_track_road_no_grade1(self):
# gravel, track (no grade so default track at zoom 13)
self.load_fixtures([
'https://www.openstreetmap.org/way/313839575',
])
self.assert_has_feature(
13, 1561, 3146, 'roads',
{'kind': 'path',
'kind_detail': 'track'})
def test_remain_z13_track_road_no_grade2(self):
# gravel, track (no grade so default track at zoom 13)
self.load_fixtures([
'https://www.openstreetmap.org/way/14351002',
])
self.assert_has_feature(
13, 1500, 3170, 'roads',
{'kind': 'path',
'kind_detail': 'track'})
def test_remain_z13_track_road_grade5_gravel(self):
# gravel, grade5, track (fails zoom 12 test
# so default zoom 13 for track)
self.load_fixtures([
'https://www.openstreetmap.org/way/10103047',
])
self.assert_has_feature(
13, 1550, 3167, 'roads',
{'kind': 'path',
'kind_detail': 'track'})
|
Python
| 0
|
@@ -792,26 +792,24 @@
%0A %0A
-
self.assert_
@@ -825,28 +825,24 @@
re(%0A
-
12, 661, 156
@@ -853,36 +853,32 @@
roads',%0A
-
-
%7B'kind': 'path',
@@ -870,36 +870,32 @@
'kind': 'path',%0A
-
'kind_d
|
2e761252093b41d33cf57599ba8f05ec01e90a6a
|
delete noneffective codes
|
fate_flow/flowpy/client/base.py
|
fate_flow/flowpy/client/base.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import json
import inspect
import requests
import traceback
from fate_flow.flowpy.client.api.base import BaseFlowAPI
def _is_api_endpoint(obj):
return isinstance(obj, BaseFlowAPI)
class BaseFlowClient:
API_BASE_URL = ''
def __new__(cls, *args, **kwargs):
self = super().__new__(cls)
api_endpoints = inspect.getmembers(self, _is_api_endpoint)
for name, api in api_endpoints:
print('name: {}, api: {}'.format(name, api))
api_cls = type(api)
api = api_cls(self)
setattr(self, name, api)
return self
def __init__(self, ip, port, version):
self._http = requests.Session()
self.ip = ip
self.port = port
self.version = version
def _request(self, method, url, **kwargs):
request_url = self.API_BASE_URL + url
try:
response = self._http.request(method=method, url=request_url, **kwargs)
return response
except Exception as e:
exc_type, exc_value, exc_traceback_obj = sys.exc_info()
response = {'retcode': 100, 'retmsg': str(e),
'traceback': traceback.format_exception(exc_type, exc_value, exc_traceback_obj)}
if 'Connection refused' in str(e):
response['retmsg'] = 'Connection refused, Please check if the fate flow service is started'
del response['traceback']
return response
@staticmethod
def _decode_result(response):
try:
result = json.loads(response.content.decode('utf-8', 'ignore'), strict=False)
except (TypeError, ValueError):
return response
else:
return result
def _handle_result(self, response):
if isinstance(response, requests.models.Response):
return response.json()
elif isinstance(response, dict):
return response
else:
return self._decode_result(response)
def get(self, url, **kwargs):
return self._request(method='get', url=url, **kwargs)
def post(self, url, **kwargs):
return self._request(method='post', url=url, **kwargs)
|
Python
| 0.000003
|
@@ -1039,65 +1039,8 @@
ts:%0A
- print('name: %7B%7D, api: %7B%7D'.format(name, api))%0A
|
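With the debug print removed, the endpoint-binding loop in BaseFlowClient.__new__ reduces to:

        for name, api in api_endpoints:
            api_cls = type(api)
            api = api_cls(self)
            setattr(self, name, api)
        return self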
7acc609cc75c315b32bbca5b0f4e39df942bbec1
|
Mention the real name in the email
|
fedorasummerofhardware/views.py
|
fedorasummerofhardware/views.py
|
import logging
from pyramid.url import route_url
from pyramid.security import remember, authenticated_userid, forget
from pyramid.exceptions import Forbidden
from pyramid.httpexceptions import HTTPFound, HTTPMovedPermanently
from pyramid.view import view_config
from pyramid_mailer import get_mailer
from pyramid_mailer.message import Message
from sqlalchemy import func
from fedora.client import FasProxyClient
from .models import DBSession, Application
log = logging.getLogger(__name__)
def login(username, password):
fas = FasProxyClient()
user = fas.get_user_info({'username': username, 'password': password})
roles = [g.name for g in user[1]['approved_memberships']]
return roles
def authorized_admin(request):
user = authenticated_userid(request)
settings = request.registry.settings
if not user:
raise Forbidden
if user not in settings['admin_usernames'].split():
request.session.flash('%s is not an administrator' % user)
raise Forbidden
return user
@view_config(route_name='login',
renderer='fedorasummerofhardware:templates/login.mak')
def login_view(request):
if request.POST:
log.info('Logging into admin view as %s' % request.params['username'])
try:
login(request.params['username'], request.params['password'])
except Exception, e:
request.session.flash('Invalid Fedora Credentials')
log.error(str(e))
return {}
headers = remember(request, request.params['username'])
response = HTTPFound(request.environ['HTTP_REFERER'])
response.headerlist.extend(headers)
return response
return {}
@view_config(route_name='logout')
def logout(request):
headers = forget(request)
return HTTPFound(location=request.application_url, headers=headers)
@view_config(route_name='home',
renderer='fedorasummerofhardware:templates/index.mak')
def index(request):
if 'HTTP_X_FORWARDED_PROTO' in request.environ and \
request.environ['HTTP_X_FORWARDED_PROTO'] != 'https':
return HTTPMovedPermanently(location='https://%s/' %
request.environ['HTTP_HOST'])
return {}
@view_config(route_name='csv',
renderer='fedorasummerofhardware:templates/csv.mak')
def csv(request):
authorized_admin(request)
return {'applications': DBSession.query(Application).all()}
@view_config(route_name='admin',
renderer='fedorasummerofhardware:templates/submissions.mak')
def admin(request):
authorized_admin(request)
all = DBSession.query(Application).all()
unapproved = DBSession.query(func.count(Application.hardware),
Application.hardware) \
.filter_by(approved=False).group_by(Application.hardware).all()
approved = DBSession.query(func.count(Application.hardware),
Application.hardware) \
.filter_by(approved=True).group_by(Application.hardware).all()
return {'applications': all, 'unapproved': unapproved, 'approved': approved}
@view_config(route_name='approve', renderer='json', accept='application/json')
def approve(request):
authorized_admin(request)
settings = request.registry.settings
application = DBSession.query(Application).get(int(request.params['id']))
hardware = application.hardware
num_hardware = int(settings['num_%s' % hardware])
num_approved = DBSession.query(Application).filter_by(
hardware=hardware, approved=True).count()
if num_approved >= num_hardware:
log.error('Unable to approve application: %s already approved for %s' %
(num_approved, hardware))
return {'error': 'There are already %s %s approved' %
(num_approved, hardware)}
log.info("Approving application: %s" % application)
application.approved = True
DBSession.commit()
mailer = get_mailer(request)
recipient = '%s@fedoraproject.org' % application.username
message = Message(subject=settings['email_subject'],
sender=settings['email_from'],
body=settings['email_body'] % (
request.application_url + '/accept'),
recipients=[recipient])
mailer.send_immediately(message, fail_silently=False)
return {}
@view_config(route_name='accept',
renderer='fedorasummerofhardware:templates/accept.mak')
def accept(request):
if 'HTTP_X_FORWARDED_PROTO' in request.environ and \
request.environ['HTTP_X_FORWARDED_PROTO'] != 'https':
return HTTPMovedPermanently(location='https://%s/accept' %
request.environ['HTTP_HOST'])
return {}
@view_config(route_name='save_address', request_method='POST')
def save_address(request):
username = request.params['username']
try:
login(username, request.params['password'])
except:
request.session.flash('Error: Invalid Fedora Credentials')
return HTTPFound(route_url('accept', request))
app = DBSession.query(Application).filter_by(username=username).first()
if not app:
request.session.flash('Error: You did not submit an application.')
return HTTPFound(route_url('accept', request))
if not app.approved:
request.session.flash('Error: Your application has not been approved.')
return HTTPFound(route_url('accept', request))
app.address = request.params['address']
mailer = get_mailer(request)
admins = request.registry.settings['admin_email'].split()
sender = request.registry.settings['email_from']
body = """\
Username: %s
Hardware: %s
Shield: %s
Date Submitted: %s
Address: %s
""" % (app.username, app.hardware, app.shield, app.date, app.address)
message = Message(subject="Address submitted for %s" % username,
sender=sender, recipients=admins, body=body)
DBSession.commit()
mailer.send_immediately(message, fail_silently=False)
request.session.flash('Your address has been submitted.')
return HTTPFound(request.application_url)
@view_config(route_name='submit', request_method='POST')
def submit(request):
def error(msg):
request.session.flash('Error: %s' % msg)
return HTTPFound(request.application_url)
username = request.params['username']
try:
groups = login(username, request.params['password'])
except:
return error('Invalid Fedora Credentials')
if 'cla_done' not in groups:
return error('You must first sign the Fedora CLA')
groups = [group for group in groups if not group.startswith('cla_')]
if not groups:
return error('You must be a member of at least one '
'non-CLA / FPCA Fedora Group')
hardware = request.registry.settings['hardware'].split()
if request.params['hardware'] not in hardware:
return error('Invalid hardware specified')
if DBSession.query(Application).filter_by(username=username).first():
return error('You can only submit one application')
application = Application(username=username,
realname=request.params['realname'],
hardware=request.params['hardware'],
shield=request.params.get('shield'),
text=request.params['text'])
DBSession.add(application)
DBSession.commit()
request.session.flash('Your application has been submitted!')
return HTTPFound(request.application_url)
|
Python
| 0.999999
|
@@ -5641,16 +5641,38 @@
= %22%22%22%5C%0A
+ Real Name: %25s%0A
@@ -5782,16 +5782,30 @@
%22%22%22 %25 (
+app.realname,
app.user
@@ -5835,16 +5835,27 @@
.shield,
+%0A
app.dat
|
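Decoded, the patch prepends a "Real Name" line to the notification body and passes app.realname as the first format argument, so the block reads roughly (line wrapping and whitespace approximate):

    body = """\
Real Name: %s
Username: %s
Hardware: %s
Shield: %s
Date Submitted: %s
Address: %s
""" % (app.realname, app.username, app.hardware, app.shield,
       app.date, app.address)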
a9cfd2bc842631431e20b6c13d3d98535b643b3b
|
Fix mispelling
|
ixdjango/management/commands/copystatic.py
|
ixdjango/management/commands/copystatic.py
|
"""
Copy static files to nginx location
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import logging
import os
from shutil import copy2, copystat
from django.conf import settings
from django.core.management.base import NoArgsCommand
LOGGER = logging.getLogger(__name__)
def copytree(src, dst):
"""
A version of copytree I don't hate
"""
if not (os.path.exists(dst) and os.path.isdir(dst)):
LOGGER.INFO("Creating directory %s", dst)
os.makedirs(dst)
copystat(src, dst)
for name in os.listdir(src):
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
LOGGER.DEBUG("Copying directory %s", name)
copytree(srcname, dstname)
else:
LOGGER.DEBUG("Copying file %s", name)
copy2(srcname, dstname)
class Command(NoArgsCommand):
"""
Copy static files to nginx location
"""
def handle_noargs(self, **options):
try:
static_dir = settings.NGINX_STATIC_DIR
except AttributeError:
static_dir = None
if not static_dir:
LOGGER.info("static dir not defined, copy static content skipped")
return
LOGGER.info("Copying static content to %s", static_dir)
copytree(settings.STATIC_ROOT, static_dir)
|
Python
| 0.999687
|
@@ -463,12 +463,12 @@
GER.
-INFO
+info
(%22Cr
@@ -711,37 +711,37 @@
LOGGER.
-DEBUG
+debug
(%22Copying direct
@@ -831,13 +831,13 @@
GER.
-DEBUG
+debug
(%22Co
|
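The misspelling is in the logger method names; after the fix copytree logs through the lowercase info/debug methods:

def copytree(src, dst):
    """
    A version of copytree I don't hate
    """
    if not (os.path.exists(dst) and os.path.isdir(dst)):
        LOGGER.info("Creating directory %s", dst)
        os.makedirs(dst)
    copystat(src, dst)
    for name in os.listdir(src):
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        if os.path.isdir(srcname):
            LOGGER.debug("Copying directory %s", name)
            copytree(srcname, dstname)
        else:
            LOGGER.debug("Copying file %s", name)
            copy2(srcname, dstname)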
b6df83242128c3c383696eae2057bd3725c3139e
|
Support arbitrary primary key fields
|
flask_mongoengine/wtf/fields.py
|
flask_mongoengine/wtf/fields.py
|
"""
Useful form fields for use with the mongoengine.
"""
from gettext import gettext as _
import json
import sys
from wtforms import widgets
from wtforms.fields import SelectFieldBase, TextAreaField, StringField
from wtforms.validators import ValidationError
from mongoengine.queryset import DoesNotExist
from mongoengine.python_support import txt_type, bin_type
__all__ = (
'ModelSelectField', 'QuerySetSelectField',
)
class QuerySetSelectField(SelectFieldBase):
"""
Given a QuerySet either at initialization or inside a view, will display a
select drop-down field of choices. The `data` property actually will
store/keep an ORM model instance, not the ID. Submitting a choice which is
not in the queryset will result in a validation error.
Specifying `label_attr` in the constructor will use that property of the
model instance for display in the list, else the model object's `__str__`
or `__unicode__` will be used.
If `allow_blank` is set to `True`, then a blank choice will be added to the
top of the list. Selecting this choice will result in the `data` property
being `None`. The label for the blank choice can be set by specifying the
`blank_text` parameter.
"""
widget = widgets.Select()
def __init__(self, label=u'', validators=None, queryset=None, label_attr='',
allow_blank=False, blank_text=u'---', **kwargs):
super(QuerySetSelectField, self).__init__(label, validators, **kwargs)
self.label_attr = label_attr
self.allow_blank = allow_blank
self.blank_text = blank_text
self.queryset = queryset
def iter_choices(self):
if self.allow_blank:
yield (u'__None', self.blank_text, self.data is None)
if self.queryset is None:
return
self.queryset.rewind()
for obj in self.queryset:
label = self.label_attr and getattr(obj, self.label_attr) or obj
if isinstance(self.data, list):
selected = obj in self.data
else:
selected = self._is_selected(obj)
yield (obj.id, label, selected)
def process_formdata(self, valuelist):
if valuelist:
if valuelist[0] == '__None':
self.data = None
else:
if self.queryset is None:
self.data = None
return
try:
# clone() because of https://github.com/MongoEngine/mongoengine/issues/56
obj = self.queryset.clone().get(id=valuelist[0])
self.data = obj
except DoesNotExist:
self.data = None
def pre_validate(self, form):
if not self.allow_blank or self.data is not None:
if not self.data:
raise ValidationError(_(u'Not a valid choice'))
def _is_selected(self, item):
return item == self.data
class QuerySetSelectMultipleField(QuerySetSelectField):
widget = widgets.Select(multiple=True)
def __init__(self, label=u'', validators=None, queryset=None, label_attr='',
allow_blank=False, blank_text=u'---', **kwargs):
super(QuerySetSelectMultipleField, self).__init__(label, validators, queryset, label_attr, allow_blank, blank_text, **kwargs)
def process_formdata(self, valuelist):
if valuelist:
if valuelist[0] == '__None':
self.data = None
else:
if not self.queryset:
self.data = None
return
self.queryset.rewind()
self.data = [obj for obj in self.queryset if str(obj.id) in valuelist]
if not len(self.data):
self.data = None
def _is_selected(self, item):
return item in self.data if self.data else False
class ModelSelectField(QuerySetSelectField):
"""
Like a QuerySetSelectField, except takes a model class instead of a
queryset and lists everything in it.
"""
def __init__(self, label=u'', validators=None, model=None, **kwargs):
queryset = kwargs.pop('queryset', model.objects)
super(ModelSelectField, self).__init__(label, validators, queryset=queryset, **kwargs)
class ModelSelectMultipleField(QuerySetSelectMultipleField):
"""
Allows multiple select
"""
def __init__(self, label=u'', validators=None, model=None, **kwargs):
queryset = kwargs.pop('queryset', model.objects)
super(ModelSelectMultipleField, self).__init__(label, validators, queryset=queryset, **kwargs)
class JSONField(TextAreaField):
def _value(self):
if self.raw_data:
return self.raw_data[0]
else:
return self.data and unicode(json.dumps(self.data)) or u''
def process_formdata(self, value):
if value:
try:
self.data = json.loads(value[0])
except ValueError:
raise ValueError(self.gettext(u'Invalid JSON data.'))
class DictField(JSONField):
def process_formdata(self, value):
super(DictField, self).process_formdata(value)
if value and not isinstance(self.data, dict):
raise ValueError(self.gettext(u'Not a valid dictionary.'))
class NoneStringField(StringField):
"""
Custom StringField that counts "" as None
"""
def process_formdata(self, valuelist):
if valuelist:
self.data = valuelist[0]
if self.data == "":
self.data = None
class BinaryField(TextAreaField):
"""
Custom TextAreaField that converts its value with bin_type.
"""
def process_formdata(self, valuelist):
if valuelist:
if sys.version_info >= (3, 0):
self.data = bin_type( valuelist[0], 'utf-8' )
else:
self.data = bin_type( valuelist[0] )
|
Python
| 0
|
@@ -2585,18 +2585,18 @@
e().get(
-id
+pk
=valueli
|
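The lookup in QuerySetSelectField.process_formdata switches from the hard-coded id field to pk, so documents whose primary key is not _id still resolve:

                try:
                    # clone() because of https://github.com/MongoEngine/mongoengine/issues/56
                    obj = self.queryset.clone().get(pk=valuelist[0])
                    self.data = obj
                except DoesNotExist:
                    self.data = None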
f8cf08e41ccb024683c8bdf075f129b64dc38190
|
Add support for executing the code without X
|
bitcoin_tools/analysis/plots.py
|
bitcoin_tools/analysis/plots.py
|
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
from bitcoin_tools import CFG
label_size = 11
mpl.rcParams['xtick.labelsize'] = label_size
mpl.rcParams['ytick.labelsize'] = label_size
mpl.rcParams['legend.numpoints'] = 1
def get_counts(samples, normalize=False):
"""
Counts the number of occurrences of each value in samples.
:param samples: list with the samples
:param normalize: boolean, indicates if counts have to be normalized
:return: list of two lists: first list returns x values (unique values in samples), second list returns occurrence
counts
"""
xs, ys = np.unique(samples, return_counts=True)
if normalize:
total = sum(ys)
ys = [float(y)/float(total) for y in ys]
return [xs, ys]
def get_cdf(samples, normalize=False):
"""
Compute the cumulative count over samples.
:param samples: list with the samples
:param normalize: boolean, indicates if counts have to be normalized
:return: list of two lists: first list returns x values (unique values in samples), second list returns cumulative
occurrence counts (number of samples with value <= xi).
"""
[xs, ys] = get_counts(samples, normalize)
ys = np.cumsum(ys)
return [xs, ys]
def plot_distribution(xs, ys, title, xlabel, ylabel, log_axis=None, save_fig=False, legend=None, legend_loc=1,
font_size=20, y_sup_lim=None):
"""
Plots a set of values (xs, ys) with matplotlib.
:param xs: either a list with x values or a list of lists, representing different sample sets to be plotted in the
same figure.
:param ys: either a list with y values or a list of lists, representing different sample sets to be plotted in the
same figure.
:param title: String, plot title
:param xlabel: String, label on the x axis
:param ylabel: String, label on the y axis
:param log_axis: String (accepted values are False, "x", "y" or "xy"), determines which axis are plotted using
logarithmic scale
:param save_fig: String, figure's filename or False (to show the interactive plot)
:param legend: list of strings with legend entries or None (if no legend is needed)
:param legend_loc: integer, indicates the location of the legend (if present)
:param font_size: integer, title, xlabel and ylabel font size
:param y_sup_lim: float, y axis superior limit (if None or not present, use default matplotlib value)
:return: None
:type: None
"""
plt.figure()
ax = plt.subplot(111)
# Plot data
if not (isinstance(xs[0], list) or isinstance(xs[0], np.ndarray)):
plt.plot(xs, ys) # marker='o'
else:
for i in range(len(xs)):
plt.plot(xs[i], ys[i], ' ', linestyle='solid') # marker='o'
# Plot title and xy labels
plt.title(title, {'color': 'k', 'fontsize': font_size})
plt.ylabel(ylabel, {'color': 'k', 'fontsize': font_size})
plt.xlabel(xlabel, {'color': 'k', 'fontsize': font_size})
# Change axis to log scale
if log_axis == "y":
plt.yscale('log')
elif log_axis == "x":
plt.xscale('log')
elif log_axis == "xy":
plt.loglog()
# Include legend
if legend:
lgd = ax.legend(legend, loc=legend_loc)
# Force y limit
if y_sup_lim:
ymin, ymax = plt.ylim()
plt.ylim(ymin, y_sup_lim)
# Output result
if save_fig:
plt.savefig(CFG.figs_path + save_fig + '.pdf', format='pdf', dpi=600)
plt.close()
else:
plt.show()
def plot_pie(values, labels, title, colors, save_fig=False, font_size=20, labels_out=False):
"""
Plots a set of values in a pie chart with matplotlib.
:param values: list of values to plot.
:param values: list of numbers
:param labels: List of labels (one label for each piece of the pie)
:type labels: str list
:param title: String, plot title
:type title: String
:param colors: List of colors (one color for each piece of the pie)
:type colors: str lit
:param save_fig: String, figure's filename or False (to show the interactive plot)
:param font_size: integer, title, xlabel and ylabel font size
"""
plt.figure()
ax = plt.subplot(111)
if labels_out:
# Plots percentages and labels as legend (in a separate box)
ax.pie(values, colors=colors,
autopct='%1.1f%%', startangle=90, pctdistance=1.3, wedgeprops={'linewidth': 0})
s = float(np.sum(values))
perc = [v/s*100 for v in values]
plt.legend(loc="best", labels=['%s, %1.1f %%' % (l, s) for l, s in zip(labels, perc)], fontsize="x-small")
else:
# Plot percentages in the pie and labels around the pie
ax.pie(values, labels=labels, colors=colors,
autopct='%1.1f%%', startangle=90, labeldistance=1.1, wedgeprops={'linewidth': 0})
# Equal aspect ratio ensures that pie is drawn as a circle
ax.axis('equal')
plt.title(title, {'color': 'k', 'fontsize': font_size})
# Output result
if save_fig:
plt.savefig(CFG.figs_path + save_fig + '.pdf', format='pdf', dpi=600)
else:
plt.show()
|
Python
| 0.000004
|
@@ -1,24 +1,34 @@
+import os%0A
import matplotlib as mpl
@@ -28,16 +28,74 @@
as mpl%0A
+if not %22DISPLAY%22 in os.environ.keys():%0A mpl.use('Agg')%0A
import m
|
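The patch selects the non-interactive Agg backend when no display is available, before pyplot is imported; the top of the module becomes:

import os
import matplotlib as mpl
if not "DISPLAY" in os.environ.keys():
    mpl.use('Agg')
import matplotlib.pyplot as plt
import numpy as np

from bitcoin_tools import CFG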
7888b2b14a26deead0b4f1559b755fcf17cbb6f8
|
correct style link
|
cte-collation-poc/fullbook.py
|
cte-collation-poc/fullbook.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import argparse
import requests
from lxml import etree
ARCHIVEJS = 'http://archive.cnx.org/contents/{}.json'
ARCHIVEHTML = 'http://archive.cnx.org/contents/{}.html'
NS = {'x': 'http://www.w3.org/1999/xhtml'}
HTMLWRAPPER = """<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>{title}</title>
<style href="styles.css" rel="stylsheet" type="text/css"/>
</head>
</html>
"""
parts = ['page', 'chapter', 'unit', 'book', 'series']
def debug(*args, **kwargs):
if verbose:
print(*args, file=sys.stderr, **kwargs)
def main(code, html_out=sys.stdout):
"""Generate complete book HTML."""
res = requests.get(ARCHIVEJS.format(code))
b_json = res.json()
html = etree.fromstring(HTMLWRAPPER.format(title=b_json['title']))
book_elem = etree.SubElement(html, 'body', attrib={'data-type': 'book'})
html_nest([b_json['tree']], book_elem)
print(etree.tostring(html), file=html_out)
def html_nest(tree, parent):
"""Recursively construct HTML nested div version of book tree."""
for node in tree:
div_elem = etree.SubElement(parent, 'div')
if node['id'] != 'subcol':
page_nodes(node['id'], div_elem)
mytype = parts.index(div_elem.get('data-type'))
if parent.get('data-type'):
parenttype = parts.index(parent.get('data-type'))
if parenttype <= mytype:
parent.set('data-type', parts[mytype + 1])
else:
parent.set('data-type', parts[mytype + 1])
title_xpath = etree.XPath("//x:div[@data-type='document-title']",
namespaces=NS)
try:
title_elem = title_xpath(div_elem)[0]
except IndexError:
title_elem = etree.SubElement(div_elem, 'div',
attrib={'data-type':
'document-title'})
title_elem.text = node['title']
debug(node['title'])
if 'contents' in node:
elem = etree.SubElement(div_elem, 'div')
html_nest(node['contents'], elem)
def page_nodes(page_id, elem):
"""Fetch page return body wrapped in provided element."""
debug(page_id)
res = requests.get(ARCHIVEHTML.format(page_id))
xpath = etree.XPath('//x:body', namespaces=NS)
body = xpath(etree.fromstring(res.content))[0]
elem.set('data-type', 'page')
for c in body.iterchildren():
elem.append(c)
return elem
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Assemble complete book "
"as single HTML file")
parser.add_argument("bookid", help="Identifier of book: "
"<uuid|shortId>[@ver]")
parser.add_argument("html_out", nargs="?",
type=argparse.FileType('w'),
help="assembled HTML file output (default stdout)",
default=sys.stdout)
parser.add_argument('-v', '--verbose', action='store_true',
help='Send debugging info to stderr')
args = parser.parse_args()
verbose = args.verbose
main(args.bookid, args.html_out)
|
Python
| 0.000001
|
@@ -367,21 +367,20 @@
title%3E%0A%3C
-style
+link
href=%22s
@@ -399,16 +399,17 @@
el=%22styl
+e
sheet%22 t
|
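The fix replaces the bogus <style href=...> element with a proper <link> and corrects the "stylsheet" typo, so HTMLWRAPPER becomes:

HTMLWRAPPER = """<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>{title}</title>
<link href="styles.css" rel="stylesheet" type="text/css"/>
</head>
</html>
"""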
df85906e8e2a872ca99002b26af6ea5d495b23ca
|
fix wrong document string
|
data_migrator/emitters/__init__.py
|
data_migrator/emitters/__init__.py
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from .mysql import MySQLEmitter
from .csv import CSVEmitter
"""
This module contains all classes for models, managers and fields
* :class:`BaseEmitter`
* :class:`MySQLEmitter`
* ...
"""
|
Python
| 0.999949
|
@@ -39,70 +39,8 @@
-*-%0A
-%0Afrom .mysql import MySQLEmitter%0Afrom .csv import CSVEmitter%0A%0A
%22%22%22%0A
@@ -158,12 +158,73 @@
%60%0A* ...%0A%22%22%22%0A
+%0Afrom .mysql import MySQLEmitter%0Afrom .csv import CSVEmitter%0A
|
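The imports are moved below the module docstring so the docstring is actually recognized as one; the module then reads:

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
This module contains all classes for models, managers and fields

* :class:`BaseEmitter`
* :class:`MySQLEmitter`
* ...
"""

from .mysql import MySQLEmitter
from .csv import CSVEmitter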
0a05e6479ee907c3702cc895c5a180cd816a5433
|
Build interdependencies.
|
d1_common_python/src/setup.py
|
d1_common_python/src/setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:mod:`setup`
====================
:Synopsis: Create egg.
:Author: DataONE (Dahl)
"""
from setuptools import setup, find_packages
setup(
name='Python DataONE Common',
#version=d1_client.__version__,
description='Contains functionality common to projects that interact with the DataONE infrastructure via Python',
author='DataONE Project',
url='http://dataone.org',
packages=find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires=[
# iso860
'iso8601 >= 0.1',
# PyXB
'pyxb >= 1.1.2',
],
package_data={
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
|
Python
| 0.00002
|
@@ -173,16 +173,33 @@
packages
+%0Aimport d1_common
%0A%0Asetup(
@@ -211,23 +211,16 @@
me='
-Python
DataONE
-
+_
Comm
@@ -226,17 +226,16 @@
mon',%0A
-#
version=
@@ -242,13 +242,13 @@
d1_c
-lient
+ommon
.__v
@@ -257,16 +257,113 @@
sion__,%0A
+ author='DataONE Project',%0A author_email='developers@dataone.org',%0A url='http://dataone.org',%0A
descri
@@ -476,59 +476,44 @@
,%0A
-author='DataONE Project',%0A url='http://dataone.org
+license='Apache License, Version 2.0
',%0A
@@ -630,21 +630,8 @@
s=%5B%0A
- # iso860%0A
@@ -652,19 +652,8 @@
1',%0A
- # PyXB%0A
|
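Reconstructing the diff (indentation and exact field order are approximate), setup.py ends up importing d1_common for its version string and carrying the package metadata inline, roughly:

from setuptools import setup, find_packages
import d1_common

setup(
  name='DataONE_Common',
  version=d1_common.__version__,
  author='DataONE Project',
  author_email='developers@dataone.org',
  url='http://dataone.org',
  description='Contains functionality common to projects that interact with the DataONE infrastructure via Python',
  license='Apache License, Version 2.0',
  packages=find_packages(),
  # Dependencies that are available through PYPI / easy_install.
  install_requires=[
    'iso8601 >= 0.1',
    'pyxb >= 1.1.2',
  ],
  package_data={
    # If any package contains *.txt or *.rst files, include them:
    '': ['*.txt', '*.rst'],
  }
)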
9664f6e6bf64e10fe0ce6fbfc3bbf20d4775cdb6
|
Update MotorsControlFile.py
|
ProBot_BeagleBone/MotorsControlFile.py
|
ProBot_BeagleBone/MotorsControlFile.py
|
#!/usr/bin/python
import SabertoothFile
import PWMFile
import ProBotConstantsFile
# Initialization of classes from local files
Sabertooth = SabertoothFile.SabertoothClass()
PWM = PWMFile.PWMClass()
Pconst = ProBotConstantsFile.Constants()
class MotorsControlClass():
def MotorsControl(self,rightMotor, leftMotor, userChoice):
if userChoice=='1':
# Sending the values to the Sabertooth that is connected to the motors
Sabertooth.drive(Pconst.addr, 1, int(rightMotor))
Sabertooth.drive(Pconst.addr, 2, int(leftMotor))
if userChoice=='2':
PWM.PWM_Signals(int(rightMotor), int(leftMotor))
|
Python
| 0
|
@@ -13,16 +13,50 @@
ython%0A %0A
+# Python Standart Library Imports%0A
import S
@@ -592,16 +592,93 @@
e=='2':%0A
+%09%09# Sending the values to the pwm controller that is connected to the motors%0A
%09%09PWM.PW
|
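The update only adds two comments, quoted verbatim from the patch (including its spelling): "# Python Standart Library Imports" above the import block, and a note before the PWM branch, which then reads roughly as follows (the original file indents this block with tabs):

        if userChoice=='2':
            # Sending the values to the pwm controller that is connected to the motors
            PWM.PWM_Signals(int(rightMotor), int(leftMotor))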
76a39d6ab95f3b036b93a4a4680f7a5e37e981ec
|
Fix ElasticNet distance unit test
|
foolbox/tests/test_distances.py
|
foolbox/tests/test_distances.py
|
import pytest
import numpy as np
from foolbox import distances
from pytest import approx
def test_abstract_distance():
with pytest.raises(TypeError):
distances.Distance()
def test_base_distance():
class TestDistance(distances.Distance):
def _calculate(self):
return 22, 2
distance = TestDistance(None, None, bounds=(0, 1))
assert distance.name() == 'TestDistance'
assert distance.value == 22
assert distance.gradient == 2
assert '2.2' in str(distance)
assert 'TestDistance' in str(distance)
assert distance == distance
assert not distance < distance
assert not distance > distance
assert distance <= distance
assert distance >= distance
with pytest.raises(TypeError):
distance < 3
with pytest.raises(TypeError):
distance == 3
def test_mse():
assert distances.MSE == distances.MeanSquaredDistance
def test_mae():
assert distances.MAE == distances.MeanAbsoluteDistance
def test_linf():
assert distances.Linf == distances.Linfinity
def test_mean_squared_distance():
d = distances.MeanSquaredDistance(
np.array([0, .5]),
np.array([.5, .5]),
bounds=(0, 1))
assert d.value == 1. / 8.
assert (d.gradient == np.array([.5, 0])).all()
def test_mean_absolute_distance():
d = distances.MeanAbsoluteDistance(
np.array([0, .5]),
np.array([.7, .5]),
bounds=(0, 1))
assert d.value == approx(0.35)
assert (d.gradient == np.array([0.5, 0])).all()
def test_linfinity():
d = distances.Linfinity(
np.array([0, .5]),
np.array([.7, .5]),
bounds=(0, 1))
assert d.value == approx(.7)
with pytest.raises(NotImplementedError):
d.gradient
def test_l0():
d = distances.L0(
np.array([0, .5]),
np.array([.7, .5]),
bounds=(0, 1))
assert d.value == approx(1.)
with pytest.raises(NotImplementedError):
d.gradient
def test_en():
en = distances.EN(0.1)
d = en(
np.array([0, .5]),
np.array([.7, .5]),
bounds=(0, 1))
assert d.value == approx(0.56)
with pytest.raises(NotImplementedError):
d.gradient
@pytest.mark.parametrize('Distance', [
distances.MeanSquaredDistance,
distances.MeanAbsoluteDistance,
distances.Linfinity,
distances.L0,
distances.EN(1),
])
def test_str_repr(Distance):
"""Tests that str and repr contain the value
and that str does not fail when initialized
with a value rather than calculated."""
reference = np.zeros((10, 10))
other = np.ones((10, 10))
d = Distance(reference, other, bounds=(0, 1))
assert isinstance(str(d), str)
if 'L0' in str(d):
assert '100' in str(d)
assert '100' in repr(d)
else:
assert '1.00e+' in str(d)
assert '1.00e+' in repr(d)
|
Python
| 0.000001
|
@@ -2148,67 +2148,55 @@
-with pytest.raises(NotImplementedError):%0A d.gradient
+assert (d.gradient == np.array(%5B2.4, 0%5D)).all()
%0A%0A%0A@
@@ -2364,17 +2364,17 @@
nces.EN(
-1
+0
),%0A%5D)%0Ade
|
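After the fix, test_en asserts the ElasticNet gradient directly instead of expecting NotImplementedError, and the parametrized case uses distances.EN(0) instead of distances.EN(1):

def test_en():
    en = distances.EN(0.1)
    d = en(
        np.array([0, .5]),
        np.array([.7, .5]),
        bounds=(0, 1))
    assert d.value == approx(0.56)
    assert (d.gradient == np.array([2.4, 0])).all()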
19b0391aad11748cfca4b22616159a7b2893ff9b
|
Change api to return money objects
|
bluebottle/utils/serializers.py
|
bluebottle/utils/serializers.py
|
from HTMLParser import HTMLParser
import re
from moneyed import Money
from rest_framework import serializers
from .validators import validate_postal_code
from .models import Address, Language
class MoneySerializer(serializers.DecimalField):
def __init__(self, max_digits=12, decimal_places=2, **kwargs):
super(MoneySerializer, self).__init__(
max_digits=max_digits,
decimal_places=decimal_places,
**kwargs
)
def to_representation(self, instance):
return instance.amount
def to_internal_value(self, data):
if not data:
return data
return Money(data, 'EUR')
class MoneyTotalSerializer(serializers.ListField):
"""
Serialize money totals with multiple currencies, e.g.
[(450, 'EUR'), (23050, 'XEF')]
"""
child = MoneySerializer()
class ShareSerializer(serializers.Serializer):
share_name = serializers.CharField(max_length=256, required=True)
share_email = serializers.EmailField(required=True)
share_motivation = serializers.CharField(default="")
share_cc = serializers.BooleanField(default=False)
project = serializers.CharField(max_length=256, required=True)
class LanguageSerializer(serializers.ModelSerializer):
class Meta:
model = Language
fields = ('id', 'code', 'language_name', 'native_name')
class MLStripper(HTMLParser):
""" Used to strip HTML tags for meta fields (e.g. description) """
def __init__(self):
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
class AddressSerializer(serializers.ModelSerializer):
def validate_postal_code(self, attrs, source):
value = attrs[source]
if value:
country_code = ''
if 'country' in attrs:
country_code = attrs['country']
elif self.object and self.object.country:
country_code = self.object.country.alpha2_code
if country_code:
validate_postal_code(value, country_code)
return attrs
class Meta:
model = Address
fields = (
'id', 'line1', 'line2', 'city', 'state', 'country', 'postal_code')
SCHEME_PATTERN = r'^https?://'
class URLField(serializers.URLField):
""" URLField allowing absence of url scheme """
def to_internal_value(self, value):
""" Allow exclusion of http(s)://, add it if it's missing """
if not value:
return None
m = re.match(SCHEME_PATTERN, value)
if not m: # no scheme
value = "http://%s" % value
return value
|
Python
| 0.000001
|
@@ -526,23 +526,105 @@
urn
-instance.amount
+%7B%0A 'amount': instance.amount,%0A 'currency': str(instance.currency)%0A %7D
%0A%0A
|
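MoneySerializer.to_representation now returns a money object (amount plus currency code) rather than the bare amount:

    def to_representation(self, instance):
        return {
            'amount': instance.amount,
            'currency': str(instance.currency)
        }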
8fe5beda751145b00ac2484e884ae40659649563
|
Set actual connection sockets non-blocking
|
networkserver.py
|
networkserver.py
|
# Eloipool - Python Bitcoin pool server
# Copyright (C) 2011-2012 Luke Dashjr <luke-jr+eloipool@utopios.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import asynchat
import logging
import select
import socket
from time import time
import traceback
from util import ScheduleDict, tryErr
EPOLL_READ = select.EPOLLIN | select.EPOLLPRI | select.EPOLLERR | select.EPOLLHUP
EPOLL_WRITE = select.EPOLLOUT
class SocketHandler:
ac_in_buffer_size = 4096
ac_out_buffer_size = 4096
def handle_close(self):
self.close()
def handle_error(self):
self.logger.debug(traceback.format_exc())
self.handle_close()
def handle_read(self):
try:
data = self.recv (self.ac_in_buffer_size)
except socket.error as why:
self.handle_error()
return
if self.closeme:
# All input is ignored from sockets we have "closed"
return
if isinstance(data, str) and self.use_encoding:
data = bytes(str, self.encoding)
self.ac_in_buffer = self.ac_in_buffer + data
self.handle_readbuf()
def push(self, data):
self.wbuf += data
self.server.register_socket_m(self.fd, EPOLL_READ | EPOLL_WRITE)
def handle_timeout(self):
self.close()
def handle_write(self):
if self.wbuf is None:
# Socket was just closed by remote peer
return
bs = self.socket.send(self.wbuf)
self.wbuf = self.wbuf[bs:]
if not len(self.wbuf):
if self.closeme:
self.close()
return
self.server.register_socket_m(self.fd, EPOLL_READ)
recv = asynchat.async_chat.recv
def close(self):
if self.wbuf:
self.closeme = True
return
self.server.unregister_socket(self.fd)
self.socket.close()
def changeTask(self, f, t = None):
tryErr(self.server.rmSchedule, self._Task, IgnoredExceptions=KeyError)
if f:
self._Task = self.server.schedule(f, t, errHandler=self)
def __init__(self, server, sock, addr):
self.ac_in_buffer = b''
self.wbuf = b''
self.closeme = False
self.server = server
self.socket = sock
self.addr = addr
self._Task = None
self.fd = sock.fileno()
server.register_socket(self.fd, self)
self.changeTask(self.handle_timeout, time() + 15)
class NetworkListener:
logger = logging.getLogger('SocketListener')
def __init__(self, server, server_address):
self.server = server
self.server_address = server_address
tryErr(self.setup_socket, server_address, Logger=self.logger, ErrorMsg=server_address)
def setup_socket(self, server_address):
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.setblocking(0)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except socket.error:
pass
sock.bind(server_address)
sock.listen(100)
self.server.register_socket(sock.fileno(), self)
self.socket = sock
def handle_read(self):
server = self.server
conn, addr = self.socket.accept()
h = server.RequestHandlerClass(server, conn, addr)
def handle_error(self):
# Ignore errors... like socket closing on the queue
pass
class AsyncSocketServer:
logger = logging.getLogger('SocketServer')
def __init__(self, RequestHandlerClass):
self.RequestHandlerClass = RequestHandlerClass
self._epoll = select.epoll()
self._fd = {}
self._sch = ScheduleDict()
self._schEH = {}
def register_socket(self, fd, o, eventmask = EPOLL_READ):
self._epoll.register(fd, eventmask)
self._fd[fd] = o
def register_socket_m(self, fd, eventmask):
try:
self._epoll.modify(fd, eventmask)
except IOError:
raise socket.error
def unregister_socket(self, fd):
del self._fd[fd]
try:
self._epoll.unregister(fd)
except IOError:
raise socket.error
def schedule(self, task, startTime, errHandler=None):
self._sch[task] = startTime
if errHandler:
self._schEH[id(task)] = errHandler
return task
def rmSchedule(self, task):
del self._sch[task]
k = id(task)
if k in self._schEH:
del self._schEH[k]
def serve_forever(self):
while True:
self.pre_schedule()
if len(self._sch):
timeNow = time()
while True:
timeNext = self._sch.nextTime()
if timeNow < timeNext:
timeout = timeNext - timeNow
break
f = self._sch.shift()
k = id(f)
EH = None
if k in self._schEH:
EH = self._schEH[k]
del self._schEH[k]
try:
f()
except socket.error:
if EH: tryErr(EH.handle_error)
except:
self.logger.error(traceback.format_exc())
if EH: tryErr(EH.handle_close)
if not len(self._sch):
timeout = -1
break
else:
timeout = -1
try:
events = self._epoll.poll(timeout=timeout)
except (IOError, select.error):
continue
except:
self.logger.error(traceback.format_exc())
for (fd, e) in events:
o = self._fd[fd]
try:
if e & EPOLL_READ:
o.handle_read()
if e & EPOLL_WRITE:
o.handle_write()
except socket.error:
tryErr(o.handle_error)
except:
self.logger.error(traceback.format_exc())
tryErr(o.handle_close)
|
Python
| 0
|
@@ -3417,16 +3417,42 @@
ccept()%0A
+%09%09conn.setblocking(False)%0A
%09%09h = se
|
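The accepted connection socket is switched to non-blocking immediately after accept(), matching the non-blocking listener socket (the file indents with tabs):

	def handle_read(self):
		server = self.server
		conn, addr = self.socket.accept()
		conn.setblocking(False)
		h = server.RequestHandlerClass(server, conn, addr)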
d78aa6e9a659c4ef5aa19024245eb0b3e56f2761
|
remove bytecode message
|
box/mixins.py
|
box/mixins.py
|
import re
import sublime
import os
from .utils import execute_command, read_registry
class RscriptMixin:
message_shown = False
def custom_env(self):
paths = self.additional_paths()
env = os.environ.copy()
if paths:
sep = ";" if sublime.platform() == "windows" else ":"
env["PATH"] = env["PATH"] + sep + sep.join(paths)
return env
def rcmd(self, script=None, file=None, args=None):
cmd = [self.rscript_binary()]
if script:
cmd = cmd + ["-e", script]
elif file:
cmd = cmd + [file]
if args:
cmd = cmd + args
try:
return execute_command(cmd, env=self.custom_env())
except FileNotFoundError:
print("Rscript binary not found.")
if not self.message_shown:
sublime.message_dialog(
"Rscript binary cannot be found automatically."
"The path to `Rscript` can be specified in the R-Box settings.")
self.message_shown = True
return ""
except Exception as e:
print("R-Box:", e)
return ""
def installed_packages(self):
return self.rcmd("cat(rownames(installed.packages()))").strip().split(" ")
def list_package_objects(self, pkg, exported_only=True):
if exported_only:
objects = self.rcmd("cat(getNamespaceExports(asNamespace('{}')))".format(pkg))
else:
objects = self.rcmd("cat(objects(asNamespace('{}')))".format(pkg))
return objects.strip().split(" ")
def get_function_call(self, pkg, funct):
out = self.rcmd("args({}:::{})".format(pkg, funct))
out = re.sub(r"^function ", funct, out).strip()
out = re.sub(r"NULL(?:\n|\s)*$", "", out).strip()
return out
def list_function_args(self, pkg, funct):
out = self.rcmd("cat(names(formals({}:::{})))".format(pkg, funct))
return out.strip().split(" ")
class RBoxSettingsMixin:
_rscript_binary = None
_additional_paths = None
def rbox_settings(self, key, default):
s = sublime.load_settings('R-Box.sublime-settings')
return s.get(key, default)
def rscript_binary(self):
rscript_binary = self.rbox_settings("rscript_binary", self._rscript_binary)
if not rscript_binary:
if sublime.platform() == "windows":
try:
rscript_binary = os.path.join(
read_registry("Software\\R-Core\\R", "InstallPath")[0],
"bin",
"Rscript.exe")
except:
pass
if not rscript_binary:
rscript_binary = "Rscript"
self._rscript_binary = rscript_binary
return rscript_binary
def additional_paths(self):
additional_paths = self.rbox_settings("additional_paths", [])
if not additional_paths:
additional_paths = self._additional_paths
if not additional_paths:
if sublime.platform() == "osx":
additional_paths = execute_command(
"/usr/bin/login -fpql $USER $SHELL -l -c 'echo -n $PATH'", shell=True)
additional_paths = additional_paths.strip().split(":")
if not additional_paths:
additional_paths = "Rscript"
self._additional_paths = additional_paths
return additional_paths
class RBoxViewMixin:
VALIDCALL = re.compile(r"(?:([a-zA-Z][a-zA-Z0-9.]*)(?::::?))?([.a-zA-Z0-9_-]+)\s*\($")
def function_name_at_point(self, view, pt):
if not view.match_selector(pt, "meta.function-call.r"):
return None, None
scope_begin = view.extract_scope(pt).begin()
if view.match_selector(scope_begin, "support.function.r, variable.function.r"):
scope_begin = view.find("\(", scope_begin).begin() + 1
line = self.extract_line(view, scope_begin, truncated=True)
m = self.VALIDCALL.search(line)
if m:
return m.groups()
else:
return None, None
def _render_from_mdpopups_view(self, view):
mdpops_view = view.window().find_output_panel("mdpopups")
var_scope = "source.r meta.function-call.r " \
"meta.function-call.parameters.r variable.parameter.r "
comma_scope = "source.r meta.function-call.r " \
"meta.function-call.parameters.r punctuation.separator.parameters.r "
regions = mdpops_view.find_by_selector(var_scope)
regions = [r for r in regions if mdpops_view.scope_name(r.begin()) == var_scope]
count = len(regions)
for r in reversed(regions):
sep_point = r.end()
while True:
pt = mdpops_view.find(",", sep_point)
if pt.end() == -1:
sep_point = mdpops_view.size() - 1
break
if mdpops_view.scope_name(pt.begin()) == comma_scope:
sep_point = pt.begin()
break
sep_point = pt.begin() + 1
mdpops_view.run_command(
"r_box_replace_selection",
{"region": (r.end(), sep_point),
"text": " = $%d" % count})
count = count - 1
return mdpops_view.substr(sublime.Region(0, mdpops_view.size()))
def replace_function_at_point(self, view, point):
text = self._render_from_mdpopups_view(view)
text = " ".join([x.strip() for x in text.split("\n")])
function_region = view.extract_scope(point)
view.sel().clear()
view.sel().add(function_region)
view.run_command("insert_snippet", {"contents": text})
def inline_packages_for_view(self, view):
packages = []
for s in view.find_all(r"""(library|require)\(["']?[a-zA-Z][a-zA-Z0-9.]*"""):
pkg = packages.append(re.sub(r"""(library|require)\(["']?""", "", view.substr(s)))
if pkg and pkg not in packages:
packages.append(pkg)
return packages
def extract_line(self, view, pt, truncated=False):
if truncated:
row, _ = view.rowcol(pt)
line_begin = view.text_point(row, 0)
return view.substr(sublime.Region(line_begin, pt))
else:
return view.substr(view.line(pt))
class RBoxMixins(RBoxViewMixin, RscriptMixin, RBoxSettingsMixin):
pass
|
Python
| 0.030551
|
@@ -1762,32 +1762,92 @@
t, out).strip()%0A
+ out = re.sub(r%22%3Cbytecode: %5B%5E%3E%5D+%3E%22, %22%22, out).strip()%0A
out = re
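Unescaped, the hunk inserts a single re.sub call that strips the "<bytecode: ...>" marker R prints for byte-compiled functions. A sketch of get_function_call after the patch:

def get_function_call(self, pkg, funct):
    out = self.rcmd("args({}:::{})".format(pkg, funct))
    out = re.sub(r"^function ", funct, out).strip()
    out = re.sub(r"<bytecode: [^>]+>", "", out).strip()  # added: drop the bytecode message
    out = re.sub(r"NULL(?:\n|\s)*$", "", out).strip()
    return out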
|
e39bca3bf4da5a6593c8cfd614e92c6ce3a4080b
|
fix for test of to_angstrom
|
sunpy/tests/net/test_vso.py
|
sunpy/tests/net/test_vso.py
|
# -*- coding: utf-8 -*-
# Author: Florian Mayer <florian.mayer@bitsrc.org>
#pylint: disable=W0613
from __future__ import absolute_import
import pytest
from sunpy.net import vso
from sunpy.net.vso import attrs as va
from sunpy.net import attr
from sunpy.util.unit_conversion import energy, frequency
def pytest_funcarg__eit(request):
return va.Instrument('eit')
def pytest_funcarg__client(request):
return vso.VSOClient()
def pytest_funcarg__iclient(request):
return vso.InteractiveVSOClient()
def test_simpleattr_apply():
a = attr.ValueAttr({('test', ): 1})
dct = {}
va.walker.apply(a, None, dct)
assert dct['test'] == 1
def test_simpleattr_create(client):
a = attr.ValueAttr({('instrument', ): 'eit'})
assert va.walker.create(a, client.api)[0].instrument == 'eit'
def test_simpleattr_and_duplicate():
attr = va.Instrument('foo')
pytest.raises(TypeError, lambda: attr & va.Instrument('bar'))
attr |= va.Source('foo')
pytest.raises(TypeError, lambda: attr & va.Instrument('bar'))
otherattr = va.Instrument('foo') | va.Source('foo')
pytest.raises(TypeError, lambda: attr & otherattr)
pytest.raises(TypeError, lambda: (attr | otherattr) & va.Instrument('bar'))
tst = va.Instrument('foo') & va.Source('foo')
pytest.raises(TypeError, lambda: tst & tst)
def test_simpleattr_or_eq():
attr = va.Instrument('eit')
assert attr | attr == attr
assert attr | va.Instrument('eit') == attr
def test_complexattr_apply():
tst = {('test', 'foo'): 'a', ('test', 'bar'): 'b'}
a = attr.ValueAttr(tst)
dct = {'test': {}}
va.walker.apply(a, None, dct)
assert dct['test'] == {'foo': 'a', 'bar': 'b'}
def test_complexattr_create(client):
a = attr.ValueAttr({('time', 'start'): 'test'})
assert va.walker.create(a, client.api)[0].time.start == 'test'
def test_complexattr_and_duplicate():
attr = va.Time((2011, 1, 1), (2011, 1, 1, 1))
pytest.raises(
TypeError,
lambda: attr & va.Time((2011, 2, 1), (2011, 2, 1, 1))
)
attr |= va.Source('foo')
pytest.raises(
TypeError,
lambda: attr & va.Time((2011, 2, 1), (2011, 2, 1, 1))
)
def test_complexattr_or_eq():
attr = va.Time((2011, 1, 1), (2011, 1, 1, 1))
assert attr | attr == attr
assert attr | va.Time((2011, 1, 1), (2011, 1, 1, 1)) == attr
def test_attror_and():
attr = va.Instrument('foo') | va.Instrument('bar')
one = attr & va.Source('bar')
other = (
(va.Instrument('foo') & va.Source('bar')) |
(va.Instrument('bar') & va.Source('bar'))
)
assert one == other
def test_wave_toangstrom():
for name, factor in energy:
w = va.Wave(62 / factor, 62 / factor, name)
assert int(w.min) == 199
w = va.Wave(62, 62, 'eV')
assert int(w.min) == 199
w = va.Wave(62e-3, 62e-3, 'keV')
assert int(w.min) == 199
for name, factor in frequency:
w = va.Wave(1.506e16 / factor, 1.506e16 / factor, name)
assert int(w.min) == 199
w = va.Wave(1.506e16, 1.506e16, 'Hz')
assert int(w.min) == 199
w = va.Wave(1.506e7, 1.506e7, 'GHz')
assert int(w.min) == 199
def test_time_xor():
one = va.Time((2010, 1, 1), (2010, 1, 2))
a = one ^ va.Time((2010, 1, 1, 1), (2010, 1, 1, 2))
assert a == attr.AttrOr(
[va.Time((2010, 1, 1), (2010, 1, 1, 1)),
va.Time((2010, 1, 1, 2), (2010, 1, 2))]
)
a ^= va.Time((2010, 1, 1, 4), (2010, 1, 1, 5))
assert a == attr.AttrOr(
[va.Time((2010, 1, 1), (2010, 1, 1, 1)),
va.Time((2010, 1, 1, 2), (2010, 1, 1, 4)),
va.Time((2010, 1, 1, 5), (2010, 1, 2))]
)
def test_wave_xor():
one = va.Wave(0, 1000)
a = one ^ va.Wave(200, 400)
assert a == attr.AttrOr([va.Wave(0, 200), va.Wave(400, 1000)])
a ^= va.Wave(600, 800)
assert a == attr.AttrOr(
[va.Wave(0, 200), va.Wave(400, 600), va.Wave(800, 1000)])
def test_err_dummyattr_create():
with pytest.raises(TypeError):
va.walker.create(attr.DummyAttr(), None, {})
def test_err_dummyattr_apply():
with pytest.raises(TypeError):
va.walker.apply(attr.DummyAttr(), None, {})
|
Python
| 0
|
@@ -244,66 +244,8 @@
tr%0A%0A
-from sunpy.util.unit_conversion import energy, frequency%0A%0A
def
@@ -2599,24 +2599,207 @@
angstrom():%0A
+ frequency = %5B%0A ('Hz', 1),%0A ('kHz', 1e3),%0A ('MHz', 1e6),%0A ('GHz', 1e9)%5D%0A%0A energy = %5B%0A ('eV', 1),%0A ('keV', 1e3),%0A ('MeV', 1e6)%5D%0A%0A
for name
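Read back from the %-encoded diff, the change drops the sunpy.util.unit_conversion import and defines the conversion tables locally inside the test. A sketch of the top of test_wave_toangstrom after the patch:

def test_wave_toangstrom():
    # conversion factors now defined inline instead of imported
    frequency = [
        ('Hz', 1),
        ('kHz', 1e3),
        ('MHz', 1e6),
        ('GHz', 1e9)]

    energy = [
        ('eV', 1),
        ('keV', 1e3),
        ('MeV', 1e6)]

    for name, factor in energy:
        w = va.Wave(62 / factor, 62 / factor, name)
        assert int(w.min) == 199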
|
d0d80c459bcac9b86fff146726e9e0e9ec788652
|
fix some broken doctests
|
sympy/assumptions/assume.py
|
sympy/assumptions/assume.py
|
# doctests are disabled because of issue #1521
from sympy.core import Basic, Symbol
from sympy.core.relational import Relational
class AssumptionsContext(set):
"""Set representing assumptions.
This is used to represent global assumptions, but you can also use this
class to create your own local assumptions contexts. It is basically a thin
wrapper to Python's set, so see its documentation for advanced usage.
Examples:
>>> from sympy import *
>>> global_assumptions
set([])
>>> x = Symbol('x')
>>> global_assumptions.add(Assume(x, Q.real))
>>> global_assumptions
set([Assume(x, 'real', True)])
>>> global_assumptions.remove(Assume(x, Q.real))
>>> global_assumptions
set([])
>>> global_assumptions.clear()
"""
def add(self, *assumptions):
"""Add an assumption."""
for a in assumptions:
assert isinstance(a, Assume), 'can only store instances of Assume'
super(AssumptionsContext, self).add(a)
global_assumptions = AssumptionsContext()
class Assume(Basic):
"""New-style assumptions.
>>> from sympy import *
>>> x = Symbol('x')
>>> Assume(x, Q.integer)
Assume(x, 'integer', True)
>>> Assume(x, Q.integer, False)
Assume(x, 'integer', False)
>>> Assume( x > 1 )
Assume(1 < x, 'relational', True)
"""
def __init__(self, expr, key='relational', value=True):
self._args = (expr, key, value)
is_Atom = True # do not attempt to decompose this
@property
def expr(self):
"""
Return the expression used by this assumption.
Examples:
>>> from sympy import *
>>> x = Symbol('x')
>>> a = Assume(x+1, Q.integer)
>>> a.expr
1 + x
"""
return self._args[0]
@property
def key(self):
"""
Return the key used by this assumption.
It is a string, e.g. 'integer', 'rational', etc.
Examples:
>>> from sympy import *
>>> x = Symbol('x')
>>> a = Assume(x, Q.integer)
>>> a.key
'integer'
"""
return self._args[1]
@property
def value(self):
"""
Return the value stored by this assumptions.
It's a boolean. True means that the assumption
holds always, and False means the assumption
does not hold
Examples:
>>> from sympy import *
>>> x = Symbol('x')
>>> a = Assume(x, Q.integer)
>>> a.value
True
>>> b = Assume(x, Q.integer, False)
>>> b.value
False
"""
return self._args[2]
def __eq__(self, other):
if type(other) == Assume:
return self._args == other._args
return False
def eliminate_assume(expr, symbol=None):
"""
Convert an expression with assumptions to an equivalent with all assumptions
replaced by symbols.
Assume(x, integer=True) --> integer
Assume(x, integer=False) --> ~integer
Examples:
>>> from sympy import *
>>> x = Symbol('x')
>>> eliminate_assume(Assume(x, Q.positive))
positive
>>> eliminate_assume(Assume(x, Q.positive, False))
Not(positive)
"""
if type(expr) == Assume:
if symbol is not None:
if not expr.expr.has(symbol): return
if expr.value: return Symbol(expr.key)
return ~Symbol(expr.key)
args = []
for a in expr.args:
args.append(eliminate_assume(a))
return type(expr)(*args)
|
Python
| 0.000041
|
@@ -500,38 +500,51 @@
mptions%0A
-set(%5B%5D
+AssumptionsContext(
)%0A %3E%3E%3E x
@@ -650,18 +650,33 @@
-se
+AssumptionsContex
t(%5BAssum
@@ -796,14 +796,27 @@
-set(%5B%5D
+AssumptionsContext(
)%0A
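Unescaped, the hunks only rewrite the expected doctest output, since the AssumptionsContext repr is no longer a plain set. A sketch of the fixed doctest:

>>> global_assumptions
AssumptionsContext()
>>> x = Symbol('x')
>>> global_assumptions.add(Assume(x, Q.real))
>>> global_assumptions
AssumptionsContext([Assume(x, 'real', True)])
>>> global_assumptions.remove(Assume(x, Q.real))
>>> global_assumptions
AssumptionsContext()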
|
c1a71ff5f5a777bb9ea28b6109334067f186eb5a
|
add Q.infinity(Add(args)) <==> any(map(Q.infinity, args))
|
sympy/assumptions/newask.py
|
sympy/assumptions/newask.py
|
from __future__ import print_function, division
from sympy.core import Basic, Mul, Add, Pow
from sympy.assumptions.assume import global_assumptions, AppliedPredicate
from sympy.logic.inference import satisfiable
from sympy.logic.boolalg import And, Implies, Equivalent, Or
from sympy.assumptions.ask import Q
from sympy.utilities.iterables import sift
def newask(proposition, assumptions=True, context=global_assumptions):
relevant_facts = get_all_relevant_facts(proposition, assumptions, context)
# TODO: Can this be faster to do it in one pass using xor?
can_be_true = satisfiable(And(proposition, assumptions,
relevant_facts, *context))
can_be_false = satisfiable(And(~proposition, assumptions,
relevant_facts, *context))
if can_be_true and can_be_false:
return None
if can_be_true and not can_be_false:
return True
if not can_be_true and can_be_false:
return False
if not can_be_true and not can_be_false:
# TODO: Run additional checks to see which combination of the
# assumptions, global_assumptions, and relevant_facts are
# inconsistent.
raise ValueError("Inconsistent assumptions")
equiv_any_args = set(((Q.zero, Mul),))
def get_relevant_facts(proposition, assumptions=True, context=global_assumptions):
keys = proposition.atoms(AppliedPredicate)
if isinstance(assumptions, Basic):
# XXX: We need this since True/False are not Basic
keys |= assumptions.atoms(AppliedPredicate)
if context:
keys |= And(*context).atoms(AppliedPredicate)
relevant_facts = True
keys_by_predicate = sift(keys, lambda ap: ap.func)
# TODO: Write this in a more scalable and extendable way
# To keep things straight, for implications, only worry about the
# Implies(key, Q.something(key.args[0])) fact.
for key in keys_by_predicate[Q.positive]:
relevant_facts &= Implies(key, Q.real(key.args[0]))
for key in keys_by_predicate[Q.zero]:
relevant_facts &= Equivalent(key, ~Q.nonzero(key.args[0]))
relevant_facts &= Implies(key, ~Q.positive(key.args[0]))
relevant_facts &= Implies(key, Q.real(key.args[0]))
# Now for something interesting...
if isinstance(key.args[0], Pow):
relevant_facts &= Implies(key, Q.zero(key.args[0].base))
relevant_facts &= Implies(And(Q.zero(key.args[0].base),
Q.positive(key.args[0].exp)), key)
for key in keys_by_predicate[Q.nonzero]:
relevant_facts &= Equivalent(key, ~Q.zero(key.args[0]))
if isinstance(key.args[0], Add):
relevant_facts &= Implies(And(*[Q.positive(i) for i in
key.args[0].args]), key)
for key in keys:
predicate = key.func
expr = key.args[0]
if (predicate, type(expr)) in equiv_any_args:
relevant_facts &= Equivalent(key, Or(*map(predicate, expr.args)))
return relevant_facts
def get_all_relevant_facts(proposition, assumptions=True, context=global_assumptions):
# The relevant facts might introduce new keys, e.g., Q.zero(x*y) will
# introduce the keys Q.zero(x) and Q.zero(y), so we need to run it until
# we stop getting new things. Hopefully this strategy won't lead to an
# infinite loop in the future.
relevant_facts = True
old_relevant_facts = False
while relevant_facts != old_relevant_facts:
old_relevant_facts, relevant_facts = (relevant_facts,
get_relevant_facts(proposition, assumptions & relevant_facts,
context))
return relevant_facts
|
Python
| 0.000029
|
@@ -1233,16 +1233,56 @@
o, Mul),
+%0A (Q.infinity, Add)
))%0A%0Adef
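Unescaped, the hunk extends equiv_any_args so the existing Equivalent(key, Or(*map(predicate, expr.args))) loop also covers Q.infinity over Add. The resulting definition:

equiv_any_args = set(((Q.zero, Mul),
                      (Q.infinity, Add)))  # added: Q.infinity(Add(args)) <==> Or(map(Q.infinity, args))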
|
50b19958b531cd94b537f3d911ce9b0c0b7f1ea2
|
add OrderedDict to store information about the loaded .rooms file
|
trunk/editor/structdata/project.py
|
trunk/editor/structdata/project.py
|
#!/usr/bin/env python
try:
from collections import OrderedDict
except ImportError:
from misc.dict import OrderedDict
from subject import Subject
class Project(Subject):
def __init__(self):
super(Project, self).__init__()
self.informations = None
self.images = {}
self.items = OrderedDict()
self.vars = {}
self.events = OrderedDict()
self.rooms = OrderedDict()
self.selected_room = None
g_project = Project()
|
Python
| 0
|
@@ -254,20 +254,55 @@
elf.
-informations
+data = OrderedDict()%0A self.data%5B'world'%5D
= N
@@ -322,14 +322,22 @@
elf.
+data%5B'
images
+'%5D
= %7B
@@ -355,13 +355,21 @@
elf.
+data%5B'
items
+'%5D
= O
@@ -398,12 +398,20 @@
elf.
+data%5B'
vars
+'%5D
= %7B
@@ -429,14 +429,22 @@
elf.
+data%5B'
events
+'%5D
= O
@@ -473,13 +473,21 @@
elf.
+data%5B'
rooms
+'%5D
= O
@@ -502,42 +502,8 @@
ct()
-%0A self.selected_room = None
%0A%0Ag_
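Unescaped, the diff replaces the individual attributes with entries in a single OrderedDict and drops selected_room. A sketch of __init__ after the patch:

def __init__(self):
    super(Project, self).__init__()
    self.data = OrderedDict()
    self.data['world'] = None
    self.data['images'] = {}
    self.data['items'] = OrderedDict()
    self.data['vars'] = {}
    self.data['events'] = OrderedDict()
    self.data['rooms'] = OrderedDict()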
|
01e9fa344259faa6eeb7f0480975547d375e132f
|
add function to change an image. The function removes the image from the dictionary and adds an image with the new key and new path to the file
|
trunk/editor/structdata/project.py
|
trunk/editor/structdata/project.py
|
#!/usr/bin/env python
from misc.odict import OrderedDict
from subject import Subject
class Project(Subject):
def __init__(self):
super(Project, self).__init__()
self.data = OrderedDict()
self.data['world'] = None
self.data['images'] = {}
self.data['items'] = OrderedDict()
self.data['vars'] = {}
self.data['events'] = OrderedDict()
self.data['rooms'] = OrderedDict()
def changeEventName(self, old_name, new_name):
event = self.data['events'].pop(old_name)
event.setName(new_name)
self.data['events'][event.id] = event
self.notify()
def changeStartRoom(self, new_start_room_name):
self.data['world'].start = new_start_room_name
self.notify()
def changeRoomName(self, old_room_name, new_room_name):
room = self.data['rooms'].pop(old_room_name)
self.data['rooms'][new_room_name] = room
if self.data['world'].start == old_room_name:
self.changeStartRoom(new_room_name)
room.setName(new_room_name)
self.notify()
def removeRoom(self, room_name):
self.data['rooms'].pop(room_name)
if self.data['world'].start == room_name:
if g_project.data['rooms']:
new_start_room_name = g_project.data['rooms'].keys()[0]
else:
new_start_room_name = ""
self.changeStartRoom(new_start_room_name)
self.notify()
g_project = Project()
|
Python
| 0.000002
|
@@ -429,24 +429,226 @@
eredDict()%0A%0A
+%0A def changeImage(self, old_image, new_image):%0A image = self.data%5B'images'%5D.pop(old_image)%0A image.file = new_image%0A self.data%5B'images'%5D%5Bnew_image%5D = image%0A self.notify()%0A%0A
def chan
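Unescaped, the hunk inserts the new changeImage method between __init__ and changeEventName. A sketch, indentation normalized:

def changeImage(self, old_image, new_image):
    # re-key the image under its new path and update the file reference
    image = self.data['images'].pop(old_image)
    image.file = new_image
    self.data['images'][new_image] = image
    self.notify()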
|
a5aa4672ea04dd67cbbf70e058e8a6bf3e7f2de6
|
remove support for providers as it complicates things too much
|
systest_manager/commands.py
|
systest_manager/commands.py
|
import sys
import os
import shutil
import argh
import sh
import yaml
import requests
from argh.decorators import arg
import cloudify_cli
from cloudify_rest_client import CloudifyClient
from cloudify_cli.utils import load_cloudify_working_dir_settings
from cosmo_tester.framework import util
from settings import Settings
from completion import Completion
NO_INIT = 'Not initialized'
NO_BOOTSTRAP = 'Not bootstrapped'
def get_manager_ip():
cli_settings = load_cloudify_working_dir_settings()
return cli_settings.get_management_server()
app = argh.EntryPoint('systest')
cfy = sh.cfy.bake(_out=lambda line: sys.stdout.write(line),
_err=lambda line: sys.stderr.write(line))
settings = Settings()
completion = Completion(settings)
@app
@arg('--basedir', required=True)
@arg('--main_suites_yaml', required=True)
@arg('--user_suites_yaml', required=True)
def init(basedir=None, main_suites_yaml=None, user_suites_yaml=None):
settings.write_settings(basedir, main_suites_yaml, user_suites_yaml)
@app
@arg('configuration', completer=completion.all_configurations)
def generate(configuration, reset_config=False):
suites_yaml = settings.load_suites_yaml()
handler_configuration = suites_yaml[
'handler_configurations'][configuration]
is_manager_bootstrap = not handler_configuration.get(
'bootstrap_using_providers', False)
original_inputs_path = os.path.expanduser(handler_configuration['inputs'])
if is_manager_bootstrap:
original_manager_blueprint_path = os.path.expanduser(
handler_configuration['manager_blueprint'])
else:
original_manager_blueprint_path = None
handler_configuration_dir = settings.basedir / configuration
if reset_config and handler_configuration_dir.exists():
shutil.rmtree(handler_configuration_dir)
handler_configuration_dir.makedirs()
inputs_path, manager_blueprint_path = util.generate_unique_configurations(
workdir=handler_configuration_dir,
original_inputs_path=original_inputs_path,
original_manager_blueprint_path=original_manager_blueprint_path,
is_provider_bootstrap=not is_manager_bootstrap)
inputs_path = str(inputs_path)
if is_manager_bootstrap:
new_manager_blueprint_path = (
manager_blueprint_path.dirname() / 'manager-blueprint.yaml')
shutil.move(manager_blueprint_path, new_manager_blueprint_path)
manager_blueprint_path = str(new_manager_blueprint_path)
handler_configuration_path = (
handler_configuration_dir / 'handler-configuration.yaml')
handler_configuration['inputs'] = inputs_path
if is_manager_bootstrap:
handler_configuration['manager_blueprint'] = manager_blueprint_path
def apply_override_and_remove_prop(yaml_path, prop):
with util.YamlPatcher(yaml_path, default_flow_style=False) as patch:
override = util.process_variables(
suites_yaml, handler_configuration.get(prop, {}))
for key, value in override.items():
patch.set_value(key, value)
if prop in handler_configuration:
del handler_configuration[prop]
apply_override_and_remove_prop(inputs_path, 'inputs_override')
if is_manager_bootstrap:
apply_override_and_remove_prop(manager_blueprint_path,
'manager_blueprint_override')
handler_configuration_path.write_text(
yaml.safe_dump(handler_configuration, default_flow_style=False))
@app
@arg('configuration', completer=completion.existing_configurations)
def status(configuration):
config_dir = settings.basedir / configuration
if not config_dir.exists():
return NO_INIT
try:
with settings.basedir / configuration:
manager_ip = get_manager_ip()
if not manager_ip:
return NO_BOOTSTRAP
client = CloudifyClient(manager_ip)
try:
version = client.manager.get_version()['version']
return '[{0}] Running ({1})'.format(manager_ip, version)
except requests.exceptions.ConnectionError:
return '[{0}] Not reachable'.format(manager_ip)
except cloudify_cli.exceptions.CloudifyCliError as e:
if NO_INIT in str(e):
return NO_INIT
else:
raise
@app
@arg('configuration', completer=completion.all_configurations)
def bootstrap(configuration, reset_config=False):
config_dir = settings.basedir / configuration
if not config_dir.exists() or reset_config:
generate(configuration, reset_config=reset_config)
with config_dir:
blueprint_path = (
config_dir / 'manager-blueprint' / 'manager-blueprint.yaml')
cfy.init().wait()
cfy.bootstrap(blueprint_path=blueprint_path,
inputs=config_dir / 'inputs.yaml').wait()
handler_configuration_path = config_dir / 'handler-configuration.yaml'
handler_configuration = yaml.load(handler_configuration_path.text())
handler_configuration['manager_ip'] = get_manager_ip()
handler_configuration_path.write_text(
yaml.safe_dump(handler_configuration, default_flow_style=False))
@app
@arg('configuration', completer=completion.existing_configurations)
def teardown(configuration):
config_dir = settings.basedir / configuration
if not config_dir.exists():
return NO_INIT
with config_dir:
cfy.teardown(force=True, ignore_deployments=True).wait()
@app
def global_status():
if not settings.basedir.exists():
return
for directory in settings.basedir.dirs():
configuration = directory.basename()
yield '{0}: {1}'.format(configuration, status(configuration))
@app
def clear(force=False):
if not force:
raise argh.CommandError('Must pass -f flag to actually clear '
'configurations dir')
if settings.basedir.exists():
shutil.rmtree(settings.basedir)
|
Python
| 0
|
@@ -1280,110 +1280,8 @@
on%5D%0A
- is_manager_bootstrap = not handler_configuration.get(%0A 'bootstrap_using_providers', False)%0A
@@ -1359,41 +1359,8 @@
'%5D)%0A
- if is_manager_bootstrap:%0A
@@ -1417,28 +1417,24 @@
er(%0A
-
handler_conf
@@ -1469,65 +1469,8 @@
'%5D)%0A
- else:%0A original_manager_blueprint_path = None%0A
@@ -1929,64 +1929,8 @@
path
-,%0A is_provider_bootstrap=not is_manager_bootstrap
)%0A
@@ -1966,41 +1966,8 @@
th)%0A
- if is_manager_bootstrap:%0A
@@ -1997,36 +1997,32 @@
ath = (%0A
-
manager_blueprin
@@ -2066,28 +2066,24 @@
.yaml')%0A
-
-
shutil.move(
@@ -2134,20 +2134,16 @@
t_path)%0A
-
mana
@@ -2351,41 +2351,8 @@
ath%0A
- if is_manager_bootstrap:%0A
@@ -2917,41 +2917,8 @@
e')%0A
- if is_manager_bootstrap:%0A
@@ -2964,36 +2964,32 @@
blueprint_path,%0A
-
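Unescaped, the hunks delete every is_manager_bootstrap branch (and the is_provider_bootstrap keyword), so generate() always follows the manager-blueprint path. A partial sketch of the simplified opening of the function, not the complete body:

def generate(configuration, reset_config=False):
    suites_yaml = settings.load_suites_yaml()
    handler_configuration = suites_yaml[
        'handler_configurations'][configuration]
    original_inputs_path = os.path.expanduser(handler_configuration['inputs'])
    original_manager_blueprint_path = os.path.expanduser(
        handler_configuration['manager_blueprint'])
    handler_configuration_dir = settings.basedir / configuration
    if reset_config and handler_configuration_dir.exists():
        shutil.rmtree(handler_configuration_dir)
    handler_configuration_dir.makedirs()
    # the is_provider_bootstrap keyword is gone from this call
    inputs_path, manager_blueprint_path = util.generate_unique_configurations(
        workdir=handler_configuration_dir,
        original_inputs_path=original_inputs_path,
        original_manager_blueprint_path=original_manager_blueprint_path)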
|
3fcdaf6dcfd5b8752c6785972025d6cc051d4d6b
|
Add utf-8 encoding flag to models file
|
teknologr/members/models.py
|
teknologr/members/models.py
|
from django.db import models
from django_countries.fields import CountryField
class SuperClass(models.Model):
# This class is the base of everything
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class Member(SuperClass):
GENDER_CHOICES = (("UN", "Okänd"), ("M", "Man"), ("F", "Kvinna"))
given_names = models.CharField(max_length=64, blank=False, null=False, default="UNKNOWN")
preferred_name = models.CharField(max_length=32, blank=False, null=False, default="UNKNOWN")
surname = models.CharField(max_length=32, blank=False, null=False, default="UNKNOWN")
maiden_name = models.CharField(max_length=32, blank=True, null=False, default="")
nickname = models.CharField(max_length=32, blank=True, null=False, default="")
birth_date = models.DateField(blank=True, null=True)
student_id = models.CharField(max_length=10, blank=True, null=False, default="")
gender = models.CharField(max_length=2, choices=GENDER_CHOICES, default="UN")
# https://pypi.python.org/pypi/django-countries/1.0.1
nationality = CountryField(blank_label="Välj land", blank=True, null=False, default="")
enrolment_year = models.IntegerField(blank=True, null=True)
graduated = models.BooleanField(default=False)
graduated_year = models.IntegerField(blank=True, null=True)
degree_programme = models.CharField(max_length=256, blank=True, null=False)
dead = models.BooleanField(default=False)
mobile_phone = models.CharField(max_length=20, blank=True, null=False, default="")
phone = models.CharField(max_length=20, blank=True, null=False, default="")
street_address = models.CharField(max_length=64, blank=True, null=False, default="")
postal_code = models.CharField(max_length=64, blank=True, null=False, default="")
city = models.CharField(max_length=64, blank=True, null=False, default="")
# https://pypi.python.org/pypi/django-countries/1.0.1
country = CountryField(blank_label="Välj land", blank=True, null=False, default="")
url = models.CharField(max_length=64, blank=True, null=False, default="")
email = models.CharField(max_length=64, blank=True, null=False, default="")
subscribed_to_modulen = models.BooleanField(default=False)
allow_publish_info = models.BooleanField(default=True)
username = models.CharField(max_length=32, blank=False, null=True, editable=False)
bill_code = models.CharField(max_length=8, blank=False, null=True, editable=False)
crm_id = models.CharField(max_length=32, blank=True, null=False, default="")
comment = models.TextField(blank=True, null=True)
def _get_full_name(self):
return "%s %s" % (self.given_names, self.surname)
def _get_full_preferred_name(self):
first_name = self.preferred_name if self.preferred_name != "UNKNOWN" else self.given_names.split()[0]
return "%s %s" % (first_name, self.surname)
full_name = property(_get_full_name)
name = property(_get_full_name)
full_preferred_name = property(_get_full_preferred_name)
def __str__(self):
return self.full_name
class DecorationOwnership(SuperClass):
member = models.ForeignKey("Member")
decoration = models.ForeignKey("Decoration")
acquired = models.DateField()
def __str__(self):
return "%s - %s" % (self.decoration.name, self.member.full_name)
class Decoration(SuperClass):
name = models.CharField(max_length=64, blank=False, null=False, unique=True)
def __str__(self):
return self.name
class GroupMembership(SuperClass):
member = models.ForeignKey("Member")
group = models.ForeignKey("Group")
class Meta:
unique_together = (("member", "group"),)
class Group(SuperClass):
grouptype = models.ForeignKey("GroupType")
begin_date = models.DateField()
end_date = models.DateField()
def __str__(self):
return "{0}: {1} - {2}".format(self.grouptype.name, self.begin_date, self.end_date)
class GroupType(SuperClass):
name = models.CharField(max_length=64, blank=False, null=False, unique=True)
def __str__(self):
return self.name
class Functionary(SuperClass):
member = models.ForeignKey("Member")
functionarytype = models.ForeignKey("FunctionaryType")
begin_date = models.DateField()
end_date = models.DateField()
def _get_str_member(self):
return "{0} - {1}: {2}".format(self.begin_date, self.end_date, self.member)
def _get_str_type(self):
return "{0}: {1} - {2}".format(self.functionarytype, self.begin_date, self.end_date)
str_member = property(_get_str_member)
str_type = property(_get_str_type)
def __str__(self):
return "{0}: {1} - {2}, {3}".format(self.functionarytype, self.begin_date, self.end_date, self.member)
class FunctionaryType(SuperClass):
name = models.CharField(max_length=64, blank=False, null=False, unique=True)
def __str__(self):
return self.name
class MemberType(SuperClass):
TYPES = (
("PH", "Phux"),
("OM", "Ordinarie Medlem"),
("JS", "JuniorStÄlM"),
("ST", "StÄlM"),
("AA", "Aktiv Alumn"),
)
member = models.ForeignKey("Member")
begin_date = models.DateField()
end_date = models.DateField(null=True)
type = models.CharField(max_length=2, choices=TYPES, default="PH")
def __str__(self):
return "{0}: {1} - {2}".format(self.get_type_display(), self.begin_date, self.end_date)
|
Python
| 0
|
@@ -1,16 +1,41 @@
+# -*- coding: utf-8 -*-%0A%0A
from django.db i
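Unescaped, the only change is a coding declaration (plus a blank line) prepended to the file, presumably because the model defaults contain non-ASCII strings such as "Okänd". The new header:

# -*- coding: utf-8 -*-

from django.db import models
from django_countries.fields import CountryField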
|
b04693387be08c1ead880d0e7472026ed76dad80
|
Fix django.conf.urls.defaults imports
|
openstack_auth/urls.py
|
openstack_auth/urls.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls.defaults import patterns, url
from .utils import patch_middleware_get_user
patch_middleware_get_user()
urlpatterns = patterns('openstack_auth.views',
url(r"^login/$", "login", name='login'),
url(r"^logout/$", 'logout', name='logout'),
url(r'^switch/(?P<tenant_id>[^/]+)/$', 'switch', name='switch_tenants'),
url(r'^switch_services_region/(?P<region_name>[^/]+)/$', 'switch_region',
name='switch_services_region')
)
|
Python
| 0.003868
|
@@ -564,17 +564,8 @@
urls
-.defaults
imp
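Unescaped, the hunk removes the obsolete .defaults module from the import path. The resulting line:

from django.conf.urls import patterns, url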
|
78705f598e7e3325e871bd17ff353a31c71bc399
|
Extend all admin forms to ContainerAdminForm (json field)
|
opps/articles/forms.py
|
opps/articles/forms.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class PostAdminForm(forms.ModelForm):
json = JSONFormField(widget=JSONField(attrs={'_model': 'Post'}), required=False)
multiupload_link = '/fileupload/image/'
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
for field in Field.objects.filter(
application__contains=self._meta.model.__name__):
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
|
Python
| 0
|
@@ -43,73 +43,8 @@
-*-%0A
-from django import forms%0A%0Afrom .models import Post, Album, Link%0A%0A
from
@@ -80,17 +80,16 @@
sEditor%0A
-%0A
from opp
@@ -94,763 +94,176 @@
pps.
-db.models.fields.jsonf import JSONFormField%0Afrom opps.fields.widgets import JSONField%0Afrom opps.fields.models import Field, FieldOption%0A%0A%0Aclass PostAdminForm(forms.ModelForm):%0A json = JSONFormField(widget=JSONField(attrs=%7B'_model': 'Post'%7D), required=False)%0A%0A multiupload_link = '/fileupload/image/'%0A%0A def __init__(self, *args, **kwargs):%0A super(PostAdminForm, self).__init__(*args, **kwargs)%0A%0A for field in Field.objects.filter(%0A application__contains=self._meta.model.__name__):%0A for fo in FieldOption.objects.filter(field=field):%0A self.fields%5B%0A 'json_%7B%7D_%7B%7D'.format(%0A field.slug, fo.option.slug%0A )%5D = forms.CharField(required=False)
+containers.forms import ContainerAdminForm%0A%0Afrom .models import Post, Album, Link%0A%0A%0Aclass PostAdminForm(ContainerAdminForm):%0A multiupload_link = '/fileupload/image/'
%0A%0A
@@ -368,27 +368,29 @@
orm(
-forms.Model
+ContainerAdmin
Form):%0A
-%0A
@@ -561,19 +561,22 @@
orm(
-forms.Model
+ContainerAdmin
Form
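Unescaped, the hunks move the JSON-field machinery out of this module and re-parent every admin form on ContainerAdminForm. A sketch of the file after the patch, blank lines approximate:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.core.widgets import OppsEditor
from opps.containers.forms import ContainerAdminForm

from .models import Post, Album, Link


class PostAdminForm(ContainerAdminForm):
    multiupload_link = '/fileupload/image/'

    class Meta:
        model = Post
        widgets = {'content': OppsEditor()}


class AlbumAdminForm(ContainerAdminForm):
    multiupload_link = '/fileupload/image/'

    class Meta:
        model = Album
        widgets = {
            'headline': OppsEditor()
        }


class LinkAdminForm(ContainerAdminForm):
    class Meta:
        model = Link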
|
67a7a3f5bc05265690a831dea7c4310af66870a8
|
add channel obj on set_context_data * long_slug * level
|
opps/articles/utils.py
|
opps/articles/utils.py
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel_long_slug'] = self.long_slug
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
|
Python
| 0.000001
|
@@ -566,37 +566,152 @@
nnel
-_long_slug'%5D = self.long_slug
+'%5D = %7B%7D%0A context%5B'channel'%5D%5B'long_slug'%5D = self.long_slug%0A if self.channel:%0A context%5B'channel'%5D%5B'level'%5D = self.channel.get_level()
%0A%0A
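Unescaped, the hunk replaces the flat channel_long_slug key with a nested channel dict carrying long_slug and, when a channel object is set, its level. The resulting lines:

context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
    context['channel']['level'] = self.channel.get_level()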
|
5ad21e185cf1984eb0a068387fdd1d73a4a56d15
|
Create get_context_data, set template vars opps_channel and opps_channel_conf (issue #47)
|
opps/articles/views.py
|
opps/articles/views.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sites.models import get_current_site
from django.core.paginator import Paginator, InvalidPage
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.http import Http404
from haystack.views import SearchView
from .models import Post
from opps.channels.models import Channel
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
homepage = Channel.objects.get_homepage(site=self.site)
if not homepage:
return None
long_slug = self.kwargs.get('channel__long_slug',
homepage.long_slug)
if homepage.long_slug != long_slug:
long_slug = long_slug[:-1]
domain_folder = 'channels'
if self.site.id > 1:
domain_folder = "{0}/channels".format(self.site)
return '{0}/{1}.html'.format(domain_folder, long_slug)
@property
def queryset(self):
self.site = get_current_site(self.request)
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True,
site=self.site,
date_available__lte=timezone.now(),
published=True).all()
long_slug = self.kwargs['channel__long_slug'][:-1]
get_object_or_404(Channel, site=self.site, long_slug=long_slug,
date_available__lte=timezone.now(), published=True)
return Post.objects.filter(site=self.site,
channel__long_slug=long_slug,
date_available__lte=timezone.now(),
published=True).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
homepage = Channel.objects.get_homepage(site=self.site)
if not homepage:
return None
long_slug = self.kwargs.get('channel__long_slug', homepage.long_slug)
domain_folder = 'articles'
if self.site.id > 1:
domain_folder = "{0}/articles".format(self.site)
return '{0}/{1}.html'.format(domain_folder,
long_slug)
@property
def queryset(self):
self.site = get_current_site(self.request)
homepage = Channel.objects.get_homepage(site=self.site)
slug = None
if homepage:
slug = homepage.long_slug
long_slug = self.kwargs.get('channel__long_slug', slug)
return Post.objects.filter(site=self.site,
channel__long_slug=long_slug,
slug=self.kwargs['slug'],
date_available__lte=timezone.now(),
published=True).all()
class Search(SearchView):
def get_results(self):
return self.form.search().order_by('-date_available')
def build_page(self):
paginator = Paginator(self.results, self.results_per_page)
try:
paginator.page(int(self.request.GET.get('page', 1)))
except InvalidPage:
raise Http404("No such page!")
return (None, self.results)
|
Python
| 0
|
@@ -364,16 +364,49 @@
Http404
+%0Afrom django.conf import settings
%0A%0Afrom h
@@ -509,71 +509,482 @@
el%0A%0A
-%0Aclass OppsList(ListView):%0A%0A context_object_name = %22context%22
+def set_context_data(self, SUPER, **kwargs):%0A context = super(SUPER, self).get_context_data(**kwargs)%0A article = self.article.get()%0A context%5B'opps_channel'%5D = article.channel%0A context%5B'opps_channel_conf'%5D = settings.OPPS_CHANNEL_CONF%5C%0A .get(article.channel.slug, '')%0A return context%0A%0A%0Aclass OppsList(ListView):%0A%0A context_object_name = %22context%22%0A%0A def get_context_data(self, **kwargs):%0A return set_context_data(self, OppsList, **kwargs)
%0A%0A
@@ -2132,38 +2132,46 @@
d=True)%0A
-return
+self.article =
Post.objects.fi
@@ -2218,32 +2218,40 @@
+
+
channel__long_sl
@@ -2256,32 +2256,40 @@
slug=long_slug,%0A
+
@@ -2370,32 +2370,40 @@
+
published=True).
@@ -2400,32 +2400,60 @@
hed=True).all()%0A
+ return self.article%0A
%0A%0Aclass OppsDeta
@@ -2502,24 +2502,127 @@
%22context%22%0A%0A
+ def get_context_data(self, **kwargs):%0A return set_context_data(self, OppsDetail, **kwargs)%0A%0A
@propert
@@ -3367,38 +3367,46 @@
, slug)%0A
-return
+self.article =
Post.objects.fi
@@ -3680,16 +3680,44 @@
).all()%0A
+ return self.article%0A
%0A%0Aclass
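Unescaped, the diff adds a module-level set_context_data helper (plus get_context_data hooks on OppsList and OppsDetail) and makes both queryset properties keep their query on self.article. A partial sketch of the added helper only:

def set_context_data(self, SUPER, **kwargs):
    context = super(SUPER, self).get_context_data(**kwargs)
    article = self.article.get()
    context['opps_channel'] = article.channel
    context['opps_channel_conf'] = settings.OPPS_CHANNEL_CONF\
        .get(article.channel.slug, '')
    return context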
|
bee1a6d52d1cefd08523353d53a4f5ab8838b7bb
|
add filter sim
|
run_scripts/get_q2q_sim.py
|
run_scripts/get_q2q_sim.py
|
#encoding=utf-8
"""
get q2q similariy from service
curl -X POST -d '{"query":"你知罪吗", "question":"你知道错了吗"}' http://10.191.15.89:40919/cgi-bin/ranker/q2qsimilarity
warp this comand for whole file
"""
import os
import sys
if sys.version_info[0] == 2:
reload(sys)
sys.setdefaultencoding("utf-8")
import time
import json
import codecs
import argparse
import traceback
import multiprocessing as MP
import subprocess
def get_q2q_sim(q0, q1):
cmd = ''' curl -X POST -d '{{"query":"{}", "question":"{}" }}' http://10.191.15.89:40919/cgi-bin/ranker/q2qsimilarity '''.format(q0, q1)
# print(cmd)
pro = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
outputs, errs = pro.communicate()
return outputs
def jsonWrite(d, file_path, indent=2):
with codecs.open(file_path, "w", "utf-8") as f:
json.dump(d,f,ensure_ascii=False, indent=indent)
def split_join(s):
return "".join(s.strip().split())
def get_q2q_file_sinle_pro(file_path, save_path, parallels=1, time_dealy=2):
# file_path is tokenized file
f = codecs.open(file_path, "r","utf-8")
results = []
pros = []
cnt = 0
while True:
s = f.readline()
if not s:
break
t = f.readline()
f.readline()
s = split_join(s)
t = split_join(t)
s = s.replace("SEQUENCE_END", "")
t = t.replace("SEQUENCE_END", "")
outputs = get_q2q_sim(s,t)
results.append({"source":s, "pred":t})
rj = json.loads(outputs.strip())
if str(rj["data"]["error"]) == "0":
results[cnt]["score"] = rj["data"]["score"]
if "score" not in results[cnt]:
results[cnt]["score"] = -1
cnt += 1
jsonWrite(results, save_path , indent=2)
def get_q2q_file(file_path, save_path, parallels=MP.cpu_count() - 2, time_dealy=2,
in_one_line=False, delimiter="\t", join_space=False):
# file_path is tokenized
f = codecs.open(file_path, "r","utf-8")
results = []
pool = MP.Pool(parallels)
pros = []
while True:
# handle two conditions:
# {s0}\t{s1} || {s0}\n{s1}\n
line0 = f.readline()
if not line0:
break
if in_one_line is True:
try:
s, t = line0.split(delimiter)
except Exception:
print("errors happen in {}".format(line0))
continue
else:
s = line0
t = f.readline()
f.readline()
s , t = s.strip(), t.strip()
if join_space:
s = split_join(s)
t = split_join(t)
s = s.replace("SEQUENCE_END", "")
t = t.replace("SEQUENCE_END", "")
pro = pool.apply_async( get_q2q_sim, args=(s,t,) )
pros.append(pro)
print("{}th process starts...".format(len(pros)))
results.append({"source":s, "predict":t})
if len(pros) % 3000 == 0:
print("waiting for {} secs".format(time_dealy))
time.sleep(time_dealy)
nums = len(pros)
for i, pro in enumerate(pros):
outputs = pro.get().strip()
try:
rj = json.loads(outputs.strip())
if "data" not in rj:
print("errors, {}".format(rj))
results[i]["score"] = -1
else:
if str(rj["data"]["error"]) == "0":
results[i]["score"] = rj["data"]["score"]
if "score" not in results[i]:
results[i]["score"] = -1
except Exception:
results[i]["score"] = -1
print(results[i])
traceback.print_exc()
if i and i % 100000 == 0:
print("finished {}".format(i/nums))
jsonWrite(results, save_path, indent=2)
jsonWrite(results,save_path,indent=2)
if __name__ == "__main__":
# get_q2q_sim("我爱中国", "我爱中华人民共和国")
parser = argparse.ArgumentParser()
parser.add_argument("file_path", type=str, help="model preidct path")
parser.add_argument("save_path", type=str, help="save result path")
parser.add_argument("--pnums", default=max(1, MP.cpu_count() - 5), type=int, help="parallels[cpu.count - 5]")
args = parser.parse_args()
get_q2q_file(args.file_path, args.save_path,parallels=args.pnums)
|
Python
| 0
|
@@ -3418,32 +3418,37 @@
sonWrite(results
+%5B0:i%5D
, save_path, ind
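Unescaped, the hunk changes only the periodic write inside the scoring loop so that just the already-scored prefix is flushed. The resulting block:

if i and i % 100000 == 0:
    print("finished {}".format(i/nums))
    jsonWrite(results[0:i], save_path, indent=2)  # was: jsonWrite(results, save_path, indent=2)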
|
6fec92df3eec71b3725ffeeee260d19b77aee309
|
Refactor get items method
|
classes/bucket.py
|
classes/bucket.py
|
from flask import jsonify
from modals.modals import BucketModal, ItemModal
class Bucket(object):
"""
Handles all bucket operations
"""
@staticmethod
def create_bucket(name, desc, user_id):
"""
Creates a new bucket
:param name:
:param desc:
:param user_id:
:return:
"""
if not name:
response = jsonify({'Error': 'Missing name'})
response.status_code = 400
return response
bucket = BucketModal(name=name, desc=desc, user_id=user_id)
if bucket.query.filter_by(name=name).first():
response = jsonify({'Error': 'Bucket name Already exists'})
response.status_code = 400
return response
bucket.save()
response = jsonify({
# 'Status': 'Successfully Added bucket',
# 'id': bucket.id
'id': bucket.id,
'name': bucket.name,
'desc': bucket.desc,
'date_added': bucket.date_added,
'user_id': bucket.user_id
})
response.status_code = 201
return response
@staticmethod
def get_buckets(user_id, search):
"""
Gets all buckets
:param user_id:
:param search:
:return:
"""
response = BucketModal.query.all()
if not response:
response = jsonify({'error': 'No bucketlist has been created'})
response.status_code = 200
return response
else:
if search:
res = [bucket for bucket in response if bucket.name
in search and bucket.user_id == user_id]
if not res:
response = jsonify({
'error': 'The bucket you searched does not exist'
})
return response
else:
bucketlist_data = []
for data in res:
final = {
'id': data.id,
'name': data.name,
'desc': data.desc,
'date_added': data.date_added,
'user_id': data.user_id
}
bucketlist_data.clear()
bucketlist_data.append(final)
response = jsonify(bucketlist_data)
response.status_code = 200
return response
else:
res = [bucket for bucket in
response if bucket.user_id == user_id]
bucketlist_data = []
if not res:
response = jsonify({
'error': 'No bucketlists have been created'
})
response.status_code = 200
return response
else:
for data in res:
final = {
'id': data.id,
'name': data.name,
'desc': data.desc,
'date_added': data.date_added,
'user_id': data.user_id
}
bucketlist_data.append(final)
response = jsonify(bucketlist_data)
response.status_code = 200
return response
@staticmethod
def get_single_bucket(user_id, bucket_id):
"""
Gets single bucket
:param user_id:
:param bucket_id:
"""
bucket = BucketModal.query.filter_by(id=bucket_id,
user_id=user_id).first()
if not bucket:
response = jsonify({
'error': 'bucketlist with id ' +
str(bucket_id) + ' not found'
})
response.status_code = 400
return response
bucket_data = {
'id': bucket.id,
'name': bucket.name,
'desc': bucket.desc,
'date_added': bucket.date_added,
'user_id': bucket.user_id
}
response = jsonify(bucket_data)
response.status_code = 200
return response
@staticmethod
def update_bucket(user_id, bucket_id, bucket_name, desc):
"""
Updates a bucket
:param user_id:
:param bucket_id:
:param bucket_name:
:param desc:
"""
if not bucket_name:
response = jsonify({'Error': 'Missing Bucket name'})
response.status_code = 400
return response
bucket = BucketModal.query.filter_by(id=bucket_id,
user_id=user_id).first()
if not bucket:
bucket = jsonify({'error': 'the bucket does not exist'})
bucket.status_code = 400
return bucket
bucket.name = bucket_name
bucket.desc = desc
bucket.update()
bucket = BucketModal.query.filter_by(id=bucket_id,
user_id=user_id).first()
response = jsonify({
'success': 'bucket updated',
'bucket': bucket.name
})
response.status_code = 200
return response
@staticmethod
def delete_bucket(user_id, bucket_id):
"""
Deletes a bucket
:param user_id:
:param bucket_id:
"""
bucket = BucketModal.query.filter_by(id=bucket_id,
user_id=user_id).first()
if not bucket:
response = jsonify({'error': 'Bucket not found'})
response.status_code = 400
return response
items = ItemModal.query.filter_by(bucket_id=bucket_id)
if items:
for item in items:
item.delete()
bucket.delete()
response = jsonify({
'success': 'bucket deleted'
})
response.status_code = 200
return response
|
Python
| 0
|
@@ -809,91 +809,8 @@
y(%7B%0A
- # 'Status': 'Successfully Added bucket',%0A # 'id': bucket.id%0A
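Unescaped, the hunk merely deletes the two commented-out lines from the jsonify payload in create_bucket. A sketch of the cleaned block:

response = jsonify({
    'id': bucket.id,
    'name': bucket.name,
    'desc': bucket.desc,
    'date_added': bucket.date_added,
    'user_id': bucket.user_id
})
response.status_code = 201
return response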
|
0dd7c69ded2491572885954c0665e58f8459eadd
|
Fix no attribute error in inference_on_dataset
|
detectron2/evaluation/evaluator.py
|
detectron2/evaluation/evaluator.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import datetime
import logging
import time
from collections import OrderedDict
from contextlib import contextmanager
import torch
from detectron2.utils.comm import is_main_process
from detectron2.utils.logger import log_every_n_seconds
class DatasetEvaluator:
"""
Base class for a dataset evaluator.
The function :func:`inference_on_dataset` runs the model over
all samples in the dataset, and have a DatasetEvaluator to process the inputs/outputs.
This class will accumulate information of the inputs/outputs (by :meth:`process`),
and produce evaluation results in the end (by :meth:`evaluate`).
"""
def reset(self):
"""
Preparation for a new round of evaluation.
Should be called before starting a round of evaluation.
"""
pass
def process(self, input, output):
"""
Process an input/output pair.
Args:
input: the input that's used to call the model.
output: the return value of `model(input)`
"""
pass
def evaluate(self):
"""
Evaluate/summarize the performance, after processing all input/output pairs.
Returns:
dict:
A new evaluator class can return a dict of arbitrary format
as long as the user can process the results.
In our train_net.py, we expect the following format:
* key: the name of the task (e.g., bbox)
* value: a dict of {metric name: score}, e.g.: {"AP50": 80}
"""
pass
class DatasetEvaluators(DatasetEvaluator):
def __init__(self, evaluators):
super().__init__()
self._evaluators = evaluators
def reset(self):
for evaluator in self._evaluators:
evaluator.reset()
def process(self, input, output):
for evaluator in self._evaluators:
evaluator.process(input, output)
def evaluate(self):
results = OrderedDict()
for evaluator in self._evaluators:
result = evaluator.evaluate()
if is_main_process() and result is not None:
for k, v in result.items():
assert (
k not in results
), "Different evaluators produce results with the same key {}".format(k)
results[k] = v
return results
def inference_on_dataset(model, data_loader, evaluator):
"""
Run model on the data_loader and evaluate the metrics with evaluator.
Also benchmark the inference speed of `model.forward` accurately.
The model will be used in eval mode.
Args:
model (nn.Module): a module which accepts an object from
`data_loader` and returns some outputs. It will be temporarily set to `eval` mode.
If you wish to evaluate a model in `training` mode instead, you can
wrap the given model and override its behavior of `.eval()` and `.train()`.
data_loader: an iterable object with a length.
The elements it generates will be the inputs to the model.
evaluator (DatasetEvaluator): the evaluator to run. Use `None` if you only want
to benchmark, but don't want to do any evaluation.
Returns:
The return value of `evaluator.evaluate()`
"""
num_devices = torch.distributed.get_world_size() if torch.distributed.is_initialized() else 1
logger = logging.getLogger(__name__)
logger.info("Start inference on {} images".format(len(data_loader)))
total = len(data_loader) # inference data loader must have a fixed length
if evaluator is None:
# create a no-op evaluator
evaluator = DatasetEvaluators([])
evaluator.reset()
num_warmup = min(5, total - 1)
start_time = time.perf_counter()
total_compute_time = 0
with inference_context(model), torch.no_grad():
for idx, inputs in enumerate(data_loader):
if idx == num_warmup:
start_time = time.perf_counter()
total_compute_time = 0
start_compute_time = time.perf_counter()
outputs = model(inputs)
if torch.cuda.is_available():
torch.cuda.synchronize()
total_compute_time += time.perf_counter() - start_compute_time
evaluator.process(inputs, outputs)
iters_after_start = idx + 1 - num_warmup * int(idx >= num_warmup)
seconds_per_img = total_compute_time / iters_after_start
if idx >= num_warmup * 2 or seconds_per_img > 5:
total_seconds_per_img = (time.perf_counter() - start_time) / iters_after_start
eta = datetime.timedelta(seconds=int(total_seconds_per_img * (total - idx - 1)))
log_every_n_seconds(
logging.INFO,
"Inference done {}/{}. {:.4f} s / img. ETA={}".format(
idx + 1, total, seconds_per_img, str(eta)
),
n=5,
)
# Measure the time only for this worker (before the synchronization barrier)
total_time = time.perf_counter() - start_time
total_time_str = str(datetime.timedelta(seconds=total_time))
# NOTE this format is parsed by grep
logger.info(
"Total inference time: {} ({:.6f} s / img per device, on {} devices)".format(
total_time_str, total_time / (total - num_warmup), num_devices
)
)
total_compute_time_str = str(datetime.timedelta(seconds=int(total_compute_time)))
logger.info(
"Total inference pure compute time: {} ({:.6f} s / img per device, on {} devices)".format(
total_compute_time_str, total_compute_time / (total - num_warmup), num_devices
)
)
results = evaluator.evaluate()
# An evaluator may return None when not in main process.
# Replace it by an empty dict instead to make it easier for downstream code to handle
if results is None:
results = {}
return results
@contextmanager
def inference_context(model):
"""
A context where the model is temporarily changed to eval mode,
and restored to previous mode afterwards.
Args:
model: a torch Module
"""
training_mode = model.training
model.eval()
yield
model.train(training_mode)
|
Python
| 0.011181
|
@@ -228,16 +228,32 @@
m import
+ get_world_size,
is_main
@@ -3451,87 +3451,24 @@
s =
-torch.distributed.get_world_size() if torch.distributed.is_initialized() else 1
+get_world_size()
%0A
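Unescaped, the two hunks import get_world_size alongside is_main_process and call it instead of touching torch.distributed directly. The changed lines:

from detectron2.utils.comm import get_world_size, is_main_process

# inside inference_on_dataset:
num_devices = get_world_size()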
|
9a83ec4c80bec0cec45904a8998cd82a99a9b1b2
|
Save `resources` as extra data in its entirety
|
social_core/backends/atlassian.py
|
social_core/backends/atlassian.py
|
from social_core.backends.oauth import BaseOAuth2
class AtlassianOAuth2(BaseOAuth2):
name = 'atlassian'
AUTHORIZATION_URL = 'https://accounts.atlassian.com/authorize'
ACCESS_TOKEN_METHOD = 'POST'
ACCESS_TOKEN_URL = 'https://api.atlassian.com/oauth/token'
DEFAULT_SCOPE = ['read:jira-user', 'offline_access']
ID_KEY = 'accountId'
EXTRA_DATA = [
('resource_ids', 'resource_ids'),
('refresh_token', 'refresh_token'),
('expires_in', 'expires_in'),
]
def auth_params(self, state=None):
params = super(AtlassianOAuth2, self).auth_params(state)
params.update({'audience': 'api.atlassian.com',
'prompt': 'consent'})
return params
def get_user_details(self, response):
fullname, first_name, last_name = self.get_user_names(response['displayName'])
return {'username': response['name'],
'email': response['emailAddress'],
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
def user_data(self, access_token, *args, **kwargs):
resources = self.get_json('https://api.atlassian.com/oauth/token/accessible-resources',
headers={'Authorization': 'Bearer {}'.format(access_token)})
resource_ids = [resource['id'] for resource in resources]
user_info = self.get_json('https://api.atlassian.com/ex/jira/{}/rest/api/2/myself'.format(resource_ids[0]),
headers={'Authorization': 'Bearer {}'.format(access_token)})
user_info['resource_ids'] = resource_ids
return user_info
|
Python
| 0.000001
|
@@ -385,19 +385,16 @@
resource
-_id
s', 'res
@@ -398,19 +398,16 @@
resource
-_id
s'),%0A
@@ -1318,74 +1318,8 @@
)%7D)%0A
- resource_ids = %5Bresource%5B'id'%5D for resource in resources%5D%0A
@@ -1424,14 +1424,17 @@
urce
-_id
s%5B0
+%5D%5B'id'
%5D),%0A
@@ -1555,19 +1555,16 @@
resource
-_id
s'%5D = re
@@ -1573,11 +1573,8 @@
urce
-_id
s%0A
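Unescaped, the hunks rename the stored key from resource_ids to resources, drop the intermediate id list, and save the full accessible-resources response. A sketch of the patched pieces:

EXTRA_DATA = [
    ('resources', 'resources'),
    ('refresh_token', 'refresh_token'),
    ('expires_in', 'expires_in'),
]

def user_data(self, access_token, *args, **kwargs):
    resources = self.get_json('https://api.atlassian.com/oauth/token/accessible-resources',
                              headers={'Authorization': 'Bearer {}'.format(access_token)})
    user_info = self.get_json('https://api.atlassian.com/ex/jira/{}/rest/api/2/myself'.format(resources[0]['id']),
                              headers={'Authorization': 'Bearer {}'.format(access_token)})
    user_info['resources'] = resources
    return user_info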
|
ad1fe8f7f636d8bf5bb92599b37ac8aa7849596e
|
Add small test
|
tests/grab_transport.py
|
tests/grab_transport.py
|
import pickle
import os
import sys
from test_server import Response
from tests.util import BaseGrabTestCase, only_grab_transport, temp_dir
from grab import Grab
from grab.error import GrabMisuseError
FAKE_TRANSPORT_CODE = """
from grab.transport.curl import CurlTransport
class FakeTransport(CurlTransport):
pass
"""
def get_fake_transport_class():
from grab.transport.curl import ( # pylint: disable=import-outside-toplevel
CurlTransport,
)
class FakeTransport(CurlTransport):
pass
return FakeTransport
def get_fake_transport_instance():
return get_fake_transport_class()()
def get_curl_transport_instance():
from grab.transport.curl import ( # pylint: disable=import-outside-toplevel
CurlTransport,
)
return CurlTransport()
class TestTransportTestCase(BaseGrabTestCase):
def assert_transport_response(self, transport, response):
self.server.add_response(Response(data=response), count=2)
grab = Grab(transport=transport)
grab.go(self.server.get_url())
self.assertEqual(grab.doc.body, response)
grab2 = grab.clone()
grab2.go(self.server.get_url())
self.assertEqual(grab2.doc.body, response)
def assert_transport_pickle(self, transport, response):
grab = Grab(transport=transport)
grab2 = grab.clone()
grab2_data = pickle.dumps(grab2, pickle.HIGHEST_PROTOCOL)
grab3 = pickle.loads(grab2_data)
grab3.go(self.server.get_url())
self.assertEqual(grab3.doc.body, response)
@only_grab_transport("pycurl")
def test_transport_option_as_string_curl(self):
self.assert_transport_response("grab.transport.curl.CurlTransport", b"XYZ")
@only_grab_transport("pycurl")
def test_transport_option_as_string_fake(self):
with temp_dir() as dir_:
sys.path.insert(0, dir_)
with open(os.path.join(dir_, "foo.py"), "w", encoding="utf-8") as out:
out.write(FAKE_TRANSPORT_CODE)
self.assert_transport_response("foo.FakeTransport", b"XYZ")
sys.path.remove(dir_)
@only_grab_transport("pycurl")
def test_transport_option_as_class_curl(self):
from grab.transport.curl import ( # pylint: disable=import-outside-toplevel
CurlTransport,
)
self.assert_transport_response(CurlTransport, b"XYZ")
@only_grab_transport("pycurl")
def test_transport_option_as_class_fake(self):
fake_transport_cls = get_fake_transport_class()
self.assert_transport_response(fake_transport_cls, b"XYZ")
@only_grab_transport("pycurl")
def test_transport_option_as_function_curl(self):
self.assert_transport_response(get_curl_transport_instance, b"XYZ")
@only_grab_transport("pycurl")
def test_transport_option_as_function_fake(self):
self.assert_transport_response(get_fake_transport_instance, b"XYZ")
def test_invalid_transport_invalid_alias(self):
with self.assertRaises(GrabMisuseError):
Grab(transport="zzzzzzzzzz").go(self.server.get_url())
def test_invalid_transport_invalid_path(self):
# AttributeError comes from setup_transport method
with self.assertRaises(AttributeError):
Grab(transport="tests.grab_transport.ZZZ").go(self.server.get_url())
def test_invalid_transport_not_collable_or_string(self):
with self.assertRaises(GrabMisuseError):
Grab(transport=13).go(self.server.get_url())
|
Python
| 0.00005
|
@@ -3490,28 +3490,364 @@
).go(self.server.get_url())%0A
+%0A def test_setup_transport_twice(self):%0A transport = %22grab.transport.curl.CurlTransport%22%0A grab = Grab()%0A grab.setup_transport(transport)%0A with self.assertRaises(GrabMisuseError) as ex:%0A grab.setup_transport(transport)%0A self.assertTrue(%22Transport is already set up%22 in str(ex.exception))%0A
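Unescaped, the hunk appends one new test, contained verbatim in the escaped diff above:

def test_setup_transport_twice(self):
    transport = "grab.transport.curl.CurlTransport"
    grab = Grab()
    grab.setup_transport(transport)
    with self.assertRaises(GrabMisuseError) as ex:
        grab.setup_transport(transport)
    self.assertTrue("Transport is already set up" in str(ex.exception))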
|
58d68447839adfb9881a65837d1cd4e171ad22a1
|
fix strange error message (fix #708)
|
tensorpack/utils/develop.py
|
tensorpack/utils/develop.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: develop.py
# Author: tensorpack contributors
""" Utilities for developers only.
These are not visible to users (not automatically imported), and should not
appear in docs."""
import os
import functools
from datetime import datetime
import importlib
import types
from . import logger
def create_dummy_class(klass, dependency):
"""
When a dependency of a class is not available, create a dummy class which throws ImportError when used.
Args:
klass (str): name of the class.
dependency (str): name of the dependency.
Returns:
class: a class object
"""
class _Dummy(object):
def __init__(self, *args, **kwargs):
raise ImportError("Cannot import '{}', therefore '{}' is not available".format(dependency, klass))
return _Dummy
def create_dummy_func(func, dependency):
"""
When a dependency of a function is not available, create a dummy function which throws ImportError when used.
Args:
func (str): name of the function.
dependency (str or list[str]): name(s) of the dependency.
Returns:
function: a function object
"""
if isinstance(dependency, (list, str)):
dependency = ','.join(dependency)
def _dummy(*args, **kwargs):
raise ImportError("Cannot import '{}', therefore '{}' is not available".format(dependency, func))
return _dummy
def building_rtfd():
"""
Returns:
bool: if tensorpack is being imported to generate docs now.
"""
return os.environ.get('READTHEDOCS') == 'True' \
or os.environ.get('DOC_BUILDING')
def log_deprecated(name="", text="", eos=""):
"""
Log deprecation warning.
Args:
name (str): name of the deprecated item.
text (str, optional): information about the deprecation.
eos (str, optional): end of service date such as "YYYY-MM-DD".
"""
assert name or text
if eos:
eos = "after " + datetime(*map(int, eos.split("-"))).strftime("%d %b")
if name:
if eos:
warn_msg = "%s will be deprecated %s. %s" % (name, eos, text)
else:
warn_msg = "%s was deprecated. %s" % (name, text)
else:
warn_msg = text
if eos:
warn_msg += " Legacy period ends %s" % eos
logger.warn("[Deprecated] " + warn_msg)
def deprecated(text="", eos=""):
"""
Args:
text, eos: same as :func:`log_deprecated`.
Returns:
a decorator which deprecates the function.
Example:
.. code-block:: python
@deprecated("Explanation of what to do instead.", "2017-11-4")
def foo(...):
pass
"""
def get_location():
import inspect
frame = inspect.currentframe()
if frame:
callstack = inspect.getouterframes(frame)[-1]
return '%s:%i' % (callstack[1], callstack[2])
else:
stack = inspect.stack(0)
entry = stack[2]
return '%s:%i' % (entry[1], entry[2])
def deprecated_inner(func):
@functools.wraps(func)
def new_func(*args, **kwargs):
name = "{} [{}]".format(func.__name__, get_location())
log_deprecated(name, text, eos)
return func(*args, **kwargs)
return new_func
return deprecated_inner
def HIDE_DOC(func):
func.__HIDE_SPHINX_DOC__ = True
return func
# Copied from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/util/lazy_loader.py
class LazyLoader(types.ModuleType):
def __init__(self, local_name, parent_module_globals, name):
self._local_name = local_name
self._parent_module_globals = parent_module_globals
super(LazyLoader, self).__init__(name)
def _load(self):
# Import the target module and insert it into the parent's namespace
module = importlib.import_module(self.__name__)
self._parent_module_globals[self._local_name] = module
# Update this object's dict so that if someone keeps a reference to the
# LazyLoader, lookups are efficient (__getattr__ is only called on lookups
# that fail).
self.__dict__.update(module.__dict__)
return module
def __getattr__(self, item):
module = self._load()
return getattr(module, item)
def __dir__(self):
module = self._load()
return dir(module)
|
Python
| 0
|
@@ -1228,19 +1228,21 @@
(list,
-str
+tuple
)):%0A
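Decoded, the one-word change swaps str for tuple in the isinstance check, so a bare dependency string is no longer fed to ','.join character by character (the source of the strange message in #708). A rough sketch of the resulting branch:

def create_dummy_func(func, dependency):
    # Join only real sequences of dependency names; a single string is kept as-is.
    if isinstance(dependency, (list, tuple)):
        dependency = ','.join(dependency)

    def _dummy(*args, **kwargs):
        raise ImportError("Cannot import '{}', therefore '{}' is not available".format(dependency, func))
    return _dummy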
|
4703caba8a8c98844b9cc63cff97f8a253ef5964
|
rename test case 10.3.4.1.1.2 to WINFF.FT.S.REG.2 (#115)
|
src/harness/testcases/registration_testcase.py
|
src/harness/testcases/registration_testcase.py
|
# Copyright 2016 SAS Project Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import unittest
import sas
from util import winnforum_testcase
class RegistrationTestcase(unittest.TestCase):
def setUp(self):
self._sas, self._sas_admin = sas.GetTestingSas()
self._sas_admin.Reset()
def tearDown(self):
pass
@winnforum_testcase
def test_WINFF_FT_S_REG_1(self):
"""New Multi-Step registration for CBSD Cat A (No existing CBSD ID).
The response should be SUCCESS.
"""
# Pre-load conditional parameters
device_a = json.load(
open(os.path.join('testcases', 'testdata', 'device_a.json')))
conditionals = {'registrationData': [
{'cbsdCategory': 'A',
'fccId': device_a['fccId'],
'cbsdSerialNumber': device_a['cbsdSerialNumber'],
'airInterface': device_a['airInterface'],
'installationParam': device_a['installationParam']}
]}
self._sas_admin.InjectFccId({'fccId': device_a['fccId']})
self._sas_admin.PreloadRegistrationData(conditionals)
# Register the device
del device_a['cbsdCategory']
del device_a['airInterface']
del device_a['installationParam']
request = {'registrationRequest': [device_a]}
response = self._sas.Registration(request)['registrationResponse'][0]
# Check registration response
self.assertTrue('cbsdId' in response)
self.assertFalse('measReportConfig' in response)
self.assertEqual(response['response']['responseCode'], 0)
@winnforum_testcase
def test_10_3_4_1_1_2(self):
"""New Multi-Step registration for CBSD Cat B (No existing CBSD ID).
The response should be SUCCESS.
"""
# Pre-load conditional parameters
device_b = json.load(
open(os.path.join('testcases', 'testdata', 'device_b.json')))
conditionals = {'registrationData': [
{'cbsdCategory': 'B', 'fccId': device_b['fccId'],
'cbsdSerialNumber': device_b['cbsdSerialNumber'],
'airInterface': device_b['airInterface'],
'installationParam': device_b['installationParam']}
]}
self._sas_admin.InjectFccId({'fccId': device_b['fccId']})
self._sas_admin.PreloadRegistrationData(conditionals)
# Register the device
del device_b['cbsdCategory']
del device_b['airInterface']
del device_b['installationParam']
request = {'registrationRequest': [device_b]}
response = self._sas.Registration(request)['registrationResponse'][0]
# Check registration response
self.assertTrue('cbsdId' in response)
self.assertFalse('measReportConfig' in response)
self.assertEqual(response['response']['responseCode'], 0)
@winnforum_testcase
def test_WINFF_FT_S_REG_6(self):
""" Single-Step registration (Cat A CBSD with no existing CBSD ID)
The response should be SUCCESS.
"""
# Register the device
device_a = json.load(
open(os.path.join('testcases', 'testdata', 'device_a.json')))
device_a['measCapability'] = []
self._sas_admin.InjectFccId({'fccId': device_a['fccId']})
request = {'registrationRequest': [device_a]}
response = self._sas.Registration(request)['registrationResponse'][0]
# Check registration response
self.assertTrue('cbsdId' in response)
self.assertFalse('measReportConfig' in response)
self.assertEqual(response['response']['responseCode'], 0)
@winnforum_testcase
def test_10_3_4_2_1(self):
"""CBSD registration request with missing required parameter.
The required parameter 'userId' is missing in a registration request,
the response should be FAIL.
"""
# Register the device, make sure at least one required parameter is missing
device_a = json.load(
open(os.path.join('testcases', 'testdata', 'device_a.json')))
self._sas_admin.InjectFccId({'fccId': device_a['fccId']})
del device_a['userId']
request = {'registrationRequest': [device_a]}
response = self._sas.Registration(request)['registrationResponse'][0]
# Check registration response
self.assertEqual(response['response']['responseCode'], 102)
@winnforum_testcase
def test_WINFF_FT_S_REG_12(self):
"""Pending registration for Cat A CBSD (responseCode 200)
The response should be FAILURE.
"""
# Register the device
device_a = json.load(
open(os.path.join('testcases', 'testdata', 'device_a.json')))
self._sas_admin.InjectFccId({'fccId': device_a['fccId']})
# Make sure one conditional parameter is missing
del device_a['installationParam']['heightType']
request = {'registrationRequest': [device_a]}
response = self._sas.Registration(request)['registrationResponse'][0]
# Check registration response
self.assertFalse('cbsdId' in response)
self.assertEqual(response['response']['responseCode'], 200)
@winnforum_testcase
def test_10_3_4_2_5_1(self):
"""CBSD registration request with invalid required parameter.
The value of required parameter 'fccId' is invalid(exceeds its max
length) in the registration request, the response should be FAIL.
"""
# Register the device, make sure at least one required parameter is invalid
device_a = json.load(
open(os.path.join('testcases', 'testdata', 'device_a.json')))
self._sas_admin.InjectFccId({'fccId': device_a['fccId']})
device_a['fccId'] = 'abcdefghijklmnopqrstuvwxyz'
request = {'registrationRequest': [device_a]}
response = self._sas.Registration(request)['registrationResponse'][0]
# Check registration response
self.assertFalse('cbsdId' in response)
self.assertEqual(response['response']['responseCode'], 103)
@winnforum_testcase
def test_10_3_4_2_5_3(self):
"""CBSD registration request with invalid conditional parameter.
The value of conditional parameter 'radioTechnology' of airInterface
object is invalid in the registration request, the response should be FAIL.
"""
# Register the device, make sure at least one conditional parameter is
# invalid
device_a = json.load(
open(os.path.join('testcases', 'testdata', 'device_a.json')))
self._sas_admin.InjectFccId({'fccId': device_a['fccId']})
device_a['airInterface']['radioTechnology'] = 'invalid value'
request = {'registrationRequest': [device_a]}
response = self._sas.Registration(request)['registrationResponse'][0]
# Check registration response
self.assertFalse('cbsdId' in response)
self.assertEqual(response['response']['responseCode'], 103)
|
Python
| 0.000163
|
@@ -2103,18 +2103,22 @@
est_
-10_3_4_1_1
+WINFF_FT_S_REG
_2(s
|
e66178cc0521426036d4c9166bf76e9379bc62ef
|
disable Run tests temporarily
|
cloudrun/tests.py
|
cloudrun/tests.py
|
import pytest
import uuid
from .cloudrun import Cloudrun
from .run import Run
token = uuid.uuid4().hex
id = uuid.uuid4().hex
def test_cloudrun_init():
assert type(Cloudrun(token)) is Cloudrun
assert Cloudrun(token).token == token
def test_run_init():
assert type(Run(token,id)) is Run
assert Run(token,id).token == token
assert Run(token,id).id == id
def test_cloudrun_get_run_returns_run():
assert type(Cloudrun(token).get_run(id)) is Run
|
Python
| 0.000001
|
@@ -234,16 +234,17 @@
token%0A%0A
+#
def test
@@ -248,32 +248,33 @@
est_run_init():%0A
+#
assert type(
@@ -295,16 +295,17 @@
is Run%0A
+#
asse
@@ -336,16 +336,17 @@
= token%0A
+#
asse
@@ -372,16 +372,17 @@
== id%0A%0A
+#
def test
@@ -410,24 +410,25 @@
urns_run():%0A
+#
assert t
|
cf04dded9623659ed041faf61ffe5568fec97021
|
Add more to gjf and mol2 tests
|
chem/tests.py
|
chem/tests.py
|
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.utils import simplejson
import views
from models import ErrorReport
class MainPageTestCase(TestCase):
names = ["24a_TON", "24b_TSP_24a_24a", "CON_24a"]
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse("chem_index"))
self.assertEqual(response.status_code, 200)
def test_index_redirect(self):
for name in self.names:
url = reverse("chem_index") + "?molecule=%s" % name
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_index_redirect(self):
for name in self.names:
for keywords in ["opt HF/6-31g(d)", "td b3lyp/6-31g(d)"]:
params = "?molecule=%s&keywords=%s" % (name, keywords)
url = reverse("chem_index") + params
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_molecule_detail(self):
for name in self.names:
response = self.client.get(reverse(views.molecule_detail, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_molecule_gjf(self):
for name in self.names:
response = self.client.get(reverse(views.write_gjf, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_molecule_mol2(self):
for name in self.names:
response = self.client.get(reverse(views.write_mol2, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_molecule_mol2(self):
for name in self.names:
response = self.client.get(reverse(views.write_png, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_multi_molecule(self):
names = ",".join(self.names)
response = self.client.get(reverse(views.multi_molecule, args=(names, )))
self.assertEqual(response.status_code, 200)
def test_multi_molecule_zip(self):
names = ",".join(self.names)
response = self.client.get(reverse(views.multi_molecule_zip, args=(names, )))
self.assertEqual(response.status_code, 200)
def test_write_gjf(self):
for name in self.names:
response = self.client.get(reverse(views.write_gjf, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_write_mol2(self):
for name in self.names:
response = self.client.get(reverse(views.write_mol2, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_write_png(self):
for name in self.names:
response = self.client.get(reverse(views.write_png, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_multi_job(self):
response = self.client.get(reverse(views.multi_job))
self.assertEqual(response.status_code, 200)
def test_molecule_check(self):
for name in self.names:
response = self.client.get(reverse(views.molecule_check, args=(name, )))
self.assertEqual(response.status_code, 200)
def test_molecule_check_specific(self):
names = [
("24ball_TON", "no rgroups allowed"),
("AA_TON", "can not attach to end"),
("A_TOO", "(1, 'Bad Core Name')"),
]
for name, error in names:
response = self.client.get(reverse(views.molecule_check, args=(name, )))
values = simplejson.loads(response.content)["molecules"]
self.assertEqual(values[0][2], error)
def test_report_molecule(self):
names = [
"24242424242a_TON",
"25252525252a_TON",
"26262626262a_TON",
]
data = {
"email": "something@test.com",
"message": "something something something something"
}
for name in names:
for i in xrange(3):
data["urgency"] = i
response = self.client.get(reverse(views.molecule_check, args=(name, )))
values = simplejson.loads(response.content)["molecules"]
self.assertFalse(values[0][1])
response = self.client.post(reverse(views.report, args=(name, )), data)
self.assertEqual(response.status_code, 302)
response = self.client.get(reverse(views.molecule_check, args=(name, )))
values = simplejson.loads(response.content)["molecules"]
self.assertTrue(values[0][1])
obj = ErrorReport.objects.get(molecule=name)
obj.delete()
|
Python
| 0
|
@@ -2531,207 +2531,732 @@
00)%0A
-%0A def test_write_mol2(self):%0A for name in self.names:%0A response = self.client.get(reverse(views.write_mol2, args=(name, )))%0A self.assertEqual(response.status_code, 200
+ self.assertEqual(response.get('Content-Disposition'),%0A %22attachment; filename=%25s.gjf%22 %25 name)%0A string = %22%25%25nprocshared=16%5Cn%25%25mem=59GB%5Cn%25%25chk=%25s.chk%5Cn# opt B3LYP/6-31g(d) geom=connectivity%22%0A self.assertTrue(response.content.startswith(string %25 name))%0A%0A def test_write_mol2(self):%0A for name in self.names:%0A response = self.client.get(reverse(views.write_mol2, args=(name, )))%0A self.assertEqual(response.status_code, 200)%0A self.assertEqual(response.get('Content-Disposition'),%0A %22attachment; filename=%25s.mol2%22 %25 name)%0A string = %22@%3CTRIPOS%3EMOLECULE%22%0A self.assertTrue(response.content.startswith(string)
)%0A%0A
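Decoded and merged with the surrounding lines of the original file, the two tests end up roughly as follows (whitespace reconstructed from the escaped hunk):

    def test_write_gjf(self):
        for name in self.names:
            response = self.client.get(reverse(views.write_gjf, args=(name, )))
            self.assertEqual(response.status_code, 200)
            self.assertEqual(response.get('Content-Disposition'),
                             "attachment; filename=%s.gjf" % name)
            string = "%%nprocshared=16\n%%mem=59GB\n%%chk=%s.chk\n# opt B3LYP/6-31g(d) geom=connectivity"
            self.assertTrue(response.content.startswith(string % name))

    def test_write_mol2(self):
        for name in self.names:
            response = self.client.get(reverse(views.write_mol2, args=(name, )))
            self.assertEqual(response.status_code, 200)
            self.assertEqual(response.get('Content-Disposition'),
                             "attachment; filename=%s.mol2" % name)
            string = "@<TRIPOS>MOLECULE"
            self.assertTrue(response.content.startswith(string))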
|
a61ed6052ad69a2d62a2b3e42c6769131382118f
|
Fix other import
|
tests/nlu/test_utils.py
|
tests/nlu/test_utils.py
|
import io
import os
import pickle
import pytest
import tempfile
import shutil
from typing import Text
from rasa.shared.exceptions import RasaException
import rasa.shared.nlu.training_data.message
from rasa.nlu.tokenizers.convert_tokenizer import RESTRICTED_ACCESS_URL
import rasa.shared.utils.io
import rasa.utils.io as io_utils
from rasa.nlu import utils
from pathlib import Path
@pytest.fixture(scope="function")
def empty_model_dir():
temp_path = tempfile.mkdtemp()
yield temp_path
if os.path.exists(temp_path):
shutil.rmtree(temp_path)
@pytest.fixture
def fake_model_dir(empty_model_dir):
metadata_file = "metadata.json"
metadata_content = {"pipeline": "pretrained_embeddings_spacy", "language": "en"}
metadata_path = os.path.join(empty_model_dir, metadata_file)
utils.write_json_to_file(metadata_path, metadata_content)
fake_obj = {"Fake", "model"}
fake_obj_path = os.path.join(empty_model_dir, "component.pkl")
with open(fake_obj_path, "wb") as f:
pickle.dump(fake_obj, f)
return empty_model_dir # not empty anymore ;)
def test_relative_normpath():
test_file = "/my/test/path/file.txt"
assert utils.relative_normpath(test_file, "/my/test") == Path("path/file.txt")
assert utils.relative_normpath(None, "/my/test") is None
def test_list_files_invalid_resource():
with pytest.raises(ValueError) as execinfo:
rasa.shared.utils.io.list_files(None)
assert "must be a string type" in str(execinfo.value)
def test_list_files_non_existing_dir():
with pytest.raises(ValueError) as execinfo:
rasa.shared.utils.io.list_files("my/made_up/path")
assert "Could not locate the resource" in str(execinfo.value)
def test_list_files_ignores_hidden_files(tmpdir):
# create a hidden file
open(os.path.join(tmpdir.strpath, ".hidden"), "a").close()
# create a normal file
normal_file = os.path.join(tmpdir.strpath, "normal_file")
open(normal_file, "a").close()
assert rasa.shared.utils.io.list_files(tmpdir.strpath) == [normal_file]
def test_creation_of_existing_dir(tmpdir):
# makes sure there is no exception
assert rasa.shared.utils.io.create_directory(tmpdir.strpath) is None
def test_empty_is_model_dir(empty_model_dir):
assert utils.is_model_dir(empty_model_dir)
def test_non_existent_folder_is_no_model_dir():
assert not utils.is_model_dir("nonexistent_for_sure_123/")
def test_data_folder_is_no_model_dir():
assert not utils.is_model_dir("data/")
def test_model_folder_is_model_dir(fake_model_dir):
assert utils.is_model_dir(fake_model_dir)
def test_remove_model_empty(empty_model_dir):
assert utils.remove_model(empty_model_dir)
def test_remove_model_with_files(fake_model_dir):
assert utils.remove_model(fake_model_dir)
def test_remove_model_invalid(empty_model_dir):
test_file = "something.else"
test_content = "Some other stuff"
test_file_path = os.path.join(empty_model_dir, test_file)
utils.write_to_file(test_file_path, test_content)
with pytest.raises(RasaException):
utils.remove_model(empty_model_dir)
os.remove(test_file_path)
@pytest.mark.parametrize(
"url, result",
[
("a/b/c", False),
("a", False),
("https://192.168.1.1", True),
("http://192.168.1.1", True),
("https://google.com", True),
("https://www.google.com", True),
("http://google.com", True),
("http://www.google.com", True),
("http://www.google.com?foo=bar", True),
("http://a/b/c", True),
("http://localhost:5002/api/projects/default/models/tags/production", True),
("http://rasa-x:5002/api/projects/default/models/tags/production", True),
(
"http://rasa-x:5002/api/projects/default/models/tags/production?foo=bar",
True,
),
(RESTRICTED_ACCESS_URL, True),
("file:///some/path/file", True),
],
)
def test_is_url(url: Text, result: bool):
assert result == utils.is_url(url)
|
Python
| 0.000214
|
@@ -194,80 +194,8 @@
age%0A
-from rasa.nlu.tokenizers.convert_tokenizer import RESTRICTED_ACCESS_URL%0A
impo
@@ -3789,47 +3789,8 @@
),%0A
- (RESTRICTED_ACCESS_URL, True),%0A
|
5af29cfa071360265b1c31538f89e806ae4eabc4
|
Fix #142: Testrunner and SOUTH_TESTS_MIGRATE broken on 1.1.
|
south/management/commands/test.py
|
south/management/commands/test.py
|
from django.core import management
from django.core.management.commands import test
from django.core.management.commands import syncdb
from django.conf import settings
class Command(test.Command):
def handle(self, *args, **kwargs):
if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE:
# point at the core syncdb command when creating tests
# tests should always be up to date with the most recent model structure
management.get_commands()
management._commands['syncdb'] = 'django.core'
super(Command, self).handle(*args, **kwargs)
|
Python
| 0.000001
|
@@ -162,16 +162,277 @@
ttings%0A%0A
+from syncdb import Command as SyncDbCommand%0A%0A%0Aclass MigrateAndSyncCommand(SyncDbCommand):%0A option_list = SyncDbCommand.option_list%0A for opt in option_list:%0A if %22--migrate%22 == opt.get_opt_string():%0A opt.default = True%0A break%0A%0A%0A
class Co
@@ -496,16 +496,50 @@
wargs):%0A
+ management.get_commands()%0A
@@ -779,46 +779,8 @@
ure%0A
- management.get_commands()%0A
@@ -834,16 +834,99 @@
o.core'%0A
+ else:%0A management._commands%5B'syncdb'%5D = MigrateAndSyncCommand()%0A
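Pieced together from the escaped hunks, the patched command ends up roughly like this (a reconstruction; imports and comments as in the original file, abridged):

from syncdb import Command as SyncDbCommand


class MigrateAndSyncCommand(SyncDbCommand):
    # syncdb, but with --migrate defaulting to True.
    option_list = SyncDbCommand.option_list
    for opt in option_list:
        if "--migrate" == opt.get_opt_string():
            opt.default = True
            break


class Command(test.Command):
    def handle(self, *args, **kwargs):
        management.get_commands()
        if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE:
            # point at the core syncdb command when creating tests
            management._commands['syncdb'] = 'django.core'
        else:
            management._commands['syncdb'] = MigrateAndSyncCommand()
        super(Command, self).handle(*args, **kwargs)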
|
61411f7eeda8d7f92f2624c2875828bb8f16598f
|
fix typo
|
commitizen/cli.py
|
commitizen/cli.py
|
import argparse
import logging
import sys
import warnings
from decli import cli
from commitizen import commands, config, out
logger = logging.getLogger(__name__)
data = {
"prog": "cz",
"description": (
"Commitizen is a cli tool to generate conventional commits.\n"
"For more information about the topic go to "
"https://conventionalcommits.org/"
),
"formatter_class": argparse.RawDescriptionHelpFormatter,
"arguments": [
{"name": "--debug", "action": "store_true", "help": "use debug mode"},
{
"name": ["-n", "--name"],
"help": "use the given commitizen (default: cz_conventional_commits)",
},
{
"name": ["--version"],
"action": "store_true",
"help": "get the version of the installed commitizen",
},
],
"subcommands": {
"title": "commands",
# TODO: Add this constraint back in 2.0
# "required": True,
"commands": [
{
"name": "ls",
"help": "show available commitizens",
"func": commands.ListCz,
},
{
"name": ["commit", "c"],
"help": "create new commit",
"func": commands.Commit,
"arguments": [
{
"name": ["--retry"],
"action": "store_true",
"help": "retry last commit",
},
{
"name": "--dry-run",
"action": "store_true",
"help": "show output to stdout, no commit, no modified files",
},
],
},
{
"name": "example",
"help": "show commit example",
"func": commands.Example,
},
{
"name": "info",
"help": "show information about the cz",
"func": commands.Info,
},
{"name": "schema", "help": "show commit schema", "func": commands.Schema},
{
"name": "bump",
"help": "bump semantic version based on the git log",
"func": commands.Bump,
"arguments": [
{
"name": "--dry-run",
"action": "store_true",
"help": "show output to stdout, no commit, no modified files",
},
{
"name": "--files-only",
"action": "store_true",
"help": "bump version in the files from the config",
},
{
"name": "--yes",
"action": "store_true",
"help": "accept automatically questions done",
},
{
"name": "--tag-format",
"help": (
"the format used to tag the commit and read it, "
"use it in existing projects, "
"wrap around simple quotes"
),
},
{
"name": "--bump-message",
"help": (
"template used to create the release commmit, "
"useful when working with CI"
),
},
{
"name": ["--prerelease", "-pr"],
"help": "choose type of prerelease",
"choices": ["alpha", "beta", "rc"],
},
{
"name": ["--increment"],
"help": "manually specify the desired increment",
"choices": ["MAJOR", "MINOR", "PATCH"],
},
],
},
{
"name": ["version"],
"help": (
"get the version of the installed commitizen or the current project"
" (default: installed commitizen)"
),
"func": commands.Version,
"arguments": [
{
"name": ["-p", "--project"],
"help": "get the version of the current project",
"action": "store_true",
"exclusive_group": "group1",
},
{
"name": ["-c", "--commitizen"],
"help": "get the version of the installed commitizen",
"action": "store_true",
"exclusive_group": "group1",
},
{
"name": ["-v", "--verbose"],
"help": (
"get the version of both the installed commitizen "
"and the current project"
),
"action": "store_true",
"exclusive_group": "group1",
},
],
},
{
"name": ["check"],
"help": "validates that a commit message matches the commitizen schema",
"func": commands.Check,
"arguments": [
{
"name": "--commit-msg-file",
"help": (
"ask for the name of the temporal file that contains "
"the commit message. "
"Using it in a git hook script: MSG_FILE=$1"
),
"exclusive_group": "group1",
},
{
"name": "--rev-range",
"help": ("a reange of git rev to check. e.g, master..HEAD"),
"exclusive_group": "group1",
},
],
},
{
"name": ["init"],
"help": "init commitizen configuration",
"func": commands.Init,
},
],
},
}
def main():
conf = config.read_cfg()
parser = cli(data)
# Show help if no arg provided
if len(sys.argv) == 1:
parser.print_help(sys.stderr)
raise SystemExit()
# This is for the command required constraint in 2.0
try:
args = parser.parse_args()
except TypeError:
out.error("Command is required")
raise SystemExit()
if args.name:
conf.update({"name": args.name})
elif not args.name and not conf.path:
conf.update({"name": "cz_conventional_commits"})
if args.version:
warnings.warn(
"'cz --version' will be deprecated in next major version. "
"Please use 'cz version' command from your scripts"
)
args.func = commands.Version
if args.debug:
warnings.warn(
"Debug will be deprecated in next major version. "
"Please remove it from your scripts"
)
logging.getLogger("commitizen").setLevel(logging.DEBUG)
# TODO: This try block can be removed after command is required in 2.0
# Handle the case that argument is given, but no command is provided
try:
args.func(conf, vars(args))()
except AttributeError:
out.error("Command is required")
raise SystemExit()
|
Python
| 0.000138
|
@@ -6158,14 +6158,12 @@
p%22:
-(
%22a r
-e
ange
@@ -6202,17 +6202,16 @@
r..HEAD%22
-)
,%0A
|
39874a0ddb65582a04ea32fa2b05bacc968f56f3
|
Update max-chunks-to-make-sorted-ii.py
|
Python/max-chunks-to-make-sorted-ii.py
|
Python/max-chunks-to-make-sorted-ii.py
|
# Time: O(nlogn)
# Space: O(n)
class Solution(object):
def maxChunksToSorted(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
def compare(i1, i2):
return arr[i1]-arr[i2] if arr[i1] != arr[i2] else i1-i2
idxs = [i for i in xrange(len(arr))]
result, max_i = 0, 0
for i, v in enumerate(sorted(idxs, cmp=compare)):
max_i = max(max_i, v)
if max_i == i:
result += 1
return result
|
Python
| 0.000001
|
@@ -26,16 +26,1085 @@
: O(n)%0A%0A
+# This question is the same as %22Max Chunks to Make Sorted%22%0A# except the integers of the given array are not necessarily distinct,%0A# the input array could be up to length 2000, and the elements could be up to 10**8.%0A#%0A# Given an array arr of integers (not necessarily distinct),%0A# we split the array into some number of %22chunks%22 (partitions),%0A# and individually sort each chunk.%0A# After concatenating them, the result equals the sorted array.%0A#%0A# What is the most number of chunks we could have made?%0A#%0A# Example 1:%0A#%0A# Input: arr = %5B5,4,3,2,1%5D%0A# Output: 1%0A# Explanation:%0A# Splitting into two or more chunks will not return the required result.%0A# For example, splitting into %5B5, 4%5D, %5B3, 2, 1%5D will result in %5B4, 5, 1, 2, 3%5D, which isn't sorted.%0A# Example 2:%0A#%0A# Input: arr = %5B2,1,3,4,4%5D%0A# Output: 4%0A# Explanation:%0A# We can split into two chunks, such as %5B2, 1%5D, %5B3, 4, 4%5D.%0A# However, splitting into %5B2, 1%5D, %5B3%5D, %5B4%5D, %5B4%5D is the highest number of chunks possible.%0A#%0A# Note:%0A# - arr will have length in range %5B1, 2000%5D.%0A# - arr%5Bi%5D will be an integer in range %5B0, 10**8%5D.%0A%0A
class So
|
5c20418b8e5f6dc033d1a7c515d30d5e9b026db5
|
Fix sampleproject view
|
sampleproject/bot/views.py
|
sampleproject/bot/views.py
|
from django.shortcuts import render
from django.conf import settings
from django_telegrambot.apps import DjangoTelegramBot
# Create your views here.
def index(request):
bot_list = DjangoTelegramBot.bots
context = {'bot_list': bot_list, 'update_mode':settings.TELEGRAM_BOT_MODE}
return render(request, 'bot/index.html', context)
|
Python
| 0.000001
|
@@ -265,16 +265,23 @@
ngs.
+DJANGO_
TELEGRAM
_BOT
@@ -280,17 +280,19 @@
GRAM
-_
BOT
-_
+%5B'
MODE
+'%5D
%7D%0A
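Decoded, the two small hunks change the settings lookup from an attribute to a dictionary key, roughly:

    # settings.TELEGRAM_BOT_MODE becomes a key of the DJANGO_TELEGRAMBOT dict:
    context = {'bot_list': bot_list, 'update_mode': settings.DJANGO_TELEGRAMBOT['MODE']}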
|
f0f31ea0a86620b77073b5da0dca386b337b98da
|
update prop2part tests
|
tests/prop2part_test.py
|
tests/prop2part_test.py
|
#!/usr/bin/env python
"""
Tests for abstract.prop2partition
"""
from tulip.abstract import prop2part
import tulip.polytope as pc
import numpy as np
def prop2part_test():
state_space = pc.Polytope.from_box(np.array([[0., 2.],[0., 2.]]))
cont_props = []
A = []
b = []
A.append(np.array([[1., 0.],
[-1., 0.],
[0., 1.],
[0., -1.]]))
b.append(np.array([[.5, 0., .5, 0.]]).T)
cont_props.append(pc.Polytope(A[0], b[0]))
A.append(np.array([[1., 0.],
[-1., 0.],
[0., 1.],
[0., -1.]]))
b.append(np.array([[2., -1.5, 2., -1.5]]).T)
cont_props.append(pc.Polytope(A[1], b[1]))
cont_props_dict = dict([("C"+str(i), pc.Polytope(A[i], b[i])) for i in range(2)])
mypartition = prop2part(state_space, cont_props_dict)
ref_adjacency = np.array([[1,0,1],[0,1,1],[1,1,1]])
assert np.all(mypartition.adj.todense() == ref_adjacency)
assert len(mypartition.regions) == 3
for reg in mypartition.regions[0:2]:
assert len(reg.props) == 2
assert len(reg.list_poly) == 1
i = [i for i in range(len(reg.props)) if reg.props[i] == 1]
assert len(i) == 1
i = i[0]
assert cont_props_dict == mypartition.cont_props
ref_V = pc.extreme(mypartition.cont_props)
ref_V = set([(v[0],v[1]) for v in ref_V.tolist()])
actual_V = pc.extreme(reg.list_poly[0])
actual_V = set([(v[0],v[1]) for v in actual_V.tolist()])
assert ref_V == actual_V
assert len(mypartition.regions[2].props) == 2
assert sum(mypartition.regions[2].props) == 0
assert len(mypartition.regions[2].list_poly) == 3
dum = state_space.copy()
for reg in mypartition.regions[0:2]:
dum = dum.diff(reg)
assert pc.is_empty(dum.diff(mypartition.regions[2]) )
assert pc.is_empty(mypartition.regions[2].diff(dum) )
|
Python
| 0
|
@@ -783,15 +783,9 @@
t =
-dict(%5B(
+%7B
%22C%22+
@@ -790,17 +790,18 @@
%22+str(i)
-,
+ :
pc.Poly
@@ -816,17 +816,16 @@
%5D, b%5Bi%5D)
-)
for i i
@@ -834,23 +834,17 @@
range(2)
-%5D)%0A
+%7D
%0A %0A
@@ -899,16 +899,44 @@
s_dict)%0A
+ print(mypartition)%0A %0A
ref_
@@ -1162,25 +1162,25 @@
g.props) ==
-2
+1
%0A ass
@@ -1219,424 +1219,67 @@
-i = %5Bi for i in range(len(reg.props)) if reg.props%5Bi%5D == 1%5D%0A assert len(i) == 1%0A i = i%5B0%5D%0A assert cont_props_dict == mypartition.cont_props%0A ref_V = pc.extreme(mypartition.cont_props)%0A ref_V = set(%5B(v%5B0%5D,v%5B1%5D) for v in ref_V.tolist()%5D)%0A actual_V = pc.extreme(reg.list_poly%5B0%5D)%0A actual_V = set(%5B(v%5B0%5D,v%5B1%5D) for v in actual_V.tolist()%5D)%0A assert ref_V == actual_V
+%0A assert cont_props_dict == mypartition.prop_regions
%0A
@@ -1336,59 +1336,14 @@
==
-2
+0
%0A
-assert sum(mypartition.regions%5B2%5D.props) == 0
%0A
|
81bbe22cd92ea834f059b963cf0d0127f2d45a19
|
Add SUSPENDED to new "error" status group.
|
core/constants.py
|
core/constants.py
|
""" Norc-specific constants.
Any constants required for the core execution of Norc
should be defined here if possible.
"""
# The maximum number of tasks an Executor is allowed to run at once.
CONCURRENCY_LIMIT = 4
# How often a scheduler can poll the database for new schedules.
SCHEDULER_PERIOD = 5
# How many new schedules the scheduler can pull from the database at once.
SCHEDULER_LIMIT = 10000
EXECUTOR_PERIOD = 0.5
# A list of all Task implementations.
TASK_MODELS = [] # NOTE: This is dynamically generated by MetaTask.
# A list of all AbstractInstance implementations.
INSTANCE_MODELS = [] # NOTE: This is dynamically generated by MetaInstance.
# How often hearts should beat, in seconds.
HEARTBEAT_PERIOD = 3
# How long a heart can go without beating before being considered failed.
# This has serious implications for how long before an error in the system
# is caught. If the number is too small, though, a slow database could
# cause failsafes to activate erroneously.
HEARTBEAT_FAILED = HEARTBEAT_PERIOD + 20
class MetaConstant(type):
"""Generates the NAMES attribute of the Status class."""
def __new__(cls, name, bases, dct):
"""Magical function to dynamically create NAMES and ALL."""
NAMES = {}
ALL = []
for k, v in dct.iteritems():
if type(v) == int:
assert not v in NAMES, "Can't have duplicate values."
NAMES[v] = k
ALL.append(v)
dct['NAMES'] = NAMES
dct['ALL'] = ALL
return type.__new__(cls, name, bases, dct)
def name(cls, item):
return cls.NAMES.get(item)
class Status(object):
"""Class to hold all status constants.
    The MetaConstant metaclass automatically generates a NAMES attribute which
    contains the reverse dict for retrieving a status name from its value.
The numbers should probably be moved further apart, but SUCCESS being
7 and FAILURE being 13 just seems so fitting...
"""
__metaclass__ = MetaConstant
# Transitive states.
CREATED = 1 # Created but nothing else.
RUNNING = 2 # Is currently running.
PAUSED = 3 # Currently paused.
STOPPING = 4 # In the process of stopping; should become ENDED.
SUSPENDED = 5 # Errors need addressing before a restart.
# Final states.
SUCCESS = 7 # Succeeded.
ENDED = 8 # Ended gracefully.
KILLED = 9 # Forcefully killed.
HANDLED = 12 # Was ERROR, but the problem's been handled.
# Failure states.
FAILURE = 13 # User defined failure (Task returned False).
ERROR = 14 # There was an error during execution.
TIMEDOUT = 15 # The execution timed out.
INTERRUPTED = 16 # Execution was interrupted before completion.
@staticmethod
def is_final(status):
"""Whether the given status counts as final."""
return status >= 7
@staticmethod
def is_failure(status):
"""Whether the given status counts as a failure."""
return status >= 13
@staticmethod
def GROUPS(name):
"""Used for accessing groups of Statuses by a string name."""
return {
"active": filter(lambda s: s < 7, Status.ALL),
"running": [Status.RUNNING],
"succeeded": filter(lambda s: s >= 7 and s < 13, Status.ALL),
"failed": filter(lambda s: s >= 13, Status.ALL),
"final": filter(lambda s: s >= 7, Status.ALL),
}.get(name.lower())
class Request(object):
""""""
__metaclass__ = MetaConstant
# Requests to change to a final state.
STOP = 1
KILL = 2
# Other features.
PAUSE = 7
RESUME = 8
RELOAD = 9
|
Python
| 0
|
@@ -3535,32 +3535,129 @@
7, Status.ALL),%0A
+ %22error%22: filter(lambda s: s %3E= 13, Status.ALL) +%0A %5BStatus.SUSPENDED%5D,%0A
%7D.get(na
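Decoded, the new dictionary entry groups every failure status plus SUSPENDED under the name "error":

            "error": filter(lambda s: s >= 13, Status.ALL) +
                     [Status.SUSPENDED],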
|
77cb3f0037dad2444560d8231e6ffb4f072e19f5
|
Remove Continue click after New
|
tests/steps/creation.py
|
tests/steps/creation.py
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals
from behave import step
from dogtail.rawinput import typeText
from dogtail.predicate import GenericPredicate
from time import sleep
from utils import get_showing_node_name
@step('Create new box "{name}" from "{item}" menuitem')
def create_machine_from_menuitem(context, name, item):
"""
Create new box, wait till it finishes and save its IP
"""
context.execute_steps(u"""
* Create new box from menu "%s"
* Press "Create"
* Wait for "sleep 1" end
* Hit "Enter"
* Wait for "sleep 1" end
* Hit "Enter"
* Wait for "sleep 1" end
* Hit "Enter"
* Save IP for machine "%s"
* Press "back" in "%s" vm
""" %(item, name, name))
@step('Create new box "{name}"')
def create_machine(context, name):
"""
Same as create_machine_from_menuitem except it assumes menu item and created box to have the same name.
"""
context.execute_steps(u"""
* Create new box "%s" from "%s" menuitem
""" %(name, name))
@step('Create new box from file "{location}"')
def create_new_vm_via_file(context, location):
path = location.split('/')
context.app.child('New').click()
context.app.child('Continue').click()
context.app.child('Select a file').click()
for item in path:
context.app.child(item).click()
context.app.child('Open').click()
@step('Create new box from url "{url}"')
def create_new_vm_via_url(context, url):
context.app.child('New').click()
context.app.child('Continue').click()
context.app.child('Enter URL').click()
typeText(url)
context.app.child('Continue').click()
if url.find('http') != -1:
half_minutes = 0
while half_minutes < 120:
half_minutes += 1
if context.app.findChild(
GenericPredicate(name='Choose express install to automatically '
'preconfigure the box with optimal settings.'),
retry=False,
requireResult=False):
return
create = context.app.child('Create')
if create.sensitive and create.showing:
create.click()
break
else:
sleep(30)
@step('Create new box from menu "{sys_name}"')
def create_new_vm_from_menu(context, sys_name):
context.app.child('New').click()
context.app.child('Continue').click()
get_showing_node_name(sys_name, context.app).click()
@step('Import machine "{name}" from image "{location}"')
def import_image(context, name, location):
context.execute_steps(u"""
* Create new box from file "%s"
* Press "Create"
* Save IP for machine "%s"
""" %(location, name))
@step('Initiate new box "{name}" installation from "{item}" menuitem')
def create_machine_from_menuitem_no_wait(context, name, item):
"""
Initiate new box installation but don't save its IP nor wait for it to be ready
"""
context.execute_steps(u"""
* Create new box from menu "%s"
* Press "Create"
* Wait for "sleep 1" end
* Hit "Enter"
* Wait for "sleep 1" end
* Hit "Enter"
* Press "back" in "%s" vm
""" %(item, name))
@step('Initiate new box "{name}" installation')
def create_machine_no_wait(context, name):
"""
Same as create_machine_from_menuitem_no_wait except it assumes menu item and created box to have the same name.
"""
context.execute_steps(u"""
* Initiate new box "%s" installation from "%s" menuitem
""" %(name, name))
|
Python
| 0
|
@@ -2449,50 +2449,8 @@
k()%0A
- context.app.child('Continue').click()%0A
|
d32b2494c1a72d040a651bbb2f0abb7a94c1d2db
|
remove stray line
|
tests/test-datatypes.py
|
tests/test-datatypes.py
|
"""Test datatypes."""
from statscraper.datatypes import Datatype
from statscraper import Dimension, DimensionValue
def test_allowed_values():
"""Datatypes shuold have allowed values."""
dt = Datatype("region")
assert("Ale kommun" in dt.allowed_values)
def test_b():
"""Dimension values should be translatable."""
d = Dimension("municipality", datatype="region", domain="sweden/municipalities")
dv = DimensionValue("Ale kommun", d)
assert(dv.translate("numerical") == "1440")
|
Python
| 0.002086
|
@@ -500,9 +500,8 @@
%221440%22)%0A
-%0A
|
e8cffceecf79b42790ccab1c61a2da06ae6529cd
|
comment no longer relevant. dealt with 2FA already
|
corehq/apps/sso/backends.py
|
corehq/apps/sso/backends.py
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain
from corehq.apps.sso.utils.user_helpers import get_email_domain_from_username
class SsoBackend(ModelBackend):
"""
Authenticates against an IdentityProvider and SAML2 session data.
"""
def authenticate(self, request, username, idp_slug, is_handshake_successful):
if not (request and username and idp_slug and is_handshake_successful):
return None
try:
identity_provider = IdentityProvider.objects.get(slug=idp_slug)
except IdentityProvider.DoesNotExist:
# not sure how we would even get here, but just in case
request.sso_login_error = f"Identity Provider {idp_slug} does not exist."
return None
if not identity_provider.is_active:
request.sso_login_error = f"This Identity Provider {idp_slug} is not active."
return None
email_domain = get_email_domain_from_username(username)
if not email_domain:
# not a valid username
request.sso_login_error = f"Username {username} is not valid."
return None
if not AuthenticatedEmailDomain.objects.filter(
email_domain=email_domain, identity_provider=identity_provider
).exists():
# if this user's email domain is not authorized by this identity provider,
# do not continue with authentication
request.sso_login_error = (
f"The Email Domain {email_domain} is not allowed to "
f"authenticate with this Identity Provider ({idp_slug})."
)
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# todo handle user creation based on information from request/session
# do this prior to handling the invite scenario and new user scenario
request.sso_login_error = f"User {username} does not exist."
return None
request.sso_login_error = None
# todo what happens with 2FA required here?
return user
|
Python
| 0
|
@@ -2166,60 +2166,8 @@
one%0A
- # todo what happens with 2FA required here?%0A
|
3607d23865e9a28f53c7b8d3ef38014533c831c5
|
check is no longer necessary with above refactor
|
corehq/util/es/interface.py
|
corehq/util/es/interface.py
|
import abc
import logging
import traceback
from django.conf import settings
from corehq.util.es.elasticsearch import bulk
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def _verify_is_alias(self, index_or_alias):
from corehq.elastic import ES_META
if settings.ENABLE_ES_INTERFACE_LOGGING:
logger = logging.getLogger('es_interface')
all_es_aliases = [index_info.alias for index_info in ES_META.values()]
if index_or_alias not in all_es_aliases:
logger.info("Found a use case where an index is queried instead of alias")
logger.info(traceback.format_stack())
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._disallowed_index_settings
}
}
return self.es.indices.put_settings(settings_dict, index=index)
def get_doc(self, index_alias, doc_type, doc_id):
self._verify_is_alias(index_alias)
doc = self.es.get_source(index_alias, doc_type, doc_id)
doc['_id'] = doc_id
return doc
def get_bulk_docs(self, index_alias, doc_type, doc_ids):
from corehq.elastic import ESError
self._verify_is_alias(index_alias)
docs = []
results = self.es.mget(
index=index_alias, doc_type=doc_type, body={'ids': doc_ids}, _source=True)
for doc_result in results['docs']:
if 'error' in doc_result:
raise ESError(doc_result['error'].get('reason', 'error doing bulk get'))
if doc_result['found']:
self._fix_hit(doc_result)
docs.append(doc_result['_source'])
return docs
def create_doc(self, index_alias, doc_type, doc_id, doc):
self._verify_is_alias(index_alias)
self.es.create(index_alias, doc_type, body=self._without_id_field(doc), id=doc_id)
def update_doc(self, index_alias, doc_type, doc_id, doc, params=None):
self._verify_is_alias(index_alias)
self.es.index(index_alias, doc_type, body=self._without_id_field(doc), id=doc_id,
params=params or {})
def update_doc_fields(self, index_alias, doc_type, doc_id, fields, params=None):
self._verify_is_alias(index_alias)
self.es.update(index_alias, doc_type, doc_id, body={"doc": self._without_id_field(fields)},
params=params or {})
@staticmethod
def _without_id_field(doc):
# Field [_id] is a metadata field and cannot be added inside a document.
# Use the index API request parameters.
return {key: value for key, value in doc.items() if key != '_id'}
def delete_doc(self, index_alias, doc_type, doc_id):
self._verify_is_alias(index_alias)
self.es.delete(index_alias, doc_type, doc_id)
def bulk_ops(self, actions, stats_only=False, **kwargs):
for action in actions:
if '_source' in action:
action['_source'] = self._without_id_field(action['_source'])
ret = bulk(self.es, actions, stats_only=stats_only, **kwargs)
return ret
def search(self, index_alias=None, doc_type=None, body=None, params=None, **kwargs):
self._verify_is_alias(index_alias)
results = self.es.search(index_alias, doc_type, body=body, params=params or {}, **kwargs)
self._fix_hits_in_results(results)
return results
def scroll(self, scroll_id=None, body=None, params=None, **kwargs):
results = self.es.scroll(scroll_id, body, params=params or {}, **kwargs)
self._fix_hits_in_results(results)
return results
@staticmethod
def _fix_hit(hit):
if '_source' in hit:
hit['_source']['_id'] = hit['_id']
def _fix_hits_in_results(self, results):
try:
hits = results['hits']['hits']
except KeyError:
return results
for hit in hits:
self._fix_hit(hit)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
_disallowed_index_settings = (
'max_result_window',
)
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_disallowed_index_settings = (
'merge.policy.merge_factor',
'store.throttle.max_bytes_per_sec',
'store.throttle.type',
)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
Python
| 0
|
@@ -846,219 +846,8 @@
s()%0A
- settings_dict = %7B%0A %22index%22: %7B%0A key: value for key, value in settings_dict%5B'index'%5D.items()%0A if key not in self._disallowed_index_settings%0A %7D%0A %7D%0A
@@ -4072,73 +4072,12 @@
-_disallowed_index_settings = (%0A 'max_result_window',%0A )
+pass
%0A%0A%0Ac
@@ -4146,156 +4146,12 @@
-_disallowed_index_settings = (%0A 'merge.policy.merge_factor',%0A 'store.throttle.max_bytes_per_sec',%0A 'store.throttle.type',%0A )
+pass
%0A%0A%0AE
|
52c3981b8880085d060f874eb8feace6ac125411
|
Replace exact equality assert with isclose in bands cli
|
tests/test_cli_bands.py
|
tests/test_cli_bands.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
assert bs.compare.difference(res, reference) == 0
|
Python
| 0.000116
|
@@ -126,16 +126,35 @@
empfile%0A
+import numpy as np%0A
import b
@@ -920,15 +920,35 @@
-assert
+np.testing.assert_allclose(
bs.c
@@ -984,10 +984,9 @@
nce)
- ==
+,
0
+)
%0A
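Decoded, the commit adds an `import numpy as np` at the top of the file and replaces the exact floating-point comparison at the end of test_cli_bands with numpy's tolerant assertion (names taken from the surrounding test):

import numpy as np

# was: assert bs.compare.difference(res, reference) == 0
np.testing.assert_allclose(bs.compare.difference(res, reference), 0)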
|
8b4b5eb2506feed164b69efa66b4cdae159182c3
|
Fix pre-commit issues in the cli_parse tests.
|
tests/test_cli_parse.py
|
tests/test_cli_parse.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
import pytest
import tempfile
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
|
Python
| 0
|
@@ -149,22 +149,49 @@
ch%3E%0A
-%0Aimport pytest
+%22%22%22Tests for the 'parse' CLI command.%22%22%22%0A
%0Aimp
@@ -202,16 +202,31 @@
tempfile
+%0A%0Aimport pytest
%0Afrom cl
@@ -483,16 +483,91 @@
_kind):%0A
+ %22%22%22Test the 'parse' command with different 'prefix' and 'pos_kind'.%22%22%22%0A
runn
|
78434bafbcc60ba7207d63481d3179474ae939ed
|
change to using scontrol for getting job state by default
|
pipeline/pipeline/batch.py
|
pipeline/pipeline/batch.py
|
import os, re
import subprocess
import time
def write_slurm_script(filename, cmd, **batch_options):
with open(filename, 'w') as fout:
fout.write('#!/bin/bash\n')
for opts in batch_options.items():
fout.write('#SBATCH --{0}={1}\n'.format(*opts))
fout.write('\n')
fout.write('{0}\n'.format(cmd))
def get_job_status(jobid, wait=30):
"""Returns status of slurm job <jobid>
    Currently parses output of `sacct`. Perhaps it would
    be a good idea to move this to pyslurm (though this would
    add a dependency).
"""
m = False
repeat = 0
while not m and repeat < wait:
cmd = 'sacct -b -j {0}'.format(jobid)
output = subprocess.check_output(cmd, shell=True)
m = re.search('{0}\s+([A-Z]+)'.format(jobid), output)
time.sleep(1)
repeat += 1
if not m:
raise ValueError('Job not found: {0}'.format(jobid))
return m.group(1)
|
Python
| 0
|
@@ -583,18 +583,220 @@
-m = False%0A
+cmd = 'scontrol show job %7B0%7D'.format(jobid)%0A output = subprocess.check_output(cmd, shell=True)%0A m = re.search('JobState=(%5Cw+)', output)%0A status = None%0A if m:%0A status = m.group(1)%0A else:%0A
@@ -806,16 +806,20 @@
eat = 0%0A
+
whil
@@ -853,24 +853,28 @@
%0A
+
+
cmd = 'sacct
@@ -895,24 +895,28 @@
rmat(jobid)%0A
+
outp
@@ -965,24 +965,28 @@
ue)%0A
+
+
m = re.searc
@@ -1035,16 +1035,20 @@
+
time.sle
@@ -1057,24 +1057,28 @@
(1)%0A
+
repeat += 1
@@ -1082,21 +1082,79 @@
1 %0A
-%0A if not m
+ if m:%0A status = m.group(1) %0A%0A if status is None
:%0A
@@ -1220,23 +1220,41 @@
-return m.group(1)%0A
+else:%0A return status
%0A
|
899a648cb4497d8177ffa6a7b137f3363d285792
|
add tests for converter helpers
|
tests/test_converter.py
|
tests/test_converter.py
|
import unittest
from twiggy.lib.converter import Converter, ConversionTable
def convVal(x):
return x
def convItem(x, y):
return x, y
class ConverterTestCase(unittest.TestCase):
def test_repr(self):
c = Converter("pants", convVal, convItem)
assert repr(c) == "<Converter('pants')>"
class ConversionTableTestCase(unittest.TestCase):
def test_init_None(self):
ct = ConversionTable()
assert len(ct) == 0
def test_init_simple(self):
c = Converter("pants", convVal, convItem)
ct = ConversionTable([c])
assert ct[0] is c
def test_init_tuple(self):
ct = ConversionTable([("pants", convVal, convItem),
("shirt", convVal, convItem, True)])
assert ct[0].key == 'pants'
assert ct[0].convertValue is convVal
assert ct[0].convertItem is convItem
assert not ct[0].required
assert ct[1].key == 'shirt'
assert ct[1].convertValue is convVal
assert ct[1].convertItem is convItem
assert ct[1].required
def test_init_dict(self):
d = dict(key='pants', convertValue=convVal, convertItem=convItem)
ct = ConversionTable([d])
assert ct[0].key == 'pants'
assert ct[0].convertValue is convVal
assert ct[0].convertItem is convItem
assert not ct[0].required
def test_init_bad(self):
with self.assertRaises(ValueError):
ct = ConversionTable(['oops'])
def test_copy(self):
ct = ConversionTable([("pants", convVal, convItem),
("shirt", convVal, convItem, True)])
ct2 = ct.copy()
assert ct is not ct2
assert ct[0] is not ct2[0]
assert ct[1] is not ct2[1]
assert ct[0].key == ct2[0].key
assert ct[0].convertValue is ct2[0].convertValue
assert ct[0].convertItem is ct2[0].convertItem
assert ct[0].required == ct2[0].required
def test_duplicate(self):
c1 = Converter("pants", convVal, convItem)
c2 = Converter("pants", convVal, convItem)
c3 = Converter("shirt", convVal, convItem)
ct = ConversionTable([c1, c2, c3])
assert ct.get('pants') is c1
l = ct.getAll('pants')
assert l[0] is c1
assert l[1] is c2
ct.delete('pants')
l = ct.getAll('pants')
assert not l
assert len(ct) == 1
assert ct[0] is c3
def test_get(self):
c = Converter("pants", convVal, convItem)
ct = ConversionTable([c,
("shirt", convVal, convItem, True)])
assert ct.get("belt") is None
assert ct.get("pants") is c
def test_getAll_no_match(self):
ct = ConversionTable([("pants", convVal, convItem),
("shirt", convVal, convItem, True)])
l = ct.getAll("belt")
assert isinstance(l, list)
assert not l
def test_convert(self):
ct = ConversionTable([
("joe", "I wear {}".format, convItem),
("frank", "You wear {}".format, convItem)])
ct.genericValue = "Someone wears {}".format
d = ct.convert({'joe':'pants', 'frank':'shirt', 'bob':'shoes'})
assert d == {'joe': "I wear pants", 'frank': "You wear shirt", 'bob': "Someone wears shoes"}
def test_drop(self):
ct = ConversionTable([
("joe", "I wear {}".format, convItem),
("frank", "You wear {}".format, lambda k, v: None)])
ct.genericItem = lambda k, v: None
d = ct.convert({'joe':'pants', 'frank':'shirt', 'bob':'shoes'})
assert d == {'joe': "I wear pants"}
def test_generic(self):
c = Converter("pants", convVal, convItem)
ct = ConversionTable([c])
assert ct.convert({'shirt':42}) == {'shirt':42}
def test_missing(self):
c = Converter("pants", convVal, convItem, True)
ct = ConversionTable([c])
with self.assertRaises(ValueError):
ct.convert({'shirt':42}) == {'shirt':42}
|
Python
| 0
|
@@ -69,16 +69,43 @@
ionTable
+, sameValue, sameItem, drop
%0A%0Adef co
@@ -165,16 +165,371 @@
n x, y%0A%0A
+class HelperTestCase(unittest.TestCase):%0A%0A def test_drop(self):%0A assert drop(1, 2) is None%0A%0A def test_same_value(self):%0A o = object()%0A assert sameValue(o) is o%0A%0A def test_same_item(self):%0A o1 = object()%0A o2 = object()%0A %0A x1, x2 = sameItem(o1, o2) %0A assert o1 is x1%0A assert o2 is x2%0A%0A
class Co
@@ -3434,36 +3434,36 @@
ear %7B%7D%22.format,
-conv
+same
Item),%0A
@@ -3493,28 +3493,71 @@
%7B%7D%22.format,
-conv
+sameItem),%0A (%22bill%22, sameValue, same
Item)%5D)%0A
@@ -3669,32 +3669,49 @@
'frank':'shirt',
+ 'bill': 'naked',
'bob':'shoes'%7D)
@@ -3810,16 +3810,32 @@
s shoes%22
+, 'bill':%22naked%22
%7D%0A%0A d
@@ -3980,33 +3980,20 @@
format,
-lambda k, v: None
+drop
)%5D)%0A
@@ -4026,25 +4026,12 @@
m =
-lambda k, v: None
+drop
%0A
|
047a1a6072905e650d8a8c6dee3078a14b9df759
|
Use Path instead of PosixPath
|
tests/test_corrector.py
|
tests/test_corrector.py
|
# -*- coding: utf-8 -*-
import pytest
from pathlib import PosixPath
from thefuck import corrector, const
from tests.utils import Rule, Command, CorrectedCommand
from thefuck.corrector import get_corrected_commands, organize_commands
class TestGetRules(object):
@pytest.fixture
def glob(self, mocker):
results = {}
mocker.patch('pathlib.Path.glob',
new_callable=lambda: lambda *_: results.pop('value', []))
return lambda value: results.update({'value': value})
@pytest.fixture(autouse=True)
def load_source(self, monkeypatch):
monkeypatch.setattr('thefuck.types.load_source',
lambda x, _: Rule(x))
def _compare_names(self, rules, names):
assert {r.name for r in rules} == set(names)
@pytest.mark.parametrize('paths, conf_rules, exclude_rules, loaded_rules', [
(['git.py', 'bash.py'], const.DEFAULT_RULES, [], ['git', 'bash']),
(['git.py', 'bash.py'], ['git'], [], ['git']),
(['git.py', 'bash.py'], const.DEFAULT_RULES, ['git'], ['bash']),
(['git.py', 'bash.py'], ['git'], ['git'], [])])
def test_get_rules(self, glob, settings, paths, conf_rules, exclude_rules,
loaded_rules):
glob([PosixPath(path) for path in paths])
settings.update(rules=conf_rules,
priority={},
exclude_rules=exclude_rules)
rules = corrector.get_rules()
self._compare_names(rules, loaded_rules)
def test_get_corrected_commands(mocker):
command = Command('test', 'test', 'test')
rules = [Rule(match=lambda _: False),
Rule(match=lambda _: True,
get_new_command=lambda x: x.script + '!', priority=100),
Rule(match=lambda _: True,
get_new_command=lambda x: [x.script + '@', x.script + ';'],
priority=60)]
mocker.patch('thefuck.corrector.get_rules', return_value=rules)
assert [cmd.script for cmd in get_corrected_commands(command)] \
== ['test!', 'test@', 'test;']
def test_organize_commands():
"""Ensures that the function removes duplicates and sorts commands."""
commands = [CorrectedCommand('ls'), CorrectedCommand('ls -la', priority=9000),
CorrectedCommand('ls -lh', priority=100),
CorrectedCommand(u'echo café', priority=200),
CorrectedCommand('ls -lh', priority=9999)]
assert list(organize_commands(iter(commands))) \
== [CorrectedCommand('ls'), CorrectedCommand('ls -lh', priority=100),
CorrectedCommand(u'echo café', priority=200),
CorrectedCommand('ls -la', priority=9000)]
|
Python
| 0.000001
|
@@ -52,21 +52,16 @@
import
-Posix
Path%0Afro
@@ -1260,13 +1260,8 @@
ob(%5B
-Posix
Path
|
4180085e3bf6d0dd1f28233d4ac62198ebeb9814
|
Fix wrong assert
|
tests/test_histogram.py
|
tests/test_histogram.py
|
# vim: set fileencoding=utf-8 :
import unittest
import pyvips
from .helpers import PyvipsTester, JPEG_FILE
class TestHistogram(PyvipsTester):
def test_hist_cum(self):
im = pyvips.Image.identity()
sum = im.avg() * 256
cum = im.hist_cum()
p = cum(255, 0)
self.assertEqual(p[0], sum)
def test_hist_equal(self):
im = pyvips.Image.new_from_file(JPEG_FILE)
im2 = im.hist_equal()
self.assertEqual(im.width, im2.width)
self.assertEqual(im.height, im2.height)
self.assertTrue(im.avg() < im2.avg())
self.assertTrue(im.deviate() < im2.deviate())
def test_hist_ismonotonic(self):
im = pyvips.Image.identity()
self.assertTrue(im.hist_ismonotonic())
def test_hist_local(self):
im = pyvips.Image.new_from_file(JPEG_FILE)
im2 = im.hist_local(10, 10)
self.assertEqual(im.width, im2.width)
self.assertEqual(im.height, im2.height)
self.assertTrue(im.avg() < im2.avg())
self.assertTrue(im.deviate() < im2.deviate())
if pyvips.at_least_libvips(8, 5):
im3 = im.hist_local(10, 10, max_slope=3)
self.assertEqual(im.width, im2.width)
self.assertEqual(im.height, im2.height)
self.assertTrue(im3.deviate() < im2.deviate())
def test_hist_match(self):
im = pyvips.Image.identity()
im2 = pyvips.Image.identity()
matched = im.hist_match(im2)
self.assertEqual((im - matched).abs().max(), 0.0)
def test_hist_norm(self):
im = pyvips.Image.identity()
im2 = im.hist_norm()
self.assertEqual((im - im2).abs().max(), 0.0)
def test_hist_plot(self):
im = pyvips.Image.identity()
im2 = im.hist_plot()
self.assertEqual(im2.width, 256)
self.assertEqual(im2.height, 256)
self.assertEqual(im2.format, pyvips.BandFormat.UCHAR)
self.assertEqual(im2.bands, 1)
def test_hist_map(self):
im = pyvips.Image.identity()
im2 = im.maplut(im)
self.assertEqual((im - im2).abs().max(), 0.0)
def test_percent(self):
im = pyvips.Image.new_from_file(JPEG_FILE).extract_band(1)
pc = im.percent(90)
msk = im <= pc
n_set = (msk.avg() * msk.width * msk.height) / 255.0
pc_set = 100 * n_set / (msk.width * msk.height)
self.assertAlmostEqual(pc_set, 90, places=0)
def test_hist_entropy(self):
im = pyvips.Image.new_from_file(JPEG_FILE).extract_band(1)
ent = im.hist_find().hist_entropy()
self.assertAlmostEqual(ent, 4.37, places=2)
def test_stdif(self):
im = pyvips.Image.new_from_file(JPEG_FILE)
im2 = im.stdif(10, 10)
self.assertEqual(im.width, im2.width)
self.assertEqual(im.height, im2.height)
# new mean should be closer to target mean
self.assertTrue(abs(im.avg() - 128) > abs(im2.avg() - 128))
if __name__ == '__main__':
unittest.main()
|
Python
| 0.0022
|
@@ -1202,33 +1202,33 @@
ual(im.width, im
-2
+3
.width)%0A
@@ -1253,33 +1253,33 @@
al(im.height, im
-2
+3
.height)%0A%0A
|
eacf0414f3fed58c31f280e9ad02df7e610d422d
|
add exception handle for KeyControlInterrupt
|
pagrant/basecommand.py
|
pagrant/basecommand.py
|
#!/usr/bin/python
#coding:utf8
__author__ = ['markshao']
import sys
from pagrant.vendors.myoptparser import optparse
from pagrant import cmdoptions
from pagrant.cmdparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pagrant.util import get_prog, format_exc
from pagrant.exceptions import PagrantError, PagrantConfigError, VirtualBootstrapError
from pagrant.log import logger
__all__ = ['Command']
class Command(object):
name = None
usage = None
hidden = None
summary = ""
def __init__(self):
parser_kw = {
'usage': self.usage,
'prog': '%s %s' % (get_prog(), self.name),
'formatter': UpdatingDefaultsHelpFormatter(),
'add_help_option': False,
'name': self.name,
'description': self.__doc__,
}
self.parser = ConfigOptionParser(**parser_kw)
# Commands should add options to this option group
optgroup_name = '%s Options' % self.name.capitalize()
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, self.parser)
self.parser.add_option_group(gen_opts)
self.logger = None
def setup_logging(self):
pass
def parse_args(self, args):
# factored out for testability
return self.parser.parse_args(args)
def run(self, args):
"""
The sub command class should overide this method
"""
NotImplemented
def execute(self, args=None):
"""
The main interface for exectute the command
"""
import copy
args_bk = copy.deepcopy(args)
try:
options, args = self.parse_args(args)
except (optparse.OptionError, optparse.BadOptionError), e:
options = None
level = 1 # Notify
level += getattr(options, "verbose", 0)
level -= getattr(options, "verbose", 0)
level = logger.level_for_integer(4 - level)
complete_log = []
logger.add_consumers(
(level, sys.stdout),
(logger.DEBUG, complete_log.append),
)
if getattr(options, "log_explicit_levels", False):
logger.explicit_levels = True
self.logger = logger # if the sub command does nothing , we just reuse this log
self.setup_logging()
try:
self.run(args_bk)
except VirtualBootstrapError:
self.logger.fatal("ERROR: %s" % str(sys.exc_info()[1]))
self.logger.error('Exception information:\n%s' % format_exc())
sys.exit(1)
except PagrantConfigError:
self.logger.fatal("ERROR: %s" % str(sys.exc_info()[1]))
self.logger.error('Exception information:\n%s' % format_exc())
sys.exit(1)
except PagrantError:
self.logger.fatal("ERROR: %s" % str(sys.exc_info()[1]))
self.logger.error('Exception information:\n%s' % format_exc())
sys.exit(1)
|
Python
| 0.000001
|
@@ -3064,28 +3064,238 @@
())%0A sys.exit(1)%0A
+ except KeyboardInterrupt:%0A self.logger.fatal(%22The user interrupt the test case execution%22)%0A self.logger.error(%22The user interrupt the test case execution%22)%0A sys.exit(1)%0A
|
8895e607b223841e7edc2b67a85c746b790f28b5
|
add another check
|
tests/test_ratelimit.py
|
tests/test_ratelimit.py
|
# coding=utf-8
# Python 2 source containing unicode https://www.python.org/dev/peps/pep-0263/
"""
Tests for SMTP server rate limit feature.
Andrew DeOrio <awdeorio@umich.edu>
"""
import textwrap
import datetime
import time
import future.backports.email as email
import future.backports.email.parser # pylint: disable=unused-import
import pytest
import click
import click.testing
from mailmerge import SendmailClient, MailmergeRateLimitError
from mailmerge.__main__ import main
try:
from unittest import mock # Python 3
except ImportError:
import mock # Python 2
# Python 2 pathlib support requires backport
try:
from pathlib2 import Path
except ImportError:
from pathlib import Path
# The sh library triggers lot of false no-member errors
# pylint: disable=no-member
# We're going to use mock_SMTP because it mimics the real SMTP library
# pylint: disable=invalid-name
@mock.patch('smtplib.SMTP')
def test_sendmail_ratelimit(mock_SMTP, tmp_path):
"""Verify SMTP library calls."""
config_path = tmp_path/"server.conf"
config_path.write_text(textwrap.dedent(u"""\
[smtp_server]
host = open-smtp.example.com
port = 25
ratelimit = 60
"""))
sendmail_client = SendmailClient(
config_path,
dry_run=False,
)
message = email.message_from_string(u"""
TO: to@test.com
SUBJECT: Testing mailmerge
FROM: from@test.com
Hello world
""")
# First message
sendmail_client.sendmail(
sender="from@test.com",
recipients=["to@test.com"],
message=message,
)
smtp = mock_SMTP.return_value.__enter__.return_value
assert smtp.sendmail.call_count == 1
# Second message exceeds the rate limit, doesn't try to send a message
with pytest.raises(MailmergeRateLimitError):
sendmail_client.sendmail(
sender="from@test.com",
recipients=["to@test.com"],
message=message,
)
assert smtp.sendmail.call_count == 1
# Retry the second message after 1 s because the rate limit is 60 messages
# per minute
# FIXME a better way to do this is to mock datetime.datetime.now()
time.sleep(1.1)
sendmail_client.sendmail(
sender="from@test.com",
recipients=["to@test.com"],
message=message,
)
assert smtp.sendmail.call_count == 2
@mock.patch('smtplib.SMTP')
def test_stdout_ratelimit(mock_SMTP, tmpdir):
"""Verify SMTP server ratelimit parameter."""
# Simple template
template_path = Path(tmpdir/"mailmerge_template.txt")
template_path.write_text(textwrap.dedent(u"""\
TO: {{email}}
FROM: from@test.com
Hello world
"""))
# Simple database with two entries
database_path = Path(tmpdir/"mailmerge_database.csv")
database_path.write_text(textwrap.dedent(u"""\
email
one@test.com
two@test.com
"""))
# Simple unsecure server config
config_path = Path(tmpdir/"mailmerge_server.conf")
config_path.write_text(textwrap.dedent(u"""\
[smtp_server]
host = open-smtp.example.com
port = 25
ratelimit = 60
"""))
# Run mailmerge
before = datetime.datetime.now()
with tmpdir.as_cwd():
runner = click.testing.CliRunner(mix_stderr=False)
result = runner.invoke(
main, [
"--no-limit",
"--no-dry-run",
"--output-format", "text",
]
)
after = datetime.datetime.now()
assert after - before > datetime.timedelta(seconds=1)
assert result.exit_code == 0
assert result.stderr == ""
assert ">>> message 1 sent" in result.stdout
assert ">>> rate limit exceeded, waiting ..." in result.stdout
assert ">>> message 2 sent" in result.stdout
|
Python
| 0
|
@@ -3596,16 +3596,114 @@
onds=1)%0A
+ smtp = mock_SMTP.return_value.__enter__.return_value%0A assert smtp.sendmail.call_count == 2%0A
asse
|
8e5ffc7ed1db1d17e55cf538fc9858705ecc9dd2
|
Bump version to 1.20.4
|
platformio_api/__init__.py
|
platformio_api/__init__.py
|
# Copyright 2014-present Ivan Kravets <me@ikravets.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging.config
import os
VERSION = (1, 20, 3)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2017 Ivan Kravets"
config = dict(
SQLALCHEMY_DATABASE_URI=None,
GITHUB_LOGIN=None,
GITHUB_PASSWORD=None,
DL_PIO_DIR=None,
DL_PIO_URL=None,
MAX_DLFILE_SIZE=1024 * 1024 * 150, # 150 Mb
# Fuzzy search will not be applied to words shorter than the value below
SOLR_FUZZY_MIN_WORD_LENGTH=3,
LOGGING=dict(version=1)
)
assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
config.update(json.load(f))
# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])
# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
try:
from time import tzset
tzset()
except ImportError:
pass
|
Python
| 0
|
@@ -659,17 +659,17 @@
(1, 20,
-3
+4
)%0A__vers
|
5d6fd6f627b6fe073d95499a58575532618ef484
|
Add many=True to test_recursive
|
tests/test_relations.py
|
tests/test_relations.py
|
from rest_framework import serializers
from rest_framework.test import APISimpleTestCase
from drf_extra_fields.relations import (
PresentablePrimaryKeyRelatedField,
PresentableSlugRelatedField,
)
from .utils import MockObject, MockQueryset
class PresentationSerializer(serializers.Serializer):
def to_representation(self, instance):
return {"pk": instance.pk, "name": instance.name}
class RecursiveSerializer(serializers.Serializer):
pk = serializers.CharField()
recursive_field = PresentablePrimaryKeyRelatedField(
queryset=MockQueryset([]),
presentation_serializer="tests.test_relations.RecursiveSerializer",
)
class TestPresentablePrimaryKeyRelatedField(APISimpleTestCase):
def setUp(self):
self.queryset = MockQueryset(
[
MockObject(pk=1, name="foo"),
MockObject(pk=2, name="bar"),
MockObject(pk=3, name="baz"),
]
)
self.instance = self.queryset.items[2]
self.field = PresentablePrimaryKeyRelatedField(
queryset=self.queryset, presentation_serializer=PresentationSerializer
)
def test_representation(self):
representation = self.field.to_representation(self.instance)
expected_representation = PresentationSerializer(self.instance).data
assert representation == expected_representation
class TestPresentableSlugRelatedField(APISimpleTestCase):
def setUp(self):
self.queryset = MockQueryset(
[
MockObject(pk=1, name="foo"),
MockObject(pk=2, name="bar"),
MockObject(pk=3, name="baz"),
]
)
self.instance = self.queryset.items[2]
self.field = PresentableSlugRelatedField(
slug_field="name",
queryset=self.queryset,
presentation_serializer=PresentationSerializer,
)
def test_representation(self):
representation = self.field.to_representation(self.instance)
expected_representation = PresentationSerializer(self.instance).data
assert representation == expected_representation
class TestRecursivePresentablePrimaryKeyRelatedField(APISimpleTestCase):
def setUp(self):
self.related_object = MockObject(
pk=3,
name="baz",
recursive_field=MockObject(
pk=4,
name="foobar",
recursive_field=MockObject(
pk=5,
name="barbaz",
recursive_field=None)
),
)
def test_recursive(self):
serializer = RecursiveSerializer(self.related_object)
assert serializer.data == {
'pk': '3', 'recursive_field': {
'pk': '4', 'recursive_field': {
'pk': '5', 'recursive_field': None
}
}
}
|
Python
| 0.998278
|
@@ -659,16 +659,209 @@
,%0A )%0A
+ recursive_fields = PresentablePrimaryKeyRelatedField(%0A queryset=MockQueryset(%5B%5D),%0A presentation_serializer=%22tests.test_relations.RecursiveSerializer%22,%0A many=True%0A )%0A
%0A%0Aclass
@@ -2532,24 +2532,247 @@
name=%22baz%22,%0A
+ recursive_fields=%5B%0A MockObject(pk=6, name=%22foo%22, recursive_fields=%5B%5D, recursive_field=None),%0A MockObject(pk=7, name=%22baz%22, recursive_fields=%5B%5D, recursive_field=None)%0A %5D,%0A
@@ -2791,32 +2791,32 @@
eld=MockObject(%0A
-
@@ -2848,24 +2848,61 @@
e=%22foobar%22,%0A
+ recursive_fields=%5B%5D,%0A
@@ -2990,24 +2990,65 @@
e=%22barbaz%22,%0A
+ recursive_fields=%5B%5D,%0A
@@ -3067,32 +3067,49 @@
rsive_field=None
+%0A
)%0A ),
@@ -3270,16 +3270,28 @@
k': '3',
+%0A
'recurs
@@ -3330,16 +3330,32 @@
k': '4',
+%0A
'recurs
@@ -3398,16 +3398,36 @@
k': '5',
+%0A
'recurs
@@ -3438,24 +3438,468 @@
field': None
+,%0A 'recursive_fields': %5B%5D%0A %7D,%0A 'recursive_fields': %5B%5D%0A %7D,%0A 'recursive_fields': %5B%0A %7B%0A 'pk': '6',%0A 'recursive_field': None,%0A 'recursive_fields': %5B%5D%0A %7D,%0A %7B%0A 'pk': '7',%0A 'recursive_field': None,%0A 'recursive_fields': %5B%5D
%0A
@@ -3909,28 +3909,28 @@
%7D%0A
-%7D
+%5D
%0A %7D%0A
|
14a2ad18e70b6bc35e8d64c56b37520ebdb9fa3c
|
Add tests for full resource name
|
tests/test_resources.py
|
tests/test_resources.py
|
# Copyright 2019 The resource-policy-evaluation-library Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from rpe.resources import Resource
from rpe.resources.gcp import GcpBigqueryDataset
from rpe.resources.gcp import GcpComputeInstance
from rpe.resources.gcp import GcpSqlInstance
from rpe.resources.gcp import GcpStorageBucket
from rpe.resources.gcp import GcpStorageBucketIamPolicy
test_project = "my_project"
test_resource_name = "my_resource"
test_cases = [
(
{
'resource_type': 'bigquery.datasets',
'resource_name': test_resource_name,
'project_id': test_project
},
GcpBigqueryDataset,
'gcp.bigquery.datasets'
),
(
{
'resource_type': 'compute.instances',
'resource_name': test_resource_name,
'resource_location': 'us-central1-a',
'project_id': test_project
},
GcpComputeInstance,
'gcp.compute.instances'
),
(
{
'resource_type': 'sqladmin.instances',
'resource_name': test_resource_name,
'project_id': test_project
},
GcpSqlInstance,
'gcp.sqladmin.instances'
),
(
{
'resource_type': 'storage.buckets',
'resource_name': test_resource_name,
'project_id': test_project
},
GcpStorageBucket,
'gcp.storage.buckets'
),
(
{
'resource_type': 'storage.buckets.iam',
'resource_name': test_resource_name,
'project_id': test_project
},
GcpStorageBucketIamPolicy,
'gcp.storage.buckets.iam'
)
]
@pytest.mark.parametrize(
"input,cls,rtype",
test_cases,
ids=[cls.__name__ for (_, cls, _) in test_cases])
def test_gcp_resource_factory(input, cls, rtype):
r = Resource.factory("gcp", input)
assert r.__class__ == cls
assert r.type() == rtype
def test_gcp_resource_factory_invalid():
with pytest.raises(AssertionError):
Resource.factory('gcp', {})
|
Python
| 0
|
@@ -1222,24 +1222,102 @@
ry.datasets'
+,%0A '//bigquery.googleapis.com/projects/my_project/datasets/my_resource'
%0A ),%0A
@@ -1578,32 +1578,130 @@
mpute.instances'
+,%0A '//compute.googleapis.com/projects/my_project/zones/us-central1-a/instances/my_resource'
%0A ),%0A (%0A
@@ -1914,16 +1914,90 @@
stances'
+,%0A '//sql.googleapis.com/projects/my_project/instances/my_resource'
%0A ),%0A
@@ -2214,16 +2214,72 @@
buckets'
+,%0A '//storage.googleapis.com/buckets/my_resource'
%0A ),%0A
@@ -2513,16 +2513,72 @@
ets.iam'
+,%0A '//storage.googleapis.com/buckets/my_resource'
%0A )%0A%5D
@@ -2633,16 +2633,45 @@
e%22,%0A
+%5B(c%5B0%5D, c%5B1%5D, c%5B2%5D) for c in
test_cas
@@ -2672,16 +2672,17 @@
st_cases
+%5D
,%0A id
@@ -2685,18 +2685,19 @@
ids=%5Bc
-ls
+%5B1%5D
.__name_
@@ -2706,19 +2706,9 @@
for
-(_, cls, _)
+c
in
@@ -2987,8 +2987,262 @@
p', %7B%7D)%0A
+%0A@pytest.mark.parametrize(%0A %22input,frn%22,%0A %5B(c%5B0%5D, c%5B3%5D) for c in test_cases%5D,%0A ids=%5Bc%5B1%5D.__name__ for c in test_cases%5D)%0Adef test_gcp_full_resource_name(input, frn):%0A r = Resource.factory(%22gcp%22, input)%0A assert r.full_resource_name() == frn%0A
|
b144eb21003fc3f2e13e3d88b93a947a458cae24
|
test designed to fail confirmed - reverted
|
tests/test_simulator.py
|
tests/test_simulator.py
|
# test_simulator.py written by Duncan Murray 28/4/2015
import unittest
import os
import sys
root_folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + ".." + os.sep + 'vais')
ref_folder = root_folder + os.sep + "data"
sys.path.append(root_folder)
import planet as planet
import battle as battle
import character as character
import simulator as simulator
test_folder = os.getcwd() + os.sep + 'test_results'
test_file = test_folder + os.sep + 'battle.txt'
#rules_file = ref_folder + os.sep + 'battle.rules'
class VaisSimulatorTest(unittest.TestCase):
def setup(self):
print('running simulator tests')
def test_01_instantiate_sim(self):
traits = character.CharacterCollection(ref_folder)
a1 = traits.generate_random_character()
a2 = traits.generate_random_character()
a3 = traits.generate_random_character()
world = planet.Planet('SimWorld', num_seeds=5, width=20, height=15, wind=0.3, rain=0.10, sun=0.3, lava=0.4)
actions = ['walk', 'run', 'fight', 'buy', 'sell', 'collect']
s = simulator.SimAdventureGame('Test of SimWorld', world, [a1, a2, a3], [(2,2), (3,4), (4,4)], actions)
s.run()
self.assertEqual(len(str(s)), 231)
def test_02_move_character(self):
"""
add a single character to a world and move them around
"""
traits = character.CharacterCollection(ref_folder)
a1 = traits.generate_random_character()
world = planet.Planet('SimWorld', num_seeds=5, width=20, height=15, wind=0.3, rain=0.10, sun=0.3, lava=0.4)
actions = ['walk']
s = simulator.SimAdventureGame('Test of SimWorld', world, [a1], [(2,2)], actions)
s.run()
self.assertEqual(len(str(s)), 143)
self.assertEqual(s.agent_locations[0]['x'], 2)
self.assertEqual(s.agent_locations[0]['y'], 2)
s.command({'name':'walk', 'type':'move', 'direction':[0,1]}, a1)
self.assertEqual(s.agent_locations[0]['x'], 2)
self.assertEqual(s.agent_locations[0]['y'], 2)
s.command({'name':'walk', 'type':'move', 'direction':[1,1]}, a1)
def test_03_SimGameOfLife(self):
traits = character.CharacterCollection(ref_folder)
a1 = traits.generate_random_character()
world = planet.Planet('SimWorld', num_seeds=5, width=20, height=15, wind=0.3, rain=0.10, sun=0.3, lava=0.4)
actions = ['walk']
s = simulator.SimAdventureGame('Test of SimWorld', world, [a1], [(2,2)], actions)
s.run()
print(s)
self.assertEqual(len(str(s)), 143)
def test_04_sim_fail(self):
self.assertEqual(1, 2)
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -2686,24 +2686,27 @@
self.assert
+Not
Equal(1, 2)
|
ddaa94003bca853819855014b7d99c165c890de8
|
add yaml output command func to config_command tests
|
test/test_config_command.py
|
test/test_config_command.py
|
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function
import os
import yaml
from mock import patch
from tempfile import mkdtemp
from shutil import rmtree
from beets import ui
from beets import config
from test._common import unittest
from test.helper import TestHelper, capture_stdout
from beets.library import Library
import six
class ConfigCommandTest(unittest.TestCase, TestHelper):
def setUp(self):
self.lib = Library(':memory:')
self.temp_dir = mkdtemp()
if 'EDITOR' in os.environ:
del os.environ['EDITOR']
os.environ['BEETSDIR'] = self.temp_dir
self.config_path = os.path.join(self.temp_dir, 'config.yaml')
with open(self.config_path, 'w') as file:
file.write('library: lib\n')
file.write('option: value\n')
file.write('password: password_value')
self.cli_config_path = os.path.join(self.temp_dir, 'cli_config.yaml')
with open(self.cli_config_path, 'w') as file:
file.write('option: cli overwrite')
config.clear()
config['password'].redact = True
config._materialized = False
def tearDown(self):
rmtree(self.temp_dir)
def test_show_user_config(self):
with capture_stdout() as output:
self.run_command('config', '-c')
output = yaml.load(output.getvalue())
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'password_value')
def test_show_user_config_with_defaults(self):
with capture_stdout() as output:
self.run_command('config', '-dc')
output = yaml.load(output.getvalue())
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'password_value')
self.assertEqual(output['library'], 'lib')
self.assertEqual(output['import']['timid'], False)
def test_show_user_config_with_cli(self):
with capture_stdout() as output:
self.run_command('--config', self.cli_config_path, 'config')
output = yaml.load(output.getvalue())
self.assertEqual(output['library'], 'lib')
self.assertEqual(output['option'], 'cli overwrite')
def test_show_redacted_user_config(self):
with capture_stdout() as output:
self.run_command('config')
output = yaml.load(output.getvalue())
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'REDACTED')
def test_show_redacted_user_config_with_defaults(self):
with capture_stdout() as output:
self.run_command('config', '-d')
output = yaml.load(output.getvalue())
self.assertEqual(output['option'], 'value')
self.assertEqual(output['password'], 'REDACTED')
self.assertEqual(output['import']['timid'], False)
def test_config_paths(self):
with capture_stdout() as output:
self.run_command('config', '-p')
paths = output.getvalue().split('\n')
self.assertEqual(len(paths), 2)
self.assertEqual(paths[0], self.config_path)
def test_config_paths_with_cli(self):
with capture_stdout() as output:
self.run_command('--config', self.cli_config_path, 'config', '-p')
paths = output.getvalue().split('\n')
self.assertEqual(len(paths), 3)
self.assertEqual(paths[0], self.cli_config_path)
def test_edit_config_with_editor_env(self):
os.environ['EDITOR'] = 'myeditor'
with patch('os.execlp') as execlp:
self.run_command('config', '-e')
execlp.assert_called_once_with(
'myeditor', 'myeditor', self.config_path)
def test_edit_config_with_automatic_open(self):
with patch('beets.util.open_anything') as open:
open.return_value = 'please_open'
with patch('os.execlp') as execlp:
self.run_command('config', '-e')
execlp.assert_called_once_with(
'please_open', 'please_open', self.config_path)
def test_config_editor_not_found(self):
with self.assertRaises(ui.UserError) as user_error:
with patch('os.execlp') as execlp:
execlp.side_effect = OSError('here is problem')
self.run_command('config', '-e')
self.assertIn('Could not edit configuration',
six.text_type(user_error.exception))
self.assertIn('here is problem', six.text_type(user_error.exception))
def test_edit_invalid_config_file(self):
with open(self.config_path, 'w') as file:
file.write('invalid: [')
config.clear()
config._materialized = False
os.environ['EDITOR'] = 'myeditor'
with patch('os.execlp') as execlp:
self.run_command('config', '-e')
execlp.assert_called_once_with(
'myeditor', 'myeditor', self.config_path)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Python
| 0.000001
|
@@ -1240,34 +1240,41 @@
def
-test_show_user_config(self
+_run_with_yaml_output(self, *args
):%0A
@@ -1346,22 +1346,13 @@
and(
-'config', '-c'
+*args
)%0A
@@ -1353,32 +1353,30 @@
gs)%0A
-output =
+return
yaml.load(o
@@ -1385,32 +1385,131 @@
put.getvalue())%0A
+%0A def test_show_user_config(self):%0A output = self._run_with_yaml_output('config', '-c')%0A%0A
self.ass
@@ -1671,69 +1671,43 @@
-with capture_stdout() as output:%0A
+output =
self.
+_
run_
-command
+with_yaml_output
('co
@@ -1724,53 +1724,8 @@
c')%0A
- output = yaml.load(output.getvalue())
%0A
@@ -2005,69 +2005,43 @@
-with capture_stdout() as output:%0A
+output =
self.
+_
run_
-command
+with_yaml_output
('--
@@ -2074,64 +2074,63 @@
ath,
- 'config')%0A output = yaml.load(output.getvalue())
+%0A 'config')%0A
%0A
@@ -2296,125 +2296,54 @@
-with capture_stdout() as output:%0A
+output =
self.
+_
run_
-command('config')%0A output = yaml.load(output.getvalue())
+with_yaml_output('config')%0A
%0A
@@ -2521,69 +2521,43 @@
-with capture_stdout() as output:%0A
+output =
self.
+_
run_
-command
+with_yaml_output
('co
@@ -2573,53 +2573,8 @@
d')%0A
- output = yaml.load(output.getvalue())
%0A
|
d81c6e4ce44b0ee63fa116cb69efce17b8bb2c3f
|
test getting message via POP
|
test/test_pop_connection.py
|
test/test_pop_connection.py
|
"""Tests for POP connection handling."""
import os
import pathlib
import unittest
from maildaemon.config import load_config
from maildaemon.pop_connection import POPConnection
_HERE = pathlib.Path(__file__).parent
_TEST_CONFIG_PATH = _HERE.joinpath('maildaemon_test_config.json')
@unittest.skipUnless(os.environ.get('TEST_COMM') or os.environ.get('CI'),
'skipping tests that require server connection')
class Tests(unittest.TestCase):
config = load_config(_TEST_CONFIG_PATH)
def test_retrieve_message_ids(self):
for connection_name in ['test-pop', 'test-pop-ssl']:
with self.subTest(msg=connection_name):
connection = POPConnection.from_dict(self.config['connections'][connection_name])
connection.connect()
ids = connection.retrieve_message_ids()
alive = connection.is_alive()
connection.disconnect()
self.assertIsInstance(ids, list, msg=connection)
self.assertTrue(alive, msg=connection)
|
Python
| 0
|
@@ -1035,24 +1035,443 @@
(alive, msg=connection)%0A
+%0A def test_retrieve_message_lines(self):%0A for connection_name in %5B'test-pop', 'test-pop-ssl'%5D:%0A with self.subTest(msg=connection_name):%0A connection = POPConnection.from_dict(self.config%5B'connections'%5D%5Bconnection_name%5D)%0A connection.connect()%0A lines = connection.retrieve_message_lines(1)%0A self.assertGreater(len(lines), 0, msg=connection)%0A
|
39a1c6c8c3795775dc8811e8e195feaa4e973cd8
|
remove comments
|
tests/test_validator.py
|
tests/test_validator.py
|
# from unittest.mock import patch
import json
import unittest
from dacsspace.validator import Validator
class TestValidator(unittest.TestCase):
def test_validator(self):
json_file = "/Users/aberish/Documents/GitHub/DACSspace/fixtures/resource.json"
with open(json_file, 'r') as f:
json_data = json.load(f)
result = Validator().validate_data(json_data)
self.assertTrue(isinstance(result, dict))
self.assertEqual(result["valid"], "True")
# HELP: I don't know what this does, but I saw it used in other tests
# if __name__ == "__main__":
# unittest.main()
|
Python
| 0
|
@@ -498,126 +498,4 @@
e%22)%0A
-%0A%0A# HELP: I don't know what this does, but I saw it used in other tests%0A# if __name__ == %22__main__%22:%0A# unittest.main()%0A
|
c2b55844bff3de39ac9a0a4bd8860306da731662
|
fix for testing 401 after redirection
|
testsuite/test_views.py
|
testsuite/test_views.py
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Tests for communities views."""
from flask import url_for, current_app
from invenio.testsuite import InvenioTestCase, make_test_suite, \
run_test_suite
class CommunitiesViewTest(InvenioTestCase):
""" Test communities view functions. """
def test_home_communities_page_availability(self):
"""communities - availability of main page"""
response = self.client.get(url_for('communities.index'))
self.assert200(response)
def test_new_community_page_availability(self):
"""communities - availability of new community page"""
self.login('admin', '')
response = self.client.get(url_for('communities.new'))
self.assert200(response)
self.logout()
def test_new_community_page_unauthorized(self):
"""communities - new communities restricted to logged in users"""
response = self.client.get(url_for('communities.new'))
self.assert401(response)
TEST_SUITE = make_test_suite(CommunitiesViewTest)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
|
Python
| 0
|
@@ -854,21 +854,8 @@
_for
-, current_app
%0Afro
@@ -918,14 +918,8 @@
ite,
- %5C%0A
run
@@ -980,16 +980,16 @@
e):%0A
+%0A
%22%22%22
-
Test
@@ -1016,17 +1016,16 @@
nctions.
-
%22%22%22%0A%0A
@@ -1674,32 +1674,90 @@
ommunities.new')
+,%0A follow_redirects=True
)%0A self.a
@@ -1885,8 +1885,9 @@
T_SUITE)
+%0A
|
77199b8c6b06054c7741433ec2fadd654a636677
|
add hour var
|
tilejetlogs/tilelogs.py
|
tilejetlogs/tilelogs.py
|
def buildTileRequestDocument(tileorigin, tilesource, x, y, z, status, datetime, ip):
r = {
'ip': ip,
'origin': tileorigin if tileorigin else "",
'source': tilesource,
'location': z+'/'+x+'/'+y,
'z': z,
'status': status,
'year': datetime.strftime('%Y'),
'month': datetime.strftime('%Y-%m'),
'date': datetime.strftime('%Y-%m-%d'),
'date_iso': datetime.isoformat()
}
return r
|
Python
| 0.000016
|
@@ -394,24 +394,74 @@
%25Y-%25m-%25d'),%0A
+ 'hour': datetime.strftime('%25Y-%25m-%25d-%25H'),%0A
'dat
|
7525e6276d4868df9ce2ae24c240f29079619ce2
|
add unicode char for emoji
|
botigo/bot.py
|
botigo/bot.py
|
import copy
import os
import json
import requests
from botigo import config
class Bot():
def __init__(self, access_token, **kwargs):
"""
@required:
access_token
@optional:
api_version
"""
self.api_version = kwargs.get('api_version') or config.FB_GRAPH_API_VERSION
self.graph_url = 'https://graph.facebook.com/v{0}'.format(self.api_version)
self.graph_msg_url = '{}/me/messages'.format(self.graph_url)
self.params = {
'access_token': config.FB_ACCESS_TOKEN
}
self.headers = {
'Content-Type': 'application/json'
}
def post_payload(self, data, **kwargs):
response = requests.post(
self.graph_msg_url,
params=self.params,
headers=self.headers,
data=data
)
if response.status_code != 200:
return response.json()
return {}
def send_fb_msg(self, recipient_id, message_text):
print('sending message to {recipient}: {text}'.format(
recipient=recipient_id, text=message_text))
data = json.dumps({
'recipient': {
'id': recipient_id
},
'message': {
'text': message_text
}
})
self.post_payload(data)
def send_card_msg(self, recipient_id, elements=[]):
data = json.dumps({
'recipient': {
'id': recipient_id
},
'message': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'generic',
'elements': elements
}
}
}
})
self.post_payload(data)
def send_location_msg(self, recipient_id, msg):
data = json.dumps({
'recipient': {
'id': recipient_id
},
'message': {
'text': msg,
'quick_replies': [
{
'content_type': 'location',
}
]
}
})
self.post_payload(data)
def send_kind_msg(self, recipient_id, msg):
data = json.dumps({
'recipient': {
'id': recipient_id
},
'message': {
'text': msg,
'quick_replies': [
{
'content_type': 'text',
'title': 'Tram',
'payload': 'tram'
},
{
'content_type': 'text',
'title': 'Bus',
'payload': 'bus'
},
{
'content_type': 'text',
'title': 'Vélo',
'payload': 'velo'
}
]
}
})
self.post_payload(data)
def send_moment_msg(self, recipient_id, msg):
data = json.dumps({
'recipient': {
'id': recipient_id
},
'message': {
'text': msg,
'quick_replies': [
{
'content_type': 'text',
'title': 'Maintenant',
'payload': 'now'
},
{
'content_type': 'text',
'title': 'Dans 10 min.',
'payload': '10_minutes'
},
{
'content_type': 'text',
'title': 'Dans 30 min.',
'payload': '30_minutes'
}
]
}
})
self.post_payload(data)
def get_user_fullname(self, recipient_id, fields=['first_name', 'last_name']):
params = copy.deepcopy(self.params)
if fields is not None and isinstance(fields, (list, tuple)):
params['fields'] = ','.join(fields)
request_endpoint = '{}/{}'.format(self.graph_url, recipient_id)
res = requests.get(request_endpoint, params=params, headers=self.headers)
if res.status_code == 200:
return res.json()
return None
def has_location_payload(self, messaging_event):
try:
_ = messaging_event['message']['attachments'][0]['payload']['coordinates']
return True
except:
return False
def has_sticker_payload(self, messaging_event):
try:
_ = messaging_event['message']['attachments'][0]['payload']['sticker_id']
return True
except:
return False
def get_location_payload(self, messaging_event):
return messaging_event['message']['attachments'][0]['payload']['coordinates']
def has_quick_reply(self, messaging_event):
try:
_ = messaging_event['message']['quick_reply']
return True
except:
return False
def get_quick_reply(self, messaging_event):
return messaging_event['message']['text']
|
Python
| 0.999043
|
@@ -2672,18 +2672,26 @@
': 'tram
+ U+1F68A
'%0A
-
@@ -2854,16 +2854,24 @@
d': 'bus
+ U+1F68C
'%0A
@@ -2998,16 +2998,16 @@
'V%C3%A9lo',%0A
-
@@ -3038,16 +3038,24 @@
': 'velo
+ U+1F6B2
'%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.