Column schema (name / dtype / value range):

commit         stringlengths   40 .. 40
subject        stringlengths   1 .. 3.25k
old_file       stringlengths   4 .. 311
new_file       stringlengths   4 .. 311
old_contents   stringlengths   0 .. 26.3k
lang           stringclasses   3 values
proba          float64         0 .. 1
diff           stringlengths   0 .. 7.82k
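Each record below lists its cells in the same order as this schema. As a minimal sketch of how a commit-diff dataset with this schema could be loaded and filtered, assuming it is published on the Hugging Face Hub (the dataset path "user/commit-diffs" and the 0.5 proba threshold are hypothetical placeholders, not values from this document):

from datasets import load_dataset

# Hypothetical dataset path; substitute the real repository id.
ds = load_dataset("user/commit-diffs", split="train")

# proba is a float64 in [0, 1]; keep rows whose language label
# was assigned with some confidence (the threshold is arbitrary).
confident = ds.filter(lambda row: row["proba"] > 0.5)

for row in confident.select(range(3)):
    print(row["commit"], row["subject"], row["old_file"])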
0a0da3821c6dec714b42e119468f1caba2da5341
Remove unused imports
openprescribing/pipeline/management/commands/run_pipeline_e2e_tests.py
openprescribing/pipeline/management/commands/run_pipeline_e2e_tests.py
from distutils.dir_util import copy_tree import glob import os import shutil from django.apps import apps from django.core.management import BaseCommand, CommandError from django.conf import settings from django.core.management import call_command from gcutils.bigquery import Client as BQClient, DATASETS, build_schema from gcutils.storage import Client as StorageClient from frontend import bq_schemas as schemas from frontend.models import MeasureValue, MeasureGlobal, TariffPrice from dmd2.models import VMP, VMPP from openprescribing.slack import notify_slack from pipeline import runner e2e_path = os.path.join(settings.APPS_ROOT, "pipeline", "e2e-test-data") class Command(BaseCommand): def handle(self, *args, **kwargs): if os.environ["DJANGO_SETTINGS_MODULE"] != "openprescribing.settings.e2etest": raise CommandError("Command must run with e2etest settings") try: run_end_to_end() except Exception: msg = "End-to-end test failed (seed: %s)\n\n" % settings.BQ_NONCE msg += "Check logs in /tmp/" notify_slack(msg) raise msg = "Pipeline tests ran to completion (seed: %s)" % settings.BQ_NONCE notify_slack(msg) def run_end_to_end(): print("BQ_NONCE: {}".format(settings.BQ_NONCE)) call_command("migrate") # No MeasureGlobals or MeasureValues are generated for the ghost branded # generics measure, because both numerator and denominator are computed # from a view (vw__ghost_generic_measure) which has no data. Rather than # populate this view, it is simpler to pretend it doesn't exist. num_measures = ( len(glob.glob(os.path.join(settings.MEASURE_DEFINITIONS_PATH, "*.json"))) - 1 ) shutil.rmtree(settings.PIPELINE_DATA_BASEDIR, ignore_errors=True) with open(settings.PIPELINE_IMPORT_LOG_PATH, "w") as f: f.write("{}") for blob in StorageClient().bucket().list_blobs(): blob.delete() for dataset_key in DATASETS: BQClient(dataset_key).create_dataset() client = BQClient("hscic") client.create_table("bnf", schemas.BNF_SCHEMA) client.create_table("ccgs", schemas.CCG_SCHEMA) client.create_table("ppu_savings", schemas.PPU_SAVING_SCHEMA) client.create_table("practice_statistics", schemas.PRACTICE_STATISTICS_SCHEMA) client.create_table( "practice_statistics_all_years", schemas.PRACTICE_STATISTICS_SCHEMA ) client.create_table("practices", schemas.PRACTICE_SCHEMA) client.create_table("prescribing", schemas.PRESCRIBING_SCHEMA) client.create_table("presentation", schemas.PRESENTATION_SCHEMA) client.create_table("tariff", schemas.TARIFF_SCHEMA) client.create_table("bdz_adq", schemas.BDZ_ADQ_SCHEMA) client = BQClient("measures") # This is enough of a schema to allow the practice_data_all_low_priority # table to be created, since it references these fields. Once populated by # import_measures, the tables in the measures dataset will have several # more fields. But we don't need to specify exactly what they are, as BQ # will work it out when the data is inserted with insert_rows_from_query. measures_schema = build_schema( ("month", "DATE"), ("practice_id", "STRING"), ("numerator", "INTEGER"), ("denominator", "INTEGER"), ) for path in glob.glob(os.path.join(settings.MEASURE_DEFINITIONS_PATH, "*.json")): measure_id = os.path.splitext(os.path.basename(path))[0] client.create_table("practice_data_" + measure_id, measures_schema) client.create_table("ccg_data_" + measure_id, measures_schema) client.create_table("global_data_" + measure_id, measures_schema) # Although there are no model instances, we call upload_model to create the # dm+d tables in BQ that are required by certain measure views. client = BQClient("dmd") for model in apps.get_app_config("dmd2").get_models(): client.upload_model(model) call_command("generate_presentation_replacements") copy_tree(os.path.join(e2e_path, "data-1"), os.path.join(e2e_path, "data")) runner.run_all(2017, 9, under_test=True) # We expect one MeasureGlobal per measure per month. assert_count_equal(num_measures, MeasureGlobal) # We expect one MeasureValue for each organisation per measure per month # (There are 4 practices, 2 CCGs, 2 STPs, and 2 regional teams). assert_count_equal(10 * num_measures, MeasureValue) copy_tree(os.path.join(e2e_path, "data-2"), os.path.join(e2e_path, "data")) runner.run_all(2017, 10, under_test=True) # We expect one MeasureGlobal per measure per month assert_count_equal(2 * num_measures, MeasureGlobal) # We expect one MeasureValue for each organisation per measure per month assert_count_equal(20 * num_measures, MeasureValue) def assert_count_equal(expected, model): actual = model.objects.count() if actual != expected: msg = "Expected {} {} objects, found {}".format(expected, model, actual) raise CommandError(msg)
Python
0.000001
@@ -470,55 +470,8 @@ obal -, TariffPrice%0Afrom dmd2.models import VMP, VMPP %0Afro
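The diff cells in these records appear to store percent-encoded patch hunks: %0A for a newline and %25 for a literal percent sign, in the style of diff-match-patch's textual patch format. That reading is an assumption based on the visible data, not documented metadata. A minimal decoding sketch under that assumption:

from urllib.parse import unquote

def decode_diff(diff_text: str) -> str:
    # unquote turns %0A into real newlines and %25 into a literal '%'.
    return unquote(diff_text)

# The version-bump diff from the next record decodes into readable hunks:
print(decode_diff("@@ -40,7 +40,6 @@ 0.1. -0' +1 %0A"))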
64ef54a757b1ad8160f8b30e9b5d1e90b47cac22
Version bump.
uptimerobot/__init__.py
uptimerobot/__init__.py
# -*- coding: utf-8 -*- __version__ = '0.1.0'
Python
0
@@ -40,7 +40,6 @@ 0.1. -0' +1 %0A
707be52f21dae6682cd541d62941c0095869c98f
Update time util docstrings
mycroft/util/time.py
mycroft/util/time.py
# # Copyright 2018 Mycroft AI Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from datetime import datetime from dateutil.tz import gettz, tzlocal def default_timezone(): """ Get the default timezone Based on user location settings location.timezone.code or the default system value if no setting exists. Returns: (datetime.tzinfo): Definition of the default timezone """ try: # Obtain from user's configurated settings # location.timezone.code (e.g. "America/Chicago") # location.timezone.name (e.g. "Central Standard Time") # location.timezone.offset (e.g. -21600000) from mycroft.configuration import Configuration config = Configuration.get() code = config["location"]["timezone"]["code"] return gettz(code) except Exception: # Just go with system default timezone return tzlocal() def now_utc(): """ Retrieve the current time in UTC Returns: (datetime): The current time in Universal Time, aka GMT """ return to_utc(datetime.utcnow()) def now_local(tz=None): """ Retrieve the current time Args: tz (datetime.tzinfo, optional): Timezone, default to user's settings Returns: (datetime): The current time """ if not tz: tz = default_timezone() return datetime.now(tz) def to_utc(dt): """ Convert a datetime with timezone info to a UTC datetime Args: dt (datetime): A datetime (presumably in some local zone) Returns: (datetime): time converted to UTC """ tzUTC = gettz("UTC") if dt.tzinfo: return dt.astimezone(tzUTC) else: return dt.replace(tzinfo=gettz("UTC")).astimezone(tzUTC) def to_local(dt): """ Convert a datetime to the user's local timezone Args: dt (datetime): A datetime (if no timezone, defaults to UTC) Returns: (datetime): time converted to the local timezone """ tz = default_timezone() if dt.tzinfo: return dt.astimezone(tz) else: return dt.replace(tzinfo=gettz("UTC")).astimezone(tz) def to_system(dt): """ Convert a datetime to the system's local timezone Args: dt (datetime): A datetime (if no timezone, assumed to be UTC) Returns: (datetime): time converted to the operation system's timezone """ tz = tzlocal() if dt.tzinfo: return dt.astimezone(tz) else: return dt.replace(tzinfo=gettz("UTC")).astimezone(tz)
Python
0.000001
@@ -575,16 +575,202 @@ ense.%0A#%0A +%22%22%22Time utils for getting and converting datetime objects for the Mycroft%0Asystem. This time is based on the setting in the Mycroft config and may or%0Amay not match the system locale.%0A%22%22%22%0A from dat @@ -863,17 +863,16 @@ %0A %22%22%22 - Get the @@ -1611,33 +1611,32 @@ w_utc():%0A %22%22%22 - Retrieve the cur @@ -1808,17 +1808,16 @@ %0A %22%22%22 - Retrieve @@ -1834,32 +1834,37 @@ nt time%0A%0A Arg +ument s:%0A tz (d @@ -2078,33 +2078,32 @@ utc(dt):%0A %22%22%22 - Convert a dateti @@ -2142,32 +2142,37 @@ atetime%0A%0A Arg +ument s:%0A dt (d @@ -2460,33 +2460,32 @@ cal(dt):%0A %22%22%22 - Convert a dateti @@ -2516,32 +2516,37 @@ imezone%0A%0A Arg +ument s:%0A dt (d @@ -2857,17 +2857,16 @@ %0A %22%22%22 - Convert @@ -2915,16 +2915,21 @@ %0A Arg +ument s:%0A
ee1f59a8a61709bebc4f08adce520ab251576148
Use a single query to fetch the guardian and hub.
us_ignite/hubs/views.py
us_ignite/hubs/views.py
from django.db.models import Q from django.contrib import messages from django.contrib.auth.decorators import login_required from django.http import Http404 from django.template.response import TemplateResponse from django.shortcuts import get_object_or_404, redirect from us_ignite.hubs.models import Hub, HubRequest from us_ignite.hubs import forms, mailer @login_required def hub_application(request): """View to submit a ``Hub`` for consideration""" object_list = HubRequest.objects.filter( ~Q(status=HubRequest.REMOVED), user=request.user) if request.method == 'POST': form = forms.HubRequestForm(request.POST) if form.is_valid(): instance = form.save(commit=False) instance.user = request.user instance.save() # Notify US Ignite about this request: mailer.notify_request(instance) msg = 'The registration for "%s" has been submited.' % instance.name messages.success(request, msg) return redirect('home') else: form = forms.HubRequestForm() context = { 'form': form, 'object_list': object_list, } return TemplateResponse(request, 'hubs/object_application.html', context) def hub_detail(request, slug): """Homepage of a Ignite Community. This view aggregates all the content related to this ``Hub``. """ instance = get_object_or_404(Hub, slug=slug) if not instance.is_published() and not instance.is_guardian(request.user): raise Http404 context = { 'object': instance, } return TemplateResponse(request, 'hubs/object_detail.html', context)
Python
0
@@ -1425,19 +1425,63 @@ _or_404( -Hub +%0A Hub.objects.select_related('guardian') , slug=s
abd2bbc081167c9306558d6376c691a965e0cf71
Improve onchange experience
l10n_es_location_nuts/models/res_partner.py
l10n_es_location_nuts/models/res_partner.py
# -*- coding: utf-8 -*- # Python source code encoding : https://www.python.org/dev/peps/pep-0263/ ############################################################################## # # OpenERP, Open Source Management Solution # This module copyright : # (c) 2015 Antiun Ingenieria, SL (Madrid, Spain, http://www.antiun.com) # Antonio Espinosa <antonioea@antiun.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import models, api from openerp.addons.base_location_nuts.models.res_partner \ import dict_recursive_update class ResPartner(models.Model): _inherit = 'res.partner' @api.multi def onchange_state(self, state_id): result = super(ResPartner, self).onchange_state(state_id) state = self.env['res.country.state'].browse(state_id) if state.country_id.code == 'ES': region_id = False substate_id = False nuts_state = self.env['res.partner.nuts'].search( [('level', '=', 4), ('state_id', '=', state.id)]) if nuts_state: substate = nuts_state[0].parent_id if substate: substate_id = substate.id region = substate.parent_id if region: region_id = region.id changes = { 'domain': { 'substate': [('country_id', '=', 'ES'), ('level', '=', 3)], 'region': [('country_id', '=', 'ES'), ('level', '=', 2)], }, 'value': { 'substate': substate_id, 'region': region_id, } } dict_recursive_update(result, changes) return result
Python
0.000001
@@ -2564,8 +2564,1096 @@ result%0A +%0A @api.onchange('substate', 'region')%0A def onchange_substate_or_region(self):%0A result = super(ResPartner, self).onchange_substate_or_region()%0A if (self.state_id.country_id.code == 'ES' or%0A self.substate.country_id.code == 'ES' or%0A self.region.country_id.code == 'ES'):%0A changes = %7B%0A 'domain': %7B%0A 'substate': %5B('country_id', '=', 'ES'),%0A ('level', '=', 3)%5D,%0A 'region': %5B('country_id', '=', 'ES'),%0A ('level', '=', 2)%5D,%0A %7D%0A %7D%0A if self.substate.country_id.code == 'ES':%0A self.region = self.substate.parent_id%0A self.country_id = self.substate.country_id%0A if self.region.country_id.code == 'ES':%0A self.country_id = self.region.country_id%0A if self.state_id.country_id.code == 'ES':%0A self.country_id = self.state_id.country_id%0A%0A dict_recursive_update(result, changes)%0A return result%0A
0fdb93fb73142315fe404b9a161ef19af0d920cd
Add simple test for config builder
tests/test_bawlerd.py
tests/test_bawlerd.py
import io import os from textwrap import dedent from pg_bawler import bawlerd class TestBawlerdConfig: def test_build_config_location_list(self): assert not bawlerd.conf.build_config_location_list(locations=()) user_conf = os.path.join( os.path.expanduser('~'), bawlerd.conf.DEFAULT_CONFIG_FILENAME) system_conf = os.path.join( '/etc/pg_bawler', bawlerd.conf.DEFAULT_CONFIG_FILENAME) assert user_conf in bawlerd.conf.build_config_location_list() assert system_conf in bawlerd.conf.build_config_location_list() def test__load_file(self): config = bawlerd.conf._load_file(io.StringIO(dedent("""\ logging: formatters: standard: format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\" handlers: default: level: "INFO" formatter: standard class: logging.StreamHandler loggers: "": handlers: ["default"] level: INFO propagate: True """))) assert 'logging' in config
Python
0
@@ -1214,8 +1214,490 @@ config%0A +%0A def test_read_config_files(self):%0A config_base = os.path.join(%0A os.path.abspath(os.path.dirname(__file__)), 'configs')%0A locations = %5B%0A os.path.join(config_base, 'etc'),%0A os.path.join(config_base, 'home'),%0A %5D%0A config = bawlerd.conf.read_config_files(%0A bawlerd.conf.build_config_location_list(locations=locations))%0A%0A assert config%5B'common'%5D%5B'listen_timeout'%5D == 40%0A assert 'logging' in config%0A
e62b62107ef44fec414a3a932a3e3e7bad211f34
add django-extensions for easier local view debugging
coffeestats/coffeestats/settings/local.py
coffeestats/coffeestats/settings/local.py
# -*- python -*- # pymode:lint_ignore=W0401,E501 """Development settings and globals.""" from __future__ import absolute_import from .base import * # noqa # ######### DEBUG CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = True # ######### END DEBUG CONFIGURATION # ######### EMAIL CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # ######### END EMAIL CONFIGURATION # ######### CACHE CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#caches CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } # ######### END CACHE CONFIGURATION # ######### TOOLBAR CONFIGURATION # See: http://django-debug-toolbar.readthedocs.org/en/latest/installation.html#explicit-setup # noqa INSTALLED_APPS += ( 'debug_toolbar', ) MIDDLEWARE_CLASSES += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', ) DEBUG_TOOLBAR_PATCH_SETTINGS = False # http://django-debug-toolbar.readthedocs.org/en/latest/installation.html INTERNAL_IPS = ('127.0.0.1', '10.0.2.2') # ######### END TOOLBAR CONFIGURATION
Python
0
@@ -926,16 +926,41 @@ olbar',%0A + 'django_extensions',%0A )%0A%0AMIDDL
0589fe156e710a97fd08001142ec05dea4bc2b65
Use another key for exercise strings for React
wger/core/management/commands/extract-i18n.py
wger/core/management/commands/extract-i18n.py
# -*- coding: utf-8 *-* # This file is part of wger Workout Manager. # # wger Workout Manager is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # wger Workout Manager is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # Django from django.core.management.base import BaseCommand # wger from wger.core.models import RepetitionUnit from wger.exercises.models import ( Equipment, ExerciseCategory, Muscle, ) class Command(BaseCommand): """ Helper command to extract translatable content from the database such as categories, muscles or equipment names and write it to files, so they can be extracted and translated on weblate. This is a bit hacky and ugly, but these strings *very* rarely change. """ help = 'Write the translatable strings from the database to a file' def handle(self, **options): # Collect all translatable items data = [i for i in ExerciseCategory.objects.all()] \ + [i for i in Equipment.objects.all()] \ + [i for i in Muscle.objects.all()] \ + [i for i in RepetitionUnit.objects.all()] # Django - write to .tpl file with open('wger/i18n.tpl', 'w') as f: out = '{% load i18n %}\n' for i in data: out += f'{{% translate "{i}" %}}\n' f.write(out) self.stdout.write(self.style.SUCCESS(f'Wrote content to i18n.tpl!')) # React - write to .tsx file (copy the file into the react repo) with open('wger/i18n.tsx', 'w') as f: out = ''' import { useTranslation } from "react-i18next"; export const DummyComponent = () => { const [t, i18n] = useTranslation();''' for i in data: out += f't("{i}");\n' out += ''' return (<p></p>); };''' f.write(out) self.stdout.write(self.style.SUCCESS(f'Wrote content to i18n.tsx!')) # Flutter - write to .dart file (copy the file into the flutter repo) # TO BE IMPLEMENTED...
Python
0.000001
@@ -1285,16 +1285,176 @@ ions):%0A%0A + # Replace whitespace with underscores and make lowercase%0A def cleanup_name(name: str) -%3E str:%0A return name.replace(' ', '_').lower()%0A%0A @@ -2385,18 +2385,52 @@ += f't(%22 -%7Bi +exercises.%7Bcleanup_name(i.__str__()) %7D%22);%5Cn'%0A
bc75dbaecfac0b9afff324c54e6c022b35419f28
set debug to false
project/web/app/website/settings.py
project/web/app/website/settings.py
""" Django settings for website project. Generated by 'django-admin startproject' using Django 1.10.2. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os, logging # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'ejxy^_(!sf_)ps@#2dr*q+x2jkuv0rre3dlm$orh%1*pvj1_jz' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True LOG = logging.basicConfig(filename='/tmp/myapp.log', level=logging.DEBUG) ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'customers', 'discounts', 'utils', 'products', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'website.urls' STATIC_ROOT = '/home/docker/code/app/static' STATIC_URL = '/static/' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'website.wsgi.application' # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases TEST = False if not TEST: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'postgres', 'USER': 'postgres', 'HOST': 'db', 'PORT': 5432, } } else: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/'
Python
0.99987
@@ -814,19 +814,20 @@ DEBUG = -Tru +Fals e%0ALOG =
a835dbfbaa2c70329c08d4b8429d49315dc6d651
Remove dangerous safestring declaration
openstack_dashboard/dashboards/identity/mappings/tables.py
openstack_dashboard/dashboards/identity/mappings/tables.py
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json from django.utils import safestring from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ungettext_lazy from horizon import tables from openstack_dashboard import api class CreateMappingLink(tables.LinkAction): name = "create" verbose_name = _("Create Mapping") url = "horizon:identity:mappings:create" classes = ("ajax-modal",) icon = "plus" policy_rules = (("identity", "identity:create_mapping"),) class EditMappingLink(tables.LinkAction): name = "edit" verbose_name = _("Edit") url = "horizon:identity:mappings:update" classes = ("ajax-modal",) icon = "pencil" policy_rules = (("identity", "identity:update_mapping"),) class DeleteMappingsAction(tables.DeleteAction): @staticmethod def action_present(count): return ungettext_lazy( u"Delete Mapping", u"Delete Mappings", count ) @staticmethod def action_past(count): return ungettext_lazy( u"Deleted Mapping", u"Deleted Mappings", count ) policy_rules = (("identity", "identity:delete_mapping"),) def delete(self, request, obj_id): api.keystone.mapping_delete(request, obj_id) class MappingFilterAction(tables.FilterAction): def filter(self, table, mappings, filter_string): """Naive case-insensitive search.""" q = filter_string.lower() return [mapping for mapping in mappings if q in mapping.ud.lower()] def get_rules_as_json(mapping): rules = getattr(mapping, 'rules', None) if rules: rules = json.dumps(rules, indent=4) return safestring.mark_safe(rules) class MappingsTable(tables.DataTable): id = tables.Column('id', verbose_name=_('Mapping ID')) description = tables.Column(get_rules_as_json, verbose_name=_('Rules')) class Meta(object): name = "idp_mappings" verbose_name = _("Attribute Mappings") row_actions = (EditMappingLink, DeleteMappingsAction) table_actions = (MappingFilterAction, CreateMappingLink, DeleteMappingsAction)
Python
0.000002
@@ -612,44 +612,8 @@ on%0A%0A -from django.utils import safestring%0A from @@ -2263,35 +2263,13 @@ urn -safestring.mark_safe( rules -) %0A%0A%0Ac
67bcec666fb1dc1ca48e531fd49bbbc75c09f041
fix the bug when the method is called by the onchange (#140)
stock_voucher/models/stock_picking_voucher.py
stock_voucher/models/stock_picking_voucher.py
############################################################################## # For copyright and license notices, see __manifest__.py file in module root # directory ############################################################################## from odoo import fields, models, api, _ from odoo.exceptions import ValidationError class StockPickingVoucher(models.Model): _name = 'stock.picking.voucher' _description = 'Stock Voucher Book' # _rec_name = 'number' # we keep this for report compatibility number = fields.Char( related='name', ) # because m2m tags widget send only values to name field name = fields.Char( 'Number', copy=False, required=True, oldname='number', ) book_id = fields.Many2one( 'stock.book', 'Voucher Book', ) picking_id = fields.Many2one( 'stock.picking', 'Picking', ondelete='cascade', required=True, index=True, ) company_id = fields.Many2one( 'res.company', 'Company', related='picking_id.company_id', readonly=True, ) # constraint de que el book y el picking deben ser de la misma company _sql_constraints = [ ('voucher_number_uniq', 'unique(name, book_id)', _('The field "Number" must be unique per book.'))] @api.constrains('name', 'picking_id') @api.onchange('name', 'picking_id') def check_voucher_number_unique(self): """ Check internal pickings with voucher number unique """ for rec in self.filtered( lambda x: x.picking_id.picking_type_id.code == 'incoming'): pick_type = rec.picking_id.picking_type_id name = pick_type.voucher_number_validator_id.validate_value( rec.name) if name and name != rec.name: rec.name = name if pick_type.voucher_number_unique: rec._check_voucher_number_unique() @api.multi def _check_voucher_number_unique(self): self.ensure_one() same_number_recs = self.search([ ('picking_id.partner_id', '=', self.picking_id.partner_id.id), ('name', '=', self.name), ('id', '!=', self.id), ]) if same_number_recs: raise ValidationError(_( 'Picking voucher number must be unique per ' 'partner'))
Python
0
@@ -2268,45 +2268,17 @@ - ('id', '!=', self.id),%0A %5D) +%5D) - self %0A
4b0902e960f08f9ef99ce211b98e211b1685f68b
Update walletchangepass.py
contrib/wallettools/walletchangepass.py
contrib/wallettools/walletchangepass.py
from jsonrpc import ServiceProxy access = ServiceProxy("http://127.0.0.1:46393") pwd = raw_input("Enter old wallet passphrase: ") pwd2 = raw_input("Enter new wallet passphrase: ") access.walletpassphrasechange(pwd, pwd2)
Python
0.000001
@@ -70,13 +70,12 @@ 0.1: -46393 +6647 %22)%0Ap @@ -212,8 +212,9 @@ d, pwd2) +%0A
af0a932e8097701179310501d75888d7d4617dac
verify code against the SdI Schema for fatturapa
l10n_it_payment_reason/tests/test_reason.py
l10n_it_payment_reason/tests/test_reason.py
from odoo.exceptions import ValidationError from odoo.tests.common import TransactionCase class TestReasons(TransactionCase): def setUp(self): super(TestReasons, self).setUp() self.reason_model = self.env["payment.reason"] self.reason_b = self.env.ref("l10n_it_payment_reason.b") def test_reasons(self): with self.assertRaises(ValidationError): self.reason_model.create({"code": "B", "name": "Test"}) name = self.reason_b.name_get() self.assertEqual( name, [ ( self.reason_b.id, "B - Utilizzazione economica, da parte dell'autore ...", ) ], )
Python
0
@@ -84,16 +84,97 @@ onCase%0A%0A +from odoo.addons.l10n_it_account.tools.account_tools import fpa_schema_get_enum%0A%0A %0Aclass T @@ -796,16 +796,542 @@ %5D,%0A )%0A +%0A def test_compare_with_fpa_schema(self):%0A %22%22%22Check that the values we define in this module are%0A the same as those defined in FPA xsd%22%22%22%0A%0A my_codes = self.reason_model.search(%5B%5D).mapped(%22code%22)%0A%0A # from fatturapa xml Schema%0A xsd_codes = %5B%0A code for code, descr in fpa_schema_get_enum(%22CausalePagamentoType%22)%0A %5D%0A%0A # XXX hardcoded - obsolete code, that is still supported by Schema%0A xsd_codes.remove(%22Z%22)%0A%0A self.assertCountEqual(my_codes, xsd_codes)%0A
c59a2ce0a6b164e7af26d73a553349937642c370
write frozen graph instead of just the graph def
write_pb_file.py
write_pb_file.py
#!/usr/bin/env python3 from gnt_model import model, error_rate, IMAGE_HEIGHT, IMAGE_WIDTH, PIXEL_DEPTH import sys import tensorflow as tf def main(): if len(sys.argv) != 3: print('Usage: {} modelpath outputdir'.format(sys.argv[0])) sys.exit() _, model_path, output_dir = sys.argv node_image_raw = tf.placeholder("float", shape=[None, 784], name="input") node_normalized_image = tf.reshape(node_image_raw, [1, IMAGE_HEIGHT, IMAGE_WIDTH, 1]) / PIXEL_DEPTH - 0.5 node_logits = model(node_normalized_image) node_predictions = tf.nn.softmax(node_logits, name="output") saver = tf.train.Saver() init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer()) with tf.Session() as sess: print('loading model') sess.run(init_op) saver.restore(sess, model_path) pb_filename = 'character_model_graph.pb.txt' print('writing {}'.format(pb_filename)) graph_def = tf.get_default_graph().as_graph_def() tf.train.write_graph(graph_def, output_dir, pb_filename, as_text=True) if __name__ == '__main__': main()
Python
0.000006
@@ -98,16 +98,26 @@ _DEPTH%0A%0A +import os%0A import s @@ -144,16 +144,113 @@ as tf%0A%0A +from tensorflow.python.platform import gfile%0Afrom tensorflow.python.framework import graph_util%0A%0A def main @@ -305,21 +305,27 @@ age: %7B%7D -model +checkpoint_ path out @@ -327,16 +327,17 @@ h output +_ dir'.for @@ -381,21 +381,26 @@ %0A _, -model +checkpoint _path, o @@ -986,21 +986,26 @@ e(sess, -model +checkpoint _path)%0A%0A @@ -1030,85 +1030,66 @@ e = -'character_model_graph.pb.txt'%0A print('writing %7B%7D'.format(pb_filename) +os.path.join(output_dir, 'frozen_character_model_graph.pb' )%0A @@ -1144,24 +1144,25 @@ h_def()%0A +%0A tf.train @@ -1157,78 +1157,436 @@ -tf.train.write_graph(graph_def, output_dir, pb_filename, as_text=True) +for node in graph_def.node:%0A node.device = %22%22%0A%0A output_graph_def = graph_util.convert_variables_to_constants(%0A sess,%0A graph_def,%0A %5B'output'%5D)%0A%0A print('writing %7B%7D'.format(pb_filename))%0A%0A with gfile.GFile(pb_filename, %22wb%22) as f:%0A f.write(output_graph_def.SerializeToString())%0A print(%22%25d ops in the final graph.%22 %25 len(output_graph_def.node))%0A %0A%0Aif
6ecccc9d5600f46e4810ab3fdf82d29268105abc
use watchdog instead of bare inotifyx for recursive file system monitoring
purkinje_pytest/testrunner.py
purkinje_pytest/testrunner.py
# -*- coding: utf-8 -*- """Automatic test execution""" from __future__ import print_function from watchdog.observers import Observer from watchdog.events import LoggingEventHandler import logging import time logger = logging.getLogger(__file__) #WATCH_MASK = inotify.IN_CLOSE_WRITE | inotify.IN_DELETE # Extensions of files to be watched EXTENSIONS = ['.py'] class TestRunner: """Watches project directory and executes test when relevant files have been changed TODO: ensure that ulimit is sufficient for number of files to be monitored """ def __init__(self, dir): self._dir = dir #self._fd = inotify.init() # self._wd = inotify.add_watch(self._fd, # self._dir, # WATCH_MASK) logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S') self.event_handler = LoggingEventHandler() self.observer = Observer() def start(self): """Watch directory forever and execute test cases """ print('{}: watching directory "{}"'.format(self.__class__, self._dir)) self.observer.schedule(self.event_handler, self._dir, recursive=True) self.observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: self.observer.stop() def _filter(self, events): """Select files that are relevant to test execution""" print('Before filter: {}'.format(events)) for event in events: n = event.name if n.endswith('.py'): yield event def _handle_event(self, e): print('Event: {}'.format(e)) def main(): fw = TestRunner('.') fw.start()
Python
0
@@ -156,23 +156,26 @@ import -Logging +FileSystem EventHan @@ -362,16 +362,643 @@ .py'%5D%0A%0A%0A +class Handler(FileSystemEventHandler):%0A%0A %22%22%22Triggers test execution when project contents change%0A %22%22%22%0A%0A def on_created(self, event):%0A self._trigger(event)%0A%0A def on_deleted(self, event):%0A self._trigger(event)%0A%0A def on_modified(self, event):%0A self._trigger(event)%0A%0A def on_moved(self, event):%0A self._trigger(event)%0A%0A def _filter(self, path):%0A %22%22%22Determine whether a file is relevant to test execution%22%22%22%0A return path.endswith('.py')%0A%0A def _trigger(self, event):%0A if self._filter(event.src_path):%0A print('##### Trigger: %7B%7D ####'.format(event))%0A%0A%0A class Te @@ -1450,178 +1450,8 @@ K)%0A%0A - logging.basicConfig(level=logging.INFO,%0A format='%25(asctime)s - %25(message)s',%0A datefmt='%25Y-%25m-%25d %25H:%25M:%25S')%0A%0A @@ -1479,20 +1479,8 @@ r = -LoggingEvent Hand @@ -1901,16 +1901,16 @@ errupt:%0A + @@ -1939,271 +1939,8 @@ ()%0A%0A - def _filter(self, events):%0A %22%22%22Select files that are relevant to test execution%22%22%22%0A print('Before filter: %7B%7D'.format(events))%0A for event in events:%0A n = event.name%0A if n.endswith('.py'):%0A yield event%0A%0A
167b55048595f5329c064ad83c3dc19eaa9dc03e
add flask API function to add a set of images
recognition_server.py
recognition_server.py
# -*- coding: utf-8 -*- """ Created on Sat Aug 12 18:08:52 2017 @author: Shaun Werkhoven @purpose: To create a image classification system for the Image Intelligence TakeHome Assignment Licensed under the Apache License, Version 2.0 (the "License"); Simple image classification with TensorFlow and the Inception model. Run image classification with Inception trained on ImageNet 2012 Challenge data set. This program creates a graph from a saved GraphDef protocol buffer, and runs inference on an input JPEG, GIF or PNG image. It outputs human readable strings of the top 5 predictions along with their probabilities. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import os.path import re import sys import tarfile import numpy as np import tensorflow as tf import json import urllib from flask import Flask, jsonify, render_template, abort, make_response, request, Markup from flask_httpauth import HTTPBasicAuth from PIL import Image auth = HTTPBasicAuth() app = Flask(__name__) FLAGS = None DATA_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz' images = [ { 'id': 1, 'title': u'Nikes', 'url': 'http://imgdirect.s3-website-us-west-2.amazonaws.com/nike.jpg', 'results': '', 'resize': False, 'size': "" }, { 'id': 2, 'title': u'Altra', 'url': 'https://s3-us-west-2.amazonaws.com/imgdirect/altra.jpg', 'results': '', 'resize': False, 'size': "" } ] @auth.error_handler def unauthorized(): return make_response(jsonify({'error': 'unauthorized access'}), 403) @app.errorhandler(404) def not_found(error): return make_response(jsonify({'error': 'not found'}), 404) @app.errorhandler(400) def bad_request(error): return make_response(jsonify({'error': 'missing json data'}), 400) @app.errorhandler(410) def missing_URL(error): return make_response(jsonify({'error': 'missing URL field'}), 410) @app.route('/') @app.route('/index') def index(): return "Hello, World!" ### test string ### curl -i http://127.0.0.1:5000/img/api/v1.0/images @app.route('/img/api/v1.0/images', methods=['GET']) #@auth.login_required def get_imgs(): return jsonify({'images': images}) ### test String ### curl -i http://127.0.0.1:5000/img/api/v1.0/images/2 @app.route('/img/api/v1.0/images/<int:img_id>', methods = ['GET']) #@auth.login_required def get_img(img_id): img = [img for img in images if img['id'] == img_id] if len(img) == 0: abort(404) return jsonify({'img': img[0]})
Python
0
@@ -2654,8 +2654,1385 @@ mg%5B0%5D%7D)%0A +%0A%0A### test String%0A### curl -i -H %22Content-Type: application/json%22 -X POST -d '%7B%22url%22:%22http://imgdirect.s3-website-us-west-2.amazonaws.com/neither.jpg%22%7D' http://127.0.0.1:5000/img/api/v1.0/images%0A@app.route('/img/api/v1.0/images', methods = %5B'POST'%5D)%0A#@auth.login_required%0Adef add_imgs():%0A if not request.json:%0A abort(400)%0A %0A missing_url = False%0A json_str = request.json%0A img_data = json_str%5B'new_imgs'%5D%0A new_images = %5B%5D%0A %0A for img in img_data:%0A if img.get('url') == None:%0A missing_url = True%0A continue%0A %0A if img.get('title') == None:%0A new_title = %22%22%0A else:%0A new_title = img.get('title')%0A if img.get('results') == None:%0A new_results = %22%22%0A else:%0A new_results = img.get('results')%0A %0A image = %7B%0A # simple way to ensure a unique id%0A 'id' : images%5B-1%5D%5B'id'%5D + 1,%0A 'title': new_title,%0A # url is required, otherwise return error%0A 'url': img%5B'url'%5D,%0A 'results': new_results,%0A 'resize': False,%0A 'size': %22%22%0A %7D%0A images.append(image)%0A new_images.append(image)%0A %0A if missing_url:%0A return_val = jsonify(new_images), 410%0A else:%0A return_val = jsonify(new_images), 201%0A %0A return return_val%0A%0A%0A
c052aaca11f8fdcd465ee986548db8141b27a197
bump to 0.7.2
hublib/__init__.py
hublib/__init__.py
from pint import UnitRegistry ureg = UnitRegistry() ureg.autoconvert_offset_to_baseunit = True Q_ = ureg.Quantity __version__ = "0.7.1"
Python
0.000013
@@ -131,8 +131,8 @@ 0.7. -1 +2 %22%0A%0A
8a0a00a688d2a7edb6b03a505d4ededf72730886
Remove useless import
zinnia/views/trackback.py
zinnia/views/trackback.py
"""Views for Zinnia trackback""" from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.http import HttpResponsePermanentRedirect from django.shortcuts import get_object_or_404 from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from django.views.generic.base import TemplateView import django_comments as comments from zinnia.flags import TRACKBACK from zinnia.flags import get_user_flagger from zinnia.models.entry import Entry from zinnia.signals import trackback_was_posted from zinnia.spam_checker import check_is_spam class EntryTrackback(TemplateView): """ View for handling trackbacks on the entries. """ content_type = 'text/xml' template_name = 'zinnia/entry_trackback.xml' @method_decorator(csrf_exempt) def dispatch(self, *args, **kwargs): """ Decorate the view dispatcher with csrf_exempt. """ return super(EntryTrackback, self).dispatch(*args, **kwargs) def get_object(self): """ Retrieve the Entry trackbacked. """ return get_object_or_404(Entry.published, pk=self.kwargs['pk']) def get(self, request, *args, **kwargs): """ GET only do a permanent redirection to the Entry. """ entry = self.get_object() return HttpResponsePermanentRedirect(entry.get_absolute_url()) def post(self, request, *args, **kwargs): """ Check if an URL is provided and if trackbacks are enabled on the Entry. If so the URL is registered one time as a trackback. """ url = request.POST.get('url') if not url: return self.get(request, *args, **kwargs) entry = self.get_object() site = Site.objects.get_current() if not entry.trackbacks_are_open: return self.render_to_response( {'error': 'Trackback is not enabled for %s' % entry.title}) title = request.POST.get('title') or url excerpt = request.POST.get('excerpt') or title blog_name = request.POST.get('blog_name') or title ip_address = request.META.get('REMOTE_ADDR', None) trackback_klass = comments.get_model() trackback_datas = { 'content_type': ContentType.objects.get_for_model(Entry), 'object_pk': entry.pk, 'site': site, 'user_url': url, 'user_name': blog_name, 'ip_address': ip_address, 'comment': excerpt } trackback = trackback_klass(**trackback_datas) if check_is_spam(trackback, entry, request): return self.render_to_response( {'error': 'Trackback considered like spam'}) trackback_defaults = {'comment': trackback_datas.pop('comment')} trackback, created = trackback_klass.objects.get_or_create( defaults=trackback_defaults, **trackback_datas) if created: trackback.flags.create(user=get_user_flagger(), flag=TRACKBACK) trackback_was_posted.send(trackback.__class__, trackback=trackback, entry=entry) else: return self.render_to_response( {'error': 'Trackback is already registered'}) return self.render_to_response({})
Python
0.000004
@@ -235,42 +235,8 @@ 404%0A -from django.utils import timezone%0A from
8959801a293fa33cb7625b524c6e9226af7253b2
Use separate HITTypeIds for live vs. sandbox
recruiters.py
recruiters.py
"""Recruiters of judicious humans.""" import logging import os import boto3 # Set up logging. logger = logging.getLogger(__name__) handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s [recruiter.1]: %(message)s') handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.DEBUG) class Recruiter(object): """Generic recruiter.""" def recruit(self): raise NotImplementedError class HotAirRecruiter(Recruiter): """Talks about recruiting, but does not recruit.""" def recruit(self): logger.info("Recruiting a participant.") class MTurkRecruiter(Recruiter): """Recruits from Amazon Mechanical Turk.""" def __init__(self): self.mode = os.environ["JUDICIOUS_MTURK_MODE"] if self.mode == "sandbox": self._client = boto3.client( service_name='mturk', endpoint_url='https://mturk-requester-sandbox.us-east-1.amazonaws.com', ) elif self.mode == "live": self._client = boto3.client( service_name='mturk', region_name="us-east-1", ) def _print_balance(self): balance = self.client.get_account_balance()['AvailableBalance'] logger.info("Current MTurk balance is ${}.".format(balance)) def recruit(self): response = self._client.create_hit_with_hit_type( HITTypeId=os.environ["JUDICIOUS_MTURK_HIT_TYPE_ID"], MaxAssignments=1, LifetimeInSeconds=int(os.environ["JUDICIOUS_MTURK_LIFETIME"]), Question=open("external.xml", "r").read(), ) logger.info("Created HIT with ID {}".format(response['HIT']['HITId']))
Python
0.000001
@@ -1367,57 +1367,142 @@ -response = self._client.create_hit_with_hit_type( +if self.mode == %22sandbox%22:%0A HITTypeId = os.environ%5B%22JUDICIOUS_MTURK_HIT_TYPE_ID_SANDBOX%22%5D%0A elif self.mode == %22live%22: %0A @@ -1519,17 +1519,19 @@ ITTypeId -= + = os.envir @@ -1561,18 +1561,114 @@ _TYPE_ID -%22%5D +_LIVE%22%5D%0A%0A response = self._client.create_hit_with_hit_type(%0A HITTypeId=HITTypeId ,%0A
6655a1b2633ccf55c8511a37a242560f53b15299
Implement Deathlord and Wailing Soul
fireplace/carddata/minions/neutral/rare.py
fireplace/carddata/minions/neutral/rare.py
import random from ...card import * from fireplace.enums import CardType, Race # Injured Blademaster class CS2_181(Card): def action(self): self.hit(self, 4) # Young Priestess class EX1_004(Card): def OWN_TURN_END(self): other_minions = [t for t in self.controller.field if t is not self] if other_minions: random.choice(other_minions).buff("EX1_004e") class EX1_004e(Card): Health = 1 # Alarm-o-Bot class EX1_006(Card): def OWN_TURN_BEGIN(self): minions = self.controller.hand.filterByType(CardType.MINION) if minions: self.bounce() self.controller.summon(random.choice(minions)) # Ancient Watcher class EX1_045(Card): cantAttack = True # Coldlight Oracle class EX1_050(Card): def action(self): self.controller.draw(2) self.controller.opponent.draw(2) # Mind Control Tech class EX1_085(Card): def action(self): if len(self.controller.opponent.field) >= 4: self.controller.takeControl(random.choice(self.controller.opponent.field)) # Arcane Golem class EX1_089(Card): def action(self): self.controller.opponent.maxMana += 1 # Defender of Argus class EX1_093(Card): def action(self): for target in self.adjacentMinions: target.buff("EX1_093e") class EX1_093e(Card): Atk = 1 Health = 1 taunt = True # Gadgetzan Auctioneer class EX1_095(Card): def OWN_CARD_PLAYED(self, card): if card.type == CardType.SPELL: self.controller.draw() # Abomination class EX1_097(Card): def deathrattle(self): for target in self.controller.getTargets(TARGET_ALL_CHARACTERS): self.hit(target, 2) # Coldlight Seer class EX1_103(Card): def action(self): for minion in self.controller.field: if minion.race == Race.MURLOC: minion.buff("EX1_103e") class EX1_103e(Card): Health = 2 # Ancient Mage class EX1_584(Card): def action(self): for target in self.adjacentMinions: target.buff("EX1_584e") class EX1_584e(Card): spellpower = 1 # Imp Master class EX1_597(Card): def OWN_TURN_END(self): self.hit(self, 1) self.controller.summon("EX1_598") # Nerubian Egg class FP1_007(Card): deathrattle = summonMinion("FP1_007t") # Sludge Belcher class FP1_012(Card): deathrattle = summonMinion("FP1_012t") # Wild Pyromancer class NEW1_020(Card): def AFTER_OWN_CARD_PLAYED(self, card): if card.type == CardType.SPELL: for target in self.controller.getTargets(TARGET_ALL_MINIONS): self.hit(target, 1) # Bloodsail Corsair class NEW1_025(Card): def action(self): weapon = self.controller.opponent.hero.weapon if self.controller.opponent.hero.weapon: weapon.durability -= 1 # Master Swordsmith class NEW1_037(Card): def OWN_TURN_END(self): other_minions = [t for t in self.controller.field if t is not self] if other_minions: random.choice(other_minions).buff("NEW1_037e") class NEW1_037e(Card): Atk = 1 # Stampeding Kodo class NEW1_041(Card): def action(self): targets = [t for t in self.controller.opponent.field if t.atk <= 2] if targets: random.choice(targets).destroy()
Python
0
@@ -2105,82 +2105,402 @@ %0A%0A# -Sludge Belcher%0Aclass FP1_012(Card):%0A%09deathrattle = summonMinion(%22FP1_012t%22 +Deathlord%0Aclass FP1_009(Card):%0A%09def deathrattle(self):%0A%09%09minions = self.controller.opponent.deck.filterByType(CardType.MINION)%0A%09%09if minions:%0A%09%09%09self.controller.opponent.summon(random.choice(minions))%0A%0A%0A# Sludge Belcher%0Aclass FP1_012(Card):%0A%09deathrattle = summonMinion(%22FP1_012t%22)%0A%0A%0A# Wailing Soul%0Aclass FP1_016(Card):%0A%09def action(self):%0A%09%09for target in self.controller.field:%0A%09%09%09target.silence( )%0A%0A%0A
1d50124d9d4465a42beb2ca37cde390d088c8c78
Include health monitors expected codes upper bound into HAProxy config
quantum/plugins/services/agent_loadbalancer/drivers/haproxy/cfg.py
quantum/plugins/services/agent_loadbalancer/drivers/haproxy/cfg.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2013 New Dream Network, LLC (DreamHost) # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Mark McClain, DreamHost import itertools from oslo.config import cfg from quantum.agent.linux import utils from quantum.plugins.common import constants as qconstants from quantum.plugins.services.agent_loadbalancer import constants PROTOCOL_MAP = { constants.PROTOCOL_TCP: 'tcp', constants.PROTOCOL_HTTP: 'http', constants.PROTOCOL_HTTPS: 'tcp', } BALANCE_MAP = { constants.LB_METHOD_ROUND_ROBIN: 'roundrobin', constants.LB_METHOD_LEAST_CONNECTIONS: 'leastconn', constants.LB_METHOD_SOURCE_IP: 'source' } STATS_MAP = { constants.STATS_CURRENT_CONNECTIONS: 'qcur', constants.STATS_MAX_CONNECTIONS: 'qmax', constants.STATS_CURRENT_SESSIONS: 'scur', constants.STATS_MAX_SESSIONS: 'smax', constants.STATS_TOTAL_SESSIONS: 'stot', constants.STATS_IN_BYTES: 'bin', constants.STATS_OUT_BYTES: 'bout', constants.STATS_CONNECTION_ERRORS: 'econ', constants.STATS_RESPONSE_ERRORS: 'eresp' } ACTIVE = qconstants.ACTIVE def save_config(conf_path, logical_config, socket_path=None): """Convert a logical configuration to the HAProxy version""" data = [] data.extend(_build_global(logical_config, socket_path=socket_path)) data.extend(_build_defaults(logical_config)) data.extend(_build_frontend(logical_config)) data.extend(_build_backend(logical_config)) utils.replace_file(conf_path, '\n'.join(data)) def _build_global(config, socket_path=None): opts = [ 'daemon', 'user nobody', 'group %s' % cfg.CONF.user_group, 'log /dev/log local0', 'log /dev/log local1 notice' ] if socket_path: opts.append('stats socket %s mode 0666 level user' % socket_path) return itertools.chain(['global'], ('\t' + o for o in opts)) def _build_defaults(config): opts = [ 'log global', 'retries 3', 'option redispatch', 'timeout connect 5000', 'timeout client 50000', 'timeout server 50000', ] return itertools.chain(['defaults'], ('\t' + o for o in opts)) def _build_frontend(config): protocol = config['vip']['protocol'] opts = [ 'option tcplog', 'bind %s:%d' % ( _get_first_ip_from_port(config['vip']['port']), config['vip']['protocol_port'] ), 'mode %s' % PROTOCOL_MAP[protocol], 'default_backend %s' % config['pool']['id'], ] if config['vip']['connection_limit'] >= 0: opts.append('maxconn %s' % config['vip']['connection_limit']) if protocol == constants.PROTOCOL_HTTP: opts.append('option forwardfor') return itertools.chain( ['frontend %s' % config['vip']['id']], ('\t' + o for o in opts) ) def _build_backend(config): protocol = config['pool']['protocol'] lb_method = config['pool']['lb_method'] opts = [ 'mode %s' % PROTOCOL_MAP[protocol], 'balance %s' % BALANCE_MAP.get(lb_method, 'roundrobin') ] if protocol == constants.PROTOCOL_HTTP: opts.append('option forwardfor') # add the first health_monitor (if available) server_addon, health_opts = _get_server_health_option(config) opts.extend(health_opts) # add session persistence (if available) persist_opts = _get_session_persistence(config) opts.extend(persist_opts) # add the members for member in config['members']: if member['status'] == ACTIVE and member['admin_state_up']: server = (('server %(id)s %(address)s:%(protocol_port)s ' 'weight %(weight)s') % member) + server_addon if _has_http_cookie_persistence(config): server += ' cookie %d' % config['members'].index(member) opts.append(server) return itertools.chain( ['backend %s' % config['pool']['id']], ('\t' + o for o in opts) ) def _get_first_ip_from_port(port): for fixed_ip in port['fixed_ips']: return fixed_ip['ip_address'] def _get_server_health_option(config): """return the first active health option""" for monitor in config['healthmonitors']: if monitor['status'] == ACTIVE and monitor['admin_state_up']: break else: return '', [] server_addon = ' check inter %(delay)ds fall %(max_retries)d' % monitor opts = [ 'timeout check %ds' % monitor['timeout'] ] if monitor['type'] in (constants.HEALTH_MONITOR_HTTP, constants.HEALTH_MONITOR_HTTPS): opts.append('option httpchk %(http_method)s %(url_path)s' % monitor) opts.append( 'http-check expect rstatus %s' % '|'.join(_expand_expected_codes(monitor['expected_codes'])) ) if monitor['type'] == constants.HEALTH_MONITOR_HTTPS: opts.append('option ssl-hello-chk') return server_addon, opts def _get_session_persistence(config): persistence = config['vip'].get('session_persistence') if not persistence: return [] opts = [] if persistence['type'] == constants.SESSION_PERSISTENCE_SOURCE_IP: opts.append('stick-table type ip size 10k') opts.append('stick on src') elif persistence['type'] == constants.SESSION_PERSISTENCE_HTTP_COOKIE: opts.append('cookie SRV insert indirect nocache') elif (persistence['type'] == constants.SESSION_PERSISTENCE_APP_COOKIE and persistence.get('cookie_name')): opts.append('appsession %s len 56 timeout 3h' % persistence['cookie_name']) return opts def _has_http_cookie_persistence(config): return (config['vip'].get('session_persistence') and config['vip']['session_persistence']['type'] == constants.SESSION_PERSISTENCE_HTTP_COOKIE) def _expand_expected_codes(codes): """Expand the expected code string in set of codes. 200-204 -> 200, 201, 202, 204 200, 203 -> 200, 203 """ retval = set() for code in codes.replace(',', ' ').split(' '): code = code.strip() if not code: continue elif '-' in code: low, hi = code.split('-')[:2] retval.update(str(i) for i in xrange(int(low), int(hi))) else: retval.add(code) return retval
Python
0.000001
@@ -6879,16 +6879,20 @@ int(hi) + + 1 ))%0A
7f2f00edd248f231627615328c1173110053baac
use consume
dace/processinstance/tests/test_signal.py
dace/processinstance/tests/test_signal.py
import transaction from pyramid.threadlocal import get_current_registry from dace.interfaces import IProcessDefinition import dace.processinstance.tests.example.process as example from dace.processdefinition.processdef import ProcessDefinition from dace.processdefinition.activitydef import ActivityDefinition from dace.processdefinition.gatewaydef import ParallelGatewayDefinition from dace.processdefinition.transitiondef import TransitionDefinition from dace.processdefinition.eventdef import ( StartEventDefinition, EndEventDefinition, IntermediateCatchEventDefinition, IntermediateThrowEventDefinition, SignalEventDefinition) from dace.testing import FunctionalTests def ref_signal(process): return "X" class TestsSignal(FunctionalTests): def tearDown(self): registry = get_current_registry() registry.unregisterUtility(provided=IProcessDefinition) super(TestsSignal, self).tearDown() def _process_definition(self): """ G1(+), G2(+): parallel gateways S: start event E: End event St: Signal throwing Sc: Signal catching A, D: activities ----- ------ -->| A |-->| St |-- ----- --------- / ----- ------ \ --------- ----- | S |-->| G1(+) |- ------ ----- -| G2(+) |-->| E | ----- --------- \-->| Sc |->| D |---/ --------- ----- ------ ----- """ pd = ProcessDefinition(u'sample') self.app['pd'] = pd pd.defineNodes( s = StartEventDefinition(), g1 = ParallelGatewayDefinition(), g2 = ParallelGatewayDefinition(), a = ActivityDefinition(), d = ActivityDefinition(), e = EndEventDefinition(), st = IntermediateThrowEventDefinition( SignalEventDefinition(ref_signal)), sc = IntermediateCatchEventDefinition( SignalEventDefinition(ref_signal)), ) pd.defineTransitions( TransitionDefinition('s', 'g1'), TransitionDefinition('g1', 'a'), TransitionDefinition('g1', 'sc'), TransitionDefinition('a', 'st'), TransitionDefinition('sc', 'd'), TransitionDefinition('st', 'g2'), TransitionDefinition('d', 'g2'), TransitionDefinition('g2', 'e'), ) self.config.scan(example) return pd def xtest_signal_event_start_sc(self): pd = self._process_definition() self.registry.registerUtility(pd, provided=IProcessDefinition, name=pd.id) start_wi = pd.start_process('sc') sc_wi, proc = start_wi.start() sc_wi.start() self.assertEqual(len(proc.getWorkItems()), 2) self.assertEqual(sorted(proc.getWorkItems().keys()), ['sample.a', 'sample.sc']) def test_signal_event(self): pd = self._process_definition() self.registry.registerUtility(pd, provided=IProcessDefinition, name=pd.id) start_wi = pd.start_process('a') # commit the application transaction.commit() a_wi, proc = start_wi.start() a_wi.start() transaction.commit() import time time.sleep(3) transaction.begin() self.assertEqual(sorted(proc.getWorkItems().keys()), ['sample.d']) d_wi = proc.getWorkItems()['sample.d'] self.assertEqual(len(proc.getWorkItems()), 1) self.assertEqual(sorted(proc.getWorkItems().keys()), ['sample.d']) d_wi.start().start() self.assertEqual(len(proc.getWorkItems()), 0) def _process_definition_with_activity_after_start_event(self): """ G1(+), G2(+): parallel gateways S: start event E: End event St: Signal throwing Sc: Signal catching A, B, D: activities ----- ------ -->| A |-->| St |-- ----- ----- --------- / ----- ------ \ --------- ----- | S |-->| B |-->| G1(+) |- ------ ----- -| G2(+) |-->| E | ----- ----- --------- \-->| Sc |->| D |---/ --------- ----- ------ ----- """ pd = ProcessDefinition(u'sample') self.app['pd'] = pd pd.defineNodes( s = StartEventDefinition(), g1 = ParallelGatewayDefinition(), g2 = ParallelGatewayDefinition(), a = ActivityDefinition(), b = ActivityDefinition(), d = ActivityDefinition(), e = EndEventDefinition(), st = IntermediateThrowEventDefinition( SignalEventDefinition(ref_signal)), sc = IntermediateCatchEventDefinition( SignalEventDefinition(ref_signal)), ) pd.defineTransitions( TransitionDefinition('s', 'b'), TransitionDefinition('b', 'g1'), TransitionDefinition('g1', 'a'), TransitionDefinition('g1', 'sc'), TransitionDefinition('a', 'st'), TransitionDefinition('sc', 'd'), TransitionDefinition('st', 'g2'), TransitionDefinition('d', 'g2'), TransitionDefinition('g2', 'e'), ) self.config.scan(example) return pd def test_start_intermediate_events_on_startup(self): from zope.processlifetime import DatabaseOpenedWithRoot from dace.processinstance.event import callbacks as event_callbacks pd = self._process_definition_with_activity_after_start_event() self.registry.registerUtility(pd, provided=IProcessDefinition, name=pd.id) start_wi = pd.start_process('b') # commit the application transaction.commit() b_wi, proc = start_wi.start() b_wi.start() transaction.commit() self.assertEqual(sorted(proc.getWorkItems().keys()), ['sample.a', 'sample.sc']) # simulate application shutdown self.assertEqual(len(event_callbacks), 1) proc.getWorkItems()['sample.sc'].node.eventKind.stop() self.assertEqual(len(event_callbacks), 0) from dace.subscribers import stop_ioloop stop_ioloop() # simulate application startup event = DatabaseOpenedWithRoot(self.app._p_jar.db()) self.registry.notify(event) self.assertEqual(len(event_callbacks), 1) # from dace.subscribers import start_intermediate_events # start_ioloop(event) # start_intermediate_events(event) a_wi = proc.getWorkItems()['sample.a'] a_wi.start().start() # we need to commit so the catching event Job # see the modified process. transaction.commit() # The job wait 2 sec before executing import time time.sleep(3) transaction.begin() self.assertEqual(sorted(proc.getWorkItems().keys()), ['sample.d'])
Python
0.000003
@@ -3276,37 +3276,39 @@ proc = start_wi. -start +consume ()%0A a_wi. @@ -3376,33 +3376,33 @@ time.sleep( -3 +5 )%0A transa @@ -3676,29 +3676,31 @@ d_wi. -start +consume ().start()%0A @@ -6047,37 +6047,39 @@ proc = start_wi. -start +consume ()%0A b_wi. @@ -6860,21 +6860,23 @@ a_wi. -start +consume ().start @@ -7088,9 +7088,9 @@ eep( -3 +5 )%0A
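The diff above renames the work-item `start()` calls to `consume()` and stretches the fixed `time.sleep(3)` waits to 5 seconds so the background catching-event job has time to run. Fixed sleeps in tests are fragile; a more robust alternative is to poll for the expected state with a timeout. A minimal sketch, assuming a `condition` callable supplied by the test (none of these names come from the dace code):

```python
import time


def wait_until(condition, timeout=10.0, interval=0.2):
    """Poll `condition` until it returns True or `timeout` seconds pass."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if condition():
            return True
        time.sleep(interval)  # brief pause between polls
    return False


# Hypothetical usage mirroring the test above:
#   assert wait_until(lambda: 'sample.d' in proc.getWorkItems())
```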
5cae7ae0896731d45483279c29a03e67a036321e
store the time from before the changelog was requested, not after
crate_project/apps/pypi/tasks.py
crate_project/apps/pypi/tasks.py
import collections import datetime import hashlib import logging import re import time import xmlrpclib import redis import requests from celery.task import task from django.conf import settings from packages.models import Package from pypi.processor import PyPIPackage logger = logging.getLogger(__name__) INDEX_URL = "http://pypi.python.org/pypi" SERVERKEY_URL = "http://pypi.python.org/serverkey" SERVERKEY_KEY = "crate:pypi:serverkey" PYPI_SINCE_KEY = "crate:pypi:since" def process(name, version, timestamp, action, matches): package = PyPIPackage(name, version) package.process() def remove(name, version, timestamp, action, matches): package = PyPIPackage(name, version) package.delete() def remove_file(name, version, timestamp, action, matches): package = PyPIPackage(name, version) package.remove_files(*matches.groups()) @task def bulk_process(name, version, timestamp, action, matches): package = PyPIPackage(name) package.process(bulk=True) @task def bulk_synchronize(): pypi = xmlrpclib.ServerProxy(INDEX_URL) names = set() for package in pypi.list_packages(): names.add(package) bulk_process.delay(package, None, None, None, None) Package.objects.exclude(name__in=names).update(deleted=True) @task def synchronize(since=None): datastore = redis.StrictRedis(**getattr(settings, "PYPI_DATASTORE_CONFIG", {})) if since is None: s = datastore.get(PYPI_SINCE_KEY) if s is not None: since = int(float(s)) #since = 1320000896 #since = 1329500152 pypi = xmlrpclib.ServerProxy(INDEX_URL) headers = datastore.hgetall(SERVERKEY_KEY + ":headers") sig = requests.get(SERVERKEY_URL, headers=headers, prefetch=True) if not sig.status_code == 304: sig.raise_for_status() else: if sig.content != datastore.get(SERVERKEY_KEY): pass # @@@ Key rolled over, redownload all sigs. if since is None: # @@@ Should we do this for more than just initial? 
bulk_synchronize.delay() else: logger.info("[SYNCING] Changes since %s" % since) changes = pypi.changelog(since) for name, version, timestamp, action in changes: line_hash = hashlib.sha256(":".join([str(x) for x in (name, version, timestamp, action)])).hexdigest() logdata = {"action": action, "name": name, "version": version, "timestamp": timestamp, "hash": line_hash} if not datastore.exists("crate:pypi:changelog:%s" % line_hash): logger.debug("[PROCESS] %(name)s %(version)s %(timestamp)s %(action)s" % logdata) logger.debug("[HASH] %(name)s %(version)s %(hash)s" % logdata) dispatch = collections.OrderedDict([ (re.compile("^create$"), process), (re.compile("^new release$"), process), (re.compile("^add [\w\d\.]+ file .+$"), process), (re.compile("^remove$"), remove), (re.compile("^remove file (.+)$"), remove_file), (re.compile("^update [\w]+(, [\w]+)*$"), process), #(re.compile("^docupdate$"), docupdate), # @@@ Do Something #(re.compile("^add (Owner|Maintainer) .+$"), add_user_role), # @@@ Do Something #(re.compile("^remove (Owner|Maintainer) .+$"), remove_user_role), # @@@ Do Something ]) # Dispatch Based on the action for pattern, func in dispatch.iteritems(): matches = pattern.search(action) if matches is not None: func(name, version, timestamp, action, matches) break else: logger.warn("[UNHANDLED] %(name)s %(version)s %(timestamp)s %(action)s" % logdata) datastore.setex("crate:pypi:changelog:%s" % line_hash, datetime.datetime.utcnow().isoformat(), 2629743) else: logger.debug("[SKIP] %(name)s %(version)s %(timestamp)s %(action)s" % logdata) logger.debug("[HASH] %(name)s %(version)s %(hash)s" % logdata) datastore.set(PYPI_SINCE_KEY, time.mktime(datetime.datetime.utcnow().timetuple()))
Python
0
@@ -1588,16 +1588,83 @@ 500152%0A%0A + current = time.mktime(datetime.datetime.utcnow().timetuple())%0A%0A pypi @@ -4315,57 +4315,13 @@ EY, -time.mktime(datetime.datetime.utcnow().timetuple()) +current )%0A
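This diff takes the timestamp before `pypi.changelog(since)` is called and persists that value once the batch has been processed, so changes landing mid-sync are caught by the next run rather than skipped. The pattern in isolation, with `fetch_changes`, `handle_change` and `save_checkpoint` as illustrative stand-ins:

```python
import datetime
import time


def synchronize(fetch_changes, handle_change, save_checkpoint, since):
    # Take the cut-off *before* asking the server for changes; anything
    # that arrives while we are processing falls after this mark and is
    # picked up by the next run instead of being silently lost.
    current = time.mktime(datetime.datetime.utcnow().timetuple())
    for change in fetch_changes(since):
        handle_change(change)
    save_checkpoint(current)
```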
3ca6affb630f0dea9b414f5405acae7a20f213d2
add request apis
crawler/spiders/movies_spider.py
crawler/spiders/movies_spider.py
# -*- coding: utf-8 -*- import scrapy import json from core.models import db, Movie from crawler.spiders.subjects_spider import save_subject_detail class MoviesSpider(scrapy.Spider): name = 'MoviesSpider' allowed_domains = ['douban.com'] start_urls = [ 'https://frodo.douban.com/jsonp/subject_collection/movie_showing/items?os=ios&callback=&start=0&count=30', # 'https://api.douban.com/v2/movie/coming_soon', # 'https://api.douban.com/v2/movie/in_theaters', # 'https://api.douban.com/v2/movie/top250', ] def parse(self, response): res = json.loads(response.body) for subject in res['subjects']: save_subject_detail(subject['id'])
Python
0
@@ -344,25 +344,106 @@ ems? -os=ios&callback=& +start=0&count=100',%0A 'https://frodo.douban.com/jsonp/subject_collection/movie_latest/items? star @@ -456,9 +456,10 @@ unt= -3 +10 0',%0A
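The diff replaces the iOS-flavoured query string with plain `start`/`count` paging and adds a second collection, `movie_latest`, with `count=100`. Outside Scrapy, walking the same paged endpoint could look like this sketch (the URL pattern comes from the record; the loop, page size, and any auth requirements of the live API are assumptions):

```python
import json
import urllib.request

URL = ('https://frodo.douban.com/jsonp/subject_collection/'
       '{collection}/items?start={start}&count={count}')


def iter_subjects(collection, page_size=100):
    start = 0
    while True:
        url = URL.format(collection=collection, start=start, count=page_size)
        with urllib.request.urlopen(url) as resp:
            payload = json.loads(resp.read().decode('utf-8'))
        subjects = payload.get('subjects', [])
        if not subjects:        # empty page: we've walked off the end
            return
        for subject in subjects:
            yield subject
        start += page_size      # advance the paging window
```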
2be8bef16d7da5701a70ca23a854f5d0bbe416d5
Update learner record MFE port for local/devstack development
credentials/settings/devstack.py
credentials/settings/devstack.py
from credentials.settings._debug_toolbar import * from credentials.settings.base import * from credentials.settings.utils import get_logger_config, str2bool DEBUG = str2bool(os.environ.get("DEBUG", True)) ALLOWED_HOSTS = ["*"] LOGGING = get_logger_config(debug=True, dev_env=True, local_loglevel="DEBUG") del LOGGING["handlers"]["local"] SECRET_KEY = os.environ.get("SECRET_KEY", "change-me") LANGUAGE_CODE = os.environ.get("LANGUAGE_CODE", "en") CACHES = { "default": { "BACKEND": "django.core.cache.backends.memcached.MemcachedCache", "LOCATION": os.environ.get("CACHE_LOCATION", "edx.devstack.memcached:11211"), } } CREDENTIALS_SERVICE_USER = os.environ.get("CREDENTIALS_SERVICE_USER", "credentials_service_user") DATABASES = { "default": { "ENGINE": "django.db.backends.mysql", "NAME": os.environ.get("DB_NAME", "credentials"), "USER": os.environ.get("DB_USER", "credentials001"), "PASSWORD": os.environ.get("DB_PASSWORD", "password"), "HOST": os.environ.get("DB_HOST", "db"), "PORT": os.environ.get("DB_PORT", 3306), "ATOMIC_REQUESTS": False, "CONN_MAX_AGE": 60, } } INSTALLED_APPS += ["credentials.apps.edx_credentials_extensions"] EMAIL_BACKEND = "django.core.mail.backends.filebased.EmailBackend" EMAIL_FILE_PATH = "/tmp/credentials-emails" DEFAULT_FILE_STORAGE = os.environ.get("DEFAULT_FILE_STORAGE", "django.core.files.storage.FileSystemStorage") MEDIA_URL = os.environ.get("MEDIA_URL", "/media/") STATICFILES_STORAGE = os.environ.get("STATICFILES_STORAGE", "django.contrib.staticfiles.storage.StaticFilesStorage") STATIC_URL = os.environ.get("STATIC_URL", "/static/") # OAuth2 variables specific to social-auth/SSO login use case. SOCIAL_AUTH_EDX_OAUTH2_KEY = os.environ.get("SOCIAL_AUTH_EDX_OAUTH2_KEY", "credentials-sso-key") SOCIAL_AUTH_EDX_OAUTH2_SECRET = os.environ.get("SOCIAL_AUTH_EDX_OAUTH2_SECRET", "credentials-sso-secret") SOCIAL_AUTH_EDX_OAUTH2_ISSUER = os.environ.get("SOCIAL_AUTH_EDX_OAUTH2_ISSUER", "http://localhost:18000") SOCIAL_AUTH_EDX_OAUTH2_URL_ROOT = os.environ.get("SOCIAL_AUTH_EDX_OAUTH2_URL_ROOT", "http://edx.devstack.lms:18000") SOCIAL_AUTH_EDX_OAUTH2_LOGOUT_URL = os.environ.get("SOCIAL_AUTH_EDX_OAUTH2_LOGOUT_URL", "http://localhost:18000/logout") SOCIAL_AUTH_EDX_OAUTH2_PUBLIC_URL_ROOT = os.environ.get( "SOCIAL_AUTH_EDX_OAUTH2_PUBLIC_URL_ROOT", "http://localhost:18000", ) # OAuth2 variables specific to backend service API calls. 
BACKEND_SERVICE_EDX_OAUTH2_KEY = os.environ.get("BACKEND_SERVICE_EDX_OAUTH2_KEY", "credentials-backend-service-key") BACKEND_SERVICE_EDX_OAUTH2_SECRET = os.environ.get( "BACKEND_SERVICE_EDX_OAUTH2_SECRET", "credentials-backend-service-secret" ) BACKEND_SERVICE_EDX_OAUTH2_PROVIDER_URL = os.environ.get( "BACKEND_SERVICE_EDX_OAUTH2_PROVIDER_URL", "http://edx.devstack.lms:18000/oauth2", ) CORS_ORIGIN_WHITELIST = ("http://localhost:8080",) SOCIAL_AUTH_REDIRECT_IS_HTTPS = str2bool(os.environ.get("SOCIAL_AUTH_REDIRECT_IS_HTTPS", False)) JWT_AUTH.update( { "JWT_SECRET_KEY": "lms-secret", "JWT_ISSUER": "http://localhost:18000/oauth2", "JWT_AUDIENCE": None, "JWT_VERIFY_AUDIENCE": False, "JWT_PUBLIC_SIGNING_JWK_SET": ( '{"keys": [{"kid": "devstack_key", "e": "AQAB", "kty": "RSA", "n": "smKFSYowG6nNUAdeqH1jQQnH1PmIHphzBmwJ5vRf1vu' "48BUI5VcVtUWIPqzRK_LDSlZYh9D0YFL0ZTxIrlb6Tn3Xz7pYvpIAeYuQv3_H5p8tbz7Fb8r63c1828wXPITVTv8f7oxx5W3lFFgpFAyYMmROC" "4Ee9qG5T38LFe8_oAuFCEntimWxN9F3P-FJQy43TL7wG54WodgiM0EgzkeLr5K6cDnyckWjTuZbWI-4ffcTgTZsL_Kq1owa_J2ngEfxMCObnzG" 'y5ZLcTUomo4rZLjghVpq6KZxfS6I1Vz79ZsMVUWEdXOYePCKKsrQG20ogQEkmTf9FT_SouC6jPcHLXw"}]}' ), "JWT_ISSUERS": [ { "AUDIENCE": "lms-key", "ISSUER": "http://localhost:18000/oauth2", "SECRET_KEY": "lms-secret", } ], } ) SEND_EMAIL_ON_PROGRAM_COMPLETION = True USE_LEARNER_RECORD_MFE = False LEARNER_RECORD_MFE_RECORDS_PAGE_URL = "http://localhost:8080/" ##################################################################### # Lastly, see if the developer has any local overrides. try: from .private import * # pylint: disable=import-error except ImportError: pass
Python
0
@@ -2929,19 +2929,19 @@ calhost: -808 +199 0%22,)%0A%0ASO @@ -4093,11 +4093,11 @@ ost: -808 +199 0/%22%0A
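The diff itself is a two-character change: port 8080 becomes 1990 in both the CORS whitelist and the records-page URL. Since the rest of this devstack settings file reads overrides from the environment, the same values could be made configurable; a sketch under that assumption:

```python
import os

LEARNER_RECORD_MFE_RECORDS_PAGE_URL = os.environ.get(
    "LEARNER_RECORD_MFE_RECORDS_PAGE_URL", "http://localhost:1990/"
)
# Derive the CORS origin from the same setting so the two can't drift apart.
CORS_ORIGIN_WHITELIST = (LEARNER_RECORD_MFE_RECORDS_PAGE_URL.rstrip("/"),)
```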
3387dc727255647b8e38f3363807052abcceee61
put leaderboards vars to env vars
cron-scripts/get_weekly_stats.py
cron-scripts/get_weekly_stats.py
import asyncio import aiohttp import json import urllib.request import urllib.parse import datetime import pymongo CONFIG = json.load(open("config.json")) # For GUI-less OS uncomment the line below # dryscrape.start_xvfb() # Countries selection COUNTRIES = ("gr", "cy") # number of top players/teams TOP_PLAYERS = 5 TOP_TEAMS = 3 def request(url, params, headers): params = urllib.parse.urlencode(params) url = "{}?{}".format(url, params) request = urllib.request.Request(url, headers=headers) response = urllib.request.urlopen(request) return json.loads(response.read().decode('utf-8')) def get_leaderboards(): matchtypes = {} params = {'title': "coh2"} response = request(CONFIG['leaderboards'], params, CONFIG['headers']) id1v1 = None # get 1v1 matchtype id for mt in response['matchTypes']: if mt['name'] == '1V1': id1v1 = mt['id'] break # get leaderboard ids for leaderboard in response['leaderboards']: for m in leaderboard['leaderboardmap']: if m['matchtype_id'] == id1v1: matchtypes.update( {leaderboard['name']: leaderboard['id']}) elif m['statgroup_type'] in (2, 3, 4): matchtypes.update( {leaderboard['name']: leaderboard['id']}) return matchtypes async def get_results(matchtype, matchtype_id, aio_session, positions, sortBy=1, step=40, count=40): params = { 'leaderboard_id': matchtype_id, 'title': "coh2", 'platform': "PC_STEAM", 'sortBy': sortBy, 'start': 1, 'count': count } current_position = 1 category_results = [] while True: async with aio_session.get(CONFIG['specific_leaderboard'], params=params, headers=CONFIG['headers']) as response: response = await response.json() # if the leaderboardStats array is empty # we have exhausted this category if not response['leaderboardStats'] or current_position > positions: return (matchtype, category_results) params['start'] += step for group in response['statGroups']: found = all(member['country'] in COUNTRIES for member in group['members']) if found: print("found for matchtype:", matchtype) stats = next(stats for stats in response['leaderboardStats'] if stats['statGroup_id'] == group['id']) results = dict(stats) results['total'] = results['wins'] + results['losses'] results['ratio'] = f"{results['wins'] / results['total']:.0%}" if matchtype.startswith('1v1'): results['player'] = { 'profile_id': group['members'][0]['profile_id'], 'steam_id': group['members'][0]['name'], 'name': group['members'][0]['alias'], 'country': group['members'][0]['country'] } else: results['players'] = [ { 'profile_id': member['profile_id'], 'steam_id': member['name'], 'name': member['alias'], 'country': member['country'] } for member in group['members'] ] category_results.append(results) current_position += 1 if current_position > positions: break def normalize(data): normalized = { "1v1": {}, "team-of-2": {}, "team-of-3": {}, "team-of-4": {} } for matchtype, result in data: if matchtype.startswith("1v1"): normalized['1v1'].update({matchtype[3:]: result}) else: num = matchtype[6] key = "team-of-{}".format(num) normalized[key].update({matchtype[7:]: result}) return normalized async def main(): matchtypes = get_leaderboards() async with aiohttp.ClientSession() as aio_session: results = [ get_results(matchtype, matchtype_id, aio_session, TOP_PLAYERS) if matchtype.startswith('1v1') else get_results(matchtype, matchtype_id, aio_session, TOP_TEAMS) for matchtype, matchtype_id in matchtypes.items() ] completed_tasks = await asyncio.gather(*results) results = normalize([task for task in completed_tasks]) # Connect to a local MongoDB and store the results mongo_client = 
pymongo.MongoClient() grstats = mongo_client.coh2stats.weeklystats results = {'created': datetime.datetime.utcnow(), 'stats': results} grstats.insert(results) if __name__ == '__main__': eloop = asyncio.get_event_loop() try: eloop.run_until_complete(main()) finally: eloop.close()
Python
0
@@ -108,16 +108,27 @@ pymongo%0A +import os%0A%0A %0ACONFIG @@ -339,16 +339,125 @@ MS = 3%0A%0A +LEADERBOARDS = os.environ.get('LEADERBOARDS')%0ASPECIFIC_LEADERBOARD = os.environ.get('SPECIFIC_LEADERBOARD')%0A%0A %0Adef req @@ -834,30 +834,20 @@ est( -CONFIG%5B'leaderboards'%5D +LEADERBOARDS , pa @@ -1866,38 +1866,28 @@ get( -CONFIG%5B'specific_leaderboard'%5D +SPECIFIC_LEADERBOARD , pa
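Here the diff moves the two leaderboard endpoints into `os.environ.get(...)`, which quietly returns `None` when a variable is unset and only fails later, inside `request()`. A stricter variant fails fast at import time (variable names from the diff; the helper is an assumption):

```python
import os


def require_env(name):
    """Return an environment variable's value or fail with a clear message."""
    value = os.environ.get(name)
    if not value:
        raise RuntimeError('Missing required environment variable: %s' % name)
    return value


LEADERBOARDS = require_env('LEADERBOARDS')
SPECIFIC_LEADERBOARD = require_env('SPECIFIC_LEADERBOARD')
```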
ac7090293c4686b071ca314c37304b2eeddec766
Fix isort problems
pretix_cartshare/migrations/0002_auto_20161008_1047.py
pretix_cartshare/migrations/0002_auto_20161008_1047.py
# -*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2016-10-08 10:47 from __future__ import unicode_literals from django.db import migrations, models import pretix_cartshare.models class Migration(migrations.Migration): dependencies = [ ('pretix_cartshare', '0001_initial'), ] operations = [ migrations.AlterField( model_name='sharedcart', name='cart_id', field=models.CharField(default=pretix_cartshare.models.generate_cart_id, max_length=255, verbose_name='Cart ID'), ), ]
Python
0.000002
@@ -107,16 +107,47 @@ terals%0A%0A +import pretix_cartshare.models%0A from dja @@ -183,39 +183,8 @@ els%0A -import pretix_cartshare.models%0A %0A%0Acl
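The fix moves the plain `import pretix_cartshare.models` above the `from django.db import ...` line: with isort's default settings, straight `import` statements sort ahead of `from ... import` statements within the same section. Reconstructed from the diff, the corrected header reads:

```python
from __future__ import unicode_literals

import pretix_cartshare.models
from django.db import migrations, models
```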
bbb9ecacd59186f07f0120d154e625fabd5a87af
Set name on vpc
touchdown/aws/vpc/vpc.py
touchdown/aws/vpc/vpc.py
# Copyright 2014 Isotoma Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from botocore import session from touchdown.core.resource import Resource from touchdown.core.policy import Policy from touchdown.core.action import Action from touchdown.core.argument import String from touchdown.core import errors class VPCMixin(object): def __init__(self, *args, **kwargs): super(VPCMixin, self).__init__(*args, **kwargs) self.session = session.Session() # self.session.set_credentials(aws_access_key_id, aws_secret_access_key) self.service = self.session.get_service("ec2") self.endpoint = self.service.get_endpoint("eu-west-1") class VPC(Resource): """ A DNS zone hosted at Amazon Route53 """ resource_name = "vpc" subresources = [ ] name = String() cidr_block = String() class AddVPC(Action): description = "Add virtual private cloud'%(name)s'" def run(self): operation = self.policy.service.get_operation("CreateVpc") response, data = operation.call( self.policy.endpoint, CidrBlock=self.resource.cidr_block, ) if response.status_code != 200: raise errors.Error("Unable to create VPC") """ class UpdateVPCName(Action): description = "Change zone comment to '%(comment)s'" def __init__(self, policy, zone_id): super(UpdateHostedZoneComment, self).__init__(policy) self.zone_id = zone_id def run(self): operation = self.policy.service.get_operation("UpdateHostedZoneComment") response, data = operation.call( self.policy.endpoint, Id=self.zone_id, Comment=self.resource.comment, ) if response.status_code != 200: raise errors.Error("Failed to update hosted zone comment") """ class Apply(Policy, VPCMixin): name = "apply" resource = VPC default = True def get_vpc(self): operation = self.service.get_operation("DescribeVpcs") response, data = operation.call(self.endpoint) for vpc in data['Vpcs']: if vpc['CidrBlock'] == self.resource.cidr_block: return vpc def get_actions(self, runner): zone = self.get_vpc() if not zone: yield AddVPC(self)
Python
0.000001
@@ -1745,31 +1745,94 @@ %22)%0A%0A -%22%22%22%0Aclass UpdateVPCName + # FIXME: Create and invoke CreateTags to set the name here.%0A%0A%0Aclass CreateTags (Act @@ -1861,30 +1861,28 @@ = %22 -Change zone comment to +Set tags on resource '%25( @@ -1925,23 +1925,31 @@ policy, -zone_id +resources, tags ):%0A @@ -1957,39 +1957,26 @@ super( -UpdateHostedZoneComment +CreateTags , self). @@ -2009,25 +2009,54 @@ elf. -zone_id = zone_id +resources = resources%0A self.tags = tags %0A%0A @@ -2131,31 +2131,18 @@ on(%22 -UpdateHostedZoneComment +CreateTags %22)%0A @@ -2231,66 +2231,105 @@ -Id=self.zone_id,%0A Comment=self.resource.comment +Resources=self.resources,%0A Tags=%5B%7B%22Key%22: k, %22Value%22: v%7D for k, v in self.tags.items()%5D ,%0A @@ -2448,20 +2448,16 @@ mment%22)%0A -%22%22%22%0A %0A%0Aclass @@ -2868,16 +2868,17 @@ t_vpc()%0A +%0A @@ -2921,8 +2921,317 @@ C(self)%0A + return%0A%0A tags = dict((v%5B%22Key%22%5D, v%5B%22Value%22%5D) for v in zone.get('Tags', %5B%5D))%0A%0A if tags.get('name', '') != self.resource.name:%0A yield CreateTags(%0A self,%0A resources=%5Bzone%5B'VpcId'%5D%5D,%0A tags=%7B%22name%22: self.resource.name%7D%0A )%0A
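The commit adds a `CreateTags` action that invokes EC2's `CreateTags` operation through the botocore endpoint and yields it whenever the VPC's `name` tag differs from the resource definition. For reference, the same call through boto3 looks roughly like this (region and VPC id are placeholders, not from the source):

```python
import boto3

ec2 = boto3.client('ec2', region_name='eu-west-1')

# Apply a lowercase `name` tag, matching the touchdown code above.
ec2.create_tags(
    Resources=['vpc-0123456789abcdef0'],   # placeholder VPC id
    Tags=[{'Key': 'name', 'Value': 'my-vpc'}],
)
```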
7c7d517a7b749d43968ee491b181b437df2f923b
remove startswith filter
custom/enikshay/nikshay_datamigration/management/commands/create_enikshay_cases.py
custom/enikshay/nikshay_datamigration/management/commands/create_enikshay_cases.py
import logging from django.core.management import BaseCommand import mock from casexml.apps.case.mock import CaseFactory from casexml.apps.phone.cleanliness import set_cleanliness_flags_for_domain from custom.enikshay.nikshay_datamigration.factory import EnikshayCaseFactory, get_nikshay_codes_to_location from custom.enikshay.nikshay_datamigration.models import PatientDetail logger = logging.getLogger('nikshay_datamigration') DEFAULT_NUMBER_OF_PATIENTS_PER_FORM = 50 def mock_ownership_cleanliness_checks(): # this function is expensive so bypass this during processing return mock.patch( 'casexml.apps.case.xform._get_all_dirtiness_flags_from_cases', new=lambda case_db, touched_cases: [], ) class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument('domain') parser.add_argument( '--start', dest='start', default=0, type=int, ) parser.add_argument( '--limit', dest='limit', default=None, type=int, ) parser.add_argument( '--chunksize', dest='chunk_size', default=DEFAULT_NUMBER_OF_PATIENTS_PER_FORM, type=int, ) parser.add_argument( '--test-phi', dest='test_phi', default=None, type=str, ) parser.add_argument( '--startswith', dest='startswith', default='', type=str, ) @mock_ownership_cleanliness_checks() def handle(self, domain, **options): base_query = PatientDetail.objects.filter( PregId__startswith=options['startswith'], ).order_by('PregId') start = options['start'] limit = options['limit'] chunk_size = options['chunk_size'] test_phi = options['test_phi'] if test_phi: logger.warning("** USING TEST PHI ID **") if limit is not None: patient_details = base_query[start:start + limit] else: patient_details = base_query[start:] total = patient_details.count() counter = 0 num_succeeded = 0 num_failed = 0 logger.info('Starting migration of %d patient cases on domain %s.' % (total, domain)) nikshay_codes_to_location = get_nikshay_codes_to_location(domain) factory = CaseFactory(domain=domain) case_structures = [] for patient_detail in patient_details: counter += 1 try: case_factory = EnikshayCaseFactory( domain, patient_detail, nikshay_codes_to_location, test_phi ) case_structures.extend(case_factory.get_case_structures_to_create()) except Exception: num_failed += 1 logger.error( 'Failed on %d of %d. Nikshay ID=%s' % ( counter, total, patient_detail.PregId ), exc_info=True, ) else: num_succeeded += 1 if num_succeeded % chunk_size == 0: logger.info('committing cases {}-{}...'.format(num_succeeded - chunk_size, num_succeeded)) factory.create_or_update_cases(case_structures) case_structures = [] logger.info('done') logger.info( 'Succeeded on %s of %d. Nikshay ID=%s' % ( counter, total, patient_detail.PregId ) ) if case_structures: logger.info('committing final cases...'.format(num_succeeded - chunk_size, num_succeeded)) factory.create_or_update_cases(case_structures) logger.info('Done creating cases for domain %s.' % domain) logger.info('Number of attempts: %d.' % counter) logger.info('Number of successes: %d.' % num_succeeded) logger.info('Number of failures: %d.' % num_failed) # since we circumvented cleanliness checks just call this at the end logger.info('Setting cleanliness flags') set_cleanliness_flags_for_domain(domain, force_full=True) logger.info('Done!')
Python
0.000001
@@ -1419,152 +1419,8 @@ ) -%0A parser.add_argument(%0A '--startswith',%0A dest='startswith',%0A default='',%0A type=str,%0A ) %0A%0A @@ -1546,80 +1546,8 @@ cts. -filter(%0A PregId__startswith=options%5B'startswith'%5D,%0A ). orde
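With the `--startswith` option gone, the command just orders all `PatientDetail` rows by `PregId`, slices with `start`/`limit`, and commits case structures every `chunk_size` successes. The chunk-and-flush pattern in isolation (names are illustrative):

```python
def process_in_chunks(items, commit, chunk_size=50):
    pending = []
    for item in items:
        pending.append(item)
        if len(pending) >= chunk_size:
            commit(pending)       # flush a full batch
            pending = []
    if pending:
        commit(pending)           # flush the final partial batch


# Example: process_in_chunks(range(7), print, chunk_size=3)
```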
1b7289dd8b72cb67fe5c369e99b60e5cd0e85958
Fix neovim support
ftplugin/markdown/follow_markdown_links.py
ftplugin/markdown/follow_markdown_links.py
import re try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse from vim import * DEFAULT_EXTENSION = 'md' MAX_LINE_LEN = 1024 def _extract_link_under_cursor(): _, col = current.window.cursor line = current.line # skip long lines to stop hogging CPU in vim if len(line) >= MAX_LINE_LEN: return # find the markdown link substring from line start_pos = line[:col].rfind("[") if start_pos < 0: return end_pos = line[col:].rfind(")") if end_pos < 0: return end_pos += (col + 1) link = line[start_pos:end_pos] return link def _is_local_link(link): link = urlparse(link) return not link.netloc def _resolve_link(link): buf_path = os.path.dirname(current.buffer.name) return os.path.join(buf_path, link) def _ensure_extension(link): name = os.path.basename(link) if '.' not in name: return link + '.' + DEFAULT_EXTENSION return link def follow_link(): link = _extract_link_under_cursor() if not link: return # extract link text and link url link = re.findall(r'^\[([^]]*)\]\(([^)]*)\)$', link) if not link: return # if not local link then stop text, link = link[0] if not _is_local_link(link): return # Support [Text]() cases; Assume Text as link # Also assume default extension if not link: link = text link = _ensure_extension(link) # Resolve link (if relative) with relation # to current file in buffer link = _resolve_link(link) # Open if exists if os.path.exists(link): return command('e %s' % link) # Directory path does not exist. Ask user to create it. dirpath = os.path.dirname(link) if not os.path.exists(dirpath): confirm_fn = Function('confirm') msg = '"%s" does not exist. create?' % dirpath result = confirm_fn(msg, "&Yes\n&No") if result != 1: return os.makedirs(dirpath) # Open as new file return command('e %s' % link)
Python
0
@@ -1,12 +1,22 @@ +import os%0A import re%0Atr @@ -13,16 +13,16 @@ port re%0A - try:%0A @@ -115,25 +115,18 @@ se%0A%0A -from vim import -* +vim %0A%0ADE @@ -216,16 +216,20 @@ , col = +vim. current. @@ -253,16 +253,20 @@ line = +vim. current. @@ -765,16 +765,20 @@ dirname( +vim. current. @@ -1604,32 +1604,36 @@ %0A return +vim. command('e %25s' %25 @@ -1998,16 +1998,16 @@ ew file%0A - retu @@ -2009,16 +2009,20 @@ return +vim. command(
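The fix swaps `from vim import *` for an explicit `import vim` (plus `import os`, which the module used but never imported) and qualifies every access as `vim.current`, `vim.command`, and so on — the commit subject indicates the star-import broke under Neovim, while explicit module access works in both editors. A minimal function in the corrected style (it only runs inside Vim/Neovim's embedded Python):

```python
import vim  # available only inside Vim/Neovim's embedded Python


def open_link(path):
    # Qualify everything through the module; works in Vim and Neovim alike.
    vim.command('edit %s' % path)
    return vim.current.buffer.name
```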
44532322f4a183bd535de22374d149a6e2f8176a
Fix pep8 errors
RandoAmisSecours/views/reporting.py
RandoAmisSecours/views/reporting.py
# -*- coding: utf-8 -*- # vim: set ts=4 # Copyright 2014 Rémi Duraffort # This file is part of RandoAmisSecours. # # RandoAmisSecours is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # RandoAmisSecours is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with RandoAmisSecours. If not, see <http://www.gnu.org/licenses/> from __future__ import unicode_literals from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.admin.views.decorators import staff_member_required from django.contrib.auth.models import User from django.utils.timezone import datetime, utc from RandoAmisSecours.models import Outing, DRAFT, CONFIRMED, FINISHED from datetime import timedelta @staff_member_required def index(request): user_count = User.objects.count() return render_to_response('RandoAmisSecours/reporting/index.html', {'user_count': user_count}, context_instance=RequestContext(request)) @staff_member_required def outings(request): return render_to_response('RandoAmisSecours/reporting/outings.html', context_instance=RequestContext(request)) @staff_member_required def users(request): now = datetime.utcnow().replace(tzinfo=utc) users_list = User.objects.all() joining_dates = [0] * 366 last_logins = [0] * 366 for user in users_list: days_delta = (now - user.date_joined).days if days_delta <= 365: joining_dates[365 - days_delta] += 1 days_delta = (now - user.last_login).days if days_delta <= 365: last_logins[365 - days_delta] += 1 return render_to_response('RandoAmisSecours/reporting/users.html', {'joining_dates': joining_dates, 'last_logins': last_logins}, context_instance=RequestContext(request))
Python
0.000217
@@ -1453,32 +1453,33 @@ text(request))%0A%0A +%0A @staff_member_re @@ -1653,16 +1653,17 @@ uest))%0A%0A +%0A @staff_m
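Both hunks simply insert the second blank line that PEP 8 (pycodestyle E302) requires between top-level definitions. For reference:

```python
def first():
    return 1


def second():  # exactly two blank lines separate top-level defs (E302)
    return 2
```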
6f6e57fe3e8251454b5ee2e349199424dc60a118
Add thumbnail treklist
geotrek/api/mobile/serializers/trekking.py
geotrek/api/mobile/serializers/trekking.py
from __future__ import unicode_literals from django.conf import settings from rest_framework import serializers from rest_framework_gis import serializers as geo_serializers from geotrek.common import models as common_models from geotrek.zoning.models import City class AttachmentSerializer(serializers.ModelSerializer): url = serializers.SerializerMethodField(read_only=True) def get_url(self, obj): if not obj.attachment_file: return "" return '/{id}{file}'.format(id=obj.object_id, file=obj.attachment_file.url) class Meta: model = common_models.Attachment fields = ( 'url', 'author', 'title', 'legend', ) if 'geotrek.trekking' in settings.INSTALLED_APPS: from geotrek.trekking import models as trekking_models class POIListSerializer(geo_serializers.GeoFeatureModelSerializer): pictures = AttachmentSerializer(many=True, ) thumbnail = serializers.ReadOnlyField(source='serializable_thumbnail_mobile') geometry = geo_serializers.GeometryField(read_only=True, precision=7, source='geom2d_transformed') type_pois = serializers.ReadOnlyField(source='type.pk') class Meta: model = trekking_models.POI geo_field = 'geometry' fields = ( 'id', 'pictures', 'name', 'description', 'thumbnail', 'type_pois', 'geometry', ) class TrekDetailSerializer(geo_serializers.GeoFeatureModelSerializer): geometry = geo_serializers.GeometryField(read_only=True, precision=7, source='geom2d_transformed') length = serializers.SerializerMethodField(read_only=True) pictures = AttachmentSerializer(many=True, ) cities = serializers.SerializerMethodField(read_only=True) departure_city = serializers.SerializerMethodField(read_only=True) arrival_city = serializers.SerializerMethodField(read_only=True) def get_cities(self, obj): qs = City.objects if hasattr(qs, 'existing'): qs = qs.existing() cities = qs.filter(geom__intersects=(obj.geom, 0)) return [city.code for city in cities] def get_departure_city(self, obj): qs = City.objects if hasattr(qs, 'existing'): qs = qs.existing() if obj.start_point: city = qs.filter(geom__covers=(obj.start_point, 0)).first() if city: return city.code return None def get_arrival_city(self, obj): qs = City.objects if hasattr(qs, 'existing'): qs = qs.existing() if obj.start_point: city = qs.filter(geom__covers=(obj.end_point, 0)).first() if city: return city.code return None def get_length(self, obj): return round(obj.length_2d_m, 1) def get_geometry(self, obj): return obj.geom2d_transformed class Meta: model = trekking_models.Trek geo_field = 'geometry' fields = ( 'id', 'name', 'accessibilities', 'description_teaser', 'cities', 'description', 'departure', 'arrival', 'duration', 'access', 'advised_parking', 'advice', 'difficulty', 'length', 'ascent', 'descent', 'route', 'is_park_centered', 'min_elevation', 'max_elevation', 'themes', 'networks', 'practice', 'difficulty', 'geometry', 'pictures', 'information_desks', 'cities', 'departure_city', 'arrival_city' ) auto_bbox = True class TrekListSerializer(geo_serializers.GeoFeatureModelSerializer): thumbnail = serializers.SerializerMethodField(source='serializable_thumbnail_mobile') length = serializers.SerializerMethodField(read_only=True) geometry = geo_serializers.GeometryField(read_only=True, precision=7, source='start_point', ) cities = serializers.SerializerMethodField(read_only=True) departure_city = serializers.SerializerMethodField(read_only=True) def get_thumbnail(self, obj): if obj.serializable_thumbnail_mobile: return '/{id}{file}'.format(id=obj.pk, file=obj.serializable_thumbnail_mobile) return None def get_cities(self, obj): qs = City.objects if 
hasattr(qs, 'existing'): qs = qs.existing() cities = qs.filter(geom__intersects=(obj.geom, 0)) return [city.code for city in cities] def get_departure_city(self, obj): qs = City.objects if hasattr(qs, 'existing'): qs = qs.existing() if obj.start_point: city = qs.filter(geom__covers=(obj.start_point, 0)).first() if city: return city.code return None def get_length(self, obj): return round(obj.length_2d_m, 1) class Meta: model = trekking_models.Trek geo_field = 'geometry' fields = ( 'id', 'thumbnail', 'name', 'departure', 'accessibilities', 'route', 'departure_city', 'difficulty', 'practice', 'themes', 'length', 'geometry', 'cities', 'duration' )
Python
0
@@ -1581,32 +1581,126 @@ d_transformed')%0A + thumbnail = serializers.SerializerMethodField(source='serializable_thumbnail_mobile')%0A length = @@ -3116,16 +3116,224 @@ formed%0A%0A + def get_thumbnail(self, obj):%0A if obj.serializable_thumbnail_mobile:%0A return '/%7Bid%7D%7Bfile%7D'.format(id=obj.pk, file=obj.serializable_thumbnail_mobile)%0A return None%0A%0A @@ -3472,16 +3472,29 @@ 'name', + 'thumbnail', 'access
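The change gives `TrekDetailSerializer` the same `thumbnail` handling the list serializer already had: a `SerializerMethodField` whose getter builds a `/{id}{file}` path, or returns `None`. Stripped of the Geotrek specifics, the DRF pattern is (model attribute names are illustrative):

```python
from rest_framework import serializers


class ItemSerializer(serializers.Serializer):
    thumbnail = serializers.SerializerMethodField()

    def get_thumbnail(self, obj):
        # Build a relative URL, or report no thumbnail at all.
        if obj.thumbnail_file:
            return '/{id}{file}'.format(id=obj.pk, file=obj.thumbnail_file)
        return None
```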
b5b5542af86f2b8fd7e6613d6f5e21961cae17e8
Remove useless type
geotrek/signage/migrations/0001_initial.py
geotrek/signage/migrations/0001_initial.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.14 on 2019-01-10 15:26 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import geotrek.authent.models def deplace_data(apps, schema_editor): # We can't import Infrastructure models directly as it may be a newer # version than this migration expects. We use the historical version. Old_Signage = apps.get_model('infrastructure', 'Signage') New_Signage = apps.get_model('signage', 'Signage') InfrastructureType = apps.get_model('infrastructure', 'InfrastructureType') SignageType = apps.get_model('signage', 'SignageType') for signagetype in InfrastructureType.objects.filter(type='S'): values = signagetype.values() del values['type'] SignageType.objects.create(**values) signagetype.delete() for signage in Old_Signage.objects.all().values(): New_Signage.objects.create(**signage) class Migration(migrations.Migration): initial = True dependencies = [ ('infrastructure', '0010_replace_table_name'), ('common', '0003_auto_20180608_1236'), ('core', '0008_aggregation_infrastructure'), ('authent', '0003_auto_20181203_1518'), ] operations = [ migrations.CreateModel( name='Signage', fields=[ ('published', models.BooleanField(db_column=b'public', default=False, help_text='Online', verbose_name='Published')), ('publication_date', models.DateField(blank=True, db_column=b'date_publication', editable=False, null=True, verbose_name='Publication date')), ('topo_object', models.OneToOneField(db_column=b'evenement', on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.Topology')), ('name', models.CharField(db_column=b'nom', help_text='Reference, code, ...', max_length=128, verbose_name='Name')), ('description', models.TextField(blank=True, db_column=b'description', help_text='Specificites', verbose_name='Description')), ('implantation_year', models.PositiveSmallIntegerField(db_column=b'annee_implantation', null=True, verbose_name='Implantation year')), ('eid', models.CharField(blank=True, db_column=b'id_externe', max_length=1024, null=True, verbose_name='External id')), ('code', models.CharField(blank=True, db_column=b'code_commune', max_length=250, null=True, verbose_name='Code commune')), ('printed_elevation', models.IntegerField(blank=True, db_column=b'altitude_imprimee', null=True, verbose_name='Printed Elevation')), ('administrator', models.ForeignKey(db_column=b'gestionnaire', null=True, on_delete=django.db.models.deletion.CASCADE, to='common.Organism', verbose_name='Administrator')), ('condition', models.ForeignKey(blank=True, db_column=b'etat', null=True, on_delete=django.db.models.deletion.PROTECT, to='infrastructure.InfrastructureCondition', verbose_name='Condition')), ], options={ 'verbose_name': 'Signage', 'verbose_name_plural': 'Signages', }, bases=('core.topology', models.Model), ), migrations.CreateModel( name='SignageSealing', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('label', models.CharField(db_column=b'etat', max_length=250, verbose_name='Name')), ('structure', models.ForeignKey(blank=True, db_column=b'structure', default=geotrek.authent.models.default_structure_pk, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure')), ], options={ 'db_table': 'a_b_scellement', 'verbose_name': 'Signage Sealing', 'verbose_name_plural': 'Signages Sealing', }, ), migrations.CreateModel( name='SignageType', fields=[ ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('pictogram', models.FileField(blank=True, db_column=b'picto', max_length=512, null=True, upload_to=b'upload', verbose_name='Pictogram')), ('label', models.CharField(db_column=b'nom', max_length=128)), ('structure', models.ForeignKey(blank=True, db_column=b'structure', default=geotrek.authent.models.default_structure_pk, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure')), ], options={ 'db_table': 'a_b_signaletique', 'verbose_name': 'Signage Type', 'verbose_name_plural': 'Signage Types', }, ), migrations.AddField( model_name='signage', name='sealing', field=models.ForeignKey(db_column=b'scellement', null=True, on_delete=django.db.models.deletion.CASCADE, to='signage.SignageSealing', verbose_name='Sealing'), ), migrations.AddField( model_name='signage', name='structure', field=models.ForeignKey(db_column=b'structure', default=geotrek.authent.models.default_structure_pk, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'), ), migrations.AddField( model_name='signage', name='type', field=models.ForeignKey(db_column=b'type', on_delete=django.db.models.deletion.CASCADE, to='signage.SignageType', verbose_name='Type'), ), migrations.RunPython(deplace_data), ]
Python
0.000133
@@ -707,16 +707,22 @@ objects. +all(). filter(t @@ -733,38 +733,8 @@ 'S') -:%0A values = signagetype .val @@ -738,16 +738,17 @@ values() +: %0A @@ -752,22 +752,27 @@ del -values +signagetype %5B'type'%5D @@ -813,15 +813,95 @@ e(** -values) +signagetype)%0A%0A for signagetypedelete in InfrastructureType.objects.filter(type='S'): %0A @@ -916,16 +916,22 @@ nagetype +delete .delete( @@ -932,16 +932,17 @@ elete()%0A +%0A for
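The fix makes the loop iterate over `.values()` dictionaries, drop the obsolete `type` key from each, re-create the rows as `SignageType`, and only then delete the originals. The same step in a cleaner shape (model labels from the record; `pop` used for brevity):

```python
def move_signage_types(apps, schema_editor):
    OldType = apps.get_model('infrastructure', 'InfrastructureType')
    NewType = apps.get_model('signage', 'SignageType')
    for row in OldType.objects.filter(type='S').values():
        row.pop('type')               # the new model has no `type` column
        NewType.objects.create(**row)
    OldType.objects.filter(type='S').delete()
```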
b8e7b9d9316e84fec4c8524eab6839e82b8534c1
Fix typo in add_subset
glue_vispy_viewers/volume/volume_viewer.py
glue_vispy_viewers/volume/volume_viewer.py
from ..common.vispy_data_viewer import BaseVispyViewer from .layer_artist import VolumeLayerArtist from .layer_style_widget import VolumeLayerStyleWidget class VispyVolumeViewer(BaseVispyViewer): LABEL = "3D Volume Rendering" _layer_style_widget_cls = VolumeLayerStyleWidget def add_data(self, data): if data in self._layer_artist_container: return True layer_artist = VolumeLayerArtist(data, vispy_viewer=self._vispy_widget) if len(self._layer_artist_container) == 0: self._options_widget.set_limits(*layer_artist.bbox) self._layer_artist_container.append(layer_artist) return True def add_subset(self, subset): if subset in self._layer_artist_container: return if subset.to_mask().ndim != 3: return layer_artist = VolumeLayerArtist(message.subset, vispy_viewer=self._vispy_widget) self._layer_artist_container.append(layer_artist) def _add_subset(self, message): self.add_subset(message.subset) def _update_attributes(self, index=None, layer_artist=None): pass
Python
0.000093
@@ -868,24 +868,16 @@ rArtist( -message. subset,
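The typo left `message.subset` inside `add_subset(self, subset)`, where no `message` exists, so adding any 3-D subset raised `NameError` at runtime — the kind of bug that hides until the branch actually executes. A tiny illustration:

```python
def add_subset(subset):
    return message.subset   # NameError: 'message' was never defined here


try:
    add_subset('some-subset')
except NameError as exc:
    print(exc)              # name 'message' is not defined
```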
dcffc99e64fa6f90b39dfa8bcb79e441e728831e
Updated the max length of a log message to be 1024 characters
google/cloud/forseti/common/util/logger.py
google/cloud/forseti/common/util/logger.py
# Copyright 2017 The Forseti Security Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A basic util that wraps logging. Setup logging for Forseti Security. Logs to console and syslog. """ import logging import logging.handlers DEFAULT_LOG_FMT = ('%(asctime)s %(levelname)s ' '%(name)s(%(funcName)s): %(message)s') SYSLOG_LOG_FMT = ('[forseti-security] %(levelname)s ' '%(name)s(%(funcName)s): %(message)s') LOGGERS = {} LOGLEVELS = { 'debug': logging.DEBUG, 'info' : logging.INFO, 'warning' : logging.WARN, 'error' : logging.ERROR, } LOGLEVEL = logging.INFO def get_logger(module_name): """Setup the logger. Args: module_name (str): The name of the mdule to describe the log entry. Returns: logger: An instance of the configured logger. """ # TODO: Move this into a configuration file. console_handler = logging.StreamHandler() console_handler.setFormatter(logging.Formatter(DEFAULT_LOG_FMT)) syslog_handler = logging.handlers.SysLogHandler() syslog_handler.setFormatter(logging.Formatter(SYSLOG_LOG_FMT)) logger_instance = logging.getLogger(module_name) logger_instance.addHandler(syslog_handler) logger_instance.setLevel(LOGLEVEL) LOGGERS[module_name] = logger_instance return logger_instance def _map_logger(func): """Map function to current loggers. Args: func (function): Function to call on every logger. """ for logger in LOGGERS.itervalues(): func(logger) def set_logger_level(level): """Modify log level of existing loggers as well as the default for new loggers. Args: level (int): The log level to set the loggers to. """ # pylint: disable=global-statement global LOGLEVEL LOGLEVEL = level _map_logger(lambda logger: logger.setLevel(level)) def set_logger_level_from_config(level_name): """Set the logger level from a config value. Args: level_name (str): The log level name. The accepted values are in the LOGLEVELS variable. """ set_logger_level(LOGLEVELS.get(level_name, LOGLEVEL))
Python
0.999991
@@ -852,24 +852,29 @@ : %25(message) +.1024 s')%0ASYSLOG_L @@ -972,16 +972,21 @@ message) +.1024 s')%0ALOGG
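The change appends a `.1024` precision to the `%(message)s` conversion in both formats; for `%s`, printf-style precision truncates the string, so every log message is capped at 1024 characters. A quick demonstration with a smaller cap:

```python
fmt = '%(levelname)s: %(message).10s'   # .10 caps the message at 10 chars
print(fmt % {'levelname': 'INFO', 'message': 'abcdefghijklmnop'})
# -> INFO: abcdefghij
```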
7f687f75eff291f0acb0038e9552b84a367d17c3
test merge
Shigemura_Python_NeuralNetwork_1.py
Shigemura_Python_NeuralNetwork_1.py
""" network.py - Changed at MASTER branch ~~~~~~~~~~ A module to implement the stochastic gradient descent learning algorithm for a feedforward neural network. Gradients are calculated using backpropagation. Note that I have focused on making the code simple, easily readable, and easily modifiable. It is not optimized, and omits many desirable features. """ #### Libraries # Standard library import random # Third-party libraries import numpy as np class Network(object): def __init__(self, sizes): """The list ``sizes`` contains the number of neurons in the respective layers of the network. For example, if the list was [2, 3, 1] then it would be a three-layer network, with the first layer containing 2 neurons, the second layer 3 neurons, and the third layer 1 neuron. The biases and weights for the network are initialized randomly, using a Gaussian distribution with mean 0, and variance 1. Note that the first layer is assumed to be an input layer, and by convention we won't set any biases for those neurons, since biases are only ever used in computing the outputs from later layers.""" self.num_layers = len(sizes) self.sizes = sizes self.biases = [np.random.randn(y, 1) for y in sizes[1:]] self.weights = [np.random.randn(y, x) for x, y in zip(sizes[:-1], sizes[1:])] def feedforward(self, a): """Return the output of the network if ``a`` is input.""" for b, w in zip(self.biases, self.weights): a = sigmoid(np.dot(w, a)+b) return a def SGD(self, training_data, epochs, mini_batch_size, eta, test_data=None): """Train the neural network using mini-batch stochastic gradient descent. The ``training_data`` is a list of tuples ``(x, y)`` representing the training inputs and the desired outputs. The other non-optional parameters are self-explanatory. If ``test_data`` is provided then the network will be evaluated against the test data after each epoch, and partial progress printed out. This is useful for tracking progress, but slows things down substantially.""" if test_data: n_test = len(test_data) n = len(training_data) for j in xrange(epochs): random.shuffle(training_data) mini_batches = [ training_data[k:k+mini_batch_size] for k in xrange(0, n, mini_batch_size)] for mini_batch in mini_batches: self.update_mini_batch(mini_batch, eta) if test_data: print "Epoch {0}: {1} / {2}".format( j, self.evaluate(test_data), n_test) else: print "Epoch {0} complete".format(j) def update_mini_batch(self, mini_batch, eta): """Update the network's weights and biases by applying gradient descent using backpropagation to a single mini batch. The ``mini_batch`` is a list of tuples ``(x, y)``, and ``eta`` is the learning rate.""" nabla_b = [np.zeros(b.shape) for b in self.biases] nabla_w = [np.zeros(w.shape) for w in self.weights] for x, y in mini_batch: delta_nabla_b, delta_nabla_w = self.backprop(x, y) nabla_b = [nb+dnb for nb, dnb in zip(nabla_b, delta_nabla_b)] nabla_w = [nw+dnw for nw, dnw in zip(nabla_w, delta_nabla_w)] self.weights = [w-(eta/len(mini_batch))*nw for w, nw in zip(self.weights, nabla_w)] self.biases = [b-(eta/len(mini_batch))*nb for b, nb in zip(self.biases, nabla_b)] def backprop(self, x, y): """Return a tuple ``(nabla_b, nabla_w)`` representing the gradient for the cost function C_x. 
``nabla_b`` and ``nabla_w`` are layer-by-layer lists of numpy arrays, similar to ``self.biases`` and ``self.weights``.""" nabla_b = [np.zeros(b.shape) for b in self.biases] nabla_w = [np.zeros(w.shape) for w in self.weights] # feedforward activation = x activations = [x] # list to store all the activations, layer by layer zs = [] # list to store all the z vectors, layer by layer for b, w in zip(self.biases, self.weights): z = np.dot(w, activation)+b zs.append(z) activation = sigmoid(z) activations.append(activation) # backward pass delta = self.cost_derivative(activations[-1], y) * \ sigmoid_prime(zs[-1]) nabla_b[-1] = delta nabla_w[-1] = np.dot(delta, activations[-2].transpose()) # Note that the variable l in the loop below is used a little # differently to the notation in Chapter 2 of the book. Here, # l = 1 means the last layer of neurons, l = 2 is the # second-last layer, and so on. It's a renumbering of the # scheme in the book, used here to take advantage of the fact # that Python can use negative indices in lists. for l in xrange(2, self.num_layers): z = zs[-l] sp = sigmoid_prime(z) delta = np.dot(self.weights[-l+1].transpose(), delta) * sp nabla_b[-l] = delta nabla_w[-l] = np.dot(delta, activations[-l-1].transpose()) return (nabla_b, nabla_w) def evaluate(self, test_data): """Return the number of test inputs for which the neural network outputs the correct result. Note that the neural network's output is assumed to be the index of whichever neuron in the final layer has the highest activation.""" test_results = [(np.argmax(self.feedforward(x)), y) for (x, y) in test_data] return sum(int(x == y) for (x, y) in test_results) def cost_derivative(self, output_activations, y): """Return the vector of partial derivatives \partial C_x / \partial a for the output activations.""" return (output_activations-y) #### Miscellaneous functions def sigmoid(z): """The sigmoid function.""" return 1.0/(1.0+np.exp(-z)) def sigmoid_prime(z): """Derivative of the sigmoid function.""" return sigmoid(z)*(1-sigmoid(z))
Python
0.000001
@@ -1,12 +1,25 @@ %22%22%22%0A +%3C%3C%3C%3C%3C%3C%3C HEAD%0A network. @@ -48,16 +48,83 @@ branch%0A +=======%0Anetwork.py - CHANGED AT SHIGEMURA BRANCH%0A%3E%3E%3E%3E%3E%3E%3E Shigemura%0A ~~~~~~~~
a61e2a461176801a3089e1db341e5e80f4b08659
Remove debug code
UM/Qt/Bindings/PointingRectangle.py
UM/Qt/Bindings/PointingRectangle.py
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from PyQt5.QtCore import pyqtSlot, pyqtProperty, pyqtSignal, PYQT_VERSION from PyQt5.QtCore import QPoint from PyQt5.QtGui import QColor from PyQt5.QtQuick import QQuickItem, QSGGeometryNode, QSGGeometry, QSGFlatColorMaterial, QSGSimpleRectNode class PointingRectangle(QQuickItem): def __init__(self, parent = None): super().__init__(parent) self.setFlag(QQuickItem.ItemHasContents) self._arrow_size = 0 self._color = QColor(255, 255, 255, 255) self._target = QPoint(0,0) self._geometry = None self._material = None self._node = None self._attributes = None def getArrowSize(self): return self._arrow_size def setArrowSize(self, size): if size != self._arrow_size: self._arrow_size = size self.update() self.arrowSizeChanged.emit() arrowSizeChanged = pyqtSignal() arrowSize = pyqtProperty(float, fget=getArrowSize, fset=setArrowSize, notify=arrowSizeChanged) def getTarget(self): return self._target def setTarget(self, target): if target != self._target: self._target = target self.update() self.targetChanged.emit() targetChanged = pyqtSignal() target = pyqtProperty(QPoint, fget=getTarget, fset=setTarget, notify=targetChanged) def getColor(self): return self._color def setColor(self, color): if color != self._color: self._color = color self.update() self.colorChanged.emit() colorChanged = pyqtSignal() colorChanged = pyqtSignal() color = pyqtProperty(QColor, fget=getColor, fset=setColor, notify=colorChanged) def updatePaintNode(self, paint_node, update_data): if paint_node is None: paint_node = QSGGeometryNode() geometry = QSGGeometry(QSGGeometry.defaultAttributes_Point2D(), 7, 9) geometry.setDrawingMode(QSGGeometry.GL_TRIANGLES) geometry.vertexDataAsPoint2D()[0].set(0, 0) geometry.vertexDataAsPoint2D()[1].set(0, self.height()) geometry.vertexDataAsPoint2D()[2].set(self.width(), self.height()) geometry.vertexDataAsPoint2D()[3].set(self.width(), 0) # no arrow by default geometry.vertexDataAsPoint2D()[4].set(0, 0) geometry.vertexDataAsPoint2D()[5].set(0, 0) geometry.vertexDataAsPoint2D()[6].set(0, 0) target_offset = self._target - QPoint(self.x(), self.y()) print(target_offset) if target_offset.x() >= 0 and target_offset.x() <= self.width(): arrow_size = min(self._arrow_size, self.width()/2) arrow_offset = max(arrow_size, min(self.width() - arrow_size, target_offset.x())) if target_offset.y() < 0: # top geometry.vertexDataAsPoint2D()[4].set(arrow_offset - arrow_size, 0) geometry.vertexDataAsPoint2D()[5].set(arrow_offset, - arrow_size) geometry.vertexDataAsPoint2D()[6].set(arrow_offset + arrow_size, 0) elif target_offset.y() > self.height(): # bottom geometry.vertexDataAsPoint2D()[4].set(arrow_offset - arrow_size, self.height()) geometry.vertexDataAsPoint2D()[5].set(arrow_offset, self.height() +arrow_size) geometry.vertexDataAsPoint2D()[6].set(arrow_offset + arrow_size, self.height()) elif target_offset.y() >= 0 and target_offset.y() <= self.height(): arrow_size = min(self._arrow_size, self.height()/2) arrow_offset = max(arrow_size, min(self.height() - arrow_size, target_offset.y())) if target_offset.x() < 0: # left geometry.vertexDataAsPoint2D()[4].set(0, arrow_offset - arrow_size) geometry.vertexDataAsPoint2D()[5].set(-arrow_size, arrow_offset) geometry.vertexDataAsPoint2D()[6].set(0, arrow_offset + arrow_size) elif target_offset.x() > self.width(): # right geometry.vertexDataAsPoint2D()[4].set(self.width(), arrow_offset - arrow_size) geometry.vertexDataAsPoint2D()[5].set(self.width() + arrow_size, 
arrow_offset) geometry.vertexDataAsPoint2D()[6].set(self.width(), arrow_offset + arrow_size) geometry.indexDataAsUShort()[0] = 0 geometry.indexDataAsUShort()[1] = 1 geometry.indexDataAsUShort()[2] = 2 geometry.indexDataAsUShort()[3] = 0 geometry.indexDataAsUShort()[4] = 2 geometry.indexDataAsUShort()[5] = 3 geometry.indexDataAsUShort()[6] = 4 geometry.indexDataAsUShort()[7] = 5 geometry.indexDataAsUShort()[8] = 6 paint_node.setGeometry(geometry) material = QSGFlatColorMaterial() material.setColor(self._color) paint_node.setMaterial(material) # For PyQt 5.4, I need to store these otherwise they will be garbage collected before rendering # and never show up, but otherwise never crash. self._paint_node = paint_node self._geometry = geometry self._material = material return paint_node
Python
0.000299
@@ -2596,36 +2596,8 @@ ())%0A - print(target_offset) %0A
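The diff payloads in these records are percent-encoded: %0A is a newline, %5B/%5D are brackets, %7B/%7D are braces, and so on. A hunk can be decoded with the standard library; a minimal sketch:

from urllib.parse import unquote

# %0A decodes to a newline; the same applies to %5B ('['), %7B ('{'), etc.
assert unquote("line1%0Aline2") == "line1\nline2"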
91bb5bb09d84bc6db8c99a19e2c878a22d599c76
Revert 189061 "[chromedriver] Update the devtools socket names f..."
chrome/test/chromedriver/chrome/adb_commands.py
chrome/test/chromedriver/chrome/adb_commands.py
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A wrapper around adb commands called by chromedriver.

Preconditions:
- A single device is attached.
- adb is in PATH.

This script should write everything (including stacktraces) to stdout.
"""

import collections
import optparse
import subprocess
import sys
import traceback

PackageInfo = collections.namedtuple('PackageInfo', ['activity', 'socket'])

PACKAGE_INFO = {
  'org.chromium.chrome.testshell':
      PackageInfo('ChromiumTestShellActivity',
                  'chromium_testshell_devtools_remote'),
  'com.google.android.apps.chrome':
      PackageInfo('Main', 'chromium_devtools_remote'),
  'com.chrome.dev':
      PackageInfo('Main', 'chrome_dev_devtools_remote'),
  'com.chrome.beta':
      PackageInfo('Main', 'chrome_beta_devtools_remote'),
  'com.android.chrome':
      PackageInfo('Main', 'chrome_devtools_remote'),
}


class AdbError(Exception):
  def __init__(self, message, output, cmd):
    self.message = message
    self.output = output
    self.cmd = cmd

  def __str__(self):
    return ('%s\nCommand: "%s"\nOutput: "%s"' %
            (self.message, self.cmd, self.output))


def RunAdbCommand(args):
  """Executes an ADB command and returns its output.

  Args:
    args: A sequence of program arguments supplied to adb.

  Returns:
    output of the command (stdout + stderr).

  Raises:
    AdbError: if exit code is non-zero.
  """
  args = ['adb', '-d'] + args
  try:
    p = subprocess.Popen(args, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    out, _ = p.communicate()
    if p.returncode:
      raise AdbError('Command failed.', out, args)
    return out
  except OSError as e:
    print 'Make sure adb command is in PATH.'
    raise e


def SetChromeFlags():
  """Sets the command line flags file on device.

  Raises:
    AdbError: If failed to write the flags file to device.
  """
  cmd = [
      'shell',
      'echo chrome --disable-fre --metrics-recording-only '
      '--enable-remote-debugging > /data/local/chrome-command-line;'
      'echo $?'
  ]
  out = RunAdbCommand(cmd).strip()
  if out != '0':
    raise AdbError('Failed to set the command line flags.', out, cmd)


def ClearAppData(package):
  """Clears the app data.

  Args:
    package: Application package name.

  Raises:
    AdbError: if any step fails.
  """
  cmd = ['shell', 'pm clear %s' % package]
  # am/pm package do not return valid exit codes.
  out = RunAdbCommand(cmd)
  if 'Success' not in out:
    raise AdbError('Failed to clear the profile.', out, cmd)


def StartActivity(package):
  """Start the activity in the package.

  Args:
    package: Application package name.

  Raises:
    AdbError: if any step fails.
  """
  cmd = [
      'shell',
      'am start -a android.intent.action.VIEW -S -W -n %s/.%s '
      '-d "data:text/html;charset=utf-8,"' % (package,
                                              PACKAGE_INFO[package].activity)]
  out = RunAdbCommand(cmd)
  if 'Complete' not in out:
    raise AdbError('Failed to start the activity. %s', out, cmd)


def Forward(package, host_port):
  """Forward host socket to devtools socket on the device.

  Args:
    package: Application package name.
    host_port: Port on host to forward.

  Raises:
    AdbError: if command fails.
  """
  cmd = ['forward', 'tcp:%d' % host_port,
         'localabstract:%s' % PACKAGE_INFO[package].socket]
  RunAdbCommand(cmd)


if __name__ == '__main__':
  try:
    parser = optparse.OptionParser()
    parser.add_option(
        '', '--package', help='Application package name.')
    parser.add_option(
        '', '--launch', action='store_true',
        help='Launch the app with a fresh profile and forward devtools port.')
    parser.add_option(
        '', '--port', type='int', default=33081,
        help='Host port to forward for launch operation [default: %default].')
    options, _ = parser.parse_args()

    if not options.package:
      raise Exception('No package specified.')
    if options.package not in PACKAGE_INFO:
      raise Exception('Unknown package provided. Supported packages are:\n %s'
                      % PACKAGE_INFO.keys())
    if options.launch:
      SetChromeFlags()
      ClearAppData(options.package)
      StartActivity(options.package)
      Forward(options.package, options.port)
    else:
      raise Exception('No options provided.')
  except:
    traceback.print_exc(file=sys.stdout)
    sys.exit(1)
Python
0.000002
@@ -544,16 +544,76 @@ cket'%5D)%0A +CHROME_INFO = PackageInfo('Main', 'chrome_devtools_remote')%0A PACKAGE_ @@ -807,309 +807,130 @@ me': -%0A PackageInfo('Main', 'chromium_devtools_remote'),%0A 'com.chrome.dev':%0A PackageInfo('Main', 'chrome_dev_devtools_remote'),%0A 'com.chrome.beta':%0A PackageInfo('Main', 'chrome_beta_devtools_remote'),%0A 'com.android.chrome':%0A PackageInfo('Main', 'chrome_devtools_remote') + CHROME_INFO,%0A 'com.chrome.dev': CHROME_INFO,%0A 'com.chrome.beta': CHROME_INFO,%0A 'com.android.chrome': CHROME_INFO ,%0A%7D%0A
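In this diff, four dictionary entries that repeated the same value are collapsed onto one shared constant. Since namedtuples are immutable, reusing a single instance is safe; a sketch of the mapping the diff produces:

import collections

PackageInfo = collections.namedtuple('PackageInfo', ['activity', 'socket'])

# One shared, immutable value instead of four identical constructor calls.
CHROME_INFO = PackageInfo('Main', 'chrome_devtools_remote')
PACKAGE_INFO = {
    'com.google.android.apps.chrome': CHROME_INFO,
    'com.chrome.dev': CHROME_INFO,
    'com.chrome.beta': CHROME_INFO,
    'com.android.chrome': CHROME_INFO,
}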
872e008b3986b18a7c01cd47e91a7ef39e21006b
Remove unused parameter in test (#5423)
cirq-core/cirq/contrib/paulistring/pauli_string_dag_test.py
cirq-core/cirq/contrib/paulistring/pauli_string_dag_test.py
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest

import cirq
from cirq.contrib.paulistring import convert_and_separate_circuit, pauli_string_dag_from_circuit


@pytest.mark.parametrize('repetition', range(6))
def test_pauli_string_dag_from_circuit(repetition):
    q0, q1, q2 = cirq.LineQubit.range(3)
    c_orig = cirq.testing.nonoptimal_toffoli_circuit(q0, q1, q2)
    c_left, _ = convert_and_separate_circuit(c_orig)

    c_left_dag = pauli_string_dag_from_circuit(c_left)
    c_left_reordered = c_left_dag.to_circuit()

    cirq.testing.assert_allclose_up_to_global_phase(
        c_left.unitary(), c_left_reordered.unitary(), atol=1e-7
    )
Python
0
@@ -582,23 +582,8 @@ e.%0A%0A -import pytest%0A%0A impo @@ -590,16 +590,16 @@ rt cirq%0A + %0Afrom ci @@ -694,57 +694,8 @@ t%0A%0A%0A -@pytest.mark.parametrize('repetition', range(6))%0A def @@ -733,18 +733,8 @@ uit( -repetition ):%0A
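The removed decorator is pytest's mechanism for running one test body once per argument value; since the body never reads `repetition`, the six runs were identical. For reference, a minimal sketch of what the decorator does:

import pytest

@pytest.mark.parametrize('repetition', range(6))
def test_runs_once_per_value(repetition):
    # pytest generates six test cases, one for each value 0..5.
    # If the parameter is never used, the extra runs add nothing.
    assert 0 <= repetition < 6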
6044f11916a909f46291653560a5aa2088ee3189
remove the wrong conversion of the ObjectID to an integer
coherence/upnp/services/servers/content_directory_server.py
coherence/upnp/services/servers/content_directory_server.py
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php

# Copyright 2005, Tim Potter <tpot@samba.org>
# Copyright 2006 John-Mark Gurney <gurney_j@resnet.uoregon.edu>
# Copyright 2006, Frank Scholz <coherence@beebits.net>

# Content Directory service

from twisted.python import failure
from twisted.web import resource

from coherence.upnp.core.soap_service import UPnPPublisher
from coherence.upnp.core.soap_service import errorCode

from coherence.upnp.core.DIDLLite import DIDLElement

from coherence.upnp.core import service


class ContentDirectoryControl(service.ServiceControl, UPnPPublisher):

    def __init__(self, server):
        self.service = server
        self.variables = server.get_variables()
        self.actions = server.get_actions()


class ContentDirectoryServer(service.ServiceServer, resource.Resource):

    def __init__(self, device, backend=None):
        self.device = device
        if backend == None:
            backend = self.device.backend
        resource.Resource.__init__(self)
        service.ServiceServer.__init__(self, 'ContentDirectory', self.device.version, backend)

        self.control = ContentDirectoryControl(self)
        self.putChild('scpd.xml', service.scpdXML(self, self.control))
        self.putChild('control', self.control)

        self.set_variable(0, 'SystemUpdateID', 0)
        self.set_variable(0, 'ContainerUpdateIDs', '')

    def listchilds(self, uri):
        cl = ''
        for c in self.children:
            cl += '<li><a href=%s/%s>%s</a></li>' % (uri, c, c)
        return cl

    def render(self, request):
        return '<html><p>root of the ContentDirectory</p><p><ul>%s</ul></p></html>' % self.listchilds(request.uri)

    def upnp_Search(self, *args, **kwargs):
        ContainerID = kwargs['ContainerID']
        Filter = kwargs['Filter']
        StartingIndex = int(kwargs['StartingIndex'])
        RequestedCount = int(kwargs['RequestedCount'])
        SortCriteria = kwargs['SortCriteria']
        SearchCriteria = kwargs['SearchCriteria']

        total = 0
        root_id = 0
        item = None
        items = []

        wmc_mapping = getattr(self.backend, "wmc_mapping", None)
        """ fake a Windows Media Connect Server
            and return for the moment an error
            for the things we can't support now
        """
        if(kwargs.get('X_UPnPClient', '') == 'XBox' and
           wmc_mapping != None and
           wmc_mapping.has_key(ContainerID)):
            root_id = wmc_mapping[ContainerID]
            if ContainerID in ['4', '8', '13', 'B']:
                # _all_ items
                item = self.backend.get_by_id(root_id)
                if item == None:
                    return failure.Failure(errorCode(701))
                containers = [item]
                while len(containers) > 0:
                    container = containers.pop()
                    if container.mimetype != 'directory':
                        continue
                    for child in container.get_children(0, 0):
                        if child.mimetype == 'directory':
                            containers.append(child)
                        else:
                            items.append(child)
                            total += 1
        else:
            try:
                root_id = int(ContainerID)
            except:
                pass
            item = self.backend.get_by_id(root_id)
            if item == None:
                return failure.Failure(errorCode(701))
            items = item.get_children(StartingIndex, StartingIndex + RequestedCount)
            total = item.get_child_count()

        didl = DIDLElement()
        for i in items:
            didl.addItem(i.get_item())

        r = {'Result': didl.toString(), 'TotalMatches': total,
             'NumberReturned': didl.numItems()}

        if(item != None and hasattr(item, 'update_id')):
            r['UpdateID'] = item.update_id
        else:
            r['UpdateID'] = self.backend.update_id  # FIXME

        return r

    def upnp_Browse(self, *args, **kwargs):
        ObjectID = kwargs['ObjectID']
        BrowseFlag = kwargs['BrowseFlag']
        Filter = kwargs['Filter']
        StartingIndex = int(kwargs['StartingIndex'])
        RequestedCount = int(kwargs['RequestedCount'])
        SortCriteria = kwargs['SortCriteria']

        wmc_mapping = getattr(self.backend, "wmc_mapping", None)
        """ fake a Windows Media Connect Server
            and return for the moment an error
            for the things we can't support now
        """
        if(kwargs.get('X_UPnPClient', '') == 'XBox' and
           wmc_mapping != None and
           wmc_mapping.has_key(ObjectID)):
            root_id = wmc_mapping[ObjectID]
        else:
            root_id = int(ObjectID)

        item = self.backend.get_by_id(int(root_id))
        if item == None:
            return failure.Failure(errorCode(701))

        didl = DIDLElement()

        if BrowseFlag == 'BrowseDirectChildren':
            childs = item.get_children(StartingIndex, StartingIndex + RequestedCount)
            for i in childs:
                didl.addItem(i.get_item())
            total = item.get_child_count()
        else:
            didl.addItem(item.get_item())
            total = 1

        r = {'Result': didl.toString(), 'TotalMatches': total,
             'NumberReturned': didl.numItems()}

        if hasattr(item, 'update_id'):
            r['UpdateID'] = item.update_id
        else:
            r['UpdateID'] = self.backend.update_id  # FIXME

        return r
Python
0.999905
@@ -3306,20 +3306,16 @@ ot_id = -int( Containe @@ -3317,17 +3317,16 @@ tainerID -) %0A @@ -4784,20 +4784,16 @@ d = -int( ObjectID )%0A%0A @@ -4788,17 +4788,16 @@ ObjectID -) %0A%0A @@ -4832,20 +4832,16 @@ _id( -int( root_id) )%0A @@ -4836,17 +4836,16 @@ root_id) -) %0A
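The conversions were wrong because ContentDirectory object IDs are strings that are only sometimes numeric; int() blows up on any backend that uses structured IDs. A short illustration (the non-numeric ID below is a hypothetical example, not from the source):

# '0' is the conventional root container ID and converts cleanly:
assert int('0') == 0
# but a backend may hand out structured IDs, where int() fails:
try:
    int('video/3')  # hypothetical structured ID
except ValueError:
    pass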
0c69ef23d6ed2368cc416bf2d47ec0aacead9ae8
Update test_exportchannel.py
contentcuration/contentcuration/tests/test_exportchannel.py
contentcuration/contentcuration/tests/test_exportchannel.py
import os
import pytest
import zipfile
import tempfile

from mixer.backend.django import mixer

from contentcuration import models as cc
from kolibri.content import models as k
from django.core.management import call_command
from django.conf import settings

pytestmark = pytest.mark.django_db


@pytest.fixture
def video():
    return mixer.blend('contentcuration.ContentKind', kind='video')


@pytest.fixture
def preset_video(video):
    return mixer.blend('contentcuration.FormatPreset', id='mp4', kind=video)


@pytest.fixture
def topic():
    return mixer.blend('contentcuration.ContentKind', kind='topic')


@pytest.fixture
def exercise():
    return mixer.blend('contentcuration.ContentKind', kind='exercise')


@pytest.fixture
def preset_exercise(exercise):
    return mixer.blend('contentcuration.FormatPreset', id='exercise', kind=exercise)


@pytest.fixture
def fileformat_perseus():
    return mixer.blend('contentcuration.FileFormat', extension='perseus', mimetype='application/exercise')


@pytest.fixture
def fileformat_mp4():
    return mixer.blend('contentcuration.FileFormat', extension='mp4', mimetype='application/video')


@pytest.fixture
def license_wtfpl():
    return mixer.blend('contentcuration.License', license_name="WTF License")


@pytest.yield_fixture
def fileobj_video(preset_video, fileformat_mp4):
    randomfilebytes = "4"

    with tempfile.NamedTemporaryFile(dir=settings.STORAGE_ROOT, delete=False) as f:
        filename = f.name
        f.write(randomfilebytes)
        f.flush()
        db_file_obj = mixer.blend('contentcuration.File', file_format=fileformat_mp4, preset=preset_video, file_on_disk=filename)

        yield db_file_obj


@pytest.fixture
def assessment_item():
    answers = "[{\"correct\": false, \"answer\": \"White Rice\", \"help_text\": \"\"}, {\"correct\": true, \"answer\": \"Brown Rice\", \"help_text\": \"\"}, {\"correct\": false, \"answer\": \"Rice Krispies\", \"help_text\": \"\"}]"
    return mixer.blend('contentcuration.AssessmentItem', question='Which rice is the healthiest?', type='single_selection', answers=answers)


@pytest.fixture
def assessment_item2():
    answers = "[{\"correct\": true, \"answer\": \"Eggs\", \"help_text\": \"\"}, {\"correct\": true, \"answer\": \"Tofu\", \"help_text\": \"\"}, {\"correct\": true, \"answer\": \"Meat\", \"help_text\": \"\"}, {\"correct\": true, \"answer\": \"Beans\", \"help_text\": \"\"}, {\"correct\": false, \"answer\": \"Rice\", \"help_text\": \"\"}]"
    return mixer.blend('contentcuration.AssessmentItem', question='Which of the following are proteins?', type='multiple_selection', answers=answers)


@pytest.fixture
def assessment_item3():
    answers = "[]"
    return mixer.blend('contentcuration.AssessmentItem', question='Why a rice cooker?', type='free_response', answers=answers)


@pytest.fixture
def assessment_item4():
    answers = "[{\"correct\": true, \"answer\": 20, \"help_text\": \"\"}]"
    return mixer.blend('contentcuration.AssessmentItem', question='How many minutes does it take to cook rice?', type='input_question', answers=answers)


@pytest.fixture
def channel(topic, video, exercise, preset_video, license_wtfpl, fileobj_video,
            assessment_item, assessment_item2, assessment_item3, assessment_item4):
    with cc.ContentNode.objects.delay_mptt_updates():
        root = mixer.blend('contentcuration.ContentNode', title="root", parent=None, kind=topic)
        level1 = mixer.blend('contentcuration.ContentNode', parent=root, kind=topic)
        level2 = mixer.blend('contentcuration.ContentNode', parent=level1, kind=topic)
        leaf = mixer.blend('contentcuration.ContentNode', parent=level2, kind=video)
        leaf2 = mixer.blend('contentcuration.ContentNode', parent=level2, kind=exercise,
                            title='EXERCISE 1', extra_fields="{\"mastery_model\":\"do_all\",\"randomize\":true}")

        fileobj_video.contentnode = leaf
        fileobj_video.save()

        assessment_item.contentnode = leaf2
        assessment_item.save()

        assessment_item2.contentnode = leaf2
        assessment_item2.save()

        assessment_item3.contentnode = leaf2
        assessment_item3.save()

        assessment_item4.contentnode = leaf2
        assessment_item4.save()

    channel = mixer.blend('contentcuration.Channel', main_tree=root, name='testchannel', thumbnail="")

    return channel


# def test_things_work(channel, license_wtfpl):
#     # TODO (aron): split different gets/asserts into their own tests
#     call_command('exportchannel', channel.pk)

#     k.ChannelMetadata.objects.get(name=channel.name)

#     cc_root = channel.main_tree

#     for ccnode in cc_root.get_family():
#         kolibrinode = k.ContentNode.objects.get(pk=ccnode.pk)

#         assert ccnode.parent_id == kolibrinode.parent_id


# def test_assigns_channel_root_pk(channel, license_wtfpl):
#     call_command('exportchannel', channel.pk)

#     kolibrichannel = k.ChannelMetadata.objects.get(pk=channel.pk)

#     assert kolibrichannel.root_pk == channel.main_tree_id


# def test_assigns_license(channel, license_wtfpl):
#     call_command('exportchannel', channel.pk)

#     kolibrichannel = k.ChannelMetadata.objects.get(pk=channel.pk)
#     root_kolibrinode = k.ContentNode.objects.get(pk=kolibrichannel.root_pk)

#     for n in root_kolibrinode.get_family():
#         assert n.license.license_name == cc.ContentNode.objects.get(pk=n.pk).license.license_name


# def test_increments_version(channel, license_wtfpl):
#     old_version = channel.version
#     call_command('exportchannel', channel.pk)
#     channel.refresh_from_db()
#     assert channel.version > old_version


# def test_hack_hack_hack_returns_zip_containing_actual_files(channel):
#     call_command('exportchannel', channel.pk)

#     assert os.path.exists(settings.HACK_HACK_HACK_UNICEF_CONTENT_ZIP_PATH)

#     with zipfile.ZipFile(settings.HACK_HACK_HACK_UNICEF_CONTENT_ZIP_PATH) as zf:
#         # Get all File objects, and their corresponding contents
#         kolibri_file_objects = [str(f) for f in k.File.objects.all()]
#         for fileinzip in zf.infolist():
#             # compare all filenames inside the zip and in our db, and their contents
#             assert any([i for i in kolibri_file_objects if fileinzip.filename in i])

def test_perseus_exercise_creation(channel, preset_exercise, fileformat_perseus):
    call_command('exportchannel', channel.pk)
    assert False
Python
0.000001
@@ -4324,2068 +4324,4 @@ nel%0A -%0A%0A# def test_things_work(channel, license_wtfpl):%0A# # TODO (aron): split different gets/asserts into their own tests%0A# call_command('exportchannel', channel.pk)%0A%0A# k.ChannelMetadata.objects.get(name=channel.name)%0A%0A# cc_root = channel.main_tree%0A%0A# for ccnode in cc_root.get_family():%0A# kolibrinode = k.ContentNode.objects.get(pk=ccnode.pk)%0A%0A# assert ccnode.parent_id == kolibrinode.parent_id%0A%0A%0A# def test_assigns_channel_root_pk(channel, license_wtfpl):%0A# call_command('exportchannel', channel.pk)%0A%0A# kolibrichannel = k.ChannelMetadata.objects.get(pk=channel.pk)%0A%0A# assert kolibrichannel.root_pk == channel.main_tree_id%0A%0A%0A# def test_assigns_license(channel, license_wtfpl):%0A# call_command('exportchannel', channel.pk)%0A%0A# kolibrichannel = k.ChannelMetadata.objects.get(pk=channel.pk)%0A# root_kolibrinode = k.ContentNode.objects.get(pk=kolibrichannel.root_pk)%0A%0A# for n in root_kolibrinode.get_family():%0A# assert n.license.license_name == cc.ContentNode.objects.get(pk=n.pk).license.license_name%0A%0A%0A# def test_increments_version(channel, license_wtfpl):%0A# old_version = channel.version%0A# call_command('exportchannel', channel.pk)%0A# channel.refresh_from_db()%0A# assert channel.version %3E old_version%0A%0A%0A# def test_hack_hack_hack_returns_zip_containing_actual_files(channel):%0A# call_command('exportchannel', channel.pk)%0A%0A# assert os.path.exists(settings.HACK_HACK_HACK_UNICEF_CONTENT_ZIP_PATH)%0A%0A# with zipfile.ZipFile(settings.HACK_HACK_HACK_UNICEF_CONTENT_ZIP_PATH) as zf:%0A# # Get all File objects, and their corresponding contents%0A# kolibri_file_objects = %5Bstr(f) for f in k.File.objects.all()%5D%0A# for fileinzip in zf.infolist():%0A# # compare all filenames inside the zip and in our db, and their contents%0A# assert any(%5Bi for i in kolibri_file_objects if fileinzip.filename in i%5D)%0A%0Adef test_perseus_exercise_creation(channel, preset_exercise, fileformat_perseus):%0A call_command('exportchannel', channel.pk)%0A assert False%0A
dfe99dde7c7d783caa90bc19e792ad04b53fd960
add doc type and update docs
corehq/apps/hqadmin/management/commands/stale_data_in_es.py
corehq/apps/hqadmin/management/commands/stale_data_in_es.py
import inspect
from collections import namedtuple

import dateutil
from django.core.management.base import BaseCommand, CommandError
from datetime import datetime

from corehq.form_processor.models import CommCareCaseSQL
from corehq.form_processor.utils import should_use_sql_backend
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
from dimagi.utils.chunked import chunked

from casexml.apps.case.models import CommCareCase
from corehq.apps.es import CaseES
from corehq.elastic import ES_EXPORT_INSTANCE
from corehq.util.dates import iso_string_to_datetime
from corehq.util.couch_helpers import paginate_view

RunConfig = namedtuple('RunConfig', ['domain', 'start_date', 'end_date'])


class Command(BaseCommand):
    """
    Returns list of (doc_id, es_server_modified_on, couch_server_modified_on)
    tuples that are not updated in ES

    Can be used in conjunction with republish_case_changes

    1. Generate case tuples not updated in ES with extra debug columns

       $ ./manage.py stale_data_in_es <DOMAIN> case > case_ids.txt

    2. Republish case changes

       $ ./manage.py republish_case_changes <DOMAIN> case_ids.txt
    """
    help = inspect.cleandoc(__doc__).split('\n')[0]

    def add_arguments(self, parser):
        parser.add_argument('domain')
        parser.add_argument('data_models', nargs='+',
                            help='A list of data models to check. Valid options are "case"')
        parser.add_argument(
            '--start',
            action='store',
            help='Only include data modified after this date',
        )
        parser.add_argument(
            '--end',
            action='store',
            help='Only include data modified before this date',
        )

    def handle(self, domain, data_models, **options):
        data_models = set(data_models)

        start = dateutil.parser.parse(options['start']) if options['start'] else datetime(2010, 1, 1)
        end = dateutil.parser.parse(options['end']) if options['end'] else datetime.utcnow()
        run_config = RunConfig(domain, start, end)

        for data_model in data_models:
            if data_model.lower() == 'case':
                for case_id, case_type, es_date, primary_date in get_server_modified_on_for_domain(run_config):
                    print(f"{case_id},{case_type},{es_date},{primary_date}")
            else:
                raise CommandError('Only valid option for data models is "case"')


def get_server_modified_on_for_domain(run_config):
    if should_use_sql_backend(run_config.domain):
        return _get_data_for_sql_backend(run_config)
    else:
        return _get_data_for_couch_backend(run_config)


def _get_data_for_couch_backend(run_config):
    domain = run_config.domain
    start_time = datetime.utcnow()
    chunk_size = 1000
    chunked_iterator = chunked(paginate_view(
        CommCareCase.get_db(),
        'cases_by_server_date/by_server_modified_on',
        chunk_size=chunk_size,
        startkey=[domain],
        endkey=[domain, {}],
        include_docs=False,
        reduce=False
    ), chunk_size)
    for chunk in chunked_iterator:
        case_ids = [row['id'] for row in chunk]
        es_modified_on_by_ids = _get_es_modified_dates(domain, case_ids)
        for row in chunk:
            case_id, couch_modified_on = row['id'], row['value']
            if iso_string_to_datetime(couch_modified_on) > start_time:
                # skip cases modified after the script started
                continue
            es_modified_on = es_modified_on_by_ids.get(case_id)
            if not es_modified_on or (es_modified_on != couch_modified_on):
                yield (case_id, 'COUCH_TYPE_NOT_SUPPORTED', es_modified_on, couch_modified_on)


def _get_data_for_sql_backend(run_config):
    for db in get_db_aliases_for_partitioned_query():
        matching_records_for_db = _get_sql_case_data_for_db(db, run_config)
        chunk_size = 1000
        for chunk in chunked(matching_records_for_db, chunk_size):
            case_ids = [val[0] for val in chunk]
            es_modified_on_by_ids = _get_es_modified_dates(run_config.domain, case_ids)
            for case_id, case_type, sql_modified_on in chunk:
                sql_modified_on_str = f'{sql_modified_on.isoformat()}Z'
                es_modified_on = es_modified_on_by_ids.get(case_id)
                if not es_modified_on or (es_modified_on < sql_modified_on_str):
                    yield (case_id, case_type, es_modified_on, sql_modified_on_str)


def _get_sql_case_data_for_db(db, run_config):
    return CommCareCaseSQL.objects.using(db).filter(
        domain=run_config.domain,
        server_modified_on__gte=run_config.start_date,
        server_modified_on__lte=run_config.end_date,
    ).values_list('case_id', 'type', 'server_modified_on')


def _get_es_modified_dates(domain, case_ids):
    results = (CaseES(es_instance_alias=ES_EXPORT_INSTANCE)
               .domain(domain)
               .case_ids(case_ids)
               .values_list('_id', 'server_modified_on'))
    return dict(results)
Python
0
@@ -766,16 +766,39 @@ (doc_id, + doc_type, doc_subtype, es_serv @@ -2356,16 +2356,29 @@ ase_id%7D, +CommCareCase, %7Bcase_ty
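Both backends in this command batch case IDs (chunk_size = 1000) before querying Elasticsearch for modified dates. A standalone sketch of that batching pattern, as a stand-in for dimagi.utils.chunked:

def chunked(iterable, n):
    # Yield lists of at most n items; a minimal stand-in for dimagi.utils.chunked.
    batch = []
    for item in iterable:
        batch.append(item)
        if len(batch) == n:
            yield batch
            batch = []
    if batch:
        yield batch

# Each Elasticsearch query then sees at most 1000 IDs:
for ids in chunked(range(2500), 1000):
    assert len(ids) <= 1000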
7917515dc5580bbd56b477435a46552d3a5353af
drop other pattern ops indexes
corehq/warehouse/migrations/0033_form_fact_user_id_index.py
corehq/warehouse/migrations/0033_form_fact_user_id_index.py
from django.db import migrations, models

FORM_STAGING_USER_INDEX_NAME = 'warehouse_f_user_id_785d18_idx'
COLUMNS = ['user_id']

CREATE_INDEX_SQL = "CREATE INDEX CONCURRENTLY IF NOT EXISTS {} ON {} ({})"
DROP_INDEX_SQL = "DROP INDEX CONCURRENTLY IF EXISTS {}"


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ('warehouse', '0032_auto_20190917_1542'),
    ]

    operations = [
        migrations.RunSQL(
            sql=CREATE_INDEX_SQL.format(
                FORM_STAGING_USER_INDEX_NAME,
                'warehouse_formstagingtable',
                ','.join(COLUMNS)
            ),
            reverse_sql=DROP_INDEX_SQL.format(FORM_STAGING_USER_INDEX_NAME),
            state_operations=[
                migrations.AddIndex(
                    model_name='formstagingtable',
                    index=models.Index(fields=COLUMNS, name=FORM_STAGING_USER_INDEX_NAME),
                ),
            ]
        ),
        migrations.RunSQL(
            sql=DROP_INDEX_SQL.format('warehouse_formstagingtable_received_on_6a73ba8d'),
            reverse_sql=migrations.RunSQL.noop,
            state_operations=[
                migrations.AlterField(
                    model_name='formstagingtable',
                    name='received_on',
                    field=models.DateTimeField(),
                ),
            ]
        ),
        migrations.RunSQL(
            DROP_INDEX_SQL.format('warehouse_formstagingtable_form_id_246fcaf3_like'),
            migrations.RunSQL.noop
        ),
        migrations.RunSQL(
            DROP_INDEX_SQL.format('warehouse_formfact_form_id_1bb74f90_like'),
            migrations.RunSQL.noop
        ),
        # "warehouse_formstagingtable_timezone_idx" btree (timezone('UTC'::text, GREATEST(received_on, deleted_on, edited_on)))
        migrations.RunSQL(
            DROP_INDEX_SQL.format('warehouse_formstagingtable_timezone_idx'),
            migrations.RunSQL.noop
        )
    ]
Python
0
@@ -1944,32 +1944,1491 @@ ons.RunSQL.noop%0A + ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_domaindim_domain_id_b1c3504b_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_groupdim_group_id_b5f6f7bd_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_locationdim_location_id_bb42cee7_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_userdim_user_id_701f9e28_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_appstatusformstaging_domain_5997210f_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_appstatusforms_submission_build_version_0a62241a_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_appstatusformstaging_commcare_version_cfb94daf_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_appstatussynclogstaging_domain_2ab81363_like'),%0A migrations.RunSQL.noop%0A ),%0A migrations.RunSQL(%0A DROP_INDEX_SQL.format('warehouse_applicationstatusfact_domain_18a67251_like'),%0A migrations.RunSQL.noop%0A )%0A %5D%0A
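All the added operations share one shape: a concurrent index drop with a no-op reverse. DROP INDEX CONCURRENTLY cannot run inside a transaction, which is why the migration sets atomic = False. A sketch of one such operation (the index name and dependency here are hypothetical placeholders):

from django.db import migrations

DROP_INDEX_SQL = "DROP INDEX CONCURRENTLY IF EXISTS {}"


class Migration(migrations.Migration):
    atomic = False  # CONCURRENTLY is not allowed inside a transaction block

    dependencies = [('warehouse', '0033_form_fact_user_id_index')]  # hypothetical

    operations = [
        migrations.RunSQL(
            DROP_INDEX_SQL.format('warehouse_example_domain_0123abcd_like'),  # hypothetical
            migrations.RunSQL.noop,
        ),
    ]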
d7a665a3947e04c4689db67f35827db2cc3a6842
Update randrect.pyde
sketches/randrect/randrect.pyde
sketches/randrect/randrect.pyde
from random import randint, choice

a = 220
colors = [color(155, 46, 105, a),
          color(217, 55, 80, a),
          color(226, 114, 79, a),
          color(243, 220, 123, a),
          color(78, 147, 151, a)]

def setup():
    size(940, 300)
    rectMode(CENTER)
    strokeWeight(2)
    # noStroke()
    background(255)

def draw():
    for _ in range(200):
        fill(choice(colors))
        rect(randint(0, width), randint(0, height),
             randint(10, width - 200),
             randint(10, int((height/2) - 100)))
    noLoop()
Python
0.000001
@@ -204,14 +204,14 @@ ize( -940, 3 +600, 6 00)%0A @@ -248,17 +248,17 @@ eWeight( -2 +5 )%0A #
06d281160d5ce60f9c8dea5c75d8234e70f63642
Fix loading script to actually save options for answers.
estudios_socioeconomicos/load.py
estudios_socioeconomicos/load.py
import pickle

from estudios_socioeconomicos.models import Seccion, Subseccion, Pregunta, OpcionRespuesta


def parse(name):
    """ utility script to parse the study.
    """
    preguntas = {}

    while True:
        seccion = input('Ingrese el nombre de la seccion: ')
        if seccion == 'n':
            break
        if seccion not in preguntas:
            preguntas[seccion] = {}

        while True:
            subseccion = input('Ingrese el nombre de la subseccion dentro de %s: ' % seccion)
            if subseccion == 'n':
                break
            curr = 1
            if subseccion not in preguntas[seccion]:
                preguntas[seccion][subseccion] = []

            while True:
                p = input('Ingrese el nombre de la pregunta: ')
                if p == 'n':
                    break
                opt = input('Respuestas: ')
                opt = opt.split(',')
                rel_integrante = input('related? (y/n): ')

                preguntas[seccion][subseccion].append({
                    'texto': p,
                    'numero': curr,
                    'opciones': list(map(lambda x: x.strip(), opt)) if len(opt) > 1 else [],
                    'relacionado_a_integrante': rel_integrante == 'y'
                })
                curr += 1

    print(preguntas)
    pickle.dump(preguntas, open(name, 'wb'))


def load_data(name='estudios_socioeconomicos/preguntas.pkl'):
    """ Load the questions and sections for the study.

        To execute: import this function after running
        python manage.py shell and just call it.
    """
    preguntas = pickle.load(open(name, 'rb'))
    nums = {
        'Generales del Solicitante': 1,
        'Datos y Relación Familiar de Todos los Integrantes de la Vivienda': 2,
        'Situación Económica': 3,
        'Vivienda y Entorno Social': 4,
        'Investigación Laboral': 6,
        'Personalidad': 7,
        'Otros Aspectos': 8
    }

    for sec in preguntas.keys():
        seccion = Seccion.objects.create(nombre=sec, numero=nums[sec])

        for i, sub in enumerate(preguntas[sec].keys()):
            subseccion = Subseccion.objects.create(
                seccion=seccion,
                nombre=sub,
                numero=i)

            for p in preguntas[sec][sub]:
                pregunta = Pregunta.objects.create(
                    subseccion=subseccion,
                    texto=p['texto'],
                    descripcion=p['descripcion'],
                    orden=p['numero'],
                )

                map(lambda o: OpcionRespuesta.objects.create(
                    pregunta=pregunta, texto=o), p['opciones'])
Python
0
@@ -2005,24 +2005,31 @@ ion.objects. +get_or_ create(nombr @@ -2052,16 +2052,19 @@ ms%5Bsec%5D) +%5B0%5D %0A @@ -2148,32 +2148,39 @@ seccion.objects. +get_or_ create(%0A @@ -2305,16 +2305,19 @@ umero=i) +%5B0%5D %0A @@ -2387,32 +2387,39 @@ regunta.objects. +get_or_ create(%0A @@ -2661,16 +2661,19 @@ ) +%5B0%5D %0A @@ -2685,21 +2685,53 @@ -map(lambda o: +for opt in p%5B'opciones'%5D:%0A Opc @@ -2751,16 +2751,23 @@ objects. +get_or_ create(%0A @@ -2782,32 +2782,44 @@ + + pregunta=pregunt @@ -2824,30 +2824,52 @@ nta, - texto=o), p%5B'opciones'%5D +%0A texto=opt )%0A
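Here create() is replaced with get_or_create(), which returns an (instance, created) pair; the trailing [0] indexing in the diff keeps just the instance, so re-running the loader no longer inserts duplicates. A usage sketch with values from the script's own nums table:

from estudios_socioeconomicos.models import Seccion

# First call inserts, second call fetches; the loader becomes idempotent.
seccion, created = Seccion.objects.get_or_create(nombre='Personalidad', numero=7)
again = Seccion.objects.get_or_create(nombre='Personalidad', numero=7)[0]
assert seccion == again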
4ff146a7e81fe4b065a551e29f42ef9b2a973823
document title edit
servee_document/views.py
servee_document/views.py
import json

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required

from .models import Document


@csrf_exempt
@require_POST
@login_required
def upload_documents(request):
    documents = []
    for f in request.FILES.getlist("file"):
        obj = Document.objects.create(document=f)
        documents.append({"filelink": obj.document.url})
    return HttpResponse(json.dumps(documents), mimetype="application/json")


@login_required
def recent_documents(request):
    documents = [
        {"thumb": obj.document.url,
         "document": obj.document.url,
         "title": obj.document.name}
        for obj in Document.objects.all().order_by("-uploaded")[:20]
    ]
    return HttpResponse(json.dumps(documents), mimetype="application/json")
Python
0.000003
@@ -711,20 +711,12 @@ obj. -document.nam +titl e%7D%0A
4aad6aa1b50f8b0c86c49867bc75081b03b03086
remove unused import
repository/shimclient.py
repository/shimclient.py
#
# Copyright (c) 2005 rpath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/cpl.php.
#
# This program is distributed in the hope that it will be useful, but
# without any waranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#

import netclient
import netrepos


class ShimNetClient(netclient.NetworkRepositoryClient):
    """
    A subclass of NetworkRepositoryClient which can take a
    NetworkRepositoryServer instance (plus a few other pieces of
    information) and expose the netclient interface without the
    overhead of XMLRPC.
    """
    def __init__(self, server, protocol, port, authToken, repMap):
        netclient.NetworkRepositoryClient.__init__(self, repMap)
        self.server = ShimServerProxy(server, protocol, port, authToken)


class _ShimMethod(netclient._Method):
    def __init__(self, server, protocol, port, authToken, name):
        self._server = server
        self._authToken = authToken
        self._name = name
        self._protocol = protocol
        self._port = port

    def __repr__(self):
        return "<server._ShimMethod(%r)>" % (self._ShimMethod__name)

    def __call__(self, *args):
        isException, result = self._server.callWrapper(
            self._protocol, self._port,
            self._name, self._authToken, args)

        if not isException:
            return result
        else:
            self.handleError(result)


class ShimServerProxy(netclient.ServerProxy):
    def __init__(self, server, protocol, port, authToken):
        self._authToken = authToken
        self._server = server
        self._protocol = protocol
        self._port = port

    def __getattr__(self, name):
        return _ShimMethod(self._server,
                           self._protocol, self._port,
                           self._authToken, name)
Python
0.000001
@@ -585,24 +585,8 @@ ient -%0Aimport netrepos %0A%0Acl
c36282a41c248ad3d2405a0461195c679ef5327c
disable socket timeout, set heartbeat to 10 from 30
hiispider/amqp/amqp.py
hiispider/amqp/amqp.py
import specs

from twisted.internet.protocol import ClientCreator
from twisted.internet import reactor
from txamqp.client import TwistedDelegate
from txamqp.protocol import AMQClient
import txamqp.spec

def createClient(amqp_host, amqp_vhost, amqp_port=5672):
    amqp_spec = txamqp.spec.loadString(specs.v0_8)
    amqp_delegate = TwistedDelegate()
    client = ClientCreator(reactor,
        AMQClient,
        delegate=amqp_delegate,
        vhost=amqp_vhost,
        spec=amqp_spec,
        heartbeat=30).connectTCP(amqp_host, amqp_port)
    return client
Python
0.000001
@@ -195,16 +195,17 @@ p.spec%0A%0A +%0A def crea @@ -377,17 +377,16 @@ reactor, - %0A @@ -396,17 +396,16 @@ QClient, - %0A @@ -501,9 +501,9 @@ eat= -3 +1 0).c @@ -532,16 +532,27 @@ mqp_port +, timeout=0 )%0A re @@ -558,12 +558,13 @@ eturn client +%0A
4ef2c3988616736800b4e4470b598449b06790cd
Refactor some score calculation to make using the competition scorer easier
svm.py
svm.py
import numpy as np
from sklearn import svm, preprocessing
from feature_generation import FeatureGenerator
from FeatureData import FeatureData


class SVMModel(object):
    def __init__(self):
        self._stance_map = {'unrelated': 0, 'discuss': 2, 'agree': 3, 'disagree': 4}
        self._use_features = [
            # 'refuting',
            'ngrams',
            # 'polarity',
            'named'
        ]

    def get_data(self, body_file, stance_file):
        feature_data = FeatureData(body_file, stance_file)
        X_train = FeatureGenerator.get_features_from_file(self._use_features)
        y_train = np.asarray([self._stance_map[stance['Stance']] for stance in feature_data.stances])

        # Scale features to range[0, 1] to prevent larger features from dominating smaller ones
        min_max_scaler = preprocessing.MinMaxScaler()
        X_train = min_max_scaler.fit_transform(X_train)

        return {'X': X_train, 'y': y_train}

    def related_unrelated(self, y):
        return [x > 0 for x in y]

    def get_trained_classifier(self, X_train, y_train):
        """Trains the svm classifier and returns the trained classifier to be used for prediction
        on test data. Note that stances in test data will need to be translated to the numbers
        shown in self._stance_map."""
        svm_classifier = svm.SVC(decision_function_shape='ovr', cache_size=1000)
        svm_classifier.fit(X_train, y_train)
        return svm_classifier

    def test_classifier(self, svm_classifier, X_test, y_test):
        predicted = []
        for i, stance in enumerate(y_test):
            predicted.append(svm_classifier.predict([X_test[i]])[0])

        print str(self._use_features)
        print "Precision %f" % self.precision(y_test, predicted)
        print "Recal %f" % self.recal(y_test, predicted)
        print "Accuracy %f" % self.accuracy(y_test, predicted)

    def precision(self, actual, predicted):
        pairs = zip(actual, predicted)
        truePositive = np.count_nonzero([x[1] > 0 for x in pairs if x[0] > 0])
        falsePositive = np.count_nonzero([x[1] > 0 for x in pairs if x[0] == 0])
        return float(truePositive) / (truePositive + falsePositive + 1)

    def recal(self, actual, predicted):
        pairs = zip(actual, predicted)
        truePositive = np.count_nonzero([x[1] > 0 for x in pairs if x[0] > 0])
        falseNegative = np.count_nonzero([x[1] == 0 for x in pairs if x[0] > 0])
        return float(truePositive) / (truePositive + falseNegative + 1)

    def accuracy(self, actual, predicted):
        pairs = zip(actual, predicted)
        accurate = np.count_nonzero([x[1] == x[0] for x in pairs])
        return float(accurate) / len(pairs)


if __name__ == '__main__':
    model = SVMModel()
    data = model.get_data('data/train_bodies.csv', 'data/train_stances.csv')

    testNum = 1000
    X_test = data['X'][-testNum:]
    X_train = data['X'][:-testNum]

    Only_R_UR = True
    if Only_R_UR:
        y_test = model.related_unrelated(data['y'][-testNum:])
        y_train = model.related_unrelated(data['y'][:-testNum])
    else:
        y_test = data['y'][-testNum:]
        y_train = data['y'][:-testNum]

    classifier = model.get_trained_classifier(X_train, y_train)
    model.test_classifier(classifier, X_test, y_test)
Python
0.000015
@@ -1661,222 +1661,24 @@ -print str(self._use_features)%0A print %22Precision %25f%22 %25 self.precision(y_test, predicted)%0A print %22Recal %25f%22 %25 self.recal(y_test, predicted)%0A print %22Accuracy %25f%22 %25 self.accuracy(y_test, predicted) +return predicted %0A%0A @@ -3029,24 +3029,36 @@ y_train)%0A + predicted = model.test_ @@ -3096,8 +3096,220 @@ y_test)%0A +%0A print str(model._use_features)%0A print %22Precision %25f%22 %25 model.precision(y_test, predicted)%0A print %22Recal %25f%22 %25 model.recal(y_test, predicted)%0A print %22Accuracy %25f%22 %25 model.accuracy(y_test, predicted)%0A
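test_classifier above predicts one row at a time inside a Python loop; scikit-learn's predict accepts the whole test matrix at once, so the loop can collapse to a single vectorized call. A sketch with synthetic data (not the repository's feature pipeline):

import numpy as np
from sklearn import svm

X_train = np.random.rand(100, 5)
y_train = np.random.randint(0, 2, size=100)
X_test = np.random.rand(10, 5)

clf = svm.SVC(decision_function_shape='ovr', cache_size=1000)
clf.fit(X_train, y_train)
predicted = clf.predict(X_test)  # one call instead of a per-row loop
assert len(predicted) == len(X_test)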
f2acfb8b25c5df8cdfe278ee2df26ec4b3673334
remove redundant conditional branches
tap.py
tap.py
import random
import socket
import string
import struct

from mc_bin_client import mc_bin_client, memcacheConstants as Constants


class Batch(object):
    def __init__(self, source):
        self.source = source
        self.msgs = []
        self.bytes = 0
        self.adjust_size = 0

    def append(self, msg, num_bytes):
        self.msgs.append(msg)
        self.bytes = self.bytes + num_bytes

    def size(self):
        return len(self.msgs)

    def msg(self, i):
        return self.msgs[i]


class TAP(object):
    BATCH_MAX_SIZE = 1000.0
    BATCH_MAX_BYTES = 400000.0
    RECV_MIN_BYTES = 4096

    def __init__(self, host='127.0.0.1', port=11210, bucket='default', password=''):
        self.tap_done = False
        self.tap_name = ''.join(random.sample(string.letters, 16))
        self.ack_last = False
        self.cmd_last = None
        self.num_msg = 0
        self.tap_conn = self.get_tap_conn(host, port, bucket, password)

    def provide_batch(self):
        batch = Batch(self)
        try:
            while (not self.tap_done and
                   batch.size() < self.BATCH_MAX_SIZE and
                   batch.bytes < self.BATCH_MAX_BYTES):
                rv, cmd, vbucket_id, key, flg, exp, cas, meta, val, \
                    opaque, need_ack = self.read_tap_conn(self.tap_conn)
                if rv != 0:
                    self.tap_done = True
                    return rv, batch

                if (cmd == Constants.CMD_TAP_MUTATION or
                        cmd == Constants.CMD_TAP_DELETE):
                    if not False:
                        msg = (cmd, vbucket_id, key, flg, exp, cas, meta, val)
                        batch.append(msg, len(val))
                        self.num_msg += 1
                    if cmd == Constants.CMD_TAP_DELETE:
                        batch.adjust_size += 1
                elif cmd == Constants.CMD_TAP_OPAQUE:
                    pass
                elif cmd == Constants.CMD_NOOP:
                    if (self.cmd_last == Constants.CMD_NOOP and
                            self.num_msg == 0 and batch.size() <= 0):
                        self.tap_done = True
                        return 0, batch
                else:
                    return 'Unexpected TAP message'

                if need_ack:
                    self.ack_last = True
                    self.tap_conn._sendMsg(cmd, '', '', opaque,
                                           vbucketId=0,
                                           fmt=Constants.RES_PKT_FMT,
                                           magic=Constants.RES_MAGIC_BYTE)
                    return 0, batch

                self.ack_last = False
                self.cmd_last = cmd
        except EOFError:
            if batch.size() <= 0 and self.ack_last:
                self.tap_done = True
        if batch.size() <= 0:
            return 0, None
        return 0, batch

    def get_tap_conn(self, host, port, bucket, password):
        tap_conn = mc_bin_client.MemcachedClient(host, port)
        tap_conn.sasl_auth_cram_md5(bucket, password)
        tap_conn.tap_fix_flag_byteorder = True

        tap_opts = {Constants.TAP_FLAG_DUMP: '',
                    Constants.TAP_FLAG_SUPPORT_ACK: '',
                    Constants.TAP_FLAG_TAP_FIX_FLAG_BYTEORDER: ''}
        ext, val = self.encode_tap_connect_opts(tap_opts)
        tap_conn._sendCmd(Constants.CMD_TAP_CONNECT, self.tap_name, val, 0, ext)
        return tap_conn

    def read_tap_conn(self, tap_conn):
        buf, cmd, vbucket_id, opaque, cas, keylen, extlen, data, datalen = \
            self.recv_msg(tap_conn.s, getattr(tap_conn, 'buf', ''))
        tap_conn.buf = buf

        rv = 0
        metalen = flags = flg = exp = 0
        meta = key = val = ''
        need_ack = False

        if data:
            ext = data[0:extlen]
            if extlen == 8:
                metalen, flags, ttl = \
                    struct.unpack(Constants.TAP_GENERAL_PKT_FMT, ext)
            elif extlen == 16:
                metalen, flags, ttl, flg, exp = \
                    struct.unpack(Constants.TAP_MUTATION_PKT_FMT, ext)
                if not tap_conn.tap_fix_flag_byteorder:
                    flg = socket.ntohl(flg)
            need_ack = flags & Constants.TAP_FLAG_ACK
            meta_start = extlen
            key_start = meta_start + metalen
            val_start = key_start + keylen
            meta = data[meta_start:key_start]
            key = data[key_start:val_start]
            val = data[val_start:]
        elif datalen:
            rv = 'Error: could not read full TAP message body'

        return \
            rv, cmd, vbucket_id, key, flg, exp, cas, meta, val, opaque, need_ack

    def recv_msg(self, sock, buf):
        pkt, buf = self.recv(sock, Constants.MIN_RECV_PACKET, buf)
        if not pkt:
            raise EOFError()
        magic, cmd, keylen, extlen, dtype, errcode, datalen, opaque, cas = \
            struct.unpack(Constants.REQ_PKT_FMT, pkt)
        data, buf = self.recv(sock, datalen, buf)
        return buf, cmd, errcode, opaque, cas, keylen, extlen, data, datalen

    def recv(self, skt, nbytes, buf):
        recv_arr = [buf]
        recv_tot = len(buf)
        while recv_tot < nbytes:
            data = skt.recv(max(nbytes - len(buf), self.RECV_MIN_BYTES))
            if not data:
                return None, ''
            recv_arr.append(data)
            recv_tot += len(data)
        joined = ''.join(recv_arr)
        return joined[:nbytes], joined[nbytes:]

    def encode_tap_connect_opts(self, opts):
        header = 0
        val = []
        for op in sorted(opts.keys()):
            header |= op
            val.append(opts[op])
        return struct.pack('>I', header), ''.join(val)


def main():
    tap = TAP()
    while True:
        rv, batch = tap.provide_batch()
        if not batch or rv:
            break


if __name__ == '__main__':
    main()
Python
0.030329
@@ -1332,16 +1332,17 @@ p_conn)%0A +%0A @@ -1459,17 +1459,16 @@ if -( cmd == C @@ -1496,68 +1496,8 @@ TION - or%0A cmd == Constants.CMD_TAP_DELETE) :%0A @@ -1721,440 +1721,70 @@ - if cmd == Constants.CMD_TAP_DELETE:%0A batch.adjust_size += 1%0A elif cmd == Constants.CMD_TAP_OPAQUE:%0A pass%0A elif cmd == Constants.CMD_NOOP:%0A if (self.cmd_last == Constants.CMD_NOOP and%0A self.num_msg == 0 and batch.size() %3C= 0):%0A self.tap_done = True%0A return 0, batch +elif cmd == Constants.CMD_TAP_OPAQUE:%0A pass %0A
1198d398fc8051142c700991c1360750b0857dc4
Update mp3test.py
home/Markus/mp3test.py
home/Markus/mp3test.py
# this is a test script
# i have a folder with the mp3 files named from music1 to music8.
# it random choses the files . no problem
# but i want to change the sleep(120) so the next starts when the previous is finished

from java.lang import String
from org.myrobotlab.service import Speech
from org.myrobotlab.service import Sphinx
from org.myrobotlab.service import Runtime
import random

mouth = Runtime.createAndStart("mouth","Speech")
music = 1

# add python as a listener of the "stopped" event from audioFile
mouth.audioFile.addListener("stopped", python.name, "stopped")

def play():
    number = str(random.randint(1, 8))
    # usually you need to escape backslash
    mouth.audioFile.playFile("C:\\Users\\Markus\\Music\\Robynsfavoriter\\music" + str(number) + ".mp3", False)
    print number
    mouth.speak("playing song number" + str(number))

# stopped method is called when at the end of an audio file
def stopped():
    if music == 1:
        print("I have started playing")
        global music
        music = 2
    elif music == 2:
        global music
        music = 1
        play()

play()
Python
0.000001
@@ -21,206 +21,8 @@ ript -%0A# i have a folder with the mp3 files named from music1 to music8.%0A# it random choses the files . no problem%0A# but i want to change the sleep(120) so the next starts when the previous is finished %0A%0Afr
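A side note on the stopped() callback above: Python treats a global declaration as applying to the whole function, and a global statement that appears after the name has already been read in the same scope triggers a SyntaxWarning in Python 2 (and a SyntaxError in Python 3). Declaring it once at the top sidesteps both; a minimal sketch:

music = 1

def stopped():
    # Declare `global` once, before `music` is read or assigned.
    global music
    if music == 1:
        music = 2
    elif music == 2:
        music = 1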
53b346ff3d816cc2e637dd19003ff06505023929
Test InvalidUCRData more explicitly
corehq/apps/userreports/tests/test_save_errors.py
corehq/apps/userreports/tests/test_save_errors.py
from __future__ import absolute_import
from __future__ import unicode_literals

import uuid

from alembic.operations import Operations
from alembic.runtime.migration import MigrationContext
from django.test import TestCase, override_settings

from corehq.apps.userreports.app_manager.helpers import clean_table_name
from corehq.apps.userreports.const import UCR_SQL_BACKEND
from corehq.apps.userreports.exceptions import TableNotFoundWarning, MissingColumnWarning
from corehq.apps.userreports.models import DataSourceConfiguration, InvalidUCRData
from corehq.apps.userreports.util import get_indicator_adapter
from six.moves import range


def get_sample_config(domain=None):
    return DataSourceConfiguration(
        domain=domain or 'domain',
        display_name='foo',
        referenced_doc_type='CommCareCase',
        table_id=clean_table_name('domain', str(uuid.uuid4().hex)),
        configured_indicators=[{
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": 'name'
            },
            "column_id": 'name',
            "display_name": 'name',
            "datatype": "string"
        }],
    )


class SaveErrorsTest(TestCase):

    def setUp(self):
        self.config = get_sample_config()

    def tearDown(self):
        self.config = get_sample_config()
        self._get_adapter().drop_table()

    def _get_adapter(self):
        return get_indicator_adapter(self.config, raise_errors=True)

    def test_raise_error_for_missing_table(self):
        adapter = self._get_adapter()
        adapter.drop_table()

        doc = {
            "_id": '123',
            "domain": "domain",
            "doc_type": "CommCareCase",
            "name": 'bob'
        }
        with self.assertRaises(TableNotFoundWarning):
            adapter.best_effort_save(doc)

    def test_missing_column(self):
        adapter = self._get_adapter()
        adapter.build_table()
        with adapter.engine.begin() as connection:
            context = MigrationContext.configure(connection)
            op = Operations(context)
            op.drop_column(adapter.get_table().name, 'name')

        doc = {
            "_id": '123',
            "domain": "domain",
            "doc_type": "CommCareCase",
            "name": 'bob'
        }
        with self.assertRaises(MissingColumnWarning):
            adapter.best_effort_save(doc)

    def test_non_nullable_column(self):
        self.config.configured_indicators[0]['is_nullable'] = False
        self.config._id = 'docs id'
        adapter = self._get_adapter()
        adapter.build_table()

        doc = {
            "_id": '123',
            "domain": "domain",
            "doc_type": "CommCareCase",
            "name": None
        }
        adapter.best_effort_save(doc)

        self.assertEqual(InvalidUCRData.objects.count(), 1)


class AdapterBulkSaveTest(TestCase):

    def setUp(self):
        self.domain = 'adapter_bulk_save'
        self.config = get_sample_config(domain=self.domain)
        self.config.save()
        self.adapter = get_indicator_adapter(self.config, raise_errors=True)

    def tearDown(self):
        self.config.delete()
        self.adapter.clear_table()

    def test_bulk_save(self):
        docs = []
        for i in range(10):
            docs.append({
                "_id": str(i),
                "domain": self.domain,
                "doc_type": "CommCareCase",
                "name": 'doc_name_' + str(i)
            })

        self.adapter.build_table()
        self.adapter.bulk_save(docs)
        self.assertEqual(self.adapter.get_query_object().count(), 10)

        self.adapter.bulk_delete([doc['_id'] for doc in docs])
        self.assertEqual(self.adapter.get_query_object().count(), 0)

    def test_save_rows_empty(self):
        self.adapter.build_table()
        self.adapter.save_rows([])
Python
0
@@ -2820,16 +2820,64 @@ ave(doc) +%0A%0A invalid = InvalidUCRData.objects.all() %0A @@ -2898,41 +2898,149 @@ ual( -I +len(i nvalid -UCRData.objects.count(), 1 +), 1)%0A self.assertEqual(invalid%5B0%5D.validation_name, 'not_null_violation')%0A self.assertEqual(invalid%5B0%5D.doc_id, '123' )%0A%0A%0A
7388de0439913a8a33ac47a3cec14546e2860737
Add code example with loggable quantities in _CustomAction
hoomd/custom_action.py
hoomd/custom_action.py
from abc import ABC, abstractmethod from hoomd.parameterdicts import ParameterDict from hoomd.operation import _HOOMDGetSetAttrBase class _CustomAction(ABC): """Base class for all Python ``Action``s. This class must be the parent class for all Python ``Action``s. This class requires all subclasses to implement the act method which performs the Python object's task whether that be updating the system, writing output, or analyzing some property of the system. To use subclasses of this class, the object must be passed as an argument for the `hoomd.python_action._CustomOperation` constructor. If the pressure, rotational kinetic energy, or external field virial is needed for a subclass, the flags attribute of the class needs to be set with the appropriate flags from `hoomd.util.ParticleDataFlags`. .. code-block:: python from hoomd.python_action import _CustomAction from hoomd.util import ParticleDataFlags class ExampleActionWithFlag(_CustomAction): flags = [ParticleDataFlags.ROTATIONAL_KINETIC_ENERGY, ParticleDataFlags.PRESSURE_TENSOR, ParticleDataFlags.EXTERNAL_FIELD_VIRIAL] def act(self, timestep): pass For advertising loggable quantities through the `hoomd.python_action._CustomOperation` object, the class attribute ``log_quantities`` can be used. The dictionary expects string keys with the name of the loggable and `hooomd.logger.LoggerQuantity` objects as the values. """ flags = [] log_quantities = {} def __init__(self): pass def attach(self, simulation): self._state = simulation.state def detach(self): if hasattr(self, '_state'): del self._state @abstractmethod def act(self, timestep): pass class _InternalCustomAction(_CustomAction, _HOOMDGetSetAttrBase): """An internal class for Python ``Action``s. Gives additional support in using HOOMD constructs like ``ParameterDict``s and ``TypeParameters``. """ pass
Python
0
@@ -1561,16 +1561,583 @@ values. +%0A%0A .. code-block:: python%0A%0A from hoomd.python_action import _CustomAction%0A from hoomd.logger import LoggerQuantity%0A%0A%0A class ExampleActionWithFlag(_CustomAction):%0A def __init__(self):%0A self.log_quantities = %7B%0A 'loggable': LoggerQuantity('scalar_loggable',%0A self.__class__,%0A flag='scalar')%7D%0A%0A def loggable(self):%0A return 42%0A%0A def act(self, timestep):%0A pass %0A %22%22%22
3ef08fc11a512070e02586d7981c0c539eabff13
fix wrong parameter name
host/scan_threshold.py
host/scan_threshold.py
from scan.scan import ScanBase
from daq.readout import open_raw_data_file

from analysis.analyze_raw_data import AnalyzeRawData

import logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - [%(levelname)-8s] (%(threadName)-10s) %(message)s")


class ThresholdScan(ScanBase):
    def __init__(self, config_file, definition_file=None, bit_file=None, device=None, scan_identifier="scan_threshold", scan_data_path=None):
        super(ThresholdScan, self).__init__(config_file=config_file, definition_file=definition_file, bit_file=bit_file, device=device, scan_identifier=scan_identifier, scan_data_path=scan_data_path)

    def scan(self, mask=3, repeat=100, scan_parameter='PlsrDAC', scan_paramter_value=None):
        '''Scan loop

        Parameters
        ----------
        mask : int
            Number of mask steps.
        repeat : int
            Number of injections per scan step.
        scan_parameter : string
            Name of global register.
        scan_paramter_value : list, tuple
            Specify scan steps. These values will be written into global register scan_parameter.
        '''
        if scan_paramter_value is None:
            scan_paramter_value_list = range(0, 101, 1)  # default
        else:
            scan_paramter_value_list = list(scan_paramter_value)

        with open_raw_data_file(filename=self.scan_data_filename, title=self.scan_identifier, scan_parameters=[scan_parameter]) as raw_data_file:
            for scan_paramter_value in scan_paramter_value_list:
                if self.stop_thread_event.is_set():
                    break
                logging.info('Scan step: %s %d' % (scan_parameter, scan_paramter_value))

                commands = []
                commands.extend(self.register.get_commands("confmode"))
                self.register.set_global_register_value(scan_parameter, scan_paramter_value)
                commands.extend(self.register.get_commands("wrregister", name=[scan_parameter]))
                self.register_utils.send_commands(commands)

                self.readout.start()

                cal_lvl1_command = self.register.get_commands("cal")[0] + self.register.get_commands("zeros", length=40)[0] + self.register.get_commands("lv1")[0] + self.register.get_commands("zeros", mask_steps=mask)[0]
                self.scan_loop(cal_lvl1_command, repeat=repeat, mask=mask, mask_steps=[], double_columns=[], same_mask_for_all_dc=True, hardware_repeat=True, digital_injection=False, eol_function=None)

                self.readout.stop(timeout=10)

                # saving data
                raw_data_file.append(self.readout.data, scan_parameters={scan_parameter: scan_paramter_value})

    def analyze(self):
        with AnalyzeRawData(raw_data_file=scan.scan_data_filename + ".h5", analyzed_data_file=self.scan_data_filename + "_interpreted.h5") as analyze_raw_data:
            analyze_raw_data.create_tot_hist = False
            analyze_raw_data.create_threshold_hists = True
            analyze_raw_data.create_threshold_mask = True
            analyze_raw_data.interpreter.set_warning_output(False)  # so far the data structure in a threshold scan was always bad, too many warnings given
            analyze_raw_data.interpret_word_table(FEI4B=scan.register.fei4b)
            analyze_raw_data.interpreter.print_summary()
            analyze_raw_data.plot_histograms(scan_data_filename=scan.scan_data_filename)


if __name__ == "__main__":
    import configuration
    scan = ThresholdScan(config_file=configuration.config_file, bit_file=configuration.bit_file, scan_data_path=configuration.scan_data_path)
    scan.start(use_thread=True, scan_paramter_values=range(0, 101, 2))
    scan.stop()
    scan.analyze()
Python
0.000003
@@ -735,16 +735,17 @@ er_value +s =None):%0D @@ -737,32 +737,32 @@ _values=None):%0D%0A - '''Scan @@ -1033,16 +1033,17 @@ er_value +s : list, @@ -1192,16 +1192,17 @@ er_value +s is None @@ -1283,24 +1283,24 @@ else:%0D%0A - @@ -1350,16 +1350,17 @@ er_value +s )%0D%0A%0D%0A
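The rename matters because the caller at the bottom of the file already passes the plural keyword. Assuming ScanBase.start forwards its keyword arguments to scan() (an assumption; the base class is not shown here), the singular name fails loudly:

# With the old signature scan(..., scan_paramter_value=None), this call:
scan.start(use_thread=True, scan_paramter_values=range(0, 101, 2))
# would raise (if start() forwards **kwargs to scan()):
#   TypeError: scan() got an unexpected keyword argument 'scan_paramter_values'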
94ecbdc67dd72c671862aea29fd5525ea92650d8
Update model.py
hurricane/model.py
hurricane/model.py
Python
0.000001
@@ -1 +1,1805 @@ %0A +import sys%0Aimport os%0Aimport urllib2%0Aimport datetime%0Aimport time%0Aimport psycopg2%0Aimport pandas%0Afrom subprocess import call, Popen%0A%0A# pull the 6 hr forecast track forecast from NHC %0A#os.system(%22wget http://www.srh.noaa.gov/ridge2/Precip/qpehourlyshape/latest/last_1_hours.tar.gz -O last_1_hours.tar.gz%22)%0A#os.system(%22mv last_1_hours.tar.gz last_1_hours.tar%22)%0A%0A#os.system(%22tar xvf last_1_hours.tar%22)%0A%0A%0A#last_1hr_shp = './latest/last_1_hours.shp'%0A#last_hr_shp2pgsql = 'ogr2ogr -f %22PostgreSQL%22 PG:%22user=postgres dbname=hamlet password=password%22 %7B%7D -t_srs EPSG:4326 -nln last_1hr_qpe -overwrite'.format(last_1hr_shp)%0A#print last_hr_shp2pgsql%0A#call(last_hr_shp2pgsql, shell = True)%0A%0Aconn_string = %22dbname='hamlethurricane' user=postgres port='5432' host='127.0.0.1' password='password'%22%0A%0Aprint %22Connecting to database...%22%0A%0Atry:%0A%09conn = psycopg2.connect(conn_string)%0Aexcept Exception as e:%0A%09print str(e)%0A%09sys.exit()%0A%0Aprint %22Connected!%5Cn%22%0A%0A%0Adataframe_cur = conn.cursor()%0A%0Adataframe_cur.execute(%22%22%22Select * from hurricane_irene%22%22%22)%0A%0Adata = dataframe_cur.fetchall()%0A%0Acolnames = %5Bdesc%5B0%5D for desc in dataframe_cur.description%5D%0A%0Adataframe = pandas.DataFrame(data)%0A%0Adataframe.columns = colnames%0A%0Aprint data%0Aprint dataframe%0A%0Aconn.commit()%0A%0Anum_feat = len(data)%0A%0Afor i in range(len(data))%0A%09os.system('pgsql2shp -f %7B%7D -u postgres dbname=hamlet password=password%22 %7B%7D -t_srs EPSG:4326 -nln last_1hr_qpe -overwrite ').%0A%0A#drop_cur.close()%0A%0A# hurricane_cur = conn.cursor() %0A%0A# hurricane_cur.execute(%22%22%22%0A# create table roads_flooded_bunco as %0A# select%0A# a.gid,%0A# street_nam,%0A# sum(b.globvalue),%0A# a.geom%0A# from conterlines_poly as a%0A# inner join last_1hr_qpe as b %0A# on st_dwithin(a.geom::geometry(MULTIpolygon, 4326), b.wkb_geometry::geometry(point, 4326), 0.025)%0A# group by a.gid, a.street_nam, a.geom;%22%22%22)%0A%0A%0A# conn.commit()%0A
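The script added by this diff pulls rows with a raw cursor and then rebuilds column names from cursor.description to label a pandas DataFrame. pandas can do both in one step; a sketch reusing the script's own connection settings:

import pandas
import psycopg2

conn = psycopg2.connect("dbname='hamlethurricane' user=postgres "
                        "port='5432' host='127.0.0.1' password='password'")
# read_sql executes the query and labels the columns in one call, replacing
# the fetchall() / cursor.description pair used in the added script.
dataframe = pandas.read_sql("select * from hurricane_irene", conn)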
c78cb26ff07712027b3ae340d6209482e8708641
implement single-length tuple
vlermv/transformers/raw.py
vlermv/transformers/raw.py
import posixpath

error_msg = '''The index must be a string.'''

def to_path(key):
    return tuple(key.strip('/').split('/'))

def from_path(path):
    return posixpath.join(*path)
Python
0.999999
@@ -84,46 +84,264 @@ -return tuple(key.strip('/').split('/') +if isinstance(key, tuple) and len(key) == 1:%0A key = key%5B0%5D%0A%0A if hasattr(key, 'strip') and hasattr(key, 'split'):%0A return tuple(key.strip('/').split('/'))%0A else:%0A raise TypeError('Key must be string-like or a tuple of length one.' )%0A%0Ad
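After the change, a length-one tuple is unwrapped before the string is split, and anything that is neither string-like nor such a tuple is rejected. Expected behavior of the patched to_path, as a usage sketch:

assert to_path('a/b/c') == ('a', 'b', 'c')
assert to_path(('a/b/c',)) == ('a', 'b', 'c')  # length-1 tuple unwrapped first
try:
    to_path(('a', 'b'))  # longer tuples are not string-like: TypeError
except TypeError:
    pass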
8cc4816556d43fde3f51b75d985fbf2e15299302
Add version and a help text
subvenv/core.py
subvenv/core.py
#!/usr/bin/env python
# coding=utf-8

import json
import logging
import os
import sys

import click

from collections import namedtuple

log = logging.getLogger(__name__)

HELP_COMMANDS = dict(help_option_names=['-h', '--help'])


class VirtualenvError(Exception):
    pass


def get_virtualenv():
    path = os.getenv('VIRTUAL_ENV')
    if not path:
        raise VirtualenvError(
            'Trying to get virtualenv data while not in a virtualenv'
        )
    name = os.path.basename(path)
    interpreter = os.path.join(path, 'bin', 'python')

    Virtualenv = namedtuple('Virtualenv', ['name', 'path', 'interpreter'])
    return Virtualenv(name, path, interpreter)


def post_mkproject(args=None):
    """
    Create a Sublime text project file on virtualenvwrapper project creation.
    """
    try:
        venv = get_virtualenv()
    except VirtualenvError:
        sys.exit('You need to be inside a virtualenv for using subvenv.')

    project_path_file = os.path.join(venv.path, '.project')
    try:
        with open(project_path_file, 'r') as f:
            project_folder = f.readline().rstrip('\r\n')
    except IOError:
        sys.exit('Virtualenv project not found.\n')

    create_sublime_project_file(project_folder, venv.name, venv.interpreter)


def create_sublime_project_file(project_folder, project_name, interpreter):
    """
    Create a Sublime Text project file in the given project folder.

    Args:
        project_folder (str): path to project folder
        project_name (str): name of the project
        interpreter (str): path to the Python interpreter used for the project
    """
    sublime_file_name = "{}.sublime-project".format(project_name)
    settings_text = {
        "folders": [
            {
                "follow_symlinks": True,
                "path": project_folder,
            },
        ],
        "settings": {
            "python_interpreter": interpreter,
        },
    }

    target_path = (os.path.join(project_folder, sublime_file_name))
    try:
        with open(target_path, 'w') as f:
            f.write(json.dumps(settings_text, sort_keys=True, indent=4))
    except IOError:
        sys.exit(
            'Cannot create file.\n\
Attempted path: {}'.format(project_folder)
        )


@click.group(context_settings=HELP_COMMANDS)
def cli():
    """
    Subvenv is a tool for creating virtualenv-friendly Sublime Text project
    files.

    It can be used as a standalone or as a plugin for Virtualenwrapper.

    See https://github.com/Railslide/subvenv for more information.
    """
    pass


@cli.command()
@click.option(
    '--folder',
    type=click.Path(),
    help='Target folder for file creation.'
)
def make_project(folder=None):
    """
    Create a Sublime project file for the current virtual environment.

    If no target folder is specified, the file will be created in the
    current working directory.
    """
    if not folder:
        folder = os.getcwd()
    folder = os.path.abspath(folder)

    try:
        venv = get_virtualenv()
    except VirtualenvError:
        sys.exit('You need to be inside a virtualenv for using subvenv.')

    create_sublime_project_file(folder, venv.name, venv.interpreter)


if __name__ == '__main__':
    cli()
Python
0.000001
@@ -31,16 +31,32 @@ =utf-8%0A%0A +import argparse%0A import j @@ -3267,16 +3267,731 @@ eter)%0A%0A%0A +def main():%0A description = (%0A 'Subvenv is a tool for creating virtualenv-friendly Sublime Text '%0A 'project files.%5CnIt can be used as a standalone or as a plugin for '%0A 'Virtualenwrapper. %5Cn%5CnSee https://github.com/Railslide/subvenv '%0A 'for more information.'%0A )%0A parser = argparse.ArgumentParser(%0A description=description,%0A formatter_class=argparse.RawTextHelpFormatter%0A )%0A parser.add_argument(%0A 'make_project',%0A help=%22create a Sublime Text project file in the given project folder%22%0A )%0A parser.add_argument(%0A %22-v%22, %22--version%22,%0A help=%22print version information%22,%0A action=%22store_true%22%0A )%0A parser.parse_args()%0A%0A%0A if __nam @@ -4012,15 +4012,28 @@ __':%0A + main()%0A # cli()%0A
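The change grafts an argparse entry point onto a module that already defines a click CLI (note the diff's final hunk: __main__ now calls main() and comments out cli()). The argparse half on its own, runnable as a script:

import argparse

def main():
    parser = argparse.ArgumentParser(
        description='Create virtualenv-friendly Sublime Text project files.',
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument(
        'make_project',
        help='create a Sublime Text project file in the given project folder')
    parser.add_argument(
        '-v', '--version',
        help='print version information',
        action='store_true')
    return parser.parse_args()

if __name__ == '__main__':
    main()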
edb6a027aae1656d07112c0dcb455d6891a44992
split around nan test is passing
test_data_processor.py
test_data_processor.py
import DataProcessor as dp import numpy as np def test_unsize_vector(): n = 3 a = np.ones(n) b = np.append(a, np.array([np.nan, np.nan])) c = dp.unsize_vector(a, n) assert (a == c).all() def test_time_vector(): numSamples = 100 sampleRate = 50 time = dp.time_vector(numSamples, sampleRate) assert (time == np.linspace(0., 2. - 1. / 50., num=100)).all() def test_split_around_nan(): # build an array of length 25 with some nan values a = np.ones(25) * np.nan b = np.arange(25) for i in b: if i not in [0, 5, 20, 24]: a[i] = b[i] # run the function and test the results indices, arrays = dp.split_around_nan(a) assert indices[0] == (1, 5) assert indices[1] == (6, 20) assert indices[2] == (21, 24) # build an array of length 25 with some nan values a = np.ones(25) * np.nan b = np.arange(25) for i in b: if i not in [5, 20]: a[i] = b[i] # run the function and test the results indices, arrays = dp.split_around_nan(a) assert indices[0] == (0, 5) assert indices[1] == (6, 20) assert indices[2] == (21, 25)
Python
0.000296
@@ -700,94 +700,255 @@ ert -indices%5B0%5D == (1, 5)%0A assert indices%5B1%5D == (6, 20)%0A assert indices%5B2%5D == (21, 24 +len(indices) == 7%0A assert indices%5B0%5D == (0, 1)%0A assert indices%5B1%5D == (1, 5)%0A assert indices%5B2%5D == (5, 6)%0A assert indices%5B3%5D == (6, 20)%0A assert indices%5B4%5D == (20, 21)%0A assert indices%5B5%5D == (21, 24)%0A assert indices%5B6%5D == (24, 25 )%0A @@ -1201,32 +1201,61 @@ t_around_nan(a)%0A + assert len(indices) == 5%0A assert indic @@ -1300,45 +1300,487 @@ == ( -6, 20)%0A assert indices%5B2%5D == (21, 25 +5, 6)%0A assert indices%5B2%5D == (6, 20)%0A assert indices%5B3%5D == (20, 21)%0A assert indices%5B4%5D == (21, 25)%0A a = np.array(%5Bnp.nan, 1, 2, 3, np.nan, np.nan, 6, 7, np.nan%5D)%0A # run the function and test the results%0A indices, arrays = dp.split_around_nan(a)%0A assert len(indices) == 6%0A assert indices%5B0%5D == (0, 1)%0A assert indices%5B1%5D == (1, 4)%0A assert indices%5B2%5D == (4, 5)%0A assert indices%5B3%5D == (5, 6)%0A assert indices%5B4%5D == (6, 8)%0A assert indices%5B5%5D == (8, 9 )%0A
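The rewritten assertions pin down the contract of split_around_nan: it returns half-open (start, stop) pairs that tile the whole array, with each non-nan run kept as one span and every individual nan given its own one-element span. The record only shows the tests; an implementation sketch that satisfies all three cases:

import numpy as np

def split_around_nan(a):
    indices = []
    start = 0
    for i, isnan in enumerate(np.isnan(a)):
        if isnan:
            if i > start:
                indices.append((start, i))  # the non-nan run before this nan
            indices.append((i, i + 1))      # each nan is its own span
            start = i + 1
    if start < len(a):
        indices.append((start, len(a)))     # trailing non-nan run
    arrays = [a[s:e] for s, e in indices]
    return indices, arrays

a = np.array([np.nan, 1, 2, 3, np.nan, np.nan, 6, 7, np.nan])
indices, _ = split_around_nan(a)
assert indices == [(0, 1), (1, 4), (4, 5), (5, 6), (6, 8), (8, 9)]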
e5c3ece6ebad2b7ab524e074fd982e7fc11497b1
set better filter
watcher/tweakerswatcher.py
watcher/tweakerswatcher.py
import requests import json import os.path from watcher.watcher import Watcher class TweakersWatcher(Watcher): watcher_name = 'Tweakers Pricewatch' filename = 'site_tweakers.txt' def parse_site(self): url = 'https://tweakers.net/xmlhttp/xmlHttp.php?application=tweakbase&type=filter&action=deals&fromHash=1&currFilters=q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA&output=json'; request = requests.get(url) json_object = json.loads(request.text) return json_object['data']['html'] def check_price_error(self): url = 'https://tweakers.net/pricewatch/deals/#filter:q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA' message_text = 'Mogelijke prijsfout, check: {0}'.format(url) html = self.parse_site() if not os.path.isfile(self.filename): self.write_to_file(self.filename, html) exit(0) else: with open(self.filename, 'r') as f: file_content = f.read() if file_content != html: self.send_telegram(self.watcher_name, message_text) self.write_to_file(self.filename, html)
Python
0.000002
@@ -316,147 +316,200 @@ als& -fromHash=1&currFilters=q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA +dayOffset=1&minRelativePriceDrop=0.4&maxRelativePriceDrop=1&minAbsolutePriceDrop=30&maxAbsolutePriceDrop=&minCurrentPrice=0&maxCurrentPrice=&minPrices=3&minViews=0&of=absolutePriceDrop&od=desc &out @@ -521,9 +521,8 @@ son' -; %0A
6f641efb983c88427b0ec7955792a48ecbc0e0ea
add check to avoid update status issue of event_cases
web/controller/strategy.py
web/controller/strategy.py
# -*- coding:utf-8 -*- __author__ = 'Ulric Qin' from web import app from flask import request, jsonify from web.model.strategy import Strategy from frame import config from fe_api import post2FeUpdateEventCase import logging log = logging.getLogger(__name__) @app.route('/strategy/update', methods=['POST']) def strategy_update_post(): sid = request.form['sid'].strip() metric = request.form['metric'].strip() tags = request.form['tags'].strip() max_step = request.form['max_step'].strip() priority = request.form['priority'].strip() note = request.form['note'].strip() func = request.form['func'].strip() op = request.form['op'].strip() right_value = request.form['right_value'].strip() run_begin = request.form['run_begin'].strip() run_end = request.form['run_end'].strip() tpl_id = request.form['tpl_id'].strip() data = {'id': sid} alarmAdUrl = config.JSONCFG['shortcut']['falconUIC'] + "/api/v1/alarmadjust/whenstrategyupdated" if not metric: return jsonify(msg='metric is blank') if not note: return jsonify(msg='note is blank') if metric == 'net.port.listen' and '=' not in tags: return jsonify(msg='if metric is net.port.listen, tags should like port=22') if sid: # update Strategy.update_dict( { 'metric': metric, 'tags': tags, 'max_step': max_step, 'priority': priority, 'func': func, 'op': op, 'right_value': right_value, 'note': note, 'run_begin': run_begin, 'run_end': run_end }, 'id=%s', [sid] ) respCode = post2FeUpdateEventCase(alarmAdUrl, data) if respCode != 200: log.error(alarmAdUrl + " got " + str(respCode) + " with " + str(data)) return jsonify(msg='') # insert Strategy.insert( { 'metric': metric, 'tags': tags, 'max_step': max_step, 'priority': priority, 'func': func, 'op': op, 'right_value': right_value, 'note': note, 'run_begin': run_begin, 'run_end': run_end, 'tpl_id': tpl_id } ) respCode = post2FeUpdateEventCase(alarmAdUrl, data) if respCode != 200: log.error(alarmAdUrl + " got " + str(respCode) + " with " + str(data)) return jsonify(msg='') @app.route('/strategy/<sid>') def strategy_get(sid): sid = int(sid) s = Strategy.get(sid) if not s: return jsonify(msg='no such strategy') return jsonify(msg='', data=s.to_json()) @app.route('/strategy/delete/<sid>') def strategy_delete_get(sid): sid = int(sid) s = Strategy.get(sid) data = {'id': sid} alarmAdUrl = config.JSONCFG['shortcut']['falconUIC'] + "/api/v1/alarmadjust/whenstrategydeleted" if not s: return jsonify(msg='no such strategy') Strategy.delete_one(sid) respCode = post2FeUpdateEventCase(alarmAdUrl, data) if respCode != 200: log.error(alarmAdUrl + " got " + str(respCode) + " with " + str(data)) return jsonify(msg='')
Python
0
@@ -1251,16 +1251,343 @@ rt=22')%0A + st = Strategy.get(sid)%0A need_reset = False%0A if st.func != func or st.right_value != right_value or st.op != op:%0A need_reset = True%0A elif st.metric != metric or st.tags != tags:%0A log.info(%22g 2%22)%0A need_reset = True%0A log.debug(%22need_reset: %22 + str(need_reset))%0A log.debug(str(st.to_json())) %0A if @@ -2057,24 +2057,51 @@ %5D%0A )%0A + if need_reset:%0A resp @@ -2152,24 +2152,28 @@ ta)%0A + if respCode @@ -2172,32 +2172,36 @@ espCode != 200:%0A + log.
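The added block loads the current Strategy row and compares it field by field with the submitted values, setting need_reset only when something that affects alarm state actually changed, so event cases are not reset on no-op edits. The compare-before-write pattern in isolation (field names taken from the diff; the helper itself is illustrative):

RESET_FIELDS = ('func', 'op', 'right_value', 'metric', 'tags')

def needs_reset(current, incoming, fields=RESET_FIELDS):
    # current: the existing model object; incoming: dict of new form values.
    # Only a real change in one of the watched fields triggers a reset.
    return any(getattr(current, f) != incoming[f] for f in fields)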
2790a7c5f7f90eaafa543651a6cf273c9f40e6e1
tidy engine
team1/engine.py
team1/engine.py
#! /usr/bin/env python import os import sys import json import difflib #{ 'document': ['terms', 'terms'], # 'document2': ['1','2','3'], #} def simple_search_engine(index, query): return [doc_name for doc_name, terms in index.iteritems() if query in terms] def weighted_search_engine(index, query): results = {} search_terms = query.split() for doc_name, terms in index.iteritems(): matches = 0 for search_term in search_terms: if search_term in terms: matches += 1 rating = float(matches) / len(search_terms) results[doc_name] = rating return results def guessy_weighted_search_engine(index, query): results = {} search_terms = query.split() for doc_name, terms in index.iteritems(): matches = 0 for search_term in search_terms: search_termys = difflib.get_close_matches(search_term, terms) for search_termy in search_termys: if search_termy in terms: matches += 1 rating = float(matches) / len(search_terms) results[doc_name] = rating return results def display_search_results(engine_name, results): print(engine_name) if not results: print(' <No reults>') return for i, result in enumerate(results, 1): print('%d. %s' % (i, result)) def sort_by_score(result_dict): return [result for result, score in sorted(result_dict.items(), key=lambda res_score: res_score[1]) if score > 0 ] def load_index(filename): if os.path.exists(filename): return json.load(open(filename)) else: print("Please run:\n\tpython get_documents.py > index.txt") sys.exit() if __name__ == '__main__': index = load_index('index.txt') search_query = raw_input('Search terms: ') print('') display_search_results( 'Simple search engine returns:', simple_search_engine(index=index, query=search_query) ) # New line for readability print('') weighted_results = weighted_search_engine(index=index, query=search_query) display_search_results( 'Weighted search engine returns:', sort_by_score(weighted_results) ) print('') guessy_weighted_results = guessy_weighted_search_engine(index=index, query=search_query) display_search_results( 'Guessy weighted search engine returns:', sort_by_score(guessy_weighted_results) )
Python
0.000001
@@ -1405,56 +1405,45 @@ -return %5Bresult%0A for result, score in%0A +sorted_pairs = sorted(%0A @@ -1452,17 +1452,8 @@ - sorted( resu @@ -1468,16 +1468,40 @@ items(), +%0A key=lam @@ -1527,17 +1527,17 @@ score%5B1%5D -) +, %0A @@ -1537,24 +1537,124 @@ + reverse=True%0A )%0A return %5Bresult for result, score in sorted_pairs if score %3E 0 @@ -1653,25 +1653,16 @@ core %3E 0 -%0A %5D%0A%0Adef l @@ -1854,16 +1854,17 @@ exit()%0A%0A +%0A if __nam @@ -1918,16 +1918,17 @@ x.txt')%0A +%0A sear @@ -1965,16 +1965,48 @@ erms: ') +%0A%0A # New line for readability %0A pri @@ -2154,39 +2154,8 @@ )%0A%0A - # New line for readability%0A
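After the tidy-up, sort_by_score makes one sorted() call with reverse=True and then filters out non-positive scores, instead of the previous inlined sort expression. The function as it reads after the diff, with a quick check:

def sort_by_score(result_dict):
    sorted_pairs = sorted(result_dict.items(),
                          key=lambda res_score: res_score[1],
                          reverse=True)
    return [result for result, score in sorted_pairs if score > 0]

assert sort_by_score({'a': 0.5, 'b': 0.0, 'c': 1.0}) == ['c', 'a']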
c0566ab5f4dabdf0c366d6b6a32cbd8ca3fb4a75
use idle_add callback for animation
test/animate.py
test/animate.py
import matplotlib matplotlib.use('GTKAgg') import pygrib import matplotlib.pyplot as plt import numpy as np import time from mpl_toolkits.basemap import Basemap # animation example. grbs = pygrib.open('../sampledata/safrica.grib2') # grab all "brightness temp" grib messages. btemps = [grb for grb in grbs if grb['name']=='Brightness temperature'] lats, lons = grb.latlons() projd = grb.projparams grbs.close() print projd # create a map projection for the domain, plot 1st image on it. m =\ Basemap(projection=projd['proj'],lat_ts=projd['lat_ts'],lon_0=projd['lon_0'],\ lat_0=projd['lat_0'],rsphere=(projd['a'],projd['b']),\ llcrnrlat=lats[0,0],urcrnrlat=lats[-1,-1],\ llcrnrlon=lons[0,0],urcrnrlon=lons[-1,-1],resolution='i') plt.ion() # set interactive mode on plt.figure(figsize=(8,7)) m.drawcoastlines() m.drawcountries() grb = btemps[0] im = m.imshow(grb['values'],interpolation='nearest',vmin=230,vmax=310) plt.colorbar(orientation='horizontal') m.drawparallels(np.arange(-80,10,10),labels=[1,0,0,0]) m.drawmeridians(np.arange(-80,81,20),labels=[0,0,0,1]) plt.title(grb,fontsize=8) plt.draw() # loop 4 times, plot all images sequentially. for loop in range(4): time.sleep(5) for grb in btemps: print grb im.set_data(grb['values']) plt.title(grb,fontsize=8) plt.draw() time.sleep(5)
Python
0.000006
@@ -49,16 +49,31 @@ t pygrib +, time ,gobject %0Aimport @@ -120,20 +120,8 @@ np%0A -import time%0A from @@ -412,20 +412,8 @@ se() -%0Aprint projd %0A%0A# @@ -743,44 +743,8 @@ i')%0A -plt.ion() # set interactive mode on%0A plt. @@ -1038,16 +1038,22 @@ 0,0,1%5D)%0A +txt = plt.titl @@ -1074,125 +1074,278 @@ =8)%0A -plt.draw()%0A%0A# loop 4 times, plot all images sequentially.%0Afor loop in r +%0Amanager = plt.get_current_fig_manager()%0Adef updatefig(*args):%0A global cnt, loop, delay%0A grb = btemps%5Bcnt%5D%0A im.set_data(grb%5B'values'%5D)%0A txt.set_text(repr(grb))%0A m an +a ge -(4):%0A time.sleep(5)%0A for grb in +r.canvas.draw()%0A if cnt==0: time.sleep(delay)%0A cnt = cnt+1%0A if cnt==len( btemps +) :%0A @@ -1353,113 +1353,328 @@ -print grb%0A im.set_data(grb%5B'values'%5D)%0A plt.title(grb,fontsize=8)%0A plt.draw()%0Atime.sleep(5 + loop = loop + 1%0A print 'done loop = ',loop%0A if loop == loops:%0A print 'all done - close plot window to exit'%0A return False%0A else:%0A cnt = 0%0A return True%0A else:%0A return True%0A%0Acnt = 0%0Adelay = 5%0Aloops = 4%0Aloop = 0%0Agobject.idle_add(updatefig)%0Aplt.show( )%0A
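The rewrite replaces the plt.ion()/time.sleep() polling loop with gobject.idle_add(updatefig), where the callback returns True to stay scheduled and False to stop; gobject is a GTK2-era API tied to the GTKAgg backend. A backend-neutral sketch of the same frame-update idea using matplotlib's own animation support (an alternative, not the record's code):

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

fig, ax = plt.subplots()
im = ax.imshow(np.random.rand(10, 10), vmin=0, vmax=1)

def updatefig(frame):
    im.set_data(np.random.rand(10, 10))  # swap in the next frame's data
    return (im,)

# interval: milliseconds between frames; repeat=False ends after one pass.
ani = FuncAnimation(fig, updatefig, frames=20, interval=200,
                    blit=True, repeat=False)
plt.show()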
65f5695b90054f73d7119f0c50be51f61de777fa
Print the time of the status check at GitHub.
tardis/tests/tests_slow/runner.py
tardis/tests/tests_slow/runner.py
import argparse import json import os import time import requests from tardis import __githash__ as tardis_githash parser = argparse.ArgumentParser(description="Run slow integration tests") parser.add_argument("--yaml", dest="yaml_filepath", help="Path to YAML config file for integration tests.") parser.add_argument("--atomic-dataset", dest="atomic_dataset", help="Path to atomic dataset.") test_command = ( "python setup.py test --test-path=tardis/tests/tests_slow/test_integration.py " "--args=\"-rs --integration-tests={0} --atomic-dataset={1} --remote-data\"" ) if __name__ == "__main__": args = parser.parse_args() while True: gh_request = requests.get( "https://api.github.com/repos/tardis-sn/tardis/branches/master" ) gh_master_head_data = json.loads(gh_request.content) gh_tardis_githash = gh_master_head_data['commit']['sha'] if gh_tardis_githash != tardis_githash: os.system("git pull origin master") os.system(test_command.format(args.yaml_filepath, args.atomic_dataset)) else: time.sleep(600)
Python
0.000022
@@ -9,16 +9,32 @@ rgparse%0A +import datetime%0A import j @@ -1189,16 +1189,210 @@ else:%0A + checked = datetime.datetime.now()%0A print %22Up-to-date. Checked on %7B0%7D %7B1%7D%22.format(%0A checked.strftime(%22%25d-%25b-%25Y%22), checked.strftime(%22%25H:%25M:%25S%22)%0A )%0A
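The new else branch stamps the "up-to-date" message with the time of the check via two strftime calls. The same output could come from a single format string, e.g.:

import datetime

checked = datetime.datetime.now()
# Equivalent to the diff's two strftime calls joined by a space.
print("Up-to-date. Checked on {0}".format(
    checked.strftime("%d-%b-%Y %H:%M:%S")))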
33956d6a31f3c019758a397688d6a29cd8fe9a45
Use profile likelihood when appropriate
test_grant_2.py
test_grant_2.py
#!/usr/bin/env python # Some tests written in support of Matt's grant-writing. # Daniel Klein, 10/26/2012 import numpy as np import matplotlib.pyplot as plt import networkx as nx from Network import Network from Models import StationaryLogistic, NonstationaryLogistic from Models import FixedMargins, alpha_norm from BinaryMatrix import approximate_conditional_nll from Utility import init_latex_rendering init_latex_rendering() # Parameters N = 20 G = 30 alpha_sd = 2.0 theta_true = { 'x_1': 2.0, 'x_2': -1.0 } target_degree = 2 # Setup network net = Network(N) alpha_norm(net, alpha_sd) # Setup data model and network covariates data_model = NonstationaryLogistic() covariates = [] for name in theta_true: covariates.append(name) data_model.beta[name] = theta_true[name] def f_x(i_1, i_2): return np.random.normal(0, 1.0) net.new_edge_covariate(name).from_binary_function_ind(f_x) # Instantiate network according to data model data_model.match_kappa(net, ('row_sum', target_degree)) net.generate(data_model) net.show_heatmap(order_by_row = 'alpha_out') net.show_heatmap(order_by_col = 'alpha_in') # Display network plt.figure(figsize = (17, 4.25)) plt.subplot(141) plt.title('Network') graph = nx.DiGraph() A = net.adjacency_matrix() for i in range(N): graph.add_node(i) for i in range(N): for j in range(N): if A[i,j]: graph.add_edge(i,j) pos = nx.graphviz_layout(graph, prog = 'neato') nx.draw(graph, pos, node_size = 60, with_labels = False) def grid_fit(fit_model, f_nll): # Evaluate likelihoods on a grid theta_star_1 = data_model.beta[covariates[0]] theta_star_2 = data_model.beta[covariates[1]] x = np.linspace(theta_star_1 - 2.0, theta_star_1 + 2.0, G) y = np.linspace(theta_star_2 - 2.0, theta_star_2 + 2.0, G) z = np.empty((G,G)) for i, theta_1 in enumerate(x): for j, theta_2 in enumerate(y): fit_model.beta[covariates[0]] = theta_1 fit_model.beta[covariates[1]] = theta_2 z[i,j] = f_nll(net, fit_model) nll_min = np.min(z) theta_opt_min = np.where(z == nll_min) theta_opt_1 = x[theta_opt_min[0][0]] theta_opt_2 = y[theta_opt_min[1][0]] # contour expects x, y, z generated by meshgrid... CS = plt.contour(x, y, np.transpose(z), colors = 'k') plt.plot(theta_star_1, theta_star_2, 'b*', markersize = 12) plt.plot(theta_opt_1, theta_opt_2, 'ro', markersize = 12) # plt.clabel(CS, inline = 1, fontsize = 10, fmt = '%1.1f') plt.xlabel(r'$\theta_2$', fontsize = 14) plt.ylabel(r'$\theta_1$', fontsize = 14) return theta_opt_1, theta_opt_2 # Grid search for stationary and non-stationary fits plt.subplot(142) plt.title('Stationary') grid_fit(StationaryLogistic(), lambda n, m: m.nll(n)) plt.subplot(143) plt.title('Nonstationary') grid_fit(NonstationaryLogistic(), lambda n, m: m.nll(n)) # Grid search for conditional fit plt.subplot(144) plt.title('Conditional') def f_nll(n, m): P = m.edge_probabilities(n) w = P / (1.0 - P) A = np.array(n.adjacency_matrix()) return approximate_conditional_nll(A, w) grid_fit(StationaryLogistic(), f_nll) for c in covariates: plt.figure() plt.scatter(net.edge_covariates[c].matrix(), net.adjacency_matrix()) plt.show() #plt.savefig('../grant/figs/simulated_data_no_wopt_sort_10.eps') plt.show()
Python
0
@@ -1537,42 +1537,64 @@ _nll -):%0A # Evaluate likelihoods on a +, profile = False, pre_offset = False):%0A # Initialize gri @@ -1844,16 +1844,146 @@ y((G,G)) +%0A%0A if pre_offset:%0A net.offset_extremes()%0A else:%0A net.initialize_offset()%0A%0A # Evaluate likelihoods on a grid %0A for @@ -2155,16 +2155,92 @@ theta_2%0A + if profile:%0A fit_model.fit(net, fix_beta = True)%0A @@ -3001,16 +3001,32 @@ m.nll(n) +, profile = True )%0Aplt.su @@ -3118,16 +3118,60 @@ m.nll(n) +,%0A profile = True, pre_offset = True )%0A%0A# Gri
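The change turns the plain grid scan into a profile likelihood: when profile is set, each (theta_1, theta_2) grid point first re-fits the model with fix_beta=True, so the nuisance parameters are optimized out before the negative log-likelihood is recorded. The loop's shape, abstracted from the record (the fit(net, fix_beta=True) interface is assumed from the diff, not verified):

import numpy as np

def nll_grid(fit_model, net, names, xs, ys, f_nll, profile=False):
    z = np.empty((len(xs), len(ys)))
    for i, t1 in enumerate(xs):
        for j, t2 in enumerate(ys):
            fit_model.beta[names[0]] = t1
            fit_model.beta[names[1]] = t2
            if profile:
                # Hold the betas fixed and re-optimize everything else,
                # yielding the profiled rather than plugged-in likelihood.
                fit_model.fit(net, fix_beta=True)
            z[i, j] = f_nll(net, fit_model)
    return z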
3bc6ab85f4fc2e3dba0e7c9d16a28fa370558021
remove useless comment
tests/images.py
tests/images.py
# -*- coding: utf-8 -*- """\ This is a python port of "Goose" orignialy licensed to Gravity.com under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. Python port was written by Xavier Grangier for Recrutae Gravity.com licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os import json import hashlib from goose import Goose from goose.configuration import Configuration from goose.utils import FileHelper from base import BaseMockTests, MockResponse from extractors import TestExtractionBase CURRENT_PATH = os.path.dirname(os.path.abspath(__file__)) class MockResponseImage(MockResponse): def image_content(self, req): md5_hash = hashlib.md5(req.get_full_url()).hexdigest() current_test = self.cls._get_current_testname() path = os.path.join(CURRENT_PATH, "data", "images", current_test, md5_hash) path = os.path.abspath(path) f = open(path, 'rb') content = f.read() f.close() return content def html_content(self, req): current_test = self.cls._get_current_testname() path = os.path.join(CURRENT_PATH, "data", "images", current_test, "%s.html" % current_test) path = os.path.abspath(path) return FileHelper.loadResourceFile(path) def content(self, req): if self.cls.data['url'] == req.get_full_url(): return self.html_content(req) return self.image_content(req) class ImageTests(TestExtractionBase): """\ Base Mock test case """ callback = MockResponseImage def loadData(self): """\ """ suite, module, cls, func = self.id().split('.') path = os.path.join(CURRENT_PATH, "data", module, func, "%s.json" % func) path = os.path.abspath(path) content = FileHelper.loadResourceFile(path) self.data = json.loads(content) def getArticle(self): """\ """ # load test case data self.loadData() # basic configuration # no image fetching config = self.getConfig() config.enable_image_fetching = True # run goose g = Goose(config=config) return self.extract(g) def test_basic_image(self): article = self.getArticle() fields = ['top_image'] self.runArticleAssertions(article=article, fields=fields) # def test_2(self): # article = self.getArticle()
Python
0
@@ -2931,67 +2931,4 @@ ds)%0A -%0A # def test_2(self):%0A # article = self.getArticle()%0A
08a65747d608fcc530adf6291a95104d4348eae6
apply RatingTargetMixin to test model
tests/models.py
tests/models.py
from __future__ import unicode_literals from django.db import models class TextSnippet(models.Model): text = models.TextField() user = models.ForeignKey('auth.User')
Python
0
@@ -67,27 +67,106 @@ ls%0A%0A -%0Aclass TextSnippet( +from generic_ratings.model_mixins import RatingTargetMixin%0A%0A%0Aclass TextSnippet(RatingTargetMixin, mode
bff7989e5ce0f5cde4176ee0652578100201c90b
Update about.py
tincan/about.py
tincan/about.py
# Copyright 2014 Rustici Software # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from tincan.serializable_base import SerializableBase from tincan.version import Version from tincan.extensions import Extensions class About(SerializableBase): """Stores info about this installation of `tincan`. :param version: The versions supported. This attribute is required. :type version: list of unicode :param extensions: Custom user data. This attribute is optional. :type extensions: :class:`tincan.Extensions` """ _props_req = [ 'version', ] _props = [ 'extensions', ] _props.extend(_props_req) @property def version(self): """Version for About :setter: Sets the version. If None is provided, defaults to `[tincan.Version.latest]`. If a string is provided, makes a 1-element list containing the string. :setter type: list | tuple | str | unicode | None :rtype: list """ return self._version @version.setter def version(self, value): def check_version(v): """Checks a single version string for validity. Raises if invalid. :param v: the version string to check :type v: list of str or unicode | tuple of str or unicode :raises ValueError """ if v in Version.supported: return # Construct the error message if isinstance(value, (list, tuple)): value_str = repr(v) + ' in ' + repr(value) else: value_str = repr(v) msg = ( "Tried to set property 'version' in a 'tincan.%s' object " "with an invalid value: %s\n" "Allowed versions are: %s" % ( self.__class__.__name__, value_str, ', '.join(map(repr, Version.supported)), ) ) raise ValueError(msg) if value is None: self._version = [Version.latest] elif isinstance(value, basestring): check_version(value) self._version = [value] elif isinstance(value, (list, tuple)): for v in value: check_version(v) self._version = list(value) else: raise TypeError( "Property 'version' in a 'tincan.%s' object must be set with a " "list, tuple, str, unicode or None. Tried to set it with: %s" % ( self.__class__.__name__, repr(value), )) @property def extensions(self): """Extensions for About :setter: Tries to convert to Extensions. If None is provided, sets to an empty `tincan.Extensions` dict. :setter type: :class:`tincan.Extensions` | dict | None :rtype: :class:`tincan.Extensions` """ return self._extensions @extensions.setter def extensions(self, value): if isinstance(value, Extensions): self._extensions = value elif value is None: self._extensions = Extensions() else: try: self._extensions = Extensions(value) except Exception as e: msg = ( "Property 'extensions' in a 'tincan.%s' object must be set with a " "tincan.Extensions, dict, or None.\n\n" % self.__class__.__name__, ) msg += e.message raise TypeError(msg) @extensions.deleter def extensions(self): del self._extensions
Python
0
@@ -1506,17 +1506,8 @@ ist%0A - %0A @@ -3513,17 +3513,16 @@ nsions%60%0A -%0A
5935a9f0ec35774f95b32465134d88d3e087fd1b
Use newer sphinx if available.
tools/sphinx.py
tools/sphinx.py
# Simple Sphinx tool and builder. import os from SCons.Script import * # Build sphinx documentation: def _action_sphinx(target, source, env): sourcedir = os.path.dirname(source[0].path) outdir = os.path.dirname(target[0].path) app = "%s %s %s %s" % (env['SPHINX_BUILD'], env['SPHINX_OPTS'], sourcedir, outdir) ret = env.Execute([app, 'tools/munge-sphinx-perl.pl']) if not ret: print "Build finished. The HTML pages are in " + outdir return ret def generate(env): """Add builders and construction variables for the sphinx tool.""" import SCons.Builder builder = SCons.Builder.Builder(action=_action_sphinx) # Use Unix 'install' rather than env.InstallAs(), due to scons bug #1751 install = SCons.Builder.Builder(action="install -d ${TARGET.dir} && " + \ "install -d ${TARGET.dir}/_static && " + \ "install -d ${TARGET.dir}/_sources && " + \ "install -d ${TARGET.dir}/modules && " + \ "install -d ${TARGET.dir}/_sources/modules && " + \ "install ${SOURCE.dir}/*.html ${TARGET.dir} && " + \ "install ${SOURCE.dir}/*.js ${TARGET.dir} && " + \ "install ${SOURCE.dir}/modules/*.html " + \ "${TARGET.dir}/modules && " + \ "install ${SOURCE.dir}/_sources/*.txt " + \ "${TARGET.dir}/_sources && " + \ "install ${SOURCE.dir}/_sources/modules/* " + \ "${TARGET.dir}/_sources/modules && " + \ "install ${SOURCE.dir}/_static/* ${TARGET.dir}/_static") env.Append(BUILDERS = {'Sphinx': builder, 'SphinxInstall':install}) env.AppendUnique(SPHINX_BUILD='/usr/bin/sphinx-build') env.AppendUnique(SPHINX_OPTS='-a -E -b html') def exists(env): """Make sure sphinx tools exist.""" return env.Detect("sphinx")
Python
0
@@ -1695,16 +1695,149 @@ tall%7D)%0A%0A + if os.path.exists('/usr/bin/sphinx-1.0-build'):%0A env.AppendUnique(SPHINX_BUILD='/usr/bin/sphinx-1.0-build')%0A else:%0A env.
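The patch probes for the versioned sphinx-1.0-build binary and keeps the plain sphinx-build as the fallback. The same first-existing-tool selection, stated directly:

import os

candidates = ['/usr/bin/sphinx-1.0-build', '/usr/bin/sphinx-build']
# Pick the first binary present on this machine, defaulting to the last.
sphinx_build = next((p for p in candidates if os.path.exists(p)),
                    candidates[-1])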
19c59d3d80b2cf64880d428d0e762d4e262385a5
Handle stale PostgreSQL (or others) more gracefully. Closes #3394. Thanks to flfr at stibo.com for the patch.
trac/db/pool.py
trac/db/pool.py
# -*- coding: utf-8 -*- # # Copyright (C) 2005 Edgewall Software # Copyright (C) 2005 Christopher Lenz <cmlenz@gmx.de> # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://trac.edgewall.com/license.html. # # This software consists of voluntary contributions made by many # individuals. For the exact contribution history, see the revision # history and logs, available at http://projects.edgewall.com/trac/. # # Author: Christopher Lenz <cmlenz@gmx.de> try: import threading except ImportError: import dummy_threading as threading threading._get_ident = lambda: 0 import time from trac.db.util import ConnectionWrapper class TimeoutError(Exception): """Exception raised by the connection pool when no connection has become available after a given timeout.""" class PooledConnection(ConnectionWrapper): """A database connection that can be pooled. When closed, it gets returned to the pool. """ def __init__(self, pool, cnx): ConnectionWrapper.__init__(self, cnx) self._pool = pool def close(self): if self.cnx: self._pool._return_cnx(self.cnx) self.cnx = None def __del__(self): self.close() class ConnectionPool(object): """A very simple connection pool implementation.""" def __init__(self, maxsize, connector, **kwargs): self._dormant = [] # inactive connections in pool self._active = {} # active connections by thread ID self._available = threading.Condition(threading.Lock()) self._maxsize = maxsize # maximum pool size self._cursize = 0 # current pool size, includes active connections self._connector = connector self._kwargs = kwargs def get_cnx(self, timeout=None): start = time.time() self._available.acquire() try: tid = threading._get_ident() if tid in self._active: self._active[tid][0] += 1 return PooledConnection(self, self._active[tid][1]) while True: if self._dormant: cnx = self._dormant.pop() break elif self._maxsize and self._cursize < self._maxsize: cnx = self._connector.get_connection(**self._kwargs) self._cursize += 1 break else: if timeout: self._available.wait(timeout) if (time.time() - start) >= timeout: raise TimeoutError, 'Unable to get database ' \ 'connection within %d seconds' \ % timeout else: self._available.wait() self._active[tid] = [1, cnx] return PooledConnection(self, cnx) finally: self._available.release() def _return_cnx(self, cnx): self._available.acquire() try: tid = threading._get_ident() if tid in self._active: num, cnx_ = self._active.get(tid) assert cnx is cnx_ if num > 1: self._active[tid][0] = num - 1 else: del self._active[tid] if cnx not in self._dormant: cnx.rollback() if cnx.poolable: self._dormant.append(cnx) else: self._cursize -= 1 self._available.notify() finally: self._available.release() def shutdown(self): self._available.acquire() try: for cnx in self._dormant: cnx.cnx.close() finally: self._available.release()
Python
0.000001
@@ -2266,37 +2266,217 @@ -break +try:%0A cnx.cursor() # check whether the connection is stale%0A break%0A except Exception:%0A cnx.close() %0A
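The fix probes each pooled connection before handing it out: cnx.cursor() raises on a connection the server has dropped, in which case it is closed and the loop falls through to create or wait for another. The liveness-check idiom on its own (any DB-API connection object works):

def checkout(dormant, make_connection):
    """Return a live connection, discarding stale pooled ones."""
    while dormant:
        cnx = dormant.pop()
        try:
            cnx.cursor()   # cheap probe; raises if the connection went stale
            return cnx
        except Exception:
            cnx.close()    # drop the dead connection and keep looking
    return make_connection()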
f0cd7923a82cd06d15f486db608a5dbc1fb15b1f
rewrite timer logic
traderthread.py
traderthread.py
import sys import decimal from decimal import Decimal import PyQt4 from PyQt4 import QtCore from PyQt4.QtCore import pyqtSlot from PyQt4.QtCore import pyqtSignal class Tops(object): ask1 = Decimal('0') ask1_amount = Decimal('0') bid1 = Decimal('0') bid1_amount = Decimal('0') ask2 = Decimal('0') ask2_amount = Decimal('0') bid2 = Decimal('0') bid2_amount = Decimal('0') ask3 = Decimal('0') ask3_amount = Decimal('0') bid3 = Decimal('0') bid3_amount = Decimal('0') class ArbData(object): def __init__(self, direction = "Forward", p = 0, usdProfit = 0, usdInvestment = 0): self.tradeDirection = direction self.profit = p self.usdProfit = usdProfit self.usdInvestment = usdInvestment class Balance(object): balance_usd = 0 balance_ltc = 0 balance_btc = 0 class TraderThread(QtCore.QThread): timer = QtCore.QTimer() stopwatch = QtCore.QTime() tradeTimer = QtCore.QTimer() updateData = pyqtSignal(Tops, ArbData, ArbData) updateLag = pyqtSignal(int) def __init__(self, parent, tradeAPI, p1='btcusd', p2='ltcbtc', p3 = 'ltcusd', refreshInterval=900, tradeInterval=15000): super(TraderThread, self).__init__(parent) self.tradeAPI = tradeAPI self.k = Decimal('1') - (tradeAPI.Comission() / Decimal('100.0')) self.p1 = p1 self.p2 = p2 self.p3 = p3 self.timer.setInterval(refreshInterval) self.timer.setSingleShot(0) self.tradeTimer.setInterval(tradeInterval) self.tradeTimer.setSingleShot(1) self.timer.timeout.connect(self.onTimer) @pyqtSlot() def onTimer(self): self.stopwatch.start() t = self.GetTops() elapsed = self.stopwatch.elapsed() self.updateLag.emit(elapsed) a1, b1 = t.ask1, t.bid1 a2, b2 = t.ask2, t.bid2 a3, b3 = t.ask3, t.bid3 k3 = self.k * self.k * self.k profit1 = k3 * (b3) / ((a1) * (a2)) - Decimal('1.0') profit2 = k3 * (b1) * (b2) / (a3) - Decimal('1.0') a1 = ArbData("Forward", profit1) a2 = ArbData("Backward", profit2) self.updateData.emit(t, a1, a2) def run(self): self.timer.start() self.exec_() def GetTop(self, pair): r = self.tradeAPI.GetDepth(pair, 1, 1) return ((r['ask'][0]['price'], r['ask'][0]['amount']), (r['bid'][0]['price'], r['bid'][0]['amount'])) def GetTops(self): result = Tops() r = self.tradeAPI.GetDepths([self.p1, self.p2, self.p3], 1, 1, 5) p1Top = ((r[self.p1]['ask'][0]['price'], r[self.p1]['ask'][0]['amount']), (r[self.p1]['bid'][0]['price'], r[self.p1]['bid'][0]['amount'])) p2Top = ((r[self.p2]['ask'][0]['price'], r[self.p2]['ask'][0]['amount']), (r[self.p2]['bid'][0]['price'], r[self.p2]['bid'][0]['amount'])) p3Top = ((r[self.p3]['ask'][0]['price'], r[self.p3]['ask'][0]['amount']), (r[self.p3]['bid'][0]['price'], r[self.p3]['bid'][0]['amount'])) result.ask1 = p1Top[0][0] result.ask1_amount = p1Top[0][1] result.bid1 = p1Top[1][0] result.bid1_amount = p1Top[1][1] result.ask2 = p2Top[0][0] result.ask2_amount = p2Top[0][1] result.bid2 = p2Top[1][0] result.bid2_amount = p2Top[1][1] result.ask3 = p3Top[0][0] result.ask3_amount = p3Top[0][1] result.bid3 = p3Top[1][0] result.bid3_amount = p3Top[1][1] return result
Python
0
@@ -1103,9 +1103,9 @@ val= -9 +1 00, @@ -1396,17 +1396,20 @@ gleShot( -0 +True )%0A se @@ -1483,17 +1483,20 @@ gleShot( -1 +True )%0A se @@ -2059,16 +2059,44 @@ a1, a2) +%0A %0A self.timer.start() %0A%0A def
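Beyond swapping 0/1 for proper booleans in setSingleShot, the rewrite has onTimer restart the single-shot timer after the fetch completes, so ticks can never overlap a slow GetTops() call. The restart-from-the-handler idiom with a bare QTimer (PyQt4, matching the record):

from PyQt4 import QtCore

class Poller(QtCore.QObject):
    def __init__(self, interval_ms=100, parent=None):
        super(Poller, self).__init__(parent)
        self.timer = QtCore.QTimer(self)
        self.timer.setInterval(interval_ms)
        self.timer.setSingleShot(True)       # one tick per start()
        self.timer.timeout.connect(self.on_timer)
        self.timer.start()

    def on_timer(self):
        self.do_work()       # however long this takes...
        self.timer.start()   # ...the next interval starts counting only now

    def do_work(self):
        pass                 # placeholder for the real fetch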
0f02210e4f57f0023a2be6b11059bc5754f4d467
Update version.py
ttim/version.py
ttim/version.py
__version__='0.4.0' #__build__='4.0.0.0'
Python
0
@@ -10,17 +10,17 @@ __='0.4. -0 +1 '%0A#__bui
91a25c6933015dc09fdf63c6bca75dfaf6115c47
fix test
backend/src/tests/backend/plugins/misc/test_filter_validators.py
backend/src/tests/backend/plugins/misc/test_filter_validators.py
# This file is part of the GOsa framework. # # http://gosa-project.org # # Copyright: # (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de # # See the LICENSE file in the project's top-level directory for details. import unittest from gosa.backend.plugins.misc.filter_validators import * class FilterValidatorTests(unittest.TestCase): def test_IsValidHostName(self): filter = IsValidHostName() (res, errors) = filter.process(None, None, ["www.gonicus.de"]) assert res == True assert len(errors) == 0 (res, errors) = filter.process(None, None, ["1www.gonicus.de"]) assert res == False assert len(errors) == 1 @unittest.mock.patch.object(PluginRegistry, 'getInstance') def test_IsExistingDN(self, mockedRegistry): # mockup ObjectIndex.search mockedRegistry.return_value.search.return_value = [] # start the tests filter = IsExistingDN() props = { 'test': { 'value': ['test'] }} (res, errors) = filter.process(props, 'test', ["test"]) assert res is False assert len(errors) == 1 mockedRegistry.return_value.search.return_value = [1] (res, errors) = filter.process(props, 'test', ["test"]) assert res is True assert len(errors) == 0 @unittest.mock.patch.object(PluginRegistry, 'getInstance') def test_IsExistingDnOfType(self, mockedRegistry): # mockup ObjectIndex.search mockedRegistry.return_value.search.return_value = [] # start the tests filter = IsExistingDnOfType() (res, errors) = filter.process(None, None, ["test"], "type") assert res == False assert len(errors) == 1 mockedRegistry.return_value.search.return_value = [1] (res, errors) = filter.process(None, None, ["test"], "type") assert res == True assert len(errors) == 0 @unittest.mock.patch.object(PluginRegistry, 'getInstance') def test_ObjectWithPropertyExists(self, mocked_registry): mocked_registry.return_value.search.return_value = [] # start the tests filter = ObjectWithPropertyExists() (res, errors) = filter.process(None, None, ["test"], "type", "attr") assert res is False assert len(errors) == 1 mocked_registry.return_value.search.return_value = [1] (res, errors) = filter.process(None, None, ["test"], "type", "attr") assert res is True assert len(errors) == 0
Python
0.000002
@@ -1060,32 +1060,33 @@ , 'test', %5B%22test +1 %22%5D)%0A asse @@ -1256,16 +1256,17 @@ , %5B%22test +1 %22%5D)%0A
6a46f45bec528ccb342661a07db09cdd2d8ad8a2
Correct statistics in qpe test (#328)
projectq/setups/decompositions/phaseestimation_test.py
projectq/setups/decompositions/phaseestimation_test.py
# Copyright 2017 ProjectQ-Framework (www.projectq.ch) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. "Tests for projectq.setups.decompositions.phaseestimation.py." import cmath import numpy as np import pytest from projectq import MainEngine from projectq.backends import Simulator from projectq.cengines import (AutoReplacer, DecompositionRuleSet, DummyEngine, InstructionFilter, MainEngine) from projectq.ops import X, H, All, Measure, Tensor, Ph, CNOT, StatePreparation from projectq.ops import (BasicGate) from projectq.ops import QPE from projectq.setups.decompositions import phaseestimation as pe from projectq.setups.decompositions import qft2crandhadamard as dqft import projectq.setups.decompositions.stateprep2cnot as stateprep2cnot import projectq.setups.decompositions.uniformlycontrolledr2cnot as ucr2cnot def test_simple_test_X_eigenvectors(): rule_set = DecompositionRuleSet(modules=[pe, dqft]) eng = MainEngine(backend=Simulator(), engine_list=[AutoReplacer(rule_set), ]) results = np.array([]) for i in range(100): autovector = eng.allocate_qureg(1) X | autovector H | autovector unit = X ancillas = eng.allocate_qureg(1) QPE(unit) | (ancillas, autovector) All(Measure) | ancillas fasebinlist = [int(q) for q in ancillas] fasebin = ''.join(str(j) for j in fasebinlist) faseint = int(fasebin, 2) phase = faseint / (2. ** (len(ancillas))) results = np.append(results, phase) All(Measure) | autovector eng.flush() num_phase = (results == 0.5).sum() assert num_phase/100. >= 0.35, "Statistics phase calculation are not correct (%f vs. %f)" % (num_phase/100., 0.35) def test_Ph_eigenvectors(): rule_set = DecompositionRuleSet(modules=[pe, dqft]) eng = MainEngine(backend=Simulator(), engine_list=[AutoReplacer(rule_set), ]) results = np.array([]) for i in range(100): autovector = eng.allocate_qureg(1) theta = cmath.pi*2.*0.125 unit = Ph(theta) ancillas = eng.allocate_qureg(3) QPE(unit) | (ancillas, autovector) All(Measure) | ancillas fasebinlist = [int(q) for q in ancillas] fasebin = ''.join(str(j) for j in fasebinlist) faseint = int(fasebin, 2) phase = faseint / (2. ** (len(ancillas))) results = np.append(results, phase) All(Measure) | autovector eng.flush() num_phase = (results == 0.125).sum() assert num_phase/100. >= 0.35, "Statistics phase calculation are not correct (%f vs. %f)" % (num_phase/100., 0.35) def two_qubit_gate(system_q, time): CNOT | (system_q[0], system_q[1]) Ph(2.0*cmath.pi*(time * 0.125)) | system_q[1] CNOT | (system_q[0], system_q[1]) def test_2qubitsPh_andfunction_eigenvectors(): rule_set = DecompositionRuleSet(modules=[pe, dqft]) eng = MainEngine(backend=Simulator(), engine_list=[AutoReplacer(rule_set), ]) results = np.array([]) for i in range(100): autovector = eng.allocate_qureg(2) X | autovector[0] ancillas = eng.allocate_qureg(3) QPE(two_qubit_gate) | (ancillas, autovector) All(Measure) | ancillas fasebinlist = [int(q) for q in ancillas] fasebin = ''.join(str(j) for j in fasebinlist) faseint = int(fasebin, 2) phase = faseint / (2. 
** (len(ancillas))) results = np.append(results, phase) All(Measure) | autovector eng.flush() num_phase = (results == 0.125).sum() assert num_phase/100. >= 0.35, "Statistics phase calculation are not correct (%f vs. %f)" % (num_phase/100., 0.35) def test_X_no_eigenvectors(): rule_set = DecompositionRuleSet(modules=[pe, dqft, stateprep2cnot, ucr2cnot]) eng = MainEngine(backend=Simulator(), engine_list=[AutoReplacer(rule_set), ]) results = np.array([]) results_plus = np.array([]) results_minus = np.array([]) for i in range(100): autovector = eng.allocate_qureg(1) amplitude0 = (np.sqrt(2) + np.sqrt(6))/4. amplitude1 = (np.sqrt(2) - np.sqrt(6))/4. StatePreparation([amplitude0, amplitude1]) | autovector unit = X ancillas = eng.allocate_qureg(1) QPE(unit) | (ancillas, autovector) All(Measure) | ancillas fasebinlist = [int(q) for q in ancillas] fasebin = ''.join(str(j) for j in fasebinlist) faseint = int(fasebin, 2) phase = faseint / (2. ** (len(ancillas))) results = np.append(results, phase) Tensor(H) | autovector if np.allclose(phase, .0, rtol=1e-1): results_plus = np.append(results_plus, phase) All(Measure) | autovector autovector_result = int(autovector) assert autovector_result == 0 elif np.allclose(phase, .5, rtol=1e-1): results_minus = np.append(results_minus, phase) All(Measure) | autovector autovector_result = int(autovector) assert autovector_result == 1 eng.flush() total = len(results_plus) + len(results_minus) plus_probability = len(results_plus)/100. assert total == pytest.approx(100, abs=5) assert plus_probability == pytest.approx(1./4., abs = 1e-1), "Statistics on |+> probability are not correct (%f vs. %f)" % (plus_probability, 1./4.) def test_string(): unit = X gate = QPE(unit) assert (str(gate) == "QPE(X)")
Python
0
@@ -4267,33 +4267,33 @@ hase/100. %3E= 0.3 -5 +4 , %22Statistics ph @@ -4351,33 +4351,33 @@ _phase/100., 0.3 -5 +4 )%0A%0A%0Adef test_X_n
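The fix relaxes the pass threshold from 0.35 to 0.34. In the single-ancilla X-eigenvector test the measured phase equals 0.5 with probability 1/2, so the count over 100 shots is roughly Binomial(100, 0.5); a normal-approximation estimate of how often the old cut would spuriously fail (a back-of-envelope check, not from the record):

from math import erf, sqrt

n, p = 100, 0.5
mu, sd = n * p, sqrt(n * p * (1 - p))   # mean 50, standard deviation 5

def norm_cdf(x):
    return 0.5 * (1.0 + erf(x / sqrt(2.0)))

# P(count < 35), i.e. num_phase/100 < 0.35, with continuity correction:
# about 1e-3 per assertion, so occasional flaky failures at 0.35.
print(norm_cdf((34.5 - mu) / sd))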
1df785d6f337fa2dc9e42c808fab79a5a2827258
remove line to run tests
hs_modflow_modelinstance/tests/__init__.py
hs_modflow_modelinstance/tests/__init__.py
from test_modflow_modelinstance_metadata import *
Python
0.000002
@@ -1,49 +1 @@ -from test_modflow_modelinstance_metadata import * +%0A
40a98808da485edeaa87bb30f0e0749401c500da
update documentation configuration
_doc/sphinxdoc/source/conf3/conf.py
_doc/sphinxdoc/source/conf3/conf.py
import sys import os import datetime import re import sphinx_bootstrap_theme source_path = os.path.normpath( os.path.join( os.path.abspath( os.path.split(__file__)[0]), "..")) try: from conf_base import * except ImportError: sys.path.append(source_path) from conf_base import * html_theme = 'bootstrap' html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() templates_path = [os.path.join(source_path, 'phdoc_static3')] html_static_path = templates_path if not os.path.exists(templates_path[0]): raise FileNotFoundError(templates_path[0]) html_logo = "project_ico_small.png" html_sidebars = {} if html_theme == "bootstrap": html_theme_options = { 'navbar_title': "home", 'navbar_site_name': "Site", 'navbar_links': [ ("XD", "http://www.xavierdupre.fr", True), ("blog", "blog/main_0000.html", True), ("index", "genindex"), ], 'navbar_sidebarrel': False, 'navbar_pagenav': True, 'navbar_pagenav_name': "Page", 'globaltoc_depth': 3, 'globaltoc_includehidden': "true", 'navbar_class': "navbar navbar-inverse", 'navbar_fixed_top': "true", 'source_link_position': "nav", 'bootswatch_theme': "yeti", # united = weird colors, sandstone=green, simplex=red, paper=trop bleu # lumen: OK # to try, yeti, flatly, paper 'bootstrap_version': "3", } blog_root = "http://www.xavierdupre.fr/app/ensae_teaching_cs/helpsphinx3/" blog_background = False html_context = { 'css_files': get_default_stylesheet() + ['_static/my-styles.css', '_static/gallery.css'], }
Python
0.000001
@@ -1254,19 +1254,22 @@ tion': %22 -nav +footer %22,%0A
00990ff3905b70c4833053dbea6505998c3d9685
Simplify import clauses
modules/thm/python/peacock/ModelBuilder/FlowChannelParametersCalculator.py
modules/thm/python/peacock/ModelBuilder/FlowChannelParametersCalculator.py
import os, sys from PyQt5 import QtCore, QtWidgets, QtGui from PyQt5.QtCore import Qt, pyqtSignal, pyqtSlot, QProcess from PyQt5.QtWidgets import QSizePolicy, QLabel from PyQt5.QtGui import QDoubleValidator import peacock import FlowChannelGeometries class FlowChannelParametersCalculator(QtWidgets.QWidget, peacock.base.Plugin): """ Plugin to compute flow channel parameters """ UNITS_WIDTH = 25 def __init__(self, **kwargs): super(FlowChannelParametersCalculator, self).__init__(**kwargs) self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred) num_geometris = len(FlowChannelGeometries.GEOMETRIES) self.ctlInputs = [] self.ctlParams = [] self.btnCalculate = [] self.lblErrorMessage = [] self.MainLayout = QtWidgets.QVBoxLayout(self) self.ctlFChType = QtWidgets.QComboBox(self) self.MainLayout.addWidget(self.ctlFChType) self.GeometryLayout = QtWidgets.QStackedLayout() for i, g in enumerate(FlowChannelGeometries.GEOMETRIES): gname = g.name() self.ctlFChType.addItem(gname, i) paramsLayout = QtWidgets.QFormLayout() paramsLayout.setContentsMargins(5, 0, 5, 0) paramsLayout.setLabelAlignment(QtCore.Qt.AlignLeft) paramsLayout.setFormAlignment(QtCore.Qt.AlignLeft) paramsLayout.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow) self.ctlInputs.append({}) for p in g.inputs(): name = p['name'] unit = p['unit'] hint = p['hint'] lblInput = QtWidgets.QLabel(name, self) lblInput.setToolTip(hint) self.ctlInputs[i][name] = QtWidgets.QLineEdit(self) self.ctlInputs[i][name].setToolTip(hint) validator = QDoubleValidator(self) validator.setBottom(0.) self.ctlInputs[i][name].setValidator(validator) self.ctlInputs[i][name].textChanged.connect(self.onModified) lblUnit = QtWidgets.QLabel(unit, self) lblUnit.setFixedWidth(self.UNITS_WIDTH) hbox = QtWidgets.QHBoxLayout() hbox.addWidget(self.ctlInputs[i][name]) hbox.addWidget(lblUnit) paramsLayout.addRow(lblInput, hbox) icon_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", 'icons', 'calculator.svg')) icon = QtGui.QIcon(icon_path) btnCalc = QtWidgets.QPushButton(self) btnCalc.clicked.connect(self.onCalculate) btnCalc.setIcon(icon) btnCalc.setToolTip("Calculate flow channel parameters") btnCalc.setAutoDefault(True) btnCalc.setMaximumWidth(62) paramsLayout.addRow("", btnCalc) self.btnCalculate.append(btnCalc) # horz line ctlLine = QtWidgets.QFrame(self) ctlLine.setFrameShape(QtWidgets.QFrame.HLine) ctlLine.setFrameShadow(QtWidgets.QFrame.Sunken) paramsLayout.addRow(ctlLine) self.ctlParams.append({}) for p in g.outputs(): name = p['name'] unit = p['unit'] hint = p['hint'] lblParams = QtWidgets.QLabel(name, self) lblParams.setToolTip(hint) palette = lblParams.palette() palette.setCurrentColorGroup(QtGui.QPalette.Disabled) palette.setColorGroup(QtGui.QPalette.Normal, palette.windowText(), palette.button(), palette.light(), palette.dark(), palette.mid(), palette.text(), palette.brightText(), palette.base(), palette.window()) lblParams.setPalette(palette) self.ctlParams[i][name] = QtWidgets.QLineEdit(self) self.ctlParams[i][name].setReadOnly(True) self.ctlParams[i][name].setToolTip(hint) lblUnit = QtWidgets.QLabel(unit, self) lblUnit.setFixedWidth(self.UNITS_WIDTH) lblUnit.setPalette(palette) hbox = QtWidgets.QHBoxLayout() hbox.addWidget(self.ctlParams[i][name]) hbox.addWidget(lblUnit) paramsLayout.addRow(lblParams, hbox) widget = QtWidgets.QWidget() widget.setLayout(paramsLayout) self.GeometryLayout.addWidget(widget) lblErrorMsg = QLabel(self) lblErrorMsg.setStyleSheet("QLabel { color: red; }"); paramsLayout.addRow(lblErrorMsg) 
self.lblErrorMessage.append(lblErrorMsg) self.MainLayout.addLayout(self.GeometryLayout) shortcut = QtWidgets.QShortcut(QtGui.QKeySequence("Ctrl+Return"), self) shortcut.activated.connect(self.onCtrlReturn) self.updateWidgets() self.setMainLayoutName('MainLayout') self.ctlFChType.model().sort(0) self.ctlFChType.activated[int].connect(self.onGeometryTypeChanged) self.setup() self.store(key='default') def _loadPlugin(self): """ Helper for loading plugin state. """ self.load() def setup(self): super(peacock.base.Plugin, self).setup() pass def updateWidgets(self): idx = self.ctlFChType.currentData() enable = True for k, input in self.ctlInputs[idx].items(): if len(input.text()) == 0: enable = False break self.btnCalculate[idx].setEnabled(enable) def onModified(self): self.updateWidgets() def onCtrlReturn(self): idx = self.ctlFChType.currentData() if self.btnCalculate[idx].isEnabled(): self.btnCalculate[idx].animateClick() def onCalculate(self): self.computeParameters() def computeParameters(self): """ Called when the computation of parameters is requested """ g = self.GeometryLayout.currentIndex() geom = FlowChannelGeometries.GEOMETRIES[g] args = {} for i in geom.inputs(): name = i['name'] val = float(self.ctlInputs[g][name].displayText()) args[name] = val params = geom.compute(**args) if 'error' in params: self.lblErrorMessage[g].setText(params['error']) for o in geom.outputs(): name = o['name'] self.ctlParams[g][name].setText('error') else: self.lblErrorMessage[g].setText("") for o in geom.outputs(): name = o['name'] s = "%e" % params[name] self.ctlParams[g][name].setText(s) def onGeometryTypeChanged(self, index): page = self.ctlFChType.itemData(index) self.GeometryLayout.setCurrentIndex(page) self.updateWidgets() def main(size=None): """ Run the FlowChannelParametersCalculator alone """ from ModelBuilderPluginManager import ModelBuilderPluginManager widget = ModelBuilderPluginManager(plugins=[FlowChannelParametersCalculator]) widget.MainLayout.setContentsMargins(5, 5, 5, 5) widget.show() widget.setWindowTitle("Flow Channel Parameter Calculator") return widget if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) widget = main(size=[400,100]) sys.exit(app.exec_())
Python
0.000003
@@ -55,157 +55,8 @@ Gui%0A -from PyQt5.QtCore import Qt, pyqtSignal, pyqtSlot, QProcess%0Afrom PyQt5.QtWidgets import QSizePolicy, QLabel%0Afrom PyQt5.QtGui import QDoubleValidator%0A impo @@ -395,16 +395,26 @@ ePolicy( +QtWidgets. QSizePol @@ -428,16 +428,26 @@ ferred, +QtWidgets. QSizePol @@ -1750,16 +1750,22 @@ dator = +QtGui. QDoubleV @@ -4430,16 +4430,26 @@ orMsg = +QtWidgets. QLabel(s
32f0270ee3049e1a624d86f7f0a68bb4ea55c5f1
Correct IrActionsActWindow.read overloading
smile_base/models/ir_actions.py
smile_base/models/ir_actions.py
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2010 Smile (<http://www.smile.fr>). All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import api, models, tools from openerp.tools.safe_eval import safe_eval as eval from openerp.addons.base.ir.ir_actions import ir_actions_act_window from ..tools import unquote class IrActionsActWindow(models.Model): _inherit = 'ir.actions.act_window' @api.one def _update_context(self): eval_dict = { 'active_id': unquote("active_id"), 'active_ids': unquote("active_ids"), 'active_model': unquote("active_model"), 'uid': unquote("uid"), 'user': unquote("user"), 'context': self._context, } try: context = eval(self.context or '{}', eval_dict) or {} if 'act_window_id' not in context: context['act_window_id'] = self.id self.context = tools.ustr(context) except: pass @api.model def create(self, vals): act_window = super(IrActionsActWindow, self).create(vals) act_window._update_context() return act_window @api.multi def write(self, vals): res = super(IrActionsActWindow, self).write(vals) self._update_context() return res @api.multi def read(self, fields=None, load='_classic_read'): results = super(ir_actions_act_window, self).read(fields, load) # Evaluate context value with user localdict = { 'active_model': unquote('active_model'), 'active_id': unquote('active_id'), 'active_ids': unquote('active_ids'), 'uid': unquote('uid'), 'context': unquote('context'), 'user': self.env.user, } for res in results: if 'context' in res: try: with tools.mute_logger("openerp.tools.safe_eval"): res['context'] = tools.ustr(eval(res['context'], localdict)) except: continue # Evaluate help if not fields or 'help' in fields: cr, uid, context = self.env.args eval_dict = { 'active_model': context.get('active_model'), 'active_id': context.get('active_id'), 'active_ids': context.get('active_ids'), 'uid': uid, } for res in results: model = res.get('res_model') if model and self.pool.get(model): try: with tools.mute_logger("openerp.tools.safe_eval"): eval_context = eval(res['context'] or "{}", eval_dict) or {} except: continue custom_context = dict(context, **eval_context) res['help'] = self.pool[model].get_empty_list_help(cr, uid, res.get('help', ""), context=custom_context) return results ir_actions_act_window.read = read
Python
0
@@ -1001,16 +1001,30 @@ models, + SUPERUSER_ID, tools%0Af @@ -1079,76 +1079,8 @@ eval -%0Afrom openerp.addons.base.ir.ir_actions import ir_actions_act_window %0A%0Afr @@ -2112,20 +2112,12 @@ es%0A%0A -%0A@api.multi%0A + def @@ -2131,44 +2131,167 @@ lf, -fields=None, load='_classic_read'):%0A +cr, uid, ids, fields=None, context=None, load='_classic_read'):%0A ids_int = isinstance(ids, (int, long))%0A if ids_int:%0A ids = %5Bids%5D%0A @@ -2306,32 +2306,29 @@ = super( -ir_a +IrA ctions -_act_w +ActW indow, s @@ -2341,22 +2341,45 @@ ead( -fields +cr, uid, ids, fields, context , load)%0A @@ -2374,16 +2374,20 @@ , load)%0A + # Ev @@ -2417,16 +2417,20 @@ th user%0A + loca @@ -2439,32 +2439,36 @@ ict = %7B%0A + 'active_model': @@ -2484,32 +2484,36 @@ active_model'),%0A + 'active_ @@ -2539,32 +2539,36 @@ e_id'),%0A + 'active_ids': un @@ -2588,32 +2588,36 @@ _ids'),%0A + 'uid': unquote(' @@ -2623,32 +2623,36 @@ 'uid'),%0A + + 'context': unquo @@ -2666,32 +2666,36 @@ text'),%0A + 'user': self.env @@ -2695,718 +2695,139 @@ elf. -env.user,%0A %7D%0A for res in results:%0A if 'context' in res:%0A try:%0A with tools.mute_logger(%22openerp.tools.safe_eval%22):%0A res%5B'context'%5D = tools.ustr(eval(res%5B'context'%5D, localdict))%0A except:%0A continue%0A # Evaluate help%0A if not fields or 'help' in fields:%0A cr, uid, context = self.env.args%0A eval_dict = %7B%0A 'active_model': context.get('active_model'),%0A 'active_id': context.get('active_id'),%0A 'active_ids': context.get('active_ids'),%0A 'uid': uid,%0A %7D%0A for res in results:%0A model = res.get('res_model')%0A if model and self.pool.get(model) +pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context),%0A %7D%0A for res in results:%0A if 'context' in res :%0A @@ -2944,23 +2944,36 @@ -eval_ +res%5B' context +'%5D = +tools.ustr( eval @@ -2991,34 +2991,21 @@ xt'%5D - or %22%7B%7D%22, ev +, loc al -_ dict) - or %7B%7D +) %0A @@ -3066,234 +3066,46 @@ - custom_context = dict(context, **eval_context)%0A res%5B'help'%5D = self.pool%5Bmodel%5D.get_empty_list_help(cr, uid, res.get('help', %22%22), context=custom_context)%0A return results%0A%0Air_actions_act_window.read = read +return results%5B0%5D if ids_int else results %0A
e4942c16322829d37f780d539517fe10e50e0e39
Fix bad var
hubblestack/extmods/grains/splunkconfig.py
hubblestack/extmods/grains/splunkconfig.py
# -*- coding: utf-8 -*- ''' Attempt to load alternate splunk config from the hubble.d/ directory and store in grains for use by the splunk returners. This way splunk config changes don't require a hubble restart. ''' import os import yaml def splunkconfig(): ''' Walk the hubble.d/ directory and read in any .conf files using YAML. If splunk config is found, place it in grains and return. ''' configdir = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble.d') ret = {} if not os.path.isdir(configdir): return ret try: for root, dirs, files in os.walk(configdir): for f in files: if f.endswith('.conf'): fpath = os.path.join(root, fpath) try: with open(fpath, 'r') as fh: config = yaml.safe_load(fh) if config.get('hubblestack', {}).get('returner', {}).get('splunk'): ret = {'hubblestack': config['hubblestack']} except: pass except: pass return ret
Python
0.000618
@@ -734,20 +734,16 @@ (root, f -path )%0A
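The one-token bug: inside os.walk the code joined root with fpath, the very name the line was about to define, instead of the filename f, so fpath was used before assignment. The corrected traversal on its own:

import os
import yaml

def load_confs(configdir):
    configs = []
    for root, dirs, files in os.walk(configdir):
        for f in files:
            if f.endswith('.conf'):
                fpath = os.path.join(root, f)   # join the filename, not fpath
                with open(fpath, 'r') as fh:
                    configs.append(yaml.safe_load(fh))
    return configs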
e30304ee1cd90763f072e350aa850313e1d4a06b
version bump for pypi
gooey/__init__.py
gooey/__init__.py
import os from gooey.python_bindings.gooey_decorator import Gooey from gooey.python_bindings.gooey_parser import GooeyParser # from gooey.gui import application version_file = os.path.join(os.path.dirname(__file__), 'version') __version__ = '0.1.9'
Python
0
@@ -251,9 +251,10 @@ '0.1 -.9 +0.0 '%0D%0A
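Decoded, the patch bumps the version string from '0.1.9' to '0.10.0' (the %0D%0A in the hunk shows the file uses CRLF line endings); after the commit the last line reads:

    __version__ = '0.10.0'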
c9e8462dacfd511ca996cf73d7a08e1fdfeded01
fix file permission of stom collector
intelmq/bots/collectors/stomp/collector.py
intelmq/bots/collectors/stomp/collector.py
# -*- coding: utf-8 -*-
import os.path

from intelmq.lib.bot import CollectorBot

try:
    import stomp

    class StompListener(stomp.listener.PrintingListener):
        """ the stomp listener gets called asynchronously for
        every STOMP message """

        def __init__(self, n6stompcollector):
            self.n6stomper = n6stompcollector

        def on_heartbeat_timeout(self):
            self.n6stomper.logger.info("Heartbeat timeout. Attempting to re-connect.")
            self.n6stomper.conn.disconnect()
            status = self.n6stomper.conn.connect(wait=False)
            self.n6stomper.logger.info("Re-connected: %s.", status)

        def on_error(self, headers, message):
            self.n6stomper.logger.error('Received an error: %r.', message)

        def on_message(self, headers, message):
            self.n6stomper.logger.debug('Receive message %r...', message[:500])
            report = self.n6stomper.new_report()
            report.add("raw", message.rstrip())
            report.add("feed.url", "stomp://" + self.n6stomper.parameters.server +
                       ":" + str(self.n6stomper.parameters.port) +
                       "/" + self.n6stomper.parameters.exchange)
            self.n6stomper.send_message(report)
except ImportError:
    stomp = None


class StompCollectorBot(CollectorBot):
    """ main class for the STOMP protocol collector """

    def init(self):
        if stomp is None:
            self.logger.error('Could not import stomp. Please install it.')
            self.stop()

        self.server = getattr(self.parameters, 'server', 'n6stream.cert.pl')
        self.port = getattr(self.parameters, 'port', 61614)
        self.exchange = getattr(self.parameters, 'exchange', '')
        self.heartbeat = getattr(self.parameters, 'heartbeat', 60000)
        self.ssl_ca_cert = getattr(self.parameters, 'ssl_ca_certificate', 'ca.pem')
        self.ssl_cl_cert = getattr(self.parameters, 'ssl_client_certificate', 'client.pem')
        self.ssl_cl_cert_key = getattr(self.parameters, 'ssl_client_certificate_key', 'client.key')
        self.http_verify_cert = getattr(self.parameters, 'http_verify_cert', True)

        # check if certificates exist
        for f in [self.ssl_ca_cert, self.ssl_cl_cert, self.ssl_cl_cert_key]:
            if not os.path.isfile(f):
                raise ValueError("Could not open file %r." % f)

        _host = [(self.server, self.port)]
        self.conn = stomp.Connection(host_and_ports=_host, use_ssl=True,
                                     ssl_key_file=self.ssl_cl_cert_key,
                                     ssl_cert_file=self.ssl_cl_cert,
                                     ssl_ca_certs=self.ssl_ca_cert,
                                     wait_on_receipt=True,
                                     heartbeats=(self.heartbeat, self.heartbeat))
        self.conn.set_listener('', StompListener(self))
        self.conn.start()
        self.conn.connect(wait=False)
        self.conn.subscribe(destination=self.exchange, id=1, ack='auto')
        self.logger.info('Successfully connected and subscribed to %s:%s.',
                         self.server, self.port)

    def disconnect(self):
        self.conn.disconnect()

    def process(self):
        pass


BOT = StompCollectorBot
Python
0
e8e7d188b45b06967a6f7ec210f91b1bbe4e494c
use pathlib
abilian/web/admin/panels/sysinfo.py
abilian/web/admin/panels/sysinfo.py
# coding=utf-8
"""
"""
from __future__ import absolute_import, print_function, division

import os
import sys

import pkg_resources
from pip.vcs import vcs

from flask import render_template

from ..panel import AdminPanel


class SysinfoPanel(AdminPanel):
    id = 'sysinfo'
    label = 'System information'
    icon = 'hdd'

    def get(self):
        uname = os.popen("uname -a").read()
        python_version = sys.version.strip()

        packages = []

        for dist in pkg_resources.working_set:
            package = dict(
                name=dist.project_name,
                key=dist.key,
                version=dist.version if dist.has_version() else u'Unknown version',
                vcs=None,
            )

            location = os.path.normcase(os.path.abspath(dist.location))
            vcs_name = vcs.get_backend_name(location)
            if vcs_name:
                vc = vcs.get_backend_from_location(location)()
                url, revision = vc.get_info(location)
                package['vcs'] = dict(name=vcs_name, url=url, revision=revision)

            packages.append(package)

        packages.sort(key=lambda d: d.get('key', None))

        return render_template("admin/sysinfo.html",
                               python_version=python_version,
                               packages=packages,
                               uname=uname)
Python
0.000001
@@ -147,16 +147,41 @@ port vcs +%0Afrom pathlib import Path %0A%0Afrom f @@ -707,37 +707,17 @@ n = -os.path.normcase(os.path.absp +unicode(P ath( @@ -730,16 +730,27 @@ ocation) +.absolute() )%0A @@ -787,24 +787,25 @@ e(location)%0A +%0A if vcs
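Decoded, the patch adds a pathlib import and swaps the os.path normalization for a Path call (this is still Python 2, hence the unicode() wrapper); after the commit the relevant lines read roughly:

    from pathlib import Path
    ...
            location = unicode(Path(dist.location).absolute())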
d971b8c7d0261ee0774ccecf41b0484cff1dd62c
Change url config for image to see if it loads in production this way instead
source/services/imdb_service.py
source/services/imdb_service.py
import requests
import re
from bs4 import BeautifulSoup

from source.models.technical_specs import TechnicalSpecs


class ImdbService:
    __URL = 'http://www.imdb.com/title/'
    __API_URL = 'http://www.imdb.com/xml/find?'
    __OMDB_URL = 'http://www.omdbapi.com/?'
    __SEPERATOR = '-'

    def __init__(self, title):
        self.title = title
        self.id = self.get_movie_id()

    def get_tech_spec(self):
        search_url = self.__URL + str(self.id) + '/technical?'
        payload = {'ref_': 'tt_dt_spec'}
        technical_page = requests.get(search_url, data=payload)
        contents = technical_page.text
        soup = BeautifulSoup(contents, 'lxml')
        data_table = soup.find('tbody')
        rows = data_table.find_all('td')
        specs = self.get_specs(rows)
        specs.link = technical_page.url
        return specs

    def get_artwork(self):
        payload = {'i': self.id, 'plot': 'short', 'r': 'json'}
        response = requests.post(self.__OMDB_URL, params=payload)
        movie_info = response.json()
        artwork_url = movie_info['Poster']

        return self.format_artwork_url(artwork_url)

    def get_movie_id(self):
        search_title = self.format_title()
        payload = {'json': '1', 'nr': 1, 'tt': 'on', 'q': search_title}
        response = requests.post(self.__API_URL, data=payload)
        movie_info = response.json()
        try:
            movie_id = movie_info['title_popular'][0]['id']
        except:
            movie_id = movie_info['title_approx'][0]['id']
        return movie_id

    def format_title(self):
        return self.title.replace(' ', self.__SEPERATOR)

    def get_specs(self, rows):
        tech_specs = TechnicalSpecs()
        for i in range(len(rows)):
            if 'Negative Format' in rows[i].get_text():
                tech_specs.negative_format = self.format_specification(rows[i])
            elif 'Cinematographic Process' in rows[i].get_text():
                tech_specs.cinematographic_process = self.format_specification(rows[i])
        return tech_specs

    def format_specification(self, cell):
        specs = list(cell.find_next_sibling('td').stripped_strings)
        output = []
        ''' Strip newline characters left from stripped_strings'''
        for spec in specs:
            output.append(re.sub('\s+', ' ', spec))
        return output

    def format_artwork_url(self, url):
        return url.replace('_SX300', '_SX333')
Python
0
@@ -1086,17 +1086,16 @@ oster'%5D%0A -%0A @@ -1092,28 +1092,43 @@ %5D%0A re -turn +sized_artwork_url = self.format @@ -1154,16 +1154,52 @@ k_url)%0A%0A + return resized_artwork_url%0A%0A %0A def @@ -2503,12 +2503,30 @@ , '_ -SX333 +UY460_UY0,0,333,460_AL_ ')%0A
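Decoded, the patch stores the formatted URL in an intermediate variable and swaps the size suffix substituted into the poster URL; after the commit the affected lines read roughly:

        artwork_url = movie_info['Poster']
        resized_artwork_url = self.format_artwork_url(artwork_url)

        return resized_artwork_url
    ...
        return url.replace('_SX300', '_UY460_UY0,0,333,460_AL_')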
d2ac9fd6e1bebd85b345df09e1717b65359c54bd
Correct Torfaen opening times
polling_stations/apps/data_importers/management/commands/import_torfaen.py
polling_stations/apps/data_importers/management/commands/import_torfaen.py
from addressbase.models import Address, UprnToCouncil
from core.opening_times import OpeningTimes
from data_importers.management.commands import BaseHalaroseCsvImporter
from data_importers.mixins import AdvanceVotingMixin
from pollingstations.models import AdvanceVotingStation


class Command(BaseHalaroseCsvImporter, AdvanceVotingMixin):
    council_id = "TOF"
    addresses_name = (
        "2022-05-05/2022-03-01T07:51:07.153603/polling_station_export-2022-03-01.csv"
    )
    stations_name = (
        "2022-05-05/2022-03-01T07:51:07.153603/polling_station_export-2022-03-01.csv"
    )
    elections = ["2022-05-05"]

    def address_record_to_dict(self, record):
        uprn = record.uprn.strip().lstrip("0")

        if uprn in [
            "200002953910",  # PARK HOUSE FARM, GRAIG ROAD, UPPER CWMBRAN, CWMBRAN
        ]:
            return None

        if record.housepostcode in [
            "NP4 7NW",
            "NP4 8JQ",
            "NP4 8LG",
            "NP44 1LE",
            "NP44 4QS",
            "NP4 6TX",
        ]:
            return None

        return super().address_record_to_dict(record)

    def station_record_to_dict(self, record):
        # FORGESIDE COMMUNITY CENTRE FORGESIDE COMMUNITY CENTRE BFORGESIDE BLAENAVON TORFAEN NP4 9BD
        if record.pollingstationname == "FORGESIDE COMMUNITY CENTRE":
            record = record._replace(pollingstationpostcode="NP4 9DH")

        # THORNHILL4UTOO THORNHILL COMMUNITY CENTRE LEADON COURT THORNHILL CWMBRAN TORFAEN NP44 5YZ
        if record.pollingstationname == "THORNHILL4UTOO":
            record = record._replace(pollingstationpostcode="NP44 5TZ")

        return super().station_record_to_dict(record)

    def add_advance_voting_stations(self):
        opening_times = OpeningTimes()
        opening_times.add_open_time("2022-05-01", "10:00", "16:00")
        opening_times.add_open_time("2022-05-02", "10:00", "16:00")
        advance_station = AdvanceVotingStation(
            name="Pontypool Civic Centre",
            address="""Glantorvaen Road
            Pontypool
            Torfaen
            """,
            postcode="NP4 6YB",
            location=Address.objects.get(uprn=100101048589).location,
            opening_times=opening_times.as_string_table(),
            council=self.council,
        )
        advance_station.save()
        UprnToCouncil.objects.filter(lad=self.council.geography.gss).update(
            advance_voting_station=advance_station
        )
Python
0.000066
@@ -1823,12 +1823,12 @@ 22-0 -5-01 +4-30 %22, %22 @@ -1890,17 +1890,17 @@ 022-05-0 -2 +1 %22, %2210:0
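Decoded, the patch shifts both advance-voting opening dates back by one day, matching the subject line:

        opening_times.add_open_time("2022-04-30", "10:00", "16:00")
        opening_times.add_open_time("2022-05-01", "10:00", "16:00")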
b195e909ce3d3903998a91de0b5763dd679b25e3
fix version
debile/slave/runners/findbugs.py
debile/slave/runners/findbugs.py
# Copyright (c) 2012-2013 Paul Tagliamonte <paultag@debian.org>
# Copyright (c) 2013 Leo Cavaille <leo@cavaille.net>
# Copyright (c) 2013 Sylvestre Ledru <sylvestre@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

from debile.slave.wrappers.findbugs import parse_findbugs
from debile.slave.utils import cd
from debile.utils.commands import run_command

import os


def findbugs(deb, analysis):
    run_command(["dpkg", "-x", deb, "binary"])

    with cd('binary'):
        # Force english as findbugs is localized
        os.putenv("LANG", "C")
        out, err, _ = run_command([
            'fb', 'analyze', '-effort:max',
            '-xml:withMessages', '.'
        ])
        xmlbytes = out.encode("utf-8")

    failed = False
    # if err.strip() == '':
    #     return (analysis, err, failed)

    for issue in parse_findbugs(xmlbytes):
        analysis.results.append(issue)
        if not failed and issue.severity in [
            'performance', 'portability', 'error', 'warning'
        ]:
            failed = True

    return (analysis, err, failed, None, None)


def version():
    out, _, ret = run_command(['fb', '-version'])
    if ret != 0:
        raise Exception("findbugs is not installed")
    name, version = out.split(" ")
    return (name, version.strip())
Python
0.004257
@@ -170,16 +170,82 @@ an.org%3E%0A +# Copyright (c) 2015 Lucas Kanashiro %3Ckanashiro.duarte@gmail.com%3E%0A #%0A# Perm @@ -2327,14 +2327,8 @@ %0A - name, ver @@ -2343,16 +2343,13 @@ ut.s -plit(%22 %22 +trip( )%0A @@ -2362,12 +2362,18 @@ rn ( -name +'findbugs' , ve
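Decoded, the patch adds a 2015 copyright line for Lucas Kanashiro to the header and rewrites version() so it no longer splits the `fb -version` output on a space; after the commit the function reads:

    def version():
        out, _, ret = run_command(['fb', '-version'])
        if ret != 0:
            raise Exception("findbugs is not installed")
        version = out.strip()
        return ('findbugs', version.strip())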
4e3e8d49b06c03b1ab31357206fc6385256d8438
Switch to official python graphviz port
graph/gengraph.py
graph/gengraph.py
#!/usr/bin/python

import sys
from graphviz import Digraph

class Gengraph(object):

    def __init__(self):
        pass

    def getTrName(self, line):
        line = line.strip(' ')
        name = line.split(' ')
        name.remove('tr_t')
        for n in name:
            if len(n) > 0:
                x = n.find('[')
                return n[:x];

    def createEdge(self, trdefinition):
        # prevent from adding multiple same named nodes
        nodeSet = set()
        for line in trdefinition:
            # now we have list of names: state1, state2, event, event handler
            tok = line.split(',')
            # no enought tokens (empty line?)
            if len(tok) < 3:
                continue
            for i in range(len(tok)):
                tok[i] = tok[i].strip(' ')
            # adding nodes to graph
            if (not tok[0] in nodeSet):
                nodeSet.add(tok[0])
                self.dot.node(tok[0], tok[0], shape="box")
            if ((not tok[1] in nodeSet) and (tok[1] != "FSM_NO_STATE")):
                nodeSet.add(tok[1])
                self.dot.node(tok[1], tok[1], shape="box")
            if (tok[2] == "FSM_DEF_TR"):
                self.dot.edge(tok[0], tok[1], label="["+tok[3]+"] ")
            elif (tok[1] == "FSM_NO_STATE"):
                self.dot.edge(tok[0], tok[0], label=tok[2]+"["+tok[3]+"] ")
            else:
                self.dot.edge(tok[0], tok[1], label=tok[2]+"["+tok[3]+"] ")

    def gen(self, filename):
        f = open(filename)
        src = f.readlines()
        found = 0
        trdefinition = []
        for line in src:
            if (line.find('tr_t') >= 0):
                found = 1
                self.dot = Digraph(comment=self.getTrName(line))
                print "Name: ", self.getTrName(line)
                continue
            if (found and line.find('};') >= 0):
                break
            if (found):
                line = line.strip(' ')
                if (line.startswith('/*') or line.endswith('*/') or line.startswith('*')):
                    continue
                line = line.replace('{', '')
                line = line.replace('}', '')
                trdefinition.append(line)
        # parsing each line and creating dot graph
        self.createEdge(trdefinition)
        print self.dot.source
        self.dot.render("out.gv")
        print 'finded definition:'
        for line in trdefinition:
            print line
        print '...generating fsm graph'

    def usage(self, argv):
        print 'Usage:'
        print argv[0], "src tr_table_name"

if __name__ == '__main__':
    g = Gengraph()
    if (len(sys.argv) < 2):
        g.usage(sys.argv)
        exit(1)
    g.gen(sys.argv[1])
Python
0
@@ -26,36 +26,17 @@ sys%0A -from graphviz import Digraph +import gv %0A%0Acl @@ -911,48 +911,82 @@ -self.dot.node(tok%5B0%5D, tok%5B0%5D +n = gv.node(self.dot, tok%5B0%5D)%0A gv.setv(n , +%22 shape -=%22box +%22, %22Mrecord %22)%0A @@ -1113,48 +1113,82 @@ -self.dot.node(tok%5B1%5D, tok%5B1%5D +n = gv.node(self.dot, tok%5B1%5D)%0A gv.setv(n , +%22 shape -=%22box +%22, %22Mrecord %22)%0A @@ -1252,32 +1252,44 @@ +e = gv.edge( self.dot .edge(tok%5B0%5D @@ -1268,38 +1268,34 @@ gv.edge(self.dot -.edge( +, tok%5B0%5D, tok%5B1%5D, @@ -1292,24 +1292,54 @@ , tok%5B1%5D +)%0A gv.setv(e , +%22 label -= +%22, %22%5B%22+tok%5B @@ -1405,32 +1405,44 @@ +e = gv.edge( self.dot .edge(tok%5B0%5D @@ -1421,38 +1421,34 @@ gv.edge(self.dot -.edge( +, tok%5B0%5D, tok%5B0%5D, @@ -1445,24 +1445,54 @@ , tok%5B0%5D +)%0A gv.setv(e , +%22 label -= +%22, tok%5B2%5D+%22 @@ -1500,32 +1500,79 @@ %22+tok%5B3%5D+%22%5D %22)%0A + gv.setv(e, %22arrowhead%22, %22tee%22)%0A else @@ -1589,24 +1589,36 @@ +e = gv.edge( self.dot .edge(to @@ -1609,22 +1609,18 @@ self.dot -.edge( +, tok%5B0%5D, @@ -1629,16 +1629,46 @@ k%5B1%5D +)%0A gv.setv(e , +%22 label -= +%22, tok%5B @@ -1946,24 +1946,19 @@ t = -D +gv.d igraph( -comment= self @@ -2395,24 +2395,64 @@ ce('%7D', '')%0A + line = line.strip(%22%5Cn%22)%0A @@ -2588,61 +2588,123 @@ -print self.dot.source%0A self.dot.render( +gv.layout(self.dot, %22dot%22)%0A gv.render(self.dot, %22dot%22, %22out.gv%22)%0A gv.render(self.dot, %22png%22, %22out. +pn g -v %22)%0A%0A
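Decoded, the patch swaps the graphviz Digraph API for the gv bindings, where nodes and edges are created through module functions and attributes are set with gv.setv (self-loops also get an arrowhead attribute). Representative lines after the commit, reassembled from the hunks, so treat the exact layout as approximate:

    import gv
    ...
        self.dot = gv.digraph(self.getTrName(line))
    ...
        n = gv.node(self.dot, tok[0])
        gv.setv(n, "shape", "Mrecord")
    ...
        e = gv.edge(self.dot, tok[0], tok[1])
        gv.setv(e, "label", "["+tok[3]+"] ")
    ...
        gv.layout(self.dot, "dot")
        gv.render(self.dot, "dot", "out.gv")
        gv.render(self.dot, "png", "out.png")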
1ce9f91ac81c11013480e5ce20bb86774a397866
Fix bug when Cordaid fundin partner funding_amount is None
akvo/scripts/cordaid/post_import.py
akvo/scripts/cordaid/post_import.py
# -*- coding: utf-8 -*-

# Akvo RSR is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.

import datetime
from lxml import etree
from os.path import splitext

from django.core.management import setup_environ
import akvo.settings
setup_environ(akvo.settings)

import os

from django.core.files import File
from django.core.files.temp import NamedTemporaryFile

from akvo.rsr.models import Project, Partnership, Organisation
from akvo.rsr.utils import model_and_instance_based_filename

from akvo.scripts.cordaid import (
    CORDAID_IATI_ACTIVITIES_XML, CORDAID_PROJECT_IMAGES_DIR, CORDAID_ORG_ID,
    print_log, log, ACTION_FUNDING_SET, ACTION_FUNDING_FOUND,
    ERROR_IMAGE_UPLOAD, ACTION_SET_IMAGE, CORDAID_ACTIVITIES_CSV_FILE, init_log
)


def import_images(image_dir, img_to_proj_map):
    for image_name in os.listdir(image_dir):
        photo_id, ext = splitext(image_name)
        if ext.lower() in ['.png', '.jpg', '.jpeg', '.gif']:
            try:
                internal_id=img_to_proj_map.get(
                    photo_id, {'internal_project_id': None}
                )['internal_project_id']
                project = Project.objects.get(
                    partnerships__internal_id=internal_id
                )
                filename = model_and_instance_based_filename(
                    'Project', project.pk, 'current_image', image_name
                )
                with open(os.path.join(image_dir, image_name), 'rb') as f:
                    image_data = f.read()
                    image_temp = NamedTemporaryFile(delete=True)
                    image_temp.write(image_data)
                    image_temp.flush()
                    project.current_image.save(filename, File(image_temp), save=True)
                    f.close()
                project.current_image_caption = img_to_proj_map.get(
                    photo_id, {'image_caption': ''}
                )['image_caption']
                project.save()
                log(
                    u"Uploaded image to project {pk}",
                    dict(internal_id=internal_id, pk=project.pk, event=ACTION_SET_IMAGE))
            except Exception, e:
                log(
                    u"Upload failed. internal_id: {internal_id} Exception class: {extra}",
                    dict(internal_id=internal_id, event=ERROR_IMAGE_UPLOAD, extra=e.__class__),
                )


def fix_funding(img_to_proj_map):
    """ Add Cordaid as a funding partner to all its projects and "fill the project up"
    """
    cordaid = Organisation.objects.get(pk=CORDAID_ORG_ID)
    for project_data in img_to_proj_map.values():
        internal_id = project_data['internal_project_id']
        try:
            project = None
            project = Project.objects.get(
                partnerships__internal_id=internal_id, partnerships__organisation=cordaid
            )
            funds_needed = project.funds_needed
            if funds_needed > 0:
                cordaid_funding_partnership, created = Partnership.objects.get_or_create(
                    organisation=cordaid,
                    project=project,
                    partner_type=Partnership.FUNDING_PARTNER,
                    defaults={'funding_amount': funds_needed}
                )
                if created:
                    log(
                        u"Added Cordaid as funding partner to project {pk}, funding amount: {extra}",
                        dict(internal_id=internal_id, pk=project.pk, event=ACTION_FUNDING_SET, extra=funds_needed)
                    )
                else:
                    # since Cordaid already is funding, we need to add thatamount to funds_needed to get to fully funded
                    cordaid_funding_partnership.funding_amount = funds_needed + cordaid_funding_partnership.funding_amount
                    cordaid_funding_partnership.save()
                    log(
                        u"Found Cordaid as funding partner to project {pk}, setting funding amount: {extra}",
                        dict(internal_id=internal_id, pk=project.pk, event=ACTION_FUNDING_FOUND, extra=funds_needed)
                    )
            else:
                log(
                    u"Project {pk} is fully funded",
                    dict(internal_id=internal_id, pk=project.pk, event=ACTION_FUNDING_FOUND,)
                )
        except Exception, e:
            log(u"Error trying to set up Cordaid as funding partner to project {pk}\nException class: {extra}",
                dict(internal_id=internal_id, pk=getattr(project, 'pk', None), event=e.__class__, extra=e.message),
            )


def create_mapping_images_to_projects():
    """ Create a dict that maps the photo-ids in cordaid's xml to the internal-project-id of the same activity
    This allows us to find the project to add the current image to
    """
    with open(CORDAID_IATI_ACTIVITIES_XML, 'r') as f:
        root = etree.fromstring(f.read())
        images_to_projects = {}
        for i in range(len(root)):
            activity = root[i]
            images_to_projects[
                activity.get('{http://www.akvo.org}photo-id')
            ] = dict(
                internal_project_id=activity.get('{http://www.akvo.org}internal-project-id'),
                image_caption=activity.get('{http://www.akvo.org}image-caption', '').strip()
            )
    return images_to_projects


if __name__ == '__main__':
    init_log()
    img_to_proj_map = create_mapping_images_to_projects()
    import_images(CORDAID_PROJECT_IMAGES_DIR, img_to_proj_map)
    fix_funding(img_to_proj_map)
    log_file = init_log(CORDAID_ACTIVITIES_CSV_FILE)
    names = (u'internal_id', u'pk', u'label', u'event', u'extra')
    print_log(log_file, names)
Python
0.999965
@@ -3959,16 +3959,42 @@ needed + + (%0A cordaid @@ -4028,16 +4028,43 @@ g_amount + or 0%0A ) %0A
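Decoded, the patch guards against a None funding_amount (the bug named in the subject) by defaulting it to 0 in the addition:

                    cordaid_funding_partnership.funding_amount = funds_needed + (
                        cordaid_funding_partnership.funding_amount or 0
                    )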
43125b7ea61606d6d65d0c75168539cee8cdfcd2
support touch-tip for JSON protocols (#2000)
api/opentrons/protocols/__init__.py
api/opentrons/protocols/__init__.py
import time
from itertools import chain
from opentrons import instruments, labware, robot
from opentrons.instruments import pipette_config


def _sleep(seconds):
    if not robot.is_simulating():
        time.sleep(seconds)


def load_pipettes(protocol_data):
    pipettes = protocol_data.get('pipettes', {})
    pipettes_by_id = {}

    for pipette_id, props in pipettes.items():
        model = props.get('model')
        mount = props.get('mount')
        config = pipette_config.load(model)
        pipettes_by_id[pipette_id] = instruments._create_pipette_from_config(
            config=config,
            mount=mount)

    return pipettes_by_id


def load_labware(protocol_data):
    data = protocol_data.get('labware', {})
    loaded_labware = {}
    for labware_id, props in data.items():
        slot = props.get('slot')
        model = props.get('model')
        display_name = props.get('display-name')

        if slot == '12':
            if model == 'fixed-trash':
                # pass in the pre-existing fixed-trash
                loaded_labware[labware_id] = robot.fixed_trash
            else:
                # share the slot with the fixed-trash
                loaded_labware[labware_id] = labware.load(
                    model,
                    slot,
                    display_name,
                    share=True
                )
        else:
            loaded_labware[labware_id] = labware.load(
                model,
                slot,
                display_name
            )

    return loaded_labware


def get_location(command_params, loaded_labware):
    labwareId = command_params.get('labware')
    well = command_params.get('well')
    return loaded_labware.get(labwareId, {}).get(well)


def get_pipette(command_params, loaded_pipettes):
    pipetteId = command_params.get('pipette')
    return loaded_pipettes.get(pipetteId)


def dispatch_commands(protocol_data, loaded_pipettes, loaded_labware):
    subprocedures = [
        p.get('subprocedure', [])
        for p in protocol_data.get('procedure', [])]

    flat_subs = chain.from_iterable(subprocedures)

    for command_item in flat_subs:
        command_type = command_item.get('command')
        params = command_item.get('params', {})
        pipette = get_pipette(params, loaded_pipettes)
        location = get_location(params, loaded_labware)
        volume = params.get('volume')

        if command_type == 'delay':
            wait = params.get('wait', 0)
            if wait is True:
                # TODO Ian 2018-05-14 pass message
                robot.pause()
            else:
                _sleep(wait)

        elif command_type == 'blowout':
            pipette.blow_out(location)

        elif command_type == 'pick-up-tip':
            pipette.pick_up_tip(location)

        elif command_type == 'drop-tip':
            pipette.drop_tip(location)

        elif command_type == 'aspirate':
            pipette.aspirate(volume, location)

        elif command_type == 'dispense':
            pipette.dispense(volume, location)


def execute_protocol(protocol):
    loaded_pipettes = load_pipettes(protocol)
    loaded_labware = load_labware(protocol)

    dispatch_commands(protocol, loaded_pipettes, loaded_labware)

    return {
        'pipettes': loaded_pipettes,
        'labware': loaded_labware
    }
Python
0
@@ -1871,24 +1871,100 @@ ipetteId)%0A%0A%0A +# C901 code complexity is due to long elif block, ok in this case (Ian+Ben)%0A def dispatch @@ -2017,24 +2017,43 @@ ed_labware): + # noqa: C901 E501 %0A subproc @@ -3149,16 +3149,99 @@ ation)%0A%0A + elif command_type == 'touch-tip':%0A pipette.touch_tip(location)%0A%0A %0Adef exe
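Decoded, the patch adds a touch-tip branch to the command dispatcher and annotates the function for the resulting flake8 complexity warning:

    # C901 code complexity is due to long elif block, ok in this case (Ian+Ben)
    def dispatch_commands(protocol_data, loaded_pipettes, loaded_labware):  # noqa: C901 E501
        ...
            elif command_type == 'touch-tip':
                pipette.touch_tip(location)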
4d157780f3f7927635b0a57399fc70a956924a0c
Add binding with filter function
gsensors/basic.py
gsensors/basic.py
#-*- coding:utf-8 -*-
import logging
from datetime import datetime

import gevent
from events import Events

from gsensors.utils import full_exc_info


class GSensorApp():
    debug = False

    def __init__(self):
        self.sources = []

    def add(self, source):
        self.sources.append(source)
        source.debug = self.debug

    def run(self):
        for source in self.sources:
            source.start()
        # wait
        gevent.wait()


class DataSource(object):
    """ Abstract data source model
    """
    debug = False
    timeout = -1  # no timeout by default #TODO

    def __init__(self, name=None, unit=None, timeout=None):
        self.name = name or self.__class__.__name__
        self._logger = logging.getLogger("gsensors.%s" % self.name)
        self.events = Events()
        self.unit = unit
        self._value = 0
        self._error = None
        if timeout is not None:
            self.timeout = timeout
        self.last_update = None  # datetime on last update

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, val):
        """ Set/Update the value
        """
        now = datetime.now()
        self.set_value(val, update_time=now)

    def set_value(self, val, update_time=None):
        self.events.on_update(self, val)
        if val != self._value:
            self._value = val
            self.last_update = update_time
            self.events.on_change(self, val)

    @property
    def error(self):
        return self._error

    @error.setter
    def error(self, err):
        if err != self._error:
            # call error listener
            old_err = self._error
            self._error = err
            if self._error is not None:
                self.events.on_error(self, err)
            else:
                self.events.on_error_release(self, old_err)

    def _checked_callback(self, callback):
        def wrapper(*args, **kwargs):
            try:
                callback(*args, **kwargs)
            except Exception as err:
                self.error = "Callback error"
                self._logger.error("Callback error: %s" % err, exc_info=full_exc_info())
        return wrapper

    def _callback_wrap_onvalue(self, callback, value):
        def wrapper(new_value):
            self._logger.debug("%s =? %s" % (new_value, value))
            if new_value == value:
                callback(new_value)
        return wrapper

    def on_update(self, callback, value=None):
        """ Callback when value is updated (even if it stays the same).
        If `value` is given the callback will be called only if the new value
        equals it.
        """
        if value is not None:
            callback = self._callback_wrap_onvalue(callback, value)
        callback = self._checked_callback(callback)
        self.events.on_update += callback

    def on_change(self, callback, value=None):
        """ Callback when value changed.
        If `value` is given the callback will be called only if the new value
        equals it.
        """
        if value is not None:
            callback = self._callback_wrap_onvalue(callback, value)
        callback = self._checked_callback(callback)
        self.events.on_change += callback

    def on_timeout(self, callback):
        #TODO
        raise NotImplementedError

    def on_error(self, callback):
        """ Callback when an error occurs (property error changed and is not None)
        """
        callback = self._checked_callback(callback)
        self.events.on_error += callback

    def on_error_release(self, callback):
        """ Callback when there is no more error (property error changed back to None)
        """
        callback = self._checked_callback(callback)
        self.events.on_error_release += callback

    def start(self):
        pass

    def export(self):
        """ Data given to the clients on each change
        """
        res = {}
        res["type"] = self.__class__.__name__
        res["name"] = self.name
        res["timeout"] = self.timeout
        res["value"] = self.value
        res["unit"] = self.unit
        res["error"] = self.error
        if self.last_update is not None:
            res["last_update"] = self.last_update.isoformat()
        return res

    def desc(self):
        res = {}
        res["type"] = self.__class__.__name__
        res["name"] = self.name
        return res


class AutoUpdateValue(DataSource):
    """ Basic value source model:
    * a single value (with a label and a unit)
    * an update methode called every N seconds
    """
    unit = ""
    update_freq = 1  # frequence of update

    def __init__(self, name=None, unit=None, update_freq=None):
        super(AutoUpdateValue, self).__init__(name=name, unit=unit)
        # update timeput
        self.worker = None
        self.last_update = None
        if update_freq is not None:
            self.update_freq = update_freq or AutoUpdateValue.update_freq
        self.timeout = self.update_freq * 2  # timeout after 2 update fail
        # datetime of last and previous read (self.update return datetime)
        self.last_read = None
        self.prevous_read = None

    def update(self):
        """ Abstract update method
        """
        self.value = 0
        self.error = None
        raise NotImplementedError("Should be overiden in subclass")

    def _checked_update(self):
        try:
            self.update()
        except Exception as err:
            self.error = "Error"
            self._logger.error("Update error: %s" % err, exc_info=full_exc_info())

    def update_work(self):
        while True:
            self._logger.info("Update !")
            self._checked_update()
            gevent.sleep(self.update_freq)

    def start(self):
        self.worker = gevent.spawn(self.update_work)


class StupidCount(AutoUpdateValue):
    unit = ""
    update_freq = 1

    def update(self):
        self.value += 1


def cb_print(source):
    print("%s: %s%s" % (source.name, source.value, source.unit))


def PrintValue():
    def _print(source, value):
        print("%s: %s %s" % (source.name, value, source.unit if source.unit is not None else ""))
    return _print


def PrintError():
    def _print(source, error):
        print("%s ERROR: %s" % (source.name, error))
    return _print
Python
0
@@ -2266,32 +2266,64 @@ llback, value):%0A + if callable(value):%0A def wrap @@ -2326,16 +2326,24 @@ wrapper( +source, new_valu @@ -2350,32 +2350,172 @@ e):%0A + if value(new_value):%0A callback(new_value)%0A else:%0A def wrapper(source, new_value):%0A # self._logger.deb @@ -2562,16 +2562,20 @@ + if new_v @@ -2585,24 +2585,28 @@ e == value:%0A +
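Decoded, the patch lets `value` be a predicate: when it is callable, the wrapper filters on value(new_value); otherwise it keeps the old equality check (with the debug log commented out). Both wrappers also gain a source parameter. After the commit the method reads roughly:

        def _callback_wrap_onvalue(self, callback, value):
            if callable(value):
                def wrapper(source, new_value):
                    if value(new_value):
                        callback(new_value)
            else:
                def wrapper(source, new_value):
                    # self._logger.debug("%s =? %s" % (new_value, value))
                    if new_value == value:
                        callback(new_value)
            return wrapper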
4dace020f536fdec191f322d16e8af71bc2b327c
Add Burmese
appcomposer/translator/languages.py
appcomposer/translator/languages.py
""" To contain language-related ops. """ from collections import OrderedDict import babel from babel import Locale, UnknownLocaleError from appcomposer.babel import gettext def obtain_groups(): """ Obtains the groups that are available for translation, as an Ordered Dictionary. :return: Ordered dictionary with the name of the groups identified by each key. :rtype: OrderedDict """ groups = OrderedDict() groups["ALL"] = "ALL" groups["10-13"] = gettext("Preadolescence (age 10-13)") groups["14-18"] = gettext("Adolescence (age 14-18)") return groups # Taken from http://en.wikipedia.org/wiki/Languages_of_the_European_Union, April 2015 OFFICIAL_EUROPEAN_UNION_LANGUAGES = ['bg', 'hr', 'cs', 'da', 'nl', 'en', 'et', 'fi', 'fr', 'de', 'el', 'hu', 'ga', 'it', 'lv', 'lt', 'mt', 'pl', 'pt', 'ro', 'sk', 'sl', 'es', 'sv'] SEMIOFFICIAL_EUROPEAN_UNION_LANGUAGES = ['eu', 'ca', 'gl', 'gd', 'cy'] OTHER_LANGUAGES = [ # The following languages are in Graasp 'uk', # Ukranian 'tr', # Turkish 'sr', # Serbian language 'ru', # Russian language 'be', # Belarussian # The following languages are too widely used 'ar', # Arabic 'zh', # Chinese 'hi', # Hindi # The following have been selected to be interesting for Go-Lab 'no', # Norwegian 'id', # Indonesian 'ja', # Japanese # The following were available in the Go-Lab portal 'bs', # Bosnian 'sh', # Serbo-Croatian, 'lb', # Luxembourgish, 'se', # Northern Sami ] ALL_LANGUAGES = OFFICIAL_EUROPEAN_UNION_LANGUAGES + SEMIOFFICIAL_EUROPEAN_UNION_LANGUAGES + OTHER_LANGUAGES def obtain_languages(): """ Obtains the languages (without the groups) that are available for translation, as a Dictionary. The format is code:language_name TO-DO: This method can probably be optimized. :return: """ babel_supported_languages = babel.core.Locale("en", "US").languages.items() languages = [] for code, lang in babel_supported_languages: golab_supported = False for supported_code in ALL_LANGUAGES: if code == supported_code: golab_supported = True break if golab_supported: languages.append( (code, lang) ) if False: print "Babel Supported languages after filter: %s" % len(languages) print "Go-Lab Supported languages: %s" % len(ALL_LANGUAGES) languages.sort(key=lambda it: it[1]) # TODO: Currently, we filter languages which contain "_" in their code so as to simplify. # Because we use _ throughout the composer as a separator character, trouble is caused otherwise. # Eventually we should consider whether we need to support special languages with _ # on its code. targetlangs_codes = [lang[0] + "_ALL" for lang in languages if "_" not in lang[0]] targetlangs_list = [{"pcode": code, "repr": get_locale_english_name( *get_locale_info_from_code(code))} for code in targetlangs_codes] d = {lang["pcode"]: lang["repr"] for lang in targetlangs_list} d["all_ALL"] = "DEFAULT" return d def get_locale_info_from_code(code): """ Retrieves the lang, country and group from a full or partial locale code. @param code: Locale code. It can be a full code (ca_ES_ALL) or partial code (ca_ES). @return: (lang, country, group) or (lang, country), depending if it's full or partial. """ splits = code.split("_") # If our code is only "ca_ES" style (doesn't include group). if len(splits) == 2: lang, country = splits return lang, country # If we have 3 splits then it is probably "ca_ES_ALL" style (includes group). elif len(splits) == 3: lang, country, group = splits return lang, country, group # Unknown number of splits. Throw an exception, it is not a recognized code. 
else: raise UnrecognizedLocaleCodeException("The locale code can't be recognized: " + code) def get_locale_english_name(lang, country): """ Retrieves a string representation of a Locale. @param lang: Lang code. @param country: Country code. @return: String representation for the locale. """ try: if country.upper() == 'ALL': country = "" return Locale(lang, country).english_name except UnknownLocaleError: return Locale("en", "US").languages.get(lang) class UnrecognizedLocaleCodeException(Exception): """ Exception thrown when the format of a locale code does not seem to be as expected. """
Python
0.999999
@@ -1224,143 +1224,8 @@ ndi%0A - # The following have been selected to be interesting for Go-Lab%0A 'no', # Norwegian%0A 'id', # Indonesian%0A 'ja', # Japanese%0A @@ -1377,16 +1377,170 @@ rn Sami%0A + # The following have been selected to be interesting for Go-Lab%0A 'no', # Norwegian%0A 'id', # Indonesian%0A 'ja', # Japanese%0A 'my', # Burmese%0A %5D%0A%0AALL_L
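Decoded, the patch moves the Go-Lab block from its place after Hindi to the end of OTHER_LANGUAGES and adds Burmese to it; the tail of the list after the commit reads roughly:

        # The following have been selected to be interesting for Go-Lab
        'no', # Norwegian
        'id', # Indonesian
        'ja', # Japanese
        'my', # Burmese
    ]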
57f494f27eca25228a49bcc26f5d28a9f390ac26
Fix premailer test
apps/sumo/tests/test_email_utils.py
apps/sumo/tests/test_email_utils.py
from mock import patch
from nose.tools import eq_

from django.conf import settings
from django.utils.translation import get_language
from django.utils.functional import lazy

from sumo.email_utils import (uselocale, safe_translation,
                              emails_with_users_and_watches)
from sumo.tests import TestCase
from users.tests import user


mock_translations = {
    'Hello': {
        'en-us': 'Hello',
        'fr': 'Bonjour',
        'es': 'Hola',
    },
    'Hello {name}': {
        'en-us': 'Hello {name}',
        'fr': 'Bonjour {0}',
        'es': 'Hola {name}',
    }
}


def mock_ugettext(msg_id):
    locale = get_language()
    return mock_translations[msg_id][locale]

mock_ugettext_lazy = lazy(mock_ugettext)


def mock_gettext(f):
    f = patch('tower.ugettext', mock_ugettext)(f)
    f = patch('tower.ugettext_lazy', mock_ugettext_lazy)(f)
    return f


class SafeTranslationTests(TestCase):
    def setUp(self):
        # These tests assume English is the fall back language. If it
        # isn't we are gonna have a bad time.
        eq_('en-US', settings.WIKI_DEFAULT_LANGUAGE)

    @mock_gettext
    def test_mocked_gettext(self):
        """I'm not entirely sure about the mocking, so test that."""
        # Import tower now so it is affected by the mock.
        from tower import ugettext as _

        with uselocale('en-US'):
            eq_(_('Hello'), 'Hello')
        with uselocale('fr'):
            eq_(_('Hello'), 'Bonjour')
        with uselocale('es'):
            eq_(_('Hello'), 'Hola')

    @mock_gettext
    def test_safe_translation_noop(self):
        """Test that safe_translation doesn't mess with good translations."""
        # Import tower now so it is affected by the mock.
        from tower import ugettext as _

        @safe_translation
        def simple(locale):
            return _('Hello')

        # These should just work normally.
        eq_(simple('en-US'), 'Hello')
        eq_(simple('fr'), 'Bonjour')
        eq_(simple('es'), 'Hola')

    @mock_gettext
    def test_safe_translation_bad_trans(self):
        """Test that safe_translation insulates from bad translations."""
        # Import tower now so it is affected by the mock.
        from tower import ugettext as _

        # `safe_translation` will call this with the given locale, and
        # if that fails, fall back to English.
        @safe_translation
        def bad_trans(locale):
            return _('Hello {name}').format(name='Mike')

        # French should come back as English, because it has a bad
        # translation, but Spanish should come back in Spanish.
        eq_(bad_trans('en-US'), 'Hello Mike')
        eq_(bad_trans('fr'), 'Hello Mike')
        eq_(bad_trans('es'), 'Hola Mike')

    @mock_gettext
    @patch('sumo.email_utils.log')
    def test_safe_translation_logging(self, mocked_log):
        """Logging translation errors is really important, so test it."""
        # Import tower now so it is affected by the mock.
        from tower import ugettext as _

        # Assert that bad translations cause error logging.
        @safe_translation
        def bad_trans(locale):
            return _('Hello {name}').format(name='Mike')

        # English and Spanish should not log anything. French should.
        bad_trans('en-US')
        bad_trans('es')
        eq_(len(mocked_log.method_calls), 0)
        bad_trans('fr')
        eq_(len(mocked_log.method_calls), 1)

        method_name, method_args, method_kwargs = mocked_log.method_calls[0]
        eq_(method_name, 'error')
        assert 'Bad translation' in method_args[0]
        eq_(method_args[1], 'fr')


class UseLocaleTests(TestCase):
    def test_uselocale(self):
        """Test that uselocale does what it says on the tin."""
        with uselocale('en-US'):
            eq_(get_language(), 'en-us')
        with uselocale('de'):
            eq_(get_language(), 'de')
        with uselocale('fr'):
            eq_(get_language(), 'fr')


class PremailerTests(TestCase):
    def test_styles_inlining(self):
        """Test that styles tags are converted to inline styles"""
        with patch('jingo.render_to_string') as mocked:
            mocked.return_value = ('<html>'
                                   '<head>'
                                   '<style>a { color: #000; }</style>'
                                   '</head>'
                                   '<body>'
                                   '<a href="#">Hyperlink</a>'
                                   '</body>'
                                   '</html>')

            u = user(save=True)
            msg = emails_with_users_and_watches('test', 'a.ltxt', 'a.html',
                                                {}, [(u, [None])])

            for m in msg:
                self.assertIn('<a href="#" style="color:#000">Hyperlink</a>',
                              str(m.message()))
Python
0
@@ -77,16 +77,61 @@ ettings%0A +from django.contrib.sites.models import Site%0A from dja @@ -4509,17 +4509,21 @@ a href=%22 -# +/test %22%3EHyperl @@ -4845,68 +4845,172 @@ -self.assertIn('%3Ca href=%22#%22 style=%22color:#000%22%3EHyperlink%3C/a%3E' +tag = ('%3Ca href=%22https://%25s/test%22 style=%22color:#000%22%3E'%0A 'Hyperlink%3C/a%3E')%0A self.assertIn(tag %25 Site.objects.get_current().domain ,%0A
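Decoded, the patch imports Site, points the mocked hyperlink at /test, and asserts against a URL built from the current site's domain; the key lines after the commit read roughly:

    from django.contrib.sites.models import Site
    ...
                tag = ('<a href="https://%s/test" style="color:#000">'
                       'Hyperlink</a>')
                self.assertIn(tag % Site.objects.get_current().domain,
                              str(m.message()))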
2d3e2f796d6a839994c2708f31e60d52c6bf8c15
Simplify main()
sjp.py
sjp.py
#!/usr/bin/env python3

import urllib.request  # to download HTML source
import sys  # to access CLI arguments and to use exit codes
from bs4 import BeautifulSoup  # to parse HTML source

version = 0.01


def printUsageInfo():
    helpMsg = """Usage:
    sjp.py <word>
    sjp.py (-h | --help | /?)
    sjp.py (-v | --version)"""
    print(helpMsg)


def printVersionInfo():
    versionMsg = "sjp.py " + str(version)
    print(versionMsg)


def getDefinition(word):
    url = 'http://sjp.pl/' + urllib.parse.quote(word)
    try:
        html = urllib.request.urlopen(url).read()
    except urllib.error.URLError:
        print("[Error] Can't connect to the service")
        sys.exit(2)
    soup = BeautifulSoup(html)

    # checks if definition is in dictionary:
    if soup.find_all('span', style="color: #e00;"):
        print("[Error] \"" + word + "\" not found")
        sys.exit(1)

    # definition is in dictionary, continue:
    ex = soup.find_all('p', style="margin-top: .5em; "
                                  "font-size: medium; "
                                  "max-width: 32em; ")
    ex = ex[0]
    return ex.contents[0::2]  # returns a list of lines of definition


def main():
    if len(sys.argv) > 1:
        if sys.argv[1] in ["-h", "--help", "/?"]:
            printUsageInfo()
            sys.exit()
        elif sys.argv[1] in ["-v", "--version"]:
            printVersionInfo()
            sys.exit()
        else:
            print('\n'.join(getDefinition(sys.argv[1])))
    else:
        printUsageInfo()
        sys.exit()


if __name__ == "__main__":
    main()
Python
0.000042
@@ -1231,17 +1231,18 @@ gv) -%3E +%3C= 1:%0A @@ -1233,24 +1233,64 @@ ) %3C= 1:%0A + printUsageInfo()%0A sys.exit()%0A if sys.a @@ -1299,17 +1299,17 @@ v%5B1%5D in -%5B +( %22-h%22, %22- @@ -1320,17 +1320,17 @@ p%22, %22/?%22 -%5D +) :%0A @@ -1327,28 +1327,24 @@ %22):%0A - - printUsageIn @@ -1348,36 +1348,32 @@ eInfo()%0A - sys.exit()%0A @@ -1371,20 +1371,16 @@ t()%0A - - elif sys @@ -1391,17 +1391,17 @@ v%5B1%5D in -%5B +( %22-v%22, %22- @@ -1409,17 +1409,17 @@ version%22 -%5D +) :%0A @@ -1416,28 +1416,24 @@ %22):%0A - - printVersion @@ -1443,28 +1443,24 @@ o()%0A - sys.exit()%0A @@ -1458,20 +1458,16 @@ .exit()%0A - else @@ -1472,28 +1472,24 @@ se:%0A - print('%5Cn'.j @@ -1525,61 +1525,8 @@ )))%0A - else:%0A printUsageInfo()%0A sys.exit() %0A%0Aif
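Decoded, the patch replaces the nested if/else with an early exit and swaps the option lists for tuples, so main() after the commit reads roughly:

    def main():
        if len(sys.argv) <= 1:
            printUsageInfo()
            sys.exit()
        if sys.argv[1] in ("-h", "--help", "/?"):
            printUsageInfo()
            sys.exit()
        elif sys.argv[1] in ("-v", "--version"):
            printVersionInfo()
            sys.exit()
        else:
            print('\n'.join(getDefinition(sys.argv[1])))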
a78d1bcfdc3d979cd7be1f82345c29047993953d
Add more init logic to handle AWS HTTP API
sqs.py
sqs.py
#!/usr/bin/env python

from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat

import datetime
import hashlib
import hmac


class SQSRequest(HTTPRequest):
    """SQS AWS Adapter for Tornado HTTP request"""
    def __init__(self, *args, **kwargs):
        t = datetime.datetime.utcnow()
        method = kwargs.get('method', 'GET')
        url = kwargs.get('url') or args[0]
        params = sorted(url.split('?')[1].split('&'))
        canonical_querystring = '&'.join(params)
        kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
        args = tuple()
        host = url.split('://')[1].split('/')[0]
        canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
        service = 'sqs'
        region = kwargs.get('region', 'eu-west-1')
        super(SQSRequest, self).__init__(*args, **kwargs)
Python
0
@@ -819,16 +819,1337 @@ west-1') +%0A%0A amz_date = t.strftime('%25Y%25m%25dT%25H%25M%25SZ')%0A datestamp = t.strftime('%25Y%25m%25d')%0A%0A canonical_headers = 'host:' + host + '%5Cn' + 'x-amz-date:' + amz_date + '%5Cn'%0A signed_headers = 'host;x-amz-date'%0A payload_hash = hashlib.sha256('').hexdigest()%0A%0A canonical_request = method + '%5Cn' + canonical_uri + '%5Cn' + canonical_querystring + '%5Cn' + canonical_headers + '%5Cn' + signed_headers + '%5Cn' + payload_hash%0A algorithm = 'AWS4-HMAC-SHA256'%0A credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request'%0A string_to_sign = algorithm + '%5Cn' + amz_date + '%5Cn' + credential_scope + '%5Cn' + hashlib.sha256(canonical_request).hexdigest()%0A signing_key = self.getSignatureKey(kwargs%5B'secret_key'%5D, datestamp, region, service)%0A signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'), hashlib.sha256).hexdigest()%0A authorization_header = algorithm + ' ' + 'Credential=' + kwargs%5B'access_key'%5D + '/' + credential_scope + ', ' + 'SignedHeaders=' + signed_headers + ', ' + 'Signature=' + signature%0A%0A del kwargs%5B'access_key'%5D%0A del kwargs%5B'secret_key'%5D%0A headers = kwargs.get('headers', %7B%7D)%0A headers.update(%7B'x-amz-date':amz_date, 'Authorization':authorization_header%7D)%0A kwargs%5B'headers'%5D = headers %0A
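Decoded, the patch inserts an AWS Signature Version 4 signing block before the super().__init__ call; note it relies on a getSignatureKey helper and on access_key/secret_key kwargs that the patch itself does not define. Key lines after the commit:

        amz_date = t.strftime('%Y%m%dT%H%M%SZ')
        datestamp = t.strftime('%Y%m%d')

        canonical_headers = 'host:' + host + '\n' + 'x-amz-date:' + amz_date + '\n'
        signed_headers = 'host;x-amz-date'
        payload_hash = hashlib.sha256('').hexdigest()

        canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash
        ...
        signing_key = self.getSignatureKey(kwargs['secret_key'], datestamp, region, service)
        signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'), hashlib.sha256).hexdigest()
        ...
        headers.update({'x-amz-date': amz_date, 'Authorization': authorization_header})
        kwargs['headers'] = headers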
b751cd9cfd9286431f37525e57b713270addd69c
Version bump
iam_syncr/__init__.py
iam_syncr/__init__.py
VERSION="0.2.3"
Python
0.000001
@@ -10,7 +10,7 @@ 0.2. -3 +4 %22%0A
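Decoded, the patch is a one-character version bump; after the commit the file reads:

    VERSION="0.2.4"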
2166f52ce5da81bf8f28a3dbbc92145b0913db07
Update usage of layouts.get_layout
examples/basics/visuals/graph.py
examples/basics/visuals/graph.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
This example demonstrates how to visualise a NetworkX graph using the
GraphVisual.
"""

import sys

import networkx as nx

from vispy import app, gloo, visuals
from vispy.visuals.graphs import layouts
from vispy.visuals.transforms import STTransform


class Canvas(app.Canvas):
    def __init__(self):
        app.Canvas.__init__(self, title="Simple NetworkX Graph",
                            keys="interactive", size=(600, 600))

        self.graph = nx.fast_gnp_random_graph(100, 0.02)
        self.visual = visuals.GraphVisual(
            nx.adjacency_matrix(self.graph),
            layout=layouts.get('force_directed'),
            line_color=(1.0, 1.0, 1.0, 1.0),
            arrow_type="stealth",
            arrow_size=7.5,
            node_symbol="disc",
            node_size=10,
            face_color="red",
            border_width=0.0,
            animate=True
        )

        self.visual.events.update.connect(lambda evt: self.update())
        self.visual.transform = STTransform(self.visual_size, (20, 20))

        self.timer = app.Timer(interval=0, connect=self.animate, start=True)

        self.show()

    @property
    def visual_size(self):
        return (
            self.physical_size[0] - 40,
            self.physical_size[1] - 40
        )

    def on_resize(self, event):
        self.visual.transform.scale = self.visual_size

        vp = (0, 0, self.physical_size[0], self.physical_size[1])
        self.context.set_viewport(*vp)
        self.visual.transforms.configure(canvas=self, viewport=vp)

    def on_draw(self, event):
        gloo.clear('black')
        self.visual.draw()

    def animate(self, event):
        ready = self.visual.animate_layout()

        if ready:
            self.timer.disconnect(self.animate)


if __name__ == '__main__':
    win = Canvas()

    if sys.flags.interactive != 1:
        app.run()
Python
0.000001
@@ -264,16 +264,35 @@ rt sys%0A%0A +import numpy as np%0A import n @@ -669,14 +669,32 @@ (100 +0 , 0.0 -2 +006, directed=True )%0A%0A @@ -735,16 +735,74 @@ Visual(%0A + # np.asarray(nx.to_numpy_matrix(self.graph)),%0A @@ -868,16 +868,23 @@ outs.get +_layout ('force_ @@ -1001,10 +1001,9 @@ ize= -7. +1 5,%0A @@ -1143,16 +1143,43 @@ ate=True +,%0A directed=True %0A
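Decoded, the patch switches to the layouts.get_layout accessor (the rename named in the subject) and also grows the random graph, makes it directed, and enlarges the arrows; the construction after the commit reads roughly:

    import numpy as np
    ...
            self.graph = nx.fast_gnp_random_graph(1000, 0.0006, directed=True)
            self.visual = visuals.GraphVisual(
                # np.asarray(nx.to_numpy_matrix(self.graph)),
                nx.adjacency_matrix(self.graph),
                layout=layouts.get_layout('force_directed'),
                ...
                arrow_size=15,
                ...
                animate=True,
                directed=True
            )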
2a8d1fb9fca47078334a0c030b3c99d66bb5eb13
Rename _run_mover into _run_dds_put
delivery/services/dds_service.py
delivery/services/dds_service.py
import os.path
import shutil
import logging
import re
import json

from tornado import gen

from delivery.models.db_models import StagingStatus, DeliveryStatus
from delivery.exceptions import ProjectNotFoundException, TooManyProjectsFound, InvalidStatusException

log = logging.getLogger(__name__)


class DDSService(object):

    def __init__(self, external_program_service, staging_service, delivery_repo, session_factory, dds_conf):
        self.external_program_service = external_program_service
        self.mover_external_program_service = self.external_program_service
        self.staging_service = staging_service
        self.delivery_repo = delivery_repo
        self.session_factory = session_factory
        self.dds_conf = dds_conf

    @staticmethod
    @gen.coroutine
    def _run_mover(delivery_order_id, delivery_order_repo, external_program_service, session_factory, dds_conf):
        session = session_factory()

        # This is a somewhat hacky work-around to the problem that objects created in one
        # thread, and thus associated with another session cannot be accessed by another
        # thread, therefore it is re-materialized in here...
        delivery_order = delivery_order_repo.get_delivery_order_by_id(delivery_order_id, session)
        try:
            cmd = [
                'dds',
                '-tp', dds_conf["token_path"],
                '-l', dds_conf["log_path"],
                'data', 'put',
                '--source', delivery_order.delivery_source,
                '-p', delivery_order.dds_project_id,
                '--silent',
            ]
            log.debug("Running dds with cmd: {}".format(" ".join(cmd)))

            execution = external_program_service.run(cmd)
            delivery_order.delivery_status = DeliveryStatus.delivery_in_progress
            delivery_order.mover_pid = execution.pid
            session.commit()

            execution_result = yield external_program_service.wait_for_execution(execution)
            if execution_result.status_code == 0:
                delivery_order.delivery_status = DeliveryStatus.delivery_successful
                log.info(f"Successfully delivered: {delivery_order}")
            else:
                delivery_order.delivery_status = DeliveryStatus.delivery_failed
                error_msg = f"Failed to deliver: {delivery_order}. DDS returned status code: {execution_result.status_code}"
                log.error(error_msg)
                raise RuntimeError(error_msg)
        except Exception as e:
            delivery_order.delivery_status = DeliveryStatus.delivery_failed
            log.error(f"Failed to deliver: {delivery_order} because this exception was logged: {e}")
            raise e
        finally:
            # Always commit the state change to the database
            session.commit()

    @staticmethod
    @gen.coroutine
    def _get_dds_project_id(delivery_project, external_program_service):
        cmd = ['dds', 'project', 'ls', '--json']
        execution = external_program_service.run(cmd)
        execution_result = yield external_program_service.wait_for_execution(execution)

        if execution_result.status_code == 0:
            projects = [project for project in json.loads(execution_result.stdout)
                        if project['Title'] == delivery_project]
            if len(projects) == 1:
                project_id = projects[0]['Project ID']
                log.info(f"Fetched DDS project id for project {delivery_project}: {project_id}")
                return project_id
            elif len(projects) == 0:
                error_msg = f"Project {delivery_project} could not be found in DDS."
                log.error(error_msg)
                raise ProjectNotFoundException(error_msg)
            else:
                error_msg = f"Multiple projects found with name {delivery_project}."
                log.error(error_msg)
                raise TooManyProjectsFound(error_msg)
        else:
            error_msg = f"Project {delivery_project} could not be found in DDS. DDS returned status code: {execution_result.status_code}, DDS stderr: {execution_result.stderr}"
            log.error(error_msg)
            raise ProjectNotFoundException(error_msg)

    @gen.coroutine
    def deliver_by_staging_id(self, staging_id, delivery_project, md5sum_file, skip_mover=False):
        stage_order = self.staging_service.get_stage_order_by_id(staging_id)
        if not stage_order or not stage_order.status == StagingStatus.staging_successful:
            raise InvalidStatusException("Only deliver by staging_id if it has a successful status!"
                                         "Staging order was: {}".format(stage_order))

        if not skip_mover:
            dds_project_id = yield DDSService._get_dds_project_id(delivery_project, self.mover_external_program_service)
        else:
            dds_project_id = None

        delivery_order = self.delivery_repo.create_delivery_order(delivery_source=stage_order.get_staging_path(),
                                                                  delivery_project=delivery_project,
                                                                  delivery_status=DeliveryStatus.pending,
                                                                  staging_order_id=staging_id,
                                                                  dds_project_id=dds_project_id,
                                                                  md5sum_file=md5sum_file)

        args_for_run_mover = {'delivery_order_id': delivery_order.id,
                              'delivery_order_repo': self.delivery_repo,
                              'external_program_service': self.mover_external_program_service,
                              'session_factory': self.session_factory,
                              'dds_conf': self.dds_conf
                              }

        if skip_mover:
            session = self.session_factory()
            delivery_order.delivery_status = DeliveryStatus.delivery_skipped
            session.commit()
        else:
            yield DDSService._run_mover(**args_for_run_mover)

            logging.info(f"Removing staged runfolder at {stage_order.staging_target}")
            shutil.rmtree(stage_order.staging_target)

        return delivery_order.id

    def get_delivery_order_by_id(self, delivery_order_id):
        return self.delivery_repo.get_delivery_order_by_id(delivery_order_id)

    @gen.coroutine
    def update_delivery_status(self, delivery_order_id):
        """
        Check delivery status and update the delivery database accordingly
        """
        # NB: this is done automatically with the new DDS implementation now.
        return self.get_delivery_order_by_id(delivery_order_id)
Python
0.999946
@@ -790,21 +790,23 @@ ef _run_ -mover +dds_put (deliver @@ -5593,21 +5593,23 @@ for_run_ -mover +dds_put = %7B'del @@ -6194,21 +6194,23 @@ ce._run_ -mover +dds_put (**args_ @@ -6217,21 +6217,23 @@ for_run_ -mover +dds_put )%0A%0A
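Decoded, the patch is a pure rename of the coroutine and the names built around it, with no behaviour change:

    def _run_dds_put(delivery_order_id, delivery_order_repo, external_program_service, session_factory, dds_conf):
    ...
        args_for_run_dds_put = {'delivery_order_id': delivery_order.id, ...}
    ...
            yield DDSService._run_dds_put(**args_for_run_dds_put)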
e43aa23b3d4b7d3319e4b2766cdb4a9b9382954b
Fix typo
django_tricks/models/abstract.py
django_tricks/models/abstract.py
from uuid import uuid4

from django.db import models

from .mixins import MPAwareModel

treebeard = True
try:
    from treebeard.mp_tree import MP_Node
except ImportError:
    treebeard = False


class UniqueTokenModel(models.Model):
    token = models.CharField(max_length=32, unique=True, blank=True)

    class Meta:
        abstract = True

    def get_token(self):
        return uuid4().hext

    def save(self, **kwargs):
        if not self.token:
            self.token = self.get_token()
        super().save(**kwargs)


if treebeard:
    class MaterializedPathNode(MPAwareModel, MP_Node):
        slug = models.SlugField(max_length=255, db_index=True, unique=False, blank=True)
        node_order_by = ['name']

        class Meta:
            abstract = True
Python
0.999999
@@ -379,16 +379,20 @@ return +str( uuid4(). @@ -394,17 +394,17 @@ d4().hex -t +) %0A%0A de
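Decoded, the patch fixes the .hext typo (an AttributeError at runtime) and wraps the hex digest in str():

        def get_token(self):
            return str(uuid4().hex)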
0d0bc3b5c1dad86cd5b2d7d90925d5722f2be6e8
Add locale fuzzer to FUZZERS_MISSING_CORPORA
test/fuzz/test_runner.py
test/fuzz/test_runner.py
#!/usr/bin/env python3
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run fuzz test targets.
"""

import argparse
import configparser
import os
import sys
import subprocess
import logging

# Fuzzers known to lack a seed corpus in https://github.com/bitcoin-core/qa-assets/tree/master/fuzz_seed_corpus
FUZZERS_MISSING_CORPORA = [
    "addr_info_deserialize",
    "asmap",
    "base_encode_decode",
    "block",
    "block_file_info_deserialize",
    "block_filter_deserialize",
    "block_header_and_short_txids_deserialize",
    "bloom_filter",
    "decode_tx",
    "fee_rate_deserialize",
    "flat_file_pos_deserialize",
    "float",
    "hex",
    "integer",
    "key",
    "key_origin_info_deserialize",
    "merkle_block_deserialize",
    "out_point_deserialize",
    "p2p_transport_deserializer",
    "parse_hd_keypath",
    "parse_numbers",
    "parse_script",
    "parse_univalue",
    "partial_merkle_tree_deserialize",
    "partially_signed_transaction_deserialize",
    "prefilled_transaction_deserialize",
    "psbt_input_deserialize",
    "psbt_output_deserialize",
    "pub_key_deserialize",
    "rolling_bloom_filter",
    "script_deserialize",
    "strprintf",
    "sub_net_deserialize",
    "tx_in",
    "tx_in_deserialize",
    "tx_out",
]


def main():
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        "-l",
        "--loglevel",
        dest="loglevel",
        default="INFO",
        help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console.",
    )
    parser.add_argument(
        '--export_coverage',
        action='store_true',
        help='If true, export coverage information to files in the seed corpus',
    )
    parser.add_argument(
        '--valgrind',
        action='store_true',
        help='If true, run fuzzing binaries under the valgrind memory error detector',
    )
    parser.add_argument(
        '-x',
        '--exclude',
        help="A comma-separated list of targets to exclude",
    )
    parser.add_argument(
        'seed_dir',
        help='The seed corpus to run on (must contain subfolders for each fuzz target).',
    )
    parser.add_argument(
        'target',
        nargs='*',
        help='The target(s) to run. Default is to run all targets.',
    )

    args = parser.parse_args()

    # Set up logging
    logging.basicConfig(
        format='%(message)s',
        level=int(args.loglevel) if args.loglevel.isdigit() else args.loglevel.upper(),
    )

    # Read config generated by configure.
    config = configparser.ConfigParser()
    configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
    config.read_file(open(configfile, encoding="utf8"))

    if not config["components"].getboolean("ENABLE_FUZZ"):
        logging.error("Must have fuzz targets built")
        sys.exit(1)

    # Build list of tests
    test_list_all = parse_test_list(makefile=os.path.join(config["environment"]["SRCDIR"], 'src', 'Makefile.test.include'))

    if not test_list_all:
        logging.error("No fuzz targets found")
        sys.exit(1)

    logging.debug("{} fuzz target(s) found: {}".format(len(test_list_all), " ".join(sorted(test_list_all))))

    args.target = args.target or test_list_all  # By default run all
    test_list_error = list(set(args.target).difference(set(test_list_all)))
    if test_list_error:
        logging.error("Unknown fuzz targets selected: {}".format(test_list_error))
    test_list_selection = list(set(test_list_all).intersection(set(args.target)))
    if not test_list_selection:
        logging.error("No fuzz targets selected")
    if args.exclude:
        for excluded_target in args.exclude.split(","):
            if excluded_target not in test_list_selection:
                logging.error("Target \"{}\" not found in current target list.".format(excluded_target))
                continue
            test_list_selection.remove(excluded_target)
    test_list_selection.sort()

    logging.info("{} of {} detected fuzz target(s) selected: {}".format(len(test_list_selection), len(test_list_all), " ".join(test_list_selection)))

    try:
        help_output = subprocess.run(
            args=[
                os.path.join(config["environment"]["BUILDDIR"], 'src', 'test', 'fuzz', test_list_selection[0]),
                '-help=1',
            ],
            timeout=20,
            check=True,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        ).stderr
        if "libFuzzer" not in help_output:
            logging.error("Must be built with libFuzzer")
            sys.exit(1)
    except subprocess.TimeoutExpired:
        logging.error("subprocess timed out: Currently only libFuzzer is supported")
        sys.exit(1)

    run_once(
        corpus=args.seed_dir,
        test_list=test_list_selection,
        build_dir=config["environment"]["BUILDDIR"],
        export_coverage=args.export_coverage,
        use_valgrind=args.valgrind,
    )


def run_once(*, corpus, test_list, build_dir, export_coverage, use_valgrind):
    for t in test_list:
        corpus_path = os.path.join(corpus, t)
        if t in FUZZERS_MISSING_CORPORA:
            os.makedirs(corpus_path, exist_ok=True)
        args = [
            os.path.join(build_dir, 'src', 'test', 'fuzz', t),
            '-runs=1',
            corpus_path,
        ]
        if use_valgrind:
            args = ['valgrind', '--quiet', '--error-exitcode=1'] + args
        logging.debug('Run {} with args {}'.format(t, args))
        result = subprocess.run(args, stderr=subprocess.PIPE, universal_newlines=True)
        output = result.stderr
        logging.debug('Output: {}'.format(output))
        try:
            result.check_returncode()
        except subprocess.CalledProcessError as e:
            if e.stdout:
                logging.info(e.stdout)
            if e.stderr:
                logging.info(e.stderr)
            logging.info("Target \"{}\" failed with exit code {}: {}".format(t, e.returncode, " ".join(args)))
            sys.exit(1)
        if not export_coverage:
            continue
        for l in output.splitlines():
            if 'INITED' in l:
                with open(os.path.join(corpus, t + '_coverage'), 'w', encoding='utf-8') as cov_file:
                    cov_file.write(l)
                break


def parse_test_list(makefile):
    with open(makefile, encoding='utf-8') as makefile_test:
        test_list_all = []
        read_targets = False
        for line in makefile_test.readlines():
            line = line.strip().replace('test/fuzz/', '').replace(' \\', '')
            if read_targets:
                if not line:
                    break
                test_list_all.append(line)
                continue

            if line == 'FUZZ_TARGETS =':
                read_targets = True
    return test_list_all


if __name__ == '__main__':
    main()
Python
0.000001
@@ -839,24 +839,38 @@ serialize%22,%0A + %22locale%22,%0A %22merkle_
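Decoded, the patch adds the locale target to FUZZERS_MISSING_CORPORA, keeping the list alphabetical:

        "key_origin_info_deserialize",
        "locale",
        "merkle_block_deserialize",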
8b5f8c1baa44b11bbe8790bd4eaa7ffdd8b0990b
fix comments
telemeta/util/kdenlive/session.py
telemeta/util/kdenlive/session.py
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2013 Parisson SARL

# This software is a computer program whose purpose is to backup, analyse,
# transcode and stream any audio content with its metadata over a web frontend.

# This software is governed by the CeCILL license under French law and
# abiding by the rules of distribution of free software. You can use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".

# As a counterpart to the access to the source code and rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty and the software's author, the holder of the
# economic rights, and the successive licensors have only limited
# liability.

# In this respect, the user's attention is drawn to the risks associated
# with loading, using, modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean that it is complicated to manipulate, and that also
# therefore means that it is reserved for developers and experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and, more generally, to use and operate it in the
# same conditions as regards security.

# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.

# Authors: Guillaume Pellerin <yomguy@parisson.com>

import time

from telemeta.util.xmltodict2 import *


class KDEnLiveSession(object):

    def __init__(self, path):
        self.session = xmltodict(path)

    def entries(self):
        entries = []
        for attr in self.session['children']:
            if 'playlist' in attr['name'] and 'children' in attr:
                for att in attr['children']:
                    if 'entry' in att['name'] and att['attributes']['producer'] != 'black':
                        entries.append(att['attributes'])
        return entries

    def video_entries(self):
        entries = []
        for attr in self.session['children']:
            if 'playlist' in attr['name'] and 'children' in attr:
                for att in attr['children']:
                    if 'entry' in att['name'] and att['attributes']['producer'] != 'black' \
                            and 'video' in att['attributes']['producer']:
                        entries.append(att['attributes'])
        return entries

    def entries_sorted(self):
        return sorted(self.entries(), key=lambda k: int(k['in']), reverse=False)

    def entries_video_seconds(self):
        fps = float(self.profile()['frame_rate_num'])
        list = []
        entries = self.video_entries()
        for i in range(0, len(entries)-1):
            id = entries[i]['producer'].split('_')[0]
            t_in = int(entries[i]['in'])/fps
            t_out = int(entries[i]['out'])/fps
            if i == 0:
                t = 0
            else:
                t = list[i-1]['t'] + int(entries[i-1]['out'])/fps - int(entries[i-1]['in'])/fps
            list.append({'id': id, 't': t, 'in': t_in, 'out': t_out})
        return list

    def cuts(self, entries):
        i = 0
        cuts = [0, ]
        for entry in entries:
            if i > 0:
                cuts.append(cuts[i-1] + int(entries[i]['in'])-int(entries[i-1]['out']))
            i += 1
        return cuts

    def first_video_frame(self):
        return int(self.entries_sorted()[0]['in'])

    def profile(self):
        for attr in self.session['children']:
            if 'profile' in attr['name']:
                return attr['attributes']

    def markers_relative(self, offset=0):
        markers = []
        entries = self.entries_video_seconds()
        for attr in self.session['children']:
            if 'kdenlivedoc' in attr['name']:
                for att in attr['children']:
                    if 'markers' in att['name'] and 'children' in att.keys():
                        for at in att['children']:
                            if 'marker' in at['name']:
                                rel_time = float(at['attributes']['time'].replace(',', '.'))
                                id = at['attributes']['id']
                                j = 0
                                abs_time = 0
                                for entry in entries:
                                    if rel_time > entry['in'] and rel_time < entry['out'] and id == entry['id']:
                                        if j != 0:
                                            abs_time = entry['t'] + (rel_time - entry['in'])
                                        print abs_time
                                        break
                                    j += 1
                                at['attributes']['time'] = abs_time
                                at['attributes']['session_timecode'] = time.strftime('%H:%M:%S', time.gmtime(abs_time+offset))
                                markers.append(at['attributes'])
        return markers
Python
0.000001
@@ -3495,16 +3495,158 @@ utes'%5D%0A%0A +%09def fix_text(self, text):%0A%09%09try:%0A%09%09%09s = text.split(' ')%0A%09%09%09i = int(s%5B1%5D)%0A%09%09%09s.insert(2, ':')%0A%09%09%09return ' '.join(s)%0A%09%09except:%0A%09%09%09return text%0A%0A %09def mar @@ -3676,16 +3676,16 @@ set=0):%0A - %09%09marker @@ -4108,24 +4108,25 @@ bs_time = 0%0A +%0A %09%09%09%09%09%09%09%09for @@ -4369,16 +4369,17 @@ %09j += 1%0A +%0A %09%09%09%09%09%09%09%09 @@ -4406,32 +4406,32 @@ me'%5D = abs_time%0A - %09%09%09%09%09%09%09%09at%5B'attr @@ -4517,16 +4517,97 @@ ffset))%0A +%09%09%09%09%09%09%09%09at%5B'attributes'%5D%5B'comment'%5D = self.fix_text(at%5B'attributes'%5D%5B'comment'%5D)%0A %09%09%09%09 %09%09%09
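Decoded (%09 is a tab: the original file is tab-indented, though the old_contents above is shown with spaces), the main addition in this hunk is a small fix_text helper on KDEnLiveSession, which a later part of the same hunk then applies to each marker's comment attribute. A sketch of the decoded method, re-indented with spaces:

def fix_text(self, text):
    # Decoded from the hunk above: split on single spaces and, when the
    # second token parses as an integer, insert a ':' token after it.
    try:
        s = text.split(' ')
        i = int(s[1])      # only used to trigger the exception path
        s.insert(2, ':')
        return ' '.join(s)
    except:
        return text        # any failure (IndexError, ValueError) leaves the text unchanged

The bare except and the unused i are in the decoded hunk as-is; i exists only so that a non-numeric s[1] raises and falls through to returning the original text.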
54b720ded25f04d51e60673044b2d1320f870d4d
Fix Django importlib deprecation warnings
impersonate/helpers.py
impersonate/helpers.py
import re
from django.conf import settings
from django.utils.safestring import mark_safe
from django.utils.importlib import import_module
from django.core.paginator import Paginator, EmptyPage

try:
    # Django 1.5 check
    from django.contrib.auth import get_user_model
except ImportError:
    from django.contrib.auth.models import User
else:
    User = get_user_model()


def get_redir_path(request=None):
    nextval = None
    redirect_field_name = getattr(
        settings,
        'IMPERSONATE_REDIRECT_FIELD_NAME',
        None,
    )
    if request and redirect_field_name:
        nextval = request.GET.get(redirect_field_name, None)
    return nextval or getattr(
        settings,
        'IMPERSONATE_REDIRECT_URL',
        getattr(settings, 'LOGIN_REDIRECT_URL', u'/'),
    )


def get_redir_arg(request):
    redirect_field_name = getattr(
        settings,
        'IMPERSONATE_REDIRECT_FIELD_NAME',
        None,
    )
    if redirect_field_name:
        nextval = request.GET.get(redirect_field_name, None)
        if nextval:
            return u'?{0}={1}'.format(redirect_field_name, nextval)
    return u''


def get_redir_field(request):
    redirect_field_name = getattr(
        settings,
        'IMPERSONATE_REDIRECT_FIELD_NAME',
        None,
    )
    if redirect_field_name:
        nextval = request.GET.get(redirect_field_name, None)
        if nextval:
            return mark_safe(
                u'<input type="hidden" name="{0}" value="{1}"/>'.format(
                    redirect_field_name,
                    nextval,
                )
            )
    return u''


def get_paginator(request, qs):
    try:
        page_number = int(request.GET.get('page', 1))
    except ValueError:
        page_number = 1
    paginator = Paginator(
        qs,
        int(getattr(settings, 'IMPERSONATE_PAGINATE_COUNT', 20)),
    )
    try:
        page = paginator.page(page_number)
    except EmptyPage:
        page = None
    return (paginator, page, page_number)


def check_allow_staff():
    return (not getattr(settings, 'IMPERSONATE_REQUIRE_SUPERUSER', False))


def users_impersonable(request):
    ''' Returns a QuerySet of users that this user can impersonate.
        Uses the IMPERSONATE_CUSTOM_USER_QUERYSET if set, else,
        it returns all users
    '''
    if hasattr(settings, 'IMPERSONATE_CUSTOM_USER_QUERYSET'):
        custom_queryset_func = import_func_from_string(
            settings.IMPERSONATE_CUSTOM_USER_QUERYSET
        )
        return custom_queryset_func(request)
    else:
        return User.objects.all()


def check_allow_for_user(request, end_user):
    ''' Return True if some request can impersonate end_user
    '''
    if check_allow_impersonate(request):
        # start user can impersonate
        # Can impersonate superusers if IMPERSONATE_ALLOW_SUPERUSER is True
        # Can impersonate anyone who is in your queryset of
        # 'who i can impersonate'.
        allow_superusers = getattr(
            settings,
            'IMPERSONATE_ALLOW_SUPERUSER',
            False,
        )
        upk = end_user.pk
        return (
            ((request.user.is_superuser and allow_superusers)
                or not end_user.is_superuser)
            and users_impersonable(request).filter(pk=upk).exists()
        )
    # start user not allowed impersonate at all
    return False


def import_func_from_string(string_name):
    ''' Given a string like 'mod.mod2.funcname' which refers to a function,
        return that function so it can be called
    '''
    mod_name, func_name = string_name.rsplit('.', 1)
    mod = import_module(mod_name)
    return getattr(mod, func_name)


def check_allow_impersonate(request):
    ''' Returns True if this request is allowed to do any impersonation.
        Uses the IMPERSONATE_CUSTOM_ALLOW function if required, else
        looks at superuser/staff status and IMPERSONATE_REQUIRE_SUPERUSER
    '''
    if hasattr(settings, 'IMPERSONATE_CUSTOM_ALLOW'):
        custom_allow_func = \
            import_func_from_string(settings.IMPERSONATE_CUSTOM_ALLOW)
        return custom_allow_func(request)
    else:
        # default allow checking:
        if not request.user.is_superuser:
            if not request.user.is_staff or not check_allow_staff():
                return False
        return True


def check_allow_for_uri(uri):
    uri = uri.lstrip('/')
    exclusions = getattr(settings, 'IMPERSONATE_URI_EXCLUSIONS', (r'^admin/',))
    if not isinstance(exclusions, (list, tuple)):
        exclusions = (exclusions,)

    for exclusion in exclusions:
        if re.search(exclusion, uri):
            return False
    return True
Python
0.000005
@@ -86,57 +86,8 @@ afe%0A -from django.utils.importlib import import_module%0A from @@ -316,24 +316,157 @@ er_model()%0A%0A +try:%0A from importlib import import_module # Python 2.7%0Aexcept ImportError:%0A from django.utils.importlib import import_module%0A%0A %0Adef get_red
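Decoded, this hunk drops the unconditional django.utils.importlib import near the top of the file and, just after User = get_user_model(), prefers the standard-library module with a guarded fallback (the "# Python 2.7" comment is verbatim from the hunk; the exact indentation is hedged, since the dump collapses runs of spaces):

try:
    from importlib import import_module  # Python 2.7
except ImportError:
    from django.utils.importlib import import_module

django.utils.importlib was a bundled copy of the stdlib module carried for pre-2.7 Pythons and was deprecated in Django 1.7 (removed in 1.9), so importing the stdlib version first is what silences the deprecation warning named in the commit subject.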