Dataset schema (one row per record below):

  field         type           range / classes
  ------------  -------------  -----------------
  commit        stringlengths  40 - 40
  subject       stringlengths  1 - 3.25k
  old_file      stringlengths  4 - 311
  new_file      stringlengths  4 - 311
  old_contents  stringlengths  0 - 26.3k
  lang          stringclasses  3 values
  proba         float64        0 - 1
  diff          stringlengths  0 - 7.82k
d39dbc227eab571312533ec25bdad1d6542ffac8
ADD license text
pages/desktop_page.py
pages/desktop_page.py
from gi.repository import Gtk

import common
import gettext
import locale
import logging


def init_localization():
    locale.setlocale(locale.LC_ALL, '')  # use user's preferred locale
    # take first two characters of country code
    loc = locale.getlocale()
    filename = "../lang/desktop_%s.mo" % locale.getlocale()[0][0:2]

    try:
        logging.debug("Opening message file %s for locale %s", filename, loc[0])
        trans = gettext.GNUTranslations(open(filename, "rb"))
    except IOError:
        logging.debug("Locale not found. Using default messages")
        trans = gettext.NullTranslations()

    trans.install()

if __name__ == '__main__':
    init_localization()


class DesktopPage(Gtk.ScrolledWindow):
    def __init__(self):
        self.intro()

    def intro(self):
        _desktop_box = common.init_flowbox()

        _unity_button = \
            common.set_button_image(common.IMAGE_PATH + "unity_top.png")
        _gnome3_button = \
            common.set_button_image(common.IMAGE_PATH + "gnome3_top.png")
        _kde5_button = \
            common.set_button_image(common.IMAGE_PATH + "kde5_top.png")

        _unity_button.connect("clicked", self.on_desktop_button_clicked, "unity")
        _gnome3_button.connect("clicked", self.on_desktop_button_clicked, "gnome3")
        _kde5_button.connect("clicked", self.on_desktop_button_clicked, "kde5")

        _desktop_box.insert(_unity_button, 0)
        _desktop_box.insert(_gnome3_button, 1)
        _desktop_box.insert(_kde5_button, 2)

        _desktop_scrolled = common.init_scroll(_desktop_box)
        return _desktop_scrolled

    def get_desktop_page(self, desktop):
        _grid = Gtk.Grid()
        _image = Gtk.Image()
        _text = Gtk.TextView()
        _buffer = Gtk.TextBuffer()
        _return_button = Gtk.Button(_("Return"))
        _install_button = Gtk.Button(_("Install"))

        _file = open(common.TEXT_PATH + desktop, 'r')
        _image.set_from_file(common.IMAGE_PATH + desktop + '.png')
        _image.set_alignment(0.5, 0.5)
        _buffer.set_text(_file.read())
        _text.set_buffer(_buffer)
        _text.set_wrap_mode(Gtk.WrapMode.WORD)
        _return_button.connect("clicked", self.on_return_button_clicked, None)

        _grid.set_column_homogeneous(True)
        _grid.attach(_image, 0, 0, 2, 1)
        _grid.attach(_text, 0, 1, 2, 1)
        _grid.attach(_return_button, 0, 2, 1, 1)
        _grid.attach(_install_button, 1, 2, 1, 1)

        _scroll = common.init_scroll(_grid)
        _file.close()
        return _scroll

    def on_desktop_button_clicked(self, button, data):
        common.WINDOW.desktop_page = self.get_desktop_page(data)
        _label = Gtk.Label(_("Choose Desktop Environment"))
        common.WINDOW.notebook.remove_page(0)
        common.WINDOW.notebook.prepend_page(common.WINDOW.desktop_page, _label)
        common.WINDOW.show_all()
        common.WINDOW.notebook.set_current_page(0)

    def on_return_button_clicked(self, button, data):
        common.WINDOW.desktop_page = self.intro()
        _label = Gtk.Label(_("Choose Desktop Environment"))
        common.WINDOW.notebook.remove_page(0)
        common.WINDOW.notebook.prepend_page(common.WINDOW.desktop_page, _label)
        common.WINDOW.show_all()
        common.WINDOW.notebook.set_current_page(0)

    def on_install_button_clicked(self, button, data):
        self.popup_desktop_install_dialog(data)
Python
0
@@ -1,12 +1,256 @@
+# Part of ubuntu-easy-config https://github.com/minwook-shin/ubuntu-easy-config
+#
+# See LICENSE file for copyright and license details
+#
+# This program written for beginner for ubuntu and "ubuntu Setting" by minwook Shin and hedone21, fmowl10
+
 from gi.repo
ee487dd220612dfff9cb5e63c602490371e2bdac
Update repo_metadata.py
gitmostwanted/tasks/repo_metadata.py
gitmostwanted/tasks/repo_metadata.py
from gitmostwanted.app import app, db, celery
from gitmostwanted.models.repo import Repo, RepoMean
from gitmostwanted.lib.github import api
from sqlalchemy.sql import func, expression
from datetime import datetime, timedelta


@celery.task()
def metadata_maturity(num_months):
    repos = Repo.query\
        .filter(Repo.created_at <= datetime.now() + timedelta(days=num_months * 30 * -1))\
        .filter(Repo.mature.is_(False))

    for repo in repos:
        repo.mature = True

    db.session.commit()
    return repos.count()


@celery.task()
def metadata_refresh(num_days):
    repos = Repo.query\
        .filter(
            Repo.checked_at.is_(None) |
            (Repo.checked_at <= datetime.now() + timedelta(days=num_days * -1))
        )\
        .yield_per(25)\
        .limit(300)  # GitHub allows only 3000 calls per day within a token

    for repo in repos:
        repo.checked_at = datetime.now()
        details, code = api.repo_info(repo.full_name)
        if not details:
            if 400 <= code < 500:
                repo.worth -= 1
                app.logger.info(
                    '{0} is not found, the "worth" has been decreased by 1'.format(repo.full_name)
                )
            continue

        for key in ['description', 'language', 'homepage', 'stargazers_count']:
            if getattr(repo, key) != details[key]:
                setattr(repo, key, details[key])

    db.session.commit()
    return repos.count()


@celery.task()
def metadata_trend(num_days):
    results = db.session.query(
        RepoMean.repo_id,
        func.substring_index(
            func.group_concat(
                RepoMean.value.op('ORDER BY')(expression.desc(RepoMean.created_at))
            ), ',', 2)
        )\
        .filter(RepoMean.created_at >= datetime.now() + timedelta(days=num_days * -1))\
        .group_by(RepoMean.repo_id)\
        .all()

    for result in filter(lambda x: ',' in x[1], results):
        curr, prev = result[1].split(',')
        if curr < prev:
            app.logger.info(
                'Mean value of {0} is {1}, previous was {2}. The "worth" has been decreased by 1'
                .format(result[0], curr, prev)
            )
            db.session.query(Repo)\
                .filter(Repo.id == result[0])\
                .update({Repo.worth: Repo.worth - 1})

    db.session.commit()


@celery.task()
def metadata_erase():
    cnt = Repo.query.filter(Repo.worth < 0).delete()
    db.session.commit()
    return cnt
Python
0.000002
@@ -2441,17 +2441,17 @@
 worth <
-0
+5
 ).delete
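Applying the decoded hunk to the old contents above, metadata_erase after this commit would look like the sketch below (a reconstruction, not a quote from the upstream repository):

@celery.task()
def metadata_erase():
    # after the commit, repos are erased once their "worth" falls below 5 rather than 0
    cnt = Repo.query.filter(Repo.worth < 5).delete()
    db.session.commit()
    return cnt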
6105e355cf0275e00f284ac6658454905a9b9a07
change import of tfpark
python/chronos/src/bigdl/chronos/forecaster/tfpark_forecaster.py
python/chronos/src/bigdl/chronos/forecaster/tfpark_forecaster.py
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from abc import ABCMeta, abstractmethod

from zoo.tfpark import KerasModel as TFParkKerasModel
import tensorflow as tf

from bigdl.chronos.forecaster.abstract import Forecaster


class TFParkForecaster(TFParkKerasModel, Forecaster, metaclass=ABCMeta):
    """
    Base class for TFPark KerasModel based Forecast models.
    """

    def __init__(self):
        """
        Build a tf.keras model.
        Turns the tf.keras model returned from _build into a tfpark.KerasModel
        """
        self.model = self._build()
        assert (isinstance(self.model, tf.keras.Model))
        super().__init__(self.model)

    @abstractmethod
    def _build(self):
        """
        Build a tf.keras model.

        :return: a tf.keras model (compiled)
        """
        pass
Python
0
@@ -633,11 +633,18 @@
 rom 
-zoo
+bigdl.orca
 .tfp
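Decoded, the hunk rewrites one import; the resulting line, reconstructed from the old contents plus the diff:

from bigdl.orca.tfpark import KerasModel as TFParkKerasModel  # was: from zoo.tfpark import ...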
60ecc08395eb266f09aa8587bf38aceb59a2b968
Update Scramble_String.py
Array/Scramble_String.py
Array/Scramble_String.py
Given a string s1, we may represent it as a binary tree by partitioning it to two non-empty substrings recursively.

Below is one possible representation of s1 = "great":

    great
   /    \
  gr    eat
 / \    /  \
g   r  e   at
           / \
          a   t

To scramble the string, we may choose any non-leaf node and swap its two children.

For example, if we choose the node "gr" and swap its two children, it produces a scrambled string "rgeat".

    rgeat
   /    \
  rg    eat
 / \    /  \
r   g  e   at
           / \
          a   t

We say that "rgeat" is a scrambled string of "great".

Similarly, if we continue to swap the children of nodes "eat" and "at", it produces a scrambled string "rgtae".

    rgtae
   /    \
  rg    tae
 / \    /  \
r   g  ta  e
       / \
      t   a

We say that "rgtae" is a scrambled string of "great".

Given two strings s1 and s2 of the same length, determine if s2 is a scrambled string of s1.

class Solution:
    # @return a boolean
    def isScramble(self, s1, s2):
        if len(s1) != len(s2):
            return False
        if s1 == s2:
            return True

        length = len(list(s1))
        if sorted(s1) != sorted(s2):
            return False

        for i in xrange(1, length):
            if self.isScramble(s1[:i], s2[:i]) and self.isScramble(s1[i:], s2[i:]):
                return True
            if self.isScramble(s1[:i], s2[-i:]) and self.isScramble(s1[i:], s2[:-i]):
                return True
        return False

# Note:
# Condition: 1) length_s1 != length_s2
#            2) s1 == s2, s1 and s2 are exactly equal
#            3) whether sorted(s1) and sorted(s2) are equal
#            4) compare s1[:i], s2[:i] and s1[i:], s2[i:]
#            5) compare s1[:i], s2[length_s2-i:] and s1[i:], s2[length_s2:-i]
Python
0.000002
@@ -1,12 +1,16 @@
+"""
 Given a stri
@@ -939,16 +939,20 @@
 of s1.
 
+"""
 class So
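The two hunks wrap the bare problem statement in a module docstring; a reconstruction of the resulting shape (the statement text itself is unchanged and elided here):

"""
Given a string s1, we may represent it as a binary tree by partitioning it to two non-empty substrings recursively.
# ... problem statement continues exactly as above ...
Given two strings s1 and s2 of the same length, determine if s2 is a scrambled string of s1.

"""
class Solution:
    ...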
04def6c69d5ee35edb1b2d7d2d10f7aa18b2eb47
Move fermi activation flag out of route mapper.
palm/blink_factory.py
palm/blink_factory.py
import numpy
from palm.base.model_factory import ModelFactory
from palm.blink_model import BlinkModel
from palm.blink_state_enumerator import SingleDarkState, DoubleDarkState,\
                                        SingleDarkStateEnumeratorFactory,\
                                        DoubleDarkStateEnumeratorFactory
from palm.blink_route_mapper import Route, SingleDarkRouteMapperFactory,\
                                    DoubleDarkRouteMapperFactory


class SingleDarkBlinkFactory(ModelFactory):
    '''
    This factory class creates an aggregated kinetic model with
    the following microstate topology:
        I --> A
        A <--> D
        A --> B
    '''
    def __init__(self, fermi_activation=False, MAX_A=10):
        self.state_factory = SingleDarkState
        self.route_factory = Route
        self.fermi_activation = fermi_activation
        self.MAX_A = MAX_A

    def create_model(self, parameter_set):
        self.parameter_set = parameter_set
        N = self.parameter_set.get_parameter('N')
        state_enumerator_factory = SingleDarkStateEnumeratorFactory(
            N, self.state_factory, self.MAX_A)
        state_enumerator = state_enumerator_factory.create_state_enumerator()
        route_mapper_factory = SingleDarkRouteMapperFactory(
            parameter_set=self.parameter_set,
            route_factory=self.route_factory,
            max_A=self.MAX_A)
        route_mapper = route_mapper_factory.create_route_mapper()
        new_model = BlinkModel(state_enumerator, route_mapper,
                               self.parameter_set, self.fermi_activation)
        return new_model


class DoubleDarkBlinkFactory(ModelFactory):
    '''
    This factory class creates an aggregated kinetic model with
    the following microstate topology:
        I --> A
        A <--> D1
        A <--> D2
        A --> B
    '''
    def __init__(self, fermi_activation=False, MAX_A=10):
        self.state_factory = DoubleDarkState
        self.route_factory = Route
        self.fermi_activation = fermi_activation
        self.MAX_A = MAX_A

    def create_model(self, parameter_set):
        self.parameter_set = parameter_set
        N = self.parameter_set.get_parameter('N')
        state_enumerator_factory = DoubleDarkStateEnumeratorFactory(
            N, self.state_factory, self.MAX_A)
        state_enumerator = state_enumerator_factory.create_state_enumerator()
        route_mapper_factory = DoubleDarkRouteMapperFactory(
            parameter_set=self.parameter_set,
            route_factory=self.route_factory,
            max_A=self.MAX_A,
            fermi_activation=self.fermi_activation)
        route_mapper = route_mapper_factory.create_route_mapper()
        new_model = BlinkModel(state_enumerator, route_mapper,
                               self.parameter_set)
        return new_model
Python
0
@@ -2738,80 +2738,8 @@
 AX_A
-,
-            fermi_activation=self.fermi_activation
 )
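Applying the hunk, the DoubleDark factory stops forwarding the flag to the route mapper; a reconstruction of the resulting call:

        route_mapper_factory = DoubleDarkRouteMapperFactory(
            parameter_set=self.parameter_set,
            route_factory=self.route_factory,
            max_A=self.MAX_A)  # the fermi_activation kwarg is no longer passed here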
4579231595e42674fc53b2869242fd4ac1ff4ff1
Remove some unneeded code
blaze/datashape/normalization.py
blaze/datashape/normalization.py
# -*- coding: utf-8 -*-

"""
Datashape normalization. This handles Ellipses and broadcasting.
"""

from itertools import chain
from collections import defaultdict, deque

from blaze import error
from . import transform, tzip
from .coretypes import DataShape, Ellipsis, Fixed, CType

#------------------------------------------------------------------------
# Normalization
#------------------------------------------------------------------------

def normalize(constraints, broadcasting=None):
    """
    Parameters
    ----------

    constraints : [(DataShape, DataShape)]
        List of constraints (datashape type equations)
    broadcasting: [bool]
        indicates for each constraint whether the two DataShapes broadcast

    Returns: (constraints, broadcast_env)
        A two-tuple containing a list of normalized constraints and a
        broadcasting environment listing all type variables which may
        broadcast together.
    """
    broadcasting_env = None
    result = [normalize_simple(a, b) for a, b in constraints]
    return result, broadcasting_env

def normalize_simple(a, b):
    if isinstance(a, (CType, DataShape)) and isinstance(a, (CType, DataShape)):
        a, b = normalize_constructors(a, b)
        if (type(a), type(b)) == (DataShape, DataShape):
            a, b = normalize_ellipses(a, b)
            a, b = normalize_broadcasting(a, b)
    else:
        a, b = tzip(normalize_simple, a, b)

    return a, b

#------------------------------------------------------------------------
# DataShape Normalizers
#------------------------------------------------------------------------

def normalize_constructors(a, b):
    """
    Normalize a pair (DataShape, CType) constructors by "promoting" the CType
    to a DataShape.

    Since a DataShape has at least one dimension, we know we
    have at least one broadcastind dimension.

    FIXME: We should not have CType at all! Just 0d DataShape
    """
    if isinstance(a, DataShape) and isinstance(b, CType):
        return a, DataShape(Fixed(1), b)
    elif isinstance(a, CType) and isinstance(b, DataShape):
        return DataShape(Fixed(1), a), b
    else:
        return a, b

def normalize_ellipses(a, b):
    """Eliminate ellipses in DataShape"""
    S = _normalize_ellipses(a, b)
    return substitute(S, a), substitute(S, b)

def normalize_broadcasting(a, b):
    """Add broadcasting dimensions to DataShapes"""
    return _normalize_broadcasting(a, b)

#------------------------------------------------------------------------
# Ellipses
#------------------------------------------------------------------------

def _normalize_ellipses(ds1, ds2):
    # -------------------------------------------------
    # Find ellipses

    a = [x for x in ds1.parameters if isinstance(x, Ellipsis)]
    b = [x for x in ds2.parameters if isinstance(x, Ellipsis)]
    xs, ys = list(ds1.parameters[-2::-1]), list(ds2.parameters[-2::-1])

    # -------------------------------------------------
    # Match ellipses

    if a and (len(xs) <= len(ys) or not b):
        S = match(xs, ys)
    elif b and (len(ys) <= len(xs) or not a):
        S = match(ys, xs)
    elif a or b:
        assert len(xs) == len(ys)
        S = match(xs, ys)
    else:
        return ds1, ds2 # no ellipses, nothing to do

    # -------------------------------------------------
    # Reverse the reversed matches

    for x, L in S.items():
        S[x] = L[::-1]

    # -------------------------------------------------
    # Error checking

    if a and b:
        # We have an ellipsis in either operand. We mandate that one
        # 'contains' the other, since it is unclear how to unify them if
        # they are disjoint
        [x], [y] = a, b
        if x not in S[y] and y not in S[x]:
            raise error.BlazeTypeError(
                "Unable to line up Ellipses in %s and %s" % (ds1, ds2))

        if not S[x]:
            S[x].append(y)
        if not S[y]:
            S[y].append(x)

    return S

def match(xs, ys, S=None):
    if S is None:
        S = defaultdict(list)

    xs, ys = deque(xs), deque(ys)
    while xs and ys:
        x = xs.popleft()
        if isinstance(x, Ellipsis):
            while len(ys) > len(xs):
                S[x].append(ys.popleft())
        else:
            y = ys.popleft()
            if isinstance(y, Ellipsis):
                S[y].append(x)
                xs, ys = ys, xs # match(ys, xs, S)

    return S

def substitute(S, ds):
    """Substitute a solution mapping Elipses to parameters"""
    sub_param = lambda x: S[x] if isinstance(x, Ellipsis) else [x]
    return DataShape(*chain(*map(sub_param, ds.parameters)))

#------------------------------------------------------------------------
# Broadcasting
#------------------------------------------------------------------------

def _normalize_broadcasting(a, b):
    if isinstance(a, DataShape) and isinstance(b, DataShape):
        # Create type variables for leading dimensions
        len1, len2 = len(a.parameters), len(b.parameters)
        leading = tuple(Fixed(1) for i in range(abs(len1 - len2)))

        if len1 < len2:
            a = DataShape(*leading + a.parameters)
        elif len2 < len1:
            b = DataShape(*leading + b.parameters)
    else:
        a, b = tzip(_normalize_broadcasting, a, b)

    return a, b

#------------------------------------------------------------------------
# Simplification
#------------------------------------------------------------------------

def simplify(t, solution):
    """
    Simplify constraints by eliminating Implements (e.g. '10, A : numeric') and
    type variables associated with Ellipsis (e.g. 'A..., int32'), and by
    updating the given typing solution.

    Parameters
    ----------
    t : Mono
        Blaze type

    Returns: Mono
        Simplified blaze type
    """
    return transform(Simplifier(solution), t)

class Simplifier(object):
    """Simplify a type and update a typing solution"""

    def __init__(self, S):
        self.S = S

    def Implements(self, term):
        typeset = self.S.setdefault(term.typevar, set())
        typeset.add(term.typeset)
        # typeset.update(term.typeset)
        return term.typevar

    # TODO: ellipsis
    # def Ellipsis(self, term):
    #     if term.typevar:
    #         typeset = self.S.setdefault(term.typevar, set()
    #         typeset.update(term)
    #         return term.typevar
    #     return term
Python
0.000033
@@ -1183,52 +1183,8 @@
 )):
-        a, b = normalize_constructors(a, b)
@@ -1573,558 +1573,8 @@
 --
 
-def normalize_constructors(a, b):
-    """
-    Normalize a pair (DataShape, CType) constructors by "promoting" the CType
-    to a DataShape.
-
-    Since a DataShape has at least one dimension, we know we
-    have at least one broadcastind dimension.
-
-    FIXME: We should not have CType at all! Just 0d DataShape
-    """
-    if isinstance(a, DataShape) and isinstance(b, CType):
-        return a, DataShape(Fixed(1), b)
-    elif isinstance(a, CType) and isinstance(b, DataShape):
-        return DataShape(Fixed(1), a), b
-    else:
-        return a, b
-
 def
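After both hunks, normalize_constructors and its call site are gone, so normalize_simple reduces to the sketch below (a reconstruction from the old contents plus the diff):

def normalize_simple(a, b):
    if isinstance(a, (CType, DataShape)) and isinstance(a, (CType, DataShape)):
        if (type(a), type(b)) == (DataShape, DataShape):
            a, b = normalize_ellipses(a, b)
            a, b = normalize_broadcasting(a, b)
    else:
        a, b = tzip(normalize_simple, a, b)

    return a, b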
f89f170aa8d672a891547ed7ac77d806cda697c4
Update TestWebserver.py
emission/tests/netTests/TestWebserver.py
emission/tests/netTests/TestWebserver.py
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

# Standard imports
from future import standard_library
standard_library.install_aliases()
from builtins import *
import unittest
import json
import sys
import os
import uuid
import logging
import time

# Our imports
import emission.tests.common as etc
import emission.net.api.cfc_webapp as enacw
import importlib


class TestWebserver(unittest.TestCase):
    def setUp(self):
        import shutil

        self.webserver_conf_path = "conf/net/api/webserver.conf"
        shutil.copyfile(
            "%s.sample" % self.webserver_conf_path, self.webserver_conf_path
        )
        with open(self.webserver_conf_path, "w") as fd:
            fd.write(
                json.dumps(
                    {
                        "paths": {
                            "static_path": "webapp/www",
                            "python_path": "main",
                            "log_base_dir": ".",
                            "log_file": "debug.log",
                            "404_redirect": "http://somewhere.else",
                        },
                        "server": {
                            "host": "0.0.0.0",
                            "port": "8080",
                            "timeout": "3600",
                            "auth": "skip",
                            "aggregate_call_auth": "no_auth",
                        },
                    }
                )
            )
        logging.debug("Finished setting up %s" % self.webserver_conf_path)
        with open(self.webserver_conf_path) as fd:
            logging.debug("Current values are %s" % json.load(fd))

    def tearDown(self):
        os.remove(self.webserver_conf_path)

    def test404Redirect(self):
        from emission.net.api.bottle import response

        importlib.reload(enacw)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get_header("Location"), None)

        enacw.error404("")
        self.assertEqual(response.status_code, 301)
        self.assertEqual(response.get_header("Location"), "http://somewhere.else")

    def testResolveAuth(self):
        import emission.net.api.cfc_webapp as enacw

        self.assertEqual(enacw.resolve_auth("skip"), "skip")
        self.assertEqual(enacw.resolve_auth("token_list"), "token_list")
        self.assertEqual(enacw.resolve_auth("dynamic"), "token_list")
        self.assertNotEqual(enacw.resolve_auth("dynamic"), "skip")

    from unittest import mock

    @mock.patch.dict(os.environ, {"STUDY_CONFIG": "nrel-commute"}, clear=True)
    def test_ResolveAuthWithEnvVar(self):
        importlib.reload(enacw)
        self.assertEqual(enacw.resolve_auth("dynamic"), "skip")


if __name__ == "__main__":
    etc.configLogging()
    unittest.main()
Python
0.000001
@@ -2244,61 +2244,8 @@
 f):
-        import emission.net.api.cfc_webapp as enacw
-
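With the shadowing import removed, testResolveAuth relies on the module-level enacw import; reconstructed from the old contents plus the diff:

    def testResolveAuth(self):
        self.assertEqual(enacw.resolve_auth("skip"), "skip")
        self.assertEqual(enacw.resolve_auth("token_list"), "token_list")
        self.assertEqual(enacw.resolve_auth("dynamic"), "token_list")
        self.assertNotEqual(enacw.resolve_auth("dynamic"), "skip")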
504b527e493ae6ba377671e972338ef4cf58b017
Remove accidental print statements.
nbviewer/providers/github/tests/test_client.py
nbviewer/providers/github/tests/test_client.py
# encoding: utf-8

import mock

from tornado.httpclient import AsyncHTTPClient
from tornado.testing import AsyncTestCase

from ..client import AsyncGitHubClient
from ....utils import quote


class GithubClientTest(AsyncTestCase):
    """Tests that the github API client makes the correct http requests."""
    def setUp(self):
        super(GithubClientTest, self).setUp()
        # Need a mock HTTPClient for the github client to talk to.
        self.http_client = mock.create_autospec(AsyncHTTPClient)
        # patch the enviornment so that we get a known url prefix.
        with mock.patch('os.environ.get', return_value='https://api.github.com/'):
            self.gh_client = AsyncGitHubClient(client=self.http_client)

    def _get_url(self):
        """Get the last url requested from the mock http client."""
        args, kw = self.http_client.fetch.call_args
        return args[0]

    def assertStartsWith(self, string, beginning):
        """Assert that a url has the correct beginning.

        Github API requests involve non-trivial query strings. This is useful
        when you want to compare URLs, but don't care about the querystring.
        """
        if string.startswith(beginning):
            return
        print beginning
        print string
        self.assertTrue(string.startswith(beginning),
                        '%s does not start with %s' % (string, beginning))

    def test_basic_fetch(self):
        """Test the mock http client is hit"""
        self.gh_client.fetch('https://api.github.com/url')
        self.assertTrue(self.http_client.fetch.called)

    def test_fetch_params(self):
        """Test params are passed through."""
        params = {'unique_param_name': 1}
        self.gh_client.fetch('https://api.github.com/url', params=params)
        url = self._get_url()
        self.assertTrue('unique_param_name' in url)

    def test_log_rate_limit(self):
        pass

    def test_get_repos(self):
        self.gh_client.get_repos('username')
        url = self._get_url()
        self.assertStartsWith(url, 'https://api.github.com/users/username/repos')

    def test_get_contents(self):
        user = 'username'
        repo = 'my_awesome_repo'
        path = u'möre-path'
        self.gh_client.get_contents(user, repo, path)
        url = self._get_url()
        correct_url = u'https://api.github.com' + quote(u'/repos/username/my_awesome_repo/contents/möre-path')
        self.assertStartsWith(url, correct_url)

    def test_get_branches(self):
        user = 'username'
        repo = 'my_awesome_repo'
        self.gh_client.get_branches(user, repo)
        url = self._get_url()
        correct_url = 'https://api.github.com/repos/username/my_awesome_repo/branches'
        self.assertStartsWith(url, correct_url)

    def test_get_tags(self):
        user = 'username'
        repo = 'my_awesome_repo'
        self.gh_client.get_tags(user, repo)
        url = self._get_url()
        correct_url = 'https://api.github.com/repos/username/my_awesome_repo/tags'
        self.assertStartsWith(url, correct_url)

    def test_get_tree_entry(self):
        user = 'username'
        repo = 'my_awesome_repo'
        path = 'extra-path'
        self.gh_client.get_tree_entry(user, repo, path)
        url = self._get_url()
        correct_url = 'https://api.github.com/repos/username/my_awesome_repo/git/trees/master'
        self.assertStartsWith(url, correct_url)

    def test_get_gist(self):
        gist_id = 'ap90avn23iovv2ovn2309n'
        self.gh_client.get_gist(gist_id)
        url = self._get_url()
        correct_url = 'https://api.github.com/gists/' + gist_id
        self.assertStartsWith(url, correct_url)

    def test_get_gists(self):
        user = 'username'
        self.gh_client.get_gists(user)
        url = self._get_url()
        correct_url = 'https://api.github.com/users/username/gists'
        self.assertStartsWith(url, correct_url)
Python
0.000012
@@ -1246,53 +1246,8 @@
 urn
-        print beginning
-        print string
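After the hunk, assertStartsWith no longer prints; reconstructed (docstring unchanged, omitted here):

    def assertStartsWith(self, string, beginning):
        if string.startswith(beginning):
            return
        # the stray Python 2 print statements are gone
        self.assertTrue(string.startswith(beginning),
                        '%s does not start with %s' % (string, beginning))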
37e8452ad999f42746be395d193a306f9a893dbf
Update rpc.py
ncclient/operations/third_party/juniper/rpc.py
ncclient/operations/third_party/juniper/rpc.py
from ncclient.xml_ import *
from ncclient.operations.rpc import RPC
from ncclient.operations.rpc import RPCReply
from ncclient.operations.rpc import RPCError


class GetConfiguration(RPC):
    def request(self, format='xml', filter=None):
        node = new_ele('get-configuration', {'format':format})
        if filter is not None:
            node.append(filter)
        return self._request(node)


class LoadConfiguration(RPC):
    def request(self, format='xml', action='merge', target='candidate', config=None):
        if config is not None:
            if type(config) == list:
                config = '\n'.join(config)
            if action == 'set':
                format = 'text'
            node = new_ele('load-configuration', {'action':action, 'format':format})
            if format == 'xml':
                config_node = sub_ele(node, 'configuration')
                config_node.append(config)
            if format == 'text' and not action == 'set':
                config_node = sub_ele(node, 'configuration-text').text = config
            if action == 'set' and format == 'text':
                config_node = sub_ele(node, 'configuration-set').text = config
            print to_xml(node)
            return self._request(node)


class CompareConfiguration(RPC):
    def request(self, rollback=0):
        node = new_ele('get-configuration', {'compare':'rollback', 'rollback':str(rollback)})
        return self._request(node)


class ExecuteRpc(RPC):
    def request(self, rpc):
        if isinstance(rpc, str):
            rpc = to_ele(rpc)
        return self._request(rpc)


class Command(RPC):
    def request(self, command=None, format='xml'):
        node = new_ele('command', {'format':format})
        node.text = command
        return self._request(node)


class Reboot(RPC):
    def request(self):
        node = new_ele('request-reboot')
        return self._request(node)


class Halt(RPC):
    def request(self):
        node = new_ele('request-halt')
        return self._request(node)
Python
0.000001
@@ -1191,39 +1191,8 @@
 fig
-        print to_xml(node)
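The hunk drops the stray Python 2 print; the end of LoadConfiguration.request becomes (a reconstruction):

            if action == 'set' and format == 'text':
                config_node = sub_ele(node, 'configuration-set').text = config
            # the debug line `print to_xml(node)` is removed
            return self._request(node)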
28d306758e1f7704bc7b2e945a428b790e6b6487
Fix test based on new data from FIRST.
tests/test_datafeed_usfirst_legacy_events.py
tests/test_datafeed_usfirst_legacy_events.py
import unittest2
import datetime

from google.appengine.ext import testbed

from consts.event_type import EventType
from datafeeds.datafeed_usfirst_legacy import DatafeedUsfirstLegacy


class TestDatafeedUsfirstLegacyEvents(unittest2.TestCase):
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_taskqueue_stub(root_path=".")

        self.datafeed = DatafeedUsfirstLegacy()

    def tearDown(self):
        self.testbed.deactivate()

    def test_getEvent(self):
        # test with 2011ct
        event = self.datafeed.getEventDetails(2011, "5561")

        self.assertEqual(event.key.id(), "2011ct")
        self.assertEqual(event.name, "Northeast Utilities FIRST Connecticut Regional")
        self.assertEqual(event.event_type_enum, EventType.REGIONAL)
        self.assertEqual(event.start_date, datetime.datetime(2011, 3, 31, 0, 0))
        self.assertEqual(event.end_date, datetime.datetime(2011, 4, 2, 0, 0))
        self.assertEqual(event.year, 2011)
        self.assertEqual(event.venue_address, "Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA")
        self.assertEqual(event.website, "http://www.ctfirst.org/ctr")
        self.assertEqual(event.event_short, "ct")

    def test_getEventTeams(self):
        # test with 2011ct
        teams = self.datafeed.getEventTeams(2011, "5561")

        self.assertEqual(
            [(team.team_number, team.first_tpid) for team in teams],
            [(383, 41829), (1124, 42285), (155, 41609), (3634, 51637), (999, 42215),
             (1699, 42751), (173, 41625), (175, 41629), (716, 42049), (178, 41635),
             (2170, 43331), (3146, 44577), (2168, 43335), (2067, 43175), (181, 41641),
             (1991, 43133), (3125, 44539), (2785, 44073), (1740, 42765), (1784, 42895),
             (3654, 51609), (3718, 49891), (558, 41939), (3719, 52081), (230, 41681),
             (3464, 49827), (177, 41633), (2064, 43159), (195, 41651), (3104, 44463),
             (3555, 49069), (3141, 44487), (3461, 47483), (3525, 48801), (237, 41691),
             (3182, 44547), (571, 41947), (176, 41631), (1071, 42251), (2836, 43965),
             (126, 41585), (157, 41611), (69, 41519), (1027, 42235), (663, 42007),
             (3585, 50743), (1073, 42255), (501, 41899), (869, 42131), (714, 42047),
             (1923, 42947), (743, 42051), (20, 41475), (3204, 44731), (1601, 42659),
             (2791, 43935), (533, 41919), (694, 42027)]
        )

    def test_getEventList(self):
        events = self.datafeed.getEventList(2011)

        self.assertEqual(len(events), 58)  # 58 events expected

        self.assertEqual(events[0].first_eid, "5519")
        self.assertEqual(events[0].event_type_enum, EventType.REGIONAL)
        self.assertEqual(events[0].name, "BAE Systems/Granite State Regional")

        self.assertEqual(events[1].first_eid, "5523")
        self.assertEqual(events[1].event_type_enum, EventType.REGIONAL)
        self.assertEqual(events[1].name, "New Jersey Regional")
Python
0
@@ -2680,19 +2680,19 @@
 _eid, "5
-519
+623
 ")
@@ -2798,16 +2798,202 @@
 0].name,
+ "Alamo Regional")
+
+        self.assertEqual(events[1].first_eid, "5519")
+        self.assertEqual(events[1].event_type_enum, EventType.REGIONAL)
+        self.assertEqual(events[1].name, "BAE Sy
@@ -3048,33 +3048,33 @@
 ertEqual(events[
-1
+2
 ].first_eid, "55
@@ -3102,33 +3102,33 @@
 ertEqual(events[
-1
+2
 ].event_type_enu
@@ -3174,33 +3174,33 @@
 ertEqual(events[
-1
+2
 ].name, "New Jer
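Applying all five hunks, test_getEventList now expects the Alamo Regional first; the reconstructed assertions (derived from the old contents plus the diff, not quoted from the repository):

        self.assertEqual(events[0].first_eid, "5623")
        self.assertEqual(events[0].event_type_enum, EventType.REGIONAL)
        self.assertEqual(events[0].name, "Alamo Regional")

        self.assertEqual(events[1].first_eid, "5519")
        self.assertEqual(events[1].event_type_enum, EventType.REGIONAL)
        self.assertEqual(events[1].name, "BAE Systems/Granite State Regional")

        self.assertEqual(events[2].first_eid, "5523")
        self.assertEqual(events[2].event_type_enum, EventType.REGIONAL)
        self.assertEqual(events[2].name, "New Jersey Regional")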
8fc2ace83f7c25d5245f0496c3e52adcc2fd71c7
Fix typo in ResolverNode move alias.
tfx/components/common_nodes/resolver_node.py
tfx/components/common_nodes/resolver_node.py
# Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deprecated location for the TFX Resolver.

The new location is `tfx.dsl.components.common.resolver.Resolver`.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from typing import Dict, Text, Type

from tfx import types
from tfx.dsl.components.common import resolver
from tfx.utils import deprecation_utils
from tfx.utils import json_utils


def _make_deprecated_resolver_node_alias():
  """Make ResolverNode alias class.

  Make the deprecation shim for ResolverNode. Needed to conform to the
  convention expected by `tfx.utils.deprecation_utils` and to translate
  renamed constructor arguments.

  Returns:
    Deprecated ResolverNode alias class.
  """
  parent_deprecated_class = deprecation_utils.deprecated_alias(  # pylint: disable=invalid-name
      deprecated_name='tfx.components.common_nodes.resolver_node.ResolverNode',
      name='tfx.dsl.components.common.resolver.ResolverNode',
      func_or_class=resolver.Resolver)

  class _NewDeprecatedClass(parent_deprecated_class):
    """Deprecated ResolverNode alias constructor.

    This class location is DEPRECATED and is provided temporarily for
    compatibility. Please use `tfx.dsl.components.common.resolver.Resolver`
    instead.
    """

    def __init__(self,
                 instance_name: Text,
                 resolver_class: Type[resolver.ResolverStrategy],
                 resolver_configs: Dict[Text, json_utils.JsonableType] = None,
                 **kwargs: types.Channel):
      """Forwarding shim for deprecated ResolverNode alias constructor.

      Args:
        instance_name: the name of the Resolver instance.
        resolver_class: a ResolverStrategy subclass which contains the
          artifact resolution logic.
        resolver_configs: a dict of key to Jsonable type representing
          configuration that will be used to construct the resolver strategy.
        **kwargs: a key -> Channel dict, describing what are the Channels to
          be resolved. This is set by user through keyword args.
      """
      super(ResolverNode, self).__init__(
          instance_name=instance_name,
          strategy_class=resolver_class,
          config=resolver_configs,
          **kwargs)

  return _NewDeprecatedClass


# Constant to access resolver class from resolver exec_properties.
RESOLVER_CLASS = resolver.RESOLVER_STRATEGY_CLASS
# Constant to access resolver config from resolver exec_properties.
RESOLVER_CONFIGS = resolver.RESOLVER_CONFIG

RESOLVER_CLASS_LIST = resolver.RESOLVER_STRATEGY_CLASS_LIST
RESOLVER_CONFIG_LIST = resolver.RESOLVER_CONFIG_LIST

ResolverNode = _make_deprecated_resolver_node_alias()
Python
0.999044
@@ -1567,36 +1567,32 @@
 esolver.Resolver
-Node
 ',
       func_or
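The hunk fixes the typo in the alias registration, where the new-location name pointed at a nonexistent `resolver.ResolverNode`; reconstructed:

  parent_deprecated_class = deprecation_utils.deprecated_alias(  # pylint: disable=invalid-name
      deprecated_name='tfx.components.common_nodes.resolver_node.ResolverNode',
      name='tfx.dsl.components.common.resolver.Resolver',
      func_or_class=resolver.Resolver)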
d6d60b1d44742a3d8c67f1a78562a376a9627c21
remove features not in django 1.11
minio_storage/management/commands/minio.py
minio_storage/management/commands/minio.py
import json
import sys
from string import Template
from unittest.mock import patch

import minio.error
from django.core.management.base import BaseCommand, CommandError, no_translations
from django.utils.module_loading import import_string

from minio_storage.policy import Policy
from minio_storage.storage import MinioStorage


class Command(BaseCommand):
    help = "verify, list, create and delete minio buckets"

    CHECK = "check"
    CREATE = "create"
    DELETE = "delete"
    LIST = "ls"
    POLICY = "policy"

    FULL_FORMAT = "$name $size $modified $url $etag"

    def add_arguments(self, parser):
        group = parser.add_argument_group("minio")
        group.add_argument(
            "--class",
            type=str,
            default="minio_storage.storage.MinioMediaStorage",
            help="Storage class to modify "
            "(media/static are short names for default classes)",
        )
        group.add_argument(
            "--bucket",
            type=str,
            default=None,
            help="bucket name (default: storage defined bucket if not set)",
        )
        commands = parser.add_subparsers(
            dest="command",
            title="subcommands",
            description="valid subcommands",
            # required=True,
        )
        commands.add_parser(self.CHECK, help="check bucket")
        commands.add_parser(self.CREATE, help="make bucket")
        commands.add_parser(self.DELETE, help="remove an empty bucket")

        ls = commands.add_parser(self.LIST, help="list bucket objects or buckets")
        ls.add_argument("--dirs", action="store_true", help="include directories")
        ls.add_argument("--files", action="store_true", help="include files")
        ls.add_argument(
            "-r", "--recursive", action="store_true", help="find files recursive"
        )
        ls.add_argument("-p", "--prefix", type=str, default="", help="path prefix")
        ls.add_argument(
            "--buckets", action="store_true", help="list buckets instead of files"
        )
        ls.add_argument(
            "-f",
            "--format",
            type=str,
            default="$name",
            help="list format. ( $name $size $modified $url $etag )",
        )

        policy = commands.add_parser(self.POLICY, help="get or set bucket policy")
        policy.add_argument(
            "--set",
            type=str,
            default=None,
            choices=[p.value for p in Policy],
            help="set bucket policy",
        )

        super().add_arguments(parser)

    @no_translations
    def handle(self, *args, **options):
        storage = self.storage(options)
        bucket_name = options["bucket"] or storage.bucket_name
        command = options["command"] or ""

        if command == self.CHECK:
            return self.bucket_exists(storage, bucket_name)
        if command == self.CREATE:
            return self.bucket_create(storage, bucket_name)
        elif command == self.DELETE:
            return self.bucket_delete(storage, bucket_name)
        elif command == self.LIST:
            if options["buckets"]:
                return self.list_buckets(storage)
            list_dirs = True
            list_files = True
            summary = True
            if options["dirs"] or options["files"]:
                list_dirs = options["dirs"]
                list_files = options["files"]
                summary = False
            return self.bucket_list(
                storage,
                bucket_name,
                prefix=options["prefix"],
                list_dirs=list_dirs,
                list_files=list_files,
                recursive=options["recursive"],
                format=options["format"],
                summary=summary,
            )
        elif command == self.POLICY:
            if options["set"] is not None:
                return self.policy_set(
                    storage, bucket_name, policy=Policy(options["set"])
                )
            return self.policy_get(storage, bucket_name)

        self.print_help("minio", "")
        if command != "":
            raise CommandError(f"don't know how to handle command: {command}")
        raise CommandError("command name required")

    def storage(self, options):
        class_name = {
            "media": "minio_storage.storage.MinioMediaStorage",
            "static": "minio_storage.storage.MinioStaticStorage",
        }.get(options["class"], options["class"])
        try:
            storage_class = import_string(class_name)
        except ImportError:
            raise CommandError(f"could not find storage class: {class_name}")
        if not issubclass(storage_class, MinioStorage):
            raise CommandError(f"{class_name} is not an sub class of MinioStorage.")

        # TODO: maybe another way
        with patch.object(storage_class, "_init_check", return_value=None):
            storage = storage_class()
        return storage

    def bucket_exists(self, storage, bucket_name):
        exists = storage.client.bucket_exists(bucket_name)
        if not exists:
            raise CommandError(f"bucket {bucket_name} does not exist")

    def list_buckets(self, storage):
        objs = storage.client.list_buckets()
        for o in objs:
            self.stdout.write(f"{o.name}")

    def bucket_list(
        self,
        storage,
        bucket_name: str,
        *,
        prefix: str,
        list_dirs: bool,
        list_files: bool,
        recursive: bool,
        format: str = None,
        summary: bool = True,
    ):
        try:
            objs = storage.client.list_objects_v2(
                bucket_name, prefix=prefix, recursive=recursive
            )
            template = None
            if format is not None and format != "$name":
                template = Template(format)

            def fmt(o):
                if template is None:
                    return o.object_name
                return template.substitute(
                    name=o.object_name,
                    size=o.size,
                    modified=o.last_modified,
                    etag=o.etag,
                    url=storage.url(o.object_name),
                )

            n_files = 0
            n_dirs = 0
            for o in objs:
                if o.is_dir:
                    n_dirs += 1
                    if list_dirs:
                        self.stdout.write(fmt(o))
                else:
                    n_files += 1
                    if list_files:
                        self.stdout.write(fmt(o))

            if summary:
                print(f"{n_files} files and {n_dirs} directories", file=sys.stderr)
        except minio.error.NoSuchBucket:
            raise CommandError(f"bucket {bucket_name} does not exist")

    def bucket_create(self, storage, bucket_name):
        try:
            storage.client.make_bucket(bucket_name)
            print(f"created bucket: {bucket_name}", file=sys.stderr)
        except minio.error.BucketAlreadyOwnedByYou:
            raise CommandError(f"you have already created {bucket_name}")
        return

    def bucket_delete(self, storage, bucket_name):
        try:
            storage.client.remove_bucket(bucket_name)
        except minio.error.NoSuchBucket:
            raise CommandError(f"bucket {bucket_name} does not exist")
        except minio.error.BucketNotEmpty:
            raise CommandError(f"bucket {bucket_name} is not empty")

    def policy_get(self, storage, bucket_name):
        try:
            policy = storage.client.get_bucket_policy(bucket_name)
            policy = json.loads(policy)
            policy = json.dumps(policy, ensure_ascii=False, indent=2)
            return policy
        except (minio.error.NoSuchBucket, minio.error.NoSuchBucketPolicy) as e:
            raise CommandError(e.message)

    def policy_set(self, storage, bucket_name, policy: Policy):
        try:
            policy = Policy(policy)
            storage.client.set_bucket_policy(bucket_name, policy.bucket(bucket_name))
        except minio.error.NoSuchBucket as e:
            raise CommandError(e.message)
Python
0.000001
@@ -165,25 +165,8 @@
 rror
-, no_translations
 
 fro
@@ -2550,29 +2550,8 @@
 r)
 
-    @no_translations
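Both hunks remove `no_translations`, which does not exist in Django 1.11 (it was added in a later Django release); reconstructed effect:

import json
import sys
from string import Template
from unittest.mock import patch

import minio.error
from django.core.management.base import BaseCommand, CommandError

class Command(BaseCommand):
    ...
    def handle(self, *args, **options):  # the @no_translations decorator is gone
        ...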
3c33b9d7ea3736329d3e0939b042db08e6365eb5
Move experiments to ``experiments`` module
dallinger/version.py
dallinger/version.py
"""Dallinger version number.""" __version__ = "3.0.0a1"
Python
0.000007
@@ -45,13 +45,11 @@
 = "
-3.0.0a1
+2.7.0
 "
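Applying the hunk, version.py becomes (reconstructed):

"""Dallinger version number."""

__version__ = "2.7.0"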
55e55eb7b74def06e3d15e7a4045c5c0a0ed0544
Fix TcpClient INIT state not returning state
blitz/io/client_states.py
blitz/io/client_states.py
__author__ = 'Will Hart'

import logging
import threading

from blitz.constants import *


class BaseState(object):
    """
    A base state diagram which provides a few methods - this should not
    be directly instantiated.

    All methods return a BaseState derived object which should handle
    future message processing
    """

    logger = logging.getLogger(__name__)

    def enter_state(self, tcp, state):
        """Called when entering the state"""
        self.logger.debug("[TCP] Calling base.enter_state >> " + state.__name__)
        return state()

    def process_message(self, tcp, msg):
        """Called when a message needs processing"""
        self.logger.debug("[TCP] Calling base.process_message: " + msg)
        raise NotImplementedError()

    def send_message(self, tcp, msg):
        """
        Send the passed message over TCP and return the current state
        """
        self.logger.debug("[TCP] Calling base.send_message: " + msg)
        tcp._do_send(msg)
        return self

    def go_to_state(self, tcp, state):
        """
        Transition to a new state and call enter_state on it

        :return: the new state
        """
        self.logger.debug("[TCP] Calling base.go_to_state >> " + state.__name__)
        return state().enter_state(tcp, state)

    def __str__(self):
        return "<" + __name__ + ">"


class ClientInitState(BaseState):
    """
    Handles the client starting up - sends a "logging" query to the logger
    and waits for the response
    """

    def enter_state(self, tcp, state):
        """Send a logging query to the logger"""
        self.logger.debug("[TCP] Calling init.enter_state")
        tcp._do_send(CommunicationCodes.IsLogging)
        return self

    def process_message(self, tcp, msg):
        self.logger.debug("[TCP] Calling init.process_message: " + msg)
        if msg == CommunicationCodes.Acknowledge:
            # logger is logging, transition to LOGGING state
            return self.go_to_state(tcp, ClientLoggingState)
        elif msg == CommunicationCodes.Negative:
            # logger is not logging, go to idle
            return self.go_to_state(tcp, ClientIdleState)
        else:
            # no other messages are acceptable in this state
            raise Exception("Unable to process the given message from InitState: " + msg)


class ClientIdleState(BaseState):
    """
    Handles the client idling, waiting for further commands
    """

    def process_message(self, tcp, msg):
        # no server messages are acceptable in this state
        self.logger.debug("[TCP] Calling idle.process_message: " + msg)
        raise Exception("Received unexpected message in IdleState: " + msg)

    def send_message(self, tcp, msg):
        self.logger.debug("[TCP] Calling idle.send_message: " + msg)
        if msg == CommunicationCodes.Start:
            return self.go_to_state(tcp, ClientStartingState)
        elif msg[0:8] == CommunicationCodes.Download:
            tcp._do_send(msg)
            return self.go_to_state(tcp, ClientDownloadingState)
        else:
            raise Exception("Unknown message for IDLE state - " + msg)


class ClientStartingState(BaseState):
    """Handles logging starting - waits for ACK from server"""

    def enter_state(self, tcp, state):
        self.logger.debug("[TCP] Calling starting.enter_state: " + state.__name__)
        tcp._do_send(CommunicationCodes.Start)
        return self

    def process_message(self, tcp, msg):
        self.logger.debug("[TCP] Calling starting.process_message: " + msg)
        if msg == CommunicationCodes.Acknowledge or msg == CommunicationCodes.InSession:
            return self.go_to_state(tcp, ClientLoggingState)
        return self.go_to_state(tcp, ClientIdleState)


class ClientLoggingState(BaseState):
    """
    Handles the client in logging state - sends periodic status updates
    """

    def enter_state(self, tcp, state):
        """sets up a timer which periodically polls the data logger for updates"""
        self.logger.debug("[TCP] Calling logging.enter_state")
        t = threading.Timer(1.0, self.request_update, args=[tcp])
        t.start()

    def request_update(self, tcp):
        """called on timer tick to request an update from the TCP server"""
        self.logger.critical("TICK")
        tcp.request_update()

        # if we are still logging, request another update
        if type(tcp.current_state) is ClientLoggingState:
            t = threading.Timer(1.0, self.request_update, args=[tcp])
            t.start()

    def send_message(self, tcp, msg):
        self.logger.debug("[TCP] Calling logging.send_message: " + msg)

        # check if we have requested logging to stop
        if msg == CommunicationCodes.Stop:
            return self.go_to_state(tcp, ClientStoppingState)

        # if not, are we requesting a status?
        if msg == CommunicationCodes.Update:
            tcp._do_send(CommunicationCodes.Update)
        elif len(msg) == COMMAND_MESSAGE_BYTES or len(msg) == SHORT_COMMAND_MESSAGE_BYTES:
            # this is likely to be a data message
            tcp.parse_reading(msg)
        else:
            # otherwise we just send the message and let the server sort it out
            tcp._do_send(msg)

        return self


class ClientStoppingState(BaseState):
    """
    Handles waiting for acknowledgement from a client before entering IDLE state
    """

    def enter_state(self, tcp, state):
        self.logger.debug("[TCP] Calling stopping.enter_state: " + state.__name__)
        tcp._do_send(CommunicationCodes.Stop)
        return self

    def process_message(self, tcp, msg):
        self.logger.debug("[TCP] Calling stopping.process_message: " + msg)
        if msg == CommunicationCodes.Acknowledge:
            return self.go_to_state(tcp, ClientIdleState)
        return self


class ClientDownloadingState(BaseState):
    """
    Handles the client in logging state - sends periodic status updates
    """

    def process_message(self, tcp, msg):
        self.logger.debug("[TCP] Calling downloading.process_message: " + msg)
        if msg == CommunicationCodes.Negative:
            # the data has been received
            self.send_message(tcp, CommunicationCodes.Acknowledge)
            return self.go_to_state(tcp, ClientIdleState)

        # otherwise we save the data row for processing
        tcp.parse_reading(msg)
        return self

    def go_to_state(self, tcp, state):
        self.logger.debug("[TCP] Calling downloading.go_to_state >> " + state.__name__)
        if type(state) == ClientIdleState:
            tcp._do_send(CommunicationCodes.Acknowledge)  # acknowledge end of download recieved
        return super(ClientDownloadingState, self).go_to_state(tcp, state)
Python
0
@@ -4139,32 +4139,52 @@
 t.start()
+        return self
 
     def reques
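The hunk makes the logging state's enter_state return itself, matching the other states; reconstructed:

    def enter_state(self, tcp, state):
        """sets up a timer which periodically polls the data logger for updates"""
        self.logger.debug("[TCP] Calling logging.enter_state")
        t = threading.Timer(1.0, self.request_update, args=[tcp])
        t.start()
        return self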
24e3c8baf5739aead32c3314ecb39edc51de1a5d
Remove entry message to avoid reconnect spam.
bot/grenouille_irc_bot.py
bot/grenouille_irc_bot.py
import logging
import sys
import os
from time import sleep

import irc.bot
import irc.strings
from datetime import datetime, timezone, timedelta
import threading
import xml.etree.ElementTree


class GrenouilleIrcBot(irc.bot.SingleServerIRCBot):
    """The module of the bot responsible for the Twitch (IRC) chat.

    Listen to all pub messages and respond accordingly.

    Attributes
        grenouille_bot - The main class the module is linked to.
        sanitizer - thread ran every 3 minutes to check if the bot is still alive
        commands - list of all commands supported by the bot
        who_data - streamer names displayed by who
    """

    def __init__(self, grenouille_bot):
        self.grenouille_bot = grenouille_bot

        channel = self.grenouille_bot.config['DEFAULT']['channel']
        nickname = self.grenouille_bot.config['DEFAULT']['nickname']
        server = 'irc.chat.twitch.tv'
        password = self.grenouille_bot.config['DEFAULT']['token']
        port = 6667

        self.who_data = 'Aucune info sur le streamer actuel.'
        self.commands = {
            'grenouille': self.grenouille,
            'next': self.next,
            'now': self.now,
            'who': self.who,
            'youtube': self.youtube,
            'twitter': self.twitter
        }

        irc.bot.SingleServerIRCBot.__init__(self, [(server, port, password)],
                                            nickname, nickname)
        self.channel = channel

        self.sanitizer = threading.Timer(60, self.sanitize).start()
        self.last_ping = datetime.utcnow()

        self.twitters = xml.etree.ElementTree.parse(
            os.path.join(os.path.dirname(__file__), 'twitters.xml')).getroot()

    def on_welcome(self, connection, e):
        """Called when the bot is connected to the IRC server.
        """
        connection.join(self.channel)
        connection.set_rate_limit(0.5)
        connection.send_raw('CAP REQ :twitch.tv/commands')
        connection.send_raw('CAP REQ :twitch.tv/tags')
        connection.privmsg(self.channel, "Je suis la !grenouille pour vous servir.")
        logging.info('Connected to channel.')

    def sanitize(self):
        """Check that IRC twitch didn't kick us. If that's the case, we reconnect.
        """
        if datetime.utcnow() - self.last_ping > timedelta(minutes=7):
            self.last_ping = datetime.utcnow()
            logging.warning('Sanitizer detected lost connection. Reconnecting.')
            self.connection.disconnect()
            sleep(10)
            self.connection.reconnect()

        self.sanitizer = threading.Timer(60, self.sanitize).start()

    def on_ping(self, connection, e):
        """Save last ping for sanitizer.
        """
        self.last_ping = datetime.utcnow()

    def on_pubmsg(self, connection, e):
        """Called for every public message.

        Extract command, call it with admin info.
        """
        message = e.arguments[0]
        sender = e.source.nick
        tags = {key_value["key"]: key_value["value"] for key_value in e.tags}

        is_admin = False
        if 'user-type' in tags:
            is_admin = bool(tags['user-type'])

        if not message[0] == '!':
            return
        elif sender == connection.get_nickname():
            return
        else:
            split = message[1:].split(' ', 1)
            if split[0] not in self.commands:
                return
            else:
                answer = self.commands[split[0]](is_admin,
                                                 split[1] if len(split) > 1 else None)
                for line in answer or []:
                    connection.privmsg(self.channel, line)

    ######################################
    # Methods linked to the bot commands #
    ######################################

    def grenouille(self, is_admin=False, parameters=None):
        """List all bot commands

        :return:
        """
        return ["Les croassements que j'écoute sont: {0}.".format(
            ', '.join(sorted(self.commands.keys())))]

    def next(self, is_admin=False, parameters=None):
        """Display the next event from the calendar.

        :return:
        """
        try:
            now = datetime.now(timezone.utc)
            while self.grenouille_bot.event_list and self.grenouille_bot.event_list[0].end < now:
                self.grenouille_bot.event_list.pop(0)

            if len(self.grenouille_bot.event_list) == 0:
                return ['Aucun événement planifié dans le calendrier.']
            else:
                if self.grenouille_bot.event_list[0].start > now:
                    return [str(self.grenouille_bot.event_list[0])]
                else:
                    return [str(self.grenouille_bot.event_list[1])]
        except Exception:
            logging.exception('Error when next.')
            return ['Erreur interne à la grenouille.']

    def now(self, is_admin=False, parameters=None):
        """Display the current event from the calendar.

        :return:
        """
        try:
            now = datetime.now(timezone.utc)
            while self.grenouille_bot.event_list and self.grenouille_bot.event_list[0].end < now:
                self.grenouille_bot.event_list.pop(0)

            if len(self.grenouille_bot.event_list) == 0:
                return ['Aucun événement planifié dans le calendrier.']
            elif self.grenouille_bot.event_list[0].start < now < self.grenouille_bot.event_list[0].end:
                return [str(self.grenouille_bot.event_list[0])]
            else:
                return ["Aucune information dans le calendrier pour l'événement actuel."]
        except Exception:
            logging.exception('Error when now.')
            return ['Erreur interne à la grenouille.']

    def who(self, is_admin=False, parameters=None):
        """Display current streamers. Mod can change with parameters

        :param parameters variable to set if not None
        :return:
        """
        if is_admin and parameters is not None:
            self.who_data = 'Streamers actuels: {0}'.format(parameters)

        return [self.who_data]

    def youtube(self, is_admin=False, parameters=None):
        """Print the youtube official channel of the FroggedTV

        :param parameters useless in this function
        :return: Youtube of the FroggedTV
        """
        return ['Le YouTube de la FroggedTV : https://www.youtube.com/FroggedTV']

    def twitter(self, is_admin=False, parameters=None):
        """Display the Twitter account of the asked streamer.

        :param parameters name of the streamer
        :return:
        """
        if parameters is not None:
            twitter = self.twitters.find('.//twitter[@name="{0}"]'.format(parameters.lower()))
            if twitter is not None:
                return [twitter.text]
            else:
                twitter = self.twitters.find('.//twitter[@alias="{0}"]'.format(parameters.lower()))
                if twitter is not None:
                    return [twitter.text]
                else:
                    return []
        else:
            return ['Format de la commande : !twitter [nom du streamer]']
Python
0
@@ -1971,93 +1971,8 @@
 s')
-        connection.privmsg(self.channel, "Je suis la !grenouille pour vous servir.")
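With the greeting removed, on_welcome stays silent on reconnects; reconstructed:

    def on_welcome(self, connection, e):
        """Called when the bot is connected to the IRC server.
        """
        connection.join(self.channel)
        connection.set_rate_limit(0.5)
        connection.send_raw('CAP REQ :twitch.tv/commands')
        connection.send_raw('CAP REQ :twitch.tv/tags')
        logging.info('Connected to channel.')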
e7722145dd3074a22e19712cbd916accf8275050
Fix create new initiative first step
bluebottle/initiatives/models.py
bluebottle/initiatives/models.py
from django.db import models
from django.db.models.deletion import SET_NULL
from django.template.defaultfilters import slugify
from django.forms.models import model_to_dict
from django.utils.translation import ugettext_lazy as _

from djchoices.choices import DjangoChoices, ChoiceItem
from multiselectfield import MultiSelectField

from bluebottle.files.fields import ImageField
from bluebottle.fsm import FSMField, TransitionNotAllowed
from bluebottle.geo.models import Geolocation
from bluebottle.initiatives.messages import InitiativeClosedOwnerMessage, InitiativeApproveOwnerMessage
from bluebottle.organizations.models import Organization, OrganizationContact
from bluebottle.utils.models import BasePlatformSettings


class Initiative(models.Model):
    class Status(DjangoChoices):
        draft = ChoiceItem('draft', _('draft'))
        submitted = ChoiceItem('submitted', _('submitted'))
        needs_work = ChoiceItem('needs_work', _('needs work'))
        approved = ChoiceItem('approved', _('approved'))
        closed = ChoiceItem('closed', _('closed'))

    title = models.CharField(_('title'), max_length=255)

    status = FSMField(
        default=Status.draft,
        choices=Status.choices,
        protected=True
    )

    owner = models.ForeignKey(
        'members.Member',
        verbose_name=_('owner'),
        related_name='own_%(class)ss',
    )

    reviewer = models.ForeignKey(
        'members.Member',
        null=True,
        blank=True,
        verbose_name=_('reviewer'),
        related_name='review_%(class)ss',
    )

    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    slug = models.SlugField(_('slug'), max_length=100)

    pitch = models.TextField(
        _('pitch'), help_text=_('Pitch your smart idea in one sentence'), blank=True
    )
    story = models.TextField(_('story'), blank=True)

    theme = models.ForeignKey('bb_projects.ProjectTheme', null=True, blank=True, on_delete=SET_NULL)
    categories = models.ManyToManyField('categories.Category', blank=True)

    image = ImageField(blank=True, null=True)

    promoter = models.ForeignKey(
        'members.Member',
        verbose_name=_('promoter'),
        null=True,
    )

    video_url = models.URLField(
        _('video'),
        max_length=100,
        blank=True,
        null=True,
        default='',
        help_text=_(
            "Do you have a video pitch or a short movie that "
            "explains your initiative? Cool! We can't wait to see it! "
            "You can paste the link to YouTube or Vimeo video here"
        )
    )

    place = models.ForeignKey(Geolocation, null=True, blank=True, on_delete=SET_NULL)

    location = models.ForeignKey('geo.Location', null=True, blank=True, on_delete=models.SET_NULL)

    has_organization = models.NullBooleanField(null=True, default=None)

    organization = models.ForeignKey(Organization, null=True, blank=True, on_delete=SET_NULL)
    organization_contact = models.ForeignKey(OrganizationContact, null=True, blank=True, on_delete=SET_NULL)

    def is_complete(self):
        from bluebottle.initiatives.serializers import InitiativeSubmitSerializer

        serializer = InitiativeSubmitSerializer(
            data=model_to_dict(self)
        )
        if not serializer.is_valid():
            return [unicode(error) for errors in serializer.errors.values() for error in errors]

    @status.transition(
        source=Status.draft,
        target=Status.submitted,
        conditions=[is_complete],
        custom={'button_name': _('submit')}
    )
    def submit(self):
        pass

    @status.transition(
        source=Status.needs_work,
        target=Status.submitted,
        conditions=[is_complete],
        custom={'button_name': _('resubmit')}
    )
    def resubmit(self):
        pass

    @status.transition(
        source=Status.submitted,
        target=Status.needs_work,
        custom={'button_name': _('needs work')}
    )
    def needs_work(self):
        pass

    @status.transition(
        source=Status.submitted,
        target=Status.approved,
        messages=[InitiativeApproveOwnerMessage],
        conditions=[is_complete],
        custom={'button_name': _('approve')}
    )
    def approve(self):
        for activity in self.activities.filter(status='draft'):
            activity.initiative = self
            try:
                activity.open()
                activity.save()
            except TransitionNotAllowed:
                pass

    @status.transition(
        source=[Status.approved, Status.submitted, Status.needs_work],
        target=Status.closed,
        messages=[InitiativeClosedOwnerMessage],
        custom={'button_name': _('close')}
    )
    def close(self):
        pass

    @status.transition(
        source=[Status.approved, Status.closed],
        target=Status.submitted,
        conditions=[is_complete],
        custom={'button_name': _('re-open')}
    )
    def reopen(self):
        pass

    class Meta:
        verbose_name = _("Initiative")
        verbose_name_plural = _("Initiatives")
        permissions = (
            ('api_read_initiative', 'Can view initiative through the API'),
            ('api_add_initiative', 'Can add initiative through the API'),
            ('api_change_initiative', 'Can change initiative through the API'),
            ('api_delete_initiative', 'Can delete initiative through the API'),
            ('api_read_own_initiative', 'Can view own initiative through the API'),
            ('api_add_own_initiative', 'Can add own initiative through the API'),
            ('api_change_own_initiative', 'Can change own initiative through the API'),
            ('api_change_own_running_initiative', 'Can change own initiative through the API'),
            ('api_delete_own_initiative', 'Can delete own initiative through the API'),
        )

    class JSONAPIMeta:
        resource_name = 'initiatives'

    def __unicode__(self):
        return self.title

    def save(self, **kwargs):
        if self.slug == '' and self.title:
            self.slug = slugify(self.title)

        super(Initiative, self).save(**kwargs)


class InitiativePlatformSettings(BasePlatformSettings):
    ACTIVITY_TYPES = (
        ('funding', _('Funding')),
        ('event', _('Events')),
        ('job', _('Jobs')),
    )

    activity_types = MultiSelectField(max_length=100, choices=ACTIVITY_TYPES)
    require_organization = models.BooleanField(default=False)

    class Meta:
        verbose_name_plural = _('initiative platform settings')
        verbose_name = _('initiative platform settings')
Python
0.000008
@@ -6067,17 +6067,38 @@
 lug 
-== '' and
+in ['', 'new']:
+            if
 sel
@@ -6106,16 +6106,20 @@
 .title:
+    
@@ -6154,16 +6154,67 @@
 .title)
+
+            else:
+                self.slug = 'new'
 
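Applying the three hunks, Initiative.save handles freshly created initiatives whose slug is 'new'; a reconstruction from the old contents plus the diff:

    def save(self, **kwargs):
        if self.slug in ['', 'new']:
            if self.title:
                self.slug = slugify(self.title)
            else:
                self.slug = 'new'

        super(Initiative, self).save(**kwargs)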
0516ca2a5bfaa162a44f407c13b55ca9487897fe
refresh group/keywords every hour
hortiradar/database/tasks_workers.py
hortiradar/database/tasks_workers.py
from configparser import ConfigParser

from redis import StrictRedis
import ujson as json

from keywords import get_frog, get_keywords
from selderij import app
from tasks_master import insert_tweet


keywords = get_keywords()

config = ConfigParser()
config.read("tasks_workers.ini")
posprob_minimum = config["workers"].getfloat("posprob_minimum")

redis = StrictRedis()
rt_cache_time = 60 * 60 * 6


@app.task
def find_keywords_and_groups(id_str, text, retweet_id_str):
    """Find the keywords and associated groups in the tweet."""
    # First check if retweets are already processed in the cache
    if retweet_id_str:
        key = "t:%s" % retweet_id_str
        rt = redis.get(key)
        if rt:
            kw, groups, tokens = json.loads(rt)
            insert_tweet.apply_async((id_str, kw, groups, tokens), queue="master")
            redis.expire(key, rt_cache_time)
            return

    frog = get_frog()
    tokens = frog.process(text)  # a list of dictionaries with frog's analysis per token
    kw = []
    groups = []
    for t in tokens:
        lemma = t["lemma"].lower()
        k = keywords.get(lemma, None)
        if k is not None:
            if t["posprob"] > posprob_minimum:
                if not t["pos"].startswith(k.pos + "("):
                    continue
                kw.append(lemma)
                groups += k.groups
    kw, groups = list(set(kw)), list(set(groups))

    insert_tweet.apply_async((id_str, kw, groups, tokens), queue="master")

    # put retweets in the cache
    if retweet_id_str:
        data = [kw, groups, tokens]
        redis.set(key, json.dumps(data), ex=rt_cache_time)
Python
0
@@ -30,16 +30,38 @@ igParser +%0Afrom time import time %0A%0Afrom r @@ -240,16 +240,44 @@ ywords() +%0Akeywords_sync_time = time() %0A%0Aconfig @@ -514,16 +514,16 @@ d_str):%0A - %22%22%22F @@ -578,16 +578,174 @@ eet.%22%22%22%0A + global keywords, keywords_sync_time%0A if (time() - keywords_sync_time) %3E 60 * 60:%0A keywords = get_keywords()%0A keywords_sync_time = time()%0A # Fi
2ec6aa75d80dce2dc68d1369173b9a361864409b
update save()
alogator/models.py
alogator/models.py
from django.db import models
from django.core.mail import send_mail
from django.utils import timezone


class LogActor(models.Model):
    email = models.CharField(max_length=100, blank=True, null=True, help_text='Alogator will send a messages to this email address.')
    active = models.BooleanField(default=True)
    mute = models.BooleanField(default=False, help_text="suppress for notification")
    slackHook = models.URLField(null=True, blank=True)
    slackChannel = models.CharField(max_length=50, null=True, blank=True)
    postHook = models.URLField(null=True, blank=True)

    def __unicode__(self):
        return 'email to: %s' % (self.email)

    def getMutedFilename(self):
        return "/tmp/alogator_actor_%s_muted" % self.id

    def save(self, *args, **kwargs):
        if self.__class__.objects.get(pk=self.pk).mute and not self.mute:
            try:
                f = open(self.getMutedFilename(), 'r')
                content = f.read()
            except:
                content = "Muted file " + self.getMutedFilename() + " does not exist."
            send_mail(
                'ALOGATOR: Muged logs for: %s' % self.getMutedFilename(),
                content,
                'debug@arteria.ch',
                [self.email],
                fail_silently=True
            )

            f = open(self.getMutedFilename(), 'a')
            f.flush()
        super(LogActor, self).save(*args, **kwargs)  # Call the "real" save() method.


class LogSensor(models.Model):
    pattern = models.CharField(max_length=100, blank=True, null=True)
    caseSensitive = models.BooleanField(default=False)
    actor = models.ForeignKey(LogActor)
    inactivityThreshold = models.IntegerField(default=0, null=True, blank=True)
    inactive = models.BooleanField(default=False)

    def __unicode__(self):
        return 'search for: %s' % (self.pattern)


class LogFile(models.Model):
    path = models.CharField(max_length=1000, blank=True, null=True)
    lastModified = models.DateTimeField(default=timezone.now, blank=True)
    lastPosition = models.IntegerField(default=0)
    lastSize = models.IntegerField(default=0)
    sensors = models.ManyToManyField(LogSensor, blank=True)

    def __unicode__(self):
        return self.path
Python
0
@@ -823,18 +823,39 @@ -if +try:%0A orig = self.__ @@ -885,16 +885,96 @@ self.pk) +%0A except DoesNotExist:%0A pass%0A else:%0A if orig .mute an @@ -994,32 +994,36 @@ te:%0A + try:%0A @@ -1019,32 +1019,36 @@ + + f = open(self.ge @@ -1078,32 +1078,36 @@ + content = f.read @@ -1117,24 +1117,28 @@ + except:%0A @@ -1141,32 +1141,36 @@ + + content = %22Muted @@ -1236,16 +1236,20 @@ + send_mai @@ -1267,16 +1267,20 @@ + + 'ALOGATO @@ -1341,24 +1341,28 @@ + content,%0A @@ -1358,16 +1358,20 @@ ontent,%0A + @@ -1414,16 +1414,20 @@ + + %5Bself.em @@ -1432,16 +1432,20 @@ email%5D,%0A + @@ -1483,19 +1483,27 @@ + + )%0A%0A + @@ -1545,16 +1545,20 @@ ), 'a')%0A +
ea2e71ffe07fbb1e8a307d4b33e9718d6b9b2576
Increase closing threshold.
obfsproxy/transports/scramblesuit/state.py
obfsproxy/transports/scramblesuit/state.py
""" Provide a way to store the server's state information on disk. The server possesses state information which should persist across runs. This includes key material to encrypt and authenticate session tickets, replay tables and PRNG seeds. This module provides methods to load, store and generate such state information. """ import os import sys import time import cPickle import random import const import replay import mycrypto import probdist import obfsproxy.common.log as logging log = logging.get_obfslogger() def load( ): """ Load the server's state object from file. The server's state file is loaded and the state object returned. If no state file is found, a new one is created and returned. """ stateFile = const.STATE_LOCATION + const.SERVER_STATE_FILE log.info("Attempting to load the server's state file from `%s'." % stateFile) if not os.path.exists(stateFile): log.info("The server's state file does not exist (yet).") state = State() state.genState() return state try: with open(stateFile, 'r') as fd: stateObject = cPickle.load(fd) except IOError as err: log.error("Error reading server state file from `%s': %s" % (stateFile, err)) sys.exit(1) return stateObject class State( object ): """ Implement a state class which stores the server's state. This class makes it possible to store state information on disk. It provides methods to generate and write state information. """ def __init__( self ): """ Initialise a `State' object. """ self.prngSeed = None self.keyCreation = None self.hmacKey = None self.aesKey = None self.oldHmacKey = None self.oldAesKey = None self.ticketReplay = None self.uniformDhReplay = None self.pktDist = None self.iatDist = None self.fallbackPassword = None self.closingThreshold = None def genState( self ): """ Populate all the local variables with values. """ log.info("Generating parameters for the server's state file.") # PRNG seed for the client to reproduce the packet and IAT morpher. self.prngSeed = mycrypto.strongRandom(const.PRNG_SEED_LENGTH) # HMAC and AES key used to encrypt and authenticate tickets. self.hmacKey = mycrypto.strongRandom(const.TICKET_HMAC_KEY_LENGTH) self.aesKey = mycrypto.strongRandom(const.TICKET_AES_KEY_LENGTH) self.keyCreation = int(time.time()) # The previous HMAC and AES keys. self.oldHmacKey = None self.oldAesKey = None # Replay dictionary for both authentication mechanisms. self.replayTracker = replay.Tracker() # Distributions for packet lengths and inter arrival times. prng = random.Random(self.prngSeed) self.pktDist = probdist.new(lambda: prng.randint(const.HDR_LENGTH, const.MTU), seed=self.prngSeed) self.iatDist = probdist.new(lambda: prng.random() % const.MAX_PACKET_DELAY, seed=self.prngSeed) # Fallback UniformDH shared secret. Only used if the bridge operator # did not set `ServerTransportOptions'. self.fallbackPassword = os.urandom(const.SHARED_SECRET_LENGTH) # Unauthenticated connections are closed after having received the # following amount of bytes. self.closingThreshold = prng.randint(const.MAX_HANDSHAKE_LENGTH, const.MAX_HANDSHAKE_LENGTH * 2) self.writeState() def isReplayed( self, hmac ): """ Check if `hmac' is present in the replay table. Return `True' if the given `hmac' is present in the replay table and `False' otherwise. """ assert self.replayTracker is not None log.debug("Querying if HMAC is present in the replay table.") return self.replayTracker.isPresent(hmac) def registerKey( self, hmac ): """ Add the given `hmac' to the replay table. 
""" assert self.replayTracker is not None log.debug("Adding a new HMAC to the replay table.") self.replayTracker.addElement(hmac) # We must write the data to disk immediately so that other ScrambleSuit # connections can share the same state. self.writeState() def writeState( self ): """ Write the state object to a file using the `cPickle' module. """ stateFile = const.STATE_LOCATION + const.SERVER_STATE_FILE log.debug("Writing server's state file to `%s'." % stateFile) try: with open(stateFile, 'w') as fd: cPickle.dump(self, fd) except IOError as err: log.error("Error writing state file to `%s': %s" % (stateFile, err)) sys.exit(1)
Python
0
@@ -3790,9 +3790,9 @@ H * -2 +5 )%0A%0A
e8f97a1d8372dd1a351c5e1e22ecc3f288ae130c
Change batch size and epochs for NCF benchmarks
official/recommendation/ncf_keras_benchmark.py
official/recommendation/ncf_keras_benchmark.py
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Executes Keras benchmarks and accuracy tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import time

from absl import flags
from absl.testing import flagsaver
import tensorflow as tf  # pylint: disable=g-bad-import-order

from official.recommendation import ncf_common
from official.recommendation import ncf_keras_main
from official.utils.flags import core

FLAGS = flags.FLAGS
NCF_DATA_DIR_NAME = 'movielens_data'


class KerasNCFBenchmarkBase(tf.test.Benchmark):
  """Base class for NCF model benchmark."""
  local_flags = None

  def __init__(self, output_dir=None, default_flags=None, **kwargs):
    self.output_dir = output_dir
    self.default_flags = default_flags or {}

  def _setup(self):
    """Sets up and resets flags before each test."""
    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.DEBUG)
    if KerasNCFBenchmarkBase.local_flags is None:
      ncf_common.define_ncf_flags()
      # Loads flags to get defaults to then override. List cannot be empty.
      flags.FLAGS(['foo'])
      core.set_defaults(**self.default_flags)
      saved_flag_values = flagsaver.save_flag_values()
      KerasNCFBenchmarkBase.local_flags = saved_flag_values
    else:
      flagsaver.restore_flag_values(KerasNCFBenchmarkBase.local_flags)

  def _run_and_report_benchmark(self):
    start_time_sec = time.time()
    stats = ncf_keras_main.run_ncf(FLAGS)
    wall_time_sec = time.time() - start_time_sec

    metrics = self._extract_benchmark_report_extras(stats)
    self.report_benchmark(iters=-1, wall_time=wall_time_sec, metrics=metrics)

  def _extract_benchmark_report_extras(self, stats):
    raise NotImplementedError('Not implemented')


class KerasNCFRealData(KerasNCFBenchmarkBase):
  """Benchmark NCF model using real data."""

  def __init__(self, output_dir=None, root_data_dir=None, default_flags=None,
               **kwargs):
    default_flags = {}
    default_flags['dataset'] = 'ml-20m'
    default_flags['num_gpus'] = 1
    default_flags['train_epochs'] = 14
    default_flags['clean'] = True
    default_flags['batch_size'] = 160000
    default_flags['learning_rate'] = 0.00382059
    default_flags['beta1'] = 0.783529
    default_flags['beta2'] = 0.909003
    default_flags['epsilon'] = 1.45439e-07
    default_flags['layers'] = [256, 256, 128, 64]
    default_flags['num_factors'] = 64
    default_flags['hr_threshold'] = 0.635
    default_flags['ml_perf'] = True
    default_flags['use_synthetic_data'] = False
    default_flags['data_dir'] = os.path.join(root_data_dir, NCF_DATA_DIR_NAME)

    super(KerasNCFRealData, self).__init__(
        output_dir=output_dir,
        default_flags=default_flags,
        **kwargs)

  def _extract_benchmark_report_extras(self, stats):
    metrics = []
    metrics.append({'name': 'exp_per_second',
                    'value': stats['avg_exp_per_second']})

    # Target is 0.625, but some runs are below that level. Until we have
    # multi-run tests, we have to accept a lower target.
    metrics.append({'name': 'hr_at_10',
                    'value': stats['eval_hit_rate'],
                    'min_value': 0.618,
                    'max_value': 0.635})

    metrics.append({'name': 'train_loss',
                    'value': stats['loss']})

    return metrics

  def benchmark_1_gpu(self):
    self._setup()
    self._run_and_report_benchmark()

  def benchmark_2_gpus(self):
    self._setup()
    FLAGS.num_gpus = 2
    self._run_and_report_benchmark()


class KerasNCFSyntheticData(KerasNCFBenchmarkBase):
  """Benchmark NCF model using synthetic data."""

  def __init__(self, output_dir=None, default_flags=None, **kwargs):
    default_flags = {}
    default_flags['dataset'] = 'ml-20m'
    default_flags['num_gpus'] = 1
    default_flags['train_epochs'] = 14
    default_flags['batch_size'] = 160000
    default_flags['learning_rate'] = 0.00382059
    default_flags['beta1'] = 0.783529
    default_flags['beta2'] = 0.909003
    default_flags['epsilon'] = 1.45439e-07
    default_flags['layers'] = [256, 256, 128, 64]
    default_flags['num_factors'] = 64
    default_flags['hr_threshold'] = 0.635
    default_flags['use_synthetic_data'] = True

    super(KerasNCFSyntheticData, self).__init__(
        output_dir=output_dir,
        default_flags=default_flags,
        **kwargs)

  def _extract_benchmark_report_extras(self, stats):
    metrics = []
    metrics.append({'name': 'exp_per_second',
                    'value': stats['avg_exp_per_second']})
    return metrics

  def benchmark_1_gpu(self):
    self._setup()
    self._run_and_report_benchmark()

  def benchmark_2_gpus(self):
    self._setup()
    FLAGS.num_gpus = 2
    self._run_and_report_benchmark()
Python
0
@@ -2838,34 +2838,33 @@ rain_epochs'%5D = -14 +8 %0A default_fla @@ -2908,35 +2908,34 @@ 'batch_size'%5D = -160 +99 000%0A default_ @@ -4653,10 +4653,9 @@ %5D = -14 +8 %0A @@ -4689,11 +4689,10 @@ %5D = -160 +99 000%0A
6bb45d2e0e2b716c284e747adc1e65324583ea0c
Improve INSDC to SO term mapping
rnacentral_pipeline/databases/data/utils.py
rnacentral_pipeline/databases/data/utils.py
# -*- coding: utf-8 -*-

"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import re
import unicodedata

import six
import attr
from attr.validators import and_
from attr.validators import optional
from attr.validators import instance_of as is_a
from attr.validators import in_ as one_of

SO_PATTERN = re.compile(r'^SO:\d+$')

INSDC_SO_MAPPING = {
    "RNase_MRP_RNA": 'SO:0000385',
    "RNase_P_RNA": 'SO:0000386',
    "SRP_RNA": 'SO:0000590',
    "Y_RNA": 'SO:0000405',
    "antisense_RNA": 'SO:0000644',
    "autocatalytically_spliced_intron": 'SO:0000588',
    "guide_RNA": 'SO:0000602',
    "hammerhead_ribozyme": 'SO:0000380',
    "lncRNA": 'SO:0001877',
    "miRNA": 'SO:0000276',
    "ncRNA": 'SO:0000655',
    "misc_RNA": 'SO:0000673',
    "other": 'SO:0000655',
    "precursor_RNA": 'SO:0000185 ',
    "piRNA": 'SO:0001035',
    "rasiRNA": 'SO:0000454',
    "ribozyme": 'SO:0000374',
    "scRNA": 'SO:0000013',
    "scaRNA": 'SO:0002095',
    "siRNA": 'SO:0000646',
    "snRNA": 'SO:0000274',
    "snoRNA": 'SO:0000275',
    "telomerase_RNA": 'SO:0000390',
    "tmRNA": 'SO:0000584',
    "vault_RNA": 'SO:0000404',
    'rRNA': 'SO:0000252',
    'tRNA': 'SO:0000253',
    'bidirectional_promoter_lncrna': 'SO:0002185',
    '3prime_overlapping_ncrna': 'SO:0002120',
}

NORMALIZE_TO_INSDC = {
    'sRNA': 'other',
    'bidirectional_promoter_lncrna': 'lncRNA',
    '3prime_overlapping_ncrna': 'other',
}

SO_INSDC_MAPPING = {v: k for k, v in INSDC_SO_MAPPING.items()}
SO_INSDC_MAPPING['SO:0001244'] = "precursor_RNA"
SO_INSDC_MAPPING['SO:0000209'] = "precursor_RNA"
SO_INSDC_MAPPING['SO:0001904'] = "lncRNA"
SO_INSDC_MAPPING['SO:0000370'] = 'ncRNA'
SO_INSDC_MAPPING['SO:0005836'] = 'ncRNA'
SO_INSDC_MAPPING['SO:0000035'] = 'ncRNA'
SO_INSDC_MAPPING['SO:0000077'] = 'antisense_RNA'
SO_INSDC_MAPPING['SO:0000204'] = 'ncRNA'


class UnxpectedRnaType(Exception):
    """
    Raised when the RNA type is not an SO term and cannot be converted to
    one.
    """
    pass


def optionally(instance_type, **kwargs):
    """
    Return an attribute that is either none or of the given type.
    """
    return attr.ib(
        validator=optional(is_a(instance_type)),
        default=None,
        **kwargs
    )


def possibly_empty(instance_type, **kwargs):
    """
    Return an attribute that defaults to being empty and must be of the given
    type.
    """
    factory = instance_type
    if hasattr(instance_type, 'empty'):
        factory = instance_type.empty

    return attr.ib(
        validator=is_a(instance_type),
        default=attr.Factory(factory),
        **kwargs
    )


def matches_pattern(pattern):
    def fn(instance, attribute, value):
        if not re.match(pattern, value):
            raise TypeError("Bad value (%s) for %s in %s" % (value, attribute, instance))
    return fn


def as_so_term(rna_type):
    if re.match(SO_PATTERN, rna_type):
        return rna_type

    if rna_type not in INSDC_SO_MAPPING:
        raise UnxpectedRnaType(rna_type)
    return INSDC_SO_MAPPING[rna_type]


def from_so_term(so_term):
    if so_term in NORMALIZE_TO_INSDC:
        return six.text_type(NORMALIZE_TO_INSDC[so_term])
    if so_term in INSDC_SO_MAPPING:
        return six.text_type(so_term)
    if so_term in SO_INSDC_MAPPING:
        return six.text_type(SO_INSDC_MAPPING[so_term])
    raise UnxpectedRnaType(so_term)


def optional_utf8(raw):
    if raw is None:
        return None
    return raw
Python
0.000001
@@ -2360,16 +2360,58 @@ 'ncRNA'%0A +SO_INSDC_MAPPING%5B'SO:0000594'%5D = 'snoRNA'%0A %0A%0Aclass
af073d3cfaddb33d9cb4675c33707a223348e3b8
fix nans from logs in large models
models/distributions/distributions.py
models/distributions/distributions.py
import math

import theano.tensor as T

# ----------------------------------------------------------------------------
# this is all taken from the parmesan lib

c = - 0.5 * math.log(2*math.pi)


def log_bernoulli(x, p, eps=1e-5):
    """
    Compute log pdf of a Bernoulli distribution with success probability p,
    at values x.

        .. math:: \log p(x; p) = \log \mathcal{B}(x; p)

    Parameters
    ----------
    x : Theano tensor
        Values at which to evaluate pdf.
    p : Theano tensor
        Success probability :math:`p(x=1)`, which is also the mean of the
        Bernoulli distribution.
    eps : float
        Small number used to avoid NaNs by clipping p in range [eps;1-eps].

    Returns
    -------
    Theano tensor
        Element-wise log probability, this has to be summed
        for multi-variate distributions.
    """
    p = T.clip(p, eps, 1.0 - eps)
    return -T.nnet.binary_crossentropy(p, x)


def log_normal(x, mean, std, eps=1e-5):
    """
    Compute log pdf of a Gaussian distribution with diagonal covariance,
    at values x. Variance is parameterized as standard deviation.

        .. math:: \log p(x) = \log \mathcal{N}(x; \mu, \sigma^2I)

    Parameters
    ----------
    x : Theano tensor
        Values at which to evaluate pdf.
    mean : Theano tensor
        Mean of the Gaussian distribution.
    std : Theano tensor
        Standard deviation of the diagonal covariance Gaussian.
    eps : float
        Small number added to standard deviation to avoid NaNs.

    Returns
    -------
    Theano tensor
        Element-wise log probability, this has to be summed
        for multi-variate distributions.

    See also
    --------
    log_normal1 : using variance parameterization
    log_normal2 : using log variance parameterization
    """
    std += eps
    return c - T.log(T.abs_(std)) - (x - mean)**2 / (2 * std**2)


def log_normal2(x, mean, log_var, eps=1e-5):
    """
    Compute log pdf of a Gaussian distribution with diagonal covariance,
    at values x. Variance is parameterized as log variance rather than
    standard deviation, which ensures :math:`\sigma > 0`.

        .. math:: \log p(x) = \log \mathcal{N}(x; \mu, \sigma^2I)

    Parameters
    ----------
    x : Theano tensor
        Values at which to evaluate pdf.
    mean : Theano tensor
        Mean of the Gaussian distribution.
    log_var : Theano tensor
        Log variance of the diagonal covariance Gaussian.
    eps : float
        Small number added to denominator to avoid NaNs.

    Returns
    -------
    Theano tensor
        Element-wise log probability, this has to be summed
        for multi-variate distributions.

    See also
    --------
    log_normal : using standard deviation parameterization
    log_normal1 : using variance parameterization
    """
    # lv_clip = T.clip(log_var, -10., 10.)
    return c - log_var/2 - (x - mean)**2 / (2 * T.exp(log_var) + eps)
Python
0
@@ -2775,10 +2775,8 @@ %0A - # lv_ @@ -2864,22 +2864,22 @@ T.exp(l -og_var +v_clip ) + eps)
993d08b0ca0bcf90af77709e58698b7ecc5ba6b5
Update log.py
django_tenants/log.py
django_tenants/log.py
import logging

from django.db import connection


class TenantContextFilter(logging.Filter):
    """
    Add the current ``schema_name`` and ``domain_url`` to log records.

    Thanks to @regolith for the snippet on
    https://github.com/bernardopires/django-tenant-schemas/issues/248
    """
    def filter(self, record):
        record.schema_name = connection.tenant.schema_name
        record.domain_url = getattr(connection.tenant, 'domain_url', 'none')
        return True
Python
0.000001
@@ -445,14 +445,12 @@ l', -'n +N one -' )%0A
b392649fc0ecdb3e770c60c2043110658b06daeb
Fix pep8
pastamaker/webhack.py
pastamaker/webhack.py
# -*- encoding: utf-8 -*-
#
# Copyright © 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging
import re

import requests

from pastamaker import config

LOG = logging.getLogger(__name__)

global s
s = None


def get_web_session():
    s = requests.Session()
    s.headers['User-Agent'] = (
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
        '(KHTML, like Gecko) Chrome/58.0.3029.96 Safari/537.36')
    s.trust_env = False  # Don't read netrc
    r = s.get("https://github.com/login")
    r.raise_for_status()

    m = re.search('<input name="authenticity_token" '
                  'type="hidden" value="([^"]*)" />', r.text)
    token = m.group(1)

    r = s.post("https://github.com/session",
               data={"commit": "Sign+in",
                     "utf8": "✓",
                     "authenticity_token": token,
                     "login": config.WEBHACK_USERNAME,
                     "password": config.WEBHACK_PASSWORD})
    r.raise_for_status()
    return s


def web_github_get_merge_button_page(p):
    global s
    if not s:
        s = get_web_session()

    r = s.get(p.html_url + "/merge-button",
              headers={'x-requested-with': 'XMLHttpRequest',
                       'accept': 'text/html'})
    if r.status_code == 404:
        # NOTE(sileht): Maybe we got deconnected, so retry
        s = get_web_session()
        r = s.get(p.html_url + "/merge-button",
                  headers={'x-requested-with': 'XMLHttpRequest',
                           'accept': 'text/html'})
    r.raise_for_status()
    return s, r.text


def _web_github_branch_status(text):
    if '/update_branch' not in text:
        # No update_branch form
        return "dirty"
    elif 'This branch is out-of-date with the base branch' in text:
        return "behind"
    if 'This branch is up to date.' in text:
        return "clean"
    else:
        return "unknown"


def web_github_branch_status(p):
    s, text = web_github_get_merge_button_page(p)
    return _web_github_branch_status(text)


def web_github_update_branch(p):
    s, text = web_github_get_merge_button_page(p)

    state = _web_github_branch_status(text)
    if state != "behind":
        LOG.error("PR#%s: Can't update branch, state is not behind but %s",
                  p.number, state)
        return False

    m = re.search('/update_branch" .*<input name="authenticity_token" '
                  'type="hidden" value="([^"]*)" />', text)
    if not m:
        LOG.error("PR#%s: Can't update branch, authenticity_token not found"
                  % p.number)
        return False
    token = m.group(1)

    m = re.search('<input type="hidden" name="expected_head_oid" '
                  'value="([^"]*)">', text)
    if not m:
        LOG.error("PR#%s: Can't update branch, head_oid not found" % p.number)
        return False
    expected_head_oid = m.group(1)

    r = s.post(p.html_url + "/update_branch",
               headers={
                   'X-Requested-With': 'XMLHttpRequest',
                   'Content-Type': 'application/x-www-form-urlencoded; '
                                   'charset=UTF-8'
               },
               data={"utf8": "✓",
                     "expected_head_oid": expected_head_oid,
                     "authenticity_token": token})
    r.raise_for_status()
    return True


if __name__ == '__main__':
    import sys
    import mock
    url = sys.argv[1]
    p = mock.Mock(number=int(url.split("/")[-1]),
                  html_url=url)
    print(web_github_update_branch(p))
Python
0.000001
@@ -628,16 +628,39 @@ port re%0A +import sys%0A%0Aimport mock %0Aimport @@ -3910,39 +3910,8 @@ _':%0A - import sys%0A import mock%0A
463e6563bcfa63e672ec23231b1a16870b68c56d
Fix __str__ method
pathvalidate/error.py
pathvalidate/error.py
""" .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ import enum from typing import Optional, cast from ._common import Platform @enum.unique class ErrorReason(enum.Enum): FOUND_ABS_PATH = "FOUND_ABS_PATH" NULL_NAME = "NULL_NAME" INVALID_CHARACTER = "INVALID_CHARACTER" INVALID_LENGTH = "INVALID_LENGTH" MALFORMED_ABS_PATH = "MALFORMED_ABS_PATH" RESERVED_NAME = "RESERVED_NAME" class ValidationError(ValueError): """ Base exception class that indicates invalid name errors. """ @property def platform(self) -> Platform: return self.__platform @property def reason(self) -> Optional[ErrorReason]: return self.__reason @property def description(self) -> str: return self.__description @property def reusable_name(self) -> bool: return self.__reusable_name def __init__(self, *args, **kwargs): self.__platform = kwargs.pop("platform", None) self.__reason = kwargs.pop("reason", None) self.__description = kwargs.pop("description", None) self.__reusable_name = kwargs.pop("reusable_name", None) try: super().__init__(*args[0], **kwargs) except IndexError: super().__init__(*args, **kwargs) def __str__(self) -> str: item_list = [] if Exception.__str__(self): item_list.append(Exception.__str__(self)) if self.reason: item_list.append("reason={}".format(cast(ErrorReason, self.reason).value)) if self.platform: item_list.append("target-platform={}".format(self.platform.value)) if self.description: item_list.append("description={}".format(self.description)) if self.reusable_name: item_list.append("reusable_name={}".format(self.reusable_name)) return ", ".join(item_list).strip() def __repr__(self, *args, **kwargs): return self.__str__(*args, **kwargs) class NullNameError(ValidationError): """ Exception raised when a name is empty. """ def __init__(self, *args, **kwargs) -> None: kwargs["reason"] = ErrorReason.NULL_NAME super().__init__(args, **kwargs) class InvalidCharError(ValidationError): """ Exception raised when includes invalid character(s) within a string. """ def __init__(self, *args, **kwargs) -> None: kwargs["reason"] = ErrorReason.INVALID_CHARACTER super().__init__(args, **kwargs) class InvalidLengthError(ValidationError): """ Exception raised when a string too long/short. """ def __init__(self, *args, **kwargs) -> None: kwargs["reason"] = ErrorReason.INVALID_LENGTH super().__init__(args, **kwargs) class ReservedNameError(ValidationError): """ Exception raised when a string matched a reserved name. """ @property def reserved_name(self) -> str: return self.__reserved_name def __init__(self, *args, **kwargs) -> None: self.__reserved_name = kwargs.pop("reserved_name", None) kwargs["reason"] = ErrorReason.RESERVED_NAME super().__init__(args, **kwargs) class ValidReservedNameError(ReservedNameError): """ Exception raised when a string matched a reserved name. However, it can be used as a name. """ def __init__(self, *args, **kwargs) -> None: kwargs["reusable_name"] = True super().__init__(args, **kwargs) class InvalidReservedNameError(ReservedNameError): """ Exception raised when a string matched a reserved name. Moreover, the reserved name is invalid as a name. """ def __init__(self, *args, **kwargs) -> None: kwargs["reusable_name"] = False super().__init__(args, **kwargs)
Python
0.020279
@@ -1761,24 +1761,26 @@ if self. +__ reusable_nam @@ -1780,16 +1780,28 @@ ble_name + is not None :%0A
2756326b134acc6c343be8458870121baed963cb
fix db url
pergamena/settings.py
pergamena/settings.py
# -*- coding: utf-8 -*-
import os

os_env = os.environ


class Config(object):
    SECRET_KEY = os_env.get('PERGAMENA_SECRET', 'secret-key')  # TODO: Change me
    APP_DIR = os.path.abspath(os.path.dirname(__file__))  # This directory
    PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
    BCRYPT_LOG_ROUNDS = 13
    ASSETS_DEBUG = False
    DEBUG_TB_ENABLED = False  # Disable Debug toolbar
    DEBUG_TB_INTERCEPT_REDIRECTS = False
    CACHE_TYPE = 'simple'  # Can be "memcached", "redis", etc.


class ProdConfig(Config):
    """Production configuration."""
    ENV = 'prod'
    DEBUG = False
    SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/pergamena_db'  # TODO: Change me
    DEBUG_TB_ENABLED = False  # Disable Debug toolbar


class DevConfig(Config):
    """Development configuration."""
    ENV = 'dev'
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/pergamena_db'  # TODO: Change me
    DEBUG_TB_ENABLED = True
    ASSETS_DEBUG = True  # Don't bundle/minify static assets
    CACHE_TYPE = 'simple'  # Can be "memcached", "redis", etc.


class TestConfig(Config):
    TESTING = True
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'sqlite://'
    BCRYPT_LOG_ROUNDS = 1  # For faster tests
    WTF_CSRF_ENABLED = False  # Allows form testing
Python
0.999203
@@ -639,64 +639,34 @@ I = -'postgresql://localhost/pergamena_db' # TODO: Change me +os_env.get('DATABASE_URL') %0A
b7c531220fe7a46ad56eeeb160effe94510ba4b0
Use handler registration in listener
pg_bawler/listener.py
pg_bawler/listener.py
#!/usr/bin/env python
'''
Listen on given channel for notification.

$ python -m pg_bawler.listener mychannel

If you installed notification trigger with ``pg_bawler.gen_sql`` then
channel is the same as ``tablename`` argument.
'''
import argparse
import asyncio
import importlib
import logging
import sys

import pg_bawler.core

LOGGER = logging.getLogger('pg_bawler.listener')


class DefaultHandler:
    def __init__(self):
        self.count = 0

    async def handle_notification(self, notification):
        self.count += 1
        notification_number = self.count
        LOGGER.info(
            'Received notification #%s pid %s from channel %s: %s',
            notification_number, notification.pid,
            notification.channel, notification.payload)


def get_default_cli_args_parser():
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--dsn',
        metavar='DSN',
        help='Connection string. e.g. `dbname=test user=postgres`')
    parser.add_argument(
        '--handler',
        metavar='HANDLER',
        default='pg_bawler.listener:default_handler',
        help=(
            'Module and name of python callable.'
            ' e.g. `pg_bawler.listener:default_handler`'))
    parser.add_argument(
        'channel',
        metavar='CHANNEL', type=str,
        help='Name of Notify/Listen channel to listen on.')
    return parser


def resolve_handler(handler_str):
    module_name, callable_name = handler_str.split(':')
    return getattr(importlib.import_module(module_name), callable_name)


default_handler = DefaultHandler().handle_notification


class NotificationListener(
    pg_bawler.core.BawlerBase,
    pg_bawler.core.ListenerMixin
):
    pass


def main():
    args = get_default_cli_args_parser().parse_args()
    logging.basicConfig(
        format='[%(asctime)s][%(name)s][%(levelname)s]: %(message)s',
        level=logging.DEBUG)
    LOGGER.info('Starting pg_bawler listener for channel: %s', args.channel)
    loop = asyncio.get_event_loop()
    listener = NotificationListener(connection_params={'dsn': args.dsn})
    listener.listen_timeout = 5
    listener.handler = resolve_handler(args.handler)
    loop.run_until_complete(listener.register_channel(args.channel))
    loop.run_until_complete(listener.listen())


if __name__ == '__main__':
    sys.exit(main())
Python
0
@@ -2217,26 +2217,33 @@ istener. +register_ handler - = +( resolve_ @@ -2263,16 +2263,17 @@ handler) +) %0A loo
36ae5c9502d8aa7189d2e89c094a18c9891cbb6a
Use PID, which represents stable ID, over ID, which is instance dependent
pg_bridge/pgbridge.py
pg_bridge/pgbridge.py
""" PostGIS bridge """ import psycopg2 import json class PGBMABridge(object): def __init__(self, layer, conn_args): self.layer = layer self.connect(conn_args) def connect(self, conn_args): self.conn = psycopg2.connect(host=conn_args['host'], user=conn_args['user'], database=conn_args['database'], password=conn_args['password'], ) self.cursor = self.conn.cursor() self.cursor.execute("SELECT column_name, udt_name FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name=%s",(self.layer,)); res = self.cursor.fetchall() self.cnames = [] self.geometry_col = None for r in res: if r[1] == 'geometry': self.geometry_col = r[0] else: self.cnames.append(r[0]) def get_all(self): query = ' '.join(['SELECT',','.join(self.cnames),'FROM',self.layer,';']) self.cursor.execute(query) rows = self.cursor.fetchall() ret = [] for row in rows: dr = {} for c in range(len(self.cnames)): dr[self.cnames[c]] = row[c] ret.append(dr) return json.dumps(ret) def get_pos(self, id): query = 'SELECT ST_AsGeoJSON(ST_Centroid(the_geom)) FROM '+self.layer+ ' WHERE pid=%s' self.cursor.execute(query,(id,)) res = self.cursor.fetchone() return res[0] def find_in_rect(self, N, E, S, W, srid): c0 = '%f %f'%(W,N) c1 = '%f %f'%(E,N) c2 = '%f %f'%(E,S) c3 = '%f %f'%(W,S) polygon = 'POLYGON(('+ ','.join([c0,c1,c2,c3,c0]) +'))' st_polygon = "ST_GeomFromText('" + polygon + "', "+ str(srid) +")" query = 'SELECT id FROM %s WHERE ST_Contains(ST_Transform(%s,ST_SRID(%s)), %s)'%(self.layer, st_polygon, self.geometry_col, self.geometry_col) #print('[RECT] %s'%query) self.cursor.execute(query) ret = [] for row in self.cursor.fetchall(): ret.append(row[0]) return json.dumps(ret)
Python
0
@@ -1898,16 +1898,17 @@ 'SELECT +p id FROM
77170407ad61370dda87c1ed3f24aa2a50cb4ccc
Access the current line directly from the Document instance instead of calculating it manually
pgcli/key_bindings.py
pgcli/key_bindings.py
import logging
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.keys import Keys
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.filters import Condition
from .filters import HasSelectedCompletion

_logger = logging.getLogger(__name__)


def pgcli_bindings(get_vi_mode_enabled, set_vi_mode_enabled, expand_tab):
    """Custom key bindings for pgcli."""
    assert callable(get_vi_mode_enabled)
    assert callable(set_vi_mode_enabled)

    tab_insert_text = ' ' * 4 if expand_tab else '\t'

    key_binding_manager = KeyBindingManager(
        enable_open_in_editor=True,
        enable_system_bindings=True,
        enable_auto_suggest_bindings=True,
        enable_search=True,
        enable_abort_and_exit_bindings=True)

    @key_binding_manager.registry.add_binding(Keys.F2)
    def _(event):
        """
        Enable/Disable SmartCompletion Mode.
        """
        _logger.debug('Detected F2 key.')
        buf = event.cli.current_buffer
        buf.completer.smart_completion = not buf.completer.smart_completion

    @key_binding_manager.registry.add_binding(Keys.F3)
    def _(event):
        """
        Enable/Disable Multiline Mode.
        """
        _logger.debug('Detected F3 key.')
        buf = event.cli.current_buffer
        buf.always_multiline = not buf.always_multiline

    @key_binding_manager.registry.add_binding(Keys.F4)
    def _(event):
        """
        Toggle between Vi and Emacs mode.
        """
        _logger.debug('Detected F4 key.')
        vi_mode = not get_vi_mode_enabled()
        set_vi_mode_enabled(vi_mode)
        event.cli.editing_mode = EditingMode.VI if vi_mode else EditingMode.EMACS

    @key_binding_manager.registry.add_binding(Keys.Tab)
    def _(event):
        """Force autocompletion at cursor on non-empty lines."""
        _logger.debug('Detected <Tab> key.')
        buff = event.cli.current_buffer
        doc = buff.document

        line_start = doc.cursor_position + doc.get_start_of_line_position()
        line_end = doc.cursor_position + doc.get_end_of_line_position()
        current_line = doc.text[line_start:line_end]

        if current_line.strip():
            if buff.complete_state:
                buff.complete_next()
            else:
                event.cli.start_completion(select_first=True)
        else:
            buff.insert_text(tab_insert_text, fire_event=False)

    @key_binding_manager.registry.add_binding(Keys.ControlSpace)
    def _(event):
        """
        Initialize autocompletion at cursor.

        If the autocompletion menu is not showing, display it with the
        appropriate completions for the context.

        If the menu is showing, select the next completion.
        """
        _logger.debug('Detected <C-Space> key.')

        b = event.cli.current_buffer
        if b.complete_state:
            b.complete_next()
        else:
            event.cli.start_completion(select_first=False)

    @key_binding_manager.registry.add_binding(Keys.ControlJ,
                                              filter=HasSelectedCompletion())
    def _(event):
        """
        Makes the enter key work as the tab key only when showing the menu.
        """
        _logger.debug('Detected <C-J> key.')

        event.current_buffer.complete_state = None
        b = event.cli.current_buffer
        b.complete_state = None

    return key_binding_manager
Python
0
@@ -1959,213 +1959,15 @@ -line_start = doc.cursor_position + doc.get_start_of_line_position()%0A line_end = doc.cursor_position + doc.get_end_of_line_position()%0A current_line = doc.text%5Bline_start:line_end%5D%0A%0A if +if doc. curr
e3f7b73ee06301484dbb97209508c5f36a88236f
split Polar/Airfoil data. added more general modification slots for airfoil preprocessing
fusedwind/src/fusedwind/basic_airfoil.py
fusedwind/src/fusedwind/basic_airfoil.py
#!/usr/bin/env python
# encoding: utf-8

from openmdao.main.api import Component, Assembly, VariableTree
from openmdao.main.datatypes.api import Float, Array, Slot, Str, List


# ------- variable trees ---------

class PolarDataVT(VariableTree):
    """airfoil data at a given Reynolds number"""
    alpha = Array(units='deg', desc='angles of attack')
    cl = Array(desc='corresponding lift coefficients')
    cd = Array(desc='corresponding drag coefficients')
    cm = Array(desc='corresponding pitching moment coefficients')


class AirfoilDataVT(VariableTree):
    Re = Array(desc='Reynolds number')
    polars = List(PolarDataVT, desc='corresponding Polar data')

# ------------------------------------


# ------- base classes ----------

class BasicAirfoilBase(Component):
    """Evaluation of airfoil at angle of attack and Reynolds number"""

    # inputs
    alpha = Float(iotype='in', units='deg', desc='angle of attack')
    Re = Float(iotype='in', desc='Reynolds number')

    # outputs
    cl = Float(iotype='out', desc='lift coefficient')
    cd = Float(iotype='out', desc='drag coefficient')
    cm = Float(iotype='out', desc='pitching moment coefficient')


def airfoilForces(airfoil, alpha, Re):
    """convenience method to use BasicAirfoilBase
    as a regular python function as opposed to a component"""
    airfoil.alpha = alpha
    airfoil.Re = Re
    airfoil.run()
    return airfoil.cl, airfoil.cd, airfoil.cm


class ModifyAirfoilBase(Component):
    """Used for extrapolation, 3D corrections, etc."""

    # inputs
    afIn = Slot(AirfoilDataVT, iotype='in', desc='tabulated airfoil data')

    # outputs
    afOut = Slot(AirfoilDataVT, iotype='out', desc='tabulated airfoil data')

    def __init__(self):
        super(ModifyAirfoilBase, self).__init__()
        self.afIn = AirfoilDataVT()
        self.afOut = AirfoilDataVT()


class ReadAirfoilBase(Component):
    """Read airfoil data from a file"""

    # inputs
    fileIn = Str(iotype='in', desc='name of file')

    # outputs
    afOut = Slot(AirfoilDataVT, iotype='out', desc='tabulated airfoil data')

    def __init__(self):
        super(ReadAirfoilBase, self).__init__()
        self.afOut = AirfoilDataVT()


class WriteAirfoilBase(Component):
    """Write airfoil data to a file"""

    # inputs
    afIn = Slot(AirfoilDataVT, iotype='in', desc='tabulated airfoil data')
    fileOut = Str(iotype='in', desc='name of file')

    def __init__(self):
        super(WriteAirfoilBase, self).__init__()
        self.afIn = AirfoilDataVT()

# ---------------------------


# ------- assemblies -------------

class AirfoilPreprocessingAssembly(Assembly):

    # for the benefit of the GUI
    read = Slot(ReadAirfoilBase)
    correct3D = Slot(ModifyAirfoilBase)
    extrapolate = Slot(ModifyAirfoilBase)
    write = Slot(WriteAirfoilBase)

    def configure(self):

        self.add('read', ReadAirfoilBase())
        self.add('correct3D', ModifyAirfoilBase())
        self.add('extrapolate', ModifyAirfoilBase())
        self.add('write', WriteAirfoilBase())

        self.driver.workflow.add(['read', 'correct3D', 'extrapolate', 'write'])

        self.connect('read.afOut', 'correct3D.afIn')
        self.connect('correct3D.afOut', 'extrapolate.afIn')
        self.connect('extrapolate.afOut', 'write.afIn')

        self.create_passthrough('read.fileIn')
        self.create_passthrough('write.fileOut')

# ---------------------------------
Python
0
@@ -619,16 +619,21 @@ = List( +Slot( PolarDat @@ -635,16 +635,17 @@ arDataVT +) , desc=' @@ -1862,24 +1862,121 @@ lDataVT()%0A%0A%0A +class NoModification(ModifyAirfoilBase):%0A%0A def execute(self):%0A self.afOut = self.afIn%0A%0A %0Aclass ReadA @@ -2666,24 +2666,25 @@ -------%0A%0A%0A%0A%0A +%0A # ------- as @@ -2824,25 +2824,20 @@ se)%0A -correct3D +mod1 = Slot( @@ -2859,27 +2859,55 @@ se)%0A -extrapolate +mod2 = Slot(ModifyAirfoilBase)%0A mod3 = Slot( @@ -3054,37 +3054,29 @@ dd(' -correct3D +mod1 ', +No Modif -yAirfoilBase +ication ())%0A @@ -3097,39 +3097,72 @@ dd(' -extrapolate +mod2 ', +No Modif -yAirfoilBase +ication())%0A self.add('mod3', NoModification ())%0A @@ -3255,32 +3255,28 @@ ', ' -correct3D', 'extrapolate +mod1', 'mod2', 'mod3 ', ' @@ -3321,25 +3321,68 @@ fOut', ' -correct3D +mod1.afIn')%0A self.connect('mod1.afOut', 'mod2 .afIn')%0A @@ -3407,17 +3407,12 @@ ct(' -correct3D +mod2 .afO @@ -3417,27 +3417,20 @@ fOut', ' -extrapolate +mod3 .afIn')%0A @@ -3455,19 +3455,12 @@ ct(' -extrapolate +mod3 .afO
ef53285ce0777650dbbadce92ddfdb15e401887a
Add some error tracking hints for sentry
mainapp/functions/geo_functions.py
mainapp/functions/geo_functions.py
import logging
import re
from typing import Optional, Dict, Any, List, Tuple

from django.conf import settings
from geopy import OpenCage, Nominatim, MapBox
from geopy.exc import GeocoderServiceError
from geopy.geocoders.base import Geocoder
from slugify import slugify

logger = logging.getLogger(__name__)


def get_geolocators() -> List[Tuple[str, Geocoder]]:
    geolocators = []
    if settings.GEOEXTRACT_ENGINE.lower() == "opencage":
        if not settings.OPENCAGE_KEY:
            raise ValueError(
                "OpenCage Data is selected as Geocoder, however no OPENCAGE_KEY is set"
            )
        geolocators.append(("opencage", OpenCage(settings.OPENCAGE_KEY)))

    if settings.MAPBOX_TOKEN:
        geolocators.append(("mapbox", MapBox(settings.MAPBOX_TOKEN)))

    geolocators.append(
        ("nominatim", Nominatim(user_agent=slugify(settings.PRODUCT_NAME) + "/1.0"))
    )

    return geolocators


def geocode(search: str) -> Optional[Dict[str, Any]]:
    for name, geolocator in get_geolocators():
        try:
            if name == "mapbox":
                location = geolocator.geocode(search, exactly_one=False)
            else:
                # noinspection PyArgumentList
                location = geolocator.geocode(
                    search, language=settings.GEOEXTRACT_LANGUAGE, exactly_one=False
                )
        except GeocoderServiceError as e:
            logger.warning(f"Geocoding with {name} failed: {e}")
            continue

        if location:
            return {
                "type": "Point",
                "coordinates": [location[0].longitude, location[0].latitude],
            }
        else:
            logger.debug(f"No location found for {search}")
            return None

    logger.error(f"All geocoding attempts failed. Search string was {search}")
    return None


def _format_opencage_location(location) -> str:
    components = location.raw["components"]
    if "road" in components:
        address = components["road"]
        if "house_number" in components:
            address += " " + components["house_number"]
    elif "pedestrian" in components:
        address = components["pedestrian"]
    else:
        address = location.address
    return address


def _format_nominatim_location(location) -> str:
    if re.match("^\d", location.split(",")[0]):
        # Number at the beginning: probably a house number
        return location.split(",")[1] + " " + location.split(",")[0]
    else:
        return location.split(",")[0]


def latlng_to_address(lat, lng) -> str:
    search_str = str(lat) + ", " + str(lng)
    if settings.GEOEXTRACT_ENGINE.lower() == "opencage":
        if not settings.OPENCAGE_KEY:
            raise ValueError(
                "OpenCage Data is selected as Geocoder, however no OPENCAGE_KEY is set"
            )
        location = OpenCage(settings.OPENCAGE_KEY).reverse(search_str)
        if len(location) > 0:
            return _format_opencage_location(location[0])
    else:
        location = Nominatim(
            user_agent=slugify(settings.PRODUCT_NAME) + "/1.0"
        ).reverse(search_str)
        if len(location) > 0:
            return _format_nominatim_location(location[0])
    return search_str
Python
0
@@ -1747,16 +1747,60 @@ rn None%0A + # exc_info to help sentry with grouping%0A logg @@ -1864,24 +1864,39 @@ as %7Bsearch%7D%22 +, exc_info=True )%0A return
5ff6dffeaf757e360a42e22a9df6d74345a4f418
Fix panda part imports
malcolm/parts/pandabox/__init__.py
malcolm/parts/pandabox/__init__.py
# Don't import all the parts as they need to be created from
# includes.pandabox.hardware_collection()
from malcolm.parts.pandabox.pandaboxdriverpart import PandABoxDriverPart
Python
0.000001
@@ -1,178 +1,188 @@ # -Don't import all the parts as they need to be created from%0A# includes.pandabox.hardware_collection()%0Afrom malcolm.parts.pandabox.pandaboxdriverpart import PandABoxDriverPart%0A%0A +Find all subpackages, MethodMeta decorated callables, and YAML files%0Afrom malcolm.packageutil import prepare_package%0A%0A__all__ = prepare_package(globals(), __name__)%0A%0Adel prepare_package %0A
29846e0abbd27b05029bb3e937a6bdcbc1f31b0e
Raise an exception when the wpr_archive_info isn't created.
tools/telemetry/telemetry/page/record_wpr.py
tools/telemetry/telemetry/page/record_wpr.py
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import sys

from telemetry import benchmark
from telemetry.core import browser_options
from telemetry.core import discover
from telemetry.core import util
from telemetry.core import wpr_modes
from telemetry.page import page_runner
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.page import profile_creator
from telemetry.page import test_expectations
from telemetry.results import results_options


class RecorderPageTest(page_test.PageTest):  # pylint: disable=W0223
  def __init__(self, action_names):
    super(RecorderPageTest, self).__init__()
    self._action_names = action_names
    self.page_test = None

  def CanRunForPage(self, page):
    return page.url.startswith('http')

  def WillStartBrowser(self, browser):
    if self.page_test:
      self.page_test.WillStartBrowser(browser)

  def DidStartBrowser(self, browser):
    if self.page_test:
      self.page_test.DidStartBrowser(browser)

  def WillNavigateToPage(self, page, tab):
    """Override to ensure all resources are fetched from network."""
    tab.ClearCache(force=False)
    if self.page_test:
      self.page_test.options = self.options
      self.page_test.WillNavigateToPage(page, tab)

  def DidNavigateToPage(self, page, tab):
    if self.page_test:
      self.page_test.DidNavigateToPage(page, tab)

  def WillRunActions(self, page, tab):
    if self.page_test:
      self.page_test.WillRunActions(page, tab)

  def DidRunActions(self, page, tab):
    if self.page_test:
      self.page_test.DidRunActions(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    if self.page_test:
      self.page_test.ValidateAndMeasurePage(page, tab, results)

  def RunPage(self, page, tab, results):
    tab.WaitForDocumentReadyStateToBeComplete()
    util.WaitFor(tab.HasReachedQuiescence, 30)

    if self.page_test:
      self._action_name_to_run = self.page_test.action_name_to_run
      self.page_test.RunPage(page, tab, results)
      return

    should_reload = False
    # Run the actions on the page for all available measurements.
    for action_name in self._action_names:
      # Skip this action if it is not defined
      if not hasattr(page, action_name):
        continue
      # Reload the page between actions to start with a clean slate.
      if should_reload:
        self.RunNavigateSteps(page, tab)
      self._action_name_to_run = action_name
      super(RecorderPageTest, self).RunPage(page, tab, results)
      should_reload = True

  def RunNavigateSteps(self, page, tab):
    if self.page_test:
      self.page_test.RunNavigateSteps(page, tab)
    else:
      super(RecorderPageTest, self).RunNavigateSteps(page, tab)


def FindAllActionNames(base_dir):
  """Returns a set of of all action names used in our measurements."""
  action_names = set()
  # Get all PageTests except for ProfileCreators (see crbug.com/319573)
  for _, cls in discover.DiscoverClasses(
      base_dir, base_dir, page_test.PageTest).items():
    if not issubclass(cls, profile_creator.ProfileCreator):
      action_name = cls().action_name_to_run
      if action_name:
        action_names.add(action_name)
  return action_names


def _MaybeGetInstanceOfClass(target, base_dir, cls):
  if isinstance(target, cls):
    return target
  classes = discover.DiscoverClasses(base_dir, base_dir, cls,
                                     index_by_class_name=True)
  return classes[target]() if target in classes else None


class WprRecorder(object):
  def __init__(self, base_dir, target, args=None):
    action_names_to_run = FindAllActionNames(base_dir)
    self._record_page_test = RecorderPageTest(action_names_to_run)
    self._options = self._CreateOptions()
    self._benchmark = _MaybeGetInstanceOfClass(target, base_dir,
                                               benchmark.Benchmark)
    if self._benchmark is not None:
      self._record_page_test.page_test = self._benchmark.test()

    self._parser = self._options.CreateParser(usage='%prog <PageSet|Benchmark>')
    self._AddCommandLineArgs()
    self._ParseArgs(args)
    self._ProcessCommandLineArgs()
    self._page_set = self._GetPageSet(base_dir, target)

  @property
  def options(self):
    return self._options

  def _CreateOptions(self):
    options = browser_options.BrowserFinderOptions()
    options.browser_options.wpr_mode = wpr_modes.WPR_RECORD
    options.browser_options.no_proxy_server = True
    return options

  def CreateResults(self):
    if self._benchmark is not None:
      benchmark_metadata = self._benchmark.GetMetadata()
    else:
      benchmark_metadata = benchmark.BenchmarkMetadata('record_wpr')

    return results_options.CreateResults(benchmark_metadata, self._options)

  def _AddCommandLineArgs(self):
    page_runner.AddCommandLineArgs(self._parser)
    if self._benchmark is not None:
      self._benchmark.AddCommandLineArgs(self._parser)
      self._benchmark.SetArgumentDefaults(self._parser)
    self._SetArgumentDefaults()

  def _SetArgumentDefaults(self):
    self._parser.set_defaults(**{'output_formats': ['none']})

  def _ParseArgs(self, args=None):
    args_to_parse = sys.argv[1:] if args is None else args
    self._parser.parse_args(args_to_parse)

  def _ProcessCommandLineArgs(self):
    page_runner.ProcessCommandLineArgs(self._parser, self._options)
    if self._benchmark is not None:
      self._benchmark.ProcessCommandLineArgs(self._parser, self._options)

  def _GetPageSet(self, base_dir, target):
    if self._benchmark is not None:
      return self._benchmark.CreatePageSet(self._options)
    ps = _MaybeGetInstanceOfClass(target, base_dir, page_set.PageSet)
    if ps is None:
      self._parser.print_usage()
      sys.exit(1)
    return ps

  def Record(self, results):
    self._page_set.wpr_archive_info.AddNewTemporaryRecording()
    self._record_page_test.CustomizeBrowserOptions(self._options)
    page_runner.Run(self._record_page_test, self._page_set,
                    test_expectations.TestExpectations(), self._options,
                    results)

  def HandleResults(self, results):
    if results.failures or results.skipped_values:
      logging.warning('Some pages failed and/or were skipped. The recording '
                      'has not been updated for these pages.')
    results.PrintSummary()
    self._page_set.wpr_archive_info.AddRecordedPages(
        results.pages_that_succeeded)


def Main(base_dir):
  quick_args = [a for a in sys.argv[1:] if not a.startswith('-')]
  if len(quick_args) != 1:
    print >> sys.stderr, 'Usage: record_wpr <PageSet|Benchmark>\n'
    sys.exit(1)
  target = quick_args.pop()
  wpr_recorder = WprRecorder(base_dir, target)
  results = wpr_recorder.CreateResults()
  wpr_recorder.Record(results)
  wpr_recorder.HandleResults(results)
  return min(255, len(results.failures))
Python
0.0001
@@ -5904,24 +5904,129 @@ , results):%0A + assert self._page_set.wpr_archive_info, (%0A 'Pageset archive_data_file path must be specified.')%0A self._pa
370731942a2b5cdc6e0f712f5ee307f1ee45e488
Improve memory usage
markovify/chain.py
markovify/chain.py
import random
import itertools
import operator
import bisect
import json
from collections import defaultdict

BEGIN = "___BEGIN__"
END = "___END__"


def accumulate(iterable, func=operator.add):
    """
    Cumulative calculations. (Summation, by default.)
    Via: https://docs.python.org/3/library/itertools.html#itertools.accumulate
    """
    it = iter(iterable)
    total = next(it)
    yield total
    for element in it:
        total = func(total, element)
        yield total


class Chain(object):
    """
    A Markov chain representing processes that have both beginnings and ends.
    For example: Sentences.
    """
    def __init__(self, corpus, state_size, model=None):
        """
        `corpus`: A list of lists, where each outer list is a "run"
        of the process (e.g., a single sentence), and each inner list
        contains the steps (e.g., words) in the run. If you want to
        simulate an infinite process, you can come very close by passing
        just one, very long run.

        `state_size`: An integer indicating the number of items the model
        uses to represent its state. For text generation, 2 or 3 are typical.
        """
        self.state_size = state_size
        self.model = model or self.build(corpus, state_size)

    def build(self, corpus, state_size):
        """
        Build a Python representation of the Markov model. Returns a dict
        of dicts where the keys of the outer dict represent all possible
        states, and point to the inner dicts. The inner dicts represent all
        possibilities for the "next" item in the chain, along with the count
        of times it appears.
        """
        if (type(corpus) != list) or (type(corpus[0]) != list):
            raise Exception("`corpus` must be list of lists")

        model = defaultdict(lambda: defaultdict(int))

        for run in corpus:
            items = ([ BEGIN ] * state_size) + run + [ END ]
            for i in range(len(run) + 1):
                state = tuple(items[i:i+state_size])
                follow = items[i+state_size]
                model[state][follow] += 1

        return model

    def move(self, state):
        """
        Given a state, choose the next item at random.
        """
        choices, weights = zip(*self.model[state].items())
        cumdist = list(accumulate(weights))
        r = random.random() * cumdist[-1]
        selection = choices[bisect.bisect(cumdist, r)]
        return selection

    def gen(self, init_state=None):
        """
        Starting either with a naive BEGIN state, or the provided
        `init_state` (as a tuple), return a generator that will yield
        successive items until the chain reaches the END state.
        """
        state = init_state or (BEGIN,) * self.state_size
        while True:
            next_word = self.move(state)
            if next_word == END: break
            yield next_word
            state = tuple(state[1:]) + (next_word,)

    def walk(self, init_state=None):
        """
        Return a list representing a single run of the Markov model, either
        starting with a naive BEGIN state, or the provided `init_state`
        (as a tuple).
        """
        return list(self.gen(init_state))

    def to_json(self):
        """
        Dump the model as a JSON object, for loading later.
        """
        return json.dumps(list(self.model.items()))

    @classmethod
    def from_json(cls, json_thing):
        """
        Given a JSON object or JSON string that was created by
        `self.to_json`, return the corresponding markovify.Chain.
        """

        # Python3 compatibility
        try:
            basestring
        except NameError:
            basestring = str

        if isinstance(json_thing, basestring):
            obj = json.loads(json_thing)
        else:
            obj = json_thing

        state_size = len(obj[0][0])

        rehydrated = {tuple(item[0]): item[1] for item in obj}

        inst = cls(None, state_size, rehydrated)
        return inst
Python
0.000228
@@ -11,25 +11,8 @@ dom%0A -import itertools%0A impo @@ -52,44 +52,8 @@ json -%0Afrom collections import defaultdict %0A%0ABE @@ -1741,24 +1741,25 @@ lists%22)%0A +%0A model = @@ -1754,53 +1754,136 @@ -model = d +# Using a D efault -dict(lambda: defaultdict(int)) +Dict here would be a lot more convenient, however the memory%0A # usage is far higher.%0A model = %7B%7D%0A %0A @@ -2062,16 +2062,16 @@ _size%5D)%0A - @@ -2107,16 +2107,187 @@ e_size%5D%0A + if state not in model:%0A model%5Bstate%5D = %7B%7D%0A%0A if follow not in model%5Bstate%5D:%0A model%5Bstate%5D%5Bfollow%5D = 0%0A%0A
d2fdf0d91f41350347ba460e33cc04aa1e59eb96
Call the run script from the analysis driver
analysis_driver.py
analysis_driver.py
#! /usr/bin/env python
# Brokers communication between Dakota and SWASH through files.
#
# Arguments:
#   $1 is 'params.in' from Dakota
#   $2 is 'results.out' returned to Dakota

import sys
import os
import re
import shutil
from subprocess import call
import numpy as np


def read(output_file, variable=None):
    """Read data from a MATfile. Returns a numpy array, or None on an error."""
    from scipy.io import loadmat
    try:
        mat = loadmat(output_file)
        var = mat[variable]
    except IOError:
        return None
    else:
        return(var)


def write(results_file, array, labels):
    """Write a Dakota results file from an input array."""
    try:
        fp = open(results_file, 'w')
        for i in range(len(array)):
            fp.write(str(array[i]) + '\t' + labels[i] + '\n')
    except IOError:
        raise
    finally:
        fp.close()


def get_labels(params_file):
    """Extract labels from a Dakota parameters file."""
    labels = []
    try:
        fp = open(params_file, 'r')
        for line in fp:
            if re.search('ASV_', line):
                labels.append(''.join(re.findall(':(\S+)', line)))
    except IOError:
        raise
    finally:
        fp.close()
    return(labels)


if __name__ == '__main__':

    # Files and directories.
    start_dir = os.path.dirname(os.path.realpath(__file__))
    input_template = 'INPUT.template'
    input_file = 'INPUT'
    output_file = 'bot07.mat'
    output_file_var = 'Botlev'

    # Use the parsing utility `dprepro` (from $DAKOTA_DIR/bin) to
    # incorporate the parameters from Dakota into the SWASH input
    # template, creating a new SWASH input file.
    shutil.copy(os.path.join(start_dir, input_template), os.curdir)
    call(['dprepro', sys.argv[1], input_template, input_file])

    # Call SWASH with the new input file.
    call(['swash_mpi.exe', input_file])

    # Calculate the mean and standard deviation of the 'Botlev' output
    # values for the simulation. Write the output to a Dakota results
    # file.
    labels = get_labels(sys.argv[1])
    series = read(output_file, output_file_var)
    if series is not None:
        m_series = [np.mean(series), np.std(series)]
    else:
        m_series = [0, 0]
    write(sys.argv[2], m_series, labels)
Python
0
@@ -1481,16 +1481,48 @@ 'Botlev' +%0A run_script = 'run_swash.sh' %0A%0A # @@ -1854,65 +1854,151 @@ ith -the new input file.%0A call(%5B'swash_mpi.exe', input_file +a script containing PBS commands.%0A job_name = 'SWASH-Dakota' + os.path.splitext(os.getcwd())%5B1%5D%0A call(%5B'qsub', '-N', job_name, run_script %5D)%0A%0A
9e95522c847b12a19cff54737a44f569fe2cf6b7
Add method for getting Candidacy.party_name
opencivicdata/elections/admin/candidacy.py
opencivicdata/elections/admin/candidacy.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Custom administration panels for Candidacy-related models. """ from django import VERSION as django_version from django.contrib import admin from opencivicdata.core.admin import base from .. import models class CandidacySourceInline(base.LinkInline): """ Custom inline administrative panel for the CandidacySource model. """ model = models.CandidacySource @admin.register(models.Candidacy) class CandidacyAdmin(base.ModelAdmin): """ Custom inline administrative panel for the Candidacy model. """ raw_id_fields = ( 'person', 'contest', 'top_ticket_candidacy', ) fields = ( 'candidate_name', 'post', 'filed_date', 'is_incumbent', 'registration_status', 'party', ) + raw_id_fields list_display = ( 'candidate_name', 'contest', 'is_incumbent', 'registration_status', 'id', 'party__name', 'updated_at', ) search_fields = ('candidate_name', 'contest__name', 'post__label', ) list_filter = ( 'party__name', 'is_incumbent', 'registration_status', 'updated_at', ) # date_hierarchy across relations was added to django 1.11 if django_version[0] >= 1 and django_version[1] >= 11: date_hierarchy = 'contest__election__start_time' inlines = [ CandidacySourceInline, ]
Python
0.000001
@@ -984,33 +984,32 @@ %0A 'party_ -_ name',%0A ' @@ -1398,17 +1398,11 @@ on__ -start_tim +dat e'%0A%0A @@ -1454,8 +1454,246 @@ ,%0A %5D%0A +%0A def party_name(self, obj):%0A %22%22%22%0A Return the name of the Party associated with the Candidacy.%0A %22%22%22%0A if obj.party:%0A name = obj.party.name%0A else:%0A name = None%0A return name%0A
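The added `party_name` follows a common Django admin idiom: a ModelAdmin method can stand in for a field in `list_display`. A generic sketch of the idiom (the base class is stubbed so the snippet runs without Django):

class ModelAdminStub(object):
    """Stand-in for django.contrib.admin.ModelAdmin, for illustration only."""

class CandidacyAdminSketch(ModelAdminStub):
    # list_display may name a method defined on the admin class.
    list_display = ('candidate_name', 'party_name')

    def party_name(self, obj):
        # Guard against a missing relation instead of raising AttributeError.
        return obj.party.name if obj.party else None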
ce266cec800fd921f9b4de82fd9f9666ed2df053
Fix another issue
modules/gy-271/core/get.py
modules/gy-271/core/get.py
# Distributed with a free-will license. # Use it any way you want, profit or free, provided it fits in the licenses of its associated works. # HMC5883 # This code is designed to work with the HMC5883_I2CS I2C Mini Module available from ControlEverything.com. # https://www.controleverything.com/content/Compass?sku=HMC5883_I2CS#tabs-0-product_tabset-2 # Modified by Broda Noel (@BrodaNoel in all social networks) import smbus import time # Get I2C bus bus = smbus.SMBus(1) address = 0x1E # HMC5883 address, 0x1E(30) # Select configuration register A, 0x00(00) # 0x60(96) Normal measurement configuration, Data output rate = 0.75 Hz bus.write_byte_data(address, 0x00, 0x60) # HMC5883 address, 0x1E(30) # Select mode register, 0x02(02) # 0x00(00) Continuous measurement mode bus.write_byte_data(address, 0x02, 0x00) time.sleep(0.5) # HMC5883 address, 0x1E(30) # Read data back from 0x03(03), 6 bytes # X-Axis MSB, X-Axis LSB, Z-Axis MSB, Z-Axis LSB, Y-Axis MSB, Y-Axis LSB data = bus.read_i2c_block_data(address, 0x03, 6) # Convert the data xMag = data[0] * 256 + data[1] if xMag > 32767 : xMag -= 65536 zMag = data[2] * 256 + data[3] if zMag > 32767 : zMag -= 65536 yMag = data[4] * 256 + data[5] if yMag > 32767 : yMag -= 65536 # Output data to screen sys.stdout.write('{ "x": ' + str(xMag) + ', "y": ' + str(yMag) + ', "z": ' + str(zMag) + ' }')
Python
0.000005
@@ -431,16 +431,27 @@ ort time +%0Aimport sys %0A%0A# Get
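The per-axis conversion in this module is 16-bit big-endian two's complement decoding; the same logic as a self-contained helper:

def to_int16(msb, lsb):
    # Combine the two bytes, then fold values above 0x7FFF into the
    # negative range (two's complement).
    value = msb * 256 + lsb
    if value > 32767:
        value -= 65536
    return value

assert to_int16(0xFF, 0xFF) == -1
assert to_int16(0x00, 0x10) == 16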
4fccaeefd67c3c736861870a8fe711a934c96e6d
Add some documentation
mythril/laser/ethereum/transaction.py
mythril/laser/ethereum/transaction.py
import logging from mythril.laser.ethereum.state import GlobalState, Environment, CalldataType from mythril.laser.ethereum.cfg import Node, Edge, JumpType from z3 import BitVec class CallTransaction: def __init__(self, callee_address): self.callee_address = callee_address self.caller = BitVec("caller", 256) self.gas_price = BitVec("gasprice", 256) self.call_value = BitVec("callvalue", 256) self.origin = BitVec("origin", 256) pass def run(self, open_world_states, evm): for open_world_state in open_world_states: # Initialize the execution environment environment = Environment( open_world_state[self.callee_address], self.caller, [], self.gas_price, self.call_value, self.origin, calldata_type=CalldataType.SYMBOLIC, ) new_node = Node(environment.active_account.contract_name) evm.instructions_covered = [False for _ in environment.code.instruction_list] evm.nodes[new_node.uid] = new_node if open_world_state.node: evm.edges.append(Edge(open_world_state.node.uid, new_node.uid, edge_type=JumpType.Transaction, condition=None)) global_state = GlobalState(open_world_state, environment, new_node) new_node.states.append(global_state) evm.work_list.append(global_state) evm.exec() logging.info("Execution complete") logging.info("Achieved {0:.3g}% coverage".format(evm.coverage))
Python
0.000001
@@ -203,43 +203,264 @@ -def __init__(self, callee_address): +%22%22%22 Represents a call value transaction %22%22%22%0A def __init__(self, callee_address):%0A %22%22%22%0A Constructor for Call transaction, sets up all symbolic parameters%0A :param callee_address: Address of the contract that will be called%0A %22%22%22 %0A @@ -703,16 +703,16 @@ pass%0A%0A - def @@ -746,16 +746,100 @@ , evm):%0A + %22%22%22 Runs this transaction on the evm starting from the open world states%22%22%22%0A
9f3bf2756debb4534ddcbf538577044e2bae6528
Remove unused import
memopol2/search.py
memopol2/search.py
# -*- coding: utf-8 -*- import os os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import logging from django.db.models import signals from django.conf import settings from whoosh import fields, index from whoosh.filedb.filestore import FileStorage log = logging.getLogger(__name__) WHOOSH_SCHEMA = fields.Schema(title=fields.TEXT(stored=True), content=fields.TEXT, url=fields.ID(stored=True, unique=True)) def create_index(sender=None, **kwargs): if not os.path.exists(settings.WHOOSH_INDEX): os.mkdir(settings.WHOOSH_INDEX) storage = FileStorage(settings.WHOOSH_INDEX) storage.create_index(WHOOSH_SCHEMA, indexname='memopol') signals.post_syncdb.connect(create_index) def update_index(sender, instance, created, **kwargs): try: url = unicode(instance.get_absolute_url()) except Exception, e: log.critical('Cant resolve url. Content %r not indexed' % instance) return content = getattr(instance, 'content', None) if content is None: content = unicode(instance) elif callable(content): content = content() storage = FileStorage(settings.WHOOSH_INDEX) ix = storage.open_index(indexname='memopol') writer = ix.writer() if created: writer.add_document(title=unicode(instance), content=content, url=url) writer.commit() else: writer.update_document(title=unicode(instance), content=content, url=url) writer.commit() _searchables = [] def searchable(klass): if hasattr(klass, 'get_absolute_url'): signals.post_save.connect(update_index, sender=klass) _searchables.append(klass) if not hasattr(klass, 'content'): log.warn('%s is declared as searchable but has no content attribute' % klass) else: log.warn('%s is declared as searchable but has no get_absolute_url' % klass) return klass def update(): from meps import models from mps import models from reps import models create_index() for klass in _searchables: for i in klass.objects.all(): update_index(None, i, created=False) if __name__ == '__main__': update()
Python
0
@@ -187,23 +187,16 @@ t fields -, index %0Afrom wh
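For orientation, the Whoosh flow this module wraps, reduced to a standalone sketch (assumes the `whoosh` package is installed; the `idx` directory and `demo` index name are arbitrary):

import os
from whoosh import fields
from whoosh.filedb.filestore import FileStorage

schema = fields.Schema(title=fields.TEXT(stored=True),
                       content=fields.TEXT,
                       url=fields.ID(stored=True, unique=True))
if not os.path.exists('idx'):
    os.mkdir('idx')
ix = FileStorage('idx').create_index(schema, indexname='demo')
writer = ix.writer()
writer.add_document(title=u'Hello', content=u'Hello world', url=u'/hello')
writer.commit()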
e995a4725873f0587300aa1d0df6d05c7eaf412c
Move package folder deletion to start of execution
matador/commands/deploy_package.py
matador/commands/deploy_package.py
#!/usr/bin/env python from .command import Command from .deploy_ticket import execute_ticket from matador.session import Session import subprocess import os import shutil import yaml from importlib.machinery import SourceFileLoader class ActionPackage(Command): def _add_arguments(self, parser): parser.prog = 'matador deploy-package' parser.add_argument( '-e', '--environment', type=str, required=True, help='Agresso environment name') parser.add_argument( '-p', '--package', type=str, required=True, help='Package name') parser.add_argument( '-c', '--commit', type=str, default='none', help='Commit or tag ID') @staticmethod def _checkout_package(package, commit): proj_folder = Session.project_folder repo_folder = Session.matador_repository_folder package_folder = os.path.join( Session.matador_packages_folder, package) Session.update_repository() if commit == 'none': commit = subprocess.check_output( ['git', '-C', proj_folder, 'rev-parse', 'HEAD'], stderr=subprocess.STDOUT).decode('utf-8').strip('\n') subprocess.run([ 'git', '-C', repo_folder, 'checkout', commit], stderr=subprocess.STDOUT, stdout=open(os.devnull, 'w'), check=True) src = os.path.join(repo_folder, 'deploy', 'packages', package) shutil.copytree(src, package_folder) def _execute(self): Session.set_environment(self.args.environment) self._checkout_package(self.args.package, self.args.commit) class DeployPackage(ActionPackage): def _execute(self): super(DeployPackage, self)._execute() package_folder = os.path.join( Session.matador_packages_folder, self.args.package) Session.deployment_folder = package_folder ticketsFile = os.path.join(package_folder, 'tickets.yml') try: tickets = yaml.load(open(ticketsFile, 'r')) for ticket in tickets: execute_ticket(str(ticket), 'deploy', self.args.commit, True) finally: shutil.rmtree(package_folder) class RemovePackage(ActionPackage): def _execute(self): super(RemovePackage, self)._execute() package_folder = os.path.join( Session.matador_packages_folder, self.args.package) Session.deployment_folder = package_folder sourceFile = os.path.join(package_folder, 'remove.py') try: SourceFileLoader('remove', sourceFile).load_module() finally: shutil.rmtree(package_folder)
Python
0.000001
@@ -1045,24 +1045,83 @@ , package)%0A%0A + shutil.rmtree(package_folder, ignore_errors=True)%0A%0A Sess @@ -2138,33 +2138,17 @@ s.yml')%0A - try:%0A +%0A @@ -2199,20 +2199,16 @@ - for tick @@ -2234,20 +2234,16 @@ - - execute_ @@ -2300,66 +2300,8 @@ ue)%0A - finally:%0A shutil.rmtree(package_folder) %0A%0A%0Ac @@ -2627,25 +2627,9 @@ y')%0A - try:%0A +%0A @@ -2689,63 +2689,4 @@ e()%0A - finally:%0A shutil.rmtree(package_folder)%0A
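The commit-resolution call used by `_checkout_package` is reusable on its own; a minimal sketch (assumes `git` on PATH and that `repo` points at a checkout):

import subprocess

def head_commit(repo):
    # `git rev-parse HEAD` prints the current commit hash plus a newline.
    return subprocess.check_output(
        ['git', '-C', repo, 'rev-parse', 'HEAD'],
        stderr=subprocess.STDOUT).decode('utf-8').strip('\n')

print(head_commit('.'))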
534437a0d55fccae50a86a95182a0460d07c64da
Increment version number.
mopidy_pandora/__init__.py
mopidy_pandora/__init__.py
from __future__ import absolute_import, division, print_function, unicode_literals import os from mopidy import config, ext __version__ = '0.2.0' class Extension(ext.Extension): dist_name = 'Mopidy-Pandora' ext_name = 'pandora' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): from pandora import BaseAPIClient schema = super(Extension, self).get_config_schema() schema['api_host'] = config.String() schema['partner_encryption_key'] = config.String() schema['partner_decryption_key'] = config.String() schema['partner_username'] = config.String() schema['partner_password'] = config.String() schema['partner_device'] = config.String() schema['username'] = config.String() schema['password'] = config.Secret() schema['preferred_audio_quality'] = config.String(choices=[BaseAPIClient.LOW_AUDIO_QUALITY, BaseAPIClient.MED_AUDIO_QUALITY, BaseAPIClient.HIGH_AUDIO_QUALITY]) schema['sort_order'] = config.String(choices=['date', 'A-Z', 'a-z']) schema['auto_setup'] = config.Boolean() schema['auto_set_repeat'] = config.Deprecated() schema['cache_time_to_live'] = config.Integer(minimum=0) schema['event_support_enabled'] = config.Boolean() schema['double_click_interval'] = config.String() schema['on_pause_resume_click'] = config.String(choices=['thumbs_up', 'thumbs_down', 'sleep', 'add_artist_bookmark', 'add_song_bookmark', 'delete_station']) schema['on_pause_next_click'] = config.String(choices=['thumbs_up', 'thumbs_down', 'sleep', 'add_artist_bookmark', 'add_song_bookmark', 'delete_station']) schema['on_pause_previous_click'] = config.String(choices=['thumbs_up', 'thumbs_down', 'sleep', 'add_artist_bookmark', 'add_song_bookmark', 'delete_station']) schema['on_pause_resume_pause_click'] = config.String(choices=['thumbs_up', 'thumbs_down', 'sleep', 'add_artist_bookmark', 'add_song_bookmark', 'delete_station']) return schema def setup(self, registry): from .backend import PandoraBackend from .frontend import EventMonitorFrontend, PandoraFrontend registry.add('backend', PandoraBackend) registry.add('frontend', PandoraFrontend) registry.add('frontend', EventMonitorFrontend)
Python
0.000001
@@ -139,17 +139,17 @@ = '0.2. -0 +1 '%0A%0A%0Aclas
fa67f0326f9f57bc01b023a266e1f896da617ff7
Make send_mail mockable by importing the module
osmaxx-py/excerptconverter/converter_helper.py
osmaxx-py/excerptconverter/converter_helper.py
from django.contrib import messages from django.core.mail import send_mail from django.utils.translation import ugettext_lazy as _ import stored_messages from osmaxx.excerptexport import models def module_converter_configuration(name, export_formats, export_options): """ :param export_formats example: { 'txt': { 'name': 'Text', 'file_extension': 'txt', 'mime_type': 'text/plain' }, 'markdown': { 'name': 'Markdown', 'file_extension': 'md', 'mime_type': 'text/markdown' } } :param export_options example: { 'image_resolution': { 'label': 'Resolution', 'type': 'number', 'default': '500' }, 'quality': { 'label': 'Quality', 'type': 'number', 'default': '10' } } """ return { 'name': name, 'formats': export_formats, 'options': export_options } # functions using database (extraction_order) must be instance methods of a class # -> free functions will not work: database connection error class ConverterHelper: def __init__(self, extraction_order): self.extraction_order = extraction_order self.user = extraction_order.orderer def file_conversion_finished(self): if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats): self.inform_user( messages.SUCCESS, _('The extraction of the order "{order_id}" has been finished.').format( order_id=self.extraction_order.id, ), email=True ) self.extraction_order.state = models.ExtractionOrderState.FINISHED self.extraction_order.save() def inform_user(self, message_type, message_text, email=True): stored_messages.api.add_message_for( users=[self.user], level=message_type, message_text=message_text ) if email: if hasattr(self.user, 'email'): send_mail( '[OSMAXX] '+message_text, message_text, 'no-reply@osmaxx.hsr.ch', [self.user.email] ) else: self.inform_user( messages.WARNING, _("There is no email address assigned to your account. " "You won't be notified by email on process finish!"), email=False )
Python
0
@@ -49,21 +49,16 @@ core -.mail import send @@ -53,21 +53,16 @@ import -send_ mail%0Afro @@ -2239,16 +2239,21 @@ +mail. send_mai
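The point of this change is testability: resolving `mail.send_mail` as a module attribute at call time lets `mock.patch` intercept it, whereas a directly imported function is bound before the patch applies. A hedged sketch (assumes Django is installed):

from unittest import mock

with mock.patch('django.core.mail.send_mail') as fake_send:
    # Code under test that does `from django.core import mail` and calls
    # `mail.send_mail(...)` now resolves to the mock at call time.
    from django.core import mail
    mail.send_mail('subject', 'body', 'no-reply@example.com',
                   ['user@example.com'])
    fake_send.assert_called_once()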
bac0b5e09fc172a991fb6b7172025c698c1a23d9
Add validation that each rule is a subclass of Rule in MultipleRulesGrammar
grammpy/Grammars/MultipleRulesGrammar.py
grammpy/Grammars/MultipleRulesGrammar.py
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 15.08.2017 14:40 :Licence GNUv3 Part of grammpy """ from .StringGrammar import StringGrammar from ..HashContainer import HashContainer from ..IsMethodsRuleExtension import IsMethodsRuleExtension as Rule class MultipleRulesGrammar(StringGrammar): def __init__(self, terminals=None, nonterminals=None, rules=None, start_symbol=None): super().__init__(terminals, nonterminals, rules, start_symbol) self._count = 0 def _create_class(self, rule): name = 'SplitRules' + str(self._count) self._count += 1 return type(name, (Rule,), {"rule": rule}) def _transform_rules(self, rules): rules = HashContainer.to_iterable(rules) r = [] for i in rules: if i.is_valid(self) and i.count() > 1: for rule in i.rules: r.append(self._create_class(rule)) else: r.append(i) return rules def get_rule(self, rules=None): if rules is None: return super().get_rule() results = super().get_rule(self._transform_rules(rules)) if not HashContainer.is_iterable(rules): return results[0] return results def have_rule(self, rules): return super().have_rule(self._transform_rules(rules)) def remove_rule(self, rules=None): if rules is None: return super().remove_rule() super().remove_rule(self._transform_rules(rules)) def add_rule(self, rules): super().add_rule(self._transform_rules(rules))
Python
0.000045
@@ -105,16 +105,79 @@ py%0A%0A%22%22%22%0A +import inspect%0A%0Afrom grammpy.exceptions import NotRuleException %0Afrom .S @@ -319,16 +319,40 @@ as Rule +, IsMethodsRuleExtension %0A%0A%0Aclass @@ -965,24 +965,150 @@ i in rules:%0A + if not inspect.isclass(i) or not issubclass(i, IsMethodsRuleExtension):%0A raise NotRuleException(i)%0A @@ -1300,20 +1300,16 @@ return r -ules %0A%0A de
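`_create_class` in this module builds rule classes at runtime with the three-argument form of `type()`; a self-contained illustration:

class Rule(object):
    rule = None

def make_rule_class(name, rule):
    # type(name, bases, namespace) creates a new class object on the fly.
    return type(name, (Rule,), {"rule": rule})

SplitRules0 = make_rule_class('SplitRules0', ('A', ['B', 'C']))
assert issubclass(SplitRules0, Rule)
assert SplitRules0.rule == ('A', ['B', 'C'])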
c26e61ca6e995142a0312800d8cf18a7f21a64ae
Add gamma setting to physical DMDs
mpfmc/core/physical_dmd.py
mpfmc/core/physical_dmd.py
"""Physical DMD.""" import struct from kivy.clock import Clock from kivy.graphics.fbo import Fbo from kivy.graphics.opengl import glReadPixels, GL_RGB, GL_UNSIGNED_BYTE from kivy.graphics.texture import Texture from kivy.uix.effectwidget import EffectWidget, EffectBase from mpfmc.widgets.dmd import Gain class PhysicalDmdBase(object): """Base class for DMD devices.""" dmd_name_string = 'Physical DMD' def __init__(self, mc, name, config): """Initialise DMD.""" self.mc = mc self.name = name self.mc.log.info('Initializing Physical DMD') self.config = self._get_validated_config(config) self.source = self.mc.displays[self.config['source_display']] self.prev_data = None # put the widget canvas on a Fbo texture = Texture.create(size=self.source.size, colorfmt='rgb') self.fbo = Fbo(size=self.source.size, texture=texture) self.effect_widget = EffectWidget() effect_list = list() effect_list.append(FlipVertical()) if self.config['brightness'] != 1.0: if not 0.0 <= self.config['brightness'] <= 1.0: raise ValueError("DMD brightness value should be between 0.0 " "and 1.0. Yours is {}".format(self.config['brightness'])) effect_list.append(Gain(gain=self.config['brightness'])) self.effect_widget.effects = effect_list self.effect_widget.size = self.source.size self.fbo.add(self.effect_widget.canvas) self._set_dmd_fps() def _get_validated_config(self, config): raise NotImplementedError def _set_dmd_fps(self): # fps is the rate that the connected client requested. We'll use the # lower of the two mc_fps = self.config['fps'] if mc_fps == 0: # pylint: disable-msg=protected-access mc_fps = Clock._max_fps # pylint: disable-msg=protected-access if mc_fps > Clock._max_fps: self.mc.log.warning("%s fps is higher than mpf-mc fps. " "Will use mpf-mc fps setting for the DMD.", PhysicalDmdBase.dmd_name_string) # pylint: disable-msg=protected-access fps = Clock._max_fps update = 0 # pylint: disable-msg=protected-access elif Clock._max_fps > mc_fps > 0: fps = mc_fps update = 1 / fps else: # pylint: disable-msg=protected-access fps = Clock._max_fps update = 0 Clock.schedule_interval(self.tick, update) self.mc.log.info("Setting %s to %sfps", PhysicalDmdBase.dmd_name_string, fps) def tick(self, dt): """Draw image for DMD and send it.""" del dt widget = self.source fbo = self.fbo # detach the widget from the parent parent = widget.parent if parent: parent.remove_widget(widget) self.effect_widget.add_widget(widget) # clear the fbo background fbo.bind() fbo.clear_buffer() fbo.release() fbo.draw() fbo.bind() data = glReadPixels(0, 0, widget.native_size[0], widget.native_size[1], GL_RGB, GL_UNSIGNED_BYTE) fbo.release() # reattach to the parent if parent: self.effect_widget.remove_widget(widget) parent.add_widget(widget) if not self.config['only_send_changes'] or self.prev_data != data: self.prev_data = data self.send(data) def send(self, data): """Send data to DMD via BCP.""" raise NotImplementedError class PhysicalDmd(PhysicalDmdBase): """Physical monochrome DMD.""" def _get_validated_config(self, config): return self.mc.config_validator.validate_config('physical_dmds', config) @classmethod def _convert_to_single_bytes(cls, data): new_data = bytearray() loops = 0 for r, g, b in struct.iter_unpack('BBB', data): loops += 1 try: pixel_weight = ((r * .299) + (g * .587) + (b * .114)) / 255. 
new_data.append(int(round(pixel_weight * 15))) except ValueError: raise ValueError(loops, r, g, b) return bytes(new_data) def send(self, data): """Send data to DMD via BCP.""" data = self._convert_to_single_bytes(data) self.mc.bcp_processor.send('dmd_frame', rawbytes=data, name=self.name) class PhysicalRgbDmd(PhysicalDmdBase): """Physical RGB DMD.""" dmd_name_string = 'Physical RGB DMD' def _get_validated_config(self, config): return self.mc.config_validator.validate_config('physical_rgb_dmds', config) def send(self, data): """Send data to DMD via BCP.""" self.mc.bcp_processor.send('rgb_dmd_frame', rawbytes=data, name=self.name) class FlipVertical(EffectBase): """GLSL effect to veritically flip a texture""" def __init__(self): super().__init__() self.glsl = ''' vec4 effect(vec4 color, sampler2D texture, vec2 tex_coords, vec2 coords) {{ return texture2D(texture, vec2(tex_coords.x, 1.0 - tex_coords.y)); }} '''
Python
0
@@ -1377,32 +1377,139 @@ brightness'%5D))%0A%0A + if self.config%5B'gamma'%5D != 1.0:%0A effect_list.append(Gamma(gamma=self.config%5B'gamma'%5D))%0A%0A self.eff @@ -5524,12 +5524,465 @@ %0A ''' +%0A%0Aclass Gamma(EffectBase):%0A %22%22%22GLSL effect to apply a gamma setting to a texture%22%22%22%0A%0A def __init__(self, gamma=1.0):%0A super().__init__()%0A%0A gamma = float(gamma)%0A%0A self.glsl = '''%0A%0A vec4 effect(vec4 color, sampler2D texture, vec2 tex_coords, vec2 coords)%0A%0A %7B%7B%0A vec4 outColor = vec4(pow(color.x, %7B0%7D), pow(color.y, %7B0%7D), pow(color.z, %7B0%7D), 1.0);%0A return outColor;%0A %7D%7D%0A '''.format(gamma)%0A
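The monochrome path in `_convert_to_single_bytes` maps RGB to a 4-bit level using BT.601 luma weights; worked out standalone:

def to_4bit_gray(r, g, b):
    # Weighted luma (ITU-R BT.601), normalised to [0, 1], scaled to 0..15.
    pixel_weight = ((r * .299) + (g * .587) + (b * .114)) / 255.
    return int(round(pixel_weight * 15))

assert to_4bit_gray(255, 255, 255) == 15
assert to_4bit_gray(0, 0, 0) == 0
assert 0 <= to_4bit_gray(128, 64, 200) <= 15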
f09470679ee831272c97dc0765a43faca5f28e75
Remove extra newline in bordered()
dodo_commands/framework/util.py
dodo_commands/framework/util.py
# -*- coding: utf-8 -*- """Utilities.""" from six.moves import input as raw_input import os import sys def query_yes_no(question, default="yes"): """Ask a yes/no question via raw_input() and return their answer. "question" is a string that is presented to the user. "default" is the presumed answer if the user just hits <Enter>. It must be "yes" (the default), "no" or None (meaning an answer is required of the user). The "answer" return value is True for "yes" or False for "no". """ valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} if default is None: prompt = " [y/n] " elif default == "yes": prompt = " [Y/n] " elif default == "no": prompt = " [y/N] " else: raise ValueError("invalid default answer: '%s'" % default) while True: sys.stdout.write(question + prompt) choice = raw_input().lower() if default is not None and choice == '': return valid[default] elif choice in valid: return valid[choice] else: sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") _global_config = """ [DodoCommands] projects_dir=~/projects python_interpreter=python diff_tool=diff """ def create_global_config(): """Create config file and default_commands dir.""" base_dir = os.path.expanduser('~/.dodo_commands') if not os.path.exists(base_dir): os.mkdir(base_dir) config_filename = os.path.join(base_dir, "config") if not os.path.exists(config_filename): with open(config_filename, 'w') as f: f.write(_global_config) default_commands_dir = os.path.join(base_dir, "default_commands") if not os.path.exists(default_commands_dir): os.mkdir(default_commands_dir) init_py = os.path.join(default_commands_dir, "__init__.py") if not os.path.exists(init_py): with open(init_py, 'w') as f: pass def remove_trailing_dashes(args): """Removes first -- item from args.""" return args[1:] if args[:1] == ['--'] else args def bordered(text): lines = text.splitlines() width = max(len(s) for s in lines) res = ['┌' + '─' * width + '┐'] for s in lines: res.append('│' + (s + ' ' * width)[:width] + '│') res.append('└' + '─' * width + '┘') return '\n'.join(res) + '\n'
Python
0.000001
@@ -2430,12 +2430,5 @@ res) - + '%5Cn' %0A
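The visible effect of the fix is that `bordered()` no longer appends a trailing newline. The post-fix function, runnable on its own:

def bordered(text):
    lines = text.splitlines()
    width = max(len(s) for s in lines)
    res = ['┌' + '─' * width + '┐']
    for s in lines:
        # Pad (or clip) each line to the box width.
        res.append('│' + (s + ' ' * width)[:width] + '│')
    res.append('└' + '─' * width + '┘')
    return '\n'.join(res)  # no trailing '\n' after the fix

assert not bordered('hi').endswith('\n')
print(bordered('hello\nworld'))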
6d8b1ea0e459bd3383528fb32e6b1a348b00a9bc
Remove unknown attributes.
phoxpy/server/auth.py
phoxpy/server/auth.py
# -*- coding: utf-8 -*- # # Copyright (C) 2011 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # from random import randint from phoxpy import exceptions from phoxpy.messages import PhoxRequest, PhoxResponse from phoxpy.messages import auth from phoxpy.server.main import ServerExtension, request_type __all__ = ['AuthExt'] class AuthExt(ServerExtension): def __init__(self, db): db.update({ 'licenses': set([]), 'users': {}, 'sessions': set([]) }) super(AuthExt, self).__init__(db) def get_session_id(self): return str(randint(10000, 50000)) def add_license(self, key): self.db['licenses'].add(key) def add_user(self, login, password): self.db['users'][login] = password @request_type(auth.AuthRequest) def handle_login(self, request): if request.client_id not in self.db['licenses']: raise exceptions.LicenseNotFound(request.client_id) if request.instance_count is None: raise exceptions.LisBaseException(654) if request.login not in self.db['users']: raise exceptions.UnknownUser() if self.db['users'][request.login] != request.password: raise exceptions.AuthentificationError() sessionid = self.get_session_id() self.db['sessions'].add(sessionid) return auth.AuthResponse( sessionid=sessionid, buildnumber=self.build_number, version=self.server_version ) @request_type(PhoxRequest) def handle_logout(self, request): if request.sessionid not in self.db['sessions']: raise exceptions.UnknownSession() self.db['sessions'].remove(request.sessionid) return PhoxResponse( buildnumber=request.buildnumber, sessionid=request.sessionid, version=request.version )
Python
0.000002
@@ -1901,137 +1901,33 @@ nse( -%0A buildnumber=request.buildnumber,%0A sessionid=request.sessionid,%0A version=request.version%0A +sessionid=request.sessionid )%0A
a4cc48aa03a5924fb58807b87e69695b69811e6f
Fix wiki_vectorize: switch table from wiki to wiki_ratings
src/data_prep/wiki_vectorize.py
src/data_prep/wiki_vectorize.py
from src.utils import article_to_category, glove, remove_templates, clean_categories, clean_links import string import numpy as np class wiki_vectorize(): def __init__(self, user_interactions, content, user_vector_type, content_vector_type, sqlCtx, **support_files): """ Class initializer to load the required files. The wikipedia data will be filtered to only the 2015 edits and content, as well as remove any articles without text, edits without a user id (IP addresses) and article namespaces that are not 0 (articles). Args: user_interactions: The raw RDD of the user interactions. For Wikipedia, this it is the full edit history. We have been reading it in as wiki_edits = sqlCtx.read.json(wiki_edit_json_data_path, schema=schema) content: The raw RDD containing the item content. For Wikipedia, this is the latest edit which contains full article content user_vector_type: The type of user vector desired. For Wikipedia you can choose between ['num_edits', 'any_interact', 'num_edits_ceil', 'none']. num_edits_ceil will count the number of edits but set an upper limit of 5 edits If 'none' is used then this means you will run your own custom mapping content_vector_type: The type of content vector desired. For Wikipedia you can choose between ['glove', 'category_map', 'none']. If none is chosen no content vector will be returned and None may be passed into the content argument. You do not need a content vector to run pure CF only but some performance metrics will not be able to be ran sqlCtx: The sequel content which is necessary for some of the queries support_files: If they exist, the supporting files, dataFrames, and/or file links necessary to run the content vectors. For example the category_map function at least needs the category_list from dbPedia """ self.user_vector_type = user_vector_type self.content_vector_type = content_vector_type self.sqlCtx = sqlCtx #Filter out uninteresting articles and users if they still exist in the dataset user_interactions.registerTempTable("ratings") content.registerTempTable("content") filtered = self.sqlCtx.sql("select * from ratings where redirect_target is null and article_namespace=0 and user_id is not null and timestamp like '2015%'") filtered_content = self.sqlCtx.sql("select * from content where redirect_target is null and article_namespace=0 and full_text is not null and timestamp like '2015%'") self.filtered = filtered self.filtered.registerTempTable("wiki_ratings") self.filtered_content = filtered_content self.filtered_content.registerTempTable("wiki_content") #if no support files were passed in, initialize an empty support file if support_files: self.support_files = support_files else: self.support_files = {} def get_user_vector(self): if self.user_vector_type=='num_edits': user_info = self.sqlCtx.sql("select user_id as user, article_id as item, count(1) as rating from wiki_ratings \ group by user_id, article_id") return user_info elif self.user_vector_type=='any_interact': user_info = self.sqlCtx.sql("select user_id as user, article_id as item, 1 as rating from wiki_ratings \ group by user_id, article_id") return user_info elif self.user_vector_type=='num_edits_ceil': user_info = self.sqlCtx.sql("select user_id as user, article_id as item, count(1) as rating from wiki \ group by user_id, article_id")\ .map(lambda (user, article, rating): (user, article, min(rating, 5))) return user_info elif self.user_vector_type=='none': return None else: print "Please choose a user_vector_type between num_edits, any_interact, num_edits_ceil or none" return None def 
get_content_vector(self): if self.content_vector_type=='glove': if len(self.support_files)==1: glove_model = self.support_files["glove_model"] article_mapping = self.filtered_content\ .map(lambda row: (row.article_id, remove_templates.remove_templates(row.full_text)))\ .map(lambda tup: (tup[0],clean_categories.clean_categories(tup[1])))\ .map(lambda tup: (tup[0],clean_links.clean_links(tup[1])))\ .map( lambda tup: (tup[0], tup[1]\ .replace('\n', ' ')\ .replace("<ref>", '')\ .replace("</ref>", '')\ ) )\ .map(lambda tup: (tup[0], remove_punctuation(tup[1])))\ .map(lambda tup: (tup[0], remove_urls(tup[1])))\ .map(lambda tup: (tup[0], article_to_glove(tup[1], glove_model))) return article_mapping else: print "Please pass in a glove_model. Like: support_files['glove_model']=Glove('glove.6B.50d.txt')" elif self.content_vector_type=='category_map': if len(self.support_files)==3: #The category map supporting dataFrames and objects are as followed: #high_level_idx: An array of the high level categories to map to e.g. ['Concepts', 'Life', 'Physical_universe', 'Society'] #category_index_graph_link: Path to the csv of the category links as created from wiki_categories.create_linked_list() #category_idx: Dictionary of the categories to an index as created from wiki_categories.create_category_idx_dicts() high_level_categories = self.support_files['high_level_categories'] category_index_graph_link = self.support_files['category_index_graph_link'] category_idx = self.support_file['category_idx'] ac = article_to_category(high_level_categories, category_index_graph_link, category_idx) article_mapping = ac.run_mapping(self.filtered_content) return article_mapping else: #print "To run category map you must at least have the category_list from dbPedia" ##TODO work on the article_to_category function so that it can just pull in the category list from dpPedia print "Please pass in the following files:" print "high_level_idx: An array of the high level categories to map to e.g. ['Concepts', 'Life', 'Physical_universe', 'Society']" print 'category_index_graph_link: Path to the csv of the category links as created from wiki_categories.create_linked_list()' print 'category_idx: Dictionary of the categories to an index as created from wiki_categories.create_category_idx_dicts()' print 'support_files = {"high_level_categories" : high_level_categories, \ "category_index_graph_link" : category_index_graph_link, \ "category_idx" : category_idx}' return None elif self.content_vector_type=='none': return None else: print "Please choose between glove, category_map or none" return None def remove_punctuation(text): for char in string.punctuation: text = text.replace(char, '') return text def article_to_glove(text, model): vec = np.zeros(model.vector_size) for word in text.split(): vec += model[word.lower()] return vec def remove_urls(text): stext = text.split() next_text = [] for word in stext: if word.startswith('http'): continue else: next_text.append(word) return ' '.join(next_text)
Python
0.00001
@@ -3765,16 +3765,24 @@ rom wiki +_ratings %5C%0A
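`article_to_glove` in this module just sums per-word embedding vectors; here it is exercised against an explicitly fake model (`FakeGlove` is a stand-in for illustration, not the real Glove class):

import numpy as np

class FakeGlove(object):
    vector_size = 3
    def __getitem__(self, word):
        return np.ones(self.vector_size)

def article_to_glove(text, model):
    vec = np.zeros(model.vector_size)
    for word in text.split():
        vec += model[word.lower()]
    return vec

assert (article_to_glove("two words", FakeGlove()) == 2).all()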
e2cbc0a3acf793ca8c45eb17cb0071a254a7e2b7
Update parse_indepexpends.py
server/src/datasource/parse_indepexpends.py
server/src/datasource/parse_indepexpends.py
from datasource import fec from datasource import propublica import os FEC_APIKEY = os.getenv('FEC_API_KEY', '') ProPublica_APIKEY = os.getenv('PP_API_KEY', '') FecApiObj = fec.FECAPI(FEC_APIKEY) committees = FecApiObj.get_committees() PPCampFinObj = propublica.CampaignFinanceAPI(ProPublica_APIKEY) datafile = open("IndepExpends.json", 'w') for committee in committees: if(2016 in committee['cycles']): indepExpend = PPCampFinObj.get_indep_expends(str(committee['committee_id'])) datafile.write(str(indepExpend)) datafile.close()
Python
0
@@ -311,49 +311,541 @@ Y)%0D%0A -datafile = open(%22IndepExpends.json%22, 'w') +PPCongressApi = propublica.CongressAPI(ProPublica_APIKEY)%0D%0Alegislator_index = list()%0D%0Alegislators = PPCongressApi.list_members('house')%5B%22results%22%5D%5B0%5D%5B%22members%22%5D%0D%0Afor legislator in legislators:%0D%0A name = str(legislator%5B'first_name'%5D) + %22 %22 + str(legislator%5B'last_name'%5D)%0D%0A legislator_index.append(name)%0D%0Alegislators = PPCongressApi.list_members('senate')%5B%22results%22%5D%5B0%5D%5B%22members%22%5D%0D%0Afor legislator in legislators:%0D%0A name = str(legislator%5B'first_name'%5D) + %22 %22 + str(legislator%5B'last_name'%5D)%0D%0A legislator_index.append(name)%0D%0A%0D%0A %0D%0Afo @@ -1008,56 +1008,159 @@ -datafile.write(str(indepExpend))%0D%0Adatafile.close() +for expend in indepExpend%5B%22results%22%5D:%0D%0A if(expend%5B'candidate_name'%5D in legislator_index):%0D%0A #expend fo a particular expenditure %0D%0A
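The diff builds a legislator name index from ProPublica member records before filtering expenditures. The core of that step, with the member payload shape assumed from the diff and no live API calls:

members = [{'first_name': 'Jane', 'last_name': 'Doe'},
           {'first_name': 'John', 'last_name': 'Roe'}]

legislator_index = []
for legislator in members:
    name = str(legislator['first_name']) + " " + str(legislator['last_name'])
    legislator_index.append(name)

assert 'Jane Doe' in legislator_index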
0c450f52bfd30b694cea19a80fed900b22a39b90
Update nbgrader/plugins/export.py
nbgrader/plugins/export.py
nbgrader/plugins/export.py
from traitlets import Unicode, List from .base import BasePlugin from ..api import MissingEntry class ExportPlugin(BasePlugin): """Base class for export plugins.""" to = Unicode("", help="destination to export to").tag(config=True) student = List([], help="list of students to export").tag(config=True) assignment = List([], help="list of assignments to export").tag(config=True) def export(self, gradebook): """Export grades to another format. This method MUST be implemented by subclasses. Users should be able to pass the ``--to`` flag on the command line, which will set the ``self.to`` variable. By default, this variable will be an empty string, which allows you to specify whatever default you would like. Arguments --------- gradebook: :class:`nbgrader.api.Gradebook` An instance of the gradebook """ raise NotImplementedError class CsvExportPlugin(ExportPlugin): """CSV exporter plugin.""" def export(self, gradebook): if self.to == "": dest = "grades.csv" else: dest = self.to if len(self.student) == 0: allstudents = False else: # make sure studentID(s) are a list of strings allstudents = [str(item) for item in self.student] if len(self.assignment) == 0: allassignments = False else: # make sure assignment(s) are a list of strings allassignments = [str(item) for item in self.assignment] self.log.info("Exporting grades to %s", dest) if allassignments: self.log.info("Exporting only assignments: %s", allassignments) if allstudents: self.log.info("Exporting only students: %s", allstudents) fh = open(dest, "w") keys = [ "assignment", "duedate", "timestamp", "student_id", "last_name", "first_name", "email", "raw_score", "late_submission_penalty", "score", "max_score" ] fh.write(",".join(keys) + "\n") fmt = ",".join(["{" + x + "}" for x in keys]) + "\n" # Loop over each assignment in the database for assignment in gradebook.assignments: # only continue if assignment is required if allassignments and assignment.name not in allassignments: continue # Loop over each student in the database for student in gradebook.students: # only continue if student is required if allstudents and student.id not in allstudents: continue # Create a dictionary that will store information # about this student's submitted assignment score = {} score['assignment'] = assignment.name score['duedate'] = assignment.duedate score['student_id'] = student.id score['last_name'] = student.last_name score['first_name'] = student.first_name score['email'] = student.email score['max_score'] = assignment.max_score # Try to find the submission in the database. If it # doesn't exist, the `MissingEntry` exception will be # raised, which means the student didn't submit # anything, so we assign them a score of zero. try: submission = gradebook.find_submission( assignment.name, student.id) except MissingEntry: score['timestamp'] = '' score['raw_score'] = 0.0 score['late_submission_penalty'] = 0.0 score['score'] = 0.0 else: penalty = submission.late_submission_penalty score['timestamp'] = submission.timestamp score['raw_score'] = submission.score score['late_submission_penalty'] = penalty score['score'] = max(0.0, submission.score - penalty) for key in score: if score[key] is None: score[key] = '' if not isinstance(score[key], str): score[key] = str(score[key]) fh.write(fmt.format(**score)) fh.close()
Python
0
@@ -1437,29 +1437,26 @@ signments = -False +%5B%5D %0A els
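The exporter assembles each CSV row with a format string derived from its key list, a small reusable trick:

keys = ["assignment", "student_id", "score"]
# Produces "{assignment},{student_id},{score}\n".
fmt = ",".join(["{" + x + "}" for x in keys]) + "\n"

row = {"assignment": "ps1", "student_id": "alice", "score": 9.5}
assert fmt.format(**row) == "ps1,alice,9.5\n"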
dc4471a09ecab349f4a7336df4f2a223369b5650
Install gnupg and do not remove gnupg and dpkg
neurodocker/interfaces/neurodebian.py
neurodocker/interfaces/neurodebian.py
"""Add Dockerfile instructions to add NeuroDebian repository.""" # Author: Jakub Kaczmarzyk <jakubk@mit.edu> from neurodocker.utils import check_url, indent, manage_pkgs class NeuroDebian(object): """Object to add NeuroDebian repository. Parameters ---------- os_codename : str Operating system codename (e.g., 'zesty', 'jessie'). download_server : {'australia', 'china-tsinghua', 'china-scitech', 'china-zhejiang', 'germany-munich', 'germany-magdeburg', 'greece', 'japan', 'usa-ca', 'usa-nh', 'usa-tn'} The server to use to download NeuroDebian packages. Choose the one closest to you. full : bool If false (default), use the libre sources. If true, use the full NeuroDebian sources. pkgs : str or list or tuple Packages to install from NeuroDebian. pkg_manager : {'apt'} Linux package manager. check_urls : bool If true, raise error if a URL used by this class responds with an error code. """ SERVERS = {'australia': 'au', 'china-tsinghua': 'cn-bj1', 'china-scitech': 'cn-bj2', 'china-zhejiang': 'cn-zj', 'germany-munich': 'de-m', 'germany-magdeburg': 'de-md', 'greece': 'gr', 'japan': 'jp', 'usa-ca': 'us-ca', 'usa-nh': 'us-nh', 'usa-tn': 'us-tn',} def __init__(self, os_codename, download_server, full=False, pkgs=None, pkg_manager='apt', check_urls=True): self.pkgs = pkgs self.check_urls = check_urls download_server = self._get_server(download_server) suffix = "full" if full else "libre" self.url = self._create_url(os_codename, download_server, suffix) if self.check_urls: check_url(self.url) self.cmd = self._create_cmd() def _create_cmd(self): comment = ("#---------------------------" "\n# Add NeuroDebian repository" "\n#---------------------------") chunks = [comment, self._add_neurodebian()] if self.pkgs is not None and self.pkgs: chunks.append(self._install_pkgs()) return "\n".join(chunks) @classmethod def _get_server(cls, download_server): try: return cls.SERVERS[download_server] except KeyError: raise ValueError("Invalid download server: {}" "".format(download_server)) @staticmethod def _create_url(os_codename, download_server, suffix): """Return neurodebian URL.""" try: from urllib.parse import urljoin # Python 3 except ImportError: from urlparse import urljoin # Python 2 base = "http://neuro.debian.net/lists/" rel = "{0}.{1}.{2}".format(os_codename, download_server, suffix) return urljoin(base, rel) def _add_neurodebian(self): """Return instruction to add NeuroDebian repository.""" pkgs = "dirmngr" cmd = ("{install}" "\n&& {clean}" "\n&& curl -sSL {url}" "\n> /etc/apt/sources.list.d/neurodebian.sources.list" "\n&& apt-key adv --recv-keys --keyserver" " hkp://pool.sks-keyservers.net:80 0xA5D32F012649A5A9" "\n&& apt-get update" "\n&& {remove}" "".format(url=self.url, **manage_pkgs['apt']).format(pkgs=pkgs)) return indent("RUN", cmd) def _install_pkgs(self): """Return instruction to install NeuroDebian packages.""" if isinstance(self.pkgs, (list, tuple)): self.pkgs = " ".join(self.pkgs) cmd = ("{install}\n&& {clean}".format(**manage_pkgs['apt']) .format(pkgs=self.pkgs)) comment = "\n# Install NeuroDebian packages" return "\n".join((comment, indent("RUN", cmd)))
Python
0
@@ -3106,16 +3106,22 @@ %22dirmngr + gnupg %22%0A @@ -3448,39 +3448,8 @@ te%22%0A - %22%5Cn&& %7Bremove%7D%22%0A
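The URL assembly in `_create_url` reduces to a single `urljoin`; worked out with the module's own base and codename/server/suffix inputs:

try:
    from urllib.parse import urljoin  # Python 3
except ImportError:
    from urlparse import urljoin      # Python 2

base = "http://neuro.debian.net/lists/"
rel = "{0}.{1}.{2}".format("zesty", "us-ca", "full")
assert urljoin(base, rel) == "http://neuro.debian.net/lists/zesty.us-ca.full"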
7341bdd68b697ff13100388f4cf5c67b38144de9
Clarify log message.
nipype/interfaces/traits_extension.py
nipype/interfaces/traits_extension.py
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This module contains Trait classes that we've pulled from the traits source and fixed due to various bugs. File and Directory are redefined as the release version had dependencies on TraitsUI, which we do not want Nipype to depend on. At least not yet. Undefined class was missing the __len__ operator, causing edit_traits and configure_traits to fail on List objects. Even though we don't require TraitsUI, this bug was the only thing preventing us from popping up GUIs which users like. These bugs have been in Traits v3.3.0 and v3.2.1. We have reported all of these bugs and they've been fixed in enthought svn repository (usually by Robert Kern). """ import os # perform all external trait imports here import traits if traits.__version__ < '3.7.0': raise ImportError('Traits version 3.7.0 or higher must be installed') import traits.api as traits from traits.trait_handlers import TraitDictObject, TraitListObject from traits.trait_errors import TraitError from traits.trait_base import _Undefined from .. import logging logger = logging.getLogger('interface') class BaseFile ( traits.BaseStr ): """ Defines a trait whose value must be the name of a file. """ # A description of the type of value this trait accepts: info_text = 'a file name' def __init__ ( self, value = '', filter = None, auto_set = False, entries = 0, exists = False, **metadata ): """ Creates a File trait. Parameters ---------- value : string The default value for the trait filter : string A wildcard string to filter filenames in the file dialog box used by the attribute trait editor. auto_set : boolean Indicates whether the file editor updates the trait value after every key stroke. exists : boolean Indicates whether the trait value must be an existing file or not. Default Value ------------- *value* or '' """ self.filter = filter self.auto_set = auto_set self.entries = entries self.exists = exists if exists: self.info_text = 'an existing file name' super( BaseFile, self ).__init__( value, **metadata ) def validate ( self, object, name, value ): """ Validates that a specified value is valid for this trait. Note: The 'fast validator' version performs this check in C. """ validated_value = super( BaseFile, self ).validate( object, name, value ) if not self.exists: return validated_value elif os.path.isfile( value ): return validated_value logger.error("Mandatory output file was not found: %s" % os.path.abspath(value)) self.error( object, name, value ) class File ( BaseFile ): """ Defines a trait whose value must be the name of a file using a C-level fast validator. """ def __init__ ( self, value = '', filter = None, auto_set = False, entries = 0, exists = False, **metadata ): """ Creates a File trait. Parameters ---------- value : string The default value for the trait filter : string A wildcard string to filter filenames in the file dialog box used by the attribute trait editor. auto_set : boolean Indicates whether the file editor updates the trait value after every key stroke. exists : boolean Indicates whether the trait value must be an existing file or not. 
Default Value ------------- *value* or '' """ if not exists: # Define the C-level fast validator to use: fast_validate = ( 11, basestring ) super( File, self ).__init__( value, filter, auto_set, entries, exists, **metadata ) #------------------------------------------------------------------------------- # 'BaseDirectory' and 'Directory' traits: #------------------------------------------------------------------------------- class BaseDirectory ( traits.BaseStr ): """ Defines a trait whose value must be the name of a directory. """ # A description of the type of value this trait accepts: info_text = 'a directory name' def __init__ ( self, value = '', auto_set = False, entries = 0, exists = False, **metadata ): """ Creates a BaseDirectory trait. Parameters ---------- value : string The default value for the trait auto_set : boolean Indicates whether the directory editor updates the trait value after every key stroke. exists : boolean Indicates whether the trait value must be an existing directory or not. Default Value ------------- *value* or '' """ self.entries = entries self.auto_set = auto_set self.exists = exists if exists: self.info_text = 'an existing directory name' super( BaseDirectory, self ).__init__( value, **metadata ) def validate ( self, object, name, value ): """ Validates that a specified value is valid for this trait. Note: The 'fast validator' version performs this check in C. """ validated_value = super( BaseDirectory, self ).validate( object, name, value ) if not self.exists: return validated_value if os.path.isdir( value ): return validated_value self.error( object, name, value ) class Directory ( BaseDirectory ): """ Defines a trait whose value must be the name of a directory using a C-level fast validator. """ def __init__ ( self, value = '', auto_set = False, entries = 0, exists = False, **metadata ): """ Creates a Directory trait. Parameters ---------- value : string The default value for the trait auto_set : boolean Indicates whether the directory editor updates the trait value after every key stroke. exists : boolean Indicates whether the trait value must be an existing directory or not. Default Value ------------- *value* or '' """ # Define the C-level fast validator to use if the directory existence # test is not required: if not exists: self.fast_validate = ( 11, basestring ) super( Directory, self ).__init__( value, auto_set, entries, exists, **metadata ) """ The functions that pop-up the Traits GUIs, edit_traits and configure_traits, were failing because all of our inputs default to Undefined deep and down in traits/ui/wx/list_editor.py it checks for the len() of the elements of the list. The _Undefined class in traits does not define the __len__ method and would error. I tried defining our own Undefined and even sublassing Undefined, but both of those failed with a TraitError in our initializer when we assign the Undefined to the inputs because of an incompatible type: TraitError: The 'vertical_gradient' trait of a BetInputSpec instance must be a float, but a value of <undefined> <class 'nipype.interfaces.traits._Undefined'> was specified. So... in order to keep the same type but add the missing method, I monkey patched. 
""" def length(self): return 0 ########################################################################## # Apply monkeypatch here _Undefined.__len__ = length ########################################################################## Undefined = _Undefined() def isdefined(object): return not isinstance(object, _Undefined) def has_metadata(trait, metadata, value=None, recursive=True): ''' Checks if a given trait has a metadata (and optionally if it is set to particular value) ''' count = 0 if hasattr(trait, "_metadata") and metadata in trait._metadata.keys() and (trait._metadata[metadata] == value or value==None): count += 1 if recursive: if hasattr(trait, 'inner_traits'): for inner_trait in trait.inner_traits(): count += has_metadata(inner_trait.trait_type, metadata, recursive) if hasattr(trait, 'handlers') and trait.handlers != None: for handler in trait.handlers: count += has_metadata(handler, metadata, recursive) return count > 0
Python
0.000029
@@ -2856,16 +2856,19 @@ ndatory +%25s output f @@ -2891,16 +2891,44 @@ d: %25s%22 %25 +%0A (name, os.path @@ -2943,25 +2943,17 @@ (value)) -%0A +) %0A
737342a556b5a008690293839e5bbdabb6d9d329
Delete old rpcmethods
device/rpcmethods.py
device/rpcmethods.py
import crypto import read import os import json import base64 import traceback """ All methods that are RPCs should go here. """ ALLOW_LIST = [ "package", "unpackage", "whoami", "test_prompt", "teapot", ] # Confirm controller handle. csc = None def package(src_uid, dst_uid, message): """Encrypt a message from from_uid to to_uid. Message is plaintext. Package it up with the index and MAC so the recipient can decode it. """ # TODO verify bit release with user message = message.encode("utf-8") (p_text, index) = read.read_encrypt_pad(src_uid, dst_uid, len(message)) (p_body, _) = read.read_encrypt_pad(src_uid, dst_uid, len(message) + crypto.TAG_LENGTH) (p_tag_key, _) = read.read_encrypt_pad(src_uid, dst_uid, crypto.TAG_KEY_LENGTH) try: package = crypto.package(index, message, p_text, p_body, p_tag_key, verbose=True) # Base64 encode the package for transport. package_b64 = base64.b64encode(package) return { "success": True, "package": package_b64, } except crypto.CryptoError: traceback.print_exc() return { "success": False, "error": "Encryption failed.", } def unpackage(src_uid, dst_uid, package_b64): # TODO verify bit release with user # b64 decode the package for decryption. package = base64.b64decode(package_b64) try: pre = crypto.pre_unpackage(package, verbose=True) except crypto.CryptoError: traceback.print_exc() return { "success" : False, "error": "Decryption failed.", } message_length = pre["message_length"] body_length = pre["body_length"] p_text_index = pre["p_text_index"] p_body_index = pre["p_body_index"] p_tag_key_index = pre["p_tag_key_index"] p_text = read.read_decrypt_pad(src_uid, dst_uid, p_text_index, message_length) p_body = read.read_decrypt_pad(src_uid, dst_uid, p_body_index, body_length) p_tag_key = read.read_decrypt_pad(src_uid, dst_uid, p_tag_key_index, crypto.TAG_KEY_LENGTH) try: message = crypto.unpackage(package, p_text, p_body, p_tag_key, verbose=True) return { "success" : True, "message" : message, } except crypto.CryptoError: traceback.print_exc() return { "success" : False, "error": "Decryption failed.", } def encrypt(recipient_uid, message): """ Encrypts a message using a one time pad recipient_uid is the id of the recipient. This impacts what pad will be used to encrypt message is the intended message to be encrypted returns a dictionary with keys cipher_text, and index_used """ ## I imagine the pad to be read_from_device depends on the recipient_uid, the index, and the length of the pad #TODO: index_used might buggy (pad,index_used) = read_encrypt_pad(recipient_uid, len(message)) cipher_list = encrypt.encrypt(message, pad) cipher_text = encrypt.pretty_print(cipher_list) return { "status":"ok", "cipher_text":cipher_text, "index_used":index_used, } def decrypt(sender_uid, cipher_text, index): """ Decrypts a message using a one time pad sender_uid is the id of the user who sent the cipher text. 
cipher_text is the contents of what they sent to be decrypt returns message decrypted """ pad = read_decrypt_pad(sender_uid, index, len(cipher_text)) message_list = encrypt.decrypt(message,pad) message = encrypt.pretty_print(message_list) return { "status":"ok", "message":message, } def sign(recipient_uid,message): """ takes in a message and returns an authentication tag for that message recipient_uid is the id of the reciever message is the message that needs to be authenticated """ pad = read_encrypt_pad(recipient_uid, len(message)) message_hash = encrypt.hash(message) return message_hash def verify(sender_uid,message,tag): """ verifies that the message hashes to the tag value """ return encrypt.hash(message)==tag # Returns UID of this device def whoami(true_id=None): return read.whoami(true_id) def test_prompt(): print "test prompt requested", csc return csc.yn_prompt("[Fake] Release 2000\nbits of pad?") def teapot(): return "Error 418: I'm a teapot"
Python
0.000001
@@ -2581,1726 +2581,8 @@ %7D%0A%0A -def encrypt(recipient_uid, message):%0A %22%22%22 Encrypts a message using a one time pad %0A recipient_uid is the id of the recipient. This impacts what pad will be used to encrypt%0A message is the intended message to be encrypted%0A returns a dictionary with keys cipher_text, and index_used%0A %22%22%22%0A ## I imagine the pad to be read_from_device depends on the recipient_uid, the index, and the length of the pad%0A #TODO: index_used might buggy%0A (pad,index_used) = read_encrypt_pad(recipient_uid, len(message)) %0A cipher_list = encrypt.encrypt(message, pad)%0A cipher_text = encrypt.pretty_print(cipher_list)%0A return %7B%0A %22status%22:%22ok%22,%0A %22cipher_text%22:cipher_text,%0A %22index_used%22:index_used,%0A %7D%0A%0Adef decrypt(sender_uid, cipher_text, index):%0A %22%22%22 Decrypts a message using a one time pad%0A sender_uid is the id of the user who sent the cipher text.%0A cipher_text is the contents of what they sent to be decrypt%0A returns message decrypted%0A %22%22%22%0A pad = read_decrypt_pad(sender_uid, index, len(cipher_text))%0A message_list = encrypt.decrypt(message,pad)%0A message = encrypt.pretty_print(message_list)%0A return %7B%0A %22status%22:%22ok%22,%0A %22message%22:message,%0A %7D%0A%0Adef sign(recipient_uid,message):%0A %22%22%22 takes in a message and returns an authentication tag for that message%0A recipient_uid is the id of the reciever%0A message is the message that needs to be authenticated %22%22%22%0A%0A pad = read_encrypt_pad(recipient_uid, len(message))%0A message_hash = encrypt.hash(message)%0A return message_hash%0A%0Adef verify(sender_uid,message,tag):%0A %22%22%22 verifies that the message hashes to the tag value %22%22%22%0A return encrypt.hash(message)==tag%0A%0A # Re
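The transport encoding that survives this cleanup is plain base64 over the binary package, as used by `package()`/`unpackage()`; a minimal round trip:

import base64

package = b"\x00\x01 binary payload"
package_b64 = base64.b64encode(package)
assert base64.b64decode(package_b64) == package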
3eb99546062045f3b431b9d6c1c095bed197fedc
Add subscribe_commands method
devicehive/device.py
devicehive/device.py
from devicehive.api_object import ApiObject from devicehive.command import Command class Device(ApiObject): """Device class.""" ID_KEY = 'id' NAME_KEY = 'name' DATA_KEY = 'data' NETWORK_ID_KEY = 'networkId' IS_BLOCKED_KEY = 'isBlocked' def __init__(self, transport, token, device=None): ApiObject.__init__(self, transport) self._token = token self._id = None self.name = None self.data = None self.network_id = None self.is_blocked = None if device: self._init(device) def _init(self, device): self._id = device[self.ID_KEY] self.name = device[self.NAME_KEY] self.data = device[self.DATA_KEY] self.network_id = device[self.NETWORK_ID_KEY] self.is_blocked = device[self.IS_BLOCKED_KEY] def id(self): return self._id def get(self, device_id): url = 'device/%s' % device_id action = 'device/get' request = {'deviceId': device_id} params = {'request_delete_keys': ['deviceId'], 'response_key': 'device'} response = self._token.authorized_request(url, action, request, **params) self._ensure_success_response(response, 'Device get failure') device = response.response('device') self._init(device) def save(self): url = 'device/%s' % self._id action = 'device/save' device = {self.ID_KEY: self._id, self.NAME_KEY: self.name, self.DATA_KEY: self.data, self.NETWORK_ID_KEY: self.network_id, self.IS_BLOCKED_KEY: self.is_blocked} request = {'deviceId': self._id, 'device': device} params = {'method': 'PUT', 'request_key': 'device'} response = self._token.authorized_request(url, action, request, **params) self._ensure_success_response(response, 'Device save failure') def remove(self): # TODO: implement websocket support when API will be added. self._ensure_http_transport() url = 'device/%s' % self._id action = None request = {} params = {'method': 'DELETE'} response = self._token.authorized_request(url, action, request, **params) self._ensure_success_response(response, 'Device remove failure') self._id = None self.name = None self.data = None self.network_id = None self.is_blocked = None def list_commands(self, start=None, end=None, command=None, status=None, sort_field=None, sort_order=None, take=None, skip=None): # TODO: implement websocket support when API will be added. 
self._ensure_http_transport() url = 'device/%s/command' % self._id action = None request = {} params = {'response_key': 'commands', 'params': {}} if start: params['params']['start'] = start if end: params['params']['end'] = end if command: params['params']['command'] = command if status: params['params']['status'] = status if sort_field: params['params']['sortField'] = sort_field if sort_order: params['params']['sortOrder'] = sort_order if take: params['params']['take'] = take if skip: params['params']['skip'] = skip response = self._token.authorized_request(url, action, request, **params) self._ensure_success_response(response, 'List device commands failure') commands = response.response('commands') return [Command(self._transport, self._token, command) for command in commands] def send_command(self, command_name, parameters=None, lifetime=None, timestamp=None, status=None, result=None): url = 'device/%s/command' % self._id action = 'command/insert' command = {Command.COMMAND_KEY: command_name} if parameters: command[Command.PARAMETERS_KEY] = parameters if lifetime: command[Command.LIFETIME_KEY] = lifetime if timestamp: command[Command.TIMESTAMP_KEY] = timestamp if status: command[Command.STATUS_KEY] = status if result: command[Command.RESULT_KEY] = result request = {'deviceId': self._id, 'command': command} params = {'method': 'POST', 'request_key': 'command', 'response_key': 'command'} response = self._token.authorized_request(url, action, request, **params) self._ensure_success_response(response, 'Command send failure') command = response.response('command') command[Command.DEVICE_ID_KEY] = self._id command[Command.COMMAND_KEY] = command_name command[Command.PARAMETERS_KEY] = parameters command[Command.LIFETIME_KEY] = lifetime command[Command.STATUS_KEY] = status command[Command.RESULT_KEY] = result return Command(self._transport, self._token, command)
Python
0.000002
@@ -5334,28 +5334,1234 @@ port, self._token, command)%0A +%0A def subscribe_commands(self, names=None, limit=None, timestamp=None):%0A # TODO: finish HTTP support after server changes will be ready.%0A url = 'device/%25s/command/poll' %25 self._id%0A action = 'command/subscribe'%0A request = %7B'deviceId': self._id%7D%0A params = %7B'subscribe': True,%0A 'request_delete_keys': %5B'deviceId'%5D,%0A 'response_key': 'command',%0A 'params': %7B%7D%7D%0A if names:%0A request%5B'names'%5D = names%0A params%5B'request_delete_keys'%5D.append('names')%0A params%5B'params'%5D%5B'names'%5D = names%0A if limit:%0A request%5B'limit'%5D = limit%0A params%5B'request_delete_keys'%5D.append('limit')%0A params%5B'params'%5D%5B'limit'%5D = limit%0A if timestamp:%0A request%5B'timestamp'%5D = timestamp%0A params%5B'request_delete_keys'%5D.append('timestamp')%0A params%5B'params'%5D%5B'timestamp'%5D = timestamp%0A response = self._token.authorized_request(url, action, request,%0A **params)%0A self._ensure_success_response(response, 'Commands subscribe failure')%0A return response.response('subscriptionId')%0A
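The subscribe_commands method added by this diff assembles its request the same way list_commands and send_command do above, with one `if value: ...` block per optional field. A compact equivalent, shown here as a hypothetical standalone helper (not part of devicehive's API), is to filter unset values out of a dict in one pass:

```python
# Hypothetical helper, not from devicehive: keep only the optional parameters
# that were actually supplied. Note the originals test truthiness ("if take:"),
# which would also drop 0 and ""; filtering on None is the safer variant.
def optional_params(**kwargs):
    return {key: value for key, value in kwargs.items() if value is not None}

params = optional_params(start=None, end=None, command='reboot', take=10)
assert params == {'command': 'reboot', 'take': 10}
```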
e751329b8aacdf51b70537be47172386deaded63
Fix alembic env
alembic/env.py
alembic/env.py
from __future__ import with_statement from alembic import context from sqlalchemy import engine_from_config, pool from logging.config import fileConfig # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata import os,sys parentdir = os.path.abspath(os.path.join('.', '.')) sys.path.insert(0,parentdir) from rootio import create_app from rootio.extensions import db app = create_app() config.set_main_option("sqlalchemy.url", app.config["SQLALCHEMY_DATABASE_URI"]) target_metadata = db.Model.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure(url=url) with context.begin_transaction(): context.run_migrations() def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ engine = engine_from_config( config.get_section(config.config_ini_section), prefix='sqlalchemy.', poolclass=pool.NullPool) connection = engine.connect() context.configure( connection=connection, target_metadata=target_metadata, compare_type=True ) try: with context.begin_transaction(): context.run_migrations() finally: connection.close() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online()
Python
0.999689
@@ -647,16 +647,20 @@ m rootio +.app import
564b434c2fd7fadc5d467fe884e5bd88b794acc3
Fix config.
sample-config.py
sample-config.py
# -*- coding: utf-8 -*- """ Example configuration for GEAStarterKit """ ## ## Authentication/authorizationc config import authomatic from authomatic.providers import oauth2 from collections import OrderedDict AUTHOMATIC_CONFIG = OrderedDict([ ('google', { 'name': 'Google', 'id': 1000, 'icon': 'google' }), # ('github', { # 'name': 'Github', # # 'class_': oauth2.GitHub, # 'consumer_key': 'ADD YOURS', # 'consumer_secret': 'AD YOURS', # # 'id': 2000, # # 'icon': 'github', # # 'scope': ['user:email'] # }), ]) import os if os.environ.get('SERVER_SOFTWARE', '').startswith('Development') or os.environ.get('SERVER_SOFTWARE', '') == '': SECRET_STRING = 'YOUR SECRET KEY' DEVELOPMENT = True else: SECRET_STRING = 'YOUR SECRET KEY' DEVELOPMENT = False # # Talisman security import talisman csp_policy = { # Fonts from fonts.google.com 'font-src': "'self' themes.googleusercontent.com *.gstatic.com", # <iframe> based embedding for Maps and Youtube. 'frame-src': "'self' www.google.com www.youtube.com", # Assorted Google-hosted Libraries/APIs. 'script-src': "'self' ajax.googleapis.com *.googleanalytics.com " "*.google-analytics.com", # Used by generated code from http://www.google.com/fonts 'style-src': "'self' ajax.googleapis.com fonts.googleapis.com " "*.gstatic.com", # gravatar 'img-src': "'self' *.gravatar.com", # Other 'default-src': "'self' *.gstatic.com", } enable_talisman = False talisman_config = dict( force_https=True, force_https_permanent=False, frame_options=talisman.SAMEORIGIN, frame_options_allow_from=None, strict_transport_security=True, strict_transport_security_max_age=31556926, # One year in seconds strict_transport_security_include_subdomains=True, content_security_policy=csp_policy, session_cookie_secure=True, session_cookie_http_only=True ) # # Origin address for system emails. email_from_address = 'root@localhost' # # Options for login manager max_days_verification = 30 max_hours_password_reset = 48 # # How long to time.sleep() when an invalid login, token, or similar is tried. security_wait = 3 # # Languages application supports languages = OrderedDict([ ('en', 'English'), ('es', 'Español'), ('fr', 'Français') ]) # # Whether to use Paste debug panel while in development enable_debug_panel = DEVELOPMENT # # Where to send user when he logs in if nothing else is set. default_view = 'users.profile' # # Name of the site/product site_name = 'GAEStarterKit' # # Domain name for email links email_domain = 'http://localhost:8080' # # What to import automatically install_apps = [ 'apps.welcomekit', 'apps.simplecms', 'apps.error_pages', 'apps.users', 'apps.tenants', 'apps.email', 'apps.admin', ]
Python
0
@@ -2369,16 +2369,17 @@ ('en', +u 'English @@ -2393,16 +2393,17 @@ ('es', +u 'Espa%C3%B1ol @@ -2417,16 +2417,17 @@ ('fr', +u 'Fran%C3%A7ai
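The fix above adds u'' prefixes to exactly the labels that contain non-ASCII characters. Under Python 2, which this GAE-era config targets, a bare literal is a byte string in the file's declared encoding, while the u prefix yields a unicode object; under Python 3 the prefix is accepted but redundant. A minimal illustration:

```python
# -*- coding: utf-8 -*-
# Python 2:  'Español'  -> str (raw bytes in the source encoding)
#            u'Español' -> unicode (decoded code points)
# Python 3: both literals are str; the u prefix is a no-op kept for compatibility.
label = u'Español'
assert isinstance(label, type(u''))  # holds on both Python 2 and Python 3
```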
b76e91c4517e52528f8543fce276ff4b5af9a4f6
fix temp file creation to something more multiplatform friendly
burp_reports/lib/files.py
burp_reports/lib/files.py
import tempfile import os def temp_file(file='temporal'): """ return: str with tempfilename """ # Append uid to end of filename file += '_{}'.format(os.getuid()) # Simplified and reutilized core funtionally from python cache_path = os.path.join(tempfile.gettempdir(), file) return cache_path
Python
0
@@ -170,19 +170,21 @@ t(os.get -uid +login ())%0A
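The diff swaps os.getuid(), which does not exist on Windows, for os.getlogin(). One caveat worth knowing: os.getlogin() can raise OSError when the process has no controlling terminal (cron jobs, some services), so getpass.getuser(), which falls back through environment variables, is often the more forgiving cross-platform choice. A sketch of that variant (an alternative, not what the commit itself does):

```python
import getpass
import os
import tempfile

def temp_file(name='temporal'):
    """Per-user temp path; getpass.getuser() works on POSIX and Windows and,
    unlike os.getlogin(), does not require a controlling terminal."""
    name += '_{}'.format(getpass.getuser())
    return os.path.join(tempfile.gettempdir(), name)
```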
632b86289ef643381c954adeca1f58c78e2aa8d5
Add documentation for plugins
cactus/plugin/defaults.py
cactus/plugin/defaults.py
#coding:utf-8 # Define no-op plugin methods def preBuildPage(page, context, data): return context, data def postBuildPage(page): pass def preBuildStatic(static): pass def postBuildStatic(static): pass def preBuild(site): pass def postBuild(site): pass def preDeploy(site): pass def postDeploy(site): pass def preDeployFile(file): pass ORDER = -1 DEFAULTS = [ 'preBuildPage', 'postBuildPage', 'preBuildStatic', 'postBuildStatic', 'preBuild', 'postBuild', 'preDeploy', 'postDeploy', 'preDeployFile', ]
Python
0
@@ -85,297 +85,1741 @@ -return context, data%0A%0A%0Adef postBuildPage(page):%0A pass%0A%0A%0Adef preBuildStatic(static):%0A pass%0A%0A%0Adef postBuildStatic(static):%0A pass%0A%0A%0Adef preBuild(site):%0A pass%0A%0Adef postBuild(site):%0A pass%0A%0A%0Adef preDeploy(site):%0A pass%0A%0A%0Adef postDeploy(site):%0A pass%0A%0A%0Adef preDeployFile(file): +%22%22%22%0A Called prior to building a page.%0A%0A :param page: The page about to be built%0A :param context: The context for this page (you can modify this, but you must return it)%0A :param data: The raw body for this page (you can modify this).%0A :returns: Modified (or not) context and data.%0A %22%22%22%0A return context, data%0A%0A%0Adef postBuildPage(page):%0A %22%22%22%0A Called after building a page.%0A%0A :param page: The page that was just built.%0A :returns: None%0A %22%22%22%0A pass%0A%0A%0Adef preBuildStatic(static):%0A %22%22%22%0A Called before building (copying to the build folder) a static file.%0A%0A :param static: The static file about to be built.%0A :returns: None%0A %22%22%22%0A pass%0A%0A%0Adef postBuildStatic(static):%0A %22%22%22%0A Called after building (copying to the build folder) a static file.%0A%0A :param static: The static file that was just built.%0A :returns: None%0A %22%22%22%0A pass%0A%0A%0Adef preBuild(site):%0A %22%22%22%0A Called prior to building the site, after loading configuration, plugins and externals.%0A%0A :param site: The site about to be built.%0A :returns: None%0A %22%22%22%0A pass%0A%0Adef postBuild(site):%0A %22%22%22%0A Called after building the site.%0A%0A :param site: The site that was just built.%0A :returns: None%0A %22%22%22%0A pass%0A%0A%0Adef preDeploy(site):%0A %22%22%22%0A Called prior to deploying the site (built files)%0A%0A :param site: The site about to be deployed.%0A :returns: None%0A %22%22%22%0A pass%0A%0A%0Adef postDeploy(site):%0A %22%22%22%0A Called after deploying the site (built files)%0A%0A :param site: The site that was just built.%0A :returns: None%0A %22%22%22%0A pass%0A%0A%0Adef preDeployFile(file):%0A %22%22%22%0A Called prior to deploying a single built file%0A%0A :param file: The file about to be deployed.%0A :returns: None%0A %22%22%22 %0A
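Given the hook contracts documented above, a Cactus plugin is simply a module that defines whichever of these functions it needs. A hypothetical plugin (module name and context key are illustrative, not from the Cactus codebase) that stamps every page with a build time:

```python
# plugins/build_info.py -- illustrative Cactus plugin built on the hooks above.
import datetime

def preBuildPage(page, context, data):
    # Expose a timestamp to every template; per the contract, context and
    # data must be returned whether or not they were modified.
    context['build_time'] = datetime.datetime.utcnow().isoformat()
    return context, data

def postBuild(site):
    print('build finished for %s' % site)
```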
04fd80cda56a911289bca20c7ee1bd70ac263bd4
set readonly from true to false because the cursor is hidden if readonly is true.
call_seq/TextEdit/rich.py
call_seq/TextEdit/rich.py
from PySide import QtCore import pyqode.python # public API from pyqode.python.bootstrapper import Bootstrapper from pyqode.python.modes import PyAutoCompleteMode from pyqode.python.modes import CalltipsMode from pyqode.python.modes import CommentsMode from pyqode.python.modes import PyCodeCompletionMode, JediCompletionProvider from pyqode.python.modes import PEP8CheckerMode from pyqode.python.modes import PyAutoIndentMode from pyqode.python.modes import PyFlakesCheckerMode from pyqode.python.modes import PyHighlighterMode from pyqode.python.modes import PyIndenterMode from pyqode.python.modes import DEFAULT_DARK_STYLES from pyqode.python.modes import DEFAULT_LIGHT_STYLES from pyqode.python.modes import GoToAssignmentsMode from pyqode.python.modes import DocumentAnalyserMode from pyqode.python.panels import PreLoadPanel from pyqode.python.panels import SymbolBrowserPanel from pyqode.core.modes import CaretLineHighlighterMode from pyqode.python.panels import QuickDocPanel class RichTextEdit(pyqode.core.QCodeEdit): def __init__(self): super(RichTextEdit, self).__init__() self.setLineWrapMode(self.NoWrap) self.installPanel(pyqode.core.LineNumberPanel(), pyqode.core.PanelPosition.LEFT) self.installMode(pyqode.core.ZoomMode()) #self.installMode(pyqode.core.FileWatcherMode()) self.installMode(pyqode.core.SymbolMatcherMode()) self.installMode(pyqode.core.WordClickMode()) self.installMode(PyHighlighterMode(self.document())) self.installMode(PyAutoIndentMode()) self.installMode(PyFlakesCheckerMode()) self.installMode(PEP8CheckerMode()) self.installMode(CalltipsMode()) self.installMode(PyIndenterMode()) self.installMode(GoToAssignmentsMode()) self.installPanel(QuickDocPanel(), pyqode.core.PanelPosition.BOTTOM) self.installMode(CommentsMode()) self.installMode(CaretLineHighlighterMode()) self.setReadOnly(True)
Python
0
@@ -2000,9 +2000,10 @@ nly( -Tru +Fals e)
650fb08d6e7269d468bbb38f8e2ff2481583cff4
fix KeyError and remove 'or'
cliche/cli.py
cliche/cli.py
""":mod:`cliche.cli` --- Command-line interfaces ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ """ import code import functools import logging.config import os import pathlib import sys from alembic.util import CommandError from click import Path, argument, echo, group, option from flask import _request_ctx_stack from sassutils.wsgi import SassMiddleware from setuptools import find_packages from werkzeug.utils import import_string from .celery import app as celery_app from .config import read_config from .orm import downgrade_database, upgrade_database from .web.app import app as flask_app from .web.db import get_database_engine __all__ = ('initialize_app', 'config', 'main') ALEMBIC_LOGGING = { 'version': 1, 'handlers': { 'console': { 'level': 'NOTSET', 'class': 'logging.StreamHandler', 'formatter': 'generic' } }, 'formatters': { 'generic': { 'format': '%(levelname)-5.5s [%(name)s] %(message)s', 'datefmt': '%H:%M:%S' } }, 'root': { 'level': 'WARN', 'handlers': ['console'] }, 'loggers': { 'alembic': { 'level': 'INFO', 'handlers': [] }, 'sqlalchemy.engine': { 'level': 'WARN', 'handlers': [] } } } def config(func): """Provide :option:`--config` or :option:`-c` option and run :func:`initialize_app()` automatically. :param func: a command function to decorate :type func: :class:`collections.abc.Callable` :returns: decorated ``func`` """ @functools.wraps(func) def internal(*args, **kwargs): initialize_app(kwargs.pop('config')) func(*args, **kwargs) deco = option('--config', '-c', type=Path(exists=True), help='Configuration file (YAML or Python)') return deco(internal) def initialize_app(config=None): """Initialize celery/flask app. :param config: a config file path. accept :file:`.py`, :file:`.yml` file. default value is :const:`None` """ if config is None: try: config = os.environ['CLICHE_CONFIG'] except KeyError: print('The -c/--config option or CLICHE_CONFIG environment ' 'variable is required', file=sys.stderr) raise SystemExit(1) if not os.path.isfile(config): print('The configuration file', config, 'cannot be read.') raise SystemExit(1) config = read_config(filename=pathlib.Path(config)) flask_app.config.update(config) celery_app.conf.update(config) @group() def cli(): """cliche for integrated command for cliche.io service.""" @cli.command() @argument('revision', default='head') @config def upgrade(revision): """Create the database tables, or upgrade it to the latest revision.""" logging_config = dict(ALEMBIC_LOGGING) logging.config.dictConfig(logging_config) with flask_app.app_context(): engine = get_database_engine() try: upgrade_database(engine, revision) except CommandError as e: if revision != 'head': try: downgrade_database(engine, revision) except CommandError as e: echo(e, file=sys.stderr) raise SystemExit(1) else: echo(e, file=sys.stderr) raise SystemExit(1) @cli.command() @argument('service') @config def sync(service): # FIXME available service listing """Sync to services.""" package = 'cliche.services.' + service if package in find_packages(): import_string(package + ':sync').delay() else: echo('There is no such service \'{}\' suitable for synchronization.' 
.format(service), file=sys.stderr) @cli.command() @config def shell(): """Run a Python shell inside Flask application context.""" with flask_app.test_request_context(): context = dict(app=_request_ctx_stack.top.app) # Use basic python shell code.interact(local=context) @cli.command() @option('--host', '-h') @option('--port', '-p', type=int) @option('--threaded', is_flag=True) @option('--processes', type=int, default=1) @option('--passthrough-errors', is_flag=True) @option('--debug/--no-debug', '-d/-D', default=None, help='enable the Werkzeug debugger' ' (DO NOT use in production code)') @option('--reload/--no-reload', '-r/-R', default=None, help='monitor Python files for changes' ' (not 100% safe for production use)') @config def runserver(host, port, threaded, processes, passthrough_errors, debug, reload): """Run the Flask development server i.e. app.run()""" if flask_app.debug: # scss compile automatically in debug mode flask_app.wsgi_app = SassMiddleware(flask_app.wsgi_app, { 'cliche.web': ('static/sass', 'static/css', '/static/css') }) if debug is None: debug = flask_app.config['DEBUG'] or True if reload is None: reload = flask_app.config['DEBUG'] or True flask_app.run(host=host, port=port, debug=debug, use_debugger=debug, use_reloader=reload, threaded=threaded, processes=processes, passthrough_errors=passthrough_errors) #: (:class:`collections.abc.Callable`) The CLI entry point. main = cli
Python
0.000007
@@ -5090,33 +5090,35 @@ p.config -%5B +.get( 'DEBUG' -%5D or +, True +) %0A if @@ -5170,25 +5170,27 @@ nfig -%5B +.get( 'DEBUG' -%5D or +, True +) %0A%0A
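The runserver fix is a genuine behaviour change, not just style: subscripting raises KeyError when DEBUG is absent, and the old `... or True` also overrode an explicitly configured False. dict.get only falls back when the key is missing, as a plain-dict demonstration shows:

```python
config = {'DEBUG': False}

assert (config['DEBUG'] or True) is True    # 'or' discards the explicit False
assert config.get('DEBUG', True) is False   # .get() preserves it
assert {}.get('DEBUG', True) is True        # default used only when key absent
```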
29aed8ce12734ac0489a8b4e4aa9b48ff4a320a7
fix fail
client/cli.py
client/cli.py
#!/usr/bin/env python import base64 import sys import logging import firehose.common as common class CLI(common.FirehoseClient): def __select(self, chums, prompt): print prompt for n, chum in enumerate(chums): print "%02d> %s (%s)" % (n, chum.name, chum.keyid) inp = raw_input("Enter ID number> ") return chums[int(inp)] def main(self, args=sys.argv): common.FirehoseClient.__init__(self) self.load_config() try: my_self = self.__select(self.get_identities(), "Select an identity to send as:") my_chum = self.__select(self.get_chums(), "Select somebody to send to:") self.set_identity(my_self) self.start_recv_thread() while True: data = raw_input("Send to %s> " % chum.name) cmd, _, args = data.partition(" ") if cmd == "/me": data = "ACT " + args elif cmd == "/ping": data = "PING 0" else: data = "MSG " + data my_chum.send(data) except (EOFError, KeyboardInterrupt): pass def on_msg(self, chum, target, message): print "%s: %s" % (chum.name, message) def on_act(self, chum, target, message): print "* %s %s" % (chum.name, message) if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG, format="%(asctime)19.19s %(levelname)4.4s %(name)s: %(message)s") module_log = logging.getLogger("firehose") module_log.setLevel(logging.DEBUG) module_log = logging.getLogger("gnupg") module_log.setLevel(logging.INFO) sys.exit(CLI().main(sys.argv))
Python
0.000003
@@ -818,16 +818,19 @@ %25s%3E %22 %25 +my_ chum.nam
b2c879b782629fd063dcbcfd98178445ca3e499d
Update migrations to use the common URI function.
dataactcore/migrations/env.py
dataactcore/migrations/env.py
from __future__ import with_statement from alembic import context # Load all DB tables into metadata object # @todo - load these dynamically from dataactcore.models import ( baseModel, domainModels, fsrs, errorModels, jobModels, stagingModels, userModel, validationModels) from dataactcore.config import CONFIG_DB from sqlalchemy import engine_from_config, pool from logging.config import fileConfig import logging import re USE_TWOPHASE = False # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) logger = logging.getLogger('alembic.env') # Use the broker's config file to gather section names referring to different # databases. In db_dict, the key will = alembic .ini section names and # migration method names. Value[0] will = the actual database name as # set in the broker config. Value[1] is the corresponding model. db_dict = {} db_dict['data_broker'] = [CONFIG_DB['db_name'], baseModel] db_names = config.get_main_option('databases') for name in re.split(r',\s*', db_names): if name not in db_dict: raise Exception('The alembic.ini databases section is targeting ' 'a database ({}) that is not set up in env.py. ' 'Please add {} info to db_dict in env.py'. format(name, name)) # add your model's MetaData objects here # for 'autogenerate' support. These must be set # up to hold just those tables targeting a # particular database. table.tometadata() may be # helpful here in case a "copy" of # a MetaData is needed. # from myapp import mymodel # target_metadata = { # 'engine1':mymodel.metadata1, # 'engine2':mymodel.metadata2 #} target_metadata = {key: value[1].Base.metadata for (key, value) in db_dict.items()} # Set up database URLs based on config file username = str(CONFIG_DB['username']) password = str(CONFIG_DB['password']) host = str(CONFIG_DB['host']) port = str(CONFIG_DB['port']) for (key, value) in db_dict.items(): # key = db-related names expected by Alembic config/scripts # value[0] = actual db names as set in broker config file baseUrl = 'postgres://' + username + ':' + password + '@' + host + ':' + port config.set_section_option(key, 'sqlalchemy.url', baseUrl + '/' + value[0]) # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ # for the --sql use case, run migrations for each URL into # individual files. engines = {} for name in re.split(r',\s*', db_names): engines[name] = rec = {} rec['url'] = context.config.get_section_option(name, "sqlalchemy.url") for name, rec in engines.items(): logger.info("Migrating database %s" % name) file_ = "%s.sql" % name logger.info("Writing output to %s" % file_) with open(file_, 'w') as buffer: context.configure(url=rec['url'], output_buffer=buffer, target_metadata=target_metadata.get(name), literal_binds=True) with context.begin_transaction(): context.run_migrations(engine_name=name) def run_migrations_online(): """Run migrations in 'online' mode. 
In this scenario we need to create an Engine and associate a connection with the context. """ # for the direct-to-DB use case, start a transaction on all # engines, then run all migrations, then commit all transactions. engines = {} for name in re.split(r',\s*', db_names): engines[name] = rec = {} rec['engine'] = engine_from_config( context.config.get_section(name), prefix='sqlalchemy.', poolclass=pool.NullPool) for name, rec in engines.items(): engine = rec['engine'] rec['connection'] = conn = engine.connect() if USE_TWOPHASE: rec['transaction'] = conn.begin_twophase() else: rec['transaction'] = conn.begin() try: for name, rec in engines.items(): logger.info("Migrating database %s" % name) context.configure( connection=rec['connection'], upgrade_token="%s_upgrades" % name, downgrade_token="%s_downgrades" % name, target_metadata=target_metadata.get(name), compare_type=True # instruct autogen to detect col type changes ) context.run_migrations(engine_name=name) if USE_TWOPHASE: for rec in engines.values(): rec['transaction'].prepare() for rec in engines.values(): rec['transaction'].commit() except: for rec in engines.values(): rec['transaction'].rollback() raise finally: for rec in engines.values(): rec['connection'].close() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online()
Python
0
@@ -165,22 +165,16 @@ import -(%0A baseMode @@ -178,142 +178,93 @@ odel -, domainModels, fsrs, errorModels, jobModels, stagingModels,%0A userModel, validationModels)%0Afrom dataactcore.config import CONFIG_DB +%0Afrom dataactcore.config import CONFIG_DB%0Afrom dataactcore.interfaces.db import dbURI %0Afro @@ -1920,144 +1920,8 @@ ile%0A -username = str(CONFIG_DB%5B'username'%5D)%0Apassword = str(CONFIG_DB%5B'password'%5D)%0Ahost = str(CONFIG_DB%5B'host'%5D)%0Aport = str(CONFIG_DB%5B'port'%5D)%0A for @@ -2097,75 +2097,23 @@ l = -'postgres://' + username + ':' + password + '@' + host + ':' + port +dbURI(value%5B0%5D) %0A @@ -2173,25 +2173,8 @@ eUrl - + '/' + value%5B0%5D )%0A%0A#
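The migration env now delegates URL building to the shared dbURI() helper instead of concatenating 'postgres://' pieces at the call site. The real function lives in dataactcore.interfaces.db; a sketch of what such a helper typically looks like, for orientation only:

```python
# Illustrative stand-in for dataactcore.interfaces.db.dbURI.
def db_uri(cfg, db_name):
    """Build a PostgreSQL URL from one config dict so the format lives in a
    single place. (A production helper should also URL-quote the password.)"""
    return 'postgres://{username}:{password}@{host}:{port}/{db}'.format(
        db=db_name,
        **{k: str(cfg[k]) for k in ('username', 'password', 'host', 'port')})

print(db_uri({'username': 'u', 'password': 'p', 'host': 'db', 'port': 5432}, 'broker'))
```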
d0c1dfaa884d96bd930aa07bc88c2929db7f97bd
version 0.61
lib/version.py
lib/version.py
ELECTRUM_VERSION = "0.60" SEED_VERSION = 4 # bump this everytime the seed generation is modified TRANSLATION_ID = 28079 # version of the wiki page
Python
0.000001
@@ -16,17 +16,17 @@ N = %220.6 -0 +1 %22%0ASEED_V @@ -114,11 +114,11 @@ = 28 -079 +344 # v
2135deb7fcc4ebf6ef07e31a957df1b18acb25f5
Remove JSONRPCServer from console service test
pyethapp/tests/test_console_service.py
pyethapp/tests/test_console_service.py
from itertools import count import pytest import serpent from devp2p.peermanager import PeerManager import ethereum from ethereum import tester from ethereum.ethpow import mine import ethereum.keys import ethereum.config from ethereum.slogging import get_logger from pyethapp.accounts import Account, AccountsService, mk_random_privkey from pyethapp.app import EthApp from pyethapp.config import update_config_with_defaults, get_default_config from pyethapp.db_service import DBService from pyethapp.eth_service import ChainService from pyethapp.jsonrpc import JSONRPCServer from pyethapp.pow_service import PoWService from pyethapp.console_service import Console # reduce key derivation iterations ethereum.keys.PBKDF2_CONSTANTS['c'] = 100 log = get_logger('test.console_service') @pytest.fixture def test_app(request, tmpdir): class TestApp(EthApp): def start(self): super(TestApp, self).start() log.debug('adding test accounts') # high balance account self.services.accounts.add_account(Account.new('', tester.keys[0]), store=False) # low balance account self.services.accounts.add_account(Account.new('', tester.keys[1]), store=False) # locked account locked_account = Account.new('', tester.keys[2]) locked_account.lock() self.services.accounts.add_account(locked_account, store=False) assert set(acct.address for acct in self.services.accounts) == set(tester.accounts[:3]) def mine_next_block(self): """Mine until a valid nonce is found. :returns: the new head """ log.debug('mining next block') block = self.services.chain.chain.head_candidate delta_nonce = 10**6 for start_nonce in count(0, delta_nonce): bin_nonce, mixhash = mine(block.number, block.difficulty, block.mining_hash, start_nonce=start_nonce, rounds=delta_nonce) if bin_nonce: break self.services.pow.recv_found_nonce(bin_nonce, mixhash, block.mining_hash) log.debug('block mined') assert self.services.chain.chain.head.difficulty == 1 return self.services.chain.chain.head config = { 'data_dir': str(tmpdir), 'db': {'implementation': 'EphemDB'}, 'pow': {'activated': False}, 'p2p': { 'min_peers': 0, 'max_peers': 0, 'listen_port': 29873 }, 'node': {'privkey_hex': mk_random_privkey().encode('hex')}, 'discovery': { 'boostrap_nodes': [], 'listen_port': 29873 }, 'eth': { 'block': { # reduced difficulty, increased gas limit, allocations to test accounts 'GENESIS_DIFFICULTY': 1, 'BLOCK_DIFF_FACTOR': 2, # greater than difficulty, thus difficulty is constant 'GENESIS_GAS_LIMIT': 3141592, 'GENESIS_INITIAL_ALLOC': { tester.accounts[0].encode('hex'): {'balance': 10**24}, tester.accounts[1].encode('hex'): {'balance': 1}, tester.accounts[2].encode('hex'): {'balance': 10**24}, } } }, 'jsonrpc': {'listen_port': 29873} } services = [DBService, AccountsService, PeerManager, ChainService, PoWService, JSONRPCServer, Console] update_config_with_defaults(config, get_default_config([TestApp] + services)) update_config_with_defaults(config, {'eth': {'block': ethereum.config.default_config}}) app = TestApp(config) for service in services: service.register_with_app(app) def fin(): log.debug('stopping test app') app.stop() request.addfinalizer(fin) log.debug('starting test app') app.start() return app def test_send_transaction_with_contract(test_app): serpent_code = ''' def main(a,b): return(a ^ b) ''' tx_to = b'' evm_code = serpent.compile(serpent_code) chain = test_app.services.chain.chain assert chain.head_candidate.get_balance(tx_to) == 0 sender = test_app.services.accounts.unlocked_accounts[0].address assert chain.head_candidate.get_balance(sender) > 0 eth = 
test_app.services.console.console_locals['eth'] tx = eth.transact(to='', data=evm_code, startgas=500000, sender=sender) code = chain.head_candidate.account_to_dict(tx.creates)['code'] assert len(code) > 2 assert code != '0x' test_app.mine_next_block() creates = chain.head.get_transaction(0).creates code = chain.head.account_to_dict(creates)['code'] assert len(code) > 2 assert code != '0x' def test_console_name_reg_contract(test_app): """ exercise the console service with the NameReg contract found in The_Console wiki https://github.com/ethereum/pyethapp/wiki/The_Console#creating-contracts """ solidity_code = """ contract NameReg { event AddressRegistered(bytes32 indexed name, address indexed account); mapping (address => bytes32) toName; function register(bytes32 name) { toName[msg.sender] = name; AddressRegistered(name, msg.sender); } function resolve(address addr) constant returns (bytes32 name) { return toName[addr]; } } """ import ethereum._solidity solidity = ethereum._solidity.get_solidity() if solidity is None: pytest.xfail("solidity not installed, not tested") else: # create the NameReg contract tx_to = b'' evm_code = solidity.compile(solidity_code) chain = test_app.services.chain.chain assert chain.head_candidate.get_balance(tx_to) == 0 sender = test_app.services.accounts.unlocked_accounts[0].address assert chain.head_candidate.get_balance(sender) > 0 eth = test_app.services.console.console_locals['eth'] tx = eth.transact(to='', data=evm_code, startgas=500000, sender=sender) code = chain.head_candidate.account_to_dict(tx.creates)['code'] assert len(code) > 2 assert code != '0x' test_app.mine_next_block() creates = chain.head.get_transaction(0).creates code = chain.head.account_to_dict(creates)['code'] assert len(code) > 2 assert code != '0x' # interact with the NameReg contract abi = solidity.mk_full_signature(solidity_code) namereg = eth.new_contract(abi, creates, sender=sender) register_tx = namereg.register('alice', startgas=90000, gasprice=50 * 10**9) test_app.mine_next_block() result = namereg.resolve(sender) assert result == 'alice' + ('\x00' * 27)
Python
0
@@ -529,51 +529,8 @@ ice%0A -from pyethapp.jsonrpc import JSONRPCServer%0A from @@ -3436,23 +3436,8 @@ ice, - JSONRPCServer, Con
f8ce7d7709c3b83e02dde352b8888f462be572ce
Make event handlers for Debugger non-filters with a priority of 100.0 (they aren't doing any filtering)
circuits/core/debugger.py
circuits/core/debugger.py
# Module: debugger # Date: 2nd April 2006 # Author: James Mills, prologic at shortcircuit dot net dot au """ Debugger component used to debug each event in a system by printing each event to sys.stderr or to a Logger Component instnace. """ import os import sys from cStringIO import StringIO from handlers import handler from components import Component from circuits.tools import reprhandler class Debugger(Component): """Create a new Debugger Component Creates a new Debugger Component that filters all events in teh system printing each event to sys.stderr or a Logger Component. :var IgnoreEvents: list of events (str) to ignore :var IgnoreChannels: list of channels (str) to ignore :var enabled: Enabled/Disabled flag :param log: Logger Component instnace or None (*default*) """ IgnoreEvents = [] IgnoreChannels = [] def __init__(self, errors=True, events=True, file=None, logger=None, chop=False, **kwargs): "initializes x; see x.__class__.__doc__ for signature" super(Debugger, self).__init__() self.errors = errors self.events = events if type(file) is str: self.file = open(os.path.abspath(os.path.expanduser(file)), "a") elif type(file) is file or hasattr(file, "write"): self.file = file else: self.file = sys.stderr self.logger = logger self.chop = chop self.IgnoreEvents.extend(kwargs.get("IgnoreEvents", [])) self.IgnoreChannels.extend(kwargs.get("IgnoreChannels", [])) @handler("exception", filter=True) def exception(self, type, value, traceback, handler=None): if not self.errors: return s = StringIO() if handler is None: handler = "" else: handler = reprhandler(self.root, handler) s.write("ERROR %s(%s): %s\n" % ("%s " % handler, type, value)) s.write("%s\n" % "".join(traceback)) s.seek(0) if self.logger is not None: self.logger.error(s.getvalue()) else: self.file.write(s.read()) self.file.flush() s.close() @handler(filter=True) def event(self, event, *args, **kwargs): """Global Event Handler Event handler to listen and filter all events printing each event to self.file or a Logger Component instnace by calling self.logger.debug """ if not self.events: return channel = event.channel if True in [event.name == x.__name__ for x in self.IgnoreEvents]: return elif channel in self.IgnoreChannels: return else: if self.logger is not None: self.logger.debug(repr(event)) else: s = repr(event) if self.file is sys.stderr and len(s) > 80 and self.chop: s = "%s ...>" % s[:75] self.file.write("%s\n" % s) self.file.flush()
Python
0
@@ -1608,27 +1608,30 @@ ption%22, -filter=True +priority=100.0 )%0A de @@ -2216,19 +2216,22 @@ ler( -filter=True +priority=100.0 )%0A
3157bbd5cca51ea2ac0c086a9337296c6652fafc
fix url order
citizendialer3000/urls.py
citizendialer3000/urls.py
from django.conf.urls.defaults import * urlpatterns = patterns('citizendialer3000.views', url(r'^$', 'callcampaign_list', name='call_list'), url(r'^(?P<slug>[\w\-]+)/$', 'callcampaign_detail', name='call_campaign'), url(r'^(?P<slug>[\w\-]+)/(?P<bioguide_id>\w+)/$', 'contact_detail', name='call_contact'), url(r'^(?P<slug>[\w\-]+)/thankyou/$', 'complete', name='call_complete'), url(r'^(?P<slug>[\w\-]+)/results/$', 'results', name='results'), url(r'^(?P<slug>[\w\-]+)/results/calls.csv$', 'results_calls', name='results_calls'), url(r'^(?P<slug>[\w\-]+)/results/summary.csv$', 'results_summary', name='results_summary'), )
Python
0.982361
@@ -222,102 +222,8 @@ '),%0A - url(r'%5E(?P%3Cslug%3E%5B%5Cw%5C-%5D+)/(?P%3Cbioguide_id%3E%5Cw+)/$', 'contact_detail', name='call_contact'),%0A @@ -550,10 +550,104 @@ mary'),%0A + url(r'%5E(?P%3Cslug%3E%5B%5Cw%5C-%5D+)/(?P%3Cbioguide_id%3E%5Cw+)/$', 'contact_detail', name='call_contact'),%0A )%0A
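Django resolves urlpatterns top to bottom and stops at the first regex that matches, so the generic (?P<bioguide_id>\w+) entry was swallowing the literal thankyou/ and results/ routes; the diff moves it below them. The same first-match shadowing is easy to reproduce with plain re:

```python
import re

patterns = [  # catch-all deliberately listed first, as in the buggy ordering
    (r'^(?P<slug>[\w\-]+)/(?P<bioguide_id>\w+)/$', 'call_contact'),
    (r'^(?P<slug>[\w\-]+)/thankyou/$', 'call_complete'),
]

def resolve(path):
    for regex, name in patterns:
        if re.match(regex, path):
            return name

assert resolve('foo/thankyou/') == 'call_contact'  # 'thankyou' matches \w+ too
```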
4e42da241c5edc43990778225ad84ae241973770
Convert unicode in sa engine
ckanserviceprovider/db.py
ckanserviceprovider/db.py
import sqlalchemy as sa engine = None metadata = None jobs_table = None metadata_table = None logs_table = None def setup_db(app): global engine, metadata engine = sa.create_engine(app.config.get('SQLALCHEMY_DATABASE_URI'), echo=app.config.get('SQLALCHEMY_ECHO')) metadata = sa.MetaData(engine) make_task_table() metadata.create_all(engine) def make_task_table(): global jobs_table, metadata_table, logs_table jobs_table = sa.Table('jobs', metadata, sa.Column('job_id', sa.UnicodeText, primary_key=True), sa.Column('job_type', sa.UnicodeText), sa.Column('status', sa.UnicodeText, index=True), sa.Column('data', sa.UnicodeText), sa.Column('error', sa.UnicodeText), sa.Column('requested_timestamp', sa.DateTime), sa.Column('finished_timestamp', sa.DateTime), sa.Column('sent_data', sa.UnicodeText), # Callback url sa.Column('result_url', sa.UnicodeText), # CKAN API key sa.Column('api_key', sa.UnicodeText), # Key to administer job sa.Column('job_key', sa.UnicodeText) ) metadata_table = sa.Table('metadata', metadata, sa.Column('job_id', sa.UnicodeText, primary_key=True), sa.Column('key', sa.UnicodeText, primary_key=True), sa.Column('value', sa.UnicodeText, index=True), sa.Column('type', sa.UnicodeText), ) logs_table = sa.Table('logs', metadata, sa.Column('job_id', sa.UnicodeText, index=True), sa.Column('timestamp', sa.DateTime), sa.Column('message', sa.UnicodeText), sa.Column('name', sa.UnicodeText), sa.Column('level', sa.UnicodeText), sa.Column('module', sa.UnicodeText), sa.Column('funcName', sa.UnicodeText), sa.Column('lineno', sa.Integer) )
Python
0.999999
@@ -296,16 +296,68 @@ Y_ECHO') +,%0A convert_unicode=True )%0A me
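For context on the flag being added: convert_unicode=True told pre-1.4 SQLAlchemy to decode result strings to (and encode bound parameters from) unicode at the DBAPI boundary, a Python 2-era convenience. To the best of my recollection it was deprecated in the 1.3 series and removed in 1.4, where text is always handled as unicode, so modern code simply omits it:

```python
import sqlalchemy as sa

# SQLAlchemy < 1.4 (matching the record above):
#     sa.create_engine(url, echo=False, convert_unicode=True)
# SQLAlchemy >= 1.4: the keyword is gone; unicode handling is unconditional.
engine = sa.create_engine('sqlite://', echo=False)
```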
302934bfd8b30ee1b33cdfb60ca36021df153746
improve cleanup process of test by removing the downloaded file
quantecon/util/tests/test_notebooks.py
quantecon/util/tests/test_notebooks.py
""" Tests for Notebook Utilities Functions --------- fetch_nb_dependencies """ from quantecon.util import fetch_nb_dependencies import unittest FILES = ['README.md'] REPO = "https://github.com/QuantEcon/QuantEcon.py" RAW = "raw" BRANCH = "master" class TestNotebookUtils(unittest.TestCase): def test_fetch_nb_dependencies(self): """ Run First and Test Download """ status = fetch_nb_dependencies(files=FILES, repo=REPO, raw=RAW, branch=BRANCH) self.assertFalse(False in status) def test_fetch_nb_dependencies_overwrite(self): """ Run Second and Ensure file is skipped by checking a False is found in status """ status = fetch_nb_dependencies(files=FILES, repo=REPO, raw=RAW, branch=BRANCH) self.assertTrue(False in status)
Python
0
@@ -139,16 +139,26 @@ unittest +%0Aimport os %0A%0AFILES @@ -783,45 +783,230 @@ NCH) -%0A self.assertTrue(False in status + #First will succeed%0A status = fetch_nb_dependencies(files=FILES, repo=REPO, raw=RAW, branch=BRANCH) #Second should skip%0A self.assertTrue(False in status)%0A%0A def tearDown(self):%0A os.remove(%22README.md%22 )
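The added tearDown deletes the downloaded README.md so the overwrite test starts clean on every run. A closely related pattern worth knowing is unittest's addCleanup, which registers the removal at the moment the file is created and runs even when a test errors out midway (a sketch, not the quantecon test itself):

```python
import os
import unittest

class TestDownload(unittest.TestCase):
    def setUp(self):
        with open('README.md', 'w') as f:
            f.write('downloaded')
        # Runs LIFO after each test, even on errors -- slightly sturdier
        # than tearDown for temporary-file removal.
        self.addCleanup(os.remove, 'README.md')

    def test_file_exists(self):
        self.assertTrue(os.path.exists('README.md'))
```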
02b7d5416ad55b78e256e58ed6a282681d1df48d
Add required get_model for Haystack 2.0
readthedocs/projects/search_indexes.py
readthedocs/projects/search_indexes.py
# -*- coding: utf-8-*- import codecs import os from django.utils.html import strip_tags #from haystack import site from haystack import indexes from haystack.fields import CharField #from celery_haystack.indexes import SearchIndex from projects.models import File, ImportedFile, Project import logging log = logging.getLogger(__name__) class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = CharField(document=True, use_template=True) author = CharField() title = CharField(model_attr='name') description = CharField(model_attr='description') repo_type = CharField(model_attr='repo_type') def prepare_author(self, obj): return obj.users.all()[0] class FileIndex(indexes.SearchIndex, indexes.Indexable): text = CharField(document=True, use_template=True) author = CharField() project = CharField(model_attr='project__name', faceted=True) title = CharField(model_attr='heading') def prepare_author(self, obj): return obj.project.users.all()[0] #Should prob make a common subclass for this and FileIndex class ImportedFileIndex(indexes.SearchIndex, indexes.Indexable): text = CharField(document=True) author = CharField() project = CharField(model_attr='project__name', faceted=True) title = CharField(model_attr='name') def prepare_author(self, obj): return obj.project.users.all()[0] def prepare_text(self, obj): """ Prepare the text of the html file. This only works on machines that have the html files for the projects checked out. """ #Import this here to hopefully fix tests for now. from pyquery import PyQuery full_path = obj.project.rtd_build_path() file_path = os.path.join(full_path, obj.path.lstrip('/')) try: with codecs.open(file_path, encoding='utf-8', mode='r') as f: content = f.read() except IOError as e: log.info('Unable to index file: %s, error :%s' % (file_path, e)) return log.debug('Indexing %s' % obj.slug) try: to_index = strip_tags(PyQuery(content)("div.document").html()).replace(u'¶', '') except ValueError: #Pyquery returns ValueError if div.document doesn't exist. return return to_index
Python
0
@@ -691,16 +691,65 @@ l()%5B0%5D%0A%0A + def get_model(self):%0A return Project%0A%0A class Fi @@ -1066,16 +1066,62 @@ l()%5B0%5D%0A%0A + def get_model(self):%0A return File%0A%0A #Should @@ -2425,12 +2425,66 @@ rn to_index%0A +%0A def get_model(self):%0A return ImportedFile%0A
96877f2cb706a465c5e7fb4d316dbd82ff2cb432
add comment
purelyjs/interpreter.py
purelyjs/interpreter.py
from .io import invoke class Interpreter(object): known_engines = ['js', 'rhino'] def __init__(self, exes=None): engines = exes if exes else self.known_engines self.exe = self.detect(engines) if not self.exe: raise ValueError("No js engine could be found, tried: %s" % ', '.join(engines)) def detect(self, engines): found = None for engine in engines: success, stdout, stderr = invoke(['which', engine]) if success: found = stdout break return found def run_module(self, filepath): success, stdout, stderr = invoke([self.exe, filepath]) return success, stderr
Python
0
@@ -443,16 +443,59 @@ ngines:%0A + # NOTE: Very platform specific%0A
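The comment added by the diff is right to flag the external `which` call: stock Windows has no such command. Since Python 3.3 the standard library's shutil.which does the same PATH search portably, so a detect() along these lines avoids the subprocess entirely (an alternative sketch, not what the commit does):

```python
import shutil

def detect(engines):
    """Return the path of the first engine found on PATH, or None --
    a portable replacement for shelling out to `which`."""
    for engine in engines:
        path = shutil.which(engine)
        if path:
            return path
    return None

print(detect(['js', 'rhino', 'node']))
```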
b99ded7ddd0166d88111ced1a648bd9c79a8bbbe
mark xfail of test_get_psm3 (#803)
pvlib/test/test_psm3.py
pvlib/test/test_psm3.py
""" test iotools for PSM3 """ import os from pvlib.iotools import psm3 from conftest import needs_pandas_0_22 import numpy as np import pandas as pd import pytest from requests import HTTPError BASEDIR = os.path.abspath(os.path.dirname(__file__)) PROJDIR = os.path.dirname(BASEDIR) DATADIR = os.path.join(PROJDIR, 'data') TEST_DATA = os.path.join(DATADIR, 'test_psm3.csv') LATITUDE, LONGITUDE = 40.5137, -108.5449 HEADER_FIELDS = [ 'Source', 'Location ID', 'City', 'State', 'Country', 'Latitude', 'Longitude', 'Time Zone', 'Elevation', 'Local Time Zone', 'Dew Point Units', 'DHI Units', 'DNI Units', 'GHI Units', 'Temperature Units', 'Pressure Units', 'Wind Direction Units', 'Wind Speed', 'Surface Albedo Units', 'Version'] PVLIB_EMAIL = 'pvlib-admin@googlegroups.com' DEMO_KEY = 'DEMO_KEY' @needs_pandas_0_22 def test_get_psm3(): """test get_psm3""" header, data = psm3.get_psm3(LATITUDE, LONGITUDE, DEMO_KEY, PVLIB_EMAIL) expected = pd.read_csv(TEST_DATA) # check datevec columns assert np.allclose(data.Year, expected.Year) assert np.allclose(data.Month, expected.Month) assert np.allclose(data.Day, expected.Day) assert np.allclose(data.Hour, expected.Hour) # XXX: unclear if NSRDB changes to timesteps are permanent or temporary # assert np.allclose(data.Minute, expected.Minute) # check data columns assert np.allclose(data.GHI, expected.GHI) assert np.allclose(data.DNI, expected.DNI) assert np.allclose(data.DHI, expected.DHI) assert np.allclose(data.Temperature, expected.Temperature) assert np.allclose(data.Pressure, expected.Pressure) assert np.allclose(data['Dew Point'], expected['Dew Point']) assert np.allclose(data['Surface Albedo'], expected['Surface Albedo']) assert np.allclose(data['Wind Speed'], expected['Wind Speed']) assert np.allclose(data['Wind Direction'], expected['Wind Direction']) # check header for hf in HEADER_FIELDS: assert hf in header # check timezone assert (data.index.tzinfo.zone == 'Etc/GMT%+d' % -header['Time Zone']) # check errors with pytest.raises(HTTPError): # HTTP 403 forbidden because api_key is rejected psm3.get_psm3(LATITUDE, LONGITUDE, api_key='BAD', email=PVLIB_EMAIL) with pytest.raises(HTTPError): # coordinates were not found in the NSRDB psm3.get_psm3(51, -5, DEMO_KEY, PVLIB_EMAIL) with pytest.raises(HTTPError): # names is not one of the available options psm3.get_psm3(LATITUDE, LONGITUDE, DEMO_KEY, PVLIB_EMAIL, names='bad') with pytest.raises(HTTPError): # intervals can only be 30 or 60 minutes psm3.get_psm3(LATITUDE, LONGITUDE, DEMO_KEY, PVLIB_EMAIL, interval=15)
Python
0
@@ -809,16 +809,48 @@ _KEY'%0A%0A%0A +@pytest.mark.xfail(strict=True)%0A @needs_p
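A note on the strict=True being added: a plain @pytest.mark.xfail reports a surprise pass as XPASS and the suite stays green, whereas strict=True turns an unexpected pass into a hard failure, forcing the stale marker to be removed once the underlying breakage is fixed. Minimal illustration:

```python
import pytest

@pytest.mark.xfail(strict=True)
def test_known_breakage():
    # Reported as xfail while this assertion fails; if it ever starts
    # passing, strict=True fails the suite instead of silently XPASSing.
    assert 1 == 2
```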
494e7ae13c7b8c0ef4a65cb0b005578f8a0d2857
Fix canary command
pwndbg/commands/misc.py
pwndbg/commands/misc.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import argparse import errno as _errno import struct import gdb import pwndbg as _pwndbg import pwndbg.arch as _arch import pwndbg.auxv import pwndbg.commands import pwndbg.regs import pwndbg.symbol _errno.errorcode[0] = 'OK' parser = argparse.ArgumentParser(description=''' Converts errno (or argument) to its string representation. ''') parser.add_argument('err', type=int, nargs='?', default=None, help='Errno; if not passed, it is retrieved from __errno_location') @_pwndbg.commands.ArgparsedCommand(parser) def errno(err): if err is None: # Dont ask. errno_location = pwndbg.symbol.get('__errno_location') err = pwndbg.memory.int(errno_location) # err = int(gdb.parse_and_eval('*((int *(*) (void)) __errno_location) ()')) err = abs(int(err)) if err >> 63: err -= (1<<64) elif err >> 31: err -= (1<<32) msg = _errno.errorcode.get(int(err), "Unknown error code") print("Errno %i: %s" % (err, msg)) parser = argparse.ArgumentParser(description=''' Prints out a list of all pwndbg commands. The list can be optionally filtered if filter_pattern is passed. ''') parser.add_argument('filter_pattern', type=str, nargs='?', default=None, help='Filter to apply to commands names/docs') @_pwndbg.commands.ArgparsedCommand(parser) def pwndbg(filter_pattern): sorted_commands = list(_pwndbg.commands._Command.commands) sorted_commands.sort(key=lambda x: x.__name__) if filter_pattern: filter_pattern = filter_pattern.lower() for c in sorted_commands: name = c.__name__ docs = c.__doc__ if docs: docs = docs.strip() if docs: docs = docs.splitlines()[0] if not filter_pattern or filter_pattern in name.lower() or (docs and filter_pattern in docs.lower()): print("%-20s %s" % (name, docs)) @_pwndbg.commands.ParsedCommand def distance(a, b): '''Print the distance between the two arguments''' a = int(a) & _arch.ptrmask b = int(b) & _arch.ptrmask distance = (b-a) print("%#x->%#x is %#x bytes (%#x words)" % (a, b, distance, distance // _arch.ptrsize)) @_pwndbg.commands.Command def canary(): """Print out the current stack canary""" auxv = pwndbg.auxv.get() if 'AT_SECURE' in auxv: print("AT_SECURE=%#x" % auxv['AT_SECURE']) else: print("Couldn't find AT_SECURE")
Python
0.000011
@@ -2414,16 +2414,17 @@ auxv = +_ pwndbg.a @@ -2441,30 +2441,76 @@ -if 'AT_SECURE' in auxv +at_random = auxv.get('AT_RANDOM', None)%0A if at_secure is not None :%0A @@ -2525,22 +2525,22 @@ int(%22AT_ -SECURE +RANDOM =%25#x%22 %25 @@ -2544,24 +2544,16 @@ %25 a -uxv%5B'AT_SECURE'%5D +t_secure )%0A @@ -2596,13 +2596,13 @@ AT_ -SECURE +RANDOM %22)%0A
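The fix reads the canary seed from AT_RANDOM rather than AT_SECURE: on Linux, AT_RANDOM (auxv type 25) points at 16 kernel-supplied random bytes from which glibc derives the stack-protector canary (conventionally zeroing the low byte). Independent of pwndbg's own auxv machinery, the entry can be located by walking /proc/self/auxv; the sketch below assumes a 64-bit little-endian layout:

```python
import struct

AT_RANDOM = 25  # Linux auxv tag for the kernel's 16 random bytes

def at_random_address(auxv_path='/proc/self/auxv'):
    """Scan the auxiliary vector (pairs of 64-bit words on x86-64) and
    return the address AT_RANDOM points at, or None if absent."""
    with open(auxv_path, 'rb') as f:
        data = f.read()
    for offset in range(0, len(data) - 15, 16):
        a_type, a_val = struct.unpack_from('<QQ', data, offset)
        if a_type == AT_RANDOM:
            return a_val
    return None

print(hex(at_random_address() or 0))  # Linux-only, of course
```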
5aa90e98abcfafa9036f8cc19cd49b33aa638181
update dev version after 0.26.0 tag [skip ci]
py/desispec/_version.py
py/desispec/_version.py
__version__ = '0.26.0'
Python
0
@@ -14,10 +14,18 @@ '0.26.0 +.dev3104 '%0A
b59b0e12a0f5fc83d69d9eaa1f7652e8e1b4ac81
Improve tuple and list converters
pybinding/utils/misc.py
pybinding/utils/misc.py
from functools import wraps import numpy as np def to_tuple(o): if isinstance(o, (tuple, list)): return tuple(o) else: return o, def with_defaults(options: dict, defaults_dict: dict=None, **defaults_kwargs): """Return a dict where missing keys are filled in by defaults >>> options = dict(hello=0) >>> with_defaults(options, hello=4, world=5) == dict(hello=0, world=5) True >>> defaults = dict(hello=4, world=5) >>> with_defaults(options, defaults) == dict(hello=0, world=5) True >>> with_defaults(options, defaults, world=7, yes=3) == dict(hello=0, world=5, yes=3) True """ options = options if options else {} if defaults_dict: options = dict(defaults_dict, **options) return dict(defaults_kwargs, **options) def x_pi(value): """Return str of value in 'multiples of pi' latex representation >>> x_pi(6.28) == r"$2\pi$" True >>> x_pi(3) == r"$0.95\pi$" True >>> x_pi(-np.pi) == r"$-\pi$" True >>> x_pi(0) == "0" True """ n = value / np.pi if np.isclose(n, 0): return "0" elif np.isclose(abs(n), 1): return r"$\pi$" if n > 0 else r"$-\pi$" else: return r"${:.2g}\pi$".format(n) def decorator_decorator(decorator_wrapper): """A decorator decorator which allows it to be used with or without arguments Parameters ---------- decorator_wrapper : Callable[[Any], Callable] Examples -------- >>> @decorator_decorator ... def decorator_wrapper(optional="default"): ... def actual_decorator(func): ... return lambda x: func(x, optional) ... return actual_decorator >>> @decorator_wrapper("hello") ... def foo(x, y): ... print(x, y) >>> foo(1) 1 hello >>> @decorator_wrapper ... def bar(x, y): ... print(x, y) >>> bar(2) 2 default """ @wraps(decorator_wrapper) def new_wrapper(*args, **kwargs): if len(args) == 1 and not kwargs and (isinstance(args[0], type) or callable(args[0])): return decorator_wrapper()(args[0]) else: return lambda cls_or_func: decorator_wrapper(*args, **kwargs)(cls_or_func) return new_wrapper
Python
0.000001
@@ -68,39 +68,129 @@ -if isinstance(o, (tuple, list)) +try:%0A return tuple(o)%0A except TypeError:%0A return (o,) if o is not None else ()%0A%0A%0Adef to_list(o):%0A try :%0A @@ -194,37 +194,36 @@ %0A return -tuple +list (o)%0A else:%0A @@ -211,27 +211,39 @@ ist(o)%0A e -lse +xcept TypeError :%0A re @@ -247,18 +247,44 @@ return -o, +%5Bo%5D if o is not None else %5B%5D %0A%0A%0Adef w
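The rewrite moves the converters from type checks to EAFP (try the conversion, catch TypeError) and gives None a defined empty result. Reconstructing the post-diff to_tuple and exercising its three branches:

```python
def to_tuple(o):
    # Post-diff behaviour, reconstructed from the diff above.
    try:
        return tuple(o)          # any iterable converts directly
    except TypeError:
        return (o,) if o is not None else ()  # wrap scalars, drop None

assert to_tuple([1, 2]) == (1, 2)
assert to_tuple(3) == (3,)
assert to_tuple(None) == ()
```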
ee5a85df1d2db8babd8d6df6a188137051c3a48e
Change the improvement policies due to reorganizing reggie.
pybo/policies/simple.py
pybo/policies/simple.py
""" Acquisition functions based on the probability or expected value of improvement. """ from __future__ import division from __future__ import absolute_import from __future__ import print_function import numpy as np __all__ = ['EI', 'PI', 'UCB', 'Thompson'] def EI(model, _, xi=0.0): """ Expected improvement policy with an exploration parameter of `xi`. """ X = model.data[0] x = X[model.predict(X)[0].argmax()] def index(X, grad=False): """EI policy instance.""" return model.get_improvement(X, x, xi, grad) return index def PI(model, _, xi=0.05): """ Probability of improvement policy with an exploration parameter of `xi`. """ X = model.data[0] x = X[model.predict(X)[0].argmax()] def index(X, grad=False): """PI policy instance.""" return model.get_improvement(X, x, xi, grad, pi=True) return index def Thompson(model, _, n=100, rng=None): """ Thompson sampling policy. """ return model.sample_f(n, rng).get def UCB(model, _, delta=0.1, xi=0.2): """ The (GP)UCB acquisition function where `delta` is the probability that the upper bound holds and `xi` is a multiplicative modification of the exploration factor. """ d = model.ndata a = xi * 2 * np.log(np.pi**2 / 3 / delta) b = xi * (4 + d) def index(X, grad=False): """UCB policy instance.""" posterior = model.predict(X, grad=grad) mu, s2 = posterior[:2] beta = a + b * np.log(model.ndata + 1) if grad: dmu, ds2 = posterior[2:] return (mu + np.sqrt(beta * s2), dmu + 0.5 * np.sqrt(beta / s2[:, None]) * ds2) else: return mu + np.sqrt(beta * s2) return index
Python
0
@@ -378,36 +378,17 @@ -X = model.data%5B0%5D%0A x +target = -X%5B mode @@ -393,39 +393,52 @@ del.predict( -X +model.data%5B0%5D )%5B0%5D. -arg max() -%5D + + xi %0A%0A def in @@ -527,29 +527,30 @@ rovement(X, -x, xi +target , grad)%0A%0A @@ -693,36 +693,17 @@ -X = model.data%5B0%5D%0A x +target = -X%5B mode @@ -716,23 +716,36 @@ ict( -X +model.data%5B0%5D )%5B0%5D. -arg max() -%5D + + xi %0A%0A @@ -835,43 +835,28 @@ get_ -improvement(X, x, xi, grad, pi=True +tail(X, target, grad )%0A%0A
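The commit folds the exploration parameter into a single target, the best predicted mean plus xi, and hands it to get_improvement (or get_tail for PI). For reference, the textbook closed form of expected improvement under a Gaussian posterior, which may differ in detail from reggie's implementation, is EI(x) = (mu - target) * Phi(z) + sigma * phi(z) with z = (mu - target) / sigma:

```python
import numpy as np
from scipy.stats import norm

def expected_improvement(mu, sigma, target):
    """Textbook EI for N(mu, sigma**2); reference only, not reggie's code."""
    z = (mu - target) / sigma
    return (mu - target) * norm.cdf(z) + sigma * norm.pdf(z)

print(expected_improvement(np.array([0.2, 1.0]), np.array([0.5, 0.5]), target=0.9))
```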
951817c3852c248a60a9ada242415b3f3e632777
remove unneeded import
pycket/test/test_ast.py
pycket/test/test_ast.py
import pytest from pycket.expand import expand, expand_string from pycket.values import W_Symbol from pycket.expand import _to_ast, to_ast, parse_module from pycket.interpreter import (LexicalVar, ModuleVar, Done, CaseLambda, variable_set, variables_equal, Lambda, Letrec, Let, Quote, App, If, ) from pycket.test.testhelper import format_pycket_mod def make_symbols(d): v = variable_set() for i, j in d.iteritems(): v[ModuleVar(W_Symbol.make(i), None, W_Symbol.make(i))] = j return v def expr_ast(s): m = parse_module(expand_string(format_pycket_mod(s, extra="(define x 0)"))) return m.body[-1] def test_mutvars(): p = expr_ast("(lambda (x) (set! x 2))") assert len(p.mutated_vars()) == 0 p = expr_ast(("(lambda (y) (set! x 2))")) print p assert variables_equal(p.mutated_vars(), make_symbols({"x": None})) p = expr_ast(("(let ([y 1]) (set! x 2))")) assert variables_equal(p.mutated_vars(), make_symbols({"x": None})) # assert p.mutated_vars() == make_symbols({"x": None}) p = expr_ast(("(let ([x 1]) (set! x 2))")) assert variables_equal(p.mutated_vars(), make_symbols({})) def test_cache_lambda_if_no_frees(): from pycket.interpreter import ToplevelEnv from pycket.values import W_PromotableClosure lamb = expr_ast("(lambda (y) (set! y 2))") toplevel = ToplevelEnv() w_cl1 = lamb.interpret_simple(toplevel) assert isinstance(w_cl1, W_PromotableClosure) w_cl2 = lamb.interpret_simple(toplevel) assert w_cl1 is w_cl2 assert w_cl1.closure._get_list(0).toplevel_env() is toplevel def test_remove_let(): p = expr_ast("(let ([a 1]) a)") assert isinstance(p, Quote) p = expr_ast("(let ([g cons]) (g 5 5))") assert isinstance(p, App) p = expr_ast("(let ([a 1]) (if a + -))") assert isinstance(p, If) def test_reclambda(): # simple case: p = expr_ast("(letrec ([a (lambda () a)]) a)") assert isinstance(p, CaseLambda) assert p.recursive_sym is not None # immediate application p = expr_ast("(letrec ([a (lambda () a)]) (a))") assert isinstance(p.rator, CaseLambda) assert p.rator.recursive_sym is not None # immediate application p = expr_ast("(letrec ([a (lambda (b) (a b))]) (a 1))") assert isinstance(p.rator, CaseLambda) assert p.rator.recursive_sym is not None # immediate application, need a let because the variable appears not just # once (but not a letrec) p = expr_ast("(letrec ([a (lambda (b) (a b))]) (a (a 1)))") assert isinstance(p, Let) assert isinstance(p.rhss[0], CaseLambda) assert p.rhss[0].recursive_sym is not None def test_asts_know_surrounding_lambda(): from pycket.interpreter import ToplevelEnv from pycket.values import W_PromotableClosure caselam = expr_ast("(lambda (y a b) (if y a b))") lam = caselam.lams[0] assert lam.body[0].surrounding_lambda is lam caselam = expr_ast("(lambda (y) (lambda (z) (+ y z)))") lam = caselam.lams[0] inner_caselam = lam.body[0] assert inner_caselam.surrounding_lambda is lam inner_lam = inner_caselam.lams[0] assert inner_lam.body[0].surrounding_lambda is inner_lam
Python
0.000003
@@ -2826,58 +2826,8 @@ Env%0A - from pycket.values import W_PromotableClosure%0A
1a8d7797e691bd5959fc8f7cdc0371e39208aee7
Update version #
pyhindsight/__init__.py
pyhindsight/__init__.py
__author__ = "Ryan Benson" __version__ = "2.0.5" __email__ = "ryan@obsidianforensics.com"
Python
0
@@ -41,11 +41,11 @@ %222. -0.5 +1.0 %22%0A__
c8b89d104d5676c006b39825fcdb4a1e80f6515a
Update wingding
pyquirks/quirk_funcs.py
pyquirks/quirk_funcs.py
from random import sample _wdalpha = { "!": 9999, "\"": 9986, "#": 9985, "$": 128083, "%": 128365, "&": 128366, "'": 128367, "(": 9742, ")": 9990, "*": 128386, "+": 128387, ",": 128234, "-": 128235, ".": 128236, "/": 128237, "0": 128193, "1": 128194, "2": 128196, "3": 128463, "4": 128464, "5": 128452, "6": 8987, "7": 128430, "8": 128432, "9": 128434, ":": 128435, ";": 128436, "<": 128427, "=": 128428, ">": 9991, "?": 9997, "@": 128398, "A": 9996, "B": 128076, "C": 128077, "D": 128078, "E": 9756, "F": 9758, "G": 9757, "H": 9759, "I": 9995, "J": 9786, "K": 128528, "L": 9785, "M": 128163, "N": 9760, "O": 9872, "P": 127985, "Q": 9992, "R": 9788, "S": 128167, "T": 10052, "U": 128326, "V": 10014, "W": 128328, "X": 10016, "Y": 10017, "Z": 9770, "[": 9775, "\\": 2384, "]": 9784, "^": 9800, "_": 9801, "`": 9802, "{": 10048, "|": 10047, "}": 10077, "~": 10078, "a": 9803, "b": 9804, "c": 9805, "d": 9806, "e": 9807, "f": 9808, "g": 9809, "h": 9810, "i": 9811, "k": 38, "l": 9679, "m": 10061, "n": 9632, "o": 9633, "q": 10065, "r": 10066, "s": 11047, "t": 10731, "u": 9670, "v": 10070, "w": 11045, "x": 8999, "y": 9043, "z": 8984 } def lower(str): return str.lower() def reverse(str): return reverse(str) def upper(str): return str.upper() def scramble(text): return "".join(sample(text, len(text))) def capitalize(str): return str.capitalize() def wingding(str): f = "" for char in str: try: char = chr(_wdalpha[char]) except KeyError: pass f += char return f
Python
0
@@ -1276,16 +1276,42 @@ z%22: 8984 +, %22p%22: 128912, %22j%22: 128624 %0A %7D%0A%0A
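The wingding function above maps characters one at a time through the codepoint table; in Python 3 the same substitution is a single str.translate call over a dict keyed by ordinals, with unmapped characters passing through untouched:

```python
# Same lookup as the wingding() loop, over a slice of _wdalpha (Python 3):
wd = {'!': 9999, 'J': 9786, 'M': 128163, 'p': 128912, 'j': 128624}
table = {ord(k): chr(v) for k, v in wd.items()}

print('Jump!'.translate(table))  # 'u' and 'm' have no entry and pass through
```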
ec6191d63236a130e6a39f2383b7e8a6ae8ec672
Remove the nonexistent import.
pytask/profile/forms.py
pytask/profile/forms.py
import os from django import forms from registration.forms import RegistrationFormUniqueEmail from registration.models import RegistrationProfile from pytask.utils import make_key from pytask.profile.models import GENDER_CHOICES, Profile class CustomRegistrationForm(RegistrationFormUniqueEmail): """Used instead of RegistrationForm used by default django-registration backend, this adds aboutme, dob, gender, address, phonenum to the default django-registration RegistrationForm""" full_name = forms.CharField(required=True, max_length=50, label="Name as on your bank account", help_text="Any DD/Cheque will be issued on \ this name") aboutme = forms.CharField(required=True, widget=forms.Textarea, max_length=1000, label=u"About Me", help_text="A write up about yourself to aid the\ reviewer in judging your eligibility for a task.\ It can have your educational background, CGPA,\ field of interests etc.," ) dob = forms.DateField(help_text = "YYYY-MM-DD", required=True, label=u'date of birth') gender = forms.ChoiceField(choices = GENDER_CHOICES, required=True, label=u'gender') address = forms.CharField(required=True, max_length=200, widget=forms.Textarea, help_text="This \ information will be used while sending DD/Cheque") phonenum = forms.CharField(required=True, max_length=10, label="Phone Number") def clean_aboutme(self): """ Empty not allowed """ data = self.cleaned_data['aboutme'] if not data.strip(): raise forms.ValidationError("Please write something about\ yourself") return data def clean_address(self): """ Empty not allowed """ data = self.cleaned_data['address'] if not data.strip(): raise forms.ValidationError("Please enter an address") return data def clean_phonenum(self): """ should be of 10 digits """ data = self.cleaned_data['phonenum'] if (not data.strip()) or \ (data.strip("1234567890")) or \ (len(data)!= 10): raise forms.ValidationError("This is not a valid phone number") return data def save(self,profile_callback=None): new_user = RegistrationProfile.objects.create_inactive_user( username=self.cleaned_data['username'], password=self.cleaned_data['password1'], email=self.cleaned_data['email']) new_profile = Profile(user=new_user, aboutme=self.cleaned_data['aboutme'], dob=self.cleaned_data['dob'], gender=self.cleaned_data['gender'], address=self.cleaned_data['address'], phonenum=self.cleaned_data['phonenum'], uniq_key=make_key(Profile), ) new_profile.save() return new_user class CreateProfileForm(forms.ModelForm): class Meta: model = Profile exclude = ['pynts', 'rights'] class EditProfileForm(forms.ModelForm): class Meta: model = Profile fields = ['full_name', 'aboutme', 'gender', 'dob', 'address', 'phonenum'] def clean_aboutme(self): """ Empty not allowed """ data = self.cleaned_data['aboutme'] if not data.strip(): raise forms.ValidationError("Please write something about\ yourself") return data def clean_address(self): """ Empty not allowed """ data = self.cleaned_data['address'] if not data.strip(): raise forms.ValidationError("Please enter an address") return data def clean_phonenum(self): """ should be of 10 digits """ data = self.cleaned_data['phonenum'] if (not data.strip()) or \ (data.strip("1234567890")) or \ (len(data)!= 10): raise forms.ValidationError("This is not a valid phone number") return data
Python
0.000012
@@ -146,42 +146,8 @@ le%0A%0A -from pytask.utils import make_key%0A from
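Decoded, this hunk deletes the line "from pytask.utils import make_key" from the import block. Note that save() in old_contents still calls make_key(Profile); the companion change is not part of this record, so presumably the utility moved or the call was dropped elsewhere. A sketch of the resulting import block:

    import os

    from django import forms
    from registration.forms import RegistrationFormUniqueEmail
    from registration.models import RegistrationProfile

    from pytask.profile.models import GENDER_CHOICES, Profile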
51ad733a0e69dbb84969df9271a0a1631d67c8d2
Change the inf to nan in the height maps computed by height_rpc_move
python/triangulation.py
python/triangulation.py
#!/usr/bin/env python import numpy as np import common import homography_cropper def compute_height_map(rpc1, rpc2, H1, H2, disp, mask, height, rpc_err): """ Computes a height map from a disparity map, using rpc. Args: rpc1, rpc2: paths to the xml files H1, H2: paths to the files containing the homography matrices disp, mask: paths to the diparity and mask maps height: path to the output height map rpc_err: path to the output rpc_error of triangulation """ common.run("disp_to_h %s %s %s %s %s %s %s %s" % (rpc1, rpc2, H1, H2, disp, mask, height, rpc_err)) return def transfer_height_map(height, msk, H, rpc, x, y, w, h, zoom, out_height, out_msk): """ Transfer the heights computed on the rectified grid to the original Pleiades image grid. Args: height: path to the input height map (on the rectified grid) msk: path to the associated mask H: path to the file containing the rectifying homography matrix rpc: path to the xml file x, y, w, h: four integers defining the rectangular ROI in the original image. (x, y) is the top-left corner, and (w, h) are the dimensions of the rectangle. zoom: zoom factor (usually 1, 2 or 4) used to produce the input height map out_height: path to the output height map out_msk: path to the output mask """ A = common.matrix_translation(-x, -y) f = 1.0/zoom Z = np.diag([f, f, 1]) A = np.dot(Z, A) H_crop = common.tmpfile('.txt') np.savetxt(H_crop, A) common.run("height_rpc_move %s %s %s %s %s %s %s %s %d %d" % (rpc, H, height, msk, rpc, H_crop, out_height, out_msk, w*f, h*f)) return def colorize(crop_panchro, im_color, H, out_colorized): """ Colorizes a Pleiades gray crop using low-resolution color information. Args: crop_panchro: path to the panchro (ie gray) rectified crop im_color: path to the full color image (tiff or jp2) H: path to the file containing the coefficients of the rectifying homography, that was used to generate crop_panchro out_colorized: path to the output file """ # 1. Get a rectified and zoomed crop from the color image. It has to be # sampled on exactly the same grid as the panchro rectified crop. To do # that we compose the rectifying homography with a 4x zoom (because color # pleiades images have 4x lower resolution). # There is also a small horizontal translation (4 pixels at the panchro # resolution) H = np.loadtxt(H) H_zoom = np.array([[4, 0, -4], [0, 4, 0], [0, 0, 1]]) H = np.dot(H, H_zoom) w, h = common.image_size(crop_panchro) crop_ms = common.tmpfile('.tif') homography_cropper.crop_and_apply_homography(crop_ms, im_color, H, w, h) # convert rgbi to rgb and requantify between 0 and 255 crop_rgb = common.rgbi_to_rgb(crop_ms) #rgb = common.image_qeasy(crop_rgb, 300, 3000) #panchro = common.image_qeasy(crop_panchro, 300, 3000) rgb = common.image_qauto(crop_rgb) panchro = common.image_qauto(crop_panchro) # 2. Combine linearly the intensity and the color to obtain the result common.run('plambda %s %s "dup split + + / *" | qeasy 0 85 - %s' % (panchro, rgb, out_colorized)) return def compute_point_cloud(crop_colorized, heights, rpc, H, cloud): """ Computes a color point cloud from a height map. Args: crop_colorized: path to the colorized rectified crop heights: height map. Its size is the same as the crop_color image rpc: path to xml file containing RPC data for the current Pleiade image H: path to the file containing the coefficients of the rectifying homography cloud: path to the output points cloud (ply format) """ common.run("colormesh %s %s %s %s %s" % (crop_colorized, heights, rpc, H, cloud)) return
Python
0.000001
@@ -1439,24 +1439,66 @@ ask%0A %22%22%22%0A + # write the matrix associated to crop%0A A = comm @@ -1527,16 +1527,16 @@ -x, -y)%0A - f = @@ -1649,24 +1649,97 @@ t(H_crop, A) +%0A%0A # run the height_rpc_move binary%0A tmp_h = common.tmpfile('.tif') %0A common. @@ -1835,26 +1835,21 @@ H_crop, -out_height +tmp_h , out_ms @@ -1861,16 +1861,171 @@ f, h*f)) +%0A%0A # replace the -inf with nan%0A # implements: if isinf(x) then nan, else x%0A common.run('plambda %25s %22x isinf nan x if%22 %3E %25s' %25 (tmp_h, out_height)) %0A ret
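Decoded, the hunk makes transfer_height_map write the height_rpc_move output to a temporary .tif and then run the plambda expression "x isinf nan x if", mapping infinite pixels to nan before producing out_height. plambda operates on image files; purely as an illustration of the same per-pixel rule, a numpy equivalent (not part of the project) would be:

    import numpy as np

    def inf_to_nan(heights):
        # implements: if isinf(x) then nan, else x
        out = np.asarray(heights, dtype=float).copy()
        out[np.isinf(out)] = np.nan
        return out

    print(inf_to_nan([1.0, float('-inf'), 3.0]))  # [ 1. nan  3.]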
ff33ac85c6364915f647828692f881ab8c3431cc
Fix parameter URL.
pythonpro/core/views.py
pythonpro/core/views.py
from django.conf import settings from django.contrib.auth import login, update_session_auth_hash from django.contrib.auth.decorators import login_required from django.contrib.auth.forms import SetPasswordForm from django.contrib.auth.views import PasswordChangeView, PasswordResetView from django.http import HttpResponseRedirect from django.shortcuts import redirect, render from django.urls import reverse, reverse_lazy from django.views.generic import UpdateView from django_sitemaps import Sitemap from rolepermissions.checkers import has_role from pythonpro.core import facade as core_facade from pythonpro.core.forms import LeadForm, UserEmailForm, UserSignupForm, PythonProResetForm from pythonpro.core.models import User from pythonpro.domain import user_domain def index(request): if request.user.is_authenticated: return redirect(reverse('dashboard:home')) # Redirect retorna após campanha L7 # return redirect('https://pythonpro.com.br') return redirect('https://pythonpro.com.br/jornada-rumo-a-primeira-vaga-inscricao-l7-v1/?'+ 'utm_source=home&utm_medium=trafego-organico&utm_campaign=L7') def thanks(request): return render(request, 'core/lead_thanks.html', {}) @login_required def lead_change_password(request): if not has_role(request.user, 'lead'): return redirect(reverse('core:index')) if request.method == 'POST': form = SetPasswordForm(request.user, request.POST) if form.is_valid(): user = form.save() update_session_auth_hash(request, user) # Important! return redirect(reverse('core:thanks')) else: form = SetPasswordForm(request.user) return render(request, 'core/lead_change_password.html', { 'form': form }) def teck_talks(request): return render(request, 'core/tech_talks.html', {}) def podcast(request): return render(request, 'core/podcast.html', {}) @login_required def profile(request): return render(request, 'core/profile_detail.html', {}) def sitemap(request): map = Sitemap(build_absolute_uri=request.build_absolute_uri, ) named_views = [ 'core:index', 'core:lead_landing', 'checkout:bootcamp_lp', 'core:podcast', 'core:tech_talks', 'modules:index', 'launch:landing_page', 'launch:cpl1', 'launch:cpl2', 'launch:cpl3', ] for section in named_views: map.add(reverse(section), changefreq='weekly') return map.response( pretty_print=settings.DEBUG, ) class _ProfileUpdateName(UpdateView): model = User fields = ('first_name',) template_name = 'core/profile_name.html' success_url = reverse_lazy('core:profile') def get_object(self, queryset=None): return self.request.user profile_name = login_required(_ProfileUpdateName.as_view()) class _ProfileUpdateEmail(UpdateView): form_class = UserEmailForm template_name = 'core/profile_email.html' success_url = reverse_lazy('core:profile') def get_object(self, queryset=None): return self.request.user def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['user'] = self.request.user return kwargs profile_email = login_required(_ProfileUpdateEmail.as_view()) class _ProfileChangePassword(PasswordChangeView): template_name = 'core/profile_password.html' success_url = reverse_lazy('pages:leads_onboarding_page') profile_password = _ProfileChangePassword.as_view() def _lead_landing(request, template_name='core/lead_landing_page.html', form_action=None): user = request.user if user.is_authenticated and not user.is_superuser and core_facade.has_any_webdev_role(user): return HttpResponseRedirect(reverse('dashboard:home'), status=301) form_action = reverse('core:lead_form') if form_action is None else form_action form_action = 
f"{form_action}?{request.GET.urlencode(safe='&')}" return render(request, template_name, context={'form': LeadForm(), 'form_action': form_action}) def lead_landing(request): """ View with lead landing page :param request: :return: """ return _lead_landing(request) def lead_landing_lite(request): """ View with lead landing page lite version :param request: :return: """ return _lead_landing(request, template_name='core/lead_landing_lite_page.html') def lead_landing_with_no_offer(request): """ View with lead landing page normal version and no offer in sequence :param request: :return: """ return _lead_landing(request, form_action=reverse('core:lead_form_with_no_offer')) def programmer_week_ty(request): """ View with lead landing page :param request: :return: """ return render(request, 'core/lead_landing_page.html', context={'form': UserSignupForm()}) def _lead_form(request, *args, **kwargs): if request.method == 'GET': form = UserSignupForm() return render(request, 'core/lead_form_errors.html', context={'form': form}) source = request.GET.get('utm_source', default='unknown') first_name = request.POST.get('first_name') email = request.POST.get('email') tags = [kwargs.get('offer_tag', 'offer-funnel-0')] for key, value in request.GET.items(): if key.startswith('utm_'): tags.append(f"{key}={value}") try: user = user_domain.register_lead(first_name, email, source, tags=tags) except user_domain.UserCreationException as e: return render(request, 'core/lead_form_errors.html', context={'form': e.form}, status=400) login(request, user) return redirect(reverse('core:thanks')) def lead_form(request): return _lead_form(request, redirect_to_OTO=True, offer_tag='offer-funnel-0') def lead_form_with_no_offer(request): return _lead_form(request, redirect_to_OTO=False, offer_tag='offer-funnel-1') def linktree(request): return render(request, 'core/linktree.html', {}) class _PythonProResetView(PasswordResetView): form_class = PythonProResetForm password_reset = _PythonProResetView.as_view()
Python
0
@@ -1092,20 +1092,21 @@ _source= -home +iscas &utm_med
ac30ee4d7a5b91c07f18ab36eb8b1783f32f8045
Fix reading CPU Timestamps
tools/telemetry/telemetry/core/platform/android_platform_backend.py
tools/telemetry/telemetry/core/platform/android_platform_backend.py
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import logging from telemetry.core import exceptions from telemetry.core import platform from telemetry.core import util from telemetry.core.platform import proc_supporting_platform_backend # Get build/android scripts into our path. util.AddDirToPythonPath(util.GetChromiumSrcDir(), 'build', 'android') from pylib.perf import cache_control # pylint: disable=F0401 from pylib.perf import perf_control # pylint: disable=F0401 from pylib.perf import thermal_throttle # pylint: disable=F0401 try: from pylib.perf import surface_stats_collector # pylint: disable=F0401 except Exception: surface_stats_collector = None _HOST_APPLICATIONS = [ 'ipfw', ] class AndroidPlatformBackend( proc_supporting_platform_backend.ProcSupportingPlatformBackend): def __init__(self, adb, no_performance_mode): super(AndroidPlatformBackend, self).__init__() self._adb = adb self._surface_stats_collector = None self._perf_tests_setup = perf_control.PerfControl(self._adb) self._thermal_throttle = thermal_throttle.ThermalThrottle(self._adb) self._no_performance_mode = no_performance_mode self._raw_display_frame_rate_measurements = [] self._host_platform_backend = platform.CreatePlatformBackendForCurrentOS() self._can_access_protected_file_contents = \ self._adb.CanAccessProtectedFileContents() if self._no_performance_mode: logging.warning('CPU governor will not be set!') def IsRawDisplayFrameRateSupported(self): return True def StartRawDisplayFrameRateMeasurement(self): assert not self._surface_stats_collector # Clear any leftover data from previous timed out tests self._raw_display_frame_rate_measurements = [] self._surface_stats_collector = \ surface_stats_collector.SurfaceStatsCollector(self._adb) self._surface_stats_collector.Start() def StopRawDisplayFrameRateMeasurement(self): self._surface_stats_collector.Stop() for r in self._surface_stats_collector.GetResults(): self._raw_display_frame_rate_measurements.append( platform.Platform.RawDisplayFrameRateMeasurement( r.name, r.value, r.unit)) self._surface_stats_collector = None def GetRawDisplayFrameRateMeasurements(self): ret = self._raw_display_frame_rate_measurements self._raw_display_frame_rate_measurements = [] return ret def SetFullPerformanceModeEnabled(self, enabled): if self._no_performance_mode: return if enabled: self._perf_tests_setup.SetHighPerfMode() else: self._perf_tests_setup.SetDefaultPerfMode() def CanMonitorThermalThrottling(self): return True def IsThermallyThrottled(self): return self._thermal_throttle.IsThrottled() def HasBeenThermallyThrottled(self): return self._thermal_throttle.HasBeenThrottled() def GetSystemCommitCharge(self): for line in self._adb.RunShellCommand('dumpsys meminfo', log_result=False): if line.startswith('Total PSS: '): return int(line.split()[2]) * 1024 return 0 def GetCpuStats(self, pid): if not self._can_access_protected_file_contents: logging.warning('CPU stats cannot be retrieved on non-rooted device.') return {} return super(AndroidPlatformBackend, self).GetCpuStats(pid) def GetCpuTimestamp(self): if not self._can_access_protected_file_contents: logging.warning('CPU timestamp cannot be retrieved on non-rooted device.') return {} return super(AndroidPlatformBackend, self).GetCpuTimestamp() def GetMemoryStats(self, pid): self._adb.PurgeUnpinnedAshmem() memory_usage = self._adb.GetMemoryUsageForPid(pid)[0] return {'ProportionalSetSize': memory_usage['Pss'] * 1024, 
'SharedDirty': memory_usage['Shared_Dirty'] * 1024, 'PrivateDirty': memory_usage['Private_Dirty'] * 1024, 'VMPeak': memory_usage['VmHWM'] * 1024} def GetIOStats(self, pid): return {} def GetChildPids(self, pid): child_pids = [] ps = self._GetPsOutput(['pid', 'name']) for curr_pid, curr_name in ps: if int(curr_pid) == pid: name = curr_name for curr_pid, curr_name in ps: if curr_name.startswith(name) and curr_name != name: child_pids.append(int(curr_pid)) break return child_pids def GetCommandLine(self, pid): ps = self._GetPsOutput(['pid', 'name']) for curr_pid, curr_name in ps: if int(curr_pid) == pid: return curr_name raise exceptions.ProcessGoneException() def GetOSName(self): return 'android' def CanFlushIndividualFilesFromSystemCache(self): return False def FlushEntireSystemCache(self): cache = cache_control.CacheControl(self._adb) cache.DropRamCaches() def FlushSystemCacheForDirectory(self, directory, ignoring=None): raise NotImplementedError() def LaunchApplication(self, application, parameters=None): if application in _HOST_APPLICATIONS: self._host_platform_backend.LaunchApplication(application, parameters) return if not parameters: parameters = '' self._adb.RunShellCommand('am start ' + parameters + ' ' + application) def IsApplicationRunning(self, application): if application in _HOST_APPLICATIONS: return self._host_platform_backend.IsApplicationRunning(application) return len(self._adb.ExtractPid(application)) > 0 def CanLaunchApplication(self, application): if application in _HOST_APPLICATIONS: return self._host_platform_backend.CanLaunchApplication(application) return True def InstallApplication(self, application): if application in _HOST_APPLICATIONS: self._host_platform_backend.InstallApplication(application) return raise NotImplementedError( 'Please teach Telemetry how to install ' + application) def _GetFileContents(self, fname): if not self._can_access_protected_file_contents: logging.warning('%s cannot be retrieved on non-rooted device.' % fname) return '' return ''.join(self._adb.GetProtectedFileContents(fname, log_result=False)) def _GetPsOutput(self, columns, pid=None): assert columns == ['pid', 'name'] or columns == ['pid'], \ 'Only know how to return pid and name. Requested: ' + columns command = 'ps' if pid: command += ' -p %d' % pid ps = self._adb.RunShellCommand(command, log_result=False)[1:] output = [] for line in ps: data = line.split() curr_pid = data[1] curr_name = data[-1] if columns == ['pid', 'name']: output.append([curr_pid, curr_name]) else: output.append([curr_pid]) return output
Python
0.000017
@@ -6184,15 +6184,26 @@ rn ' +%5Cn '.join( +%0A self
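Decoded, the hunk edits _GetFileContents: the lines returned by GetProtectedFileContents apparently carry no trailing newlines (hence the fix), so joining them with '' mashed a multi-line /proc file into one unparseable string; the fix joins with '\n' instead. A toy illustration of why the CPU timestamp read broke:

    lines = ['cpu  100 20 30', 'cpu0 50 10 15']  # as returned by the device
    ''.join(lines)    # 'cpu  100 20 30cpu0 50 10 15' -> parser cannot split it
    '\n'.join(lines)  # restores the line structure /proc parsers expect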
cd6028a0dc1ea0b5268c5a8520f4e0fb668b8845
Change default of grid search n_trials to None, so the search will span all discrete combinations
metal/mmtl/aws/grid_search_mmtl.py
metal/mmtl/aws/grid_search_mmtl.py
""" Sample call: python metal/mmtl/aws/mmtl_aws.py --mode run --aws_access_key_id xxx --aws_secret_access_key xxx --keypath ~/personalkeyncalifornia.pem Sample output: ... Putting file output/configspace/config_1.json -> config Putting file output/configspace/config_0.json -> config Getting file config -> output/1/config Getting dir metal/checkpoint/ -> output/1/checkpointdir Putting file output/configspace/config_2.json -> config Getting file config -> output/0/config Getting dir metal/checkpoint/ -> output/0/checkpointdir Getting file config -> output/2/config Getting dir metal/checkpoint/ -> output/2/checkpointdir Results (venv-mmtl) maxlam@dawn6:/lfs/1/maxlam/metal$ ls output/ 0 0.out 1 1.out 2 2.out configspace (venv-mmtl) maxlam@dawn6:/lfs/1/maxlam/metal$ ls output/0 checkpointdir config stderr stdout (venv-mmtl) maxlam@dawn6:/lfs/1/maxlam/metal$ tail output/0/stdout Requirement already satisfied: pycparser in /home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages (from cffi>=1.1->bcrypt>=3.1.3->paramiko->-r metal/mmtl/requirements-mmtl.txt (line 15)) (2.18) Requirement already satisfied: webencodings in /home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages (from html5lib!=1.0b1,!=1.0b2,!=1.0b3,!=1.0b4,!=1.0b5,!=1.0b6,!=1.0b7,!=1.0b8,>=0.99999999pre->bleach->nbconvert->jupyter->-r metal/mmtl/requirements-mmtl.txt (line 10)) (0.5.1) /home/ubuntu/metal Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. Loading QNLI Dataset Could not find kwarg "device" in destination dict. Could not find kwarg "lr_freeze" in destination dict. Beginning train loop. Expecting a total of _approximately_ 3 examples and 3 batches per epoch from 1 tasks. [1.0 epo]: TRAIN:[loss=74.170] (venv-mmtl) maxlam@dawn6:/lfs/1/maxlam/metal$ """ import argparse import copy import datetime import json import os import random import time import numpy as np from metal.tuners.random_tuner import RandomSearchTuner from metal.tuners.tuner import ModelTuner from metal.utils import recursive_merge_dicts def create_command_dict(args, config_path, launch_args): COMMAND_PREFIX = ( "pkill -9 tensorboard;" # Kill pre-existing tensorboard "pkill -9 python;" # Kill all python processes "source activate pytorch_p36;" "export GLUEDATA=/home/ubuntu/glue/;" # Assumes ami has this here "rm -rf metal;" "git clone -b mmtl https://github.com/HazyResearch/metal.git;" "cd metal; source add_to_path.sh; pip install -r metal/mmtl/requirements-mmtl.txt;" f"git fetch --all; git checkout {args.commit_hash};" "mkdir logs;" " ( screen -dm tensorboard --logdir logs );" ) # COMMAND = "python metal/mmtl/launch.py --tasks QNLI --n_epochs 2 --log_every 0.25 --score_every 0.25 --max_len 256 --batch_size 8 --checkpoint_dir ./checkpoint --checkpoint_metric QNLI/valid/accuracy --checkpoint_metric_mode max --max_datapoints 32 --override_train_config ../config" # COMMAND = " ( python metal/mmtl/launch.py --tasks COLA,SST2,MNLI,RTE,WNLI,QQP,MRPC,STSB,QNLI --checkpoint_dir ./checkpoint --batch_size 4 --n_epochs 3 --max_datapoints 32 --override_train_config ../config 2>&1 | tee output ) " COMMAND = "python metal/mmtl/launch.py" for ky in launch_args.keys(): COMMAND += f" --{ky} {launch_args[ky]}" print(COMMAND) COMMAND = " ( " + COMMAND + " 2>&1 | tee running_output ) " return { "cmd": COMMAND_PREFIX + COMMAND, "files_to_put": [(config_path, "config")], "files_to_get": [("config", "config")], "dirs_to_get": [("metal/logs", "logdir")], } def generate_configs_and_commands(args, launch_args, 
search_space, n=10): configspace_path = "%s/configspace" % args.outputpath if not os.path.exists(configspace_path): os.makedirs(configspace_path) tuner = RandomSearchTuner(None, seed=time.time()) configs = tuner.config_generator(search_space, n, tuner.rng, True) command_dicts = [] for i, random_config in enumerate(configs): # Recursive merge dicts launch_args with sampled parameters config_to_use = recursive_merge_dicts( launch_args, random_config, misses="insert" ) # Add commit hash to config config_to_use["commit_hash"] = args.commit_hash # Write to directory config_path = "%s/config_%d.json" % (configspace_path, i) with open(config_path, "w") as f: json.dump(config_to_use, f) # Create command dict command_dicts.append(create_command_dict(args, config_path, config_to_use)) return command_dicts
Python
0
@@ -3733,18 +3733,20 @@ pace, n= -10 +None ):%0A c
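The entire diff is the default-argument change n=10 -> n=None in generate_configs_and_commands; per the subject, RandomSearchTuner.config_generator evidently treats n=None as "no cap", so a fully discrete search space is enumerated exhaustively. A sketch of that semantic with a hypothetical stand-in generator (not the metal API):

    from itertools import islice, product

    def config_generator(search_space, n=None):
        """Yield every discrete combination; truncate only when n is given."""
        keys = sorted(search_space)
        combos = (dict(zip(keys, vals))
                  for vals in product(*(search_space[k] for k in keys)))
        return combos if n is None else islice(combos, n)

    # list(config_generator({'lr': [1e-3, 1e-4], 'l2': [0, 1e-5]})) -> 4 configs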
c45fc8485935c39af869204f9fc6b0dd6bc0deb1
Move I/O outside of properties for light/tplink platform (#8699)
homeassistant/components/light/tplink.py
homeassistant/components/light/tplink.py
""" Support for TPLink lights. For more details about this component, please refer to the documentation at https://home-assistant.io/components/light.tplink/ """ import logging from homeassistant.const import (CONF_HOST, CONF_NAME) from homeassistant.components.light import ( Light, ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_KELVIN, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP) from homeassistant.util.color import \ color_temperature_mired_to_kelvin as mired_to_kelvin from homeassistant.util.color import \ color_temperature_kelvin_to_mired as kelvin_to_mired REQUIREMENTS = ['pyHS100==0.2.4.2'] _LOGGER = logging.getLogger(__name__) SUPPORT_TPLINK = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP) def setup_platform(hass, config, add_devices, discovery_info=None): """Initialise pyLB100 SmartBulb.""" from pyHS100 import SmartBulb host = config.get(CONF_HOST) name = config.get(CONF_NAME) add_devices([TPLinkSmartBulb(SmartBulb(host), name)], True) def brightness_to_percentage(byt): """Convert brightness from absolute 0..255 to percentage.""" return int((byt*100.0)/255.0) def brightness_from_percentage(percent): """Convert percentage to absolute value 0..255.""" return (percent*255.0)/100.0 class TPLinkSmartBulb(Light): """Representation of a TPLink Smart Bulb.""" def __init__(self, smartbulb, name): """Initialize the bulb.""" self.smartbulb = smartbulb # Use the name set on the device if not set if name is None: self._name = self.smartbulb.alias else: self._name = name self._state = None _LOGGER.debug("Setting up TP-Link Smart Bulb") @property def name(self): """Return the name of the Smart Bulb, if any.""" return self._name def turn_on(self, **kwargs): """Turn the light on.""" if ATTR_COLOR_TEMP in kwargs: self.smartbulb.color_temp = \ mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) if ATTR_KELVIN in kwargs: self.smartbulb.color_temp = kwargs[ATTR_KELVIN] if ATTR_BRIGHTNESS in kwargs: brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness or 255) self.smartbulb.brightness = brightness_to_percentage(brightness) self.smartbulb.state = self.smartbulb.BULB_STATE_ON def turn_off(self): """Turn the light off.""" self.smartbulb.state = self.smartbulb.BULB_STATE_OFF @property def color_temp(self): """Return the color temperature of this light in mireds for HA.""" if self.smartbulb.is_color: if (self.smartbulb.color_temp is not None and self.smartbulb.color_temp != 0): return kelvin_to_mired(self.smartbulb.color_temp) else: return None else: return None @property def brightness(self): """Return the brightness of this light between 0..255.""" return brightness_from_percentage(self.smartbulb.brightness) @property def is_on(self): """True if device is on.""" return self.smartbulb.state == \ self.smartbulb.BULB_STATE_ON def update(self): """Update the TP-Link Bulb's state.""" from pyHS100 import SmartPlugException try: self._state = self.smartbulb.state == \ self.smartbulb.BULB_STATE_ON except (SmartPlugException, OSError) as ex: _LOGGER.warning('Could not read state for %s: %s', self.name, ex) @property def supported_features(self): """Flag supported features.""" return SUPPORT_TPLINK
Python
0
@@ -1627,24 +1627,88 @@ tate = None%0A + self._color_temp = None%0A self._brightness = None%0A _LOG @@ -1750,16 +1750,16 @@ Bulb%22)%0A - %0A @pr @@ -2354,25 +2354,24 @@ brightness)%0A -%0A self @@ -2666,296 +2666,31 @@ -if self.smartbulb.is_color:%0A if (self.smartbulb.color_temp is not None and%0A self.smartbulb.color_temp != 0):%0A return kelvin_to_mired(self.smartbulb.color_temp)%0A else:%0A return None%0A else:%0A return None +return self._color_temp %0A%0A @@ -2812,50 +2812,14 @@ urn -brightness_from_percentage(self.smartbulb. +self._ brig @@ -2824,17 +2824,16 @@ ightness -) %0A%0A @p @@ -2921,69 +2921,14 @@ elf. -smartbulb.state == %5C%0A self.smartbulb.BULB_STATE_ON +_state %0A%0A @@ -3075,24 +3075,42 @@ elf._state = + (%0A self.smartb @@ -3126,55 +3126,405 @@ == -%5C%0A self.smartbulb.BULB_STATE_ON%0A +self.smartbulb.BULB_STATE_ON)%0A self._brightness = brightness_from_percentage(%0A self.smartbulb.brightness)%0A if self.smartbulb.is_color:%0A if (self.smartbulb.color_temp is not None and%0A self.smartbulb.color_temp != 0):%0A self._color_temp = kelvin_to_mired(%0A self.smartbulb.color_temp) %0A
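Decoded, the hunks cache device state so the Home Assistant properties stop doing network I/O: __init__ gains self._color_temp = None and self._brightness = None, the color_temp, brightness, and is_on properties collapse to returning self._color_temp, self._brightness, and self._state, and the body of update() (still inside its try/except SmartPlugException block) becomes:

    self._state = (
        self.smartbulb.state == self.smartbulb.BULB_STATE_ON)
    self._brightness = brightness_from_percentage(
        self.smartbulb.brightness)
    if self.smartbulb.is_color:
        if (self.smartbulb.color_temp is not None and
                self.smartbulb.color_temp != 0):
            self._color_temp = kelvin_to_mired(
                self.smartbulb.color_temp)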
82a9dc620cc20692e5b5c84381be38084f89ad75
Add device_class to Shelly cover domain (#46894)
homeassistant/components/shelly/cover.py
homeassistant/components/shelly/cover.py
"""Cover for Shelly.""" from aioshelly import Block from homeassistant.components.cover import ( ATTR_POSITION, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_SET_POSITION, SUPPORT_STOP, CoverEntity, ) from homeassistant.core import callback from . import ShellyDeviceWrapper from .const import COAP, DATA_CONFIG_ENTRY, DOMAIN from .entity import ShellyBlockEntity async def async_setup_entry(hass, config_entry, async_add_entities): """Set up cover for device.""" wrapper = hass.data[DOMAIN][DATA_CONFIG_ENTRY][config_entry.entry_id][COAP] blocks = [block for block in wrapper.device.blocks if block.type == "roller"] if not blocks: return async_add_entities(ShellyCover(wrapper, block) for block in blocks) class ShellyCover(ShellyBlockEntity, CoverEntity): """Switch that controls a cover block on Shelly devices.""" def __init__(self, wrapper: ShellyDeviceWrapper, block: Block) -> None: """Initialize light.""" super().__init__(wrapper, block) self.control_result = None self._supported_features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP if self.wrapper.device.settings["rollers"][0]["positioning"]: self._supported_features |= SUPPORT_SET_POSITION @property def is_closed(self): """If cover is closed.""" if self.control_result: return self.control_result["current_pos"] == 0 return self.block.rollerPos == 0 @property def current_cover_position(self): """Position of the cover.""" if self.control_result: return self.control_result["current_pos"] return self.block.rollerPos @property def is_closing(self): """Return if the cover is closing.""" if self.control_result: return self.control_result["state"] == "close" return self.block.roller == "close" @property def is_opening(self): """Return if the cover is opening.""" if self.control_result: return self.control_result["state"] == "open" return self.block.roller == "open" @property def supported_features(self): """Flag supported features.""" return self._supported_features async def async_close_cover(self, **kwargs): """Close cover.""" self.control_result = await self.block.set_state(go="close") self.async_write_ha_state() async def async_open_cover(self, **kwargs): """Open cover.""" self.control_result = await self.block.set_state(go="open") self.async_write_ha_state() async def async_set_cover_position(self, **kwargs): """Move the cover to a specific position.""" self.control_result = await self.block.set_state( go="to_pos", roller_pos=kwargs[ATTR_POSITION] ) self.async_write_ha_state() async def async_stop_cover(self, **_kwargs): """Stop the cover.""" self.control_result = await self.block.set_state(go="stop") self.async_write_ha_state() @callback def _update_callback(self): """When device updates, clear control result that overrides state.""" self.control_result = None super()._update_callback()
Python
0
@@ -102,32 +102,58 @@ ATTR_POSITION,%0A + DEVICE_CLASS_SHUTTER,%0A SUPPORT_CLOS @@ -2281,16 +2281,148 @@ atures%0A%0A + @property%0A def device_class(self) -%3E str:%0A %22%22%22Return the class of the device.%22%22%22%0A return DEVICE_CLASS_SHUTTER%0A%0A asyn
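Decoded, the two hunks add DEVICE_CLASS_SHUTTER to the homeassistant.components.cover import list and insert a new property right after supported_features:

    @property
    def device_class(self) -> str:
        """Return the class of the device."""
        return DEVICE_CLASS_SHUTTER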
f4d81f450a9c2627438dd4f310448e0d64f4541c
Fix tests
mkt/webapps/tests/test_fakedata.py
mkt/webapps/tests/test_fakedata.py
import collections from nose.tools import eq_, ok_ import mkt.site.tests from mkt.webapps.fakedata import (fake_app_names, generate_app_data, generate_app_from_spec) class TestAppGeneration(mkt.site.tests.TestCase): def test_tinyset(self): size = 4 data = list(generate_app_data(size)) eq_(len(data), size) ctr = collections.defaultdict(int) for appname, cat in data: ctr[cat] += 1 # Apps are binned into categories, at least 3 in each. eq_(ctr.values(), [4]) # Names are unique. eq_(len(set(appname for appname, cat in data)), size) # Size is smaller than name list, so no names end in numbers. ok_(not any(appname[-1].isdigit() for appname, cat in data)) def test_smallset(self): size = 60 data = list(generate_app_data(size)) eq_(len(data), size) ctr = collections.defaultdict(int) for appname, cat in data: ctr[cat] += 1 eq_(set(ctr.values()), set([3, 4])) eq_(len(set(appname for appname, cat in data)), size) ok_(not any(appname[-1].isdigit() for appname, cat in data)) def test_bigset(self): size = 300 data = list(generate_app_data(size)) eq_(len(data), size) ctr = collections.defaultdict(int) for appname, cat in data: ctr[cat] += 1 # Apps are spread between categories evenly - the difference between # the largest and smallest category is less than 2. ok_(max(ctr.values()) - min(ctr.values()) < 2) eq_(len(set(appname for appname, cat in data)), size) # Every name is used without a suffix. eq_(sum(1 for appname, cat in data if not appname[-1].isdigit()), len(fake_app_names)) # Every name is used with ' 1' as a suffix. eq_(sum(1 for appname, cat in data if appname.endswith(' 1')), len(fake_app_names)) def test_generate_hosted_app(self): appname = 'a test app' categories = ['books', 'music'] app = generate_app_from_spec( appname, categories, 'hosted', num_previews=3, num_ratings=4, num_locales=1, status='public') eq_(app.name, appname) eq_(app.categories, categories) eq_(app.status, 4) eq_(app.reload().total_reviews, 4) eq_(app.reviews.count(), 4) eq_(app.get_previews().count(), 3) def test_generate_packaged_app(self): appname = 'a test app' categories = ['books', 'music'] app = generate_app_from_spec( appname, categories, 'packaged', num_previews=3, num_ratings=4, num_locales=1, status='public', versions=['public', 'disabled', 'public']) eq_(app.name, appname) eq_(app.categories, categories) eq_(app.status, 4) eq_(app.reload().total_reviews, 4) eq_(app.reviews.count(), 4) eq_(app.get_previews().count(), 3) eq_(app.versions.count(), 3) eq_(app.latest_version.version, '1.2') def test_generate_privileged_app(self): appname = 'a test app' categories = ['books', 'music'] app = generate_app_from_spec( appname, categories, 'privileged', num_previews=3, num_ratings=4, num_locales=1, status='public', permissions=['storage'], versions=['public', 'disabled', 'public']) eq_(app.name, appname) eq_(app.categories, categories) eq_(app.status, 4) eq_(app.reload().total_reviews, 4) eq_(app.reviews.count(), 4) eq_(app.get_previews().count(), 3) eq_(app.versions.count(), 3) eq_(app.latest_version.version, '1.2')
Python
0.000001
@@ -2246,16 +2246,52 @@ 'public' +,%0A description='test app' )%0A @@ -2821,32 +2821,68 @@ bled', 'public'%5D +,%0A description='test app' )%0A eq_(ap @@ -3525,16 +3525,52 @@ public'%5D +,%0A description='test app' )%0A
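All three hunks make the same edit: each generate_app_from_spec call in the tests gains a description='test app' keyword, suggesting the app generator now expects a description. The first call, patched:

    app = generate_app_from_spec(
        appname, categories, 'hosted', num_previews=3, num_ratings=4,
        num_locales=1, status='public',
        description='test app')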
ea6f60838ae309e5fb0662b2416d3c4450be7823
Correct the straight function
design_of_computer_programs_cs212/lesson01/poker_game.py
design_of_computer_programs_cs212/lesson01/poker_game.py
def poker(hands): """Return the best hand: poker([hand,...]) => hand""" return max(hands, key=hand_rank) def hand_rank(hand): """define a rank for a specific hand""" ranks = card_ranks(hand) if straight(ranks) and flush(hand): # straight flush return (8, max(ranks)) elif kind(4, ranks): # 4 of a kind return (7, kind(4, ranks), kind(1, ranks)) elif kind(3, ranks) and kind(2, ranks): # full house return (6, kind(3, ranks), kind(2, ranks)) elif flush(hand): # flush return (5, ranks) elif straight(ranks): # straight return (4, max(ranks)) elif kind(3, ranks): # 3 of a kind return (3, kind(3, ranks), ranks) elif two_pair(ranks): # 2 pair return (2, two_pair(ranks), ranks) elif kind(2, ranks): # kind return (1, kind(2, ranks), ranks) else: # high card return (0, ranks) def card_ranks(cards): """ Return a list of the ranks, sorted with higher first""" ranks = ["--23456789TJQKA".index(r) for r, s in cards] ranks.sort(reverse=True) return ranks def straight(ranks): """ Return True if the ordered ranks from a 5 card straight""" return (max(ranks) - min(ranks) == 4) and len(set(ranks)) == 5 def flush(hand): """ Return True if all cards have the same suit""" suits = [s for r, s in hand] return len(set(suits)) == 1 def kind(n, ranks): """ Return the first rank that this hand has exactly n and return None otherwise""" for r in ranks: if ranks.count(r) == n: return r return None def two_pair(ranks): """If there are two pair, return the two ranks as a tuple: (highest, lowest); otherwise return None.""" pair_highest = kind(2, ranks) pair_lowest = kind(2, list(reversed(ranks))) if pair_highest and pair_highest != pair_lowest: return (pair_highest, pair_lowest) return None
Python
0.000437
@@ -1299,16 +1299,66 @@ return +%5B5, 4, 3, 2, 1%5D if ranks == %5B14, 5, 4, 3, 2%5D else ranks%0A%0A%0A
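Despite the subject, the decoded hunk patches card_ranks rather than straight(): the final return becomes conditional so the ace-low "wheel" A-2-3-4-5, which card_ranks otherwise reports as [14, 5, 4, 3, 2], is reranked to [5, 4, 3, 2, 1], and straight()'s max - min == 4 test then recognizes it:

    def card_ranks(cards):
        """ Return a list of the ranks, sorted with higher first"""
        ranks = ["--23456789TJQKA".index(r) for r, s in cards]
        ranks.sort(reverse=True)
        return [5, 4, 3, 2, 1] if ranks == [14, 5, 4, 3, 2] else ranks

    assert card_ranks(['AC', '2D', '3H', '4S', '5C']) == [5, 4, 3, 2, 1]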
3422967b65838ae7eed1f79e2b9295102d5c18bb
Fix flake8
hs_core/management/commands/check_bag.py
hs_core/management/commands/check_bag.py
# -*- coding: utf-8 -*- """ Generate metadata and bag for a resource from Django """ import os from django.core.management.base import BaseCommand from hs_core.models import BaseResource from hs_core.hydroshare.hs_bagit import create_bag_files from hs_core.tasks import create_bag_by_irods from django_irods.icommands import SessionException class Command(BaseCommand): help = "Create metadata files and bag for a resource." def add_arguments(self, parser): # a list of resource id's, or none to check all resources parser.add_argument('resource_ids', nargs='*', type=str) # Named (optional) arguments parser.add_argument( '--reset', action='store_true', # True for presence, False for absence dest='reset', # value is options['reset'] help='delete metadata and bag and start over' ) parser.add_argument( '--reset_metadata', action='store_true', # True for presence, False for absence dest='reset_metadata', # value is options['reset_metadata'] help='delete metadata files and start over' ) parser.add_argument( '--reset_bag', action='store_true', # True for presence, False for absence dest='reset_bag', # value is options['reset_bag'] help='delete bag and start over' ) parser.add_argument( '--generate', action='store_true', # True for presence, False for absence dest='generate', # value is options['generate'] help='force generation of metadata and bag' ) parser.add_argument( '--generate_metadata', action='store_true', # True for presence, False for absence dest='generate_metadata', # value is options['generate_metadata'] help='force generation of metadata and bag' ) parser.add_argument( '--generate_bag', action='store_true', # True for presence, False for absence dest='generate_bag', # value is options['generate_bag'] help='force generation of metadata and bag' ) def handle(self, *args, **options): if len(options['resource_ids']) > 0: # an array of resource short_id to check. 
for rid in options['resource_ids']: try: resource = BaseResource.objects.get(short_id=rid) istorage = resource.get_irods_storage() scimeta_path = os.path.join(resource.root_path, 'data', 'resourcemetadata.xml') if istorage.exists(scimeta_path): print("found {}".format(scimeta_path)) else: print("{} NOT FOUND".format(scimeta_path)) resmap_path = os.path.join(resource.root_path, 'data', 'resourcemap.xml') if istorage.exists(resmap_path): print("found {}".format(resmap_path)) else: print("{} NOT FOUND".format(resmap_path)) if istorage.exists(resource.bag_path): print("found bag {}".format(resource.bag_path)) else: print("bag {} NOT FOUND".format(resource.bag_path)) dirty = istorage.getAVU(resource.root_path, 'metadata_dirty') print("metadata_dirty is {}".format(str(dirty))) modified = istorage.getAVU(resource.root_path, 'bag_modified') print("bag_modified is {}".format(str(modified))) if options['generate']: # generate usable bag create_bag_files(resource) print("metadata generated for {} from Django".format(rid)) istorage.setAVU(resource.root_path, 'metadata_dirty', 'false') print("metadata_dirty set to false for {}".format(rid)) create_bag_by_irods(rid) print("bag generated for {} from iRODs".format(rid)) istorage.setAVU(resource.root_path, 'bag_modified', 'false') print("bag_modified set to false for {}".format(rid)) elif options['generate_metadata']: create_bag_files(resource) print("metadata generated for {} from Django".format(rid)) istorage.setAVU(resource.root_path, 'metadata_dirty', 'false') print("metadata_dirty set to false for {}".format(rid)) elif options['generate_bag']: create_bag_by_irods(rid) print("bag generated for {} from iRODs".format(rid)) istorage.setAVU(resource.root_path, 'bag_modified', 'false') print("bag_modified set to false for {}".format(rid)) elif options['reset']: # reset all data to pristine istorage.setAVU(resource.root_path, 'metadata_dirty', 'true') print("metadata_dirty set to true for {}".format(rid)) try: istorage.delete(resource.scimeta_path) print("metadata {} deleted".format(resource.scimeta_path)) except SessionException as ex: print("delete of {} failed: {}" .format(resource.scimeta_path, ex.stderr)) try: istorage.delete(resource.resmap_path) print("map {} deleted".format(resource.resmap_path)) except SessionException as ex: print("delete of {} failed: {}" .format(resource.resmap_path, ex.stderr)) istorage.setAVU(resource.root_path, 'bag_modified', 'true') print("bag_modified set to true for {}".format(rid)) try: istorage.delete(resource.bag_path) print("bag {} deleted".format(resource.bag_path)) except SessionException as ex: print("delete of {} failed: {}" .format(resource.bag_path, ex.stderr)) elif options['reset_metadata']: istorage.setAVU(resource.root_path, 'metadata_dirty', 'true') print("metadata_dirty set to true for {}".format(rid)) try: istorage.delete(resource.scimeta_path) print("metadata {} deleted".format(resource.scimeta_path)) except SessionException as ex: print("delete of {} failed: {}" .format(resource.scimeta_path, ex.stderr)) try: istorage.delete(resource.resmap_path) print("map {} deleted".format(resource.resmap_path)) except SessionException as ex: print("delete of {} failed: {}" .format(resource.resmap_path, ex.stderr)) elif options['reset_bag']: istorage.setAVU(resource.root_path, 'bag_modified', 'true') print("bag_modified set to true for {}".format(rid)) try: istorage.delete(resource.bag_path) print("bag {} deleted".format(resource.bag_path)) except SessionException as ex: print("delete of {} failed: {}" 
.format(resource.bag_path, ex.stderr)) except BaseResource.DoesNotExist: print("Resource with id {} NOT FOUND in Django".format(rid))
Python
0
@@ -3016,33 +3016,32 @@ - 'resourcemap.xml
56c3c373563a38991da72bc235d4e3e40e711968
Use extra space.
remove_duplicates_from_sorted_array.py
remove_duplicates_from_sorted_array.py
#! /usr/bin/env python3 """ http://oj.leetcode.com/problems/remove-duplicates-from-sorted-array/ Given a sorted array, remove the duplicates in place such that each element appear only once and return the new length. Do not allocate extra space for another array, you must do this in place with constant memory. For example, Given input array A = [1,1,2], Your function should return length = 2, and A is now [1,2]. Since Apr-22-2014 18:16 """ class Solution: # @param a list of integers # @return an integer def removeDuplicates(self, A): L = len(A) if L in (0, 1): return L else: i = 0 while i <= L - 2: if A[i] == A[i + 1]: A.remove(A[i]) L = len(A) i += 1 return len(A) if __name__ == '__main__': s = Solution() A = [] assert s.removeDuplicates(A) == 0 assert A == [] A = [1] assert s.removeDuplicates(A) == 1 assert A == [1] A = [1, 1, 2] assert s.removeDuplicates(A) == 2 assert A == [1, 2] A = [1, 1, 2, 3, 4, 4, 5, 5] assert s.removeDuplicates(A) == 5 assert A == [1, 2, 3, 4, 5] A = [1, 2, 3, 4, 5] assert s.removeDuplicates(A) == 5 assert A == [1, 2, 3, 4, 5]
Python
0.000008
@@ -549,17 +549,17 @@ s(self, -A +a ):%0A @@ -557,33 +557,50 @@ a):%0A -L +global A%0A l = len( -A +a )%0A if @@ -600,17 +600,17 @@ if -L +l in (0, @@ -632,17 +632,17 @@ return -L +l %0A @@ -682,26 +682,132 @@ -while i %3C= L - 2 +B = %5B%5D%0A while i %3C l:%0A if i == l - 1 :%0A + B.append(a%5Bi%5D)%0A break%0A @@ -825,17 +825,17 @@ if -A +a %5Bi%5D == -A +a %5Bi + @@ -863,48 +863,70 @@ -A.remove(A%5Bi%5D)%0A L = len(A +i += 1%0A else:%0A B.append(a%5Bi%5D )%0A @@ -931,39 +931,67 @@ + i += 1%0A + A = list(B)%0A retu @@ -1001,16 +1001,17 @@ len(A)%0A%0A +%0A if __nam
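Decoded, the rewrite drops the quadratic in-place A.remove() loop for a single pass that copies unique values into an auxiliary list B (the "extra space" of the subject, despite the problem statement forbidding it) and then rebinds the module-level A with a global statement, so the tests that inspect A afterwards still pass:

    def removeDuplicates(self, a):
        global A
        l = len(a)
        if l in (0, 1):
            return l
        else:
            i = 0
            B = []
            while i < l:
                if i == l - 1:
                    B.append(a[i])
                    break
                if a[i] == a[i + 1]:
                    i += 1
                else:
                    B.append(a[i])
                    i += 1
            A = list(B)
            return len(A)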
97dbd18e12094820be5985b9daec4ceab4d86116
Fix getfolders()
offlineimap/repository/LocalStatus.py
offlineimap/repository/LocalStatus.py
# Local status cache repository support # Copyright (C) 2002 John Goerzen # <jgoerzen@complete.org> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from Base import BaseRepository from offlineimap.folder.LocalStatus import LocalStatusFolder, magicline from offlineimap.folder.LocalStatusSQLite import LocalStatusSQLiteFolder import os import re class LocalStatusRepository(BaseRepository): def __init__(self, reposname, account): BaseRepository.__init__(self, reposname, account) self.directory = os.path.join(account.getaccountmeta(), 'LocalStatus') #statusbackend can be 'plain' or 'sqlite' backend = self.account.getconf('status_backend', 'plain') if backend == 'sqlite': self._backend = 'sqlite' self.LocalStatusFolderClass = LocalStatusSQLiteFolder self.directory += '-sqlite' elif backend == 'plain': self._backend = 'plain' self.LocalStatusFolderClass = LocalStatusFolder else: raise SyntaxWarning("Unknown status_backend '%s' for account '%s'" \ % (backend, account.name)) if not os.path.exists(self.directory): os.mkdir(self.directory, 0700) # self._folders is a list of LocalStatusFolders() self._folders = None def getsep(self): return '.' def getfolderfilename(self, foldername): """Return the full path of the status file""" # replace with 'dot' if final path name is '.' foldername = re.sub('(^|\/)\.$','\\1dot', foldername) return os.path.join(self.directory, foldername) def makefolder(self, foldername): """Create a LocalStatus Folder Empty Folder for plain backend. NoOp for sqlite backend as those are created on demand.""" # Invalidate the cache. self._folders = None if self._backend == 'sqlite': return filename = self.getfolderfilename(foldername) file = open(filename + ".tmp", "wt") file.write(magicline + '\n') file.close() os.rename(filename + ".tmp", filename) # Invalidate the cache. self._folders = None def getfolder(self, foldername): """Return the Folder() object for a foldername""" return self.LocalStatusFolderClass(self.directory, foldername, self, self.accountname, self.config) def getfolders(self): """Returns a list of ALL folders on this server. This is currently nowhere used in the code.""" if self._folders != None: return self._folders for folder in os.listdir(self.directory): self._folders = retval.append(self.getfolder(folder)) return self._folders def forgetfolders(self): """Forgets the cached list of folders, if any. Useful to run after a sync run.""" self._folders = None
Python
0
@@ -3242,87 +3242,26 @@ of -ALL folders on this server.%0A%0A This is currently nowhere used in the code +all cached folders .%22%22%22 @@ -3325,24 +3325,51 @@ f._folders%0A%0A + self._folders = %5B%5D%0A for @@ -3398,32 +3398,32 @@ elf.directory):%0A + self @@ -3435,17 +3435,8 @@ ders - = retval .app
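Decoded, the fix shortens the docstring to "Returns a list of all cached folders.", initializes self._folders = [] before the loop, and appends to it directly; the old body appended to an undefined retval and stored append()'s None return value in self._folders. The patched method:

    def getfolders(self):
        """Returns a list of all cached folders."""
        if self._folders != None:
            return self._folders

        self._folders = []
        for folder in os.listdir(self.directory):
            self._folders.append(self.getfolder(folder))
        return self._folders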
3ac4ceb808462a6a5702e7764148c7e9720d989f
Add on_delete parameter to all ForeignKey fields
modoboa_pfxadmin_migrate/models.py
modoboa_pfxadmin_migrate/models.py
# coding: utf-8 """postfixadmin models.""" # This is an auto-generated Django model module. # You'll have to do the following manually to clean this up: # * Rearrange models' order # * Make sure each model has one field with primary_key=True # Feel free to rename the models, but don't rename db_table values or # field names. # # Also note: You'll have to insert the output of 'django-admin.py # sqlcustom [appname]' into your database. from django.db import models class Admin(models.Model): username = models.CharField(max_length=255, primary_key=True) password = models.CharField(max_length=255) created = models.DateTimeField() modified = models.DateTimeField() active = models.BooleanField() class Meta: db_table = u'admin' app_label = 'pfxadmin_migrate' managed = False class Alias(models.Model): address = models.CharField(max_length=255, primary_key=True) goto = models.TextField() domain = models.CharField(max_length=255) created = models.DateTimeField() modified = models.DateTimeField() active = models.BooleanField() class Meta: db_table = u'alias' app_label = 'pfxadmin_migrate' managed = False class AliasDomain(models.Model): alias_domain = models.CharField(max_length=255, primary_key=True) target_domain = models.CharField(max_length=255) created = models.DateTimeField() modified = models.DateTimeField() active = models.BooleanField() class Meta: db_table = u'alias_domain' app_label = 'pfxadmin_migrate' managed = False class Config(models.Model): id = models.IntegerField(primary_key=True) name = models.CharField(unique=True, max_length=60) value = models.CharField(max_length=60) class Meta: db_table = u'config' app_label = 'pfxadmin_migrate' managed = False class Domain(models.Model): domain = models.CharField(max_length=255, primary_key=True) description = models.CharField(max_length=255) aliases = models.IntegerField() mailboxes = models.IntegerField() maxquota = models.BigIntegerField() quota = models.BigIntegerField() transport = models.CharField(max_length=255) backupmx = models.IntegerField() created = models.DateTimeField() modified = models.DateTimeField() active = models.BooleanField() admins = models.ManyToManyField(Admin, through='DomainAdmins') class Meta: db_table = u'domain' app_label = 'pfxadmin_migrate' managed = False class DomainAdmins(models.Model): username = models.ForeignKey(Admin, db_column='username') domain = models.ForeignKey(Domain, db_column='domain') created = models.DateTimeField(primary_key=True) active = models.BooleanField() class Meta: db_table = u'domain_admins' app_label = 'pfxadmin_migrate' managed = False class Fetchmail(models.Model): id = models.IntegerField(primary_key=True) mailbox = models.CharField(max_length=255) src_server = models.CharField(max_length=255) src_auth = models.CharField(max_length=33, blank=True) src_user = models.CharField(max_length=255) src_password = models.CharField(max_length=255) src_folder = models.CharField(max_length=255) poll_time = models.IntegerField() fetchall = models.IntegerField() keep = models.IntegerField() protocol = models.CharField(max_length=12, blank=True) usessl = models.IntegerField() extra_options = models.TextField(blank=True) returned_text = models.TextField(blank=True) mda = models.CharField(max_length=255) date = models.DateTimeField() class Meta: db_table = u'fetchmail' app_label = 'pfxadmin_migrate' managed = False class Log(models.Model): timestamp = models.DateTimeField() username = models.CharField(max_length=255) domain = models.CharField(max_length=255) action = 
models.CharField(max_length=255) data = models.TextField() class Meta: db_table = u'log' app_label = 'pfxadmin_migrate' managed = False class Mailbox(models.Model): username = models.CharField(max_length=255, primary_key=True) password = models.CharField(max_length=255) name = models.CharField(max_length=255) maildir = models.CharField(max_length=255) quota = models.BigIntegerField() local_part = models.CharField(max_length=255) domain = models.CharField(max_length=255) created = models.DateTimeField() modified = models.DateTimeField() active = models.BooleanField() class Meta: db_table = u'mailbox' app_label = 'pfxadmin_migrate' managed = False class Quota(models.Model): username = models.CharField(max_length=255, primary_key=True) path = models.CharField(max_length=100, primary_key=True) current = models.BigIntegerField(null=True, blank=True) class Meta: db_table = u'quota' app_label = 'pfxadmin_migrate' managed = False class Quota2(models.Model): username = models.CharField(max_length=100, primary_key=True) bytes = models.BigIntegerField() messages = models.IntegerField() class Meta: db_table = u'quota2' app_label = 'pfxadmin_migrate' managed = False class Vacation(models.Model): email = models.CharField(max_length=255) subject = models.CharField(max_length=255) body = models.TextField() cache = models.TextField() domain = models.CharField(max_length=255) created = models.DateTimeField() active = models.BooleanField() class Meta: db_table = u'vacation' app_label = 'pfxadmin_migrate' managed = False class VacationNotification(models.Model): on_vacation = models.ForeignKey(Vacation, db_column='on_vacation') notified = models.CharField(max_length=255, primary_key=True) notified_at = models.DateTimeField() class Meta: db_table = u'vacation_notification' app_label = 'pfxadmin_migrate' managed = False
Python
0
@@ -471,16 +471,62 @@ models%0A +from django.db.models.deletion import CASCADE%0A %0A%0Aclass @@ -2699,16 +2699,68 @@ sername' +,%0A on_delete=CASCADE )%0A do @@ -2810,16 +2810,35 @@ 'domain' +, on_delete=CASCADE )%0A cr @@ -5972,16 +5972,71 @@ acation' +,%0A on_delete=CASCADE )%0A no
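Decoded, the hunks import CASCADE from django.db.models.deletion and thread on_delete=CASCADE into the three ForeignKey fields (DomainAdmins.username, DomainAdmins.domain, and VacationNotification.on_vacation), the argument Django 2.0 made mandatory. The patched DomainAdmins fields:

    from django.db.models.deletion import CASCADE

    username = models.ForeignKey(Admin, db_column='username',
                                 on_delete=CASCADE)
    domain = models.ForeignKey(Domain, db_column='domain', on_delete=CASCADE)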
e16d9bb8e384f29017e588ae347ae000318aeed8
Remove print statement
ddsc_worker/tasks.py
ddsc_worker/tasks.py
from __future__ import absolute_import import gzip import logging import os import shutil import string import time from celery.signals import after_setup_task_logger from celery.utils.log import get_task_logger from django.conf import settings #from pandas.io.parsers import read_csv from ddsc_core.models import Timeseries from ddsc_logging.handlers import DDSCHandler from tslib.readers import PiXmlReader from ddsc_worker.celery import celery from ddsc_worker.import_auth import get_usr_by_folder from ddsc_worker.import_auth import get_usr_by_ip #from ddsc_worker.importer import data_delete from ddsc_worker.importer import data_move from ddsc_worker.importer import file_ignored from ddsc_worker.importer import import_csv from ddsc_worker.importer import import_file from ddsc_worker.importer import import_geotiff #from ddsc_worker.importer import write2_cassandra pd = getattr(settings, 'IMPORTER_PATH') @after_setup_task_logger.connect def setup_ddsc_task_logger(**kwargs): """Log records in the ddsc_worker package to RabbitMQ. The logging level is inherited from the root logger (and will be `WARNING` if you accept the default when running Celery as a daemon). Records will be logged to a topic exchange (ddsc.log). """ handler = DDSCHandler(celery.conf['BROKER_URL']) logger = logging.getLogger('ddsc_worker') logger.addHandler(handler) logger = get_task_logger(__name__) @celery.task def add(x, y): logger.debug("Adding %r + %r" % (x, y)) time.sleep(9) return x + y @celery.task def mul(x, y): logger.info("Multiplying %r * %r" % (x, y)) time.sleep(3) return x * y @celery.task(ignore_result=True) def import_pi_xml(src): logger.info("Importing %r" % src) reader = PiXmlReader(src) for md, df in reader.get_series(): code = md['header']['parameterId'] ts, _ = Timeseries.objects.get_or_create(code=code) ts.set_events(df) ts.save() return src @celery.task(ignore_result=True) def move(src, dst): logger.info("Moving %r to %r" % (src, dst)) shutil.move(src, dst) if os.path.isdir(dst): return os.path.join(dst, os.path.split(src)[1]) else: return dst @celery.task(ignore_result=True) def compress(src): logger.info("Gzipping %r" % src) dst = src + ".gz" with open(src, "rb") as f_in: with gzip.open(dst, "wb") as f_out: f_out.writelines(f_in) os.remove(src) return dst # The lmw importing task implementation yet to be decided #@celery.task #def import_lmw(src, fileName): # date_spec = {"timedate": [0, 1]} # tsOBJ = read_csv(src, # skiprows=6, parse_dates=date_spec, # sep=";", index_col=0, header=None) # # tsOBJ = tsOBJ.rename( # columns={2: 'location', 3: 'met', 4: 'q_flag', 5: 'value'} # ) # # f = open(src, 'r') # # str = f.readline() # strlist = str.split('=') # data_bank = strlist[1].replace("\r\n", '') # # str = f.readline() # strlist = str.split('=') # location = strlist[1].replace("\r\n", '') # # str = f.readline() # strlist = str.split('=') # waarnemingsgroepcode = strlist[1].replace("\r\n", '') # # str = f.readline() # strlist = str.split('=') # x_cord = strlist[1].replace("\r\n", '') # # str = f.readline() # strlist = str.split('=') # y_cord = strlist[1].replace("\r\n", '') # # i = 0 # tsOBJ['flag'] = 'None' # for row in tsOBJ.iterrows(): # if tsOBJ.q_flag[i] in [10, 30, 50, 70]: # tsOBJ['flag'][i] = '0' # i += 1 # elif tsOBJ.q_flag[i] in [2, 22, 24, 28, 42, 44, 48, 62, 68]: # tsOBJ['flag'][i] = '3' # i += 1 # else: # tsOBJ['flag'][i] = '6' # i += 1 # logger.info("[x] %r _validated" % (src)) # tsOBJ = tsOBJ.tz_localize('UTC') # del tsOBJ['location'] # del tsOBJ['met'] # del tsOBJ['q_flag'] # # st = 
write2_cassandra(tsOBJ, src) # # data_delete(st, src) @celery.task def new_file_detected(pathDir, fileName): src = pathDir + fileName print src fileDirName, fileExtension = os.path.splitext(src) fileExtension = string.lower(fileExtension) usr = get_usr_by_folder(pathDir) if usr == 0: data_move(src, (pd['storage_base_path'] + pd['rejected_file'])) return else: logger.info('[x] start importing: %r' % src) logger.info('By User: %r' % usr.username) if fileExtension == ".filepart": fileName = fileName.replace(".filepart", "") src = pathDir + fileName fileDirName, fileExtension = os.path.splitext(src) if fileExtension == ".csv": import_csv(src, usr.id) elif (fileExtension == ".png") or \ (fileExtension == ".jpg") or \ fileExtension == ".jpeg": dst = pd['storage_base_path'] + pd['image'] import_file(pathDir, fileName, dst, usr.id) elif fileExtension == ".avi" or \ fileExtension == ".wmv": dst = pd['storage_base_path'] + pd['video'] import_file(pathDir, fileName, dst, usr.id) elif fileExtension == ".pdf": dst = pd['storage_base_path'] + pd['pdf'] import_file(pathDir, fileName, dst, usr.id) elif (fileExtension == ".tif" or fileExtension == ".tiff"): dst = pd['storage_base_path'] + pd['geotiff'] import_geotiff(pathDir, fileName, dst, usr.id) else: file_ignored.delay(src, fileExtension) @celery.task def new_socket_detected(pathDir, fileName): src = pathDir + fileName usr = get_usr_by_ip(fileName) if usr is False: data_move(src, (pd['storage_base_path'] + pd['rejected_file'])) raise Exception("[x] %r _FAILED to be imported" % src) return logger.info('[x] start importing: %r' % src) logger.info('By User: %r' % usr.username) import_csv(src, usr.id)
Python
0.007015
@@ -4144,22 +4144,8 @@ ame%0A - print src%0A
c1cbdf20e6c109ff1586f663cab7e24f1716af08
Make remove-if-exists function public
opwen_email_server/utils/temporary.py
opwen_email_server/utils/temporary.py
from contextlib import contextmanager from contextlib import suppress from os import close from os import remove from tempfile import mkstemp from typing import Generator def create_tempfilename() -> str: file_descriptor, filename = mkstemp() close(file_descriptor) return filename @contextmanager def removing(path: str) -> Generator[str, None, None]: try: yield path finally: _remove_if_exists(path) def _remove_if_exists(path: str): with suppress(FileNotFoundError): remove(path)
Python
0.000217
@@ -411,17 +411,16 @@
-_
 remove_i
@@ -440,17 +440,16 @@
 )%0A%0A%0Adef 
-_
 remove_i
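Decoded: the two hunks drop the leading underscore at both the call site and the definition, making the helper public. A hedged sketch of the result, reconstructed from the diff rather than the target file:

@contextmanager
def removing(path: str) -> Generator[str, None, None]:
    try:
        yield path
    finally:
        remove_if_exists(path)  # sketch: renamed from _remove_if_exists


def remove_if_exists(path: str):  # sketch: helper is now public
    with suppress(FileNotFoundError):
        remove(path)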
f238d2f036d79cd9d192b09b05575a71864fb682
API tests should tearDown in the correct order
moniker/tests/test_api/test_v1/__init__.py
moniker/tests/test_api/test_v1/__init__.py
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from moniker.openstack.common import log as logging
from moniker.api.v1 import factory
from moniker.api.auth import NoAuthContextMiddleware
from moniker.tests.test_api import ApiTestCase

LOG = logging.getLogger(__name__)


class ApiV1Test(ApiTestCase):
    __test__ = False

    def setUp(self):
        super(ApiV1Test, self).setUp()

        # Create a Flask application
        self.app = factory({})

        # Inject the NoAuth middleware
        self.app.wsgi_app = NoAuthContextMiddleware(self.app.wsgi_app)

        # Obtain a test client
        self.client = self.app.test_client()

        # Create and start an instance of the central service
        self.central_service = self.get_central_service()
        self.central_service.start()

    def tearDown(self):
        super(ApiV1Test, self).tearDown()
        self.central_service.stop()
Python
0.999982
@@ -1409,73 +1409,73 @@
 s
-uper(ApiV1Test, self).tearDown()%0A        self.central_service.stop
+elf.central_service.stop()%0A        super(ApiV1Test, self).tearDown
 ()%0A
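Decoded: the hunk swaps the two statements so the central service is stopped before the parent class tears down. A hedged sketch of the resulting method, reconstructed from the diff rather than the target file:

    def tearDown(self):
        # sketch: stop our service first, then let the parent clean up
        self.central_service.stop()
        super(ApiV1Test, self).tearDown()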
baf149711302fab8a29f32316cc78d7bd3a0f94f
Enable heartbeats by default for non-clustered agents (#385)
cloudify/broker_config.py
cloudify/broker_config.py
########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
#    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#    * See the License for the specific language governing permissions and
#    * limitations under the License.

# AMQP broker configuration for agents and manager
# Primarily used by celery, so provided with variables it understands

from __future__ import absolute_import

import json
import os
import ssl

from cloudify.constants import BROKER_PORT_SSL, BROKER_PORT_NO_SSL

workdir_path = os.getenv('CELERY_WORK_DIR')
if workdir_path is None:
    # We are not in an appropriately configured celery environment
    config = {}
else:
    conf_file_path = os.path.join(workdir_path, 'broker_config.json')
    if os.path.isfile(conf_file_path):
        with open(conf_file_path) as conf_handle:
            conf_file = conf_handle.read()
            config = json.loads(conf_file)
    else:
        config = {}

# Provided as variables for retrieval by amqp_client and logger as required
broker_cert_path = config.get('broker_cert_path', '')
broker_username = config.get('broker_username', 'guest')
broker_password = config.get('broker_password', 'guest')
broker_hostname = config.get('broker_hostname', 'localhost')
broker_vhost = config.get('broker_vhost', '/')
broker_ssl_enabled = config.get('broker_ssl_enabled', False)
broker_port = BROKER_PORT_SSL if broker_ssl_enabled else BROKER_PORT_NO_SSL

# only enable heartbeat by default for agents connected to a cluster
DEFAULT_HEARTBEAT = 30 if config.get('cluster') else None
if os.name == 'nt':
    # celery doesn't support broker_heartbeat on windows
    broker_heartbeat = None
else:
    broker_heartbeat = config.get('broker_heartbeat', DEFAULT_HEARTBEAT)

if broker_ssl_enabled:
    BROKER_USE_SSL = {
        'ca_certs': broker_cert_path,
        'cert_reqs': ssl.CERT_REQUIRED,
    }

if broker_heartbeat:
    options = '?heartbeat={heartbeat}'.format(heartbeat=broker_heartbeat)
else:
    options = ''

# BROKER_URL is held in the config to avoid the password appearing
# in ps listings
URL_TEMPLATE = \
    'amqp://{username}:{password}@{hostname}:{port}/{vhost}{options}'
if config.get('cluster'):
    BROKER_URL = ';'.join(URL_TEMPLATE.format(username=broker_username,
                                              password=broker_password,
                                              hostname=node_ip,
                                              port=broker_port,
                                              vhost=broker_vhost,
                                              options=options)
                          for node_ip in config['cluster'])
else:
    BROKER_URL = URL_TEMPLATE.format(
        username=broker_username,
        password=broker_password,
        hostname=broker_hostname,
        port=broker_port,
        vhost=broker_vhost,
        options=options
    )

# celery will not use the failover strategy if there is only one broker url;
# we need it to try and failover even with one initial manager, because
# another node might've been added dynamically, while the worker was already
# running; we add an empty broker url so that celery always sees at least two -
# the failover strategy we're using (defined in cloudify_agent.app) filters out
# the empty one
BROKER_URL += ';'

CELERY_RESULT_BACKEND = BROKER_URL
CELERY_TASK_RESULT_EXPIRES = 600
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_ACKS_LATE = False
Python
0
@@ -1919,43 +1919,8 @@
 = 30
- if config.get('cluster') else None
 %0Aif
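Decoded: the hunk removes the conditional expression, so the 30-second heartbeat default now applies whether or not the agent is connected to a cluster. A hedged sketch of the resulting line, reconstructed from the diff rather than the target file:

# sketch: heartbeat now defaults to 30 for clustered and non-clustered agents alike
DEFAULT_HEARTBEAT = 30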
94763abaf573bfd25cad06da0cffc6b94a7dedc8
Fix a flaw in the old implementation of checking whether the state has changed.
pervert/management/commands/pervert_migrate.py
pervert/management/commands/pervert_migrate.py
import json

from django.core.management.base import BaseCommand, CommandError

from pervert.models import AbstractPervert, SchemaState, PervertError


class Command(BaseCommand):
    help = "Registers new schema for Pervert-controlled models"

    def handle(self, *args, **options):
        states = []
        print "Reading the schema of Pervert-controlled models..."
        state_text = ""
        for cl in AbstractPervert.__subclasses__():
            state = {
                "app_label": cl._meta.app_label,
                "model": cl._meta.object_name,
                "fields": [],
                "fks": []
            }
            state_text += "%s.models.%s\n" % (state["app_label"], state["model"],)
            for field in cl._meta.fields:
                state_text += " * %s\n" % field.name
                if field.name == "uid":
                    continue
                if field.__class__.__name__ == "ForeignKey":
                    state["fks"].append(field.name)
                else:
                    state["fields"].append(field.name)
            # Sort to make sure there is a unique json representation of each state
            state["fields"].sort()
            state["fks"].sort()
            states.append(state)

        jsonstate = json.dumps(states)

        # If the json is identical to the last saved state
        if SchemaState.objects.count() and \
            SchemaState.objects.order_by("-when")[0].state == jsonstate:
            print "The state hasn't changed, nothing to do."
        else:
            # Save new state
            ss = SchemaState(state = json.dumps(states))
            ss.save()
            print state_text + "SchemaState saved on %s" % ss.when
Python
0.000002
@@ -1168,130 +1168,24 @@
 tate
-%5B%22fields%22%5D.sort()%0A            state%5B%22fks%22%5D.sort()%0A            states.append(state)%0A%0A        jsonstate = json.dumps(states)
+s.append(state)%0A
 %0A
@@ -1297,16 +1297,27 @@
+json.loads(
 SchemaSt
@@ -1358,21 +1358,19 @@
 tate
+)
  == 
-json
 state
+s
 :%0A
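Decoded: the hunks drop the list sorting and the serialized `jsonstate`, then parse the stored JSON and compare it to the in-memory list, fixing the flawed string comparison. A hedged sketch of the resulting check, reconstructed from the diff rather than the target file:

            # sketch: no more sorting; the structures are compared directly
            states.append(state)

        # sketch: compare parsed JSON instead of raw strings
        if SchemaState.objects.count() and \
            json.loads(SchemaState.objects.order_by("-when")[0].state) == states:
            print "The state hasn't changed, nothing to do."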
24c83211588ac71492640ce43e3a893e05466a54
Change old membership migration to null
amy/workshops/migrations/0065_multiple_memberships.py
amy/workshops/migrations/0065_multiple_memberships.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('workshops', '0064_membership'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='host',
            name='membership',
        ),
        migrations.AddField(
            model_name='membership',
            name='host',
            # the default value of 1 here doesn't break anything, because
            # migrations 0064-0065 should be applied together
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT,
                                    default=1, to='workshops.Host'),
            preserve_default=False,
        ),
    ]
Python
0
@@ -706,16 +706,27 @@
 ps.Host'
+, null=True
 ),%0A
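Decoded: the hunk appends `null=True` to the foreign key so the old membership column may hold no value. A hedged sketch of the resulting field declaration, reconstructed from the diff rather than the target file:

            # sketch: the FK is now nullable
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT,
                                    default=1, to='workshops.Host', null=True),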
c267a580ca2179722d31941f4d02e2c67093769b
Add temporary streaming mechanism for output files
dockci/views/build.py
dockci/views/build.py
""" Views related to build management """ import logging import mimetypes import os.path import re from flask import (abort, flash, redirect, render_template, request, Response, url_for, ) from dockci.models.build import Build from dockci.models.job import Job from dockci.server import APP from dockci.util import is_valid_github @APP.route('/jobs/<job_slug>/builds/<build_slug>', methods=('GET',)) def build_view(job_slug, build_slug): """ View to display a build """ job = Job(slug=job_slug) build = Build(job=job, slug=build_slug) return render_template('build.html', build=build) @APP.route('/jobs/<job_slug>/builds/new', methods=('GET', 'POST')) def build_new_view(job_slug): """ View to create a new build """ job = Job(slug=job_slug) if request.method == 'POST': build = Build(job=job) build.repo = job.repo build_url = url_for('build_view', job_slug=job_slug, build_slug=build.slug) if 'X-Github-Event' in request.headers: if not job.github_secret: logging.warn("GitHub webhook secret not setup") abort(403) if not is_valid_github(job.github_secret): logging.warn("Invalid GitHub payload") abort(403) if request.headers['X-Github-Event'] == 'push': push_data = request.json build.commit = push_data['head_commit']['id'] else: logging.debug("Unknown GitHub hook '%s'", request.headers['X-Github-Event']) abort(501) build.save() build.queue() return build_url, 201 else: build.commit = request.form['commit'] if not re.match(r'[a-fA-F0-9]{1,40}', request.form['commit']): flash(u"Invalid git commit hash", 'danger') return render_template('build_new.html', build=build) build.save() build.queue() flash(u"Build queued", 'success') return redirect(build_url, 303) return render_template('build_new.html', build=Build(job=job)) @APP.route('/jobs/<job_slug>/builds/<build_slug>/output/<filename>', methods=('GET',)) def build_output_view(job_slug, build_slug, filename): """ View to download some build output """ job = Job(slug=job_slug) build = Build(job=job, slug=build_slug) # TODO possible security issue opending files from user input like this data_file_path = os.path.join(*build.build_output_path() + [filename]) if not os.path.isfile(data_file_path): abort(404) def loader(): """ Generator to stream the log file """ with open(data_file_path, 'rb') as handle: while True: data = handle.read(1024) yield data if len(data) == 0: return mimetype, _ = mimetypes.guess_type(filename) if mimetype is None: mimetype = 'application/octet-stream' return Response(loader(), mimetype=mimetype)
Python
0
@@ -92,16 +92,30 @@
 mport re
+%0Aimport select
 %0A%0Afrom f
@@ -3108,16 +3108,17 @@
 ld data%0A
+
@@ -3125,16 +3125,280 @@
+#if build.state == 'running' and filename == %22%25s.log%22 %25 build.build_stage_slugs%5B-1%5D:%0A                if filename == %22%25s.log%22 %25 build.build_stage_slugs%5B-1%5D:%0A                    select.select((handle,), (), (), 2)%0A                    build.load()%0A%0A                el
 if len(d
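Decoded: the hunks import `select`, then make the log generator pause for more output (and reload the build) while the final stage's log is still being written, instead of returning immediately at EOF. A hedged sketch of the resulting loader, reconstructed from the diff; exact wrapping in the target file is assumed:

    def loader():
        """
        Generator to stream the log file
        """
        with open(data_file_path, 'rb') as handle:
            while True:
                data = handle.read(1024)
                yield data

                # sketch: wait briefly for more output on the live log
                if filename == "%s.log" % build.build_stage_slugs[-1]:
                    select.select((handle,), (), (), 2)
                    build.load()

                elif len(data) == 0:
                    return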
9bc8b9967631064821112b5c7ff3b65fb0b176f6
Fix wrong column name in db migration script of ryu plugin
neutron/db/migration/alembic_migrations/versions/5a875d0e5c_ryu.py
neutron/db/migration/alembic_migrations/versions/5a875d0e5c_ryu.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost

"""ryu

This retroactively provides migration support for
https://review.openstack.org/#/c/11204/

Revision ID: 5a875d0e5c
Revises: 2c4af419145b
Create Date: 2012-12-18 12:32:04.482477

"""

# revision identifiers, used by Alembic.
revision = '5a875d0e5c'
down_revision = '2c4af419145b'

# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
    'neutron.plugins.ryu.ryu_neutron_plugin.RyuNeutronPluginV2'
]

from alembic import op
import sqlalchemy as sa

from neutron.db import migration


def upgrade(active_plugin=None, options=None):
    if not migration.should_run(active_plugin, migration_for_plugins):
        return

    op.create_table(
        'tunnelkeys',
        sa.Column('network_id', sa.String(length=36), nullable=False),
        sa.Column('last_key', sa.Integer(), autoincrement=False,
                  nullable=False),
        sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('last_key')
    )

    op.create_table(
        'tunnelkeylasts',
        sa.Column('last_key', sa.Integer(), autoincrement=False,
                  nullable=False),
        sa.PrimaryKeyConstraint('last_key')
    )


def downgrade(active_plugin=None, options=None):
    if not migration.should_run(active_plugin, migration_for_plugins):
        return

    op.drop_table('tunnelkeylasts')
    op.drop_table('tunnelkeys')
Python
0
@@ -1487,36 +1487,38 @@
 sa.Column('
-last
+tunnel
 _key', sa.Intege
@@ -1721,36 +1721,38 @@
 yKeyConstraint('
-last
+tunnel
 _key')%0A    )%0A%0A
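Decoded: both hunks rename the column from `last_key` to `tunnel_key` in the `tunnelkeys` table, leaving `tunnelkeylasts` untouched. A hedged sketch of the resulting table definition, reconstructed from the diff rather than the target file:

    op.create_table(
        'tunnelkeys',
        sa.Column('network_id', sa.String(length=36), nullable=False),
        # sketch: column renamed from last_key
        sa.Column('tunnel_key', sa.Integer(), autoincrement=False,
                  nullable=False),
        sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('tunnel_key')
    )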
687681724202949105a476254f7a122171b2cf3e
Update existing objects on reimport when the ID is the same.
open511/management/commands/import_xml_roadevents.py
open511/management/commands/import_xml_roadevents.py
import datetime
import logging
import sys

from django.contrib.gis.geos import fromstr as geos_geom_from_string
from django.core.management.base import BaseCommand, CommandError

from lxml import etree

from open511.models import RoadEvent
from open511.utils.postgis import gml_to_ewkt
from open511.utils.serialization import ELEMENTS

logger = logging.getLogger(__name__)


class Command(BaseCommand):

    element_lookup = dict(
        (e[1], e[0]) for e in ELEMENTS
    )

    def handle(self, filename = sys.stdin, **options):
        root = etree.parse(filename).getroot()
        assert root.tag == 'Open511'

        created = []
        for event in root.xpath('RoadEvent'):
            try:
                rdev = RoadEvent()
                rdev.source_id = event.get('id')
                logger.info("Importing event %s" % rdev.source_id)
                rdev.jurisdiction = rdev.source_id.split(':')[0]

                for event_el in event:
                    if event_el.tag in self.element_lookup:
                        setattr(rdev, self.element_lookup[event_el.tag], event_el.text)
                    elif event_el.tag == 'Geometry':
                        gml = etree.tostring(event_el[0])
                        ewkt = gml_to_ewkt(gml, force_2D=True)
                        rdev.geom = geos_geom_from_string(ewkt)
                    else:
                        logger.warning("Unknown tag: %s" % etree.tostring(event_el))

                if isinstance(rdev.start_date, basestring):
                    rdev.start_date = _str_to_date(rdev.start_date)
                if isinstance(rdev.end_date, basestring):
                    rdev.end_date = _str_to_date(rdev.end_date)

                rdev.save()
                created.append(rdev)
            except ValueError as e:
                logger.error("ValueError importing %s: %s" % (e, rdev.source_id))

        print "%s entries imported." % len(created)


def _str_to_date(s):
    """2012-02-12 to a datetime.date object"""
    return datetime.date(*[ int(x) for x in s.split('-') ])
Python
0
@@ -714,74 +714,230 @@
-rdev = RoadEvent()%0A                rdev.source_id = event.get('id'
+source_id = event.get('id')%0A                try:%0A                    rdev = RoadEvent.objects.get(source_id=source_id)%0A                except RoadEvent.DoesNotExist:%0A                    rdev = RoadEvent(source_id=source_id
 )%0A
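Decoded: instead of always constructing a fresh `RoadEvent`, the hunk looks the source ID up first and only creates a new object when none exists, so re-imports update in place. A hedged sketch of the resulting loop body, reconstructed from the diff rather than the target file:

        for event in root.xpath('RoadEvent'):
            try:
                source_id = event.get('id')
                # sketch: reuse the existing row for this ID when present
                try:
                    rdev = RoadEvent.objects.get(source_id=source_id)
                except RoadEvent.DoesNotExist:
                    rdev = RoadEvent(source_id=source_id)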
927fcfaca3a07f46566d2c48d61ea96d380e88bc
Update validator in cancellation view
openprocurement/tender/limited/views/cancellation.py
openprocurement/tender/limited/views/cancellation.py
# -*- coding: utf-8 -*-
from openprocurement.api.utils import (
    json_view,
    context_unpack,
    APIResource,
    get_now,
    error_handler
)
from openprocurement.tender.core.utils import (
    apply_patch,
    save_tender,
    optendersresource
)
from openprocurement.tender.core.validation import (
    validate_cancellation_data,
    validate_patch_cancellation_data,
)
from openprocurement.tender.belowthreshold.views.cancellation import (
    TenderCancellationResource
)
from openprocurement.tender.limited.validation import validate_cancellation_in_termainated_status


@optendersresource(name='reporting:Tender Cancellations',
                   collection_path='/tenders/{tender_id}/cancellations',
                   path='/tenders/{tender_id}/cancellations/{cancellation_id}',
                   procurementMethodType='reporting',
                   description="Tender cancellations")
class TenderReportingCancellationResource(APIResource):

    @json_view(content_type="application/json",
               validators=(validate_cancellation_data, validate_cancellation_in_termainated_status),
               permission='edit_tender')
    def collection_post(self):
        """Post a cancellation
        """
        tender = self.request.validated['tender']
        cancellation = self.request.validated['cancellation']
        cancellation.date = get_now()
        if cancellation.status == 'active':
            tender.status = 'cancelled'
        tender.cancellations.append(cancellation)
        if save_tender(self.request):
            self.LOGGER.info('Created tender cancellation {}'.format(cancellation.id),
                             extra=context_unpack(self.request,
                                                  {'MESSAGE_ID': 'tender_cancellation_create'},
                                                  {'cancellation_id': cancellation.id}))
            self.request.response.status = 201
            self.request.response.headers['Location'] = self.request.route_url(
                '{}:Tender Cancellations'.format(tender.procurementMethodType),
                tender_id=tender.id, cancellation_id=cancellation.id)
            return {'data': cancellation.serialize("view")}

    @json_view(permission='view_tender')
    def collection_get(self):
        """List cancellations
        """
        return {'data': [i.serialize("view") for i in self.request.validated['tender'].cancellations]}

    @json_view(permission='view_tender')
    def get(self):
        """Retrieving the cancellation
        """
        return {'data': self.request.validated['cancellation'].serialize("view")}

    @json_view(content_type="application/json",
               validators=(validate_patch_cancellation_data, validate_cancellation_in_termainated_status),
               permission='edit_tender')
    def patch(self):
        """Post a cancellation resolution
        """
        tender = self.request.validated['tender']
        apply_patch(self.request, save=False, src=self.request.context.serialize())
        if self.request.context.status == 'active':
            tender.status = 'cancelled'
        if save_tender(self.request):
            self.LOGGER.info('Updated tender cancellation {}'.format(self.request.context.id),
                             extra=context_unpack(self.request, {'MESSAGE_ID': 'tender_cancellation_patch'}))
            return {'data': self.request.context.serialize("view")}


@optendersresource(name='negotiation:Tender Cancellations',
                   collection_path='/tenders/{tender_id}/cancellations',
                   path='/tenders/{tender_id}/cancellations/{cancellation_id}',
                   procurementMethodType='negotiation',
                   description="Tender cancellations")
class TenderNegotiationCancellationResource(TenderCancellationResource):
    """ Tender Negotiation Cancellation Resource """

    def validate_cancellation(self, operation):
        """ TODO move validators
        This class is inherited from below package, but validate_cancellation
        function has different validators. For now, we have no way to use
        different validators on methods according to procedure type.
        """
        if not super(TenderNegotiationCancellationResource, self).validate_cancellation(operation):
            return
        tender = self.request.validated['tender']
        cancellation = self.request.validated['cancellation']
        if tender.lots:
            if not cancellation.relatedLot:
                if [lot for lot in tender.lots if lot.status == 'complete']:
                    self.request.errors.add(
                        'body', 'data', 'Can\'t {} cancellation, if there is at least one complete lot'.format(operation))
                    self.request.errors.status = 403
                    raise error_handler(self.request.errors)
        return True


@optendersresource(name='negotiation.quick:Tender Cancellations',
                   collection_path='/tenders/{tender_id}/cancellations',
                   path='/tenders/{tender_id}/cancellations/{cancellation_id}',
                   procurementMethodType='negotiation.quick',
                   description="Tender cancellations")
class TenderNegotiationQuickCancellationResource(TenderNegotiationCancellationResource):
    """ Tender Negotiation Quick Cancellation Resource """
Python
0
@@ -110,28 +110,36 @@
 et_now,
-error_handle
+raise_operation_erro
 r%0A)%0A%0Afro
@@ -4462,71 +4462,42 @@
-self.request.errors.add(%0A                        'body', 'data'
+raise_operation_error(self.request
 , 'C
@@ -4581,122 +4581,8 @@
 n))%0A
-                    self.request.errors.status = 403%0A                    raise error_handler(self.request.errors)%0A
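Decoded: the hunks swap the `error_handler` import for `raise_operation_error` and collapse the add/status/raise sequence into a single helper call. A hedged sketch of the resulting block, reconstructed from the diff rather than the target file:

                if [lot for lot in tender.lots if lot.status == 'complete']:
                    # sketch: one helper call replaces errors.add() + errors.status + raise
                    raise_operation_error(self.request, 'Can\'t {} cancellation, if there is at least one complete lot'.format(operation))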