Dataset columns:

    column         dtype           range / classes
    -------------  --------------  ----------------
    commit         stringlengths   40 .. 40
    subject        stringlengths   1 .. 3.25k
    old_file       stringlengths   4 .. 311
    new_file       stringlengths   4 .. 311
    old_contents   stringlengths   0 .. 26.3k
    lang           stringclasses   3 values
    proba          float64         0 .. 1
    diff           stringlengths   0 .. 7.82k
29af0ac8ec3abb392e248561674f1927e28d97ed
Modify sensor to return pressure in bar and temp in celsius
sensor.py
sensor.py
from abc import ABCMeta, abstractmethod
from quick2wire import i2c
import logging
import time

logging.basicConfig()


class Sensor(object):
    """ Sensor abstract base class represents interface for pressure sensors. """
    __metaclass__=ABCMeta

    @abstractmethod
    def getPressure(self):
        pass

    @abstractmethod
    def getTemperature(self):
        pass

    @abstractmethod
    def reset(self):
        pass

    @staticmethod
    def factory(type):
        if type == "MS5803-14B":
            return MS5803()
        assert 0, "Bad sensor type: " + type


class MS5803():
    """ Implementation of class Sensor for the Amsys MS5803-14BA pressure sensor.

    Implementation of abstract base class Sensor for the Amsys MS5803-14BA
    pressure sensor has been derived from sensor datasheet at
    http://www.amsys-sensor.eu/sheets/amsys.en.ms5803_14ba.pdf.
    """

    def __init__(self):
        self.logger = logging.getLogger('sensor(MS5803)')
        self.logger.setLevel(logging.DEBUG)
        self.bus = i2c.I2CMaster(1)
        self.address = 0x77
        self.C = [0 for i in range(7)]
        self.D = [0 for i in range(3)]
        for x in range(1,7):
            self.C[x] = self.__readPROM(x)

    def reset(self):
        self.bus.transaction(i2c.writing_bytes(self.address, 0x1E))
        time.sleep(0.01)

    def __readPROM(self, address):
        #PROM read code
        command = 0xA0 + (address << 1)
        values = self.bus.transaction(i2c.writing_bytes(self.address, command),
                                      i2c.reading(self.address, 2))
        C = (values[0][0] << 8) | values[0][1]
        self.logger.debug("CMD(" + "{0:#04x}".format(command) + ") -> " + "C = " + str(C))
        return C

    def __readADC(self):
        command = 0x00
        #Read D1
        self.bus.transaction(i2c.writing_bytes(self.address, 0x48))
        time.sleep(0.01)
        values = self.bus.transaction(i2c.writing_bytes(self.address, command),
                                      i2c.reading(self.address, 3))
        self.D[1] = (values[0][0] << 16) | (values[0][1] << 8) | (values[0][2])
        self.logger.debug("CMD(" + "{0:#04x}".format(command) + ") -> D1 = " + str(self.D[1]))
        # Read D2
        self.bus.transaction(i2c.writing_bytes(self.address, 0x58))
        time.sleep(0.01)
        values = self.bus.transaction(i2c.writing_bytes(self.address, command),
                                      i2c.reading(self.address, 3))
        self.D[2] = (values[0][0] << 16) | (values[0][1] << 8) | (values[0][2])
        self.logger.debug("CMD(" + "{0:#04x}".format(command) + ") -> D2 = " + str(self.D[2]))

    def __calc(self):
        self.__readADC()
        dT = self.D[2] - self.C[5] * 256
        TEMP = 2000 + dT * self.C[6] / 8388608
        OFF = self.C[2] * 65536 + (self.C[4] * dT) / 128
        SENS = self.C[1] * 32768 + (self.C[3] * dT) / 256
        P = (self.D[1] * SENS / 2097152 - OFF) / 32768
        return (TEMP, P)

    def getTemperature(self):
        return self.__calc()[0]

    def getPressure(self):
        return self.__calc()[1]




if __name__ == "__main__":
    print("Getting sensor values:")
    sensor = Sensor.factory("MS5803-14B")
    sensor.reset()
    temp = sensor.getTemperature() / 100.0
    print("Temperature is ", str(temp), " deg celsius")
    pressure = sensor.getPressure() / 10.0
    print("Pressure is ", str(pressure), " mbar")
Python
0
@@ -2942,15 +2942,21 @@ )%5B0%5D +/100.0 %0A - %0A @@ -3010,16 +3010,26 @@ alc()%5B1%5D + / 10000.0 %0A%0A%0A%0A%0Aif @@ -3187,16 +3187,8 @@ re() - / 100.0 %0A @@ -3279,15 +3279,8 @@ re() - / 10.0 %0A @@ -3319,17 +3319,16 @@ ure), %22 -m bar%22)%0A
c52bb5699cc77d41095516c26ff909d856af8bd1
Add TODOs
src/cloud/firebase_io.py
src/cloud/firebase_io.py
import json

from pyrebase import pyrebase

from utils import TimeUtils


class FirebaseIO():

    def __init__(self):
        # pyrebase_config.json is of format
        # {
        # "apiKey": "xxx",
        # "authDomain": "xxx",
        # "databaseURL": "xxx",
        # "storageBucket": "xxx",
        # "serviceAccount": "xxx.json"
        # }

        with open('pyrebase_config.json') as fp:
            config = json.load(fp)
        firebase = pyrebase.initialize_app(config)
        self.db = firebase.database()

    def store_parking_event(self, request_json):
        register_number = request_json['registerNumber']
        parking_context_type = request_json['parkingContextType']

        parking_event_json = {
            'timestamp': TimeUtils.get_local_timestamp(),
            'parkingType': parking_context_type
        }

        if parking_context_type == 'PAID':
            parking_area_id = request_json['parkingAreaId']
            parking_event_json['parkingDurationInMinutes'] = request_json['parkingDurationInMinutes']
        elif parking_context_type == 'PARKING_DISC':
            parking_area_id = 'PARKING_DISC_AREA'

        results = self.db\
            .child('parkingAreaParkingEvent')\
            .child(parking_area_id)\
            .child(register_number)\
            .push(parking_event_json)

        # Store notification about the event for event consumption
        # > Notifications are stored in a flattened format
        # > Better use of indexing for server side event consumers
        notification_json = {
            'parkingAreaId': parking_area_id,
            'registerNumber': register_number,
            'parkingEventId': results['name'],
            'isConsumedByOccupancyAnalysis': False,
            'isConsumedByLongTermDataStore': False,
            'liveUntilTime': TimeUtils.get_epoch_timestamp_plus_seconds(60*60*24*7)
        }
        notification_result = self.db\
            .child('parkingEventNotification')\
            .push(notification_json)

        return json.dumps(results)

    def remove_dead_events(self):
        notifications_ref = self.db.child('parkingEventNotification')
        dead_notifications = notifications_ref\
            .order_by_child('liveUntilTime')\
            .start_at(TimeUtils.get_epoch_timestamp_plus_seconds(-365*24*60*60))\
            .end_at(TimeUtils.get_epoch_timestamp_plus_seconds(0)).get()
        dead_notifications = [(dn.key(), dn.val()) for dn in dead_notifications.each()
                              if all([dn.val()['isConsumedByOccupancyAnalysis'],
                                      dn.val()['isConsumedByLongTermDataStore']])]

        for dn_id, dn in dead_notifications:
            # Remove dead events
            self.db.child('parkingAreaParkingEvent')\
                .child(dn['parkingAreaId'])\
                .child(dn['registerNumber'])\
                .child(dn['parkingEventId'])\
                .remove()
            # Remove dead notifications
            self.db.child('parkingEventNotification')\
                .child(dn_id)\
                .remove()

    # consumer is either LongTermDataStore or OccupancyAnalysis
    def consume_new_parking_events_by(self, consumer):
        consumed_notifications = self.db\
            .child('parkingEventNotification')\
            .order_by_child('isConsumedBy' + consumer)\
            .start_at(False).end_at(False)\
            .get()

        result = []
        for cn in consumed_notifications.each():
            # Get parking event for the result set
            parking_event = self.db\
                .child('parkingAreaParkingEvent')\
                .child(cn.val()['parkingAreaId'])\
                .child(cn.val()['registerNumber'])\
                .child(cn.val()['parkingEventId'])\
                .get()
            result.append(parking_event.val())

            # TODO: notifications may be checked even if the following processes fail
            # TODO: form transaction
            # Set parking event as consumed
            self.db\
                .child('parkingEventNotification')\
                .child(cn.key())\
                .update({'isConsumedBy'+consumer:True})

        return result
Python
0.000001
@@ -359,24 +359,57 @@ # %7D%0A%0A + # TODO make configurable%0A with @@ -1937,16 +1937,41 @@ 60*24*7) + # TODO make configurable %0A @@ -2239,16 +2239,54 @@ ation')%0A + # TODO make time configurable%0A
e4be9429e050dae6b1c9e988fa3da3c3e9d1d417
Add bots root directory to parent.py
test/common/parent.py
test/common/parent.py
import os
import sys

TEST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BOTS_DIR = os.path.join(os.path.dirname(TEST_DIR), "bots")

sys.path.append(os.path.join(BOTS_DIR, "machine"))
sys.path.append(os.path.join(TEST_DIR, "common"))
Python
0
@@ -145,16 +145,67 @@ %22bots%22)%0A +sys.path.append(os.path.join(BOTS_DIR)) # for lib%0A sys.path
826f23f0fc7eea4c72dcc26f637f3752bee51b47
Allow tests to be called from parent directory of "test"
test/ctypesgentest.py
test/ctypesgentest.py
import optparse, sys, StringIO
sys.path.append("..")
import ctypesgencore

"""ctypesgentest is a simple module for testing ctypesgen on various C
constructs. It consists of a single function, test(). test() takes a string
that represents a C header file, along with some keyword arguments
representing options. It processes the header using ctypesgen and returns a
tuple containing the resulting module object and the output that ctypesgen
produced."""

def test(header, **more_options):
    assert isinstance(header, str)
    file("temp.h","w").write(header)
    options = ctypesgencore.options.get_default_options()
    options.headers = ["temp.h"]
    for opt in more_options:
        setattr(options, opt, more_options[opt])

    # Redirect output
    sys.stdout = StringIO.StringIO()

    # Step 1: Parse
    descriptions=ctypesgencore.parser.parse(options.headers,options)

    # Step 2: Process
    ctypesgencore.processor.process(descriptions,options)

    # Step 3: Print
    ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)

    # Un-redirect output
    output = sys.stdout.getvalue()
    sys.stdout.close()
    sys.stdout = sys.__stdout__

    # Load the module we have just produced
    module = __import__("temp")

    return module, output
Python
0.000001
@@ -24,16 +24,103 @@ tringIO%0A +sys.path.append(%22.%22) # Allow tests to be called from parent directory with Python 2.6%0A sys.path
bd7a1f8fe5237efc0de9fd60ddc035cc4be620ca
Update path_helper.py
unintended_ml_bias/new_madlibber/path_helper.py
unintended_ml_bias/new_madlibber/path_helper.py
import os

class PathHelper(object):
  def __init__(self, word_file, sentence_template_file, output_file):
    if not os.path.exists(word_file):
      raise IOError("Input word file '{}' does not exist!".format(word_file))
    if not os.path.isfile(word_file):
      raise IOError("Input word file '{}' is not a file!".format(word_file))
    self.word_file = word_file

    if not os.path.exists(sentence_template_file):
      raise IOError("Input sentence template file '{}' does not exist!".format(sentence_template_file))
    if not os.path.isfile(sentence_template_file):
      raise IOError("Input sentence template file '{}' is not a file!".format(sentence_template_file))
    self.sentence_template_file = sentence_template_file

    if os.path.basename(output_file) == '':
      raise IOError("Output file '{}' cannot be a directory.".format(output_file))
    output_dirname = os.path.dirname(output_file)
    if not os.path.exists(output_dirname):
      print("Output directory '{}' does not exist...creating".format(output_dirname))
      os.makedirs(output_dirname)
    self.output_file = output_file
Python
0.000004
@@ -4,16 +4,17 @@ ort os%0A%0A +%0A class Pa @@ -31,16 +31,17 @@ bject):%0A +%0A def __ @@ -487,32 +487,43 @@ exist!%22.format( +%0A sentence_templat @@ -661,16 +661,27 @@ .format( +%0A sentence @@ -762,16 +762,20 @@ %0A if +not os.path. @@ -799,14 +799,8 @@ ile) - == '' :%0A @@ -817,16 +817,27 @@ IOError( +%0A %22Output @@ -1049,24 +1049,35 @@ ing%22.format( +%0A output_dirna
3d4afd579bdd690c9fba94ee96e52257bf4d79d2
copy production procfile
reactive/huginn.py
reactive/huginn.py
from charms.reactive import (
    hook,
    when,
    only_once,
    is_state
)

import os.path as path

from charmhelpers.core import hookenv, host
from charmhelpers.core.templating import render
from charmhelpers.fetch import apt_install

from shell import shell

# ./lib/nginxlib
import nginxlib
# ./lib/rubylib
from rubylib import ruby_dist_dir, bundle
# ./lib/huginnlib.py
import huginnlib

config = hookenv.config()


# HOOKS -----------------------------------------------------------------------
@hook('config-changed')
def config_changed():
    if not is_state('nginx.available'):
        return
    host.service_restart('nginx')
    hookenv.status_set('active', 'Ready')


# REACTORS --------------------------------------------------------------------
@when('nginx.available')
@only_once
def install_app():
    """ Performs application installation """
    hookenv.log('Installing Huginn', 'info')

    # Configure NGINX vhost
    nginxlib.configure_site('default', 'vhost.conf',
                            app_path=ruby_dist_dir())

    # Update application
    huginnlib.download_archive()
    shell("mkdir -p %s/{log,tmp/pids,tmp/sockets}" % (ruby_dist_dir()))
    shell("cp %(dir)s/config/unicorn.rb.example "
          "%(dir)s/config/unicorn.rb" % {'dir': ruby_dist_dir()})
    bundle("install --deployment --without development test")
    bundle("exec rake assets:precompile RAILS_ENV=production")
    host.service_restart('nginx')
    hookenv.status_set('active', 'Huginn is installed!')


@when('nginx.available', 'database.available')
def setup_mysql(mysql):
    """ Mysql is available, update Huginn """
    hookenv.status_set('maintenance', 'Huginn is connecting to MySQL!')
    target = path.join(ruby_dist_dir(), '.env')
    render(source='application.env',
           target=target,
           context=dict(db=mysql))
    bundle("exec rake db:create RAILS_ENV=production")
    bundle("exec rake db:migrate RAILS_ENV=production")
    bundle("exec rake db:seed RAILS_ENV=production")
    host.service_restart('nginx')
    hookenv.status_set('active', 'Ready')
Python
0
@@ -1355,16 +1355,202 @@ test%22)%0A + procfile = path.join(hookenv.charm_dir(), 'templates/Procfile')%0A shell(%22cp %25(procfile)s %25(dir)s/Procfile%22 %25 %7B%0A 'procfile': procfile,%0A 'dir': ruby_dist_dir()%0A %7D)%0A%0A bund
86216b39365a7877103dfe075bf8e08a8ce696d0
bump version
radar/__init__.py
radar/__init__.py
__version__ = '2.47.22'
Python
0
@@ -14,11 +14,11 @@ '2.47.2 -2 +3 '%0A
3bfcc096acd5f3ed0cda2427bdc5177bd3e55dd7
bump version
radar/__init__.py
radar/__init__.py
__version__ = '2.46.25'
Python
0
@@ -18,7 +18,7 @@ 46.2 -5 +6 '%0A
d94e862d5775ecedf49fb0e15820b4744573c24c
bump to 1.1.1
radon/__init__.py
radon/__init__.py
'''This module contains the main() function, which is the entry point for the
command line interface.'''

__version__ = '1.1'


def main():
    '''The entry point for Setuptools.'''
    import sys
    from radon.cli import program, log_error
    if not sys.argv[1:]:
        sys.argv.append('-h')
    try:
        program()
    except Exception as e:
        log_error(e)


if __name__ == '__main__':
    main()
Python
0.000001
@@ -117,16 +117,18 @@ _ = '1.1 +.1 '%0A%0A%0Adef
24503b8cd2faed7de757fceb597c5d54b91daf4c
update the interface of mriqc_clf
mriqc/bin/mriqc_clf.py
mriqc/bin/mriqc_clf.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: oesteban
# @Date: 2015-11-19 16:44:27
""" mriqc_fit command line interface definition """

from __future__ import absolute_import, division, print_function, unicode_literals
from sys import version_info
import warnings

PY3 = version_info[0] > 2

from sklearn.metrics.base import UndefinedMetricWarning
warnings.simplefilter("once", UndefinedMetricWarning)

cached_warnings = []


def warn_redirect(message, category, filename, lineno, file=None, line=None):
    from mriqc import logging
    LOG = logging.getLogger('mriqc.warnings')

    if category not in cached_warnings:
        LOG.debug('captured warning (%s): %s', category, message)
        cached_warnings.append(category)


def main():
    """Entry point"""
    import yaml
    from io import open
    from argparse import ArgumentParser
    from argparse import RawTextHelpFormatter
    from pkg_resources import resource_filename as pkgrf
    from mriqc.classifier.cv import CVHelper
    from mriqc import logging, LOG_FORMAT
    from os.path import isfile

    warnings.showwarning = warn_redirect

    parser = ArgumentParser(description='MRIQC model selection and held-out evaluation',
                            formatter_class=RawTextHelpFormatter)

    g_clf = parser.add_mutually_exclusive_group()
    g_clf.add_argument('--train', nargs=2, help='training data tables, X and Y')
    g_clf.add_argument('--load-classifier', nargs="?", type=str, default='',
                       help='load pickled classifier in')

    parser.add_argument('--test-data', help='test data')
    parser.add_argument('--test-labels', help='test labels')

    parser.add_argument('-X', '--evaluation-data', help='classify this CSV table of IQMs')

    g_input = parser.add_argument_group('Inputs')
    g_input.add_argument('-P', '--parameters', action='store',
                         default=pkgrf('mriqc', 'data/classifier_settings.yml'))
    g_input.add_argument('-S', '--scorer', action='store', default='roc_auc')
    g_input.add_argument('--save-classifier', action='store',
                         help='write pickled classifier out')
    g_input.add_argument('--log-file', action='store', help='write log to this file')
    g_input.add_argument('--log-level', action='store', default='INFO',
                         choices=['CRITICAL', 'ERROR', 'WARN', 'INFO', 'DEBUG'])
    g_input.add_argument('--njobs', action='store', default=-1, type=int,
                         help='number of jobs')
    g_input.add_argument('-o', '--output', action='store', default='predicted_qa.csv',
                         help='file containing the labels assigned by the classifier')
    g_input.add_argument('-t', '--threshold', action='store', default=0.5, type=float,
                         help='decision threshold of the classifier')

    opts = parser.parse_args()

    if opts.log_file is not None:
        filelogger = logging.getLogger()
        fhl = logging.FileHandler(opts.log_file)
        fhl.setFormatter(fmt=logging.Formatter(LOG_FORMAT))
        filelogger.addHandler(fhl)
        filelogger.setLevel(opts.log_level)

    parameters = None
    if opts.parameters is not None:
        with open(opts.parameters) as paramfile:
            parameters = yaml.load(paramfile)

    if opts.train is not None:
        train_exists = [isfile(fname) for fname in opts.train]
        if len(train_exists) > 0 and not all(train_exists):
            errors = ['file "%s" not found' % fname
                      for fexists, fname in zip(train_exists, opts.train)
                      if not fexists]
            raise RuntimeError('Errors (%d) loading training set: %s.'
                               % ( len(errors), ', '.join(errors)))

        # Initialize model selection helper
        cvhelper = CVHelper(X=opts.train[0], Y=opts.train[1], n_jobs=opts.njobs,
                            param=parameters, scorer=opts.scorer)

        # Perform model selection before setting held-out data, for hygene
        cvhelper.fit()

        # Pickle if required
        if opts.save_classifier:
            cvhelper.save(opts.save_classifier)

    # If no training set is given, need a classifier
    else:
        load_classifier = opts.load_classifier
        if load_classifier is None:
            load_classifier = pkgrf('mriqc', 'data/rfc-nzs-full-1.0.pklz')

        if not isfile(load_classifier):
            msg = 'was not provided'
            if load_classifier != '':
                msg = '("%s") was not found' % load_classifier
            raise RuntimeError(
                'No training samples were given, and the --load-classifier '
                'option %s.' % msg)

        cvhelper = CVHelper(load_clf=load_classifier, n_jobs=opts.njobs,
                            rate_label='rater_1')

    if opts.test_data and opts.test_labels:
        # Set held-out data
        cvhelper.setXtest(opts.test_data, opts.test_labels)
        # Evaluate
        print('%s=%f, accuracy=%f' % (opts.scorer,
                                      cvhelper.evaluate(scoring=opts.scorer),
                                      cvhelper.evaluate(matrix=True)))

    if opts.evaluation_data:
        cvhelper.predict_dataset(opts.evaluation_data, out_file=opts.output,
                                 thres=opts.threshold)


if __name__ == '__main__':
    main()
Python
0.000001
@@ -1070,16 +1070,26 @@ t isfile +, splitext %0A%0A wa @@ -3278,16 +3278,139 @@ mfile)%0A%0A + save_classifier = None%0A if opts.save_classifier:%0A save_classifier, clf_ext = splitext(opts.save_classifier)%0A%0A if o @@ -3424,32 +3424,32 @@ in is not None:%0A - train_ex @@ -4171,29 +4171,24 @@ %0A if -opts. save_classif @@ -4218,21 +4218,16 @@ er.save( -opts. save_cla @@ -4229,24 +4229,45 @@ e_classifier + + '_train' + clf_ext )%0A%0A # If @@ -5207,16 +5207,16 @@ corer),%0A - @@ -5275,24 +5275,173 @@ ix=True)))%0A%0A + # Pickle if required%0A cvhelper.fit_full()%0A if save_classifier:%0A cvhelper.save(save_classifier + '_full' + clf_ext)%0A%0A if opts.
40b4a52e525b59921a5667740fa81ee3c15e6b06
Switch to future division in mtm_stats.py
mtm_stats/mtm_stats.py
mtm_stats/mtm_stats.py
'''The main script'''
from __future__ import print_function
from __future__ import absolute_import
from future.utils import viewitems

# To update with any Cython changes, just run:
# python setup.py build_ext --inplace

import numpy as np
from .sparse_block_array import sba_compress_64
from . import cy_mtm_stats


def extract_sets_from_connections(connections):
    '''Get two sorted sets from the connections tuples,
       one for the first elements and one for the second'''
    setA = sorted({i[0] for i in connections})
    setB = sorted({i[1] for i in connections})
    return setA, setB


def convert_connections_to_binary(connections, setA, setB):
    '''connections is a many-to-many mapping from set A to set B
       Returns a binary matrix where each item in set B gets mapped to
       a single bit and each item in set A gets a row of these bits'''
    mappingA = {p: i for i, p in enumerate(setA)}
    mappingB = {p: i for i, p in enumerate(setB)}
    lenB64 = int(np.ceil(len(setB) * 1. / 64))
    output = np.zeros((len(setA), lenB64), np.uint64)
    for a, b in connections:
        ia = mappingA[a]
        ib = mappingB[b]
        output[ia, ib // 64] |= np.uint64(1 << (ib % 64))
    return output


def get_grouped_indices(connections, mappingA, mappingB):
    grouped = {}
    for a, b in connections:
        grouped.setdefault(mappingA[a],[]).append(mappingB[b])
    return grouped


def convert_connections_to_sba_list_space_efficient(connections, setA, setB, chunk_length_64):
    '''connections is a many-to-many mapping from set A to set B
       Returns a list of SBA compressed binary arrays where each item in set B
       gets mapped to a single bit and each item in set A gets a row of these bits'''
    mappingA = {p: i for i, p in enumerate(setA)}
    mappingB = {p: i for i, p in enumerate(setB)}
    lenB64 = int(np.ceil(len(setB) * 1. / 64))
    tmp_arr = np.empty(lenB64, np.uint64)
    grouped = get_grouped_indices(connections, mappingA, mappingB)
    sba_list = [None] * len(setA)
    for ia, ib_list in viewitems(grouped):
        tmp_arr *= 0
        for ib in ib_list:
            tmp_arr[ib // 64] |= np.uint64(1 << (ib % 64))
        sba_list[ia] = sba_compress_64(tmp_arr, chunk_length_64)
    return sba_list


def mtm_stats_raw(connections, chunk_length_64=1, indices_a=None, cutoff=0, start_j=0, dense_input=False):
    '''The function that actually calls into cython
       Produces the sets from the connections,
       converts the connection to binary and compresses them into sba's
       and then performs the actual counts
       Returns: setA, setB, base_counts, intersection_counts'''
    setA, setB = extract_sets_from_connections(connections)
    if dense_input:
        rows_arr = convert_connections_to_binary(connections, setA, setB)
        base_counts, intersection_counts = cy_mtm_stats.cy_mtm_stats_dense_input(rows_arr, indices_a, cutoff, start_j)
    else:
        #sba_list = [sba_compress_64(i, chunk_length_64)
        #            for i in convert_connections_to_binary(connections, setA, setB)]
        sba_list = convert_connections_to_sba_list_space_efficient(connections, setA, setB, chunk_length_64)
        base_counts, intersection_counts = cy_mtm_stats.cy_mtm_stats(sba_list, chunk_length_64, indices_a, cutoff, start_j)
    return setA, setB, base_counts, intersection_counts


def get_dicts_from_array_outputs(base_counts, intersection_counts, setA):
    base_counts_dict = {setA[i]: p for i, p in enumerate(base_counts)}
    iu_counts_dict = {(setA[i], setA[j]): (ic, base_counts[i] + base_counts[j] - ic)
                      for i, j, ic in intersection_counts}
    return base_counts_dict, iu_counts_dict


def mtm_stats(connections, chunk_length_64=1, indices_a=None, cutoff=0, start_j=0, dense_input=False):
    setA, setB, base_counts, intersection_counts = mtm_stats_raw(connections, chunk_length_64, indices_a,
                                                                 cutoff, start_j, dense_input)
    base_counts_dict, iu_counts_dict = get_dicts_from_array_outputs(base_counts, intersection_counts, setA)
    return base_counts_dict, iu_counts_dict


def get_Jaccard_index_from_sparse_connections(iu_counts_dict):
    return {k: ic * 1. / uc for k, (ic, uc) in viewitems(iu_counts_dict)}


def get_Jaccard_index(connections, chunk_length_64=1, indices_a=None, cutoff=0, start_j=0, dense_input=False):
    base_counts_dict, iu_counts_dict = mtm_stats(connections, chunk_length_64, indices_a, cutoff, start_j, dense_input)
    jaccard_index = get_Jaccard_index_from_sparse_connections(iu_counts_dict)
    return base_counts_dict, jaccard_index


if __name__ == '__main__':
    r = mtm_stats([('a1', 'b1'),
                   ('a1', 'b2'),
                   ('a1', 'b3'),
                   ('a2', 'b1'),
                   ('a2', 'b2'),
                   ('a3', 'b3'),
                   ('a4', 'b9'),])
    print(r[0])
    print(r[1])
Python
0
@@ -92,16 +92,48 @@ _import%0A +from __future__ import division%0A from fut @@ -1014,37 +1014,32 @@ p.ceil(len(setB) - * 1. / 64))%0A outp @@ -1884,21 +1884,16 @@ en(setB) - * 1. / 64))%0A @@ -4248,13 +4248,8 @@ : ic - * 1. / u
4d167d2b8ed024d1ea11f6f9ab0743601bddc8f5
word_count
examples/word_count/insert_data.py
examples/word_count/insert_data.py
import sys, os, logging
import pymongo
import bson
import datetime

config = {
    "db_name": "test",
    "collection_name": "wc",
    "input_uri": "mongodb://localhost/test.wc",
    "create_input_splits": True,
    "split_key": {'_id' : 1},
}

if __name__ == '__main__':
    conn = pymongo.Connection()
    db = conn[config.get('db_name')]
    coll = db[config.get('collection_name')]
    logfile = open("beyond_lies_the_wub.txt","r").readlines()
    print 'opened file'
    for line in logfile:
        #print ', and line is %s'%line
        for word in line.split():
            post = {"file_text" : word,
                    "date" : datetime.datetime.utcnow()}
            print 'post: %s '%post
            coll.insert(post)
Python
0.998596
@@ -522,47 +522,8 @@ le:%0A - #print ', and line is %25s'%25line%0A @@ -633,43 +633,8 @@ ()%7D%0A - print 'post: %25s '%25post%0A
ebf09008dbb90b5155a9bbdfe395145b1a6b422d
Copy input to working directory - problem with long file names...
Wrappers/Solve/Solve.py
Wrappers/Solve/Solve.py
#!/usr/bin/env python
# Solve.py
#   Copyright (C) 2006 CCLRC, Graeme Winter
#
#   This code is distributed under the BSD license, a copy of which is
#   included in the root directory of this package.
#
# A wrapper for the phasing program SOLVE (Tom Terwilliger)
#
# 11th June 207
#

import sys
import os

if not os.path.join(os.environ['XIA2CORE_ROOT'], 'Python') in sys.path:
    sys.path.append(os.path.join(os.environ['XIA2CORE_ROOT'], 'Python'))

if not os.environ['XIA2_ROOT'] in sys.path:
    sys.path.append(os.environ['XIA2_ROOT'])

from Driver.DriverFactory import DriverFactory

def Solve(DriverType = None):
    '''Create a Solve instance based on the DriverType.'''

    DriverInstance = DriverFactory.Driver(DriverType)

    class SolveWrapper(DriverInstance.__class__):
        '''A wrapper class for Solve. This will take input from an MTZ file.'''

        def __init__(self):
            DriverInstance.__class__.__init__(self)

            # presume that this will use a "big" version of solve...
            self.set_executable('solve_huge')

            self._hklin = None
            self._hklout = None

            self._resolution_high = 40.0
            self._resolution_low = 0.0

            self._atom = None
            self._sites = None
            self._n_sites = 0
            self._nres = 0

            # this needs to contain label information for the
            # data sets and also the wavelength, f' anf f'' values
            self._wavelengths = []

            return

        def set_resolution_high(self, resolution_high):
            self._resolution_high = resolution_high
            return

        def set_resolution_low(self, resolution_low):
            self._resolution_low = resolution_low
            return

        def set_hklin(self, hklin):
            self._hklin = hklin
            return

        def get_hklout(self, hklout):
            return os.path.join(self.get_working_directory(), 'solve.mtz')

        def set_atom(self, atom):
            self._atom = atom
            return

        def set_sites(self, sites):
            self._sites = sites
            return

        def set_n_sites(self, n_sites):
            self._n_sites = n_sites
            return

        def set_nres(self, nres):
            self._nres = nres
            return

        def add_wavelength(self, name, wavelength, fp, fpp):
            self._wavelengths.append({'name':name,
                                      'wavelength':wavelength,
                                      'fp':fp, 'fpp':fpp})
            return

        def run(self):
            if not self._hklin:
                raise RuntimeError, 'no HKLIN set'

            self.start()

            self.input('logfile solve.logfile')
            self.input('resolution %f %f' % \
                       (self._resolution_low, self._resolution_high))
            self.input('fixscattfactors')

            for j in range(len(self._wavelengths)):
                name = self._wavelengths[j]['name']
                number = j + 1
                self.input(
                    'labin FPH%d=F_%s SIGFPH%d=SIGF_%s DPH%d=DANO_%s SIGDPH%d=SIGDANO_%s' % \
                    (number, name, number, name, number, name, number, name))

            self.input('hklin %s' % self._hklin)
            self.input('mad_atom %s' % self._atom)

            for j in range(len(self._wavelengths)):
                number = j + 1
                self.input('lambda %d' % number)
                self.input('wavelength %f' % \
                           self._wavelengths[j]['wavelength'])
                self.input('fprimv_mad %f' % self._wavelengths[j]['fp'])
                self.input('fprprv_mad %f' % self._wavelengths[j]['fpp'])

            self.input('nres %d' % self._nres)
            self.input('nanomalous %d' % self._n_sites)
            self.input('scale_mad')
            self.input('analyze_mad')
            self.input('solve')

            self.close_wait()

            # need to get some interesting stuff out here...

            return

    return SolveWrapper()
Python
0
@@ -2753,16 +2753,194 @@ N set'%0A%0A + hklin = os.path.join(%0A self.get_working_directory(),%0A os.path.split(self._hklin)%5B-1%5D)%0A%0A shutil.copyfile(self._hklin, hklin)%0A%0A @@ -3560,27 +3560,40 @@ n %25s' %25 -self._hklin +os.path.split(hklin)%5B-1%5D )%0A
370d58420c48ed5291fb3291a3f89449b2fb5230
Add description to update-production script
docker/update-production.py
docker/update-production.py
#!/usr/bin/env python3
import argparse
import subprocess
import json
import sys

parser = argparse.ArgumentParser()
args = parser.parse_args()


def _info(msg):
    sys.stdout.write('* {}\n'.format(msg))
    sys.stdout.flush()


def _run_tutum(args):
    try:
        subprocess.check_call(['tutum',] + args, stdout=subprocess.PIPE)
    except subprocess.CalledProcessError as err:
        sys.stderr.write('{}\n'.format(err))
        sys.exit(1)


_info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
    'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
_info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
    link_to = 'muzhack-blue'
else:
    assert linked_service == 'muzhack-blue'
    link_to = 'muzhack-green'

_info('Redeploying service \'{}\'...'.format(link_to))
_run_tutum(['service', 'redeploy', '--sync', link_to,])

_info('Linking to service \'{}\'...'.format(link_to))
_run_tutum(['service', 'set', '--link-service', '{0}:{0}'.format(link_to),
            '--sync', 'lb.muzhack-staging',])

_info('Successfully switched production service to {}'.format(link_to))
Python
0
@@ -108,16 +108,78 @@ tParser( +description='Update production server to latest Docker image.' )%0Aargs =
f4a8121bf38cdd8dea4a828316dc1c117c5ea0f3
update West Devon import script for parl.2017-06-08 (closes #902)
polling_stations/apps/data_collection/management/commands/import_west_devon.py
polling_stations/apps/data_collection/management/commands/import_west_devon.py
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter

class Command(BaseXpressDemocracyClubCsvImporter):
    council_id = 'E07000047'
    addresses_name = 'Democracy_Club__04May2017 - west devon.TSV'
    stations_name = 'Democracy_Club__04May2017 - west devon.TSV'
    elections = [
        'local.devon.2017-05-04',
        'parl.2017-06-08'
    ]
    csv_delimiter = '\t'
Python
0
@@ -183,177 +183,122 @@ = ' -Democracy_Club__04May2017 - west devon.TSV'%0A stations_name = 'Democracy_Club__04May2017 - west devon.TSV'%0A elections = %5B%0A 'local.devon.2017-05-04',%0A +parl.2017-06-08/Version 2/merged.tsv'%0A stations_name = 'parl.2017-06-08/Version 2/merged.tsv'%0A elections = %5B 'par @@ -310,21 +310,16 @@ 7-06-08' -%0A %5D%0A cs
4686448c2de3a49f4c1d4593327e1072de9644f7
Return result of the exec runner.
dockermap/map/runner/cmd.py
dockermap/map/runner/cmd.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import logging

from ..action import ContainerUtilAction
from ..input import ItemType

log = logging.getLogger(__name__)


class ExecMixin(object):
    """
    Utility mixin for executing configured commands inside containers.
    """
    action_method_names = [
        (ItemType.CONTAINER, ContainerUtilAction.EXEC_COMMANDS, 'exec_commands'),
        (ItemType.CONTAINER, ContainerUtilAction.EXEC_ALL, 'exec_container_commands'),
    ]

    def exec_commands(self, action, c_name, run_cmds, **kwargs):
        """
        Runs a single command inside a container.

        :param action: Action configuration.
        :type action: dockermap.map.runner.ActionConfig
        :param c_name: Container name.
        :type c_name: unicode | str
        :param run_cmds: Commands to run.
        :type run_cmds: list[dockermap.map.input.ExecCommand]
        """
        client = action.client
        for run_cmd in run_cmds:
            cmd = run_cmd.cmd
            cmd_user = run_cmd.user
            log.debug("Creating exec command in container %s with user %s: %s.", c_name, cmd_user, cmd)
            ec_kwargs = self.get_exec_create_kwargs(action, c_name, cmd, cmd_user)
            create_result = client.exec_create(**ec_kwargs)
            if create_result:
                e_id = create_result['Id']
                log.debug("Starting exec command with id %s.", e_id)
                es_kwargs = self.get_exec_start_kwargs(action, c_name, e_id)
                client.exec_start(**es_kwargs)
            else:
                log.debug("Exec command was created, but did not return an id. Assuming that it has been started.")

    def exec_container_commands(self, action, c_name, **kwargs):
        """
        Runs all configured commands of a container configuration inside the container instance.

        :param action: Action configuration.
        :type action: dockermap.map.runner.ActionConfig
        :param c_name: Container name.
        :type c_name: unicode | str
        """
        config_cmds = action.config.exec_commands
        if not config_cmds:
            return
        self.exec_commands(action, c_name, run_cmds=config_cmds)
Python
0
@@ -905,16 +905,259 @@ ommand%5D%0A + :return: List of exec command return values (e.g. containing the command id), if applicable, or %60%60None%60%60%0A if either no commands have been run or no values have been returned from the API.%0A :rtype: list%5Bdict%5D %7C NoneType%0A @@ -1152,32 +1152,32 @@ ype%0A %22%22%22%0A - client = @@ -1191,16 +1191,42 @@ .client%0A + exec_results = %5B%5D%0A @@ -1821,32 +1821,83 @@ rt(**es_kwargs)%0A + exec_results.append(create_result)%0A else @@ -2013,16 +2013,93 @@ arted.%22) +%0A if exec_results:%0A return exec_results%0A return None %0A%0A de @@ -2435,32 +2435,275 @@ : unicode %7C str%0A + :return: List of exec command return values (e.g. containing the command id), if applicable, or %60%60None%60%60%0A if either no commands have been run or no values have been returned from the API.%0A :rtype: list%5Bdict%5D %7C NoneType%0A %22%22%22%0A @@ -2794,16 +2794,21 @@ return + None %0A @@ -2808,16 +2808,23 @@ +return self.exe
7b1871b311aae41d699a41da7c6553b45a588313
purge wip about cassandra metrics (not-the-right-place)
feedly/storage/cassandra/models.py
feedly/storage/cassandra/models.py
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.exceptions import ValidationError


class VarInt(columns.Column):
    db_type = 'varint'

    def validate(self, value):
        val = super(VarInt, self).validate(value)
        if val is None:
            return
        try:
            return long(val)
        except (TypeError, ValueError):
            raise ValidationError(
                "{} can't be converted to integer value".format(value))

    def to_python(self, value):
        return self.validate(value)

    def to_database(self, value):
        return self.validate(value)


class BaseActivity(Model):
    feed_id = columns.Ascii(primary_key=True, partition_key=True)
    activity_id = VarInt(primary_key=True, clustering_order='desc')


class Activity(BaseActivity):
    actor = columns.Integer(required=False)
    extra_context = columns.Bytes(required=False)
    object = columns.Integer(required=False)
    target = columns.Integer(required=False)
    time = columns.DateTime(required=False)
    verb = columns.Integer(required=False)


class AggregatedActivity(BaseActivity):
    activities = columns.Bytes(required=False)
    created_at = columns.DateTime(required=False)
    group = columns.Ascii(required=False)
    updated_at = columns.DateTime(required=False)


class FanoutStats(Model):
    consumer_feed_id = columns.Ascii(primary_key=True, partition_key=True)
    fanout_at = columns.DateTime(primary_key=True, partition_key=True)
    date = columns.DateTime(primary_key=True, clustering_order='desc')
    producer_feed_id = columns.Ascii()
    activity_count = columns.Integer(default=1)
    operation = columns.Ascii()


class ActivityStats(Model):
    producer_feed_id = columns.Ascii(primary_key=True)
    date = columns.DateTime(primary_key=True, partition_key=True)
    activity_count = columns.Integer(default=1)
Python
0
@@ -1317,568 +1317,4 @@ se)%0A -%0A%0Aclass FanoutStats(Model):%0A consumer_feed_id = columns.Ascii(primary_key=True, partition_key=True)%0A fanout_at = columns.DateTime(primary_key=True, partition_key=True)%0A date = columns.DateTime(primary_key=True, clustering_order='desc')%0A producer_feed_id = columns.Ascii()%0A activity_count = columns.Integer(default=1)%0A operation = columns.Ascii()%0A%0A%0Aclass ActivityStats(Model):%0A producer_feed_id = columns.Ascii(primary_key=True)%0A date = columns.DateTime(primary_key=True, partition_key=True)%0A activity_count = columns.Integer(default=1)%0A%0A
8d92e42bc524b72b40232b85fa59d83c05cea29b
send full domain and not only domain name
files/opensteak-metadata-server.py
files/opensteak-metadata-server.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Authors:
# @author: David Blaisonneau <david.blaisonneau@orange.com>
# @author: Arnaud Morin <arnaud1.morin@orange.com>

import tornado.ioloop
import tornado.web
import socket
import sys
import argparse
import configparser
from foreman import Foreman

confRoot = '/opt/metadata'
confFile = '{}/metadata.conf'.format(confRoot)


class UserDataHandler(tornado.web.RequestHandler):
    """ User Data handler """

    def get(self):
        """ Function get
        Return UserData script from the foreman API

        @return RETURN: user-data script
        """
        hostname = getNameFromSourceIP(getIP(self.request))
        host = foreman.hosts[hostname]
        # Get the hostgroup
        if host['hostgroup_id']:
            hg = foreman.hostgroups[host['hostgroup_id']]
        else:
            hg = None
        # get the domain
        domain = foreman.domains[host['domain_id']]
        ret = host.getUserData(hostgroup=hg,
                               domain=domain['name'],
                               tplFolder='{}/templates/'.format(confRoot))
        p.status(bool(ret), "VM {0}: sent user data".format(hostname))
        self.write(ret)


class MetaDataHandler(tornado.web.RequestHandler):
    """ Meta Data handler """

    def get(self, meta):
        """ Function get
        Return meta data parameters from the foreman API

        @return RETURN: meta data parameters
        """
        hostname = getNameFromSourceIP(getIP(self.request))
        host = foreman.hosts[hostname]
        available_meta = {
            'name': host['name'],
            'instance-id': host['name'],
            'hostname': host['name'],
            'local-hostname': host['name'],
        }
        if meta in available_meta.keys():
            ret = available_meta[meta]
        elif meta == '':
            ret = "\n".join(available_meta)
        else:
            raise tornado.web.HTTPError(status_code=404,
                                        log_message='No such metadata')
        p.status(bool(ret),
                 "VM {0}: sent meta data '{1}' with value '{2}'"
                 .format(hostname, meta, ret))
        self.write(ret)


class StatusPrinter:
    """ Just a nice message printer """
    OKGREEN = '\033[92m'
    FAIL = '\033[91m'

    TABSIZE = 4

    def status(self, res, msg, failed="", eol="\n", quit=True, indent=0):
        """ Function status
        Print status message
        - OK/KO if the result is a boolean
        - Else the result text

        @param res: The status to show
        @param msg: The message to show
        @param failed: The error message if failed
        @param eol: End of line
        @param quit: Exit the system in case of failure
        @param indent: Tab size at the beginning of the line
        @return RETURN: None
        """
        ind = ' ' * indent * self.TABSIZE
        if res is True:
            msg = '{} [{}OK{}] {}'.format(ind, self.OKGREEN, self.ENDC, msg)
        else:
            msg = '{} [{}KO{}] {}'.format(ind, self.FAIL, self.ENDC, msg)
            if failed:
                msg += '\n > {}'.format(failed)
        msg = msg.ljust(140) + eol
        sys.stdout.write(msg)
        if res is False and quit is True:
            sys.exit(0)


def getIP(request):
    if 'X-Forwarded-For' in request.headers.keys():
        return request.headers['X-Forwarded-For']
    else:
        return request.remote_ip


def getNameFromSourceIP(ip):
    return socket.gethostbyaddr(ip)[0]


application = tornado.web.Application([
    (r'.*/user-data', UserDataHandler),
    (r'.*/meta-data/(.*)', MetaDataHandler),
])

if __name__ == "__main__":
    p = StatusPrinter()

    # Read the config file
    config = configparser.ConfigParser()
    config.read(confFile)

    # Update args with values from CLI
    args = {}
    parser = argparse.ArgumentParser(description='This script will run a '
                                     'metadata server connected '
                                     'to a foreman server.',
                                     usage='%(prog)s [options]')
    parser.add_argument('-a', '--admin',
                        help='Username to connect to foreman (default is '
                        '{0}).'.format(config['foreman']['username']),
                        default=config['foreman']['username'])
    parser.add_argument('-p', '--password',
                        help='Password to connect to foreman (default is '
                        '{0}).'.format(config['foreman']['password']),
                        default=config['foreman']['password'])
    parser.add_argument('-i', '--ip',
                        help='IP address of foreman (default is '
                        '{0}).'.format(config['foreman']['ip']),
                        default=config['foreman']['ip'])
    args.update(vars(parser.parse_args()))

    foreman = Foreman(login=args["admin"],
                      password=args["password"],
                      ip=args["ip"])
    print("Run server on port {}".format(config['server']['port']))
    application.listen(config['server']['port'])
    tornado.ioloop.IOLoop.instance().start()
Python
0
@@ -1573,24 +1573,16 @@ n=domain -%5B'name'%5D ,%0A @@ -2858,16 +2858,37 @@ 033%5B91m' +%0A ENDC = '%5C033%5B0m' %0A%0A TA
041f3b69af06dde44a7929675b4f7f6cc056c5c9
Fix missing refresh of apps due to trying to execute both commands in one statement
refreshcommands.py
refreshcommands.py
""" This module is about refreshing Rainmeter from within Sublime Text. This can be either activated via a command or as part of the build system. """ import os.path import sublime import sublime_plugin from . import rainmeter from .path.program_path_provider import get_cached_program_path def calculate_refresh_commands(rm_exe, config, fil, activate, is_inc): refresh_config = [ rm_exe, "!Refresh", config ] if activate: if is_inc: cmds = [rm_exe, "!ActivateConfig", config, "&&"] cmds.extend(refresh_config) return cmds else: cmds = [rm_exe, "!ActivateConfig", config, fil, "&&"] cmds.extend(refresh_config) return cmds else: return refresh_config class RainmeterRefreshConfigCommand(sublime_plugin.ApplicationCommand): """Refresh a given skin file, or Rainmeter if no path is specified.""" def run(self, cmd): # pylint: disable=R0201; sublime text API, no need for class reference """Called when the command is run.""" # Get Rainmeter exe path rm_path = get_cached_program_path() if not rm_path: sublime.error_message( "Error while trying to refresh Rainmeter" + " skin: The Rainmeter executable could not be found." + " Please check the value of your \"rainmeter_path\"" + " setting.") return rainmeter_exe = os.path.join(rm_path, "Rainmeter.exe") # Refresh skin (or whole rainmeter if no skin specified) if not cmd: sublime.status_message("Refreshing Rainmeter") sublime.active_window().run_command( "exec", {"cmd": [rainmeter_exe, "!RefreshApp"]} ) else: config = rainmeter.get_current_config(cmd[0]) fil = rainmeter.get_current_file(cmd[0]) if not fil: fil = "" if not config: sublime.error_message( "Error while trying to refresh Rainmeter skin:" + " The config could not be found. Please check the" + " path of the config and your" + " \"rainmeter_skins_path\" setting.") sublime.status_message("Refreshing config: " + config) # Load activate setting settings = sublime.load_settings("Rainmeter.sublime-settings") activate = settings.get("rainmeter_refresh_and_activate", True) is_inc = fil.endswith(".inc") refresh_commands = calculate_refresh_commands(rainmeter_exe, config, fil, activate, is_inc) sublime.active_window().run_command("exec", {"cmd": refresh_commands}) def description(self): # pylint: disable=R0201; sublime text API, no need for class reference """ Return a description of the command with the given arguments. Used in the menu, if no caption is provided. Return None to get the default description. """ return "Refresh Rainmeter Config" class RainmeterRefreshCommand(sublime_plugin.ApplicationCommand): # pylint: disable=R0903; sublime text API, methods are overriden """Refresh Rainmeter.""" def run(self): # pylint: disable=R0201; sublime text API, no need for class reference """Called when the command is run.""" sublime.run_command("rainmeter_refresh_config", {"cmd": []}) class RainmeterRefreshCurrentSkinCommand(sublime_plugin.TextCommand): """ TextCommands are instantiated once per view. The View object may be retrieved via self.view. Refresh the current skin file opened in a view. """ def run(self, _): # pylint: disable=R0201; sublime text API, no need for class reference """ Called when the command is run. edit param is not used """ # Get current file's path filepath = self.view.file_name() if not filepath: return # Refresh config sublime.run_command("rainmeter_refresh_config", {"cmd": [filepath]}) def is_enabled(self): # pylint: disable=R0201; sublime text API, no need for class reference """ Return True if the command is able to be run at this time. 
The default implementation simply always returns True. """ # Check if current syntax is rainmeter israinmeter = self.view.score_selector(self.view.sel()[0].a, "source.rainmeter") return israinmeter > 0 def description(self): # pylint: disable=R0201; sublime text API, no need for class reference """ Return a description of the command with the given arguments. Used in the menus, and for Undo/Redo descriptions. Return None to get the default description. """ return "Refresh Current Rainmeter Skin"
Python
0.000005
@@ -369,109 +369,21 @@ -refresh_config = %5B%0A rm_exe, %22!Refresh%22, config%0A %5D%0A%0A if activate:%0A if is_inc:%0A +if activate:%0A @@ -427,22 +427,16 @@ , config -, %22&&%22 %5D%0A @@ -441,144 +441,22 @@ - cmds.extend(refresh_config)%0A%0A return cmds%0A else:%0A cmds = %5Brm_exe, %22!ActivateConfig%22, config, fil, %22&&%22%5D +if not is_inc: %0A @@ -473,36 +473,20 @@ mds. -ext +app end( -refresh_con fi -g +l )%0A -%0A @@ -526,30 +526,20 @@ return -refresh_config +None %0A%0A%0Aclass @@ -2459,16 +2459,53 @@ is_inc)%0A + if refresh_commands:%0A @@ -2578,16 +2578,192 @@ mmands%7D) +%0A refresh_config = %5B%0A rainmeter_exe, %22!Refresh%22, config%0A %5D%0A sublime.active_window().run_command(%22exec%22, %7B%22cmd%22: refresh_config%7D) %0A%0A de
f7fc6556e3ef552ed570ad56db7dc3a19b3e75fd
Load config from site
fix-broken-double-redirect/edit.py
fix-broken-double-redirect/edit.py
# -*- coding: utf-8 -*-
import argparse
import os
import re

os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__))
import pywikibot


parser = argparse.ArgumentParser()
parser.add_argument('-c', '--check', action='store_true', dest='check')
parser.set_defaults(check=False)
args = parser.parse_args()
print(args)

os.environ['TZ'] = 'UTC'

site = pywikibot.Site()
site.login()

cat = pywikibot.Page(site, 'Category:快速删除候选')

for sourcePage in site.categorymembers(cat):
    print(sourcePage.title())
    text = sourcePage.text
    if '{{d|bot=Jimmy-bot|g15|' not in text:
        print('\tnot g15')
        continue
    m = re.search(r'#(?:重定向|REDIRECT) ?\[\[(.+?)]]', text, flags=re.I)
    if m:
        middlePage = pywikibot.Page(site, m.group(1))
        logs = list(site.logevents(page=middlePage, total=1))
        if len(logs) == 0:
            print('\tno logs')
            continue
        log = logs[0]
        if log.type() != 'move':
            print('\trecent log not move')
            continue
        targetPage = log.target_page
        print('\ttarget', targetPage.title())
        text = re.sub(r'^{{d\|bot=Jimmy-bot\|g15\|.+\n', '', text)
        text = re.sub(r'(#(?:重定向|REDIRECT) ?\[\[).+?(]])',
                      r'\g<1>{}\g<2>'.format(targetPage.title()), text)
        pywikibot.showDiff(sourcePage.text, text)
        summary = '-delete並修復損壞的雙重重定向,[[Special:Redirect/logid/{}|目標頁已被不留重定向移動]],若認為重定向不合適請提交存廢討論'.format(log.logid())
        print(summary)
        if args.check and input('Save?').lower() not in ['', 'y', 'yes']:
            continue
        sourcePage.text = text
        sourcePage.save(summary=summary, minor=False, asynchronous=True)
    else:
        print('\tcannot get redirect target')
Python
0.000001
@@ -33,16 +33,28 @@ rgparse%0A +import json%0A import o @@ -55,16 +55,16 @@ port os%0A - import r @@ -157,16 +157,83 @@ wikibot%0A +from config import config_page_name # pylint: disable=E0611,W0614%0A %0A%0Aparser @@ -470,16 +470,214 @@ ogin()%0A%0A +config_page = pywikibot.Page(site, config_page_name)%0Acfg = config_page.text%0Acfg = json.loads(cfg)%0Aprint(json.dumps(cfg, indent=4, ensure_ascii=False))%0A%0Aif not cfg%5B'enable'%5D:%0A exit('disabled%5Cn')%0A%0A cat = py @@ -699,25 +699,27 @@ te, -'Category:%E5%BF%AB%E9%80%9F%E5%88%A0%E9%99%A4%E5%80%99%E9%80%89' +cfg%5B'csd_category'%5D )%0A%0Af @@ -1635,88 +1635,22 @@ y = -'-delete%E4%B8%A6%E4%BF%AE%E5%BE%A9%E6%90%8D%E5%A3%9E%E7%9A%84%E9%9B%99%E9%87%8D%E9%87%8D%E5%AE%9A%E5%90%91%EF%BC%8C%5B%5BSpecial:Redirect/logid/%7B%7D%7C%E7%9B%AE%E6%A8%99%E9%A0%81%E5%B7%B2%E8%A2%AB%E4%B8%8D%E7%95%99%E9%87%8D%E5%AE%9A%E5%90%91%E7%A7%BB%E5%8B%95%5D%5D%EF%BC%8C%E8%8B%A5%E8%AA%8D%E7%82%BA%E9%87%8D%E5%AE%9A%E5%90%91%E4%B8%8D%E5%90%88%E9%81%A9%E8%AB%8B%E6%8F%90%E4%BA%A4%E5%AD%98%E5%BB%A2%E8%A8%8E%E8%AB%96' +cfg%5B'summary'%5D .for
ebc43e2bd72bba3c4c660fd008cdfebdd3d8ece0
change url base
flexget/plugins/search_cpasbien.py
flexget/plugins/search_cpasbien.py
from __future__ import unicode_literals, division, absolute_import
import logging
import re
import urllib

from flexget import plugin, validator
from flexget.entry import Entry
from flexget.event import event
from flexget.utils import requests
from flexget.utils.soup import get_soup
from flexget.utils.search import torrent_availability, normalize_unicode

log = logging.getLogger('search_cpasbien')

session = requests.Session()


class SearchCPASBIEN(object):
    schema = {
        'type': 'object',
        'properties': {
            'category': {
                'type': 'string',
                'enum': ['films', 'series', 'musique', 'films-french', '720p',
                         'series-francaise', 'films-dvdrip', 'all', 'films-vostfr',
                         '1080p', 'series-vostfr', 'ebook']
            },
        },
        'required': ['category'],
        'additionalProperties': False
    }

    @plugin.internet(log)
    def search(self, task, entry, config):
        """CPASBIEN search plugin
        Config example:

        tv_search_cpasbien:
            discover:
              what:
                - trakt_list:
                    username: xxxxxxx
                    api_key: xxxxxxx
                    series: watchlist
              from:
                - cpasbien:
                    category: "series-vostfr"
              interval: 1 day
              ignore_estimations: yes

        Category is ONE of: all films series musique films-french 1080p 720p
            series-francaise films-dvdrip films-vostfr series-vostfr ebook
        """

        base_url = 'http://www.cpasbien.pe'
        entries = set()
        for search_string in entry.get('search_strings', [entry['title']]):
            search_string = search_string.replace(' ', '-').lower()
            search_string = search_string.replace('(', '')
            search_string = search_string.replace(')', '')
            query = normalize_unicode(search_string)
            query_url_fragment = urllib.quote_plus(query.encode('utf-8'))
            # http://www.cpasbien.pe/recherche/ncis.html
            if config['category'] == 'all':
                str_url = (base_url, 'recherche', query_url_fragment)
                url = '/'.join(str_url)
            else:
                category_url_fragment = '%s' % config['category']
                str_url = (base_url, 'recherche', category_url_fragment, query_url_fragment)
                url = '/'.join(str_url)
            log.debug('search url: %s' % url + '.html')
            # GET URL
            f = task.requests.get(url + '.html').content
            soup = get_soup(f)
            if soup.findAll(text=re.compile(' 0 torrents')):
                log.debug('search returned no results')
            else:
                nextpage = 0
                while (nextpage >= 0):
                    if (nextpage > 0):
                        newurl = url + '/page-' + str(nextpage)
                        log.debug('-----> NEXT PAGE : %s' % newurl)
                        f1 = task.requests.get(newurl).content
                        soup = get_soup(f1)
                    for result in soup.findAll('div', attrs={'class': re.compile('ligne')}):
                        entry = Entry()
                        link = result.find('a', attrs={'href': re.compile('dl-torrent')})
                        entry['title'] = link.contents[0]
                        # REWRITE URL
                        page_link = link.get('href')
                        link_rewrite = page_link.split('/')
                        # get last value in array remove .html and replace by .torrent
                        endlink = link_rewrite[-1]
                        str_url = (base_url, '/telechargement/', endlink[:-5], '.torrent')
                        entry['url'] = ''.join(str_url)

                        log.debug('Title: %s | DL LINK: %s' % (entry['title'], entry['url']))

                        entry['torrent_seeds'] = (int(result.find('span', attrs={'class': re.compile('seed')}).text))
                        entry['torrent_leeches'] = (int(result.find('div', attrs={'class': re.compile('down')}).text))
                        sizefull = (result.find('div', attrs={'class': re.compile('poid')}).text)
                        size = sizefull[:-3]
                        unit = sizefull[-2:]
                        if unit == 'GB':
                            entry['content_size'] = int(float(size) * 1024)
                        elif unit == 'MB':
                            entry['content_size'] = int(float(size))
                        elif unit == 'KB':
                            entry['content_size'] = int(float(size) / 1024)
                        if(entry['torrent_seeds'] > 0):
                            entries.add(entry)
                        else:
                            log.debug('0 SEED, not adding entry')
                    if soup.find(text=re.compile('Suiv')):
                        nextpage += 1
                    else:
                        nextpage = -1
        return entries


@event('plugin.register')
def register_plugin():
    plugin.register(SearchCPASBIEN, 'cpasbien', groups=['search'], api_ver=2)
Python
0.000001
@@ -1783,18 +1783,18 @@ pasbien. -pe +io '%0A
5ec45f9d8a7b4c54ecca0ad48f244c2ab0b8d532
remove no longer necessary package declaration
src/zeit/content/cp/blocks/tests.py
src/zeit/content/cp/blocks/tests.py
# -*- coding: utf-8 -*-
# Copyright (c) 2009 gocept gmbh & co. kg
# See also LICENSE.txt

import zeit.content.cp.blocks
import zeit.content.cp.testing


def test_suite():
    return zeit.content.cp.testing.FunctionalDocFileSuite(
        'teaser.txt',
        'xml.txt',
        package=zeit.content.cp.blocks)
Python
0
@@ -266,46 +266,6 @@ txt' -,%0A package=zeit.content.cp.blocks )%0A
ac912883be41ca4db298d6b8cf1a0c56dcf99d33
convert dns name to ip for dns-server
ddns.py
ddns.py
#!/usr/bin/env python3

import os
import cgi
import cgitb
import datetime
from configparser import SafeConfigParser
from dnsupdate import doUpdate, isValidV4Addr, isValidV6Addr
import dns.exception

cgitb.enable()

configuration_file = "ddns.ini"


def read_data(domain):
    valid_options = ["password", "dns-server", "nsupdate-key", "origin"]
    cp = SafeConfigParser()
    cp.read(configuration_file)
    if cp.has_section(domain):
        data = dict()
        for option, value in cp.items(domain):
            if option in valid_options:
                data[option] = value
            else:
                print("Error: Found a invalid option for domain", domain, ":", option)
                exit()
        return data
    return None


def print_header():
    print("Content-Type: text/html")    # HTML is following
    print()                             # blank line, end of headers


ip_arguments = ["ip4addr", "ip6addr"]


def read_arguments():
    required_arguments = ["domain", "password"]
    ip_check = {"ip4addr": isValidV4Addr, "ip6addr": isValidV6Addr}
    valid_arguments = required_arguments + ip_arguments
    args = dict()

    # Read the arguments
    form = cgi.FieldStorage()
    for arg in valid_arguments:
        if arg in form:
            args[arg] = form[arg].value

    # Verify that they are complete
    error = False
    for reqarg in required_arguments:
        if reqarg not in args:
            print("Error:", reqarg, "was not passed<br>")
            error = True

    foundAny = False
    for iparg in ip_arguments:
        if iparg in args:
            try:
                ip_check[iparg](args[iparg])
                foundAny = True
            except dns.exception.SyntaxError as e:
                print("Error:", args[iparg], "is not a valid", iparg)
                error = True

    if not error and not foundAny:
        # Get IP from REMOTE_ADDR anc check whether it is v4 or v6
        ip = os.environ["REMOTE_ADDR"]
        for iparg in ip_arguments:
            try:
                if ip_check[iparg](ip):
                    args[iparg] = ip
            except dns.exception.SyntaxError as e:
                pass

    if error is True:
        print("Error: There were errors while parsing the arguments.<br>")
        exit()

    return args


def generate_nsupdate_key_string(ddata):
    origin = ddata["origin"]
    if not origin.endswith("."):
        origin = origin + "."
    return {origin: ddata["nsupdate-key"]}


def generate_action_string(domain, ddata, ip, ipaddr_type, action="update"):
    ip_type = {"ip4addr": "A", "ip6addr": "AAAA"}
    TTL = 60
    TYPE = ip_type[ipaddr_type]
    return [action, domain, str(TTL), TYPE, ip]


def main():
    print_header()

    # Parse the passed arguments
    arguments = read_arguments()
    domain = arguments["domain"]

    # Read the 'configuration_file'
    data = read_data(domain)

    # Is the request valid and the domain configured?
    if not (data is not None and arguments["password"] == data["password"]):
        print("Error: Invalid domain/password combination")
        exit()

    nsu_str = generate_nsupdate_key_string(data)

    def date_update_str():
        return ["update", domain, "60", "TXT",
                "last update: %s Europe/Berlin" % datetime.datetime.now()]

    # add date record
    doUpdate(data["dns-server"], nsu_str, data["origin"], False, date_update_str())

    # Update each requested ipaddr_type, or delete if not passed
    for ipaddr_type in ip_arguments:
        if ipaddr_type in arguments:
            a = "update"
        else:
            a = "delete"
        action = generate_action_string(
            domain, data, arguments[ipaddr_type], ipaddr_type, action=a)
        doUpdate(data["dns-server"], nsu_str, data["origin"], False, action)


if __name__ == "__main__":
    main()
Python
0.999938
@@ -67,16 +67,30 @@ atetime%0A +import socket%0A from con @@ -3220,16 +3220,120 @@ (data)%0A%0A + # convert dns server name to ip%0A data%5B%22dns-server%22%5D = socket.gethostbyname(data%5B%22dns-server%22%5D)%0A%0A%0A def
69eafa95df4bdeb143d40c321f0a312d06efff1f
Add __all__ to segmentation package
skimage/segmentation/__init__.py
skimage/segmentation/__init__.py
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
Python
0.000919
@@ -322,8 +322,295 @@ rom_one%0A +%0A%0A__all__ = %5B'random_walker',%0A 'felzenszwalb',%0A 'slic',%0A 'quickshift',%0A 'find_boundaries',%0A 'visualize_boundaries',%0A 'mark_boundaries',%0A 'clear_border',%0A 'join_segmentations',%0A 'relabel_from_one'%5D%0A
c1bb9d50205cdc850f0f3200d747d38252230d89
Correct a typo.
examples/julia-set/bench_dist.py
examples/julia-set/bench_dist.py
# encoding: utf-8
# ---------------------------------------------------------------------------
#  Copyright (C) 2008-2014, IPython Development Team and Enthought, Inc.
#  Distributed under the terms of the BSD License.  See COPYING.rst.
# ---------------------------------------------------------------------------

"""
Benchmark calculating the Julia set with Distarray
for various array distributions and number of engines.

Usage:
    $ dacluster start -n20
    ...
    $ python bench_dist.py
"""

from timeit import default_timer as clock

from IPython.parallel import Client
from matplotlib import pyplot

from distarray.dist import Context, Distribution
from distarray.dist.decorators import local, vectorize


# Make an empty distributed array
def make_empty_da(resolution, dist, context):
    """Create the arr we will build the fractal with."""
    distribution = Distribution(context,
                                (resolution[0], resolution[1]),
                                dist=dist)
    out = context.empty(distribution, dtype=complex)
    return out


# Drawing the coordinate plane directly like this is currently much
# faster than trying to do it by indexing a distarray.
def draw_coord(arr, re_ax, im_ax, resolution):
    """Draw the complex coordinate plane"""
    re_step = float(re_ax[1] - re_ax[0]) / resolution[0]
    im_step = float(im_ax[1] - im_ax[0]) / resolution[1]
    for i in arr.distribution[0].global_iter:
        for j in arr.distribution[1].global_iter:
            arr.global_index[i, j] = complex(re_ax[0] + re_step*i,
                                             im_ax[0] + im_step*j)
    return arr


# This exactly the same function as the one in julia_numpy.py, but here
# we use distarray's vectorize decorator.
def julia(z, c, z_max, n_max):
    n = 0
    fn = lambda z, c: z**2 + c
    while abs(z) < z_max and n < n_max:
        z = fn(z, c)
        n += 1
    return n


def test_distarray(dist, context):
    global draw_coord
    global julia

    local_draw_coord = local(draw_coord)
    vect_julia = vectorize(julia)

    darr = make_empty_da(resolution, dist, context)
    darr = local_draw_coord(darr, re_ax, im_ax, resolution)

    start = clock()
    darr = vect_julia(darr, c, z_max, n_max)
    stop = clock()
    return stop - start


# Grid parameteres
re_ax = (-1., 1.)
im_ax = (-1., 1.)
resolution = (480, 480)

# Julia set parameters, changing these is fun.
c = complex(0., .75)
z_max = 20
n_max = 100

# benchmark parameters

# number of engines
engines = range(4, 21, 2)

# array distributions
dists = [{0: 'c', 1: 'b'},
         {0: 'c', 1: 'c'},
         {0: 'b'},
         {0: 'c'},
         {0: 'b', 1: 'b'},
         {0: 'b', 1: 'c'}]

dist_data = [[] for i in range(len(dists))]
engine_data = []

client = Client()
for num_engines in engines:
    targets = list(range(num_engines))
    context = Context(client, targets=targets)
    print(num_engines)
    for i, dist in enumerate(dists):
        print(dist)
        time = test_distarray(dist, context)
        dist_data[i].append(time)

for i, data in enumerate(dist_data):
    pyplot.plot(list(engines), data, label=dists[i].__repr__(), lw=2)

pyplot.title('Julia set benchmark - array distribution type vs number of '
             'engines')
pyplot.xticks(list(engines), list(engines))
pyplot.xlabel('number of engines')
pyplot.ylabel('time (s)')
pyplot.legend(loc='upper right')
pyplot.savefig("julia_timing.png", dpi=100)
pyplot.show()
Python
0.023481
@@ -2273,17 +2273,16 @@ arameter -e s%0Are_ax
d59f3259875ffac49668ffb3ce34ca511385ebb7
Fix USE_X_FORWARDED_FOR for proxied environments
rated/settings.py
rated/settings.py
from django.conf import settings DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default') DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100) DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60) RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429) RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '') DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', []) REALMS = getattr(settings, 'RATED_REALMS', {}) REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {}) # Redis config parameters REDIS = getattr(settings, 'RATED_REDIS', {})
Python
0
@@ -597,16 +597,86 @@ ED_REDIS', %7B%7D)%0A%0A +USE_X_FORWARDED_FOR = getattr(settings, 'USE_X_FORWARDED_FOR', False)%0A
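How rated's middleware consumes the new flag is outside this record, so the following is only a sketch of the usual pattern: when the app sits behind a trusted reverse proxy, the originating client address arrives in the X-Forwarded-For header rather than in REMOTE_ADDR.

    def client_ip(meta, use_x_forwarded_for=False):
        # X-Forwarded-For carries a comma-separated proxy chain;
        # the first entry is the original client.
        if use_x_forwarded_for:
            forwarded = meta.get('HTTP_X_FORWARDED_FOR')
            if forwarded:
                return forwarded.split(',')[0].strip()
        return meta.get('REMOTE_ADDR')

    demo = {'HTTP_X_FORWARDED_FOR': '203.0.113.7, 10.0.0.2',
            'REMOTE_ADDR': '10.0.0.2'}
    print(client_ip(demo, use_x_forwarded_for=True))   # 203.0.113.7

Defaulting the setting to False is the safe choice, since the header is client-forgeable unless a proxy you control sets it.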
b613ecdb3e543a4c39c5bd80359c81e504c1da33
add -mlong-calls to gcc compile parameter
examples/module/rtconfig_lm3s.py
examples/module/rtconfig_lm3s.py
# bsp name BSP = 'lm3s8962' # toolchains EXEC_PATH = 'C:/Program Files/CodeSourcery/Sourcery G++ Lite/bin' PREFIX = 'arm-none-eabi-' CC = PREFIX + 'gcc' CXX = PREFIX + 'g++' AS = PREFIX + 'gcc' AR = PREFIX + 'ar' LINK = PREFIX + 'gcc' TARGET_EXT = 'so' SIZE = PREFIX + 'size' OBJDUMP = PREFIX + 'objdump' OBJCPY = PREFIX + 'objcopy' DEVICE = ' -mcpu=cortex-m3' CFLAGS = DEVICE + ' -mthumb -Dsourcerygxx -O0 -fPIC' AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp' LFLAGS = DEVICE + ' -mthumb -Wl,-z,max-page-size=0x4 -shared -fPIC -e main -nostdlib' CPATH = '' LPATH = ''
Python
0.000002
@@ -402,16 +402,29 @@ -mthumb +-mlong-calls -Dsource
3299cfb4d28999dd266f6fba58804c2d79ad5724
Use absolute path for base_path when not using local_repo_path
rdopkg/repoman.py
rdopkg/repoman.py
import os import re import shutil import time from rdopkg import exception from rdopkg.conf import cfg from rdopkg.utils import log from rdopkg.utils import cmd from rdopkg.utils import tidy_ssh_user from rdopkg import helpers def repo_name_from_url(repo_url): if repo_url: i = repo_url.rfind('/') if i != -1: d = repo_url[i + 1:] if d.endswith('.git'): d = d[:-4] return d return None class RepoManager(object): repo_desc = 'git' def __init__(self, base_path=None, url=None, local_repo_path=None, verbose=False, user=None): # remote repo (base_path, url) XOR local repo (local_repo_path) assert bool(base_path and url) != bool(local_repo_path) self.user = user if not self.user: # we need a user, so pick the current user by default env = os.environ.copy() # USERNAME is an env var used by gerrit self.user = env.get('USERNAME') or env.get('USER') if verbose: log.info('Using user %s with repo %s' % (self.user, url)) self.url = tidy_ssh_user(url, self.user) self.verbose = verbose if local_repo_path: self.repo_path = os.path.abspath(local_repo_path) self.base_path, self.repo_name = \ os.path.split(self.repo_path) else: self.base_path = base_path self.repo_name = repo_name_from_url(self.url) if not self.repo_name: raise exception.RepoError( what='Failed to parse %s repo URL: %s' % (self.repo_desc, self.url)) self.repo_path = os.path.join(self.base_path, self.repo_name) def _nuke(self): log.info("Removing %s repo: %s" % (self.repo_desc, self.repo_path)) shutil.rmtree(self.repo_path, ignore_errors=True) def _clone(self): if self.verbose: log.info("Cloning {desc} repo: {url}\n" " {space} into: {path}".format( desc=self.repo_desc, space=len(self.repo_desc) * ' ', url=self.url, path=self.repo_path)) with helpers.cdir(self.base_path): cmd.git('clone', self.url, self.repo_name, log_cmd=self.verbose) def _fetch(self, force=False): need_fetch = True with self.repo_dir(): if not force: try: t_fetch = os.path.getmtime('.git/FETCH_HEAD') t_now = int(time.time()) delta = t_now - t_fetch if delta < cfg['FETCH_PERIOD']: need_fetch = False except Exception: pass if need_fetch: if self.verbose: log.info("Fetching %s repo: %s" % ( self.repo_desc, self.repo_path)) cmd.git('fetch', 'origin', log_cmd=self.verbose) cmd.git('checkout', '-f', 'master', log_cmd=self.verbose) cmd.git('reset', '--hard', 'origin/master', log_cmd=self.verbose) def setup_review(self): with self.repo_dir(): with helpers.setenv(USERNAME=self.user): cmd.git('review', '-s', direct=True) def review(self): with self.repo_dir(): with helpers.setenv(USERNAME=self.user): cmd.git('review', direct=True) def get_review(self, review_id): with self.repo_dir(): with helpers.setenv(USERNAME=self.user): cmd.git('review', '-d', str(review_id), direct=True) def repo_dir(self): return helpers.cdir(self.repo_path) def git_check_remote(self): assert(self.url) with self.repo_dir(): remotes = cmd.git('remote', '-v', log_cmd=False) pattern = '^origin\s+%s\s+\(fetch\)$' % re.escape(self.url) if not re.search(pattern, remotes, re.MULTILINE): raise exception.RepoError(what="origin isn't set to expected URL: " "%s" % self.url) def init(self, force_fetch=False): if not self.url: if not os.path.isdir(self.repo_path): raise exception.NotADirectory(path=self.repo_path) return if self.base_path and not os.path.isdir(self.base_path): if self.verbose: log.info("Creating base directory: %s" % self.base_path) os.makedirs(self.base_path) if not os.path.isdir(self.repo_path): self._clone() else: try: self.git_check_remote() except exception.RepoError as e: if self.verbose: log.warn("%s 
repo didn't pass the checks, renewing: %s" % (self.repo_desc, e)) self._nuke() self._clone() else: self._fetch(force=force_fetch)
Python
0.000075
@@ -1491,16 +1491,32 @@ _path = +os.path.abspath( base_pat @@ -1516,16 +1516,17 @@ ase_path +) %0A
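The wrapper matters because the else-branch previously stored the caller-supplied base_path verbatim, so a relative path produced a relative repo_path whose meaning changed with the process's working directory; the local_repo_path branch just above already normalized with abspath. A small illustration (the paths are made up):

    import os

    base = 'workdir/repos'                # caller-supplied relative path
    print(os.path.join(base, 'myrepo'))   # 'workdir/repos/myrepo', relative
    base = os.path.abspath(base)          # e.g. '/home/user/workdir/repos'
    print(os.path.join(base, 'myrepo'))   # absolute, stable across chdir()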
bde917596ce45090f6e719c438af251ea390b59e
Replace --platform in pullers with specific args, use custem docker client config dir
def.bzl
def.bzl
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. def repositories(): native.new_http_archive( name = "httplib2", url = "https://codeload.github.com/httplib2/httplib2/tar.gz/v0.11.3", sha256 = "d9f568c183d1230f271e9c60bd99f3f2b67637c3478c9068fea29f7cca3d911f", strip_prefix = "httplib2-0.11.3/python2/httplib2/", type = "tar.gz", build_file_content = """ py_library( name = "httplib2", srcs = glob(["**/*.py"]), data = ["cacerts.txt"], visibility = ["//visibility:public"] )""", ) # Used by oauth2client native.new_http_archive( name = "six", url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz", sha256 = "e24052411fc4fbd1f672635537c3fc2330d9481b18c0317695b46259512c91d5", strip_prefix = "six-1.9.0/", type = "tar.gz", build_file_content = """ # Rename six.py to __init__.py genrule( name = "rename", srcs = ["six.py"], outs = ["__init__.py"], cmd = "cat $< >$@", ) py_library( name = "six", srcs = [":__init__.py"], visibility = ["//visibility:public"], )""", ) # Used for authentication in containerregistry native.new_http_archive( name = "oauth2client", url = "https://codeload.github.com/google/oauth2client/tar.gz/v4.0.0", sha256 = "7230f52f7f1d4566a3f9c3aeb5ffe2ed80302843ce5605853bee1f08098ede46", strip_prefix = "oauth2client-4.0.0/oauth2client/", type = "tar.gz", build_file_content = """ py_library( name = "oauth2client", srcs = glob(["**/*.py"]), visibility = ["//visibility:public"], deps = [ "@httplib2//:httplib2", "@six//:six", ] )""", ) # Used for parallel execution in containerregistry native.new_http_archive( name = "concurrent", url = "https://codeload.github.com/agronholm/pythonfutures/tar.gz/3.0.5", sha256 = "a7086ddf3c36203da7816f7e903ce43d042831f41a9705bc6b4206c574fcb765", strip_prefix = "pythonfutures-3.0.5/concurrent/", type = "tar.gz", build_file_content = """ py_library( name = "concurrent", srcs = glob(["**/*.py"]), visibility = ["//visibility:public"] )""", ) # For packaging python tools. native.git_repository( name = "subpar", remote = "https://github.com/google/subpar", commit = "07ff5feb7c7b113eea593eb6ec50b51099cf0261", )
Python
0
@@ -2806,23 +2806,16 @@ ls.%0A -native. git_repo
5e782f6a23a55a1db13a100803f73a9e1ff19c4c
update external-cblas patch directive (#23787)
var/spack/repos/builtin/packages/gsl/package.py
var/spack/repos/builtin/packages/gsl/package.py
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Gsl(AutotoolsPackage, GNUMirrorPackage): """The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. It is free software under the GNU General Public License. The library provides a wide range of mathematical routines such as random number generators, special functions and least-squares fitting. There are over 1000 functions in total with an extensive test suite.""" homepage = "http://www.gnu.org/software/gsl" gnu_mirror_path = "gsl/gsl-2.3.tar.gz" version('2.6', sha256='b782339fc7a38fe17689cb39966c4d821236c28018b6593ddb6fd59ee40786a8') version('2.5', sha256='0460ad7c2542caaddc6729762952d345374784100223995eb14d614861f2258d') version('2.4', sha256='4d46d07b946e7b31c19bbf33dda6204d7bedc2f5462a1bae1d4013426cd1ce9b') version('2.3', sha256='562500b789cd599b3a4f88547a7a3280538ab2ff4939504c8b4ac4ca25feadfb') version('2.2.1', sha256='13d23dc7b0824e1405f3f7e7d0776deee9b8f62c62860bf66e7852d402b8b024') version('2.1', sha256='59ad06837397617f698975c494fe7b2b698739a59e2fcf830b776428938a0c66') version('2.0', sha256='e361f0b19199b5e6c21922e9f16adf7eca8dd860842802424906d0f83485ca2d') version('1.16', sha256='73bc2f51b90d2a780e6d266d43e487b3dbd78945dd0b04b14ca5980fe28d2f53') variant('external-cblas', default=False, description='Build against external blas') # from https://dev.gentoo.org/~mgorny/dist/gsl-2.3-cblas.patch.bz2 patch('gsl-2.3-cblas.patch', when="+external-cblas") conflicts('+external-cblas', when="@:2.2.9999") depends_on('m4', type='build', when='+external-cblas') depends_on('autoconf', type='build', when='+external-cblas') depends_on('automake', type='build', when='+external-cblas') depends_on('libtool', type='build', when='+external-cblas') depends_on('blas', when='+external-cblas') @property def force_autoreconf(self): # The external cblas patch touches configure return self.spec.satisfies('+external-cblas') def configure_args(self): configure_args = [] if self.spec.satisfies('+external-cblas'): configure_args.append('--with-external-cblas') configure_args.append('CBLAS_CFLAGS=%s' % self.spec['blas'].headers.include_flags) configure_args.append('CBLAS_LIBS=%s' % self.spec['blas'].libs.ld_flags) return configure_args
Python
0
@@ -1693,16 +1693,27 @@ , when=%22 +@2.3:2.5.99 +externa @@ -1774,13 +1774,12 @@ 2.99 -99 %22)%0A +%0A
ce869c128d728af4c296eb96ecae0db6f30996a7
Make brnn_ptb_test write checkpoints to temp directory
sonnet/examples/brnn_ptb_test.py
sonnet/examples/brnn_ptb_test.py
# Copyright 2017 The Sonnet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Tests for brnn_ptb.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import random import string from sonnet.examples import brnn_ptb import tensorflow as tf FLAGS = tf.flags.FLAGS def _make_random_word(): return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(random.randint(1, 15))) def _make_random_vocab(): # Make a limited vocab that all the sentences should be made out of, as the # BRNN model builds a finite vocab internally. return [_make_random_word() for _ in range(1000)] def _make_sentence_with_vocab(vocab): return ' '.join(vocab[random.randint(0, len(vocab) - 1)] for _ in range(random.randint(1, 30))) def _make_fake_corpus_with_vocab(vocab, corpus_size): return '\n'.join(_make_sentence_with_vocab(vocab) for _ in range(corpus_size)) class BrnnPtbTest(tf.test.TestCase): def testScriptRunsWithFakeData(self): # Make some small fake data in same format as real PTB. tmp_dir = tf.test.get_temp_dir() vocab = _make_random_vocab() with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.train.txt'), 'w') as f: f.write(_make_fake_corpus_with_vocab(vocab, 1000)) with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.valid.txt'), 'w') as f: f.write(_make_fake_corpus_with_vocab(vocab, 100)) with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.test.txt'), 'w') as f: f.write(_make_fake_corpus_with_vocab(vocab, 100)) # Make model small, only run for 1 epoch. FLAGS.num_training_epochs = 1 FLAGS.hidden_size = 50 FLAGS.embedding_size = 50 FLAGS.data_path = tmp_dir # Do training, test, evaluation. brnn_ptb.main(None) if __name__ == '__main__': tf.test.main()
Python
0
@@ -2390,16 +2390,190 @@ tmp_dir +%0A # Checkpoint to tmp directory so that test runs hermetically, and there is%0A # no possibility of reusing checkpoints from previous runs.%0A FLAGS.logbasedir = tmp_dir %0A%0A #
88a1cb9001b19f769f8be5dcde5d87be67c61a2f
comment out 1.6
streams/inference/back_integrate.py
streams/inference/back_integrate.py
# coding: utf-8 """ Contains likelihood function specific to back-integration and the Rewinder """ from __future__ import division, print_function __author__ = "adrn <adrn@astro.columbia.edu>" # Standard library import os, sys # Third-party import numpy as np import astropy.units as u # Project from ..coordinates import _hel_to_gc, _gc_to_hel from ..dynamics import Particle from ..integrate import LeapfrogIntegrator __all__ = ["back_integration_likelihood"] def xyz_sph_jac(hel): l,b,d,mul,mub,vr = hel.T cosl, sinl = np.cos(l), np.sin(l) cosb, sinb = np.cos(b), np.sin(b) Rsun = 8. dtmnt = d**2*(Rsun**2*cosb + Rsun*d*sinb**2*cosl - 2*Rsun*d*cosl + d**2*sinb**4*cosb - d**2*cosb**5 + 2*d**2*cosb**3)*cosb deet = np.log(np.abs(dtmnt)) return deet def back_integration_likelihood(t1, t2, dt, potential, p_hel, s_hel, tub): p_gc = _hel_to_gc(p_hel) s_gc = _hel_to_gc(s_hel) gc = np.vstack((s_gc,p_gc)).copy() acc = np.zeros_like(gc[:,:3]) integrator = LeapfrogIntegrator(potential._acceleration_at, np.array(gc[:,:3]), np.array(gc[:,3:]), args=(gc.shape[0], acc)) times, rs, vs = integrator.run(t1=t1, t2=t2, dt=dt) s_orbit = np.vstack((rs[:,0][:,np.newaxis].T, vs[:,0][:,np.newaxis].T)).T p_orbits = np.vstack((rs[:,1:].T, vs[:,1:].T)).T # These are the unbinding time indices for each particle t_idx = np.array([np.argmin(np.fabs(times - t)) for t in tub]) # get back 6D positions for stars and satellite at tub p_x = np.array([p_orbits[jj,ii] for ii,jj in enumerate(t_idx)]) s_x = np.array([s_orbit[jj,0] for jj in t_idx]) rel_x = p_x-s_x p_x_hel = _gc_to_hel(p_x) jac1 = xyz_sph_jac(p_x_hel) r_tide = potential._tidal_radius(2.5e8, s_x)*1.6 #v_esc = potential._escape_velocity(2.5e8, r_tide=r_tide) v_disp = 0.017198632325 R = np.sqrt(np.sum(rel_x[...,:3]**2, axis=-1)) V = np.sqrt(np.sum(rel_x[...,3:]**2, axis=-1)) lnR = np.log(R) lnV = np.log(V) sigma_r = 0.55 mu_r = np.log(r_tide) r_term = -0.5*(2*np.log(sigma_r) + ((lnR-mu_r)/sigma_r)**2) - np.log(R**3) sigma_v = 0.8 mu_v = np.log(v_disp) v_term = -0.5*(2*np.log(sigma_v) + ((lnV-mu_v)/sigma_v)**2) - np.log(V**3) return r_term + v_term + jac1
Python
0
@@ -1826,16 +1826,17 @@ e8, s_x) +# *1.6%0A
19f293bda612e0699c3fdb31de5c5cfebca6fb5e
make demo.py executable
demo.py
demo.py
#!/usr/bin/python """ The MIT License (MIT) Copyright (c) 2016 Luca Weiss Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import sys from PyQt5.QtCore import * from PyQt5.QtWidgets import * from waitingspinnerwidget import QtWaitingSpinner class Demo(QWidget): sb_roundness = None sb_opacity = None sb_fadeperc = None sb_lines = None sb_line_length = None sb_line_width = None sb_inner_radius = None sb_rev_s = None btn_start = None btn_stop = None btn_pick_color = None spinner = None def __init__(self): super().__init__() self.init_ui() def init_ui(self): grid = QGridLayout() groupbox1 = QGroupBox() groupbox1_layout = QHBoxLayout() groupbox2 = QGroupBox() groupbox2_layout = QGridLayout() button_hbox = QHBoxLayout() self.setLayout(grid) self.setWindowTitle("QtWaitingSpinner Demo") self.setWindowFlags(Qt.Dialog) # SPINNER self.spinner = QtWaitingSpinner(self) # Spinboxes self.sb_roundness = QDoubleSpinBox() self.sb_opacity = QDoubleSpinBox() self.sb_fadeperc = QDoubleSpinBox() self.sb_lines = QSpinBox() self.sb_line_length = QDoubleSpinBox() self.sb_line_width = QDoubleSpinBox() self.sb_inner_radius = QDoubleSpinBox() self.sb_rev_s = QDoubleSpinBox() # set spinbox default values self.sb_roundness.setValue(70) self.sb_roundness.setRange(0, 9999) self.sb_opacity.setValue(15) self.sb_opacity.setRange(0, 9999) self.sb_fadeperc.setValue(70) self.sb_fadeperc.setRange(0, 9999) self.sb_lines.setValue(12) self.sb_lines.setRange(1, 9999) self.sb_line_length.setValue(10) self.sb_line_length.setRange(0, 9999) self.sb_line_width.setValue(5) self.sb_line_width.setRange(0, 9999) self.sb_inner_radius.setValue(10) self.sb_inner_radius.setRange(0, 9999) self.sb_rev_s.setValue(1) self.sb_rev_s.setRange(0.1, 9999) # Buttons self.btn_start = QPushButton("Start") self.btn_stop = QPushButton("Stop") self.btn_pick_color = QPushButton("Pick Color") # Connects self.sb_roundness.valueChanged.connect(self.set_roundness) self.sb_opacity.valueChanged.connect(self.set_opacity) self.sb_fadeperc.valueChanged.connect(self.set_fadeperc) self.sb_lines.valueChanged.connect(self.set_lines) self.sb_line_length.valueChanged.connect(self.set_line_length) self.sb_line_width.valueChanged.connect(self.set_line_width) self.sb_inner_radius.valueChanged.connect(self.set_inner_radius) self.sb_rev_s.valueChanged.connect(self.set_rev_s) self.btn_start.clicked.connect(self.spinner_start) self.btn_stop.clicked.connect(self.spinner_stop) self.btn_pick_color.clicked.connect(self.show_color_picker) # Layout adds groupbox1_layout.addWidget(self.spinner) groupbox1.setLayout(groupbox1_layout) 
groupbox2_layout.addWidget(QLabel("Roundness:"), *(1, 1)) groupbox2_layout.addWidget(self.sb_roundness, *(1, 2)) groupbox2_layout.addWidget(QLabel("Opacity:"), *(2, 1)) groupbox2_layout.addWidget(self.sb_opacity, *(2, 2)) groupbox2_layout.addWidget(QLabel("Fade Perc:"), *(3, 1)) groupbox2_layout.addWidget(self.sb_fadeperc, *(3, 2)) groupbox2_layout.addWidget(QLabel("Lines:"), *(4, 1)) groupbox2_layout.addWidget(self.sb_lines, *(4, 2)) groupbox2_layout.addWidget(QLabel("Line Length:"), *(5, 1)) groupbox2_layout.addWidget(self.sb_line_length, *(5, 2)) groupbox2_layout.addWidget(QLabel("Line Width:"), *(6, 1)) groupbox2_layout.addWidget(self.sb_line_width, *(6, 2)) groupbox2_layout.addWidget(QLabel("Inner Radius:"), *(7, 1)) groupbox2_layout.addWidget(self.sb_inner_radius, *(7, 2)) groupbox2_layout.addWidget(QLabel("Rev/s:"), *(8, 1)) groupbox2_layout.addWidget(self.sb_rev_s, *(8, 2)) groupbox2.setLayout(groupbox2_layout) button_hbox.addWidget(self.btn_start) button_hbox.addWidget(self.btn_stop) button_hbox.addWidget(self.btn_pick_color) grid.addWidget(groupbox1, *(1, 1)) grid.addWidget(groupbox2, *(1, 2)) grid.addLayout(button_hbox, *(2, 1)) self.spinner.start() self.show() def set_roundness(self): self.spinner.setRoundness(self.sb_roundness.value()) def set_opacity(self): self.spinner.setMinimumTrailOpacity(self.sb_opacity.value()) def set_fadeperc(self): self.spinner.setTrailFadePercentage(self.sb_fadeperc.value()) def set_lines(self): self.spinner.setNumberOfLines(self.sb_lines.value()) def set_line_length(self): self.spinner.setLineLength(self.sb_line_length.value()) def set_line_width(self): self.spinner.setLineWidth(self.sb_line_width.value()) def set_inner_radius(self): self.spinner.setInnerRadius(self.sb_inner_radius.value()) def set_rev_s(self): self.spinner.setRevolutionsPerSecond(self.sb_rev_s.value()) def spinner_start(self): self.spinner.start() def spinner_stop(self): self.spinner.stop() def show_color_picker(self): self.spinner.setColor(QColorDialog.getColor()) if __name__ == '__main__': app = QApplication(sys.argv) main = Demo() sys.exit(app.exec())
Python
0.000002
c2a3443bd129b51df82826806829d50d6c01ee69
remove password
demo.py
demo.py
import chineseseg string = "蘇迪勒颱風造成土石崩塌,供應台北市用水的南勢溪挾帶大量泥沙,原水濁度一度飆高。" ckip = chineseseg.Ckip("wlzhuang", "xxxxaaaackip") stanford = chineseseg.stanford("/home/wlzhuang/stanford-segmenter-2015-04-20/stanford-segmenter-3.5.2.jar", debug=True) print( "stanford:", stanford.segment(string) ) print( "ckip:", ckip.segment(string) )
Python
0.000449
@@ -96,32 +96,31 @@ ip(%22 -wlzhuang%22, %22xxxxaaaackip +myaccount%22, %22mypassword %22)%0A%0A
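Beyond scrubbing the literal, the usual follow-up is to load credentials from the environment so they never enter version control. A minimal sketch against the Ckip constructor seen above; the environment variable names are assumptions, not part of chineseseg:

    import os
    import chineseseg

    # os.environ[...] raises KeyError early if a credential is missing.
    ckip = chineseseg.Ckip(os.environ['CKIP_ACCOUNT'],
                           os.environ['CKIP_PASSWORD'])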
f52a1d0a8a02da4504240d13cb2ff2d321f00983
Update comments.
dfpm.py
dfpm.py
#!/usr/bin/env python # encoding: utf-8 """ dfpm Usage: dfpm [--directory DIR] [--platform PLATFORM] update dfpm [--directory DIR] [--platform PLATFORM] install <package>... dfpm [--directory DIR] [--platform PLATFORM] remove <package>... dfpm [--directory DIR] [--platform PLATFORM] upgrade [<package>...] dfpm [--directory DIR] [--platform PLATFORM] show [<package>...] dfpm -h | --help dfpm -l | --license dfpm -v | --version Options: -d --directory DIR Directory to install packages to. -p --platform PLATFORM Override platform detection. Valid values: linux / osx / windows -c --config CONFIGFILE Use a different config file. -h --help Display the help text. -l --license Display the license -v --version Disaply the version Examples: dfpm install dwarffortress Install Dwarf Fortress. dfpm install dwarffortress phoebus lazynewbpack Install Dwarf Fortress, Phoebus tileset, and Lazy Newb Pack embark profiles. dfpm install phoebus Install Phoebus tileset, will install Dwarf Fortress if it isn't present. dfpm --directory ~/foobar/ install dwarffortress Install Dwarf Fortress in "~/foobar/". dfpm --platform windows install dwarffortress Install the Windows version of Dwarf Fortress, even if the current OS is different. dfpm update Check for updates to packages. dfpm upgrade Upgrade all packages. dfpm upgrade dwarffortress Upgrade only Dwarf Fortress. dfpm upgrade dwarffortress phoebus Upgrade Dwarf Fortress and Phoebus packages. dfpm show Show package information for all installed packages. dfpm show dwarffortress Show package information for package "dwarffortress" """ license = """ Copyright (c) 2012, haesken All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of haesken nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL haesken BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ version = "0.4.0" from os import getcwd, path from docopt import docopt import json import sys sys.path.append("modules/") import dftlib class packageManager(object): def __init__(self, path_root_dir, platform, path_config): """ Set up paths and file objects to use. """ self.df_paths = dftlib.make_df_paths(path_root_dir, platform) self.platform = platform self.config = json.loads(open(path_config, "r").read()) def _pkgs_download_available(self): """ Download the list of available packages. 
""" return dftlib.download_url(self.config["urls"]["pkg_list"]) def _pkgs_update_available(self): """ If avilable packages.json doesn't exist, download it. If it does exist download a fresh copy, and if the versions differ, write the contents of the new file to the old one. """ if not path.exists(self.config["paths"]["files"]["pkg_list"]): dftlib.write(self.config["paths"]["files"]["pkg_list"], self._pkgs_download_available()) else: pkgs_new = json.dumps(self._pkgs_download_available()) pkgs_cur = json.dumps(dftlib.read( self.config["paths"]["files"]["pkg_list"])) if int(pkgs_new["version"]) > int(pkgs_cur["version"]): dftlib.write(self.pkgs_cur_path, pkgs_new) def _pkgs_get_available(self): """ If there is no package list in the working dir, download one.""" if not path.exists(self.config["paths"]["files"]["pkg_list"]): self._pkgs_update_available() return json.dumps(dftlib.read( self.config["paths"]["files"]["pkg_list"])) def install(self, package_name): pkgs_avail = self._pkgs_get_available() dftlib.ensure_dir(self.config["paths"]["dirs"]["tmp"]) pass """ If the package manifest is not present, download it. If the package tar is not present, download it. Read package manifest. Verify the checksum provided in the manifest. if the checksum is correct: Extract the archive. Symlink the extracted files. if symlinking succeeded: read init options from manifest else: raise checksum exception """ def remove(self, package_name): pass def update(self): self._pkgs_update_available() def upgrade(self, package_name): pass def show(self, package_name): pass def main(args): """ Run selected options. """ if args["--license"]: print(license) sys.exit() if args["--version"]: print(version) sys.exit() if args["--platform"] is not None: platform = args["--platform"] else: platform = dftlib.detect_platform() if args["--directory"] is not None: path_root_dir = args["--directory"] else: path_root_dir = getcwd() if args["--config"] is not None: path_config = args["--config"] else: path_config = "dfpm_config.json" manage = packageManager(path_root_dir, platform, path_config) if args["install"]: manage.install(args["<package>"]) if args["remove"]: manage.remove(args["<package>"]) if args["update"]: manage.update() if args["upgrade"]: manage.upgrade(args["<package>"]) if args["show"]: manage.show(args["<package>"]) if __name__ == '__main__': try: main(docopt(__doc__)) except KeyboardInterrupt: sys.exit()
Python
0
@@ -5681,16 +5681,70 @@ manifest +%0A set options/values listed in manifest %0A%0A
fa07c966baeff5bd4b0c1e7437eae98aec16966e
remove .hg* files from pypi tarball
dodo.py
dodo.py
"""dodo file. test + management stuff""" import glob import os import pytest from doit.tools import create_folder DOIT_CONFIG = {'default_tasks': ['checker', 'ut']} CODE_FILES = glob.glob("doit/*.py") TEST_FILES = glob.glob("tests/test_*.py") TESTING_FILES = glob.glob("tests/*.py") PY_FILES = CODE_FILES + TESTING_FILES def task_checker(): """run pyflakes on all project files""" for module in PY_FILES: yield {'actions': ["pyflakes %(dependencies)s"], 'name':module, 'file_dep':(module,), 'title': (lambda task: task.name)} def run_test(test): return not bool(pytest.main(test)) def task_ut(): """run unit-tests""" for test in TEST_FILES: yield {'name': test, 'actions': [(run_test, (test,))], 'file_dep': PY_FILES, 'verbosity': 0} ################## coverage tasks def task_coverage(): """show coverage for all modules including tests""" return {'actions': ["coverage run --parallel-mode `which py.test` ", "coverage combine", ("coverage report --show-missing %s" % " ".join(CODE_FILES + TEST_FILES)) ], 'verbosity': 2} def task_coverage_code(): """show coverage for all modules (exclude tests)""" return {'actions': ["coverage run --parallel-mode `which py.test` ", "coverage combine", "coverage report --show-missing %s" % " ".join(CODE_FILES)], 'verbosity': 2} def task_coverage_module(): """show coverage for individual modules""" to_strip = len('tests/test_') for test in TEST_FILES: source = "doit/" + test[to_strip:] yield {'name': test, 'actions': ["coverage run --parallel-mode `which py.test` -v %s" % test, "coverage combine", "coverage report --show-missing %s %s" % (source, test)], 'verbosity': 2} ############# python3 # distribute => setup.py test together with use_2to3 doesnt work hence this def task_test3(): """run unitests on python3""" this_folder = os.path.dirname(os.path.abspath(__file__)) test_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../doit3test") return {'actions': [ "rm -rf %s" % test_folder, "cp -r %s %s" % (this_folder, test_folder), "2to3 --write --nobackups %s" % test_folder, "py.test-3.2 %s" % test_folder, ], 'verbosity': 2, } ############################ website DOC_ROOT = 'doc/' DOC_BUILD_PATH = DOC_ROOT + '_build/html/' def task_epydoc(): """# generate API docs""" target_path = DOC_BUILD_PATH + 'api/' return {'actions':[(create_folder, [target_path]), ("epydoc --config %sepydoc.config " % DOC_ROOT + "-o %(targets)s")], 'file_dep': CODE_FILES, 'targets': [target_path]} def task_sphinx(): """generate website docs""" action = "sphinx-build -b html -d %s_build/doctrees %s %s" return {'actions': [action % (DOC_ROOT, DOC_ROOT, DOC_BUILD_PATH)]} def task_website(): """dodo file create website html files""" return {'actions': None, 'task_dep': ['epydoc', 'sphinx'], } def task_website_update(): """update website on sourceforge""" return {'actions': ["rsync -avP -e ssh %s* schettino72,python-doit@web.sourceforge.net:htdocs/" % DOC_BUILD_PATH]} ################### dist def task_revision(): """create file with repo rev number""" return {'actions': ["hg tip --template '{rev}:{node}' > revision.txt"]} def task_manifest(): """create manifest file for distutils """ def check_version(): # using a MANIFEST file directly is broken on python2.7 # http://bugs.python.org/issue11104 import sys assert sys.version_info < (2,7) cmd = "hg manifest > MANIFEST;echo 'revision.txt' >> MANIFEST" return {'actions': [check_version, cmd]} def task_sdist(): """create source dist package""" return {'actions': ["python setup.py sdist"], 'task_dep': ['revision', 'manifest'], } def task_pypi(): 
"""upload package to pypi""" return {'actions': ["python setup.py sdist upload"], 'task_dep': ['revision', 'manifest'], } # doit -f ../doit-recipes/deps/deps.py -d . --reporter=executed-only
Python
0
@@ -4054,17 +4054,125 @@ %3C (2,7) -%0A + or sys.version_info %3E (2,7,2)%0A%0A # create manifest will all files under version control without .hg* files %0A cmd @@ -4174,16 +4174,18 @@ cmd = +%22%22 %22hg mani @@ -4193,19 +4193,57 @@ est -%3E MANIFEST; +%7C grep -vE %22.*%5C.hg.*%22 %3E MANIFEST %22%22%22%0A cmd2 = %22 echo @@ -4313,16 +4313,22 @@ ion, cmd +, cmd2 %5D%7D%0A%0Adef
c1ae43fd33cd0f8eb3e270907a8ed7e728d1e268
Add captured_at timestamp to POST payload
server.py
server.py
import evdev import requests import json import datetime import yaml def main(): config = load_config() dev = evdev.InputDevice(config['device_path']) output_line('Initialized - Capturing device: ' + str(dev)) for event in dev.read_loop(): if event.type == evdev.ecodes.EV_KEY: event = evdev.categorize(event) output_line(event) payload = build_payload(event) output_line('Sending ' + str(payload) + ' to ' + config['post_url']) response = requests.post(config['post_url'], json.dumps(payload)) output_line(response) def build_payload(event): return { 'code': event.scancode, 'key': event.keycode[0] if type(event.keycode) == list else event.keycode, 'state': {0: 'UP', 1: 'DOWN', 2: 'HOLD'}[event.keystate] } def load_config(): with open('config.yml', 'r') as f: return yaml.safe_load(f.read()) def timestamp_s(): return '[' + str(datetime.datetime.now()) + ']' def output_line(string): print(timestamp_s() + ' ' + str(string)) if __name__ == '__main__': main()
Python
0.000076
@@ -290,46 +290,8 @@ EY:%0A - event = evdev.categorize(event)%0A @@ -307,24 +307,24 @@ line(event)%0A + payloa @@ -550,16 +550,50 @@ event):%0A + event = evdev.categorize(event)%0A return @@ -698,24 +698,24 @@ nt.keycode,%0A - 'state': @@ -762,16 +762,105 @@ eystate%5D +,%0A 'captured_at': datetime.datetime.fromtimestamp(event.event.timestamp()).isoformat() %0A %7D%0A%0Ade
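The new captured_at field turns the kernel event time, a float of seconds since the epoch as evdev reports it, into an ISO 8601 string. The conversion in isolation, with no input device required:

    import datetime

    def to_iso8601(epoch_seconds):
        # fromtimestamp() interprets the float in local time;
        # isoformat() renders e.g. '2016-05-01T12:34:56.789012'.
        return datetime.datetime.fromtimestamp(epoch_seconds).isoformat()

    print(to_iso8601(1462100096.789012))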
50fa7ecea878781c8ca8764f09a723ac9be01850
fix test
test/test_bedtools.py
test/test_bedtools.py
import os from sequana import bedtools, sequana_data from sequana.tools import genbank_features_parser from easydev import TempFile def test_threshold(): t = bedtools.DoubleThresholds(-5,5) assert t.low == -5 assert t.high == 5 assert t.low2 == -2.5 t = bedtools.DoubleThresholds(-4, 3) assert t.low == -4 assert t.high == 3 t = bedtools.DoubleThresholds(-8,8) t.ldtr = 0.25 t.hdtr = 0.25 assert t.low2 == -2 assert t.high2 == 2 print(t) t.ldtr = 0.5 t.hdtr = 0.5 t.low = -3 t.high = 3 assert t.low2 == -1.5 assert t.high2 == 1.5 try: t = bedtools.DoubleThresholds(3, 4) assert False except: assert True try: t = bedtools.DoubleThresholds(3, -4) assert False except: assert True def test_genomecov(): filename = sequana_data('JB409847.bed') try: bed = bedtools.GenomeCov("dummy.csv") assert False except: assert True try: bed = bedtools.GenomeCov(filename, "dummy.gbk") assert False except: assert True # !now let us read the good data sets bed = bedtools.GenomeCov(filename, sequana_data('JB409847.gbk')) bed.compute_coverage() bed = bedtools.GenomeCov(filename, sequana_data('JB409847.gbk')) bed2 = bedtools.GenomeCov(filename, sequana_data('JB409847.gbk')) assert bed == bed # test equality for same chromosome but different data bed2.chr_list[0].df["cov"] += 100 assert bed != bed2 # test equality for same chromosome but different data bed2.chr_list[0].df["cov"] -= 100 bed2.chr_list.append("dummy") assert bed != bed2 # setter must be bool try: bed.circular = 1 assert False except: assert True # cant use setter try: bed.feature_dict = {} assert False except: assert True assert len(bed) == 1 # a getter for the first chromosome bed[0] # setter available but not sure this is useful bed.window_size = 4001 bed.hist() # This requires to call other method before for chrom in bed: chrom.moving_average(n=501) chrom.running_median(n=501, circular=True) chrom.running_median(n=501, circular=False) chrom.compute_zscore() roi = chrom.get_roi() with TempFile(suffix='.png') as fh: chrom.plot_coverage(filename=fh.name) with TempFile(suffix='.png') as fh: chrom.plot_hist_zscore(filename=fh.name) with TempFile(suffix='.png') as fh: chrom.plot_hist_normalized_coverage(filename=fh.name) len(chrom) print(chrom) chrom.get_size() chrom.get_mean_cov() chrom.get_var_coef() with TempFile(suffix='.csv') as fh: bed.to_csv(fh.name) bed2 = bedtools.GenomeCov(fh.name, sequana_data('JB409847.gbk')) # plotting bed.chr_list[0].plot_hist_coverage() bed.chr_list[0].plot_hist_coverage(logx=False,logy=True) bed.chr_list[0].plot_hist_coverage(logx=True,logy=False) with TempFile(suffix=".png") as fh: bed.chr_list[0].plot_hist_coverage(logx=False,logy=False, filename=fh.name) def test_gc_content(): bed = sequana_data('JB409847.bed') fasta = sequana_data('JB409847.fasta') cov = bedtools.GenomeCov(bed) cov.compute_gc_content(fasta) cov.get_stats() ch = cov[0] ch.moving_average(4001, circular=True) ch.running_median(4001,circular=True) ch.compute_zscore() ch.get_evenness() ch.get_cv() assert ch.get_centralness() > 0.84 and ch.get_centralness()<0.85 with TempFile(suffix=".png") as fh: ch.plot_gc_vs_coverage(filename=fh.name) from easydev import TempFile with TempFile() as fh: ch.to_csv(fh.name) ch.get_max_gc_correlation(fasta)
Python
0.000002
@@ -1251,16 +1251,20 @@ overage( +4001 )%0A%0A b @@ -3769,41 +3769,8 @@ e)%0A%0A - from easydev import TempFile%0A
0880846cde92e49863dfcb2342ff405d06ffe5c9
Format bugfix.
exporters/writers/base_writer.py
exporters/writers/base_writer.py
import gzip import os import shutil import uuid from exporters.logger.base_logger import WriterLogger from exporters.pipeline.base_pipeline_item import BasePipelineItem import tempfile TEMP_FILES_NAME = 'temp' ITEMS_PER_BUFFER_WRITE = 10000 SIZE_PER_BUFFER_WRITE = 0 class ItemsLimitReached(Exception): """ This exception is thrown when the desired items number has been reached """ class NoGroup(object): def __call__(self, batch): return {'': batch} def __repr__(self): return "NoGroup( )" def __nonzero__(self): return 0 def __getattr__(self, name): return self class BaseWriter(BasePipelineItem): """ This module receives a batch and writes it where needed. It adds an optionsl items_limit parameter to allow to limit the number of exported items. If set to 0, there is no limit. """ base_supported_options = { 'items_per_buffer_write': {'type': int, 'default': ITEMS_PER_BUFFER_WRITE}, 'size_per_buffer_write': {'type': int, 'default': SIZE_PER_BUFFER_WRITE}, 'items_limit': {'type': int, 'default': 0}, } def __init__(self, options): super(BaseWriter, self).__init__(options) self.finished = False self.tmp_folder = tempfile.mkdtemp() self.check_options() self.items_per_buffer_write = self.read_option('items_per_buffer_write') self.size_per_buffer_write = self.read_option('size_per_buffer_write') self.items_limit = self.read_option('items_limit') self.logger = WriterLogger({'log_level': options.get('log_level'), 'logger_name': options.get('logger_name')}) self.items_count = 0 self.grouping_info = {} def write(self, path, key): """ It receives where the tmp dump file is stored and group information, and it must write it wherever needed. """ raise NotImplementedError def write_batch(self, batch): """ It receives the batch and writes it. """ for item in batch: self._send_item_to_buffer(item) def _should_write_buffer(self, key): if self.size_per_buffer_write and os.path.getsize( self.grouping_info[key]['group_file'][-1]) >= self.size_per_buffer_write: return True return self.grouping_info[key].get('buffered_items', 0) >= self.items_per_buffer_write def _send_item_to_buffer(self, item): """ It receives an item and writes it. 
""" key = tuple(item.group_membership) if key not in self.grouping_info: self.grouping_info[key] = {} self.grouping_info[key]['membership'] = item.group_membership self.grouping_info[key]['total_items'] = 0 self.grouping_info[key]['buffered_items'] = 0 self.grouping_info[key]['group_file'] = [] self._add_to_buffer(item, key) if self._should_write_buffer(key): self.logger.debug('Buffer write is needed.') self._write_buffer(key) self.items_count += 1 if self.items_limit and self.items_limit == self.items_count: raise ItemsLimitReached( 'Finishing job after items_limit reached: {} items written.'.format(self.items_count)) def _get_group_path(self, key): if self.grouping_info[key]['group_file']: path = self.grouping_info[key]['group_file'][-1] else: path = os.path.join(self.tmp_folder, str(uuid.uuid4())) self.grouping_info[key]['group_file'].append(path) return path def _add_to_buffer(self, item, key): path = self._get_group_path(key) with open(path, 'a') as f: f.write(item.formatted + '\n') self.grouping_info[key]['total_items'] += 1 self.grouping_info[key]['buffered_items'] += 1 def _compress_file(self, path): compressed_path = path + '.gz' with gzip.open(compressed_path, 'wb') as predump_file, open(path) as fl: shutil.copyfileobj(fl, predump_file) return compressed_path def _create_buffer_path_for_key(self, key): new_buffer_path = os.path.join(self.tmp_folder, str(uuid.uuid4())) self.grouping_info[key]['group_file'].append(new_buffer_path) f = open(new_buffer_path, 'w') f.close() def _write_buffer(self, key): path = self._get_group_path(key) compressed_path = self._compress_file(path) self.write(compressed_path, self.grouping_info[key]['membership']) self._create_buffer_path_for_key(key) self._reset_key(key) def _reset_key(self, key): self.grouping_info[key]['buffered_items'] = 0 def close_writer(self): """ Called to clean all possible tmp files created during the process. """ for key in self.grouping_info.keys(): self._write_buffer(key) shutil.rmtree(self.tmp_folder, ignore_errors=True)
Python
0
@@ -230,17 +230,18 @@ WRITE = -1 +50 0000%0ASIZ @@ -3451,55 +3451,32 @@ h = -os.path.join(self.tmp_folder, str(uuid.uuid4()) +self._get_new_path_name( )%0A @@ -4140,55 +4140,32 @@ h = -os.path.join(self.tmp_folder, str(uuid.uuid4()) +self._get_new_path_name( )%0A @@ -4271,16 +4271,16 @@ h, 'w')%0A - @@ -4290,16 +4290,121 @@ lose()%0A%0A + def _get_new_path_name(self):%0A return os.path.join(self.tmp_folder, str(uuid.uuid4())+'.jl')%0A%0A def
c769a92ba88613932f3b8b881feac37d65ee2b7c
Add __str__ method
coalib/results/SourceRange.py
coalib/results/SourceRange.py
from os.path import relpath from coala_utils.decorators import enforce_signature, get_public_members from coalib.results.SourcePosition import SourcePosition from coalib.results.TextRange import TextRange from coalib.results.AbsolutePosition import AbsolutePosition class SourceRange(TextRange): @enforce_signature def __init__(self, start: SourcePosition, end: (SourcePosition, None)=None): """ Creates a new SourceRange. :param start: A SourcePosition indicating the start of the range. :param end: A SourcePosition indicating the end of the range. If ``None`` is given, the start object will be used here. end must be in the same file and be greater than start as negative ranges are not allowed. :raises TypeError: Raised when - start is not of type SourcePosition. - end is neither of type SourcePosition, nor is it None. :raises ValueError: Raised when file of start and end mismatch. """ TextRange.__init__(self, start, end) if self.start.file != self.end.file: raise ValueError('File of start and end position do not match.') @classmethod def from_values(cls, file, start_line=None, start_column=None, end_line=None, end_column=None): start = SourcePosition(file, start_line, start_column) if end_line or (end_column and end_column > start_column): end = SourcePosition(file, end_line if end_line else start_line, end_column) else: end = None return cls(start, end) @classmethod def from_clang_range(cls, range): """ Creates a SourceRange from a clang SourceRange object. :param range: A cindex.SourceRange object. """ return cls.from_values(range.start.file.name, range.start.line, range.start.column, range.end.line, range.end.column) @classmethod @enforce_signature def from_absolute_position(cls, file: str, position_start: AbsolutePosition, position_end: (AbsolutePosition, None)=None): """ Creates a SourceRange from a start and end positions. :param file: Name of the file. :param position_start: Start of range given by AbsolutePosition. :param position_end: End of range given by AbsolutePosition or None. """ start = SourcePosition(file, position_start.line, position_start.column) end = None if position_end: end = SourcePosition(file, position_end.line, position_end.column) return cls(start, end) @property def file(self): return self.start.file @enforce_signature def renamed_file(self, file_diff_dict: dict): """ Retrieves the filename this source range refers to while taking the possible file renamings in the given file_diff_dict into account: :param file_diff_dict: A dictionary with filenames as key and their associated Diff objects as values. """ diff = file_diff_dict.get(self.file) if diff is None: return self.file return diff.rename if diff.rename is not False else self.file def expand(self, file_contents): """ Passes a new SourceRange that covers the same area of a file as this one would. All values of None get replaced with absolute values. 
values of None will be interpreted as follows: self.start.line is None: -> 1 self.start.column is None: -> 1 self.end.line is None: -> last line of file self.end.column is None: -> last column of self.end.line :param file_contents: File contents of the applicable file :return: TextRange with absolute values """ tr = TextRange.expand(self, file_contents) return SourceRange.from_values(self.file, tr.start.line, tr.start.column, tr.end.line, tr.end.column) def __json__(self, use_relpath=False): _dict = get_public_members(self) if use_relpath: _dict['file'] = relpath(_dict['file']) return _dict def __contains__(self, item): return (super().__contains__(item) and self.start.file == item.start.file)
Python
0.000001
@@ -4839,16 +4839,1285 @@ _dict%0A%0A + def __str__(self):%0A %22%22%22%0A Creates a string representation of the SourceRange object.%0A%0A If the whole file is affected, then just the filename is shown.%0A%0A %3E%3E%3E str(SourceRange.from_values('test_file', None, None, None, None))%0A '...test_file'%0A%0A If the whole line is affected, then just the filename with starting%0A line number and ending line number is shown.%0A%0A %3E%3E%3E str(SourceRange.from_values('test_file', 1, None, 2, None))%0A '...test_file: L1 : L2'%0A%0A This is the general case where particular column and line are%0A specified. It shows the starting line and column and ending line%0A and column, with filename in the beginning.%0A%0A %3E%3E%3E str(SourceRange.from_values('test_file', 1, 1, 2, 1))%0A '...test_file: L1 C1 : L2 C1'%0A %22%22%22%0A if self.start.line is None and self.end.line is None:%0A format_str = '%7B0.start.file%7D'%0A elif self.start.column is None and self.end.column is None:%0A format_str = '%7B0.start.file%7D: L%7B0.start.line%7D : L%7B0.end.line%7D'%0A else:%0A format_str = ('%7B0.start.file%7D: L%7B0.start.line%7D C%7B0.start.column%7D' +%0A ' : L%7B0.end.line%7D C%7B0.end.column%7D')%0A%0A return format_str.format(self)%0A%0A def
13b6e289f3ced59068d91dff2b2ef12a7805fabe
Create test definitions.
test/test_cronquot.py
test/test_cronquot.py
import unittest import os from cronquot.cronquot import has_directory class CronquotTest(unittest.TestCase): def test_has_directory(self): sample_dir = os.path.join( os.path.dirname(__file__), 'crontab') self.assertTrue(has_directory(sample_dir)) if __name__ == '__main__': unittest.test()
Python
0
@@ -280,16 +280,282 @@ _dir))%0A%0A + def test_parse_command(self):%0A pass%0A%0A def test_is_cron_script(self):%0A pass%0A%0A def test_normalize_cron_script(self):%0A pass%0A%0A def test_has_cosistency_in_result(self):%0A pass%0A%0A def test_simple_cron_pattern(self):%0A pass%0A%0A if __nam
7f821683fa9803fda9b1d12a0229a74c334efdc0
discard phase before amplitude_to_db in HPSS display
docs/examples/plot_hprss.py
docs/examples/plot_hprss.py
# -*- coding: utf-8 -*- """ ===================================== Harmonic-percussive source separation ===================================== This notebook illustrates how to separate an audio signal into its harmonic and percussive components. We'll compare the original median-filtering based approach of `Fitzgerald, 2010 <http://arrow.dit.ie/cgi/viewcontent.cgi?article=1078&context=argcon>`_ and its margin-based extension due to `Dreidger, Mueller and Disch, 2014 <http://www.terasoft.com.tw/conf/ismir2014/proceedings/T110_127_Paper.pdf>`_. """ from __future__ import print_function import numpy as np import matplotlib.pyplot as plt import librosa import librosa.display ######################## # Load the example clip. y, sr = librosa.load('audio/Karissa_Hobbs_-_09_-_Lets_Go_Fishin.mp3', offset=40, duration=10) ############################################### # Compute the short-time Fourier transform of y D = librosa.stft(y) ##################################################### # Decompose D into harmonic and percussive components # # :math:`D = D_\text{harmonic} + D_\text{percussive}` D_harmonic, D_percussive = librosa.decompose.hpss(D) #################################################################### # We can plot the two components along with the original spectrogram # Pre-compute a global reference power from the input spectrum rp = np.max(np.abs(D)) plt.figure(figsize=(12, 8)) plt.subplot(3, 1, 1) librosa.display.specshow(librosa.amplitude_to_db(D, ref=rp), y_axis='log') plt.colorbar() plt.title('Full spectrogram') plt.subplot(3, 1, 2) librosa.display.specshow(librosa.amplitude_to_db(D_harmonic, ref=rp), y_axis='log') plt.colorbar() plt.title('Harmonic spectrogram') plt.subplot(3, 1, 3) librosa.display.specshow(librosa.amplitude_to_db(D_percussive, ref=rp), y_axis='log', x_axis='time') plt.colorbar() plt.title('Percussive spectrogram') plt.tight_layout() ################################################################################# # The default HPSS above assigns energy to each time-frequency bin according to # whether a horizontal (harmonic) or vertical (percussive) filter responds higher # at that position. # # This assumes that all energy belongs to either a harmonic or percussive source, # but does not handle "noise" well. Noise energy ends up getting spread between # D_harmonic and D_percussive. # # If we instead require that the horizontal filter responds more than the vertical # filter *by at least some margin*, and vice versa, then noise can be removed # from both components. # # Note: the default (above) corresponds to margin=1 # Let's compute separations for a few different margins and compare the results below D_harmonic2, D_percussive2 = librosa.decompose.hpss(D, margin=2) D_harmonic4, D_percussive4 = librosa.decompose.hpss(D, margin=4) D_harmonic8, D_percussive8 = librosa.decompose.hpss(D, margin=8) D_harmonic16, D_percussive16 = librosa.decompose.hpss(D, margin=16) ############################################################################# # In the plots below, note that vibrato has been suppressed from the harmonic # components, and vocals have been suppressed in the percussive components. 
plt.figure(figsize=(10, 10)) plt.subplot(5, 2, 1) librosa.display.specshow(librosa.amplitude_to_db(D_harmonic, ref=rp), y_axis='log') plt.title('Harmonic') plt.yticks([]) plt.ylabel('margin=1') plt.subplot(5, 2, 2) librosa.display.specshow(librosa.amplitude_to_db(D_percussive, ref=rp), y_axis='log') plt.title('Percussive') plt.yticks([]), plt.ylabel('') plt.subplot(5, 2, 3) librosa.display.specshow(librosa.amplitude_to_db(D_harmonic2, ref=rp), y_axis='log') plt.yticks([]) plt.ylabel('margin=2') plt.subplot(5, 2, 4) librosa.display.specshow(librosa.amplitude_to_db(D_percussive2, ref=rp), y_axis='log') plt.yticks([]) ,plt.ylabel('') plt.subplot(5, 2, 5) librosa.display.specshow(librosa.amplitude_to_db(D_harmonic4, ref=rp), y_axis='log') plt.yticks([]) plt.ylabel('margin=4') plt.subplot(5, 2, 6) librosa.display.specshow(librosa.amplitude_to_db(D_percussive4, ref=rp), y_axis='log') plt.yticks([]), plt.ylabel('') plt.subplot(5, 2, 7) librosa.display.specshow(librosa.amplitude_to_db(D_harmonic8, ref=rp), y_axis='log') plt.yticks([]) plt.ylabel('margin=8') plt.subplot(5, 2, 8) librosa.display.specshow(librosa.amplitude_to_db(D_percussive8, ref=rp), y_axis='log') plt.yticks([]), plt.ylabel('') plt.subplot(5, 2, 9) librosa.display.specshow(librosa.amplitude_to_db(D_harmonic16, ref=rp), y_axis='log') plt.yticks([]) plt.ylabel('margin=16') plt.subplot(5, 2, 10) librosa.display.specshow(librosa.amplitude_to_db(D_percussive16, ref=rp), y_axis='log') plt.yticks([]), plt.ylabel('') plt.tight_layout() plt.show()
Python
0
@@ -1486,17 +1486,25 @@ e_to_db( -D +np.abs(D) , ref=rp @@ -1628,32 +1628,39 @@ amplitude_to_db( +np.abs( D_harmonic, ref= @@ -1645,32 +1645,33 @@ p.abs(D_harmonic +) , ref=rp), y_axi @@ -1791,32 +1791,39 @@ amplitude_to_db( +np.abs( D_percussive, re @@ -1810,32 +1810,33 @@ abs(D_percussive +) , ref=rp), y_axi
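The fix matters because librosa.stft returns a complex array while amplitude_to_db expects non-negative magnitudes, so the raw STFT must pass through np.abs first; the diff applies that inside each amplitude_to_db call. A stand-in signal keeps the sketch self-contained:

    import numpy as np
    import librosa

    y = np.random.randn(22050).astype(np.float32)  # stand-in audio
    D = librosa.stft(y)                            # complex-valued STFT
    S_db = librosa.amplitude_to_db(np.abs(D),      # magnitude -> decibels
                                   ref=np.max(np.abs(D)))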
f6af6c56d71d3dfc76b2ceb5e15b38b914067c3e
fix bug
server.py
server.py
import frame as fm from settings import ConnectReturn as CR from settings import TYPE import socket from threading import Timer class Broker(): def __init__(self, host = "127.0.0.1", port = 8888): self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.serv.bind((host, port)) self.host = host self.port = port self.clients = {} self.topics = {} self.wills = {} self.clientSubscribe = {} # NOTICE: keys of topics and clientSubscribe should be synchronized def runServer(self): self.serv.listen(1) while True: self.sock, self.addr = self.serv.accept() data = "dummy" while len(data): data = self.sock.recv(1 << 16) fm.parseFrame(data, self) if self.clients.has_key(self.addr): self.clients[self.addr].restartTimer() #when DISCONNECT frame coms, then error might occur because the socket is already closed def setClient(self, cliID, name, passwd, will, keepAlive, clean): self.clients[self.addr] = Client(self, self.addr, self.sock, cliID, name, passwd, will, keepAlive, clean) def setTopic(self, topicQoS, messageID): self.clients[self.addr].subscribe.append(topicQoS) if self.clientSubscribe.has_key(topicQoS[0]): self.clientSubscribe[topicQoS[0]].append([self.addr, topicQoS[1]]) # this is 'retain' if self.topics[topicQoS[0]]: frame = fm.makeFrame(TYPE.PUBLISH, 0, topicOoS[1], 1, topic = topicQoS[0], message = self.topics[topicQoS[0]], messageID = messageID) self.send(frame) else: self.clientSubscribe[topicQoS[0]] = [[self.addr, topicQoS[1]]] def unsetWill(self): #when pass def sendWill(self, frame): pass # send willFrame to clients ? def unsetTopic(self, topic): # not cool self.clients[self.addr].unsetTopic(topic) self.clientSubscribe[topic].remove(self.clientSubscribe[topic][[addr[0] for addr in self.clientSubscribe[topic]].index(self.addr)]) def disconnect(self): # when get DISCONNECT packet from client self.clients[self.addr].sock.close() if self.clients[self.addr].clean: # TODO: correct ? self.clients.pop(self.addr) print "disconnect" def suback(self, messageID): # this looks mistake, the qosList should contain only subscribed QoSs frame = fm.makeFrame(TYPE.SUBACK, 0, 0, 0, messageID = messageID, qosList = [topic[1] for topic in self.clients[self.addr].subscribe]) self.send(frame) def unsuback(self, messageID): frame = fm.makeFrame(TYPE.UNSUBACK, 0, 0, 0, messageID = messageID) self.send(frame) def connack(self): frame = fm.makeFrame(TYPE.CONNACK, 0, 0, 0, code = CR.ACCEPTED) self.send(frame) def puback(self, messageID): frame = fm.makeFrame(TYPE.PUBACK, 0, 0, 0, messageID = messageID) self.send(frame) def pubrec(self, messageID): frame = fm.makeFrame(TYPE.PUBREC, 0, 0, 0, messageID = messageID) self.send(frame) def pubrel(self, messageID): frame = fm.makeFrame(TYPE.PUBREL, 0, 1, 0, messageID = messageID) self.send(frame) def pubcomp(self, messageID): frame = fm.makeFrame(TYPE.PUBCOMP, 0, 0, 0, messageID = messageID) self.send(frame) def pingresp(self): self.clients[self.addr].pingresp() def publish(self, topic, message, messageID = 1, retain = 0): if self.topics.has_key(topic): for client in self.topics[topic]: frame = fm.makeFrame(TYPE.PUBLISH, 0, client[1], 0, topic = topic, message = message, messageID = messageID) self.clients[client[0]].send(frame) # TODO: send function should be unified else: self.topics[topic] = "" if retain: self.topics[topic] = message def pubrel(self, dup = 0, messageID = 1): # dup should be zero ? 
frame = fm.makeFrame(TYPE.PUBREL, dup, 1, 0, messageID = messageID) self.send(frame) def pubcomp(self, messageID = 1): frame = fm.makeFrame(TYPE.PUBCOMP, 0, 0, 0, messageID = messageID) self.send(frame) def send(self, frame): self.clients[self.addr].send(frame) class Client(): def __init__(self, server, addr, sock, cliID = "", name = "", passwd = "", will = {}, keepAlive = 2, clean = 1): self.server = server self.addr = addr self.sock = sock if not cliID: cliID = "random" # TODO: cliID should be determined in here if no cliID was delivered. self.cliID = cliID self.name = name self.passwd = passwd self.will = will self.keepAlive = keepAlive self.timer = Timer(keepAlive * 1.5, self.disconnect) self.subscribe = [] self.cleanSession = clean def sendWill(self, frame): self.server.sendWill(frame) def disconnect(self): # when ping packet didn't came within the keepAlive * 1.5 sec frame = fm.makeFrame(TYPE.PUBLISH, 0, self.will["QoS"], self.will["retain"], topic = self.will["topic"], message = self.will["message"], messageID = 1) self.sendWill(frame) frame = fm.makeFrame(TYPE.DISCONNECT, 0, 0, 0) self.send(frame) self.sock.close() self.server.clients.pop(self.addr) def pingresp(self): frame = fm.makeFrame(TYPE.PINGRESP, 0, 0, 0) self.send(frame) def unsetTopic(self, topic): self.subscribe.remove(self.subscribe[[t[0] for t in self.subscribe].index(topic)]) def send(self, frame): self.sock.send(frame) def restartTimer(self): self.timer.cancel() self.timer = Timer(self.keepAlive * 1.5, self.disconnect) self.timer.start()
Python
0.000001
@@ -5166,15 +5166,8 @@ lean -Session = c
e20cdd293a045dfca1829eed522cb941da2fb558
remove cruft
server.py
server.py
import json
import os

from bottle import get, run, post, request, HTTPError, response
from requests import Session
from requests import post as request_post


# ----------------------------------------------------------------------------
# Settings
# ----------------------------------------------------------------------------
env_var_names = (
    'GITHUB_API_KEY',
    'GITHUB_ORGANIZATION_NAME',
    'SLACK_API_TOKEN',
    'SLACK_API_SECRET',
    'SLACK_TEAM_NAME',
)
env = {}
for name in env_var_names:
    env[name] = os.environ.get(name, None)
    assert env[name], "Missing environment variable: %s" % name


# ----------------------------------------------------------------------------
# Helpers
# ----------------------------------------------------------------------------
def github_request(method, endpoint, data=None):
    github_session = Session()
    github_session.auth = (env['GITHUB_API_KEY'], 'x-oauth-basic')
    base_url = 'https://api.github.com'
    method_func = getattr(github_session, method.lower())
    response = method_func(
        base_url + endpoint,
        data=data
    )
    return response


def github_add_member_to_org(github_username):
    return github_request(
        'PUT',
        '/orgs/%s/memberships/%s' % (env['GITHUB_ORGANIZATION_NAME'], github_username),
        data=json.dumps({"role": "member"})
    )


def slack_invite(email):
    print env['SLACK_API_TOKEN']
    return request_post(
        'https://%s.slack.com/api/users.admin.invite' % (env['SLACK_TEAM_NAME']),
        data=json.dumps({
            "token": env['SLACK_API_TOKEN'],
            "email": email,
            "set_active": True,
        }),
        headers={
            'Content-type': 'application/json',
            'Accept': 'text/plain'
        }
    )




import ipdb;ipdb.set_trace()






# ----------------------------------------------------------------------------
# Views / server
# ----------------------------------------------------------------------------
@post('/add')
def add():
    # Parse input
    if request.forms.get('token') != env['SLACK_API_TOKEN']:
        # Make sure we got a request from the actual slack server not some ass hole
        return HTTPError(status=403)

    text = request.forms.get('text')
    if not text or len(text.split(' ')) != 2:
        response.status_code = 400
        return {"error": "Invalid text input, should look like /onboard <github name>; <email>"}
    github_username, email = text.split(' ')
    github_username = github_username.strip()
    email = email.strip()

    # Add to github
    resp = github_add_member_to_org(github_username)
    if resp.status_code != 200:
        response.status_code = 500
        return {"error": "Bad response from Github (%s): %s" % (resp.status_code, resp.content)}

    # Add to slack
    resp = slack_invite(email)
    if resp.status_code != 200:
        response.status_code = 500
        return {"error": "Bad response from Slack (%s): %s" % (resp.status_code, resp.content)}

    # Add to screenhero
    # TODO

    return "Successfully added user to Github, Slack and Screenhero... wee!"


@get("/")
def nice_index():
    return "Hello, I am an <a href='https://github.com/dev-coop/onboard'>onboarding bot</a>!"


# Heroku sets PORT env var
run(host="0.0.0.0", port=int(os.environ.get("PORT", 5000)))
Python
0
@@ -1787,45 +1787,8 @@ )%0A%0A%0A -%0A%0Aimport ipdb;ipdb.set_trace()%0A%0A%0A%0A%0A%0A%0A # --
5e4d3c0b28104c1e98ed3e426dab9fc5d4d5a960
Add more comments to loadfail test
test/test_loadfail.py
test/test_loadfail.py
#!bin/env python
import subprocess
import os.path
import unittest, re

class TestSaveLoad(unittest.TestCase):

    @classmethod
    def setUpClass(self):

        subprocess.call('rm -rf remote local 2>> /dev/null', shell=True)
        subprocess.call('mkdir remote; mkdir local', shell=True)

        subprocess.call('cd remote; mkdir parent; cd parent; git init --bare', shell=True)
        subprocess.call('cd remote; mkdir child; cd child; git init --bare', shell=True)

        subprocess.call('cd local; git clone ../remote/parent', shell=True)
        subprocess.call('cd local; git clone ../remote/child', shell=True)

        subprocess.call('cd local/parent; echo "version: 0.1.0" >> .gitproj', shell=True)
        subprocess.call('cd local/parent; echo "repos:" >> .gitproj', shell=True)
        subprocess.call('cd local/parent; echo "\tc child ../../remote/child" >> .gitproj', shell=True)
        subprocess.call('cd local/parent; git add .gitproj; git commit -m "Initial Commit"; git push -u origin master', shell=True)

    def test_init(self):

        subprocess.call('cd local/parent; git project init', shell=True)
        subprocess.call('cd local/parent; git add .gitignore; git commit -m ".gitignore"; git push', shell=True)

        output = subprocess.call('test -d local/parent/child;', shell=True)
        self.assertEqual(output, 0)

        output = subprocess.check_output('cd local/parent/child; git remote show origin | grep Fetch | grep remote/child | wc -l', shell=True)
        self.assertEqual(output.strip(), '1')

        subprocess.call('cd local/parent/child; echo "Asdf" > test.txt; git add test.txt; git commit -m "Initial Commit"; git push', shell=True)

        subprocess.call('cd local/parent; git project save -f', shell=True)

        subprocess.call('cd local/parent; git add .gitproj; git commit -m "Save Sub-Repository State"', shell=True)

        subprocess.call('cd local/parent; sed \$d .gitproj > .gitproj2; echo "    c master nonexistantcommit" >> .gitproj2', shell=True)
        subprocess.call('cd local/parent; mv .gitproj2 .gitproj', shell=True)

        res = subprocess.call('cd local/parent; git project load', shell=True)
        self.assertEqual(res, 1)

    @classmethod
    def tearDownClass(self):

        subprocess.call('rm -rf remote local', shell=True)

if __name__ == '__main__':
    unittest.main()
Python
0
@@ -142,32 +142,91 @@ UpClass(self):%0A%0A + # ensure we start with a clean slate, just in case%0A subproce @@ -275,32 +275,75 @@ ', shell=True)%0A%0A + # Initialize %22remote%22 repositories%0A subproce @@ -564,32 +564,74 @@ ', shell=True)%0A%0A + # Initialize %22local%22 repositories%0A subproce @@ -760,32 +760,112 @@ ', shell=True)%0A%0A + # Add a .gitproj to the parent repo, and make child a subrepo of parent%0A subproce @@ -1280,32 +1280,92 @@ st_init(self):%0A%0A + # Initialize git-project (clones child into parent)%0A subproce @@ -1529,32 +1529,75 @@ ', shell=True)%0A%0A + # Ensure child was cloned properly%0A output = @@ -1693,16 +1693,65 @@ ut, 0)%0A%0A + # Ensure child's origin is set correctly%0A @@ -1933,16 +1933,81 @@ , '1')%0A%0A + # Add a commit to the child and update parent's .gitproj%0A @@ -2135,33 +2135,32 @@ h', shell=True)%0A -%0A subproce @@ -2211,33 +2211,32 @@ f', shell=True)%0A -%0A subproce @@ -2328,32 +2328,79 @@ ', shell=True)%0A%0A + # Change the .gitproj so it is invalid%0A subproce @@ -2513,32 +2513,108 @@ ', shell=True)%0A%0A + # Ensure loading the invalid .gitproj returns a non-zero error code%0A subproce @@ -2827,32 +2827,72 @@ wnClass(self):%0A%0A + # Remove remote and local repos%0A subproce
c138182451d1e3937cf5f923c9b927dc97a97f38
Fix the logger fix
mycroft/util/signal.py
mycroft/util/signal.py
import os
import os.path
import tempfile
import mycroft
import time

from mycroft.util.logging import getLogger

LOG = getLogger(__name__)


def get_ipc_directory(domain=None):
    """Get the directory used for Inter Process Communication

    Files in this folder can be accessed by different processes on the
    machine. Useful for communication. This is often a small RAM disk.

    Args:
        domain (str): The IPC domain. Basically a subdirectory to prevent
            overlapping signal filenames.

    Returns:
        str: a path to the IPC directory
    """
    config = mycroft.configuration.ConfigurationManager.instance()
    dir = config.get("ipc_path")
    if not dir:
        # If not defined, use /tmp/mycroft/ipc
        dir = os.path.join(tempfile.gettempdir(), "mycroft", "ipc")
    return ensure_directory_exists(dir, domain)


def ensure_directory_exists(dir, domain=None):
    """ Create a directory and give access rights to all

    Args:
        domain (str): The IPC domain. Basically a subdirectory to prevent
            overlapping signal filenames.

    Returns:
        str: a path to the directory
    """
    if domain:
        dir = os.path.join(dir, domain)
    dir = os.path.normpath(dir)
    if not os.path.isdir(dir):
        try:
            save = os.umask(0)
            os.makedirs(dir, 0777)  # give everyone rights to r/w here
        except OSError:
            LOG.warn("Failed to create: " + dir)
            pass
        finally:
            os.umask(save)
    return dir


def create_file(filename):
    """ Create the file filename and create any directories needed

        Args:
            filename: Path to the file to be created
    """
    try:
        os.makedirs(os.path.dirname(filename))
    except OSError:
        pass
    with open(filename, 'w') as f:
        f.write('')


def create_signal(signal_name):
    """Create a named signal

    Args:
        signal_name (str): The signal's name.  Must only contain characters
            valid in filenames.
    """
    try:
        path = os.path.join(get_ipc_directory(), "signal", signal_name)
        create_file(path)
        return os.path.isfile(path)
    except IOError:
        return False


def check_for_signal(signal_name, sec_lifetime=0):
    """See if a named signal exists

    Args:
        signal_name (str): The signal's name.  Must only contain characters
            valid in filenames.
        sec_lifetime (int, optional): How many seconds the signal should
            remain valid.  If 0 or not specified, it is a single-use signal.
            If -1, it never expires.

    Returns:
        bool: True if the signal is defined, False otherwise
    """
    path = os.path.join(get_ipc_directory(), "signal", signal_name)
    if os.path.isfile(path):
        if sec_lifetime == 0:
            # consume this single-use signal
            os.remove(path)
        elif sec_lifetime == -1:
            return True
        elif int(os.path.getctime(path) + sec_lifetime) < int(time.time()):
            # remove once expired
            os.remove(path)
            return False
        return True

    # No such signal exists
    return False
Python
0
@@ -86,12 +86,8 @@ .log -ging imp
38b078eb13a42bf65d1f55141a69fcd8819a1f00
add models
mysite/polls/models.py
mysite/polls/models.py
from django.db import models

# Create your models here.
Python
0
@@ -27,31 +27,338 @@ ls%0A%0A -# Create your models here. +class Question(models.Model):%0A question_text = models.CharField(max_length=200)%0A pub_date = models.DateTimeField('date published')%0A%0Aclass Choice(models.Model):%0A question = models.ForeignKey(Question, on_delete=models.CASCADE)%0A choice_text = models.CharField(max_length=200)%0A votes = models.IntegerField(default=0)%0A %0A
bd2d7fe62361594c7a659c2a38938521b6346dba
Text means no encode()
server.py
server.py
#!/usr/bin/env python3
# coding: utf-8

from aiohttp import web
import aiohttp_jinja2
import asyncio
import functools
import jinja2
import os
import sys
import time
import webbrowser

from pypi_top_packages_async import get_packages_info
from pypi_create_index_html import build_template_values

START_TIME = time.time()
MAX_PKGS = 200  # User can override this by entering a value on the commandline
PORT = int(os.getenv('PORT', 8000))  # Cloud will provide a web server PORT id

try:  # Immediately change current directory to avoid exposure of control files
    os.chdir('static_parent_dir')
except FileNotFoundError:
    pass

try:  # See if the user entered a maximum packages number on the commandline
    max_pkgs = int(sys.argv[1])
except (IndexError, ValueError):
    max_pkgs = MAX_PKGS

app = web.Application()


def done_callback(fut, app=None):  # Called when PyPI data capture is complete
    app = app or {}
    elapsed = time.time() - START_TIME
    app['packages'], app['data_datetime'] = fut.result()
    fmt = ' Gathered Python 3 support info on {:,} PyPI packages in {:.2f} seconds.'
    print(fmt.format(len(app['packages']), elapsed))

fut = asyncio.run_coroutine_threadsafe(get_packages_info(max_pkgs, START_TIME), app.loop)
fut.add_done_callback(functools.partial(done_callback, app=app))


async def index_handler(request):
    try:  # return index.html if it exists
        with open('index.html') as in_file:
            return web.Response(text=in_file.read().encode())
    except FileNotFoundError:
        return web.Response(text='Processing: Please refresh this page')


@aiohttp_jinja2.template('index_db.html')
async def handler(request):
    packages = request.app.get('packages', None)
    if not packages:  # if data capture still ongoing, default to index.html
        return await index_handler(request)
    max_pkgs = request.match_info.get('max_pkgs', '').split('.')[0]
    max_pkgs = ''.join(c for c in max_pkgs if c.isdigit())
    max_pkgs = max(int(max_pkgs) if max_pkgs else 0, 200)
    return build_template_values(packages[:max_pkgs], request.app.get('data_datetime'))


def run_webserver(app, port=PORT):
    aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader(os.curdir))
    app.router.add_route('GET', '/', index_handler)
    app.router.add_route('GET', '/{max_pkgs}', handler)
    app.router.add_static('/static/', path='./static')
    web.run_app(app, port=PORT)


async def launch_browser(port=PORT):
    asyncio.sleep(0.2)  # give the server a fifth of a second to come up
    webbrowser.open('localhost:{}'.format(port))

if PORT == 8000:  # we are running the server on localhost
    asyncio.run_coroutine_threadsafe(launch_browser(PORT), app.loop)

run_webserver(app, port=PORT)
Python
0.998498
@@ -1523,17 +1523,8 @@ ad() -.encode() )%0A
29835c78d1ddfd934aa552f4c68117a32379c5ea
add lmsd.sqlite
mzos/tests/__init__.py
mzos/tests/__init__.py
from __future__ import absolute_import
import zipfile
import os.path as op
import os
import shutil
import logging


class WithHMDBMixin(object):

    @staticmethod
    def unzip_hmdb():
        """
        Utility to unzip hmdb for test purposes
        :param self:
        :return:
        """
        abspath = op.abspath('mzos/ressources/hmdb.zip')
        print abspath
        z = zipfile.ZipFile(abspath)
        hmdb_path = z.extract('hmdb.sqlite')
        logging.info("Moving extracted archive...")
        shutil.move(hmdb_path, abspath)
        logging.info("Done")

    @staticmethod
    def remove_hmdb():
        logging.info("removing 'hmdb.sqlite'...")
        try:
            os.remove(op.abspath('mzos/ressources/hmdb.sqlite'))
            logging.info("Done")
        except OSError:
            logging.error("Unable to remove sqlite file or file does not exist")
Python
0.000001
@@ -301,18 +301,28 @@ -abspath = +z = zipfile.ZipFile( op.a @@ -359,66 +359,8 @@ ip') -%0A print abspath%0A z = zipfile.ZipFile(abspath )%0A @@ -485,23 +485,45 @@ b_path, -abspath +'mzos/ressources/hmdb.sqlite' )%0A @@ -852,9 +852,8 @@ exist%22) -%0A
3ffd045be41d226bcf1b533c3f5abf95a932eac0
Remove duplicate test
webcomix/scrapy/tests/test_crawler_worker.py
webcomix/scrapy/tests/test_crawler_worker.py
import pytest

from webcomix.exceptions import NextLinkNotFound
from webcomix.scrapy.crawler_worker import CrawlerWorker
from webcomix.scrapy.verification.verification_spider import VerificationSpider
from webcomix.tests.fake_websites.fixture import one_webpage_uri


def test_spider_raising_error_gets_raised_by_crawler_worker(one_webpage_uri):
    settings = {"LOG_ENABLED": False}
    worker = CrawlerWorker(
        settings,
        False,
        VerificationSpider,
        start_urls=[one_webpage_uri],
        next_page_selector="//div/@href",
        comic_image_selector="//img/@src",
        number_of_pages_to_check=2,
    )

    with pytest.raises(NextLinkNotFound):
        worker.start()

def test_spider_raising_error_gets_raised_by_crawler_worker(one_webpage_uri):
    settings = {"LOG_ENABLED": False}
    worker = CrawlerWorker(
        settings,
        False,
        VerificationSpider,
        start_urls=[one_webpage_uri],
        next_page_selector="//div/@href",
        comic_image_selector="//img/@src",
        number_of_pages_to_check=2,
    )

    with pytest.raises(NextLinkNotFound):
        worker.start()
Python
0.000014
@@ -701,440 +701,4 @@ t()%0A -%0Adef test_spider_raising_error_gets_raised_by_crawler_worker(one_webpage_uri):%0A settings = %7B%22LOG_ENABLED%22: False%7D%0A worker = CrawlerWorker(%0A settings,%0A False,%0A VerificationSpider,%0A start_urls=%5Bone_webpage_uri%5D,%0A next_page_selector=%22//div/@href%22,%0A comic_image_selector=%22//img/@src%22,%0A number_of_pages_to_check=2,%0A )%0A%0A with pytest.raises(NextLinkNotFound):%0A worker.start()
47ce98960cdfcd4c25109845047ad7fc6db2084b
Set serial default timeout to None
nanpy/serialmanager.py
nanpy/serialmanager.py
from nanpy.memo import memoized
import fnmatch
import logging
import serial
import sys
import time

DEFAULT_BAUDRATE = 115200

log = logging.getLogger(__name__)

PY3 = sys.version_info[0] == 3


class SerialManagerError(Exception):
    pass


def _auto_detect_serial_unix(preferred_list=['*']):
    import glob
    glist = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyACM*')
    glist += ['/dev/ttyS0', '/dev/ttyS1']
    ret = []

    for d in glist:
        for preferred in preferred_list:
            if fnmatch.fnmatch(d, preferred):
                ret.append(d)
    if len(ret) > 0:
        return ret

    for d in glist:
        ret.append(d)
    return ret


class NoneSerialManager(object):

    def write(self, val):
        pass

    def read(self):
        return ""

    def readline(self):
        return ""


class SerialManager(object):
    _serial = None

    def __init__(self, device=None, baudrate=DEFAULT_BAUDRATE,
                 sleep_after_connect=2, timeout=1):
        self.device = device
        self.baudrate = baudrate
        self.sleep_after_connect = sleep_after_connect
        self.timeout = timeout

    def open(self, device=None):
        '''open connection'''
        if device:
            self.device = device
        if not self.device:
            ports = _auto_detect_serial_unix()
            if not len(ports):
                raise SerialManagerError("No port was set, and no port was found!")
            self.device = ports[0]
        log.debug('opening port:%s [%s baud]' % (self.device, self.baudrate))
        assert self.device
        self._serial = serial.Serial(self.device, self.baudrate, timeout=self.timeout)
        if self.sleep_after_connect:
            time.sleep(self.sleep_after_connect)
        self._serial.flushInput()

    def write(self, value):
        if not self._serial:
            self.open()
        log.debug('sending:%s' % repr(value))
        if PY3:
            self._serial.write(bytes(value, 'latin-1'))
        else:
            self._serial.write(value)

    def readline(self):
        if not self._serial:
            self.open()
        s = self._serial.readline()
        log.debug('received:%s' % repr(s))
        s = s.decode()
        if not len(s):
            raise SerialManagerError('Serial timeout!')
        return s

    def flush_input(self):
        '''Flush input buffer, discarding all it's contents.'''
        if not self._serial:
            self.open()
        self._serial.flushInput()

    def close(self):
        '''close connection'''
        if self._serial:
            self._serial.close()
            self._serial = None

serial_manager = SerialManager()
Python
0.999346
@@ -1014,17 +1014,20 @@ timeout= -1 +None ):%0A
ceb7b806c838a12d3447d0fd9bccc5aae49832d5
Use a new session so that server will not receive signals
garage/multiprocessing/__init__.py
garage/multiprocessing/__init__.py
__all__ = [
    'RpcConnectionError',
    'RpcError',
    'python',
]

import contextlib
import logging
import os
import os.path
import random
import shutil
import subprocess
import tempfile
import time

import garage.multiprocessing.server
from garage.multiprocessing.client import Connector
from garage.multiprocessing.client import RpcConnectionError
from garage.multiprocessing.client import RpcError


LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())


@contextlib.contextmanager
def python(executable='python2', protocol=2, authkey=None):
    """Start a server and return a Connector object (default to python2).
    """
    authkey = authkey or str(random.randint(1, 1e8))
    with create_socket() as addr, start_server(executable, addr, authkey):
        connector = Connector(addr, protocol, authkey)
        try:
            yield connector
        finally:
            connector.shutdown()


@contextlib.contextmanager
def create_socket():
    tempdir = tempfile.mkdtemp()
    try:
        socket_path = tempfile.mktemp(dir=tempdir)
        LOG.info('socket path %s', socket_path)
        yield socket_path
    finally:
        LOG.info('remove socket path %s', socket_path)
        shutil.rmtree(tempdir)


@contextlib.contextmanager
def start_server(executable, address, authkey):
    script_path = garage.multiprocessing.server.__file__
    args = [executable, script_path, '--listen-sock', address]
    if LOG.isEnabledFor(logging.INFO):
        args.append('-v')
    env = dict(os.environ)
    env['AUTHKEY'] = authkey
    server_proc = subprocess.Popen(args, env=env)
    try:
        wait_file_creation(address, timeout=3)
        yield server_proc
    finally:
        if server_proc.wait() != 0:
            LOG.warning('server returns %d', server_proc.returncode)


def wait_file_creation(path, timeout):
    end_time = time.time() + timeout
    while not os.path.exists(path):
        time.sleep(0.1)
        if end_time < time.time():
            raise Exception('timeout')
Python
0
@@ -1599,16 +1599,40 @@ en(args, + start_new_session=True, env=env
8b23c91b83982a85fe8a711c587d7db50e0bc14a
take 4
server.py
server.py
from flask import Flask, request
import json
import bot, setup

app = Flask(__name__)

PAT = '***REMOVED***'
PASSWORD = '***REMOVED***'

setup.create_persistent_menu(PAT)
mr_bot = create_bot(PAT)

def create_bot(token):
    return bot.Bot(token)


@app.route('/', methods=['GET'])
def handle_verification():
    print "Handling Verification"
    if request.args.get('hub.verify_token', '') == PASSWORD:
        print "Verification successful"
        return request.args.get('hub.challenge', '')
    else:
        print "Verification failed"
        return 'Error: Verification failed'

@app.route('/', methods=['POST'])
def handle_messages():
    print "Handling Messages"
    payload = request.get_data()
    for sender, message in messaging_events(payload):
        print "Incoming from %s: %s" % (sender, message)
        mr_bot.act_on_message(sender, message)
    return "ok"

def messaging_events(payload):
    """Generate tuples of (sender_id, message_text) from the
    provided payload.
    """
    data = json.loads(payload)
    messaging_events = data["entry"][0]["messaging"]
    for event in messaging_events:
        # Messages
        if "message" in event and "text" in event["message"]:
            yield event["sender"]["id"], event["message"]["text"].encode('unicode_escape')
        # Postbacks
        elif "postback" in event and "payload" in event["postback"]:
            yield event["sender"]["id"], event["postback"]["payload"].encode('unicode_escape')
        else:
            yield event["sender"]["id"], "I can't echo this"

if __name__ == '__main__':
    app.run(debug=True)
Python
0.99946
@@ -179,74 +179,21 @@ t = -create_bot(PAT)%0A%0Adef create_bot(token):%0A return bot.Bot(token) +bot.Bot(PAT)%0A %0A%0A@a
05d2421668e663bf9e98ec51ec1d8977ffe8c1b3
Add static folder
server.py
server.py
import os

from flask import Flask
from flask import send_from_directory
from flask_cors import CORS

from igc.controller.controller_register import register_controllers
from igc.util import cache

app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('database_uri', 'sqlite:///./sqllite.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)


@app.route("/")
def index():
    return app.send_static_file('index.html')


@app.route("/<path:path>")
def send_static(path):
    return send_from_directory('html', path)


if __name__ == '__main__':
    thread = cache.CacheThread()
    thread.start()
    app.run(debug=True, port=5000)
    # http_server = WSGIServer(('', 5000), app)
    # http_server.serve_forever()
Python
0.000001
@@ -211,16 +211,40 @@ __name__ +, static_url_path='html' )%0ACORS(a
11e5ce0369250f7c979dc0fe9ea59f25cf12c1e7
Fix after Python 3.11 deprecation of test methods returning values
test/test_toplevel.py
test/test_toplevel.py
import mpi4py
import unittest
import warnings
import os


class TestRC(unittest.TestCase):

    def testCall(self):
        rc = type(mpi4py.rc)()
        rc(initialize = rc.initialize)
        rc(threads = rc.threads)
        rc(thread_level = rc.thread_level)
        rc(finalize = rc.finalize)
        rc(fast_reduce = rc.fast_reduce)
        rc(recv_mprobe = rc.recv_mprobe)
        return rc

    def testCallKwArgs(self):
        rc = self.testCall()
        kwargs = rc.__dict__.copy()
        rc(**kwargs)

    def testInitKwArgs(self):
        rc = self.testCall()
        kwargs = rc.__dict__.copy()
        rc = type(mpi4py.rc)(**kwargs)

    def testBadAttribute(self):
        error = lambda: mpi4py.rc(ABCXYZ=123456)
        self.assertRaises(TypeError, error)
        error = lambda: setattr(mpi4py.rc, 'ABCXYZ', 123456)
        self.assertRaises(TypeError, error)
        error = lambda: getattr(mpi4py.rc, 'ABCXYZ')
        self.assertRaises(AttributeError, error)

    def testRepr(self):
        repr(mpi4py.rc)


class TestConfig(unittest.TestCase):

    def testGetInclude(self):
        path = mpi4py.get_include()
        self.assertTrue(isinstance(path, str))
        self.assertTrue(os.path.isdir(path))
        header = os.path.join(path, 'mpi4py', 'mpi4py.h')
        self.assertTrue(os.path.isfile(header))

    def testGetConfig(self):
        conf = mpi4py.get_config()
        self.assertTrue(isinstance(conf, dict))
        mpicc = conf.get('mpicc')
        if mpicc is not None:
            self.assertTrue(os.path.exists(mpicc))


@unittest.skipIf(os.name != 'posix', 'not-posix')
class TestProfile(unittest.TestCase):

    def testProfile(self):
        import platform, sysconfig
        bits = platform.architecture()[0][:-3]
        triplet = sysconfig.get_config_var('MULTIARCH') or ''
        libpath = [
            f"{prefix}{suffix}"
            for prefix in ("/lib", "/usr/lib")
            for suffix in (bits, f"/{triplet}", "")
        ]

        def mpi4py_profile(*args, **kargs):
            try:
                mpi4py.profile(*args, **kargs)
            except ValueError:
                pass

        with warnings.catch_warnings():
            warnings.simplefilter('error')
            with self.assertRaises(UserWarning):
                mpi4py.profile('hosts', path=["/etc"])
            warnings.simplefilter('ignore')
            for libname in ('c', 'm', 'dl', 'libdl.so.2'):
                mpi4py_profile(libname, path=libpath)
                for path in libpath:
                    mpi4py_profile(libname, path=path)
        with self.assertRaises(ValueError):
            mpi4py.profile('@querty')
        with self.assertRaises(ValueError):
            mpi4py.profile('@querty', path="/usr/lib")
        with self.assertRaises(ValueError):
            mpi4py.profile('@querty', path=["/usr/lib"])
        with self.assertRaises(ValueError):
            mpi4py.profile('@querty')


class TestPackage(unittest.TestCase):

    def testImports(self):
        import mpi4py
        import mpi4py.MPI
        import mpi4py.typing
        import mpi4py.__main__
        import mpi4py.bench
        import mpi4py.futures
        import mpi4py.futures.__main__
        import mpi4py.futures.server
        import mpi4py.util
        import mpi4py.util.pkl5
        import mpi4py.util.dtlib
        import mpi4py.run


if __name__ == '__main__':
    unittest.main()
Python
0.000026
@@ -93,25 +93,36 @@ -def testCall(self +@staticmethod%0A def newrc( ):%0A @@ -459,32 +459,29 @@ rc = self. -testCall +newrc ()%0A k @@ -577,24 +577,21 @@ = self. -testCall +newrc ()%0A
d3f8922394ca2e18d624f1d542f2fc13a18475d3
Make sorting links reset pagination
wnpp_debian_net/templatetags/sorting_urls.py
wnpp_debian_net/templatetags/sorting_urls.py
# Copyright (C) 2021 Sebastian Pipping <sebastian@pipping.org>
# Licensed under GNU Affero GPL v3 or later

from django import template

from ..url_tools import url_with_query

register = template.Library()

INTERNAL_DIRECTION_PREFIX_ASCENDING = ''
INTERNAL_DIRECTION_PREFIX_DESCENDING = '-'

EXTERNAL_DIRECTION_SUFFIX_ASCENDING = ';asc'
EXTERNAL_DIRECTION_SUFFIX_DESCENDING = ';desc'

_OPPOSITE_INTERNAL_PREFIX = {
    INTERNAL_DIRECTION_PREFIX_ASCENDING: INTERNAL_DIRECTION_PREFIX_DESCENDING,
    INTERNAL_DIRECTION_PREFIX_DESCENDING: INTERNAL_DIRECTION_PREFIX_ASCENDING,
}

_EXTERNAL_SUFFIX_FOR = {
    INTERNAL_DIRECTION_PREFIX_ASCENDING: EXTERNAL_DIRECTION_SUFFIX_ASCENDING,
    INTERNAL_DIRECTION_PREFIX_DESCENDING: EXTERNAL_DIRECTION_SUFFIX_DESCENDING,
}


def parse_sort_param(sort_param) -> tuple[str, str]:
    split_sort_param = sort_param.split(';')
    if len(split_sort_param) == 2 and split_sort_param[1] == 'desc':
        order = INTERNAL_DIRECTION_PREFIX_DESCENDING
    else:
        order = INTERNAL_DIRECTION_PREFIX_ASCENDING
    return split_sort_param[0], order


def combine_sort_param(column, internal_direction_prefix):
    return column + _EXTERNAL_SUFFIX_FOR[internal_direction_prefix]


@register.simple_tag(takes_context=True)
def self_url_with_sorting_for(context, future_column):
    """
    Takes the current page URL and adjusts the "sort=[..]" part
    in the query parameters to sort for a specific column.
    If the column is the same as the current one,
    direction is flipped: from ascending to descending and back.
    """
    url = context['request'].get_full_path()
    current_column, internal_direction_prefix = parse_sort_param(context['sort'])

    if future_column == current_column:
        internal_direction_prefix = _OPPOSITE_INTERNAL_PREFIX[internal_direction_prefix]

    future_sort = combine_sort_param(future_column, internal_direction_prefix)

    return url_with_query(url, sort=future_sort)
Python
0
@@ -1943,10 +1943,18 @@ ure_sort +, page=1 )%0A
300e1461174107f1c2f8523ce105739d42d71803
Write EMAIL_HOST to settings only if specified
fab_bundle/templates/settings.py
fab_bundle/templates/settings.py
from {{ base_settings }} import *

DEBUG = False
TEMPLATE_DEBUG = DEBUG

ADMINS = ({% for admin in admins %}
    ('{{ admin.name }}', '{{ admin.email }}'),{% endfor %}
)
MANAGERS = ADMINS
SEND_BROKEN_LINK_EMAILS = True

SECRET_KEY = '{{ secret_key }}'

BASE_URL = 'http{% if ssl_cert %}s{% endif %}://{{ http_host }}'

MEDIA_ROOT = '{{ media_root }}'
MEDIA_URL = BASE_URL + '/media/'
{% if staticfiles %}
STATIC_ROOT = '{{ static_root }}'
STATIC_URL = BASE_URL + '/static/'
{% endif %}
{% if cache >= 0 %}
CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': 'localhost:6379',
        'OPTIONS': {
            'DB': {{ cache }},
        },
    },
}
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
{% endif %}

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': '{{ http_host }}',
        'USER': 'postgres',
    }
}
{% if sentry_dsn %}
SENTRY_DSN = '{{ sentry_dsn }}'
{% endif %}
{% if email %}
EMAIL_SUBJECT_PREFIX = '[{{ http_host }}] '
SERVER_EMAIL = DEFAULT_FROM_EMAIL = '{{ email.from }}'
EMAIL_HOST = '{{ email.host }}'
{% if email.user %}EMAIL_HOST_USER = '{{ email.user }}'{% endif %}
{% if email.password %}EMAIL_HOST_PASSWORD = '{{ email.password }}'{% endif %}
{% if email.port %}EMAIL_PORT = {{ email.port }}{% endif %}
{% if email.backend %}EMAIL_BACKEND = '{{ email.user }}'{% endif %}
{% if email.tls %}EMAIL_USE_TLS = True{% endif %}
{% endif %}

SESSION_COOKIE_HTTPONLY = True{% if ssl_cert %}
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https'){% endif %}
{% if settings %}{{ settings|safe }}{% endif %}
Python
0
@@ -1177,16 +1177,35 @@ rom %7D%7D'%0A +%7B%25 if email.host %25%7D EMAIL_HO @@ -1223,24 +1223,35 @@ ail.host %7D%7D' +%7B%25 endif %25%7D %0A%7B%25 if email
432961409ea44f393636f7d4d67742ea6e310bfb
Version up
fixtures_mongoengine/__init__.py
fixtures_mongoengine/__init__.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import

from fixtures_mongoengine.exceptions import FixturesMongoengineException
from fixtures_mongoengine.fixture import Fixture
from fixtures_mongoengine.mixin import FixturesMixin

__version__ = '1.1.1'
__license__ = 'MIT'

__all__ = [
    'FixturesMongoengineException',
    'Fixture',
    'FixturesMixin'
]
Python
0
@@ -253,17 +253,17 @@ = '1.1. -1 +2 '%0A__lice
5db8338ceb258bbe086d3314b5f50a9b6da5cd28
Make import of _constant be absolute import for py3.x; See #388
wrappers/Python/generate_constants_module.py
wrappers/Python/generate_constants_module.py
from __future__ import print_function
import os,shutil

"""
A little module to wrap the params enum for use in Cython code

Ian Bell, May 2014
"""

def params_constants(enum_key):
    fName = os.path.join('..','..','include','DataStructures.h')
    contents = open(fName,'r').read()

    left = contents.find('{', contents.find('enum '+enum_key));
    right = contents.find('}', left)
    entries = contents[left+1:right]

    if entries.find('/*') > -1:
        raise ValueError('/* */ style comments are not allowed, replace them with // style comments')

    if not entries:
        raise ValueError('Unable to find '+enum_key)

    lines = entries.split('\n')
    lines = [line for line in lines if not line.strip().startswith('//')]
    for i,line in enumerate(lines):
        if line.find('/'):
            lines[i] = line.split('/')[0]
        if '=' in lines[i]:
            lines[i] = lines[i].split('=')[0].strip() + ','

    # Chomp all the whitespace, split at commas
    keys = ''.join(lines).replace(' ','').split(',')
    keys = [k for k in keys if k]
    return keys

def generate_cython(data):

    print('****** Writing the constants module ******')

    # Write the PXD definition file
    pxd_output_file = open('CoolProp/constants_header.pxd','w')
    pxd_output_file.write('# This file is automatically generated by the generate_constants_module.py script in wrappers/Python.\n# DO NOT MODIFY THE CONTENTS OF THIS FILE!\n\ncdef extern from "DataStructures.h" namespace "CoolProp":\n')
    for enum_key, entries in data:
        pxd_output_file.write('\tctypedef enum '+enum_key+':\n')
        for param in entries:
            param = param.strip()
            pxd_output_file.write('\t\t'+param+'\n')
    pxd_output_file.close()

    # Write the PYX implementation file
    pyx_output_file = open('CoolProp/_constants.pyx','w')
    pyx_output_file.write('# This file is automatically generated by the generate_constants_module.py script in wrappers/Python.\n# DO NOT MODIFY THE CONTENTS OF THIS FILE!\ncimport constants_header\n\n')
    for enum_key, entries in data:
        for param in entries:
            param = param.strip()
            pyx_output_file.write(param+' = '+'constants_header.'+param+'\n')
    pyx_output_file.close()

    # Write the PY implementation file
    py_output_file = open('CoolProp/constants.py','w')
    py_output_file.write('# This file is automatically generated by the generate_constants_module.py script in wrappers/Python.\n# DO NOT MODIFY THE CONTENTS OF THIS FILE!\nimport _constants\n\n')
    for enum_key, entries in data:
        for param in entries:
            param = param.strip()
            py_output_file.write(param+' = '+'_constants.'+param+'\n')
    py_output_file.close()

def generate():
    data = [(enum,params_constants(enum)) for enum in ['parameters', 'input_pairs', 'fluid_types', 'phases']]
    generate_cython(data)

if __name__=='__main__':
    generate()
Python
0
@@ -2566,16 +2566,65 @@ FILE!%5Cn +from __future__ import absolute_import%5Cn%5Cnfrom . import _
891a911b0523ce9ed42916c60934f52ba7dbedcb
Fix up 'limit' and 'expire' options for digest plugin.
flexget/plugins/plugin_digest.py
flexget/plugins/plugin_digest.py
from __future__ import unicode_literals, division, absolute_import
import logging
from datetime import datetime

from sqlalchemy import Column, Unicode, PickleType, Integer, DateTime

from flexget import plugin
from flexget.db_schema import versioned_base
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta

log = logging.getLogger('digest')
Base = versioned_base('digest', 0)


class DigestEntry(Base):
    __tablename__ = 'digest_entries'
    id = Column(Integer, primary_key=True)
    list = Column(Unicode, index=True)
    added = Column(DateTime, default=datetime.now)
    _entry = Column('entry', PickleType)
    entry = safe_pickle_synonym('_entry')


class OutputDigest(object):
    schema = {'type': 'string'}

    def on_task_learn(self, task, config):
        # TODO: Configurable entry state?
        with Session() as session:
            for entry in task.accepted:
                session.add(DigestEntry(list=config, entry=entry))


class EmitDigest(object):
    schema = {
        'type': 'object',
        'properties': {
            'list': {'type': 'string'},
            'limit': {'type': 'integer', 'default': -1},
            'expire': {
                'oneOf': [
                    {'type': 'string', 'format': 'interval'},
                    {'type': 'boolean'}],
                'default': True
            }
        },
        'required': ['list'],
        'additionalProperties': False
    }

    def on_task_input(self, task, config):
        entries = []
        with Session() as session:
            digest_entries = (session.query(DigestEntry).
                              filter(DigestEntry.list == config['list']).
                              order_by(DigestEntry.added.desc()))
            if isinstance(config['expire'], basestring):
                expire_time = parse_timedelta(config['expire'])
                digest_entries.filter(DigestEntry.added < datetime.now() - expire_time).delete()
            if config['limit'] > 0:
                # TODO: This doesn't work, figure good way to clear extra
                #digest_entries.offset(config['limit']).delete()
                pass
            for digest_entry in digest_entries.all():
                entries.append(Entry(digest_entry.entry))
                if config['expire'] is True:
                    session.delete(digest_entry)
        return entries


@event('plugin.register')
def register_plugin():
    plugin.register(OutputDigest, 'digest', api_ver=2)
    plugin.register(EmitDigest, 'emit_digest', api_ver=2)
Python
0
@@ -1697,17 +1697,16 @@ tries = -( session. @@ -1728,39 +1728,8 @@ ry). -%0A filt @@ -1766,17 +1766,16 @@ 'list'%5D) -. %0A @@ -1783,61 +1783,68 @@ - order_by(DigestEntry.added.desc())) +# Remove any entries older than the expire time, if defined. %0A @@ -2074,145 +2074,192 @@ -if config%5B'limit'%5D %3E 0:%0A # TODO: This doesn't work, figure good way to clear extra%0A #digest_entries.offset( +for index, digest_entry in enumerate(digest_entries.order_by(DigestEntry.added.desc()).all()):%0A # Just remove any entries past the limit, if set.%0A if 0 %3C conf @@ -2269,26 +2269,26 @@ 'limit'%5D -).delete() + %3C= index: %0A @@ -2300,124 +2300,204 @@ -pass%0A for digest_entry in digest_entries.all():%0A entries.append(Entry(digest_entry.entry)) + session.delete(digest_entry)%0A continue%0A entries.append(Entry(digest_entry.entry))%0A # If expire is 'True', we remove it after it is output once. %0A
9694d5d0cbdcca874b791e6616dda831f8961373
Add a little debugging to the Papilio target
flipsyfat/targets/papilio_pro.py
flipsyfat/targets/papilio_pro.py
#!/usr/bin/env python3

import argparse

from migen import *
from flipsyfat.cores.sd_emulator import SDEmulator
from flipsyfat.cores.sd_trigger import SDTrigger
from misoc.targets.papilio_pro import BaseSoC
from migen.build.generic_platform import *
from misoc.integration.soc_sdram import *
from misoc.integration.builder import *


io = [
    ("sdemu", 0,
        Subsignal("clk", Pins("C:8")),
        Subsignal("cmd", Pins("C:9")),
        Subsignal("d", Pins("C:10 C:11 C:12 C:13")),
        IOStandard("LVCMOS33")
    ),
    ("trigger", 0,
        Pins("C:0 C:1 C:2 C:3 C:4 C:5 C:6 C:7"),
        IOStandard("LVCMOS33")
    ),
    ("debug", 0,
        Pins("C:14 C:15"),
        IOStandard("LVCMOS33")
    ),
]


class Flipsyfat(BaseSoC):
    mem_map = {
        "sdemu": 0x30000000,
    }
    mem_map.update(BaseSoC.mem_map)

    def __init__(self, **kwargs):
        BaseSoC.__init__(self, **kwargs)
        self.platform.add_extension(io)

        self.submodules.sdemu = SDEmulator(self.platform, self.platform.request("sdemu"))
        self.register_mem("sdemu", self.mem_map["sdemu"], self.sdemu.bus, self.sdemu.mem_size)
        self.csr_devices += ["sdemu"]
        self.interrupt_devices += ["sdemu"]

        self.submodules.sdtrig = SDTrigger(self.sdemu.ll, self.platform.request("trigger"))
        self.csr_devices += ["sdtrig"]

        # Activity LED
        self.io_activity = (self.sdemu.ll.block_read_act | self.sdemu.ll.block_write_act )
        self.sync += self.platform.request("user_led").eq(self.io_activity)


def main():
    parser = argparse.ArgumentParser(description="Flipsyfat port to the Papilio Pro")
    builder_args(parser)
    soc_sdram_args(parser)
    args = parser.parse_args()

    soc = Flipsyfat(**soc_sdram_argdict(args))
    builder = Builder(soc, **builder_argdict(args))
    builder.build()


if __name__ == "__main__":
    main()
Python
0.000001
@@ -1531,16 +1531,210 @@ ctivity) +%0A%0A # Just for debugging%0A self.comb += self.platform.request(%22debug%22).eq(Cat(%0A self.sdemu.ll.card_status%5B5%5D, # appcmd%0A self.sdemu.ll.cmd_in_act%0A )) %0A %0A%0Adef
dc9101601bfea6bc3e8cbd7a58973360cc525d1f
Fix flake8 issue
foundation/organisation/views.py
foundation/organisation/views.py
from django.views.decorators.cache import cache_page
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.shortcuts import get_object_or_404
from django.http import HttpResponse

from iso3166 import countries
import unicodecsv

from .models import (Board, Project, ProjectType, Theme, WorkingGroup,
                     NetworkGroup, NetworkGroupMembership)


class BoardView(DetailView):
    model = Board
    template_name = 'organisation/board_details.html'
    board = None

    def get_object(self, *args, **kwargs):
        # Try to find the board based on the slug or 404
        return get_object_or_404(Board, slug=self.board)


class ProjectDetailView(DetailView):
    model = Project
    template_name = 'organisation/project_detail.html'


class ProjectListView(ListView):
    model = Project
    paginate_by = 15
    template_name = 'organisation/project_list.html'

    def get_queryset(self):
        # We only filter the list by one url parameter with
        # hierarchy as 1. filter, 2. theme, 3. type
        filter_param = self.request.GET.get('filter', None)
        if filter_param == 'popular':
            # Popular filter is featured projects
            return Project.objects.filter(featured=True)

        theme_param = self.request.GET.get('theme', None)
        if theme_param:
            return Project.objects.filter(themes__slug=theme_param)

        type_param = self.request.GET.get('type', None)
        if type_param:
            return Project.objects.filter(types__slug=type_param)

        return Project.objects.all()

    def get_context_data(self, **kwargs):
        context = super(ProjectListView, self).get_context_data(**kwargs)
        context['themes'] = Theme.objects.all()
        context['projecttypes'] = ProjectType.objects.all()
        return context


class ThemeDetailView(DetailView):
    model = Theme

    def get_context_data(self, **kwargs):
        theme = self.kwargs.get('slug', None)
        context = super(ThemeDetailView, self).get_context_data(**kwargs)
        context['themes'] = Theme.objects.exclude(slug=theme)
        return context


class WorkingGroupListView(ListView):
    model = WorkingGroup
    template_name = 'organisation/workinggroup_list.html'

    def get_queryset(self):
        return WorkingGroup.objects.active()

    def get_context_data(self, **kwargs):
        context = super(WorkingGroupListView, self).get_context_data(**kwargs)
        context['active_list'] = WorkingGroup.objects.active()
        context['incubator_list'] = WorkingGroup.objects.incubators()
        return context


class NetworkGroupDetailView(DetailView):
    model = NetworkGroup

    def get_object(self):
        country = self.kwargs.get('country', '')
        region = self.kwargs.get('region', '')
        return get_object_or_404(NetworkGroup, country_slug=country,
                                 region_slug=region)

    def get_context_data(self, **kwargs):
        context = super(NetworkGroupDetailView, self)\
            .get_context_data(**kwargs)
        # For country we want all members but only regional members for regions
        country = self.kwargs.get('country', None)
        region = self.kwargs.get('region', None)
        if region is None:
            context['regional_groups'] = NetworkGroup.objects.regions(country)
            members = NetworkGroupMembership.objects.filter(
                networkgroup__country_slug=country)
        else:
            members = NetworkGroupMembership.objects.filter(
                networkgroup__country_slug=country,
                networkgroup__region_slug=region)
        context['group_members'] = members.order_by('order', 'person__name')

        return context


@cache_page(60 * 30)
def networkgroup_csv_output(request):
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="network.csv"'
    writer = unicodecsv.writer(response)
    header_row = ['ISO3', 'Country', 'Geo coordinates', 'Map location',
                  'Local Groups status', 'Community Leaders', 'Website',
                  'Mailing List', 'Twitter handle', 'Youtube channel',
                  'Facebook page', 'Google+ page']
    working_groups = []
    for group in WorkingGroup.objects.all():
        topic = u'Topic: {0}'.format(group.name)
        working_groups.append(topic)
    header_row.extend(working_groups)
    writer.writerow(header_row)
    for group in NetworkGroup.objects.all():
        row = [countries.get(group.country.code).alpha3,  # ISO3
               group.get_country_display(),  # Country
               u'{lat},{lon}'.format(
                   lat=group.position.latitude,
                   lon=group.position.longitude
               ) if group.position.latitude else '',  # Geo coordinates
               u'{region}, {country}'.format(
                   region=group.region,
                   country=group.get_country_display()
               ) if group.region else '',  # Map location
               group.get_group_type_display(),  # Local group status
               u', '.join([member.name for member in group.members.all()]),  # Leaders
               group.homepage_url,  # Website
               group.mailinglist_url,
               group.twitter if group.twitter else '',
               group.youtube_url if group.youtube_url else '',
               group.facebook_url,
               group.gplus_url if group.gplus_url else '',]
        # Find topics of working group
        group_working_groups = [g.name for g in group.working_groups.all()]
        for working_group in working_groups:
            if working_group[len('Topic: '):] in group_working_groups:
                row.append('Y')
            else:
                row.append('')
        writer.writerow(row)
    return response
Python
0
@@ -5581,16 +5581,17 @@ else '', + %5D%0A%0A
f8a3b9fce8bfc7fdce9cb9e90e98d8a9991c3f29
Fix oper data not being encoded. Fixes #452
ydkgen/printer/python/class_has_data_printer.py
ydkgen/printer/python/class_has_data_printer.py
#  ----------------------------------------------------------------
# Copyright 2016 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------

""" class_has_data_printer.py

Printer for the _has_data method.

"""
from ydkgen.api_model import Bits, Class


class ClassHasDataPrinter(object):
    def __init__(self, ctx):
        """
        _has_data() printer

        :attribute ctx The printer context
        """
        self.ctx = ctx

    def print_output(self, clazz):
        """
        prints the _has_data() function.

        prints a function in the entity clazz that can be queried to find
        if the element has any data to be be updated or created in its hierarchy.
        """
        self._print_has_data_functions_header(clazz)
        self._print_has_data_functions_body(clazz)
        self._print_has_data_functions_trailer(clazz)

    def _print_has_data_functions_header(self, clazz):
        self.ctx.writeln('def _has_data(self):')
        self.ctx.lvl_inc()

    def _print_has_data_functions_body(self, clazz):
        self._print_has_data_function('not self.is_config()', 'False')
        if clazz.stmt.search_one('presence'):
            self._print_has_data_function('self._is_presence')
        for prop in clazz.properties():
            if isinstance(prop.property_type, Class):
                if not prop.is_many:
                    if not prop.property_type.is_identity():
                        self._print_has_data_function(
                            'self.%s is not None and self.%s._has_data()' % (prop.name, prop.name))
                        self.ctx.bline()
                    else:
                        self._print_has_data_function(
                            'self.%s is not None' % prop.name)
                        self.ctx.bline()
                else:
                    self.ctx.writeln('if self.%s is not None:' % prop.name)
                    self.ctx.lvl_inc()
                    self.ctx.writeln('for child_ref in self.%s:' % prop.name)
                    self.ctx.lvl_inc()
                    self._print_has_data_function('child_ref._has_data()')
                    self.ctx.lvl_dec()
                    self.ctx.lvl_dec()
                    self.ctx.bline()
            else:
                if not prop.is_many:
                    self.ctx.writeln('if self.%s is not None:' % prop.name)
                    self.ctx.lvl_inc()
                    if isinstance(prop.property_type, Bits):
                        self._print_has_data_function(
                            'self.%s._has_data()' % prop.name)
                    else:
                        self.ctx.writeln('return True')
                    self.ctx.lvl_dec()
                    self.ctx.bline()
                else:
                    self.ctx.writeln('if self.%s is not None:' % prop.name)
                    self.ctx.lvl_inc()
                    self.ctx.writeln('for child in self.%s:' % prop.name)
                    self.ctx.lvl_inc()
                    self.ctx.writeln('if child is not None:')
                    self.ctx.lvl_inc()
                    if isinstance(prop.property_type, Bits):
                        self._print_has_data_function('child._has_data()')
                    else:
                        self.ctx.writeln('return True')
                    self.ctx.lvl_dec()
                    self.ctx.lvl_dec()
                    self.ctx.lvl_dec()
                    self.ctx.bline()

    def _print_has_data_function(self, if_condition, return_value='True'):
        self.ctx.writeln('if {0}:'.format(if_condition))
        self.ctx.lvl_inc()
        self.ctx.writeln('return ' + return_value)
        self.ctx.lvl_dec()

    def _print_has_data_functions_trailer(self, clazz):
        self.ctx.writeln('return False')
        self.ctx.lvl_dec()
        self.ctx.bline()
Python
0.000001
@@ -1641,32 +1641,33 @@ clazz):%0A +# self._print_has_
177bd7546faea56750a182c46a8fd6a892ff5d6a
Update State turns, those aren't game attributes
game.py
game.py
import datetime
import json

import map_loader
import queue
import state
import utils


class GAME_STATUS(object):
    """ Game status constants. """
    lobby = 'lobby'          # In matchmaking lobby, waiting for all players
    playing = 'playing'      # In game mode, waiting for turns
    complete = 'complete'    # Game finished
    cancelled = 'cancelled'  # Broken?


class PLAYER_STATUS(object):
    waiting = 'waiting'  # Hasn't joined the lobby yet
    joined = 'joined'    # Has joined the lobby
    playing = 'playing'  # Sending moves and waiting for game state
    lost = 'lost'        # Missed turns/broken?


class Game(object):
    def __init__(self, id=None, players=None, name=None, map_name='default',
                 max_turns=17):
        """
        Initialize a new game. Note that when we load a game from the repo,
        we init an empty game, so all our arguments to the constructor are
        optional.
        """
        self.id = id
        self.name = name
        self.map_name = map_name
        self.players = players  # List of player usernames
        self.status = GAME_STATUS.lobby
        self.created = datetime.datetime.now()

        # These attributes are persisted in the state, not DB properties
        map = map_loader.read_map_file(map_name)
        print(map)
        self.state = state.State(map=map, max_turns=max_turns)
        self.queue = queue.Queue(players=players)

    def update(self, username, move):
        """ Execute a round. """
        #TODO: Definitely somethign with the Queue! Not sure what at the moment...
        self.current_turn += 1
        if self.current_turn == self.max_turns:
            self.status = GAME_STATUS.complete
Python
0
@@ -1548,24 +1548,66 @@ self. +queue.increment_move()%0A self.state. current_turn @@ -1628,16 +1628,22 @@ if self. +state. current_ @@ -1655,16 +1655,22 @@ == self. +state. max_turn
2a696cd458ab2f67df5a6cfce0fe2016a8106eb4
add default channels
gbot.py
gbot.py
#!/usr/bin/env python
# =============================================================================
# file = gbot.py
# description = IRC bot
# author = GR <https://github.com/shortdudey123>
# create_date = 2014-07-09
# mod_date = 2014-07-09
# version = 0.1
# usage = called as a class
# notes =
# python_ver = 2.7.6
# =============================================================================

import src.bot as bot

if __name__ == "__main__":
    gbot = bot.IRCBot(server="chat.freenode.com", nick="grbot", port=6667, realName='gbot', identify='', debug=True, connectDelay=4)
    gbot.run()
Python
0.000001
@@ -576,16 +576,59 @@ elay=4)%0A + gbot.setDefautChannels(%7B'##gbot': ''%7D)%0A gbot
40ae754565f52c7631798823d13332b37f52e0c5
fix misuse of msg_split
nethud/proto/telnet.py
nethud/proto/telnet.py
from __future__ import print_function

from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver

from nethud.proto.client import NethackFactory


class TelnetConnection(LineReceiver):

    def __init__(self, users):
        self.users = users
        self.uname = ''

    def connectionLost(self, reason):
        if NethackFactory.client:
            NethackFactory.client.deassoc_client(self.uname)
        if self.user.user_name in self.users:
            del self.users[self.user.user_name]
        self.uname = ''
        print(reason)

    def lineReceived(self, line):
        msg_split = line.split()
        if msg_split[0] == 'AUTH':
            if len(msg_split[0]) != 2:
                self.sendLine("ERR 406 Invalid Parameters.")
                return
            self.handle_auth(msg[1])
        elif msg[0] == 'QUIT':
            self.loseConnection()
        else:
            self.sendLine("ERR 452 Invalid Command")

    def handle_auth(uname):
        self.users[uname] = self
        self.uname = uname
        if NethackFactory.client:
            NethackFactory.client.assoc_client(uname, self)


def TelnetFactory(protocol.Factory):
    def __init__(self):
        self.users = {}

    def buildProtocol(self, addr):
        return TelnetConnection(users = self.users)
Python
0.000016
@@ -719,19 +719,16 @@ sg_split -%5B0%5D ) != 2:%0A @@ -843,16 +843,22 @@ auth(msg +_split %5B1%5D)%0A @@ -870,16 +870,22 @@ elif msg +_split %5B0%5D == '
fd9b9f8532bb6e481a18c4ead35e21bd572cd248
Return client station as StationUrl in SecureClient.register(_ex)
nintendo/nex/secure.py
nintendo/nex/secure.py
from nintendo.nex.service import ServiceClient
from nintendo.nex.kerberos import KerberosEncryption
from nintendo.nex.stream import NexStreamOut
from nintendo.nex.common import NexEncoder, DataHolder, StationUrl

import random

import logging
logger = logging.getLogger(__name__)


class ConnectionData(NexEncoder):
	version_map = {
		30504: 0,
		30810: 0
	}

	def decode_old(self, stream):
		self.station = StationUrl.parse(stream.string())
		self.connection_id = stream.u32()

	decode_v0 = decode_old


class SecureClient(ServiceClient):

	METHOD_REGISTER = 1
	METHOD_REQUEST_CONNECTION_DATA = 2
	METHOD_REQUEST_URLS = 3
	METHOD_REGISTER_EX = 4
	METHOD_TEST_CONNECTIVITY = 5
	METHOD_UPDATE_URLS = 6
	METHOD_REPLACE_URL = 7
	METHOD_SEND_REPORT = 8

	PROTOCOL_ID = 0xB

	def __init__(self, back_end, key, ticket, auth_client):
		super().__init__(back_end, key)
		self.ticket = ticket
		self.auth_client = auth_client
		self.kerberos_encryption = KerberosEncryption(self.ticket.key)

		station_url = self.auth_client.secure_station
		self.connection_id = int(station_url["CID"])
		self.principal_id = int(station_url["PID"])

	def connect(self, host, port):
		stream = NexStreamOut(self.back_end.version)
		stream.data(self.ticket.data)

		substream = NexStreamOut(self.back_end.version)
		substream.u32(self.auth_client.user_id)
		substream.u32(self.connection_id)
		substream.u32(random.randint(0, 0xFFFFFFFF)) #Used to check connection response

		stream.data(self.kerberos_encryption.encrypt(substream.buffer))
		super().connect(host, port, stream.buffer)
		self.set_secure_key(self.ticket.key)

	def register(self):
		logger.info("Secure.register()")
		#--- request ---
		stream, call_id = self.init_message(self.PROTOCOL_ID, self.METHOD_REGISTER)
		client_url = str(StationUrl(address=self.s.get_address(), port=self.s.get_port(), sid=15, natm=0, natf=0, upnp=0, pmp=0))
		stream.list([client_url], stream.string)
		self.send_message(stream)

		#--- response ---
		stream = self.get_response(call_id)
		result = stream.u32()
		connection_id = stream.u32()
		client_station = stream.string()
		logger.info("Secure.register -> (%i, %s)", connection_id, client_station)
		return client_station

	def request_connection_data(self, cid, pid):
		logger.info("Secure.request_connection_data(%i, %i)", cid, pid)
		#--- request ---
		stream, call_id = self.init_message(self.PROTOCOL_ID, self.METHOD_REQUEST_CONNECTION_DATA)
		stream.u32(cid)
		stream.u32(pid)
		self.send_message(stream)

		#--- response ---
		stream = self.get_response(call_id)
		bool = stream.bool()
		connection_data = stream.list(lambda: ConnectionData.from_stream(stream))
		logger.info("Secure.request_connection_data -> (%i, %s)", bool, [dat.station for dat in connection_data])
		return bool, connection_data

	def request_urls(self, cid, pid):
		logger.info("Secure.request_urls(%i, %i)", cid, pid)
		#--- request ---
		stream, call_id = self.init_message(self.PROTOCOL_ID, self.METHOD_REQUEST_URLS)
		stream.u32(cid)
		stream.u32(pid)
		self.send_message(stream)

		#--- response ---
		stream = self.get_response(call_id)
		bool = stream.bool()
		urls = stream.list(lambda: StationUrl.parse(stream.string()))
		logger.info("Secure.request_urls -> (%i, %s)", bool, urls)
		return bool, urls

	def register_ex(self, login_data):
		logger.info("Secure.register_ex(...)")
		#--- request ---
		stream, call_id = self.init_message(self.PROTOCOL_ID, self.METHOD_REGISTER_EX)
		client_url = str(StationUrl(address=self.s.get_address(), port=self.s.get_port(), sid=15, natm=0, natf=0, upnp=0, pmp=0))
		stream.list([client_url], stream.string)
		DataHolder(login_data).encode(stream)
		self.send_message(stream)

		#--- response ---
		stream = self.get_response(call_id)
		result = stream.u32()
		connection_id = stream.u32()
		client_station = stream.string()
		logger.info("Secure.register_ex -> (%i, %s)", connection_id, client_station)
		return client_station

	def test_connectivity(self):
		logger.info("Secure.test_connectivity()")
		#--- request ---
		stream, call_id = self.init_message(self.PROTOCOL_ID, self.METHOD_TEST_CONNECTIVITY)
		self.send_message(stream)

		#--- response ---
		self.get_response(call_id)
		logger.info("Secure.test_connectivity -> done")

	def replace_url(self, url, new):
		logger.info("Secure.replace_url(%s, %s)", url, new)
		#--- request ---
		stream, call_id = self.init_message(self.PROTOCOL_ID, self.METHOD_REPLACE_URL)
		stream.string(url)
		stream.string(new)
		self.send_message(stream)

		#--- response ---
		self.get_response(call_id)
		logger.info("Secure.replace_url -> done")

	def send_report(self, unk, data):
		logger.info("Secure.send_report(%08X, ...)")
		#--- request ---
		stream, call_id = self.init_message(self.PROTOCOL_ID, self.METHOD_SEND_REPORT)
		stream.u32(unk)
		stream.u16(len(data))
		stream.write(data)
		self.send_message(stream)

		#--- response ---
		self.get_response(call_id)
		logger.info("Secure.send_report -> done")
Python
0
@@ -2086,32 +2086,49 @@ lient_station = +StationUrl.parse( stream.string()%0A @@ -2118,32 +2118,33 @@ (stream.string() +) %0A%09%09logger.info(%22 @@ -3855,16 +3855,33 @@ ation = +StationUrl.parse( stream.s @@ -3887,16 +3887,17 @@ string() +) %0A%09%09logge
07e7e7618b8d24a478dbfae0106a61129bf90f3f
photon as a boolean
databricks/cluster_config.py
databricks/cluster_config.py
import sys
import requests
import argparse
import json
from run_databricks_jobs import getJobIds, getRequest


def updateJsonFile(fileName):
    # Open the JSON file for reading
    jsonFile = open(fileName, "r")
    data = json.load(jsonFile)
    jsonFile.close()

    if args.env == "staging":
        envCode = "stg"
    elif args.env == "prod":
        envCode = "prd"
    else:
        envCode = args.env

    # Edit content
    # Set python params for job
    python_params = args.job_parameters.split(" ")

    secretKey = "DATABASE_URL"
    if args.env == "qat":
        secretKey = "PERF_DATABASE_URL"

    env_vars = {
        "DATABASE_URL": "{{secrets/" + args.env + "/" + secretKey + "}}",
        "BRANCH": args.branch,
        "ENV_CODE": envCode
    }

    # Populate spark_conf
    spark_conf = {}
    for i in args.spark_config_var:
        spark_conf[i[0]] = i[1]

    if "manage" in args.job_name:
        subnet = args.job_name.split("-")
        subnet_param = "us-gov-west-" + subnet[1]
    else:
        subnet_param = args.availability_zone

    jenkins_tags = {}
    if args.jenkins_job_id:
        jenkins_tags["jenkins_job_id"] = args.jenkins_job_id

    # If we wanted to add the ability to add more tasks, we would just require a
    # loop right below here adding to data["tasks"][x]
    data["tasks"][0]["new_cluster"]["custom_tags"] = jenkins_tags
    data["tasks"][0]["spark_python_task"]["python_file"] = "dbfs:/FileStore/" + args.branch + "/manage.py"
    data["tasks"][0]["spark_python_task"]["parameters"] = python_params
    data["tasks"][0]["new_cluster"]["spark_env_vars"] = env_vars
    data["tasks"][0]["new_cluster"]["spark_conf"] = spark_conf
    data["tasks"][0]["new_cluster"]["aws_attributes"]["zone_id"] = subnet_param
    data["tasks"][0]["new_cluster"]["node_type_id"] = args.node_type_id
    # data["tasks"][0]["new_cluster"]["driver_node_type_id"] = args.driver_node_type_id
    data["tasks"][0]["new_cluster"]["num_workers"] = args.workers
    data["tasks"][0]["new_cluster"]["spark_version"] = args.spark_version
    data["name"] = args.job_name

    ## Save our changes to JSON file
    jsonFile = open(fileName, "w+")
    jsonFile.write(json.dumps(data))
    jsonFile.close()


if __name__ == '__main__':
    # Setup args for cluster config
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--instance-id', required=True)
    parser.add_argument('-j', '--job-name', default='manage', required=True)
    parser.add_argument('-b', '--branch', default='qat', required=True)
    parser.add_argument('-p', '--job-parameters', required=True)
    parser.add_argument('-e', '--env', required=True)
    parser.add_argument('-w', '--workers', default=16)
    parser.add_argument('-f', '--file-location', required=True)
    parser.add_argument('--availability-zone', default='us-gov-west-1a')
    parser.add_argument('--driver-node-type-id', default='i3en.2xlarge')
    parser.add_argument('--node-type-id', default='i3en.2xlarge')
    parser.add_argument('-s', '--spark-config-var', action='append', nargs='+')
    parser.add_argument('--spark-version', default='10.4.x-photon-scala2.12')
    parser.add_argument('--jenkins-job-id', default='not-set')

    args = parser.parse_args()
    INSTANCE_ID = args.instance_id

    # Start script
    jobs = getJobIds(getRequest("/jobs/list", INSTANCE_ID))

    if( args.job_name in jobs ):
        sys.stdout.write( (str(jobs[args.job_name])) )
        updateJsonFile(args.file_location)
    else:
        updateJsonFile(args.file_location)
Python
0.998469
@@ -778,16 +778,157 @@ %0A %7D%0A%0A + # Use photon%0A if args.photon:%0A spark_version = %2210.4.x-photon-scala2.12%22%0A else:%0A spark_version = %2210.4.x-scala2.12%22%0A%0A # Po @@ -2200,21 +2200,16 @@ ion%22%5D = -args. spark_ve @@ -3255,56 +3255,35 @@ ('-- -spark-version', default='10.4.x-photon-scala2.12 +photon', action='store_true ')%0A
a1c572b557b6fe5b94186763210f3bfa15f3e660
quick start
marsi/io/__init__.py
marsi/io/__init__.py
# Copyright 2016 Chr. Hansen A/S and The Novo Nordisk Foundation Center for Biosustainability, DTU.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#    http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Python
0.999421
@@ -620,20 +620,94 @@ under the License.%0A +%0Aimport xlwt%0A%0Adef write_excel_file(data_frame, path, molecules):%0A pass%0A
db8a5a8b0316d8784f275b61835a40b9c6bcd8f7
Made option type as integer
hyde.py
hyde.py
#!/usr/bin/env python
import os
import sys
import threading
from optparse import OptionParser
from hydeengine import Generator, Initializer, Server

#import cProfile

PROG_ROOT = os.path.dirname(os.path.abspath( __file__ ))

def main(argv):
    parser = OptionParser(usage="%prog [-f] [-q]", version="%prog 0.3b")
    parser.add_option("-s", "--sitepath",
                      dest = "site_path",
                      help = "Change the path of the site folder.")
    parser.add_option("-i", "--init",
                      action = 'store_true', dest = "init", default = False,
                      help = "Create a new hyde site.")
    parser.add_option("-f", "--force",
                      action = 'store_true', dest = "force_init", default = False,
                      help = "")
    parser.add_option("-t", "--template",
                      dest = "template",
                      help = "Choose which template you want to use.")
    parser.add_option("-g", "--generate",
                      action = "store_true", dest = "generate", default = False,
                      help = "Generate the source for your hyde site.")
    parser.add_option("-k", "--keep_watching",
                      action = "store_true", dest = "keep_watching", default = False,
                      help = "Start monitoring the source folder for changes.")
    parser.add_option("-d", "--deploy_to",
                      dest = "deploy_to",
                      help = "Change the path of the deploy folder.")
    parser.add_option("-w", "--webserve",
                      action = "store_true", dest = "webserve", default = False,
                      help = "Start an instance of the CherryPy webserver.")
    parser.add_option("-p", "--port",
                      dest = "port", default=8080,
                      help = "Port webserver should listen on (8080).")
    parser.add_option("-a", "--address",
                      dest = "address", default='localhost',
                      help = "Address webserver should listen on (localhost).")

    (options, args) = parser.parse_args()

    if len(args):
        parser.error("Unexpected arguments encountered.")

    if not options.site_path:
        options.site_path = os.getcwdu()

    if options.deploy_to:
        options.deploy_to = os.path.abspath(options.deploy_to)

    if options.init:
        initializer = Initializer(options.site_path)
        initializer.initialize(PROG_ROOT, options.template, options.force_init)

    generator = None
    server = None

    def quit(*args, **kwargs):
        if server and server.alive:
            server.quit()
        if generator:
            generator.quit()

    if options.generate:
        generator = Generator(options.site_path)
        generator.generate(options.deploy_to, options.keep_watching, quit)

    if options.webserve:
        server = Server(options.site_path, address=options.address, port=options.port)
        server.serve(options.deploy_to, quit)

    if ((options.generate and options.keep_watching) or options.webserve):
        try:
            print "Letting the server and/or the generator do their thing..."
            if server:
                server.block()
                if generator:
                    generator.quit()
            elif generator:
                generator.block()
        except:
            print sys.exc_info()
            quit()

    if argv == []:
        print parser.format_option_help()

if __name__ == "__main__":
    main(sys.argv[1:])
    # cProfile.run('main(sys.argv[1:])', filename='hyde.cprof')
    # import pstats
    # stats = pstats.Stats('hyde.cprof')
    # stats.strip_dirs().sort_stats('time').print_stats(20)
Python
0.999965
@@ -1837,16 +1837,53 @@ lt=8080, + %0A type='int', %0A
8ff2781029dcd2189879e4c164ae5833bd5b176b
set expected num of containers to 20
tests/common_setup.py
tests/common_setup.py
#!/usr/bin/python
# Copyright 2017 Northern.tech AS
#
#    Licensed under the Apache License, Version 2.0 (the "License");
#    you may not use this file except in compliance with the License.
#    You may obtain a copy of the License at
#
#        https://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS,
#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#    See the License for the specific language governing permissions and
#    limitations under the License.

import pytest
from MenderAPI import auth, adm, reset_mender_api
from common import *
from common_docker import *
import conftest
import time


def wait_for_containers(expected_containers, defined_in):
    for _ in range(60 * 5):
        out = subprocess.check_output("docker-compose -p %s %s ps -q" % (conftest.docker_compose_instance, "-f " + " -f ".join(defined_in)), shell=True)
        if len(out.split()) == expected_containers:
            time.sleep(60)
            return
        else:
            time.sleep(1)

    pytest.fail("timeout: %d containers not running for docker-compose project: %s" % (expected_containers, conftest.docker_compose_instance))


@pytest.fixture(scope="function")
def standard_setup_one_client(request):
    if getattr(request, 'param', False) and request.param != "force_new" and setup_type() == ST_OneClient:
        return

    restart_docker_compose()
    reset_mender_api()

    set_setup_type(ST_OneClient)


def setup_set_client_number_bootstrapped(clients):
    docker_compose_cmd("scale mender-client=%d" % clients)
    ssh_is_opened()

    auth.reset_auth_token()
    adm.accept_devices(clients)

    set_setup_type(None)


@pytest.fixture(scope="function")
def standard_setup_one_client_bootstrapped():
    if setup_type() == ST_OneClientBootstrapped:
        return

    restart_docker_compose()
    reset_mender_api()
    adm.accept_devices(1)

    set_setup_type(ST_OneClientBootstrapped)


@pytest.fixture(scope="function")
def standard_setup_two_clients_bootstrapped():
    if setup_type() == ST_TwoClientsBootstrapped:
        return

    restart_docker_compose(2)
    reset_mender_api()
    adm.accept_devices(2)

    set_setup_type(ST_TwoClientsBootstrapped)


@pytest.fixture(scope="function")
def standard_setup_one_client_bootstrapped_with_s3():
    if setup_type() == ST_OneClientsBootstrapped_AWS_S3:
        return

    stop_docker_compose()
    reset_mender_api()

    docker_compose_cmd("-f ../docker-compose.client.yml \
                        -f ../docker-compose.storage.s3.yml \
                        -f ../docker-compose.yml \
                        -f ../extra/travis-testing/s3.yml up -d",
                       use_common_files=False)
    docker_compose_cmd("logs -f &")

    ssh_is_opened()
    auth.reset_auth_token()
    adm.accept_devices(1)

    set_setup_type(ST_OneClientsBootstrapped_AWS_S3)


@pytest.fixture(scope="function")
def standard_setup_without_client():
    if setup_type() == ST_NoClient:
        return

    stop_docker_compose()
    reset_mender_api()

    docker_compose_cmd("-f ../docker-compose.yml \
                        -f ../docker-compose.storage.minio.yml \
                        -f ../docker-compose.testing.yml up -d",
                       use_common_files=False)

    set_setup_type(ST_NoClient)


@pytest.fixture(scope="function")
def standard_setup_with_signed_artifact_client(request):
    if getattr(request, 'param', False) and request.param != "force_new" and setup_type() == ST_SignedClient:
        return

    stop_docker_compose()
    reset_mender_api()

    docker_compose_cmd("-f ../extra/signed-artifact-client-testing/docker-compose.signed-client.yml up -d")
    ssh_is_opened()

    auth.reset_auth_token()
    adm.accept_devices(1)

    set_setup_type(ST_SignedClient)


@pytest.fixture(scope="function")
def standard_setup_with_short_lived_token():
    if setup_type() == ST_ShortLivedAuthToken:
        return

    stop_docker_compose()
    reset_mender_api()

    docker_compose_cmd("-f ../docker-compose.yml \
                        -f ../docker-compose.client.yml \
                        -f ../docker-compose.storage.minio.yml \
                        -f ../docker-compose.testing.yml \
                        -f ../extra/expired-token-testing/docker-compose.short-token.yml up -d",
                       use_common_files=False)
    ssh_is_opened()

    auth.reset_auth_token()
    adm.accept_devices(1)

    set_setup_type(ST_ShortLivedAuthToken)


@pytest.fixture(scope="function")
def running_custom_production_setup(request):
    conftest.production_setup_lock.acquire()
    reset_mender_api()

    # since we are starting a manual instance of the backend,
    # let the script know the instance is called "testprod"
    # so that is cleaned up correctly on test failure/error
    def fin():
        conftest.production_setup_lock.release()
        stop_docker_compose()

    conftest.docker_compose_instance = "testprod"
    request.addfinalizer(fin)

    set_setup_type(ST_CustomSetup)


@pytest.fixture(scope="function")
def multitenancy_setup_without_client(request):
    stop_docker_compose()
    reset_mender_api()

    docker_compose_cmd("-f ../docker-compose.yml \
                        -f ../docker-compose.storage.minio.yml \
                        -f ../docker-compose.testing.yml \
                        -f ../docker-compose.tenant.yml \
                        %s up -d" % (conftest.mt_docker_compose_file),
                       use_common_files=False)

    # wait a bit for the backend to start
    wait_for_containers(19, ["../docker-compose.yml",
                             "../docker-compose.tenant.yml",
                             "../docker-compose.storage.minio.yml"])

    def fin():
        stop_docker_compose()

    request.addfinalizer(fin)
    set_setup_type(ST_MultiTenancyNoClient)
Python
0
@@ -5712,10 +5712,10 @@ ers( -19 +20 , %5B%22
f28494537e1ce281b25ae5a2439386477361cad9
Fix missing import in reminder code.
cogs/reminder.py
cogs/reminder.py
from .utils import checks, db, time
from discord.ext import commands
import discord
import asyncio
import datetime

class Reminders(db.Table):
    id = db.PrimaryKeyColumn()

    expires = db.Column(db.Datetime, index=True)
    created = db.Column(db.Datetime, default="now() at time zone 'utc'")
    event = db.Column(db.String)
    extra = db.Column(db.JSON, default="'{}'::jsonb")

class Timer:
    __slots__ = ('args', 'kwargs', 'event', 'id', 'created_at', 'expires')

    def __init__(self, *, record):
        self.id = record['id']

        extra = record['extra']
        self.args = extra.get('args', [])
        self.kwargs = extra.get('kwargs', {})
        self.event = record['event']
        self.created_at = record['created']
        self.expires = record['expires']

    @classmethod
    def temporary(cls, *, expires, created, event, args, kwargs):
        pseudo = {
            'id': None,
            'extra': { 'args': args, 'kwargs': kwargs },
            'event': event,
            'created': created,
            'expires': expires
        }
        return cls(record=pseudo)

    def __eq__(self, other):
        try:
            return self.id == other.id
        except AttributeError:
            return False

    def __hash__(self):
        return hash(self.id)

    @property
    def human_delta(self):
        return time.human_timedelta(self.created_at)

    def __repr__(self):
        return f'<Timer created={self.created_at} expires={self.expires} event={self.event}>'

class Reminder:
    """Reminders to do something."""

    def __init__(self, bot):
        self.bot = bot
        self._have_data = asyncio.Event(loop=bot.loop)
        self._current_timer = None
        self._task = bot.loop.create_task(self.dispatch_timers())

    def __unload(self):
        self._task.cancel()

    async def __error(self, ctx, error):
        if isinstance(error, commands.BadArgument):
            await ctx.send(error)

    async def get_active_timers(self, *, connection=None, days=7):
        query = "SELECT * FROM reminders WHERE expires < (CURRENT_DATE + $1::interval) ORDER BY expires;"
        con = connection or self.bot.pool

        records = await con.fetch(query, datetime.timedelta(days=days))
        return [Timer(record=a) for a in records]

    async def wait_for_active_timers(self, *, connection=None, days=7):
        async with db.MaybeAcquire(connection=connection, pool=self.bot.pool) as con:
            timers = await self.get_active_timers(connection=con, days=days)
            if len(timers):
                self._have_data.set()
                return timers

            self._have_data.clear()
            self._current_timer = None
            await self._have_data.wait()
            return await self.get_active_timers(connection=con, days=days)

    async def call_timer(self, timer):
        # delete the timer
        query = "DELETE FROM reminders WHERE id=$1;"
        await self.bot.pool.execute(query, timer.id)

        # dispatch the event
        event_name = f'{timer.event}_timer_complete'
        self.bot.dispatch(event_name, timer)

    async def dispatch_timers(self):
        try:
            while not self.bot.is_closed():
                # can only asyncio.sleep for up to ~48 days reliably
                # so we're gonna cap it off at 40 days
                # see: http://bugs.python.org/issue20493
                timers = await self.wait_for_active_timers(days=40)
                timer = self._current_timer = timers[0]
                now = datetime.datetime.utcnow()

                if timer.expires >= now:
                    to_sleep = (timer.expires - now).total_seconds()
                    await asyncio.sleep(to_sleep)

                await self.call_timer(timer)
        except asyncio.CancelledError:
            pass
        except (OSError, discord.ConnectionClosed, asyncpg.PostgresConnectionError):
            self._task.cancel()
            self._task = self.bot.loop.create_task(self.dispatch_timers())

    async def short_timer_optimisation(self, seconds, timer):
        await asyncio.sleep(seconds)
        event_name = f'{timer.event}_timer_complete'
        self.bot.dispatch(event_name, timer)

    async def create_timer(self, *args, **kwargs):
        """Creates a timer.

        Parameters
        -----------
        when: datetime.datetime
            When the timer should fire.
        event: str
            The name of the event to trigger.
            Will transform to 'on_{event}_timer_complete'.
        \*args
            Arguments to pass to the event
        \*\*kwargs
            Keyword arguments to pass to the event
        connection: asyncpg.Connection
            Special keyword-only argument to use a specific connection
            for the DB request.

        Note
        ------
        Arguments and keyword arguments must be JSON serialisable.

        Returns
        --------
        :class:`Timer`
        """
        when, event, *args = args

        try:
            connection = kwargs.pop('connection')
        except KeyError:
            connection = self.bot.pool

        now = datetime.datetime.utcnow()
        timer = Timer.temporary(event=event, args=args, kwargs=kwargs, expires=when, created=now)
        delta = (when - now).total_seconds()
        if delta <= 60:
            # a shortcut for small timers
            self.bot.loop.create_task(self.short_timer_optimisation(delta, timer))
            return timer

        query = """INSERT INTO reminders (event, extra, expires)
                   VALUES ($1, $2::jsonb, $3)
                   RETURNING id;
                """

        row = await connection.fetchrow(query, event, { 'args': args, 'kwargs': kwargs }, when)
        timer.id = row[0]

        self._have_data.set()

        # check if this timer is earlier than our currently run timer
        if self._current_timer and when < self._current_timer.expires:
            # cancel the task and re-run it
            self._task.cancel()
            self._task = self.bot.loop.create_task(self.dispatch_timers())

        return timer

    @commands.command(aliases=['timer'])
    async def reminder(self, ctx, when: time.FutureTime, *, message: commands.clean_content = 'something'):
        """Reminds you of something after a certain amount of time.

        The time can be any direct date (e.g. YYYY-MM-DD) or a human
        readable offset. Examples:

        - "next thursday at 3pm"
        - "tomorrow"
        - "3 days"
        - "2d"

        Times are in UTC.
        """

        timer = await self.create_timer(when.dt, 'reminder', ctx.author.id, ctx.channel.id, message)
        delta = time.human_timedelta(when.dt)
        await ctx.send(f"Alright {ctx.author.mention}, I'll remind you about {message} in {delta}.")

    async def on_reminder_timer_complete(self, timer):
        author_id, channel_id, message = timer.args

        channel = self.bot.get_channel(channel_id)
        if channel is None:
            # peculiar
            return

        await channel.send(f'<@{author_id}>, {timer.human_delta} you asked to be reminded of {message}.')

def setup(bot):
    bot.add_cog(Reminder(bot))
Python
0
@@ -92,16 +92,31 @@ asyncio%0A +import asyncpg%0A import d
c39e9fa5952a01de7527da5fc1ec0d04451e300e
change distutils build dir for --with-pydebug python builds.
command/build.py
command/build.py
"""distutils.command.build Implements the Distutils 'build' command.""" # This module should be kept compatible with Python 2.1. __revision__ = "$Id$" import sys, os from distutils.core import Command from distutils.util import get_platform def show_compilers (): from distutils.ccompiler import show_compilers show_compilers() class build (Command): description = "build everything needed to install" user_options = [ ('build-base=', 'b', "base directory for build library"), ('build-purelib=', None, "build directory for platform-neutral distributions"), ('build-platlib=', None, "build directory for platform-specific distributions"), ('build-lib=', None, "build directory for all distribution (defaults to either " + "build-purelib or build-platlib"), ('build-scripts=', None, "build directory for scripts"), ('build-temp=', 't', "temporary build directory"), ('compiler=', 'c', "specify the compiler type"), ('debug', 'g', "compile extensions and libraries with debugging information"), ('force', 'f', "forcibly build everything (ignore file timestamps)"), ('executable=', 'e', "specify final destination interpreter path (build.py)"), ] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None, "list available compilers", show_compilers), ] def initialize_options (self): self.build_base = 'build' # these are decided only after 'build_base' has its final value # (unless overridden by the user or client) self.build_purelib = None self.build_platlib = None self.build_lib = None self.build_temp = None self.build_scripts = None self.compiler = None self.debug = None self.force = 0 self.executable = None def finalize_options (self): plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3]) # 'build_purelib' and 'build_platlib' just default to 'lib' and # 'lib.<plat>' under the base build directory. We only use one of # them for a given distribution, though -- if self.build_purelib is None: self.build_purelib = os.path.join(self.build_base, 'lib') if self.build_platlib is None: self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier) # 'build_lib' is the actual directory that we will use for this # particular module distribution -- if user didn't supply it, pick # one of 'build_purelib' or 'build_platlib'. if self.build_lib is None: if self.distribution.ext_modules: self.build_lib = self.build_platlib else: self.build_lib = self.build_purelib # 'build_temp' -- temporary directory for compiler turds, # "build/temp.<plat>" if self.build_temp is None: self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier) if self.build_scripts is None: self.build_scripts = os.path.join(self.build_base, 'scripts-' + sys.version[0:3]) if self.executable is None: self.executable = os.path.normpath(sys.executable) # finalize_options () def run (self): # Run all relevant sub-commands. 
This will be some subset of: # - build_py - pure Python modules # - build_clib - standalone C libraries # - build_ext - Python extensions # - build_scripts - (Python) scripts for cmd_name in self.get_sub_commands(): self.run_command(cmd_name) # -- Predicates for the sub-command list --------------------------- def has_pure_modules (self): return self.distribution.has_pure_modules() def has_c_libraries (self): return self.distribution.has_c_libraries() def has_ext_modules (self): return self.distribution.has_ext_modules() def has_scripts (self): return self.distribution.has_scripts() sub_commands = [('build_py', has_pure_modules), ('build_clib', has_c_libraries), ('build_ext', has_ext_modules), ('build_scripts', has_scripts), ] # class build
Python
0
@@ -2074,32 +2074,296 @@ .version%5B0:3%5D)%0A%0A + # Make it so Python 2.x and Python 2.x with --with-pydebug don't%0A # share the same build directories. Doing so confuses the build%0A # process for C modules%0A if hasattr(sys, 'gettotalrefcount'):%0A plat_specifier += '-pydebug'%0A%0A # 'build
2774627e095a0e1676f15adf21572436da3af318
use self.window
tests/test_3141596.py
tests/test_3141596.py
import os
import re
import shutil
from functools import wraps
from unittest import TestCase
from unittesting.utils import UTSetting
from unittesting import DeferrableTestCase
from unittesting.helpers import TempDirectoryTestCase
import sublime

version = sublime.version()

__dir__ = os.path.dirname(os.path.abspath(__file__))

UUT_dir = os.path.join(
    sublime.packages_path(), 'User', 'UnitTesting')


def set_package(package):
    try:
        shutil.rmtree(os.path.join(sublime.packages_path(), package))
    except:
        pass
    try:
        shutil.copytree(
            os.path.join(__dir__, package),
            os.path.join(sublime.packages_path(), package))
    except:
        pass
    try:
        shutil.rmtree(os.path.join(UUT_dir, package))
    except:
        pass


def cleanup_package(package):
    try:
        shutil.rmtree(os.path.join(sublime.packages_path(), package))
    except:
        pass


def perpare_package(package, output=None, syntax_test=False, delay=None):
    def wrapper(func):
        @wraps(func)
        def real_wrapper(self):
            set_package(package)
            if output:
                # set by _Ooutput/unittesting.json
                outfile = None
                result_file = os.path.join(sublime.packages_path(), package, output)
            else:
                outfiledir = os.path.join(UUT_dir, package)
                outfile = os.path.join(outfiledir, "result")
                result_file = outfile
                if not os.path.isdir(outfiledir):
                    os.makedirs(outfiledir)

            if syntax_test:
                yield 1000
                sublime.run_command(
                    "unit_testing_syntax",
                    {"package": package, "output": outfile})
            else:
                sublime.run_command(
                    "unit_testing",
                    {"package": package, "output": outfile})

            if delay:
                yield delay

            with open(result_file, 'r') as f:
                txt = f.read()
            m = re.search('^UnitTesting: Done\\.', txt, re.MULTILINE)
            self.assertTrue(hasattr(m, "group"))
            func(self, txt)
            cleanup_package(package)
        return real_wrapper
    return wrapper


class TestUnitTesting(DeferrableTestCase):

    def tearDown(self):
        UTSetting.set("recent-package", "UnitTesting")

    @perpare_package("_Success")
    def test_success(self, txt):
        m = re.search('^OK', txt, re.MULTILINE)
        self.assertTrue(hasattr(m, "group"))

    @perpare_package("_Failure")
    def test_failure(self, txt):
        m = re.search('^FAILED \(failures=1\)', txt, re.MULTILINE)
        self.assertTrue(hasattr(m, "group"))

    @perpare_package("_Error")
    def test_error(self, txt):
        m = re.search('^ERROR', txt, re.MULTILINE)
        self.assertTrue(hasattr(m, "group"))

    @perpare_package("_Output", "tests/result")
    def test_output(self, txt):
        m = re.search('^OK', txt, re.MULTILINE)
        self.assertTrue(hasattr(m, "group"))

    @perpare_package("_Deferred", delay=2000)
    def test_deferred(self, txt):
        m = re.search('^OK', txt, re.MULTILINE)
        self.assertTrue(hasattr(m, "group"))
        yield 1000

    if version >= '3000':
        @perpare_package("_Async", delay=2000)
        def test_async(self, txt):
            m = re.search('^OK', txt, re.MULTILINE)
            self.assertTrue(hasattr(m, "group"))
            yield 1000


if version >= '3103':
    class TestSyntax(DeferrableTestCase):

        def tearDown(self):
            UTSetting.set("recent-package", "UnitTesting")

        @perpare_package("_Syntax_Failure", syntax_test=True)
        def test_fail_syntax(self, txt):
            m = re.search('^FAILED: 1 of 21 assertions in 1 files failed$', txt, re.MULTILINE)
            self.assertTrue(hasattr(m, "group"))

        @perpare_package("_Syntax_Success", syntax_test=True)
        def test_success_syntax(self, txt):
            m = re.search('^OK', txt, re.MULTILINE)
            self.assertTrue(hasattr(m, "group"))

        @perpare_package("_Syntax_Error", syntax_test=True)
        def test_error_syntax(self, txt):
            m = re.search('^ERROR: No syntax_test', txt, re.MULTILINE)
            self.assertTrue(hasattr(m, "group"))


def tidy_path(path):
    return os.path.realpath(os.path.normcase(path))


class TestTempDirectoryTestCase(TempDirectoryTestCase):

    def test_temp_dir(self):
        self.assertTrue(tidy_path(
            self._temp_dir),
            tidy_path(sublime.active_window().folders()[0]))
Python
0.000002
@@ -4539,30 +4539,18 @@ th(s -ublime.active_ +elf. window -() .fol
d253c7138e3f91c2e99ab160fd52ae9a6b4cd425
Test for the _lookup_and_handle_new_spells()
tests/test_classes.py
tests/test_classes.py
import unittest

from classes import Paladin
from models.spells.loader import load_paladin_spells_for_level


class PaladinTests(unittest.TestCase):
    def setUp(self):
        self.name = "Netherblood"
        self.level = 3
        self.dummy = Paladin(name=self.name, level=self.level, health=100, mana=100, strength=10)

    def test_init(self):
        """ The __init__ should load/save all the spells for the Paladin"""
        spells = [spell for level in range(1,self.level+1) for spell in load_paladin_spells_for_level(level)]
        self.assertNotEqual(len(self.dummy.learned_spells), 0)

        for spell in spells:
            self.assertIn(spell.name, self.dummy.learned_spells)
            char_spell = self.dummy.learned_spells[spell.name]
            # find the largest rank in our spells list (the char has the highest rank only)
            max_rank = list(sorted(filter(lambda x: x.name == spell.name, spells), key=lambda x: x.rank))[-1].rank
            self.assertEqual(char_spell.rank, max_rank)

    def test_leave_combat(self):
        """ Except the normal behaviour, leave_combat should remove the SOR buff from the pally
        and reset his spell cds """
        self.dummy._in_combat = True
        self.dummy.SOR_ACTIVE = True
        for spell in self.dummy.learned_spells.values():
            spell._cooldown_counter = 100
        self.assertTrue(self.dummy.is_in_combat())

        self.dummy.leave_combat()

        self.assertFalse(self.dummy.is_in_combat())
        self.assertFalse(self.dummy.SOR_ACTIVE)
        # All cooldowns should be reset
        self.assertTrue(all([spell._cooldown_counter == 0 for spell in self.dummy.learned_spells.values()]))

    def test_reset_spell_cooldowns(self):
        """ The reset_spell_cooldowns goes through every spell and resets its CD"""
        for spell in self.dummy.learned_spells.values():
            spell._cooldown_counter = 100
        self.assertTrue(all([spell._cooldown_counter != 0 for spell in self.dummy.learned_spells.values()]))

        self.dummy.reset_spell_cooldowns()

        self.assertTrue(all([spell._cooldown_counter == 0 for spell in self.dummy.learned_spells.values()]))

    def test_level_up(self):
        """ Except the normal behaviour, it should learn new spells for the character """
        # empty the learned spells, it's stored as a static variable, which is not good practice but doesn't hurt in the game
        Paladin.learned_spells = {}
        pl = Paladin(name="fuck a nine to five")
        spells_to_learn = [spell.name for spell in load_paladin_spells_for_level(pl.level + 1)]
        for spell in spells_to_learn:
            self.assertNotIn(spell, pl.learned_spells)

        pl._level_up()

        for spell in spells_to_learn:
            self.assertIn(spell, pl.learned_spells)

    def test_level_up_to_level(self):
        """ Except the normal behaviour, it should learn new spells for the character """
        # empty the learned spells, it's stored as a static variable, which is not good practice but doesn't hurt in the game
        Paladin.learned_spells = {}
        pl = Paladin(name="fuck a nine to five")
        to_level = 4
        spells_to_learn = [spell for level in range(2, to_level + 1) for spell in load_paladin_spells_for_level(level)]
        for spell in spells_to_learn:
            has_not_learned_spell = spell.name not in pl.learned_spells
            has_smaller_rank = spell.rank > pl.learned_spells[spell.name].rank if not has_not_learned_spell else False
            self.assertTrue(has_not_learned_spell or has_smaller_rank)

        pl._level_up(to_level=to_level)

        for spell in spells_to_learn:
            self.assertIn(spell.name, pl.learned_spells)


if __name__ == '__main__':
    unittest.main()
Python
0.000002
@@ -3748,16 +3748,700 @@ pells)%0A%0A + def test_lookup_and_handle_new_spells(self):%0A %22%22%22 Should look up the available spells for our level and learn them or update our existing ones%22%22%22%0A Paladin.learned_spells = %7B%7D%0A pl = Paladin(name=%22fuck a nine to five%22)%0A print(pl.learned_spells)%0A pl.level = 3%0A spells_to_learn = %5Bspell for spell in load_paladin_spells_for_level(pl.level)%5D%0A for spell in spells_to_learn:%0A has_not_learned_spell = spell.name not in pl.learned_spells%0A has_smaller_rank = spell.rank %3E pl.learned_spells%5Bspell.name%5D.rank if not has_not_learned_spell else False%0A self.assertTrue(has_not_learned_spell or has_smaller_rank)%0A%0A %0Aif __na
4885c633fa43c9f98f2894f68b7c2b33a06d3094
test s3 with prefix and without prefix
tests/test_command.py
tests/test_command.py
import boto3
import os
import pytest
import shutil
import sys
import tempfile
from botocore.exceptions import ClientError
from moto import mock_s3
from piprepo import command
from piprepo.utils import get_project_name_from_file

PACKAGES = [
    'Django-1.11.2-py2.py3-none-any.whl',
    'ansible-2.0.0.0.tar.gz',
    'ansible-2.3.1.0.tar.gz',
    'python_http_client-2.2.1-py2.py3-none-any.whl',
    'avocado-framework-plugin-varianter-yaml-to-mux-53.0.tar.gz',
    'affinitic.recipe.fakezope2eggs-0.3-py2.4.egg'
]


# Fixtures

@pytest.yield_fixture(scope="function")
def tempindex():
    temp = tempfile.mkdtemp()
    index = {
        'packages': PACKAGES,
        'source': os.path.join(temp, 'source'),
        'destination': os.path.join(temp, 'destination'),
    }
    os.mkdir(index['source'])
    os.mkdir(index['destination'])
    for package in index['packages']:
        with open(os.path.join(index['source'], package), 'w') as f:
            f.write(package)
    yield index
    shutil.rmtree(temp)


# Tests

def test_bare_command():
    with pytest.raises(SystemExit) as ex:
        sys.argv = ['piprepo']
        command.main()
    assert 'usage: piprepo' in ex.message


def test_build(tempindex):
    sys.argv = ['', 'build', tempindex['source']]
    command.main()
    for package in tempindex['packages']:
        source_file = os.path.join(tempindex['source'], package)
        index = os.path.join(tempindex['source'], 'simple',
                             get_project_name_from_file(package), 'index.html')
        assert os.path.isfile(source_file)
        assert os.path.isfile(index)
        with open(os.path.join(tempindex['source'], 'simple', 'index.html')) as f:
            assert get_project_name_from_file(package) in f.read()
        with open(index, 'r') as f:
            assert package in f.read()


def test_dir_sync(tempindex):
    sys.argv = ['', 'sync', tempindex['source'], tempindex['destination']]
    command.main()
    for package in tempindex['packages']:
        dest_file = os.path.join(tempindex['destination'], package)
        dest_index = os.path.join(tempindex['destination'], 'simple',
                                  get_project_name_from_file(package), 'index.html')
        assert os.path.isfile(dest_file)
        assert os.path.isfile(dest_index)
        with open(os.path.join(tempindex['destination'], 'simple', 'index.html')) as f:
            assert get_project_name_from_file(package) in f.read()
        with open(dest_index, 'r') as f:
            assert package in f.read()


@mock_s3
def test_s3_sync(tempindex):
    conn = boto3.resource("s3")
    bucket = conn.create_bucket(Bucket='fake-piprepo-bucket')
    sys.argv = ['', 'sync', tempindex['source'], 's3://{}/piprepo'.format(bucket.name)]
    command.main()

    for package in tempindex['packages']:
        package_obj = conn.Object(bucket.name, os.path.join('piprepo', package))
        package_index_obj = conn.Object(
            bucket.name,
            os.path.join('piprepo', 'simple', get_project_name_from_file(package), 'index.html')
        )
        root_index_obj = conn.Object(bucket.name, os.path.join('piprepo', 'simple', 'index.html'))

        assert s3_object_exists(package_obj)
        assert s3_object_exists(package_index_obj)
        assert s3_object_exists(root_index_obj)
        assert get_project_name_from_file(package).encode() in root_index_obj.get()['Body'].read()
        assert package.encode() in package_index_obj.get()['Body'].read()


def test_project_names():
    expected = {
        'affinitic-recipe-fakezope2eggs',
        'ansible',
        'avocado-framework-plugin-varianter-yaml-to-mux',
        'django',
        'python-http-client'
    }
    assert {get_project_name_from_file(p) for p in PACKAGES} == expected


def s3_object_exists(obj):
    try:
        obj.load()
    except ClientError as e:
        if e.response['Error']['Code'] == "404":
            return False
        else:
            raise
    return True
Python
0.000013
@@ -2504,24 +2504,36 @@ test_s3_sync +_with_prefix (tempindex): @@ -3430,32 +3430,935 @@ Body'%5D.read()%0A%0A%0A +@mock_s3%0Adef test_s3_sync_without_prefix(tempindex):%0A conn = boto3.resource(%22s3%22)%0A bucket = conn.create_bucket(Bucket='fake-piprepo-bucket')%0A sys.argv = %5B'', 'sync', tempindex%5B'source'%5D, 's3://%7B%7D'.format(bucket.name)%5D%0A command.main()%0A%0A for package in tempindex%5B'packages'%5D:%0A package_obj = conn.Object(bucket.name, package)%0A package_index_obj = conn.Object(%0A bucket.name, os.path.join('simple', get_project_name_from_file(package), 'index.html')%0A )%0A root_index_obj = conn.Object(bucket.name, os.path.join('simple', 'index.html'))%0A%0A assert s3_object_exists(package_obj)%0A assert s3_object_exists(package_index_obj)%0A assert s3_object_exists(root_index_obj)%0A assert get_project_name_from_file(package).encode() in root_index_obj.get()%5B'Body'%5D.read()%0A assert package.encode() in package_index_obj.get()%5B'Body'%5D.read()%0A%0A%0A def test_project
95ef8aeb05db779563b72f1f4cbbb6d0d48e37cc
Use raw string in tests/test_cookies.py
tests/test_cookies.py
tests/test_cookies.py
# -*- coding: utf-8 -*-

import json

import pytest


def test_cookies_fixture(testdir):
    """Make sure that pytest accepts the `cookies` fixture."""

    # create a temporary pytest test module
    testdir.makepyfile("""
        # -*- coding: utf-8 -*-

        def test_valid_fixture(cookies):
            assert hasattr(cookies, 'bake')
            assert callable(cookies.bake)
    """)

    # run pytest with the following cmd args
    result = testdir.runpytest('-v')

    # fnmatch_lines does an assertion internally
    result.stdout.fnmatch_lines([
        '*::test_valid_fixture PASSED',
    ])

    # make sure that that we get a '0' exit code for the testsuite
    assert result.ret == 0


@pytest.fixture
def cookiecutter_template(tmpdir):
    template = tmpdir.ensure('cookiecutter-template', dir=True)

    template_config = {
        'repo_name': 'foobar',
        'short_description': 'Test Project',
    }
    template.join('cookiecutter.json').write(json.dumps(template_config))

    template_readme = '\n'.join([
        '{{cookiecutter.repo_name}}',
        '{% for _ in cookiecutter.repo_name %}={% endfor %}',
        '{{cookiecutter.short_description}}',
    ])

    repo = template.ensure('{{cookiecutter.repo_name}}', dir=True)
    repo.join('README.rst').write(template_readme)

    return template


def test_cookies_bake_with_template_kwarg(testdir, cookiecutter_template):
    """bake accepts a template kwarg."""
    testdir.makepyfile("""
        # -*- coding: utf-8 -*-

        def test_bake_project(cookies):
            result = cookies.bake(
                extra_context={'repo_name': 'helloworld'},
                template='%s',
            )

            assert result.exit_code == 0
            assert result.exception is None
            assert result.project.basename == 'helloworld'
            assert result.project.isdir()

            assert str(result) == '<Result {}>'.format(result.project)
    """ % cookiecutter_template)

    # run pytest without the template cli arg
    result = testdir.runpytest('-v')

    result.stdout.fnmatch_lines([
        '*::test_bake_project PASSED',
    ])


def test_cookies_bake_template_kwarg_overrides_cli_option(
    testdir, cookiecutter_template,
):
    """bake template kwarg overrides cli option."""
    testdir.makepyfile("""
        # -*- coding: utf-8 -*-

        def test_bake_project(cookies):
            result = cookies.bake(
                extra_context={'repo_name': 'helloworld'},
                template='%s',
            )

            assert result.exit_code == 0
            assert result.exception is None
            assert result.project.basename == 'helloworld'
            assert result.project.isdir()

            assert str(result) == '<Result {}>'.format(result.project)
    """ % cookiecutter_template)

    # run pytest with a bogus template name
    # it should use template directory passed to `cookies.bake`
    result = testdir.runpytest('-v', '--template=foobar')

    result.stdout.fnmatch_lines([
        '*::test_bake_project PASSED',
    ])


def test_cookies_bake(testdir, cookiecutter_template):
    """Programmatically create a **Cookiecutter** template and use `bake` to
    create a project from it.
    """
    testdir.makepyfile("""
        # -*- coding: utf-8 -*-

        def test_bake_project(cookies):
            result = cookies.bake(extra_context={'repo_name': 'helloworld'})

            assert result.exit_code == 0
            assert result.exception is None
            assert result.project.basename == 'helloworld'
            assert result.project.isdir()

            assert str(result) == '<Result {}>'.format(result.project)
    """)

    result = testdir.runpytest(
        '-v',
        '--template={}'.format(cookiecutter_template)
    )

    result.stdout.fnmatch_lines([
        '*::test_bake_project PASSED',
    ])


def test_cookies_bake_should_create_new_output_directories(
    testdir, cookiecutter_template
):
    """Programmatically create a **Cookiecutter** template and use `bake` to
    create a project from it.
    """
    testdir.makepyfile("""
        # -*- coding: utf-8 -*-

        def test_bake_should_create_new_output(cookies):
            first_result = cookies.bake()
            assert first_result.exception is None
            assert first_result.project.dirname.endswith('bake00')

            second_result = cookies.bake()
            assert second_result.exception is None
            assert second_result.project.dirname.endswith('bake01')
    """)

    result = testdir.runpytest(
        '-v',
        '--template={}'.format(cookiecutter_template)
    )

    result.stdout.fnmatch_lines([
        '*::test_bake_should_create_new_output PASSED',
    ])


def test_cookies_bake_should_handle_exception(testdir):
    """Programmatically create a **Cookiecutter** template and make sure that
    cookies.bake() handles exceptions that happen during project generation.

    We expect **Cookiecutter** to raise a `NonTemplatedInputDirException`.
    """
    template = testdir.tmpdir.ensure('cookiecutter-fail', dir=True)

    template_config = {
        'repo_name': 'foobar',
        'short_description': 'Test Project'
    }
    template.join('cookiecutter.json').write(json.dumps(template_config))

    template.ensure('cookiecutter.repo_name', dir=True)

    testdir.makepyfile("""
        # -*- coding: utf-8 -*-

        def test_bake_should_fail(cookies):
            result = cookies.bake()

            assert result.exit_code == -1
            assert result.exception is not None
            assert result.project is None
    """)

    result = testdir.runpytest('-v', '--template={}'.format(template))

    result.stdout.fnmatch_lines([
        '*::test_bake_should_fail PASSED',
    ])
Python
0.00001
@@ -1650,32 +1650,33 @@ template= +r '%25s',%0A @@ -2512,16 +2512,17 @@ emplate= +r '%25s',%0A
83006927725a16615930b748e2a46a85cafc6430
Fix one more typo on make_nearest_neighbour_index
tensorflow_hub/pip_package/setup.py
tensorflow_hub/pip_package/setup.py
# Copyright 2018 The TensorFlow Hub Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Setup for pip package."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from datetime import datetime
from setuptools import find_packages
from setuptools import setup
import sys

# Can't import the module during setup.py.
# Use execfile to find __version__.
with open('tensorflow_hub/version.py') as in_file:
  exec(in_file.read())

REQUIRED_PACKAGES = [
    'numpy >= 1.12.0',
    'six >= 1.12.0',
    'protobuf >= 3.8.0',  # No less than what ../WORKSPACE uses.
]

project_name = 'tensorflow-hub'
if '--project_name' in sys.argv:
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')
  sys.argv.pop(project_name_idx)

# If we're dealing with a nightly build we need to make sure that the
# version changes for every release.
version = __version__
if project_name == 'tf-hub-nightly':
  version += datetime.now().strftime('%Y%m%d%H%M')

setup(
    name=project_name,  # Automatic: tensorflow_hub, etc. Case insensitive.
    version=version.replace('-', ''),
    description=('TensorFlow Hub is a library to foster the publication, '
                 'discovery, and consumption of reusable parts of machine '
                 'learning models.'),
    long_description='',
    url='https://github.com/tensorflow/hub',
    author='Google LLC',
    author_email='packages@tensorflow.org',
    packages=find_packages(),
    install_requires=REQUIRED_PACKAGES,
    extras_require={
        'make_image_classifier': ['keras_preprocessing[image]'],
        'make_nearest_neighbour_index': [
            'apache_beam',
            'annoy',
        ],
    },
    entry_points={
        'console_scripts': [
            ('make_image_classifier = '
             'tensorflow_hub.tools.make_image_classifier.'
             'make_image_classifier:run_main [make_image_classifier]'),
            ('make_nearest_neighbour_index = tensorflow_hub.tools.'
             'make_nearest_neighbour_index.main:main '
             '[make_nearest_neighbour_index]'),
        ],
    },
    # PyPI package information.
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Mathematics',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    license='Apache 2.0',
    keywords=('tensorflow machine learning share module subgraph component hub '
              'embedding retraining transfer'),
)
Python
0.999999
@@ -2722,18 +2722,42 @@ index.ma -in +ke_nearest_neighbour_index :main '%0A
ddfc569ba310ce2de3b4a4ae63111556646496f8
remove more f-strings
tests/test_keyfile.py
tests/test_keyfile.py
# MIT licensed
# Copyright (c) 2018 lilydjwg <lilydjwg@gmail.com>, et al.

import os
import tempfile
import contextlib

from nvchecker.source import HTTPError

import pytest
pytestmark = [pytest.mark.asyncio]

@contextlib.contextmanager
def unset_github_token_env():
  token = os.environ.get('NVCHECKER_GITHUB_TOKEN')
  try:
    if token:
      del os.environ['NVCHECKER_GITHUB_TOKEN']
    yield token
  finally:
    if token:
      os.environ['NVCHECKER_GITHUB_TOKEN'] = token

async def test_keyfile_missing(run_source):
  test_conf = '''\
[example]
github = harry-sanabria/ReleaseTestRepo
'''
  assert await run_source(test_conf) in ['20140122.012101', None]

async def test_keyfile_invalid(run_source):
  with tempfile.NamedTemporaryFile(mode='w') as f, \
      unset_github_token_env():
    f.write('''\
[keys]
github = xxx
''')
    f.flush()
    test_conf = f'''\
[example]
github = harry-sanabria/ReleaseTestRepo

[__config__]
keyfile = {f.name}
'''

    try:
      version = await run_source(test_conf)
      assert version is None # out of allowance
      return
    except HTTPError as e:
      assert e.code == 401
      return

    raise Exception('expected 401 response')

@pytest.mark.skipif('NVCHECKER_GITHUB_TOKEN' not in os.environ,
                    reason='no key given')
async def test_keyfile_valid(run_source):
  with tempfile.NamedTemporaryFile(mode='w') as f, \
      unset_github_token_env() as token:
    f.write('''\
[keys]
github = {token}
'''.format(token=token))
    f.flush()
    test_conf = '''\
[example]
github = harry-sanabria/ReleaseTestRepo

[__config__]
keyfile = {name}
'''.format(name=f.name)

    assert await run_source(test_conf) == '20140122.012101'
Python
0.000118
@@ -868,17 +868,16 @@ _conf = -f '''%5C%0A%5Bex @@ -952,25 +952,37 @@ = %7B -f. name%7D%0A - ''' +'''.format(name=f.name) %0A%0A
1fa3acf2b926162235372f34d368aab31acc14b0
Add unittest exception for timedelta on Python < 2.6
tests/test_max_age.py
tests/test_max_age.py
# -*- coding: utf-8 -*-
"""
    test
    ~~~~

    Flask-CORS is a simple extension to Flask allowing you to support cross
    origin resource sharing (CORS) using a simple decorator.

    :copyright: (c) 2014 by Cory Dolphin.
    :license: MIT, see LICENSE for more details.
"""

from datetime import timedelta
from tests.base_test import FlaskCorsTestCase
from flask import Flask

try:
    # this is how you would normally import
    from flask.ext.cors import *
except:
    # support local usage without installed package
    from flask_cors import *


class MaxAgeTestCase(FlaskCorsTestCase):
    def setUp(self):
        self.app = Flask(__name__)

        @self.app.route('/defaults')
        @cross_origin()
        def defaults():
            return 'Should only return headers on OPTIONS'

        @self.app.route('/test_string')
        @cross_origin(max_age=600)
        def test_string():
            return 'Open!'

        @self.app.route('/test_time_delta')
        @cross_origin(max_age=timedelta(minutes=10))
        def test_time_delta():
            return 'Open!'

    def test_defaults(self):
        ''' By default, Access-Control-Allow-Methods should only be returned
            if the client makes an OPTIONS request.
        '''
        with self.app.test_client() as c:
            for verb in self.iter_verbs(c):
                self.assertFalse(ACL_MAX_AGE in verb('/defaults').headers)

    def test_string(self):
        ''' If the methods parameter is defined, always return the allowed
            methods defined by the user.
        '''
        with self.app.test_client() as c:
            for verb in self.iter_verbs(c):
                self.assertEqual(
                    verb('/test_string').headers.get(ACL_MAX_AGE),
                    '600'
                )

    def test_time_delta(self):
        ''' If the methods parameter is defined, always return the allowed
            methods defined by the user.
        '''
        with self.app.test_client() as c:
            for verb in self.iter_verbs(c):
                self.assertEqual(
                    verb('/test_time_delta').headers.get(ACL_MAX_AGE),
                    '600'
                )


if __name__ == "__main__":
    unittest.main()
Python
0
@@ -303,16 +303,27 @@ medelta%0A +import sys%0A from tes @@ -1957,32 +1957,169 @@ er.%0A '''%0A + # timedelta.total_seconds is not available in older versions of Python%0A if sys.version_info %3C (2, 7):%0A return%0A%0A with sel
b8374e20640630044f59e4b4733e588345e07ab5
Fix unittest in asyncio debug mode
tests/test_prepare.py
tests/test_prepare.py
import inspect

from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = await st.get_first_row(1)
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, await st.get_value(10))

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),

            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                    SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                    ELSE $1::{type} END'''.format(
                type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = await st.get_value(val)
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)

    async def test_prepare_4(self):
        s = await self.con.prepare('SELECT $1::smallint')
        self.assertEqual(await s.get_value(10), 10)

        s = await self.con.prepare('SELECT $1::smallint * 2')
        self.assertEqual(await s.get_value(10), 20)

    async def test_prepare_5_unknownoid(self):
        s = await self.con.prepare("SELECT 'test'")
        self.assertEqual(await s.get_value(), 'test')

    async def test_prepare_6_with(self):
        async with self.con.prepare('SELECT $1::smallint') as stmt:
            self.assertEqual(await stmt.get_value(10), 10)

    async def test_prepare_7_with(self):
        with self.assertRaisesRegex(RuntimeError, 'nested.*async with'):
            async with self.con.prepare('SELECT $1::smallint') as stmt:
                async with stmt:
                    pass

        with self.assertRaisesRegex(RuntimeError, 'nested.*async with'):
            s = await self.con.prepare("SELECT 'test'")
            async with s:
                async with s:
                    pass

    async def test_prepare_8_uninitialized(self):
        methods = {'get_parameters', 'get_attributes', 'get_aiter',
                   'get_list', 'get_value', 'get_first_row'}

        stmt = self.con.prepare('SELECT $1::smallint')

        for meth in methods:
            with self.subTest(method=meth, closed=False, initialized=False):
                with self.assertRaisesRegex(RuntimeError, 'not initialized'):
                    val = getattr(stmt, meth)()
                    if inspect.iscoroutine(val):
                        await val

        await stmt.free()

        for meth in methods:
            with self.subTest(method=meth, closed=True, initialized=False):
                with self.assertRaisesRegex(RuntimeError, 'cannot.*closed'):
                    val = getattr(stmt, meth)()
                    if inspect.iscoroutine(val):
                        await val
Python
0.000006
@@ -2966,32 +2966,32 @@ f inspect.is -coroutin +awaitabl e(val):%0A @@ -3315,16 +3315,16 @@ t.is -coroutin +awaitabl e(va
a44e3be0b0a6188dfa85fcb53433b64ca81f5f46
test output_subprocess
tests/test_process.py
tests/test_process.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test scriptharness/process.py
"""
from __future__ import absolute_import, division, print_function, \
    unicode_literals
import mock
import os
import psutil
from scriptharness.exceptions import ScriptHarnessFatal
import scriptharness.process as shprocess
from scriptharness.unicode import to_unicode
from six.moves.queue import Queue
import sys
import unittest


def find_unused_pid():
    """Find an unused pid for testing.
    """
    for num in range(1000, 10000):
        if not psutil.pid_exists(num):
            return num
    return None


# TestProcess {{{1
class TestProcess(unittest.TestCase):
    """Test process.
    """
    def test_kill_nonexistent_pid(self):
        """test_process | Kill a nonexistent pid
        """
        pid = find_unused_pid()
        self.assertRaises(psutil.NoSuchProcess, shprocess.kill_proc_tree,
                          pid, include_parent=True)
        self.assertRaises(psutil.NoSuchProcess, shprocess.kill_proc_tree,
                          pid, include_parent=False)

    @mock.patch('scriptharness.process.psutil')
    def test_kill_proc_tree(self, mock_psutil):
        """test_process | kill_proc_tree
        """
        parent = mock.MagicMock()
        mock_psutil.Process.return_value = parent
        shprocess.kill_proc_tree(99, include_parent=False)
        self.assertFalse(parent.kill.called)
        shprocess.kill_proc_tree(99, include_parent=True)
        parent.kill.assert_called_once_with()

    @staticmethod
    @mock.patch('scriptharness.process.psutil')
    def test_kill_runner(mock_psutil):
        """test_process | kill_runner
        """
        def raise_nosuchprocess(*args, **kwargs):
            """test helper"""
            if args or kwargs:
                pass
            raise psutil.NoSuchProcess(50)
        mock_psutil.Process = raise_nosuchprocess
        process = mock.MagicMock()
        # This should not raise
        shprocess.kill_runner(process)

    def test_command_subprocess(self):
        """test_process | command_subprocess
        """
        queue = Queue()
        self.assertRaises(
            SystemExit, shprocess.command_subprocess, queue,
            [sys.executable, "-c",
             "from __future__ import print_function;print('foo')"],
        )
        line = queue.get(block=True, timeout=.1)
        self.assertEqual(to_unicode("foo"), to_unicode(line).rstrip())

    @mock.patch('scriptharness.process.psutil')
    def test_keyboard_interrupt(self, mock_psutil):
        """test_process | KeyboardInterrupt
        """
        class FakeQueue(object):
            """Raises KeyboardInterrupt"""
            def get(self, **_):
                """Raise KeyboardInterrupt"""
                self.raise_ki()

            @staticmethod
            def raise_ki():
                """Silence pylint"""
                raise KeyboardInterrupt()

        queue = FakeQueue()
        logger = mock.MagicMock()
        runner = mock.MagicMock()
        add_line_cb = mock.MagicMock()
        self.assertRaises(
            ScriptHarnessFatal, shprocess.watch_command,
            logger, queue, runner, add_line_cb
        )
        mock_psutil.Process.assert_called_once_with(os.getpid())
Python
0.000185
@@ -408,16 +408,32 @@ ort sys%0A +import tempfile%0A import u @@ -2484,16 +2484,814 @@ rip())%0A%0A + def test_output_subprocess(self):%0A %22%22%22test_process %7C output_subprocess%0A %22%22%22%0A stdout = tempfile.NamedTemporaryFile()%0A stderr = tempfile.NamedTemporaryFile()%0A self.assertRaises(%0A SystemExit, shprocess.output_subprocess,%0A stdout, stderr,%0A %5Bsys.executable, %22-c%22,%0A %22from __future__ import print_function; import sys;print('foo');%22%0A %22print('bar', file=sys.stderr)%22%5D,%0A )%0A with open(stdout.name) as filehandle:%0A contents = filehandle.read().rstrip()%0A self.assertEqual(contents, %22foo%22)%0A with open(stderr.name) as filehandle:%0A contents = filehandle.read().rstrip()%0A self.assertEqual(contents, %22bar%22)%0A stdout.close()%0A stderr.close()%0A%0A @moc
9e62b41dc762b1088bd5c1474678d7e7ed120add
test case with stage parameter
tests/test_profile.py
tests/test_profile.py
import os
import sys

import pytest

from pkgstack.profile import Profile

TESTS_PATH=os.path.realpath(os.path.dirname(__file__))


def test_profile_create(tmpdir):
    config = Profile(os.path.join(TESTS_PATH, 'resources/sample.yml')).config
    assert config == [
        {'install': 'pytest', 'stage': 'test'},
        {'name': 'Install pytest-cov', 'install': 'pytest-cov', 'stage': 'test'},
        {'name': 'Install codecov', 'install': 'codecov',
         'alternatives': ['test1', 'test2'], 'stage': 'test'},
        {'name': 'Install dtguess', 'install': 'dtguess==0.1.3'},
        {'install': 'dtguess==0.1.3',
         'alternatives': ['https://github.com/ownport/dtguess/releases/download/v0.1.3/dtguess-0.1.3.tar.gz'],
         }
    ]


def test_process():
    assert Profile(os.path.join(TESTS_PATH, 'resources/sample.yml')).process() == {
        'packages.successed': 1,
        'packages.failed': 1,
        'packages.total': 5
    }


def test_profile_incorrect_stage_type():
    with pytest.raises(RuntimeError):
        p = Profile(os.path.join(TESTS_PATH, 'resources/sample.yml'), stages='test')
Python
0.000001
@@ -941,16 +941,256 @@ %7D%0A%0A%0A +def test_profile_process_via_stage():%0A%0A assert Profile(os.path.join(TESTS_PATH, 'resources/sample.yml'), stages=%5B'test',%5D).process() == %7B%0A 'packages.successed': 5,%0A 'packages.failed': 0,%0A 'packages.total': 5%0A %7D%0A%0A%0A def test
c2b846468f9f02173f35ce97c0a37da465aa7a92
Fix safe_mmkdir import
tests/test_profile.py
tests/test_profile.py
import pytest

import json
import os
import shutil

from great_expectations.profile.base import DataSetProfiler
from great_expectations.profile.basic_dataset_profiler import BasicDatasetProfiler
from great_expectations.profile.columns_exist import ColumnsExistProfiler
from great_expectations.dataset.pandas_dataset import PandasDataset
from great_expectations.data_context import DataContext
from great_expectations.util import safe_mmkdir

# Tests to write:
# test_cli_method_works -> test_cli
# test context-based profile methods
# test class-based profile methods


def test_DataSetProfiler_methods():
    toy_dataset = PandasDataset({"x": [1, 2, 3]})

    assert DataSetProfiler.validate_dataset(1) == False
    assert DataSetProfiler.validate_dataset(toy_dataset)

    with pytest.raises(NotImplementedError) as e_info:
        DataSetProfiler.profile(toy_dataset)


def test_ColumnsExistProfiler():
    toy_dataset = PandasDataset({"x": [1, 2, 3]})

    expectations_config, evr_config = ColumnsExistProfiler.profile(toy_dataset)
    print(json.dumps(expectations_config, indent=2))

    # assert expectations_config == {
    #     "data_asset_name": None,
    #     "data_asset_type": "Dataset",
    #     "meta": {
    #         "great_expectations.__version__": "0.7.0-beta",
    #         "ColumnsExistProfiler": {
    #             "created_by": "BasicDatasetProfiler",
    #             "created_at": 0,
    #         },
    #     },
    #     "expectations": [
    #         {
    #             "expectation_type": "expect_column_to_exist",
    #             "kwargs": {
    #                 "column": "x"
    #             }
    #         }
    #     ]
    # }


def test_BasicDatasetProfiler():
    toy_dataset = PandasDataset({"x": [1, 2, 3]})
    assert len(toy_dataset.get_expectations(
        suppress_warnings=True)["expectations"]) == 0

    expectations_config, evr_config = BasicDatasetProfiler.profile(toy_dataset)
    # print(json.dumps(expectations_config, indent=2))

    assert len(toy_dataset.get_expectations(
        suppress_warnings=True)["expectations"]) > 0

    # We should add an additional test that instantiates the batch via context, so the data_asset_name will be populated.
    assert expectations_config["data_asset_name"] == None
    assert "BasicDatasetProfiler" in expectations_config["meta"]
    # We should add an additional test that instantiates the batch via context, so that batch_kwargs will be populated.
    assert set(expectations_config["meta"]["BasicDatasetProfiler"].keys()) == {
        "created_by", "created_at"
    }
    # for exp in expectations_config["expectations"]:
    #     assert "BasicDatasetProfiler" in exp["meta"]
    #     assert exp["meta"]["BasicDatasetProfiler"] == {
    #         "confidence": "very low"
    #     }

    # Example:
    # {
    #     "data_asset_name": "notable_works_by_charles_dickens",
    #     "meta": {
    #         "great_expectations.__version__": "0.7.0-beta",
    #         "BasicDatasetProfiler": {
    #             "created_by": "BasicDatasetProfiler",
    #             "created_at": 0,
    #             "batch_kwargs": {},
    #         },
    #     },
    #     "expectations": [
    #         {
    #             "expectation_type": "expect_column_to_exist",
    #             "meta": {
    #                 "BasicDatasetProfiler": {
    #                     "confidence": "very low"
    #                 }
    #             }
    #         }]
    # }


@pytest.fixture()
def filesystem_csv_2(tmp_path_factory):
    base_dir = tmp_path_factory.mktemp('test_files')
    base_dir = str(base_dir)

    # Put a file in the directory
    toy_dataset = PandasDataset({"x": [1, 2, 3]})
    toy_dataset.to_csv(os.path.join(base_dir, "f1.csv"), index=None)
    return base_dir


def test_context_profiler(empty_data_context, filesystem_csv_2):
    empty_data_context.add_datasource(
        "my_datasource", "pandas", base_directory=str(filesystem_csv_2))

    assert empty_data_context.list_expectations_configs() == []
    empty_data_context.profile_datasource("my_datasource")

    print(empty_data_context.list_expectations_configs())
    assert empty_data_context.list_expectations_configs() != []

    profiled_expectations = empty_data_context.get_expectations('f1')
    print(json.dumps(profiled_expectations, indent=2))

    assert len(profiled_expectations["expectations"]) > 0

    # assert False


# FIXME: This test needs a different home.
# def test_validate_on_a_context_loaded_batch(empty_data_context, filesystem_csv_2):
#     toy_dataset = PandasDataset({"x": [1, 2, 3]})
#     toy_dataset.validate()

#     empty_data_context.add_datasource(
#         "my_datasource", "pandas", base_directory=str(filesystem_csv_2))
#     not_so_empty_data_context = empty_data_context

#     # my_ds = not_so_empty_data_context.get_datasource("my_datasource")
#     # print(my_ds.list_available_data_asset_names())

#     my_batch = not_so_empty_data_context.get_batch("my_datasource", "f1")
#     my_batch.validate()
Python
0.000001
@@ -410,16 +410,29 @@ tations. +data_context. util imp
17b196e159fb7dd5b5951ba613e83815186238f9
improve project auth testing
tests/test_project.py
tests/test_project.py
""" Test Project """ import unittest from requests.exceptions import HTTPError from hubstorage import HubstorageClient from hubstorage.utils import millitime from hstestcase import HSTestCase class ProjectTest(HSTestCase): def test_projectid(self): p1 = self.hsclient.get_project(int(self.projectid)) p2 = self.hsclient.get_project(str(self.projectid)) self.assertEqual(p1.projectid, p2.projectid) self.assertEqual(type(p1.projectid), str) self.assertEqual(type(p2.projectid), str) self.assertRaises(AssertionError, self.hsclient.get_project, '111/3') def test_get_job_from_key(self): job = self.project.push_job(self.spidername) parts = tuple(job.key.split('/')) self.assertEqual(len(parts), 3) self.assertEqual(parts[:2], (self.projectid, self.spiderid)) samejob1 = self.hsclient.get_job(job.key) samejob2 = self.project.get_job(job.key) samejob3 = self.project.get_job(parts[1:]) self.assertEqual(samejob1.key, job.key) self.assertEqual(samejob2.key, job.key) self.assertEqual(samejob3.key, job.key) def test_get_jobs(self): p = self.project j1 = p.push_job(self.spidername, testid=0) j2 = p.push_job(self.spidername, testid=1) j3 = p.push_job(self.spidername, testid=2) # global list must list at least one job self.assertTrue(list(p.get_jobs(count=1))) # List all jobs for test spider r = list(p.get_jobs(self.spiderid)) self.assertEqual([j.key for j in r], [j1.key, j2.key, j3.key]) def test_push_job(self): job = self.project.push_job(self.spidername, state='running', priority=self.project.jobq.PRIO_HIGH, foo=u'bar') self.assertEqual(job.metadata.get('state'), u'running') self.assertEqual(job.metadata.get('foo'), u'bar') self.project.jobq.delete(job) job.metadata.expire() self.assertEqual(job.metadata.get('state'), u'deleted') self.assertEqual(job.metadata.get('foo'), u'bar') @unittest.expectedFailure def test_botgroup(self): self.project.settings.update(botgroups=['foo'], created=millitime()) self.project.settings.save() p1 = self.project.push_job(self.spidername) j1 = self.project.start_job() self.assertEqual(j1, None, 'got %s, pushed job was %s' % (j1.key, p1.key)) j2 = self.project.start_job(botgroup='bar') self.assertEqual(j2, None, 'got %s, pushed job was %s' % (j2.key, p1.key)) j3 = self.project.start_job(botgroup='foo') self.assertEqual(j3.key, p1.key) def test_auth(self): # client without global auth set hsc = HubstorageClient(endpoint=self.hsclient.endpoint) self.assertEqual(hsc.auth, None) # check no-auth access try: hsc.push_job(self.projectid, self.spidername) except HTTPError as exc: self.assertTrue(exc.response.status_code, 401) try: hsc.get_project(self.projectid).push_job(self.spidername) except HTTPError as exc: self.assertTrue(exc.response.status_code, 401) try: hsc.get_job((self.projectid, 1, 1)) except HTTPError as exc: self.assertTrue(exc.response.status_code, 401) try: hsc.get_project(self.projectid).get_job((self.projectid, 1, 1)) except HTTPError as exc: self.assertTrue(exc.response.status_code, 401) # create project with auth auth = self.hsclient.auth project = hsc.get_project(self.projectid, auth) self.assertEqual(project.auth, auth) job = project.push_job(self.spidername) samejob = project.get_job(job.key) self.assertEqual(samejob.key, job.key) def test_broad(self): project = self.hsclient.get_project(self.projectid) # populate project with at least one job job = project.push_job(self.spidername) self.assertEqual(job.metadata.get('state'), 'pending') job = project.start_job() self.assertEqual(job.metadata.get('state'), 'running') job.items.write({'title': 'bar'}) 
job.logs.info('nice to meet you') job.samples.write([1, 2, 3]) job.finished() # keep a jobid for get_job and unreference job jobid = job.key jobauth = job.auth del job self.assertTrue(list(project.jobs.list(self.spiderid, count=1))) self.assertTrue(list(project.items.list(self.spiderid, count=1))) self.assertTrue(list(project.logs.list(self.spiderid, count=1))) self.assertTrue(list(project.samples.list(self.spiderid, count=1))) job = project.client.get_job(jobid, jobauth=jobauth) job.purged() def test_settings(self): project = self.hsclient.get_project(self.projectid) self.assertEqual(dict(project.settings), {}) project.settings['created'] = created = millitime() project.settings['botgroups'] = ['g1', 'g2'] project.settings.save() self.assertEqual(project.settings.liveget('created'), created) self.assertEqual(project.settings.liveget('botgroups'), ['g1', 'g2']) project.settings.expire() self.assertEqual(dict(project.settings), { 'created': created, 'botgroups': ['g1', 'g2'], })
Python
0.000001
@@ -3064,32 +3064,99 @@ tatus_code, 401) +%0A else:%0A self.assertTrue(False, '401 not raised') %0A%0A try:%0A @@ -3307,32 +3307,99 @@ tatus_code, 401) +%0A else:%0A self.assertTrue(False, '401 not raised') %0A%0A try:%0A @@ -3436,32 +3436,45 @@ rojectid, 1, 1)) +.items.list() %0A except @@ -3541,32 +3541,99 @@ tatus_code, 401) +%0A else:%0A self.assertTrue(False, '401 not raised') %0A%0A try:%0A @@ -3798,16 +3798,96 @@ de, 401) +%0A else:%0A self.assertTrue(False, '401 not raised').items.list() %0A%0A
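The diff above hardens the no-auth checks: a bare try/except passes silently when no exception is raised at all, and assertTrue(exc.response.status_code, 401) never actually compares the status code to 401 (the 401 only acts as the failure message). A minimal sketch of the same guarantee written with unittest's assertRaises context manager; the call argument is a hypothetical stand-in for any of the client calls:

import unittest
from requests.exceptions import HTTPError

class NoAuthExample(unittest.TestCase):
    def assert_unauthorized(self, call):
        # assertRaises fails the test on its own if no HTTPError is raised,
        # so no explicit else/fail branch is needed.
        with self.assertRaises(HTTPError) as ctx:
            call()
        self.assertEqual(ctx.exception.response.status_code, 401)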
e24c4d4b225ca5d6c2130677fca34df2b0d2188d
Improve branch coverage
tests/test_reactor.py
tests/test_reactor.py
# -*- coding: utf-8 -*-
# vim: set ts=4

# Copyright 2016 Rémi Duraffort
# This file is part of ReactOBus.
#
# ReactOBus is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ReactOBus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with ReactOBus. If not, see <http://www.gnu.org/licenses/>

import pytest
import zmq


class ZMQMockSocket(object):
    def __init__(self):
        self.connected = False
        self.opts = {}
        self.url = None
        self.msgs = []
        self.send_msgs = []

    def setsockopt(self, key, value):
        self.opts[key] = value

    def connect(self, url):
        self.connected = True
        self.url = url

    def bind(self, url):
        self.connected = True
        self.url = url

    def recv_multipart(self):
        return self.msgs.pop(0)

    def send_multipart(self, msg):
        self.send_msgs.append(msg)


class ZMQMock(object):
    def __init__(self):
        self.socks = {}

    def __call__(self):
        return self

    def socket(self, sock_type):
        if sock_type not in self.socks:
            self.socks[sock_type] = ZMQMockSocket()
        return self.socks[sock_type]


class MockWorker(object):
    def __init__(self, matchers):
        self.matchers = matchers
        self.started = False

    def start(self):
        self.started = True


def test_reactor(monkeypatch):
    # Replace zmq.Context.instance()
    zmq_mock = ZMQMock()
    monkeypatch.setattr(zmq.Context, "instance", zmq_mock)
    import ReactOBus.reactor
    monkeypatch.setattr(ReactOBus.reactor, "Worker", MockWorker)
    from ReactOBus.reactor import Reactor

    options = {
        "rules": {},
        "workers": 0
    }
    r = Reactor(options, "inproc://test")
    with pytest.raises(IndexError):
        r.run()

    assert zmq_mock.socks[zmq.SUB].connected is True
    assert zmq_mock.socks[zmq.SUB].url == "inproc://test"
    assert zmq_mock.socks[zmq.SUB].opts == {zmq.SUBSCRIBE: b''}
    assert zmq_mock.socks[zmq.DEALER].connected is True
    assert zmq_mock.socks[zmq.DEALER].opts == {}
    assert zmq_mock.socks[zmq.DEALER].url == "inproc://workers"
    assert len(zmq_mock.socks[zmq.DEALER].send_msgs) == 0

    options = {
        "rules": [{"name": "first test",
                   "match": {"field": "topic",
                             "pattern": "^org.reactobus.lava"},
                   "exec": {"path": "/bin/true",
                            "args": ["topic", "$topic",
                                     "username", "$username"],
                            "timeout": 1}}],
        "workers": 2
    }
    r = Reactor(options, "inproc://test")
    zmq_mock.socks[zmq.SUB].msgs = [
        ["org.reactobus.lava", "uuid", "2016", "lavauser", "{}"],
        ["org.reactobus.lava", ""]
    ]
    with pytest.raises(IndexError):
        r.run()

    assert len(r.matchers) == 1
    assert r.matchers[0].name == "first test"
    assert len(r.workers) == 2
    assert r.workers[0].started
    assert r.workers[1].started

    assert zmq_mock.socks[zmq.SUB].connected is True
    assert zmq_mock.socks[zmq.SUB].url == "inproc://test"
    assert zmq_mock.socks[zmq.SUB].opts == {zmq.SUBSCRIBE: b''}
    assert zmq_mock.socks[zmq.DEALER].connected is True
    assert zmq_mock.socks[zmq.DEALER].opts == {}
    assert zmq_mock.socks[zmq.DEALER].url == "inproc://workers"
    assert len(zmq_mock.socks[zmq.DEALER].send_msgs) == 1
    assert zmq_mock.socks[zmq.DEALER].send_msgs[0] == [b"0", b"org.reactobus.lava",
                                                       b"uuid", b"2016",
                                                       b"lavauser", b"{}"]
Python
0.000001
@@ -3179,24 +3179,89 @@ er%22, %22%7B%7D%22%5D,%0A + %5B%22org.reactobus.lav%22, %22uuid%22, %222016%22, %22lavauser%22, %22%7B%7D%22%5D,%0A %5B%22or
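The diff adds a second pushed message whose topic, org.reactobus.lav, falls just short of the rule pattern ^org.reactobus.lava, so the matcher's non-matching branch is executed too. The same branch-coverage idea in isolation, using only the standard re module:

import re

PATTERN = re.compile(r"^org.reactobus.lava")

def route(topic):
    # Both outcomes need a covering input for full branch coverage.
    return "matched" if PATTERN.match(topic) else "dropped"

assert route("org.reactobus.lava") == "matched"
assert route("org.reactobus.lav") == "dropped"  # the case the commit adds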
0cef40e4ee30acbee12e179196dfc65c69890518
Add a failed-connect test for sock_connect
tests/test_sockets.py
tests/test_sockets.py
import asyncio
import socket

import uvloop

from uvloop import _testbase as tb


_SIZE = 1024 * 1024


class _TestSockets:

    async def recv_all(self, sock, nbytes):
        buf = b''
        while len(buf) < nbytes:
            buf += await self.loop.sock_recv(sock, nbytes - len(buf))
        return buf

    def test_socket_connect_recv_send(self):
        def srv_gen():
            yield tb.write(b'helo')
            data = yield tb.read(4 * _SIZE)
            self.assertEqual(data, b'ehlo' * _SIZE)
            yield tb.write(b'O')
            yield tb.write(b'K')

        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            data = await self.recv_all(sock, 4)
            self.assertEqual(data, b'helo')
            await self.loop.sock_sendall(sock, b'ehlo' * _SIZE)
            data = await self.recv_all(sock, 2)
            self.assertEqual(data, b'OK')

        with tb.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                self.loop.run_until_complete(client(sock, srv.addr))

    def test_socket_accept_recv_send(self):
        async def server():
            sock = socket.socket()
            sock.setblocking(False)

            with sock:
                sock.bind(('127.0.0.1', 0))
                sock.listen()

                fut = self.loop.run_in_executor(None, client,
                                                sock.getsockname())

                client_sock, _ = await self.loop.sock_accept(sock)

                with client_sock:
                    data = await self.recv_all(client_sock, _SIZE)
                    self.assertEqual(data, b'a' * _SIZE)

                await fut

        def client(addr):
            sock = socket.socket()
            with sock:
                sock.connect(addr)
                sock.sendall(b'a' * _SIZE)

        self.loop.run_until_complete(server())


class TestUVSockets(_TestSockets, tb.UVTestCase):
    pass


class TestAIOSockets(_TestSockets, tb.AIOTestCase):
    pass
Python
0.000003
@@ -1958,16 +1958,481 @@ ver())%0A%0A + def test_socket_failed_connect(self):%0A sock = socket.socket()%0A with sock:%0A sock.bind(('127.0.0.1', 0))%0A addr = sock.getsockname()%0A%0A async def run():%0A sock = socket.socket()%0A with sock:%0A sock.setblocking(False)%0A with self.assertRaises(ConnectionRefusedError):%0A await self.loop.sock_connect(sock, addr)%0A%0A self.loop.run_until_complete(run())%0A%0A %0Aclass T
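The added test relies on a socket that is bound but never put into listening mode, so any connect to that address is refused by the OS. A self-contained sketch of the same pattern on stock asyncio (no uvloop required), assuming a typical Linux or macOS TCP stack:

import asyncio
import socket

async def expect_refused():
    loop = asyncio.get_running_loop()
    holder = socket.socket()
    with holder:
        holder.bind(('127.0.0.1', 0))  # bound, but listen() is never called
        addr = holder.getsockname()
        sock = socket.socket()
        with sock:
            sock.setblocking(False)  # required by loop.sock_connect
            try:
                await loop.sock_connect(sock, addr)
            except ConnectionRefusedError:
                return True
    return False

print(asyncio.run(expect_refused()))  # expected: True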
244fc6b436398055f650ea3a64e9388586604cd9
Add test for group collection access.
testsuite/test_acl.py
testsuite/test_acl.py
import pytest

pytestmark = pytest.mark.django_db


def test_collections_acl(client):
    from django.contrib.auth.models import User, AnonymousUser
    from hoover.search.models import Collection
    from hoover.search.views import collections_acl
    anonymous = AnonymousUser()
    alice = User.objects.create_user('alice')
    foo = Collection.objects.create(name='foo', public=True)
    bar = Collection.objects.create(name='bar')
    baz = Collection.objects.create(name='baz')
    assert collections_acl(anonymous, []) == set()
    assert collections_acl(anonymous, ['foo']) == {foo}
    assert collections_acl(anonymous, ['foo', 'bar', 'baz']) == {foo}
    assert collections_acl(anonymous, ['foo', 'bar', 'foo', 'bar']) == {foo}
    assert collections_acl(alice, []) == set()
    assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo}
    baz.users.add(alice)
    assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo, baz}
Python
0
@@ -68,16 +68,22 @@ ions_acl +_users (client) @@ -945,8 +945,988 @@ o, baz%7D%0A +%0A%0Adef test_collections_acl_groups(client):%0A from django.contrib.auth.models import User, AnonymousUser, Group%0A from hoover.search.models import Collection%0A from hoover.search.views import collections_acl%0A anonymous = AnonymousUser()%0A alice = User.objects.create_user('alice')%0A bob = User.objects.create_user('bob')%0A alice_group = Group.objects.create(name='alice1')%0A alice_group.user_set.add(alice)%0A foo = Collection.objects.create(name='foo', public=True)%0A bar = Collection.objects.create(name='bar')%0A baz = Collection.objects.create(name='baz')%0A assert collections_acl(alice, %5B'foo', 'bar', 'baz'%5D) == %7Bfoo%7D%0A assert collections_acl(bob, %5B'foo', 'bar', 'baz'%5D) == %7Bfoo%7D%0A bar.groups.add(alice_group)%0A assert collections_acl(alice, %5B'foo', 'bar', 'baz'%5D) == %7Bfoo, bar%7D%0A assert collections_acl(bob, %5B'foo', 'bar', 'baz'%5D) == %7Bfoo%7D%0A alice_group.user_set.add(bob)%0A assert collections_acl(bob, %5B'foo', 'bar', 'baz'%5D) == %7Bfoo, bar%7D%0A
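The diff extends the ACL test from per-user grants to group grants (bar.groups.add(alice_group)). A hedged sketch of how such a check is commonly written with Django's ORM; the Collection fields (public, users, groups) mirror the test above, but hoover's actual collections_acl implementation may differ:

from django.db.models import Q

def visible_collections(user, names, Collection):
    qs = Collection.objects.filter(name__in=set(names))
    if user.is_anonymous:
        return set(qs.filter(public=True))
    # Visible if public, granted to the user directly, or granted to
    # any group the user belongs to.
    return set(qs.filter(
        Q(public=True) | Q(users=user) | Q(groups__in=user.groups.all())
    ).distinct())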
fa7b2a707be689c57d744d0ada5049dfb6b15789
Set leave=False on pbar
thinc/neural/train.py
thinc/neural/train.py
from __future__ import unicode_literals, print_function

from .optimizers import Eve, Adam, SGD, linear_decay
from .util import minibatch

import numpy.random
from tqdm import tqdm


class Trainer(object):
    def __init__(self, model, **cfg):
        self.ops = model.ops
        self.model = model
        self.L2 = cfg.get('L2', 0.0)
        self.optimizer = Adam(model.ops, 0.001, decay=0.0, eps=1e-8, L2=self.L2)
        self.batch_size = cfg.get('batch_size', 128)
        self.nb_epoch = cfg.get('nb_epoch', 20)
        self.i = 0
        self.dropout = cfg.get('dropout', 0.)
        self.dropout_decay = cfg.get('dropout_decay', 0.)
        self.each_epoch = []

    def __enter__(self):
        return self, self.optimizer

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.model.use_params(self.optimizer.averages)

    def iterate(self, train_X, train_y, progress_bar=True):
        orig_dropout = self.dropout
        for i in range(self.nb_epoch):
            indices = numpy.arange(len(train_X))
            numpy.random.shuffle(indices)
            indices = self.ops.asarray(indices)
            j = 0
            with tqdm(total=indices.shape[0]) as pbar:
                while j < indices.shape[0]:
                    slice_ = indices[j : j + self.batch_size]
                    X = _take_slice(train_X, slice_)
                    y = _take_slice(train_y, slice_)
                    yield X, y
                    self.dropout = linear_decay(orig_dropout, self.dropout_decay,
                                                self.optimizer.nr_iter)
                    j += self.batch_size
                    if progress_bar:
                        pbar.update(self.batch_size)
            for func in self.each_epoch:
                func()


def _take_slice(data, slice_):
    if isinstance(data, list) or isinstance(data, tuple):
        return [data[int(i)] for i in slice_]
    else:
        return data[slice_]
Python
0.000189
@@ -1170,16 +1170,29 @@ shape%5B0%5D +, leave=False ) as pba
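tqdm's leave flag (a real tqdm option) controls whether a finished bar stays on screen; because Trainer.iterate opens one bar per epoch, leave=False stops multi-epoch runs from stacking one finished bar per epoch. A minimal demonstration:

import time
from tqdm import tqdm

for epoch in range(3):
    # With leave=False the bar is cleared when the epoch ends, so three
    # epochs do not leave three stale progress lines behind.
    with tqdm(total=100, desc='epoch %d' % epoch, leave=False) as pbar:
        for _ in range(10):
            time.sleep(0.01)
            pbar.update(10)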
603bfdc9cb0f9bf8e29306e161728423f1f57f86
Update dependency bazelbuild/bazel to latest version
third_party/bazel.bzl
third_party/bazel.bzl
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file is autogenerated by copybara, please do not edit.
bazel_version = "87550ea81d92796facbb068a6ce7365326d00aff"
bazel_sha256 = "7abdba2562f80ae8327627b554785173b65f8758cf98f901b595d8d252210b94"
Python
0.000001
@@ -655,128 +655,128 @@ = %22 -87550ea81d92796facbb068a6ce7365326d00aff%22%0Abazel_sha256 = %227abdba2562f80ae8327627b554785173b65f8758cf98f901b595d8d252210b94 +6820bea27ff251086d683793c379e1d327bdd193%22%0Abazel_sha256 = %2248d36a03f519f17ea0f0e082857c5e71dd375c7a5f1ce198f72d790ffa833356 %22%0A
34721c0078d564538a4cf20ac15560a1bf119bac
Update dependency bazelbuild/bazel to latest version
third_party/bazel.bzl
third_party/bazel.bzl
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file is autogenerated by copybara, please do not edit.
bazel_version = "81f5771b0a5d26884841bbdecc77166142c87ca1"
bazel_sha256 = "d4787388b73177edf0d618e49a802d76b60e91adcee8e8aec882aeb02671046d"
Python
0.000021
@@ -655,128 +655,128 @@ = %22 -81f5771b0a5d26884841bbdecc77166142c87ca1%22%0Abazel_sha256 = %22d4787388b73177edf0d618e49a802d76b60e91adcee8e8aec882aeb02671046d +dc7db2490a9bf6941a1bcb1fbfc709fff1f37739%22%0Abazel_sha256 = %220a15609a976bc1d8e588d6252b12880723ff14bf06d6b73488853bcef8717840 %22%0A
fd641ebb631d4b7d03bf978de2dc22f4c2966dd5
Update Bazel to latest version
third_party/bazel.bzl
third_party/bazel.bzl
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

bazel_version = "f3d1683fcb3c4a25c4c4b8251d00ddfa66f958ba"
bazel_sha256 = "44643a2437c709cec8bf4a0f7a1bb0c31ba5e8a04680a6a669352b7e3a0545ef"
Python
0.000016
@@ -591,128 +591,128 @@ = %22 -f3d1683fcb3c4a25c4c4b8251d00ddfa66f958ba%22%0Abazel_sha256 = %2244643a2437c709cec8bf4a0f7a1bb0c31ba5e8a04680a6a669352b7e3a0545ef +1c03c8c4fac1e1028fcb5bb342da3a7fdfc88327%22%0Abazel_sha256 = %22e2d73ad1de6001669c87e0ec954738641dc6eb7e960f04601f7a260699bb0b9e %22%0A
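The three bazel.bzl rows above are the same autogenerated pattern: bazel_version pins a bazelbuild/bazel commit and bazel_sha256 pins the checksum of the corresponding archive, always updated together. A sketch of checking such a pin with hashlib; the GitHub archive URL layout is an assumption for illustration, not necessarily the artifact the real checksum was taken from:

import hashlib
import urllib.request

bazel_version = "1c03c8c4fac1e1028fcb5bb342da3a7fdfc88327"  # pinned commit
bazel_sha256 = "e2d73ad1de6001669c87e0ec954738641dc6eb7e960f04601f7a260699bb0b9e"

def sha256_of(url):
    digest = hashlib.sha256()
    with urllib.request.urlopen(url) as resp:
        for chunk in iter(lambda: resp.read(1 << 16), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Hypothetical archive location for the pinned commit:
url = "https://github.com/bazelbuild/bazel/archive/%s.zip" % bazel_version
if sha256_of(url) != bazel_sha256:
    raise SystemExit("pin and checksum are out of sync")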
b3abe856ff2e430f64c60d28b77e95c73b842b47
fix wrong table header
src/search.py
src/search.py
import xml.etree.ElementTree as ET
import texttable as tt
import re

from config import user_id, password
from datetime import date
from wos import WosClient


def _draw_table(data):
    # Generate table
    tab = tt.Texttable()
    tab.add_rows(data)
    tab.set_cols_align(['l', 'l', 'l'])
    tab.header(['year', 'id', 'title'])
    tab.set_cols_width([5, 55, 20])
    # Use fixed terminal dimension (80 char)

    s = tab.draw()
    print s


def search(author, years, results):
    client = WosClient(user_id, password)
    client.connect()

    # Build query
    query = 'AU=%s' % author

    # Build timespan
    current_year = date.today().year
    sq = client.search(query, count=results, offset=1,
                       timeSpan={'begin': '%s-01-01' % (current_year - years),
                                 'end': '%s-01-01' % (current_year + 1)})

    # Format xml
    my_xml = re.sub(' xmlns="[^"]+"', '', sq.records, count=1).encode('utf-8')
    tree = ET.fromstring(my_xml)

    # Get results
    res = []
    for t in tree:
        element = list(t)
        idwos = element[0].text
        data = list(element[1])  # static_data
        summary = list(data[0])  # summary
        titles = list(summary[2])  # titles
        year = summary[1].attrib['pubyear']
        paper = ''
        for title in titles:
            if title.attrib['type'] == 'item':
                paper = title.text
        res.append([year, paper, idwos])

    _draw_table(res)
Python
0.000008
@@ -306,27 +306,31 @@ r(%5B' -y +Y ear', ' -id', 'title +Title', 'ID WOS '%5D)%0A
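The corrected header ['Year', 'Title', 'ID WOS'] matches the order of the rows assembled in search(): [year, paper, idwos]. A small texttable sketch that keeps header and rows aligned; header(), add_rows() and draw() are texttable's documented API, and the sample row is made up:

import texttable as tt

rows = [['2016', 'Some paper title', 'WOS:000123456700003']]

tab = tt.Texttable()
tab.set_cols_align(['l', 'l', 'l'])
tab.header(['Year', 'Title', 'ID WOS'])  # one label per column, in row order
tab.add_rows(rows, header=False)         # rows are data, not a header row
print(tab.draw())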
2feb3880dc390f202f0280f764eea6f2a39af9e1
Fix the way stepper moves. Fix indentation.
src/server.py
src/server.py
#!/usr/bin/env python3

import sys

from pymata_aio.pymata3 import PyMata3
from pymata_aio.constants import Constants


class Actuator:
    """
    This class represent the motor used on my robot, it tries to create the
    same interface for different kind of motors

    :param board instance of pymata_aio.pymata3.PyMata3 class
    :param pin int or tuple cointaing pin or pins used by actuators to
        receive signal
    :param steps_per_rev characteristic of a stepper motor used for angle
        conversion
    """

    def __init__(self, board, pin, steps_per_rev = 4096):
        self.position = 0  # tracks the current position of the actuator (angle)
        self.max_angle = None
        self.min_angle = None
        self.servo = None
        self.pins = []
        self.active_coil = 0
        self.pin = pin

        # Reference to the board on which motors are connected.
        self.board = board

        # Constant for the stepper motor
        self.steps_per_rev = steps_per_rev
        self.angle_to_step_ratio = self.steps_per_rev / 360

        # Check are we using servo or stepper motor (we will just use 4 coil per motor).
        self.servo = True
        if isinstance(pin, tuple):
            self.servo = False
            self.pins = pin

        # Check if the values are right and configure the pins.
        self.__check_values()
        self.__configure_actuator()

    def __check_values(self):
        """
        Checks if the values in pin variable are valid.
        For servos, class expects one pin defined as integer.
        For steppers, class expects tuple containing four integers
        """
        if self.servo:
            if not isinstance(self.pin, int):
                print("Pin number must be an integer.")
                sys.exit()
        else:
            if not len(self.pins) == 4:
                print("There must be 4 pins defined to be used for stepper motor.")
                sys.exit()
            for pin in self.pins:
                if not isinstance(pin, int):
                    print("Pin numbers must be integers.")
                    sys.exit()

    def __configure_actuator(self):
        """
        Configures the board to use the pins correctly.
        Defines pin for servo as servo pin or defines stepper pins.
        """
        if self.servo:
            self.board.servo_config(self.pin)
        elif not self.servo:
            # We need to set all output signal pins for stepper motor.
            for pin in self.pins:
                self.board.set_pin_mode(pin, Constants.OUTPUT)

    def set_angle(self, angle):
        """
        Moves the actuator to the specified angle:

        :param angle integer
        """
        angle = int(angle)
        if self.servo:
            self.__set_angle_servo(angle)
        else:
            self.__set_angle_stepper(angle)
        self.position = angle
        return True

    def __set_angle_servo(self, angle):
        """
        Should not be used publicly.
        Moves the servo to given angle.

        :param angle integer
        """
        self.board.analog_write(self.pin, int(angle))
        self.position = angle
        return True

    def __set_angle_stepper(self, angle):
        """
        Should not be used publicly.
        Moves the stepper to the given angle.

        :param angle integer
        """
        d_angle = self.__find_delta_angle(self, angle)

        # For the given change in angle calculate how many steps stepper has to move
        steps = int(d_angle * self.angle_to_step_ratio)
        direction = 1
        if steps < 0:
            direction = -1

        for step in range(direction * steps):
            # Pins are just sending signal to activate coil. Coils here
            # represent index in the array (tuple) of pins.
            coil = (step + direction) % 4
            self.board.digital_write(self.pins[coil], 1)
            #print("Setting coil {} to 1".format(coil))
            self.board.digital_write(self.pins[coil-1], 0)
            self.position = self.position +\
                (direction * step // self.angle_to_step_ratio)
        return True

    def __find_delta_angle(self, angle):
        """
        Find the angle we need to move in order to get to the wanted
        position.

        :param angle integer
        """

        return (self.position + angle) % 360


def main_servo():
    board = PyMata3(2)
    servo = Actuator(board, 5)

    from tkinter import Tk, Scale, HORIZONTAL, CENTER
    main_window = Tk()
    slider=Scale(
        command=servo.set_angle,
        length=500,
        orient=HORIZONTAL,
        to=180,
        background="#fff",
        troughcolor="#f00",
        label="SERVO")
    slider.pack(anchor=CENTER)
    main_window.mainloop()


def main_step():
    board = PyMata3(2)
    stepper = Actuator(board, (8,9,10,11))

    from tkinter import Tk, Scale, HORIZONTAL, CENTER
    main_window = Tk()
    slider=Scale(
        command=stepper.set_angle,
        length=500,
        orient=HORIZONTAL,
        to=180,
        background="#fff",
        troughcolor="#f00",
        label="STEPPER")
    slider.pack(anchor=CENTER)
    main_window.mainloop()


def main_servo_step():
    board = PyMata3(2)
    servo = Actuator(board, 5)
    stepper = Actuator(board, (8,9,10,11))

    from tkinter import Tk, Scale, HORIZONTAL, CENTER
    main_window = Tk()
    servo_slider=Scale(
        command=servo.set_angle,
        length=500,
        orient=HORIZONTAL,
        to=180,
        background="#fff",
        troughcolor="#f00",
        label="SERVO")
    servo_slider.pack(anchor=CENTER)
    step_slider=Scale(
        command=stepper.set_angle,
        length=500,
        orient=HORIZONTAL,
        to=180,
        background="#fff",
        troughcolor="#f00",
        label="STEPPER")
    step_slider.pack(anchor=CENTER)
    main_window.mainloop()


# TODO: Make tests to check if everything is ok instead of trying to run the code manually and adjusting it.
# It's a lot of work but still ...
if __name__ == "__main__":
    main_servo()
Python
0
@@ -3381,30 +3381,24 @@ delta_angle( -self, angle)%0A%0A @@ -3606,16 +3606,120 @@ n = -1%0A%0A + # We only need positive number of steps when the direction is known%0A steps = abs(steps)%0A%0A @@ -3736,28 +3736,16 @@ n range( -direction * steps):%0A @@ -3901,17 +3901,17 @@ = (step -+ +* directi @@ -3979,64 +3979,8 @@ 1)%0A - #print(%22Setting coil %7B%7D to 1%22.format(coil))%0A @@ -4031,19 +4031,23 @@ oil- -1 +direction %5D, 0)%0A - @@ -4083,20 +4083,16 @@ tion +%5C%0A - @@ -4163,20 +4163,17 @@ n True%0A%0A - +%0A def @@ -4334,28 +4334,24 @@ %22%22%22%0A - retu @@ -6198,15 +6198,14 @@ main_s -ervo +tep ()%0A
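After the fix, the stepper advances coils with step * direction and iterates over abs(steps), since range() over a negative count yields nothing. A board-free sketch of the corrected sequencing, using an explicit % 4 so the "switch off" index always stays in range:

def coil_sequence(steps):
    direction = 1 if steps >= 0 else -1
    steps = abs(steps)  # range() needs a positive count either way
    fired = []
    for step in range(steps):
        coil = (step * direction) % 4      # coil to energize next
        previous = (coil - direction) % 4  # coil to switch off
        fired.append((coil, previous))
    return fired

print(coil_sequence(4))   # forward:  [(0, 3), (1, 0), (2, 1), (3, 2)]
print(coil_sequence(-4))  # backward: [(0, 1), (3, 0), (2, 3), (1, 2)]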
cdfa28910b48ae8847203ea8ad9ab8f173a64027
Format with black.
spotseeker_server/org_filters/__init__.py
spotseeker_server/org_filters/__init__.py
""" Copyright 2013 Board of Trustees, University of Illinois

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Description
=================================================================
Support for Organization search filters. This allows you to hook
into views/search.
"""

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from spotseeker_server.load_module import load_object_by_name


class SearchFilter(object):
    """
    A search filter base class. Implementers should subclass this and
    redefine the methods they're interested in. A new instance of this
    class is created for each search request.

    Instance Variables:
    request: The HTTP request.
    has_valid_search_param: If, in filter_query, we encountered a search
        parameter that will limit the query. Default: False.
    keys: set of keys this filter handles.
    """
    keys = set()

    def __init__(self, request):
        self.request = request
        self.has_valid_search_param = False

    def filter_query(self, query):
        """
        Filters the model query before it is actualized.
        Set self.has_valid_search_param to True if you add a parameter
        that limits the query.
        Return a query object (QuerySet), whether you modify it or not.
        """
        return query

    def filter_results(self, spots):
        """
        Filters the spots actualized from the query.
        Remove or add spots based on non-database conditions.
        Return the modified set of spots, or the original set if no
        modification occurred.
        """
        return spots


class SearchFilterChain(object):
    """
    A collection of filters to run on a spot search.
    A new instance of this class is created for each search.

    Instance Variables:
    request: The HTTP request.
    has_valid_search_param: If any filter in the query has a search
        parameter that will limit the query. Default: False.
    filters: array of filter instances.
    keys: set of keys this filter chain handles.
    """
    filters = []
    keys = set()

    @classmethod
    def _load_filters(cls):
        """Loads the filters and their modules"""
        if hasattr(settings, 'SPOTSEEKER_SEARCH_FILTERS'):
            for filtername in settings.SPOTSEEKER_SEARCH_FILTERS:
                filt = load_object_by_name(filtername)
                cls.filters.append(filt)
                cls.keys.update(filt.keys)

    def __init__(self, request):
        self.request = request
        self.has_valid_search_param = False
        self.filters = []
        for fclass in SearchFilterChain.filters:
            self.filters.append(fclass(request))

    def filter_query(self, query):
        """Calls filter_query for each defined filter."""
        for f in self.filters:
            query = f.filter_query(query)
            if f.has_valid_search_param:
                self.has_valid_search_param = True
        return query

    def filter_results(self, spots):
        """Calls filter_results for each defined filter."""
        for f in self.filters:
            spots = f.filter_results(spots)
        return spots

    def filters_key(self, key):
        return key in SearchFilterChain.keys


SearchFilterChain._load_filters()
Python
0
@@ -2817,17 +2817,17 @@ ttings, -' +%22 SPOTSEEK @@ -2843,17 +2843,17 @@ _FILTERS -' +%22 ):%0A
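This commit's diff only swaps quote styles, which is black's default string normalization. A sketch of applying the same normalization from Python through black's format_str API (present in released versions of black):

import black

source = "if hasattr(settings, 'SPOTSEEKER_SEARCH_FILTERS'):\n    pass\n"
formatted = black.format_str(source, mode=black.Mode())
print(formatted)  # single-quoted strings come back double-quoted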
f09c65f980fd9a7364d038ca8eb0b007f74677f5
Increase version
tinymce_4/__init__.py
tinymce_4/__init__.py
# -*- coding: utf-8 -*-

__version__ = '0.0.24'
Python
0
@@ -42,7 +42,11 @@ .0.2 -4 +5-dev '%0A
0bcc5d7a292b8176f0f3dc1be0baa03ddf81919e
Add async_remove_config_entry_device support to lookin (#73381)
homeassistant/components/lookin/__init__.py
homeassistant/components/lookin/__init__.py
"""The lookin integration."""
from __future__ import annotations

import asyncio
from collections.abc import Callable, Coroutine
from datetime import timedelta
import logging
from typing import Any

import aiohttp
from aiolookin import (
    Climate,
    LookInHttpProtocol,
    LookinUDPSubscriptions,
    MeteoSensor,
    NoUsableService,
    Remote,
    start_lookin_udp,
)
from aiolookin.models import UDPCommandType, UDPEvent

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN, PLATFORMS, TYPE_TO_PLATFORM
from .coordinator import LookinDataUpdateCoordinator, LookinPushCoordinator
from .models import LookinData

LOGGER = logging.getLogger(__name__)

UDP_MANAGER = "udp_manager"


def _async_climate_updater(
    lookin_protocol: LookInHttpProtocol,
    uuid: str,
) -> Callable[[], Coroutine[None, Any, Remote]]:
    """Create a function to capture the cell variable."""

    async def _async_update() -> Climate:
        return await lookin_protocol.get_conditioner(uuid)

    return _async_update


def _async_remote_updater(
    lookin_protocol: LookInHttpProtocol,
    uuid: str,
) -> Callable[[], Coroutine[None, Any, Remote]]:
    """Create a function to capture the cell variable."""

    async def _async_update() -> Remote:
        return await lookin_protocol.get_remote(uuid)

    return _async_update


class LookinUDPManager:
    """Manage the lookin UDP subscriptions."""

    def __init__(self) -> None:
        """Init the manager."""
        self._lock = asyncio.Lock()
        self._listener: Callable | None = None
        self._subscriptions: LookinUDPSubscriptions | None = None

    async def async_get_subscriptions(self) -> LookinUDPSubscriptions:
        """Get the shared LookinUDPSubscriptions."""
        async with self._lock:
            if not self._listener:
                self._subscriptions = LookinUDPSubscriptions()
                self._listener = await start_lookin_udp(self._subscriptions, None)
            return self._subscriptions

    async def async_stop(self) -> None:
        """Stop the listener."""
        async with self._lock:
            assert self._listener is not None
            self._listener()
            self._listener = None
            self._subscriptions = None


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up lookin from a config entry."""
    domain_data = hass.data.setdefault(DOMAIN, {})
    host = entry.data[CONF_HOST]
    lookin_protocol = LookInHttpProtocol(
        api_uri=f"http://{host}", session=async_get_clientsession(hass)
    )

    try:
        lookin_device = await lookin_protocol.get_info()
        devices = await lookin_protocol.get_devices()
    except (asyncio.TimeoutError, aiohttp.ClientError, NoUsableService) as ex:
        raise ConfigEntryNotReady from ex

    push_coordinator = LookinPushCoordinator(entry.title)

    meteo_coordinator: LookinDataUpdateCoordinator = LookinDataUpdateCoordinator(
        hass,
        push_coordinator,
        name=entry.title,
        update_method=lookin_protocol.get_meteo_sensor,
        update_interval=timedelta(
            minutes=5
        ),  # Updates are pushed (fallback is polling)
    )
    await meteo_coordinator.async_config_entry_first_refresh()

    device_coordinators: dict[str, LookinDataUpdateCoordinator] = {}
    for remote in devices:
        if (platform := TYPE_TO_PLATFORM.get(remote["Type"])) is None:
            continue
        uuid = remote["UUID"]
        if platform == Platform.CLIMATE:
            updater = _async_climate_updater(lookin_protocol, uuid)
        else:
            updater = _async_remote_updater(lookin_protocol, uuid)
        coordinator = LookinDataUpdateCoordinator(
            hass,
            push_coordinator,
            name=f"{entry.title} {uuid}",
            update_method=updater,
            update_interval=timedelta(
                seconds=60
            ),  # Updates are pushed (fallback is polling)
        )
        await coordinator.async_config_entry_first_refresh()
        device_coordinators[uuid] = coordinator

    @callback
    def _async_meteo_push_update(event: UDPEvent) -> None:
        """Process an update pushed via UDP."""
        LOGGER.debug("Processing push message for meteo sensor: %s", event)
        meteo: MeteoSensor = meteo_coordinator.data
        meteo.update_from_value(event.value)
        meteo_coordinator.async_set_updated_data(meteo)

    if UDP_MANAGER not in domain_data:
        manager = domain_data[UDP_MANAGER] = LookinUDPManager()
    else:
        manager = domain_data[UDP_MANAGER]
    lookin_udp_subs = await manager.async_get_subscriptions()

    entry.async_on_unload(
        lookin_udp_subs.subscribe_event(
            lookin_device.id, UDPCommandType.meteo, None, _async_meteo_push_update
        )
    )

    hass.data[DOMAIN][entry.entry_id] = LookinData(
        host=host,
        lookin_udp_subs=lookin_udp_subs,
        lookin_device=lookin_device,
        meteo_coordinator=meteo_coordinator,
        devices=devices,
        lookin_protocol=lookin_protocol,
        device_coordinators=device_coordinators,
    )

    hass.config_entries.async_setup_platforms(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)
    loaded_entries = [
        entry
        for entry in hass.config_entries.async_entries(DOMAIN)
        if entry.state == ConfigEntryState.LOADED
    ]
    if len(loaded_entries) == 1:
        manager: LookinUDPManager = hass.data[DOMAIN][UDP_MANAGER]
        await manager.async_stop()
    return unload_ok
Python
0
@@ -660,16 +660,72 @@ otReady%0A +from homeassistant.helpers import device_registry as dr%0A from hom @@ -6113,8 +6113,542 @@ load_ok%0A +%0A%0Aasync def async_remove_config_entry_device(%0A hass: HomeAssistant, entry: ConfigEntry, device_entry: dr.DeviceEntry%0A) -%3E bool:%0A %22%22%22Remove lookin config entry from a device.%22%22%22%0A data: LookinData = hass.data%5BDOMAIN%5D%5Bentry.entry_id%5D%0A all_identifiers: set%5Btuple%5Bstr, str%5D%5D = %7B%0A (DOMAIN, data.lookin_device.id),%0A *((DOMAIN, remote%5B%22UUID%22%5D) for remote in data.devices),%0A %7D%0A return not any(%0A identifier%0A for identifier in device_entry.identifiers%0A if identifier in all_identifiers%0A )%0A
db5060b32374c24cbe6e41b7405a16166fde0ecf
remove unused stuff and update the names (same as in owm sensor)
homeassistant/components/sensor/forecast.py
homeassistant/components/sensor/forecast.py
"""
homeassistant.components.sensor.forecast
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Forecast.io service.

Configuration:

To use the Forecast sensor you will need to add something like the
following to your config/configuration.yaml

sensor:
  platform: forecast
  api_key: YOUR_APP_KEY
  monitored_conditions:
    - summary
    - precip_type
    - precip_intensity
    - temperature
    - dew_point
    - wind_speed
    - wind_bearing
    - cloud_cover
    - humidity
    - pressure
    - visibility
    - ozone

Variables:

api_key
*Required
To retrieve this value log into your account at http://forecast.io/. You can
make 1000 requests per day. This means that you could create every 1.4 minute
one.

monitored_conditions
*Required
An array specifying the conditions to monitor.

These are the variables for the monitored_conditions array:

type
*Required
The condition you wish to monitor, see the configuration example above for a
list of all available conditions to monitor.

Details for the API : https://developer.forecast.io/docs/v2
"""
import logging
from datetime import timedelta

import forecastio

from homeassistant.util import Throttle
from homeassistant.const import (CONF_API_KEY, TEMP_CELCIUS, TEMP_FAHRENHEIT)
from homeassistant.helpers.entity import Entity

_LOGGER = logging.getLogger(__name__)

SENSOR_TYPES = {
    'summary': ['Summary', ''],
    'precip_type': ['Precip', ''],
    'precip_intensity': ['Precip intensity', 'mm'],
    'temperature': ['Temperature', ''],
    'dew_point': ['Dew point', '°C'],
    'wind_speed': ['Wind Speed', 'm/s'],
    'wind_bearing': ['Wind Bearing', '°'],
    'cloud_cover': ['Cloud coverage', '%'],
    'humidity': ['Humidity', '%'],
    'pressure': ['Pressure', 'mBar'],
    'visibility': ['Visibility', 'km'],
    'ozone': ['Ozone', ''],
}

# Return cached results if last scan was less then this time ago
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120)


def setup_platform(hass, config, add_devices, discovery_info=None):
    """ Get the Forecast.io sensor. """

    if None in (hass.config.latitude, hass.config.longitude):
        _LOGGER.error("Latitude or longitude not set in Home Assistant config")
        return False

    SENSOR_TYPES['temperature'][1] = hass.config.temperature_unit
    unit = hass.config.temperature_unit

    try:
        forecast = forecastio.load_forecast(config.get(CONF_API_KEY, None),
                                            hass.config.latitude,
                                            hass.config.longitude)
        forecast.currently()
    except ValueError:
        _LOGGER.error(
            "Connection error "
            "Please check your settings for Forecast.io.")
        return False

    data = ForeCastData(config.get(CONF_API_KEY, None),
                        hass.config.latitude,
                        hass.config.longitude)

    dev = []
    for variable in config['monitored_conditions']:
        if variable not in SENSOR_TYPES:
            _LOGGER.error('Sensor type: "%s" does not exist', variable)
        else:
            dev.append(ForeCastSensor(data, variable, unit))

    add_devices(dev)


# pylint: disable=too-few-public-methods
class ForeCastSensor(Entity):
    """ Implements an OpenWeatherMap sensor. """

    def __init__(self, weather_data, sensor_type, unit):
        self.client_name = 'Forecast'
        self._name = SENSOR_TYPES[sensor_type][0]
        self.forecast_client = weather_data
        self._unit = unit
        self.type = sensor_type
        self._state = None
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
        self.update()

    @property
    def name(self):
        return '{} - {}'.format(self.client_name, self._name)

    @property
    def state(self):
        """ Returns the state of the device. """
        return self._state

    @property
    def unit_of_measurement(self):
        """ Unit of measurement of this entity, if any. """
        return self._unit_of_measurement

    # pylint: disable=too-many-branches
    def update(self):
        """ Gets the latest data from Forecast.io and updates the states.
        """
        self.forecast_client.update()
        data = self.forecast_client.data

        try:
            if self.type == 'summary':
                self._state = data.summary
            # elif self.type == 'sunrise_time':
            #    self._state = data.sunriseTime
            # elif self.type == 'sunset_time':
            #    self._state = data.sunsetTime
            elif self.type == 'precip_intensity':
                if data.precipIntensity == 0:
                    self._state = 'None'
                    self._unit_of_measurement = ''
                else:
                    self._state = data.precipIntensity
            elif self.type == 'precip_type':
                if data.precipType is None:
                    self._state = 'None'
                    self._unit_of_measurement = ''
                else:
                    self._state = data.precipType
            elif self.type == 'dew_point':
                if self._unit == TEMP_CELCIUS:
                    self._state = round(data.dewPoint, 1)
                elif self._unit == TEMP_FAHRENHEIT:
                    self._state = round(data.dewPoint * 1.8 + 32.0, 1)
                else:
                    self._state = round(data.dewPoint, 1)
            elif self.type == 'temperature':
                if self._unit == TEMP_CELCIUS:
                    self._state = round(data.temperature, 1)
                elif self._unit == TEMP_FAHRENHEIT:
                    self._state = round(data.temperature * 1.8 + 32.0, 1)
                else:
                    self._state = round(data.temperature, 1)
            elif self.type == 'wind_speed':
                self._state = data.windSpeed
            elif self.type == 'wind_bearing':
                self._state = data.windBearing
            elif self.type == 'cloud_cover':
                self._state = round(data.cloudCover * 100, 1)
            elif self.type == 'humidity':
                self._state = round(data.humidity * 100, 1)
            elif self.type == 'pressure':
                self._state = round(data.pressure, 1)
            elif self.type == 'visibility':
                self._state = data.visibility
            elif self.type == 'ozone':
                self._state = round(data.ozone, 1)
        except forecastio.utils.PropertyUnavailable:
            pass


class ForeCastData(object):
    """ Gets the latest data from Forecast.io. """

    def __init__(self, api_key, latitude, longitude):
        self._api_key = api_key
        self.latitude = latitude
        self.longitude = longitude
        self.data = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """ Gets the latest data from Forecast.io. """
        forecast = forecastio.load_forecast(self._api_key,
                                            self.latitude,
                                            self.longitude)
        self.data = forecast.currently()
Python
0
@@ -79,17 +79,16 @@ ~~~~~~~%0A -%0A Forecast @@ -3226,22 +3226,19 @@ an -OpenWeatherMap +Forecast.io sen @@ -3332,24 +3332,23 @@ name = ' -Forecast +Weather '%0A @@ -3660,18 +3660,16 @@ turn '%7B%7D - - %7B%7D'.for @@ -4235,24 +4235,24 @@ 'summary':%0A + @@ -4286,200 +4286,8 @@ ary%0A - # elif self.type == 'sunrise_time':%0A # self._state = data.sunriseTime%0A # elif self.type == 'sunset_time':%0A # self._state = data.sunsetTime%0A
197fad99d2e60064dca76bec41400390bc0a2937
Remove mail templates since we're not testing them here; might be good to include a test for this elsewhere
helpdesk/tests/test_get_email.py
helpdesk/tests/test_get_email.py
from helpdesk.models import Queue, Ticket
from helpdesk.management.commands.get_email import process_email

from django.test import TestCase
from django.core import mail
from django.core.management import call_command
from django.test.client import Client
from django.utils import six

from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404

try:  # python 3
    from urllib.parse import urlparse
except ImportError:  # python 2
    from urlparse import urlparse

try:  # Python >= 3.3
    from unittest import mock
except ImportError:  # Python < 3.3
    import mock


class GetEmailTestCase(TestCase):
    fixtures = ['emailtemplate.json']

    def setUp(self):
        self.queue_public = Queue.objects.create(title='Queue 1', slug='QQ',
                                                 allow_public_submission=True,
                                                 allow_email_submission=True,
                                                 new_ticket_cc='new.public@example.com',
                                                 updated_ticket_cc='update.public@example.com',
                                                 email_box_type='local',
                                                 email_box_local_dir='/var/lib/mail/helpdesk/')

    # tests correct syntax for command line option
    def test_get_email_quiet_option(self):
        with mock.patch('helpdesk.management.commands.get_email.process_email') as mocked_processemail:
            call_command('get_email', quiet=True)
            mocked_processemail.assert_called_with(quiet=True)
            call_command('get_email')
            mocked_processemail.assert_called_with(quiet=False)

    # tests reading emails from a queue and creating tickets
    def test_read_email(self):
        test_email = "To: update.public@example.com\nFrom: comment@example.com\nSubject: Some Comment\n\nThis is the helpdesk comment via email."
        with mock.patch('helpdesk.management.commands.get_email.listdir') as mocked_listdir, \
                mock.patch('helpdesk.management.commands.get_email.isfile') as mocked_isfile, \
                mock.patch('builtins.open' if six.PY3 else '__builtin__.open', mock.mock_open(read_data=test_email)):
            mocked_isfile.return_value = True
            mocked_listdir.return_value = ['filename1', 'filename2']

            call_command('get_email')

            mocked_listdir.assert_called_with('/var/lib/mail/helpdesk/')
            mocked_isfile.assert_any_call('/var/lib/mail/helpdesk/filename1')
            mocked_isfile.assert_any_call('/var/lib/mail/helpdesk/filename2')

            ticket1 = get_object_or_404(Ticket, pk=1)
            self.assertEqual(ticket1.ticket_for_url, "QQ-%s" % ticket1.id)
            self.assertEqual(ticket1.description, "This is the helpdesk comment via email.")

            ticket2 = get_object_or_404(Ticket, pk=2)
            self.assertEqual(ticket2.ticket_for_url, "QQ-%s" % ticket2.id)
            self.assertEqual(ticket2.description, "This is the helpdesk comment via email.")
Python
0
@@ -641,16 +641,17 @@ e):%0A +# fixtures @@ -675,16 +675,66 @@ e.json'%5D + # may don't need this, not testing templates here %0A%0A de
fb705488aedeec3de842cd0be1b7aff9fe018962
Allow to specify additional external links for Javadocs
tools/bzl/javadoc.bzl
tools/bzl/javadoc.bzl
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Javadoc rule.

def _impl(ctx):
  zip_output = ctx.outputs.zip
  transitive_jar_set = set()
  source_jars = set()
  for l in ctx.attr.libs:
    source_jars += l.java.source_jars
    transitive_jar_set += l.java.transitive_deps

  transitive_jar_paths = [j.path for j in transitive_jar_set]
  dir = ctx.outputs.zip.path + ".dir"
  source = ctx.outputs.zip.path + ".source"
  cmd = [
    "rm -rf %s" % source,
    "mkdir %s" % source,
    " && ".join(["unzip -qud %s %s" % (source, j.path) for j in source_jars]),
    "rm -rf %s" % dir,
    "mkdir %s" % dir,
    " ".join([
      ctx.file._javadoc.path,
      "-Xdoclint:-missing",
      "-protected",
      "-encoding UTF-8",
      "-charset UTF-8",
      "-notimestamp",
      "-quiet",
      "-windowtitle '%s'" % ctx.attr.title,
      "-link", "http://docs.oracle.com/javase/8/docs/api",
      "-sourcepath %s" % source,
      "-subpackages ",
      ":".join(ctx.attr.pkgs),
      " -classpath ",
      ":".join(transitive_jar_paths),
      "-d %s" % dir]),
    "find %s -exec touch -t 198001010000 '{}' ';'" % dir,
    "(cd %s && zip -qr ../%s *)" % (dir, ctx.outputs.zip.basename),
  ]
  ctx.action(
      inputs = list(transitive_jar_set) + list(source_jars) + ctx.files._jdk,
      outputs = [zip_output],
      command = " && ".join(cmd))

java_doc = rule(
    attrs = {
      "libs": attr.label_list(allow_files = False),
      "pkgs": attr.string_list(),
      "title": attr.string(),
      "_javadoc": attr.label(
          default = Label("@local_jdk//:bin/javadoc"),
          single_file = True,
          allow_files = True),
      "_jdk": attr.label(
          default = Label("@local_jdk//:jdk-default"),
          allow_files = True),
    },
    implementation = _impl,
    outputs = {"zip" : "%{name}.zip"},
)
Python
0
@@ -963,16 +963,104 @@ source%22%0A + external_docs = %5B%22http://docs.oracle.com/javase/8/docs/api%22%5D + ctx.attr.external_docs%0A cmd = @@ -1061,16 +1061,16 @@ cmd = %5B%0A - %22r @@ -1501,58 +1501,60 @@ %22 --link%22, %22http://docs.oracle.com/javase/8/docs/api%22 + %22.join(%5B'-link %25s' %25 url for url in external_docs%5D) ,%0A @@ -2133,24 +2133,24 @@ ing_list(),%0A - %22title @@ -2163,24 +2163,67 @@ r.string(),%0A + %22external_docs%22: attr.string_list(),%0A %22_java
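With the new external_docs attribute, a caller can add extra -link targets on top of the built-in JDK Javadoc link. A hypothetical BUILD usage of the amended rule; target names, package and URL are purely illustrative:

load("//tools/bzl:javadoc.bzl", "java_doc")

java_doc(
    name = "api-javadoc",
    libs = [":api-lib"],
    pkgs = ["com.example.api"],
    title = "Example API Documentation",
    # Merged with the default Oracle JDK link inside the rule.
    external_docs = ["https://guava.dev/releases/snapshot/api/docs"],
)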