| commit (stringlengths 40) | subject (stringlengths 1-3.25k) | old_file (stringlengths 4-311) | new_file (stringlengths 4-311) | old_contents (stringlengths 0-26.3k) | lang (stringclasses 3 values) | proba (float64 0-1) | diff (stringlengths 0-7.82k) |
|---|---|---|---|---|---|---|---|
e04bf5dd12a1f5e28258541dcf9d2eb8c5567ad0
|
Add tests for lead price
|
tests/lead_price_tests.py
|
tests/lead_price_tests.py
|
Python
| 0
|
@@ -0,0 +1,1301 @@
+import unittest
import datetime
import json
import sys

sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions

'''
requires config.json in the same directory for api authentication

{
	"sabre_client_id": -----,
	"sabre_client_secret": -----
}

'''
class TestBasicLeadPrice(unittest.TestCase):
    def read_config(self):
        raw_data = open('config.json').read()

        data = json.loads(raw_data)

        client_secret = data['sabre_client_secret']
        client_id = data['sabre_client_id']

        return (client_id, client_secret)

    def setUp(self):
        # Read from config
        self.client_id, self.client_secret = self.read_config()
        self.sds = sabre_dev_studio.SabreDevStudio()
        self.sds.set_credentials(self.client_id, self.client_secret)
        self.sds.authenticate()

    def test_request_with_args(self):
        prices = self.sds.lead_price('YTO', 'SFO', [3,4])

        self.assertIsNotNone(prices)

    def test_basic_request(self):
        opts = {
            'origin': 'YTO',
            'destination': 'SFO',
            'lengthofstay': [3,4]
        }

        prices = self.sds.lead_price_opts(opts)

        self.assertIsNotNone(prices)


if __name__ == '__main__':
    unittest.main()
|
|
b7c22cddecb743e9597c92160e3aa0100e149e19
|
Introduce hades test fixtures and first tests.
|
tests/model/test_hades.py
|
tests/model/test_hades.py
|
Python
| 0
|
@@ -0,0 +1,3153 @@
+from datetime import datetime, timedelta

from pycroft.model import session
from pycroft.model.hades import radgroup_property_mappings, radcheck
from tests import FactoryDataTestBase
from tests.factories import PropertyGroupFactory, MembershipFactory, UserWithHostFactory, \
    SwitchFactory, PatchPortFactory


class HadesViewTest(FactoryDataTestBase):
    def create_factories(self):
        self.user = UserWithHostFactory.create()
        self.network_access_group = PropertyGroupFactory.create(
            name="Member",
            granted={'network_access'},
        )
        self.blocked_by_finance_group = PropertyGroupFactory.create(
            name="Blocked (finance)",
            granted={'blocked_by_finance'},
            denied={'network_access'},
        )
        self.blocked_by_traffic_group = PropertyGroupFactory.create(
            name="Blocked (traffic)",
            granted={'blocked_by_traffic'},
            denied={'network_access'},
        )

        # the user's room needs to be connected to provide `nasipaddress` and `nasportid`
        # TODO: remove owner and see if things still work
        self.switch = SwitchFactory.create(host__owner=self.user)
        PatchPortFactory.create_batch(2, patched=True, switch_port__switch=self.switch,
                                      # This needs to be the HOSTS room!
                                      room=self.user.hosts[0].room)

        # TODO: create this membership in each test, not here
        MembershipFactory.create(user=self.user, group=self.network_access_group,
                                 begins_at=datetime.now() + timedelta(-1),
                                 ends_at=datetime.now() + timedelta(1))
        MembershipFactory.create(user=self.user, group=self.blocked_by_finance_group,
                                 begins_at=datetime.now() + timedelta(-1),
                                 ends_at=datetime.now() + timedelta(1))

        session.session.execute(radgroup_property_mappings.insert(values=[
            {'property': 'blocked_by_finance', 'radgroup': 'finance'},
            {'property': 'blocked_by_traffic', 'radgroup': 'traffic'},
        ]))

    def test_radcheck(self):
        # <mac> - <nasip> - <nasport> - "Cleartext-Password" - := - <mac> - 10
        # We have one interface with a MAC whose room has two ports on the same switch
        rows = session.session.query(radcheck.table).all()
        host = self.user.hosts[0]
        mac = host.interfaces[0].mac
        for row in rows:
            self.assertEqual(row.username, mac)
            self.assertEqual(row.nasipaddress, self.switch.management_ip)
            self.assertEqual(row.attribute, "Cleartext-Password")
            self.assertEqual(row.op, ":=")
            self.assertEqual(row.value, mac)
            self.assertEqual(row.priority, 10)

        self.assertEqual({row.nasportid for row in rows},
                         {port.switch_port.name for port in host.room.patch_ports})

    # TODO: Put Entries in some basetable to test tagged vlans (separate test)
    # TODO: test radreply, radgroupreply (with base, see above), radgroupcheck
|
|
33bb7c4e026d46dda184d682c89fad7481ab1a77
|
Add migration script.
|
contrib/migration/migrate-compose-file-v1-to-v2.py
|
contrib/migration/migrate-compose-file-v1-to-v2.py
|
Python
| 0
|
@@ -0,0 +1,2967 @@
+#!/usr/bin/env python
"""
Migrate a Compose file from the V1 format in Compose 1.5 to the V2 format
supported by Compose 1.6+
"""
from __future__ import absolute_import
from __future__ import unicode_literals

import argparse
import logging
import sys

import ruamel.yaml


log = logging.getLogger('migrate')


def migrate(content):
    data = ruamel.yaml.load(content, ruamel.yaml.RoundTripLoader)

    service_names = data.keys()
    for name, service in data.items():
        # remove links and external links
        service.pop('links', None)
        external_links = service.pop('external_links', None)
        if external_links:
            log.warn(
                "Service {name} has external_links: {ext}, which are no longer "
                "supported. See https://docs.docker.com/compose/networking/ "
                "for options on how to connect external containers to the "
                "compose network.".format(name=name, ext=external_links))

        # net is now networks
        if 'net' in service:
            service['networks'] = [service.pop('net')]

        # create build section
        if 'dockerfile' in service:
            service['build'] = {
                'context': service.pop('build'),
                'dockerfile': service.pop('dockerfile'),
            }

        # create logging section
        if 'log_driver' in service:
            service['logging'] = {'driver': service.pop('log_driver')}
        if 'log_opt' in service:
            service['logging']['options'] = service.pop('log_opt')

        # volumes_from prefix with 'container:'
        for idx, volume_from in enumerate(service.get('volumes_from', [])):
            if volume_from.split(':', 1)[0] not in service_names:
                service['volumes_from'][idx] = 'container:%s' % volume_from

    data['services'] = {name: data.pop(name) for name in data.keys()}
    data['version'] = 2
    return data


def write(stream, new_format, indent, width):
    ruamel.yaml.dump(
        new_format,
        stream,
        Dumper=ruamel.yaml.RoundTripDumper,
        indent=indent,
        width=width)


def parse_opts(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("filename", help="Compose file filename.")
    parser.add_argument("-i", "--in-place", action='store_true')
    parser.add_argument(
        "--indent", type=int, default=2,
        help="Number of spaces used to indent the output yaml.")
    parser.add_argument(
        "--width", type=int, default=80,
        help="Number of spaces used as the output width.")
    return parser.parse_args()


def main(args):
    logging.basicConfig()

    opts = parse_opts(args)

    with open(opts.filename, 'r') as fh:
        new_format = migrate(fh.read())

    if opts.in_place:
        output = open(opts.filename, 'w')
    else:
        output = sys.stdout
    write(output, new_format, opts.indent, opts.width)


if __name__ == "__main__":
    main(sys.argv)
|
|
5fc72b0a6efcd14196d33c8e0ba9b4b763ebf4d1
|
Add a parse_time example
|
examples/parse_time.py
|
examples/parse_time.py
|
Python
| 0.00019
|
@@ -0,0 +1,1955 @@
+"""
================================================
Parsing times with sunpy.time.parse_time
================================================

Example to show some example usage of parse_time
"""

from datetime import datetime, date
import numpy as np
import pandas

from sunpy.time import parse_time

# dict used for coloring the terminal output
col = {'y': '\x1b[93m', 'g': '\x1b[92m', 'r': '\x1b[96m', 'bold': '\x1b[1m',
       'end': '\x1b[0m'}


def print_time(*args, **kwargs):
    '''Parses and pretty prints a parse_time compatible object
    '''

    # Parse the time
    time = parse_time(*args, **kwargs)  # Pass all arguments to parse_time

    # Color and print to terminal
    print(col['r'] + '\nInput string/object: ' + col['end'] +
          col['bold'] + '{ts!r}'.format(ts=args[0])+col['end'])
    print(col['r'] + 'Parsed Time: ' + col['end'] + col['y'] + col['bold'] +
          '{time!r}'.format(time=time) + col['end'])


# Strings
print('\nSTRINGS')
print_time('2005-08-04T00:18:02.000', scale='tt')
print_time('20140101000001')
print_time('2016.05.04_21:08:12_TAI')
print_time('1995-12-31 23:59:60')  # Leap second
print_time('1995-Dec-31 23:59:60')

# datetime
print('\nDATETIME')
print_time(datetime.now(), scale='tai')
print_time(date.today())

# numpy
print('\nnumpy.datetime64')
print_time(np.datetime64('1995-12-31 18:59:59-0500'))
print_time(np.arange('2005-02-01T00', '2005-02-01T10', dtype='datetime64'))

# astropy compatible times
print('\nAstroPy compatible')
print_time(1234.0, format='jd')
print_time('B1950.0', format='byear_str')
print_time('2001-03-22 00:01:44.732327132980', scale='utc',
           location=('120d', '40d'))  # pass location

# pandas
print_time(pandas.Timestamp(datetime(1966, 2, 3)))
print_time(
    pandas.Series([[datetime(2012, 1, 1, 0, 0),
                    datetime(2012, 1, 2, 0, 0)],
                   [datetime(2012, 1, 3, 0, 0),
                    datetime(2012, 1, 4, 0, 0)]]))
|
|
71e9a3a7e867b0670871ac46834988b87787c272
|
Add import-hacks.py snippet file
|
import-hacks.py
|
import-hacks.py
|
Python
| 0.000001
|
@@ -0,0 +1,1299 @@
+# Can control the import mechanism in Python.
import importlib.abc
import logging
import sys

logging.getLogger().setLevel(logging.INFO)

class LoggingImporter(importlib.abc.Finder):
    def find_module(self, name, path=None):
        msg = "importing {} on {}".format(name, path)
        logging.info(msg)
        return None  # None means "didn't match", move on to the next path Finder
sys.meta_path.append(LoggingImporter())

class BlockingFinder(importlib.abc.Finder):
    def find_module(self, name, path=None):
        if name in ['numpy']:
            return BlockingLoader()
class BlockingLoader(importlib.abc.Loader):
    def load_module(self, fullname):
        if fullname not in sys.modules:
            raise ImportError("Can't import excluded module {}".format(fullname))
        return sys.modules[fullname]
sys.meta_path.append(BlockingFinder())

# combined in one single class:
class BlockingImporter(importlib.abc.Finder, importlib.abc.Loader):
    def find_module(self, name, path=None):
        if name in ['numpy']:
            return self
    def load_module(self, fullname):
        if fullname not in sys.modules:
            raise ImportError("Can't import excluded module {}".format(fullname))
        return sys.modules[fullname]
sys.meta_path.append(BlockingImporter())
|
|
9d9e82487a6ad9494f65e484392334e89baf7b83
|
Add AnalyzerResult class
|
coala/coalib/results/AnalyzerResult.py
|
coala/coalib/results/AnalyzerResult.py
|
Python
| 0
|
@@ -0,0 +1,1273 @@
+import uuid
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY

class AnalyzerResult:
    def __init__(self,
                 origin,
                 language: str,
                 language_ver: str,
                 project_name: str,
                 project_version: str,
                 source_file_path: str,
                 message: str,
                 severity: int=RESULT_SEVERITY.NORMAL,
                 diffs: (dict, None)=None,
                 confidence: int=100,
                 ):
        origin = origin or ''
        if not isinstance(origin, str):
            origin = origin.__class__.__name__
        if severity not in RESULT_SEVERITY.reverse:
            raise ValueError('severity is not a valid RESULT_SEVERITY')

        self.origin = origin
        self.language = language
        self.language_ver = language_ver
        self.project_name = project_name
        self.project_version = project_version
        self.source_file_path = source_file_path
        self.message = message
        self.severity = severity
        if confidence < 0 or confidence > 100:
            raise ValueError('Value of confidence should be between 0 and 100.')
        self.confidence = confidence
        self.diffs = diffs
        self.id = uuid.uuid4().int
|
|
60e4269027adc05db7b585ab51334c8d28cd7a1c
|
Add assignment writer
|
nbgrader/assignment_writer.py
|
nbgrader/assignment_writer.py
|
Python
| 0.000001
|
@@ -0,0 +1,3002 @@
+"""Based on the FilesWriter class included with IPython."""

import io
import os
import glob
import shutil

from IPython.utils.traitlets import Unicode
from IPython.utils.path import ensure_dir_exists
from IPython.utils.py3compat import unicode_type

from IPython.nbconvert.writers.base import WriterBase


class AssignmentWriter(WriterBase):

    build_directory = Unicode(
        ".", config=True, help="Directory to write output to.")

    # Make sure that the output directory exists.
    def _build_directory_changed(self, name, old, new):
        if new:
            ensure_dir_exists(new)

    def __init__(self, **kw):
        super(AssignmentWriter, self).__init__(**kw)
        self._build_directory_changed(
            'build_directory', self.build_directory, self.build_directory)

    def _makedir(self, path):
        """Make a directory if it doesn't already exist"""
        if path:
            self.log.info("Making directory %s", path)
            ensure_dir_exists(path)

    def write(self, output, resources, notebook_name=None, **kw):
        """Consume and write Jinja output to the file system. Output
        directory is set via the 'build_directory' variable of this
        instance (a configurable).

        See base for more...

        """

        # Verify that a notebook name is provided.
        if notebook_name is None:
            raise TypeError('notebook_name')

        # Pull the extension and subdir from the resources dict.
        output_extension = resources.get('output_extension', None)

        # Copy referenced files to output directory
        if self.build_directory:
            for filename in self.files:

                # Copy files that match search pattern
                for matching_filename in glob.glob(filename):

                    # Make sure folder exists.
                    dest = os.path.join(
                        self.build_directory, matching_filename)
                    path = os.path.dirname(dest)
                    self._makedir(path)

                    # Copy if destination is different.
                    if not os.path.normpath(dest) == os.path.normpath(matching_filename):
                        self.log.info("Copying %s -> %s",
                                      matching_filename, dest)
                        shutil.copy(matching_filename, dest)

        # Determine where to write conversion results.
        if output_extension is not None:
            dest = notebook_name + '.' + output_extension
        else:
            dest = notebook_name
        if self.build_directory:
            dest = os.path.join(self.build_directory, dest)

        # Write conversion results.
        self.log.info("Writing %i bytes to %s", len(output), dest)
        if isinstance(output, unicode_type):
            with io.open(dest, 'w', encoding='utf-8') as f:
                f.write(output)
        else:
            with io.open(dest, 'wb') as f:
                f.write(output)

        return dest
|
|
a998eaec11c9ec53e593e2a25542eabab0f75890
|
Create __init__.py
|
app/__init__.py
|
app/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,465 @@
+# app/__init__.py

from flask_api import FlaskAPI
from flask_sqlalchemy import SQLAlchemy

# local import
from instance.config import app_config

# initialize sql-alchemy
db = SQLAlchemy()

def create_app(config_name):
    app = FlaskAPI(__name__, instance_relative_config=True)
    app.config.from_object(app_config[config_name])
    app.config.from_pyfile('config.py')
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db.init_app(app)

    return app
|
|
633b6f4c5cecda938f02ff6ccaa529de7b47ce67
|
Initialize excelToCSV
|
books/AutomateTheBoringStuffWithPython/Chapter14/PracticeProject/excelToCSV.py
|
books/AutomateTheBoringStuffWithPython/Chapter14/PracticeProject/excelToCSV.py
|
Python
| 0.000001
|
@@ -0,0 +1,1413 @@
+# Using the openpyxl module, write a program that reads all the Excel files in the
# current working directory and outputs them as CSV files.
# A single Excel file might contain multiple sheets; you’ll have to create one CSV
# file per sheet. The filenames of the CSV files should be
# <excel filename>_<sheet title>.csv, where <excel filename> is the filename of the
# Excel file without the file extension (for example, 'spam_data', not
# 'spam_data.xlsx') and <sheet title> is the string from the Worksheet object’s title
# variable.
#
# Note:
# - Example Excel files can be downloaded from http://nostarch.com/automatestuff/

for excelFile in os.listdir('.'):
    # Skip non-xlsx files, load the workbook object.
    for sheetName in wb.get_sheet_names():
        # Loop through every sheet in the workbook.
        sheet = wb.get_sheet_by_name(sheetName)

        # Create the CSV filename from the Excel filename and sheet title.
        # Create the csv.writer object for this CSV file.

        # Loop through every row in the sheet.
        for rowNum in range(1, sheet.get_highest_row() + 1):
            rowData = []  # append each cell to this list
            # Loop through each cell in the row.
            for colNum in range(1, sheet.get_highest_column() + 1):
                # Append each cell's data to rowData.

            # Write the rowData list to the CSV file.

        csvFile.close()
|
|
09181bd4c11501fa9e8274651370e45ac8d83316
|
add vars01.py
|
trypython/basic/vars01.py
|
trypython/basic/vars01.py
|
Python
| 0.000001
|
@@ -0,0 +1,902 @@
+# coding: utf-8

"""
Sample code for vars().
"""
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr


class Sample(SampleBase):
    def exec(self):
        #
        # Called with no arguments, vars() is the same as locals().
        # Called with an argument, it returns that object's __dict__.
        #
        x = 10
        y = 20
        pr('vars()', vars())

        self.fn01(10, 20, 30, **dict(apple=100, pineapple=200))
        Sample.fn02(10, 20, 30, **dict(apple=100, pineapple=200))
        Sample.fn03(10, 20, 30, **dict(apple=100, pineapple=200))


    def fn01(self, *args, **kwargs):
        pr('vars() in method', vars())

    @classmethod
    def fn02(cls, *args, **kwargs):
        pr('vars() in class method', vars())

    @staticmethod
    def fn03(*args, **kwargs):
        pr('vars() in static method', vars())

def go():
    obj = Sample()
    obj.exec()


if __name__ == '__main__':
    go()
|
|
7577933c1e016ee6d4bca73bfa3434dc19c55315
|
add basic initialization
|
circa.py
|
circa.py
|
Python
| 0.00001
|
@@ -0,0 +1,359 @@
+import logging
import client

class Circa(client.Client):
	def __init__(self, conf):
		conf["autoconn"] = False
		logging.basicConfig(filename=conf.get("log", "circa.log"), level=logging.INFO,
			style="%", format="%(asctime)s %(levelname)s %(message)s")
		if "log" in conf:
			logging.basicConfig(filename=conf["log"])
		client.Client.__init__(self, **conf)
|
|
ed479cf11540c6a67bb4b51eed42b91abd869090
|
Add html_doc.py
|
tests/test_files/html_doc.py
|
tests/test_files/html_doc.py
|
Python
| 0.000105
|
@@ -0,0 +1,302 @@
+doc = """<!DOCTYPE html>

<html>
    <head>
        <title>Html document</title>
    </head>

    <body>
        <div>
            <h1>H1 Tag</h1>
            <h1 class='class-name'>H1 Tag with class</h1>
            <h1 id='id-name'>H1 Tag with id</h1>
            <p class='class-name'>P tag with class</p>
        </div>
    </body>
</html>"""
|
|
308744d8a022c7fc25af4f2ef8a6214cdcf014f9
|
Add additional attributes to GPSLogger (#4755)
|
homeassistant/components/device_tracker/gpslogger.py
|
homeassistant/components/device_tracker/gpslogger.py
|
"""
Support for the GPSLogger platform.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.gpslogger/
"""
import asyncio
from functools import partial
import logging
from homeassistant.const import HTTP_UNPROCESSABLE_ENTITY
from homeassistant.components.http import HomeAssistantView
# pylint: disable=unused-import
from homeassistant.components.device_tracker import ( # NOQA
DOMAIN, PLATFORM_SCHEMA)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['http']
def setup_scanner(hass, config, see):
"""Setup an endpoint for the GPSLogger application."""
hass.http.register_view(GPSLoggerView(see))
return True
class GPSLoggerView(HomeAssistantView):
"""View to handle gpslogger requests."""
url = '/api/gpslogger'
name = 'api:gpslogger'
def __init__(self, see):
"""Initialize GPSLogger url endpoints."""
self.see = see
@asyncio.coroutine
def get(self, request):
"""A GPSLogger message received as GET."""
res = yield from self._handle(request.app['hass'], request.GET)
return res
@asyncio.coroutine
def _handle(self, hass, data):
"""Handle gpslogger request."""
if 'latitude' not in data or 'longitude' not in data:
return ('Latitude and longitude not specified.',
HTTP_UNPROCESSABLE_ENTITY)
if 'device' not in data:
_LOGGER.error('Device id not specified.')
return ('Device id not specified.',
HTTP_UNPROCESSABLE_ENTITY)
device = data['device'].replace('-', '')
gps_location = (data['latitude'], data['longitude'])
accuracy = 200
battery = -1
if 'accuracy' in data:
accuracy = int(float(data['accuracy']))
if 'battery' in data:
battery = float(data['battery'])
yield from hass.loop.run_in_executor(
None, partial(self.see, dev_id=device,
gps=gps_location, battery=battery,
gps_accuracy=accuracy))
return 'Setting location for {}'.format(device)
|
Python
| 0
|
@@ -1910,16 +1910,370 @@
ttery'])
+
        attrs = {}
        if 'speed' in data:
            attrs['speed'] = float(data['speed'])
        if 'direction' in data:
            attrs['direction'] = float(data['direction'])
        if 'altitude' in data:
            attrs['altitude'] = float(data['altitude'])
        if 'provider' in data:
            attrs['provider'] = data['provider']

@@ -2471,16 +2471,60 @@
accuracy
+,
                          attributes=attrs
))
|
8f32a5be4ffc427a1fbc6bf700edc81b191e876f
|
add tests for basic funcionality
|
test/api/test_rest.py
|
test/api/test_rest.py
|
Python
| 0
|
@@ -0,0 +1,2563 @@
+import json
from test.api import APIClient
from django.test import TestCase
from squad.core import models
from squad.ci import models as ci_models


class RestApiTest(TestCase):

    def setUp(self):
        self.group = models.Group.objects.create(slug='mygroup')
        self.project = self.group.projects.create(slug='myproject')
        self.build = self.project.builds.create(version='1')
        self.environment = self.project.environments.create(slug='myenv')
        self.testrun = self.build.test_runs.create(environment=self.environment, build=self.build)
        self.backend = ci_models.Backend.objects.create(name='foobar')
        self.testjob = self.build.test_jobs.create(
            definition="foo: bar",
            backend=self.backend,
            target=self.project,
            target_build=self.build,
            build='1',
            environment='myenv',
            testrun=self.testrun
        )

    def hit(self, url):
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
        text = response.content.decode('utf-8')
        if response['Content-Type'] == 'application/json':
            return json.loads(text)
        else:
            return text

    def test_root(self):
        self.hit('/api/')

    def test_projects(self):
        data = self.hit('/api/projects/')
        self.assertEqual(1, len(data['results']))

    def test_project_builds(self):
        data = self.hit('/api/projects/%d/builds/' % self.project.id)
        self.assertEqual(1, len(data['results']))

    def test_builds(self):
        data = self.hit('/api/builds/')
        self.assertEqual(1, len(data['results']))

    def test_build_testruns(self):
        data = self.hit('/api/builds/%d/testruns/' % self.build.id)
        self.assertEqual(1, len(data['results']))

    def test_build_testjobs(self):
        data = self.hit('/api/builds/%d/testjobs/' % self.build.id)
        self.assertEqual(1, len(data['results']))

    def test_testjob(self):
        data = self.hit('/api/testjobs/%d/' % self.testjob.id)
        self.assertEqual('myenv', data['environment'])

    def test_testjob_definition(self):
        data = self.hit('/api/testjobs/%d/definition/' % self.testjob.id)
        self.assertEqual('foo: bar', data)

    def test_backends(self):
        data = self.hit('/api/backends/')
        self.assertEqual('foobar', data['results'][0]['name'])

    def test_environments(self):
        data = self.hit('/api/environments/')
        self.assertEqual('myenv', data['results'][0]['slug'])
|
|
86c6dcc8fe0ac739ed1ae1a7898ea609fe959c61
|
add spectra extraction script
|
spectra.py
|
spectra.py
|
Python
| 0
|
@@ -0,0 +1,2437 @@
+import sqlite3
import igraph
import os
from matplotlib import pyplot as plt

db = 'genome.leishmania.hp_assembly.db'
wordlen = 10
base_dir = 'spectra-%d' % wordlen
pos_dir = os.path.join(base_dir, 'positive')
neg_dir = os.path.join(base_dir, 'negative')
num_bins = 1000
ylim = (0,100)
xlim = (-15000,15000)
G = igraph.read('leishmania_true.gml')
db_id_from_graph_id = lambda vid: int(G.vs[vid]['name'].split('#')[1])
true_overlaps = [set([db_id_from_graph_id(u), db_id_from_graph_id(v)]) for u,v in G.get_edgelist()]
vertices = {db_id_from_graph_id(int(v['id'])):v['name'] for v in G.vs}
start_pos_from_db_id = lambda dbid: int(vertices[dbid].split('#')[0].split()[1].split('-')[0])

red = '#ffe7e7'
green = '#6bdb6b'

from align import ProgressIndicator

with sqlite3.connect(db) as conn:
    c = conn.cursor()
    c.execute('SELECT id FROM seq ORDER BY id ASC')
    ids = [row[0] for row in c]
    N = len(ids)*(len(ids)-1) / 2
    os.mkdir(base_dir)
    os.mkdir(pos_dir)
    os.mkdir(neg_dir)
    indic = ProgressIndicator('building spectra', N, percentage=False)
    indic.start()
    for S_id_idx in range(len(ids)):
        for T_id_idx in range(S_id_idx, len(ids)):
            S_id = ids[S_id_idx]
            T_id = ids[T_id_idx]
            q = """
                SELECT S_idx - T_idx FROM seeds_%d
                WHERE S_id = %d AND T_id = %d
            """ % (wordlen, S_id, T_id)
            c.execute(q)
            shifts = [row[0] for row in c]
            if len(shifts) < 5:
                continue
            indic.progress()

            plt.clf()
            color = 'red'
            if set([S_id, T_id]) in true_overlaps:
                color = 'green'
                true_shift = start_pos_from_db_id(T_id) - start_pos_from_db_id(S_id)
                plt.axvline(x=true_shift, ymin=ylim[0], ymax=ylim[1], color='#333333', linewidth=20, alpha=0.4)
            plt.hist(shifts, num_bins, histtype='stepfilled', color=color, edgecolor=color)
            plt.tick_params(axis='both', which='major', labelsize=8)

            plt.xlim(*xlim)
            plt.ylim(*ylim)
            plt.grid(True)
            plt.title('%s ----> %s (%d total seeds)' % (vertices[S_id], vertices[T_id], len(shifts)), fontsize=8, fontname='Inconsolata')
            plt.savefig(os.path.join(
                pos_dir if color == 'green' else neg_dir,
                '%d_%d.png' % (S_id, T_id)
            ))

    indic.finish()
|
|
d42b2ee3eb60a0c11c1a973e030de909519ba662
|
Add garage.argparse.add_bool_argument
|
garage/argparse.py
|
garage/argparse.py
|
Python
| 0.000024
|
@@ -0,0 +1,515 @@
+__all__ = [
    'add_bool_argument',
]

import argparse


def add_bool_argument(parser, *args, **kwargs):
    kwargs = dict(kwargs)  # Make a copy before modifying it...
    kwargs['choices'] = (True, False)
    kwargs['type'] = parse_bool
    parser.add_argument(*args, **kwargs)


def parse_bool(string):
    try:
        return {'true': True, 'false': False}[string.lower()]
    except KeyError:
        raise argparse.ArgumentTypeError(
            'expect either \'true\' or \'false\' instead of %r' % string)
|
|
10f9b0d1b02c2b5f4c4eeac0c1f803657c89764b
|
add example file for smooth OT
|
examples/plot_OT_1D_smooth.py
|
examples/plot_OT_1D_smooth.py
|
Python
| 0
|
@@ -0,0 +1,2247 @@
+# -*- coding: utf-8 -*-
"""
====================
1D optimal transport
====================

This example illustrates the computation of EMD, Sinkhorn and smooth OT plans
and their visualization.

"""

# Author: Remi Flamary <remi.flamary@unice.fr>
#
# License: MIT License

import numpy as np
import matplotlib.pylab as pl
import ot
import ot.plot
from ot.datasets import get_1D_gauss as gauss

##############################################################################
# Generate data
# -------------


#%% parameters

n = 100  # nb bins

# bin positions
x = np.arange(n, dtype=np.float64)

# Gaussian distributions
a = gauss(n, m=20, s=5)  # m= mean, s= std
b = gauss(n, m=60, s=10)

# loss matrix
M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
M /= M.max()


##############################################################################
# Plot distributions and loss matrix
# ----------------------------------

#%% plot the distributions

pl.figure(1, figsize=(6.4, 3))
pl.plot(x, a, 'b', label='Source distribution')
pl.plot(x, b, 'r', label='Target distribution')
pl.legend()

#%% plot distributions and loss matrix

pl.figure(2, figsize=(5, 5))
ot.plot.plot1D_mat(a, b, M, 'Cost matrix M')

##############################################################################
# Solve EMD
# ---------


#%% EMD

G0 = ot.emd(a, b, M)

pl.figure(3, figsize=(5, 5))
ot.plot.plot1D_mat(a, b, G0, 'OT matrix G0')

##############################################################################
# Solve Sinkhorn
# --------------


#%% Sinkhorn

lambd = 2e-3
Gs = ot.sinkhorn(a, b, M, lambd, verbose=True)

pl.figure(4, figsize=(5, 5))
ot.plot.plot1D_mat(a, b, Gs, 'OT matrix Sinkhorn')

pl.show()

##############################################################################
# Solve Smooth OT
# --------------


#%% Smooth OT with KL regularization

lambd = 2e-3
Gsm = ot.smooth.smooth_ot_dual(a, b, M, lambd, reg_type='kl')

pl.figure(5, figsize=(5, 5))
ot.plot.plot1D_mat(a, b, Gsm, 'OT matrix Smooth OT KL reg.')

pl.show()


#%% Smooth OT with KL regularization

lambd = 1e-1
Gsm = ot.smooth.smooth_ot_dual(a, b, M, lambd, reg_type='l2')

pl.figure(6, figsize=(5, 5))
ot.plot.plot1D_mat(a, b, Gsm, 'OT matrix Smooth OT l2 reg.')

pl.show()
|
|
82dda8f12060ba0d3f83b6a9ff92bfcfbb212e25
|
Add utils
|
utils/extract_zipcodes.py
|
utils/extract_zipcodes.py
|
Python
| 0.000016
|
@@ -0,0 +1,433 @@
+import json
from bbvalib import create_mongoclient


db = create_mongoclient()
zipcodes = set()
zipcodes.update(db.top_clients_week.find().distinct("shop_zipcode"))
zipcodes.update(db.top_clients_month.find().distinct("shop_zipcode"))
zipcodes.update(db.top_clients_week.find().distinct("home_zipcode"))
zipcodes.update(db.top_clients_month.find().distinct("home_zipcode"))
json.dump(list(zipcodes), open('home_zipcodes.json', 'w'))
|
|
31110631d7fa43c695bc1f5504cd02c3d0cab745
|
Add some tests
|
tests/test_error_handlers.py
|
tests/test_error_handlers.py
|
Python
| 0
|
@@ -0,0 +1,669 @@
+from testutils import app  # noqa: F401


def test_not_found(client):
    '''Test 404 error handler'''
    results = client.get('/totally/made/up')
    assert results.status_code == 404
    assert results.json == {'error': 'Not found'}


def test_method_not_allowed(client):
    '''Test 405 error handler'''
    results = client.get('/api/v1.0/search')
    assert results.status_code == 405
    assert results.json == {'error': 'Method not allowed'}


def test_internal_server_error(client):
    '''Test 500 error handler'''
    results = client.get('/api/v1.0/stats')
    assert results.status_code == 500
    assert results.json == {'error': 'Internal server error'}
|
|
9d5be9d464168a3d9e9b3265a1581e0359f69f2a
|
test for #579
|
test/test_issue579.py
|
test/test_issue579.py
|
Python
| 0.000001
|
@@ -0,0 +1,497 @@
+# test for https://github.com/RDFLib/rdflib/issues/579

from rdflib import Graph, URIRef, Literal, Namespace
from rdflib.namespace import FOAF, RDF

g = Graph()
g.bind('foaf', FOAF)
n = Namespace("http://myname/")
g.add((n.bob, FOAF.name, Literal('bb')))
# query is successful.
assert len(g.query("select ?n where { ?n foaf:name 'bb' . }")) == 1
# update is not.
g.update("delete where { ?e foaf:name 'ss' .}")
assert len(g) == 1
g.update("delete where { ?e foaf:name 'bb' .}")
assert len(g) == 0
|
|
d9b4b0d913304b19365854b0ffceab179237d8f8
|
Add tests for float->int symbols (esp for 32-bit windows and linux)
|
numba/tests/test_floatsyms.py
|
numba/tests/test_floatsyms.py
|
Python
| 0
|
@@ -0,0 +1,1286 @@
+from __future__ import print_function
import numba.unittest_support as unittest
from numba.compiler import compile_isolated
from numba import types


class TestFloatSymbols(unittest.TestCase):
    """
    Test ftol symbols on windows
    """

    def _test_template(self, realty, intty):
        def cast(x):
            y = x
            return y

        cres = compile_isolated(cast, args=[realty], return_type=intty)
        self.assertAlmostEqual(cres.entry_point(1.), 1)

    def test_float64_to_int64(self):
        self._test_template(types.float64, types.int64)

    def test_float64_to_uint64(self):
        self._test_template(types.float64, types.uint64)

    def test_float64_to_int32(self):
        self._test_template(types.float64, types.int32)

    def test_float64_to_uint32(self):
        self._test_template(types.float64, types.uint32)

    def test_float32_to_int64(self):
        self._test_template(types.float32, types.int64)

    def test_float32_to_uint64(self):
        self._test_template(types.float32, types.uint64)

    def test_float32_to_int32(self):
        self._test_template(types.float32, types.int32)

    def test_float32_to_uint32(self):
        self._test_template(types.float32, types.uint32)


if __name__ == '__main__':
    unittest.main()
|
|
45a6e108418944026f6d67fa018573a831dc5107
|
add mul_recds.py
|
test/sql/mul_recds.py
|
test/sql/mul_recds.py
|
Python
| 0.00028
|
@@ -0,0 +1,160 @@
+import sqlite3

with sqlite3.connect("new.db") as connection:
    c = connection.cursor()

    cities = [
        ('Boston','MA',600000)
    ]

|
|
aa320afb447ab8486720a05f0613cad446c9bea9
|
Add graph module.
|
plumeria/plugins/graph.py
|
plumeria/plugins/graph.py
|
Python
| 0
|
@@ -0,0 +1,1912 @@
+import asyncio
import io
import re

import matplotlib
import pkg_resources

from plumeria.command import commands, CommandError
from plumeria.message import Response, MemoryAttachment
from plumeria.util.ratelimit import rate_limit

matplotlib.use('Agg')

import matplotlib.pyplot as plt
import matplotlib.font_manager as fm

PERCENTAGE_PATTERN = re.compile("([0-9]+\\.?[0-9]*)%")

font_path = pkg_resources.resource_filename("plumeria", 'fonts/FiraSans-Regular.ttf')


def generate_pie(labels, data):
    plt.figure(1, figsize=(5, 5))
    ax = plt.axes([0.1, 0.1, 0.4, 0.4])

    plt.pie(data, labels=labels, autopct='%1.0f%%', startangle=90)

    prop = fm.FontProperties(fname=font_path, size=11)
    for text in ax.texts:
        text.set_fontproperties(prop)

    buf = io.BytesIO()
    plt.savefig(buf, bbox_inches='tight', transparent="True", pad_inches=0.1)

    plt.clf()

    return buf


@commands.register("pie", category="Graphing")
@rate_limit()
async def image(message):
    """
    Generate a pie graph.

    """
    labels = []
    data = []
    total_pct = 0

    if ';' in message.content:
        delimeter = ';'
    elif ',' in message.content:
        delimeter = ','
    else:
        raise CommandError("Split pie sections with ; or ,")

    for part in message.content.strip().split(delimeter):
        m = PERCENTAGE_PATTERN.search(part)
        if m:
            labels.append(PERCENTAGE_PATTERN.sub("", part, 1).strip())
            pct = float(m.group(1)) / 100
            data.append(pct)
            total_pct += pct
        else:
            raise CommandError("Could not find a % in '{}'".format(part))

    data = list(map(lambda x: x / total_pct, data))

    def execute():
        return generate_pie(labels, data)

    buf = await asyncio.get_event_loop().run_in_executor(None, execute)

    return Response("", attachments=[MemoryAttachment(buf, "graph.png", "image/png")])
|
|
8ff6f08a497adba17bd02eae9ec6425a71927e08
|
Update admin.py
|
avatar/admin.py
|
avatar/admin.py
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from avatar.models import Avatar
from avatar.signals import avatar_updated
from avatar.templatetags.avatar_tags import avatar
from avatar.util import get_user_model
class AvatarAdmin(admin.ModelAdmin):
list_display = ('get_avatar', 'user', 'primary', "date_uploaded")
list_filter = ('primary',)
search_fields = ('user__%s' % getattr(get_user_model(), 'USERNAME_FIELD', 'username'),)
list_per_page = 50
def get_avatar(self, avatar_in):
return avatar(avatar_in.user, 80)
get_avatar.short_description = _('Avatar')
get_avatar.allow_tags = True
def save_model(self, request, obj, form, change):
super(AvatarAdmin, self).save_model(request, obj, form, change)
avatar_updated.send(sender=Avatar, user=request.user, avatar=obj)
admin.site.register(Avatar, AvatarAdmin)
|
Python
| 0.000015
|
@@ -81,16 +81,97 @@
azy as _
+
from django.utils import six
from django.template.loader import render_to_string


from a
@@ -243,59 +243,8 @@
ted
-from avatar.templatetags.avatar_tags import avatar
from
@@ -583,40 +583,251 @@
-return avatar(avatar_in.user, 80
+context = dict({
            'user': avatar_in.user,
            'url': avatar_in.avatar.url,
            'alt': six.text_type(avatar_in.user),
            'size': 80,
        })
        return render_to_string('avatar/avatar_tag.html',context
)
|
3453d31f010c12757f72fe690b6bc6c5368a07ba
|
Use manufacturer id only for configure_reporting only when specified. (#19729)
|
homeassistant/components/zha/helpers.py
|
homeassistant/components/zha/helpers.py
|
"""
Helpers for Zigbee Home Automation.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/zha/
"""
import asyncio
import logging
from .const import (
DEFAULT_BAUDRATE, REPORT_CONFIG_MAX_INT, REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_RPT_CHANGE, RadioType)
_LOGGER = logging.getLogger(__name__)
async def safe_read(cluster, attributes, allow_cache=True, only_cache=False):
"""Swallow all exceptions from network read.
If we throw during initialization, setup fails. Rather have an entity that
exists, but is in a maybe wrong state, than no entity. This method should
probably only be used during initialization.
"""
try:
result, _ = await cluster.read_attributes(
attributes,
allow_cache=allow_cache,
only_cache=only_cache
)
return result
except Exception: # pylint: disable=broad-except
return {}
async def bind_cluster(entity_id, cluster):
"""Bind a zigbee cluster.
This also swallows DeliveryError exceptions that are thrown when devices
are unreachable.
"""
from zigpy.exceptions import DeliveryError
cluster_name = cluster.ep_attribute
try:
res = await cluster.bind()
_LOGGER.debug(
"%s: bound '%s' cluster: %s", entity_id, cluster_name, res[0]
)
except DeliveryError as ex:
_LOGGER.debug(
"%s: Failed to bind '%s' cluster: %s",
entity_id, cluster_name, str(ex)
)
async def configure_reporting(entity_id, cluster, attr, skip_bind=False,
min_report=REPORT_CONFIG_MIN_INT,
max_report=REPORT_CONFIG_MAX_INT,
reportable_change=REPORT_CONFIG_RPT_CHANGE,
manufacturer=None):
"""Configure attribute reporting for a cluster.
This also swallows DeliveryError exceptions that are thrown when devices
are unreachable.
"""
from zigpy.exceptions import DeliveryError
attr_name = cluster.attributes.get(attr, [attr])[0]
cluster_name = cluster.ep_attribute
try:
res = await cluster.configure_reporting(attr, min_report,
max_report, reportable_change,
manufacturer=manufacturer)
_LOGGER.debug(
"%s: reporting '%s' attr on '%s' cluster: %d/%d/%d: Result: '%s'",
entity_id, attr_name, cluster_name, min_report, max_report,
reportable_change, res
)
except DeliveryError as ex:
_LOGGER.debug(
"%s: failed to set reporting for '%s' attr on '%s' cluster: %s",
entity_id, attr_name, cluster_name, str(ex)
)
async def bind_configure_reporting(entity_id, cluster, attr, skip_bind=False,
min_report=REPORT_CONFIG_MIN_INT,
max_report=REPORT_CONFIG_MAX_INT,
reportable_change=REPORT_CONFIG_RPT_CHANGE,
manufacturer=None):
"""Bind and configure zigbee attribute reporting for a cluster.
This also swallows DeliveryError exceptions that are thrown when devices
are unreachable.
"""
if not skip_bind:
await bind_cluster(entity_id, cluster)
await configure_reporting(entity_id, cluster, attr, skip_bind=False,
min_report=REPORT_CONFIG_MIN_INT,
max_report=REPORT_CONFIG_MAX_INT,
reportable_change=REPORT_CONFIG_RPT_CHANGE,
manufacturer=None)
async def check_zigpy_connection(usb_path, radio_type, database_path):
"""Test zigpy radio connection."""
if radio_type == RadioType.ezsp.name:
import bellows.ezsp
from bellows.zigbee.application import ControllerApplication
radio = bellows.ezsp.EZSP()
elif radio_type == RadioType.xbee.name:
import zigpy_xbee.api
from zigpy_xbee.zigbee.application import ControllerApplication
radio = zigpy_xbee.api.XBee()
try:
await radio.connect(usb_path, DEFAULT_BAUDRATE)
controller = ControllerApplication(radio, database_path)
await asyncio.wait_for(controller.startup(auto_form=True), timeout=30)
radio.close()
except Exception: # pylint: disable=broad-except
return False
return True
|
Python
| 0
|
@@ -2165,32 +2165,115 @@
er.ep_attribute
+    kwargs = {}
    if manufacturer:
        kwargs['manufacturer'] = manufacturer
try:
@@ -2462,33 +2462,16 @@
-manufacturer=manufacturer
+**kwargs
)
|
8420ec302cc35c1e65544caecc177849f92b7cbd
|
intent_service is OBVIOUSLY not a package
|
skills-sdk-setup.py
|
skills-sdk-setup.py
|
from setuptools import setup
from mycroft.util.setup_base import get_version, place_manifest
__author__ = 'seanfitz'
place_manifest("skills-sdk-MANIFEST.in")
setup(
name="mycroft-skills-sdk",
version=get_version(),
install_requires=[
"mustache==0.1.4",
"configobj==5.0.6",
"pyee==1.0.1",
"adapt-parser==0.2.1",
"padatious==0.1.4"
"websocket-client==0.32.0"
],
packages=[
"mycroft.configuration",
"mycroft.dialog",
"mycroft.filesystem",
"mycroft.messagebus",
"mycroft.messagebus.client",
"mycroft.session",
"mycroft.skills.intent_service",
"mycroft.skills",
"mycroft.util",
"mycroft"
],
include_package_data=True,
entry_points={
'console_scripts': [
'mycroft-skill-container=mycroft.skills.container:main'
]
}
)
|
Python
| 0.999999
|
@@ -623,49 +623,8 @@
n",
-        "mycroft.skills.intent_service",
|
4f3854eaf8d6e4b0ad9a77e871a946916ab3fec6
|
Migrate listings.syndication, FeedType.content_type should not be unique.
|
listings/syndication/migrations/0002_auto__del_unique_feedtype_content_type.py
|
listings/syndication/migrations/0002_auto__del_unique_feedtype_content_type.py
|
Python
| 0
|
@@ -0,0 +1,1886 @@
+# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Removing unique constraint on 'FeedType', fields ['content_type']
        db.delete_unique('syndication_feedtype', ['content_type'])


    def backwards(self, orm):
        # Adding unique constraint on 'FeedType', fields ['content_type']
        db.create_unique('syndication_feedtype', ['content_type'])


    models = {
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'syndication.feed': {
            'Meta': {'object_name': 'Feed'},
            'feed_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['syndication.FeedType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'site': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'})
        },
        'syndication.feedtype': {
            'Meta': {'object_name': 'FeedType'},
            'content_type': ('django.db.models.fields.CharField', [], {'default': "'application/xml'", 'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'template': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['syndication']
|
|
1c6c32d453a1b2dd7011f05787e3a10a2a18bf62
|
Version bump.
|
src/xworkflows/__init__.py
|
src/xworkflows/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012 Raphaël Barrois
__version__ = '0.4.0'
__author__ = 'Raphaël Barrois <raphael.barrois@polytechnique.org>'
from . import base
# Errors
AbortTransition = base.AbortTransition
ForbiddenTransition = base.ForbiddenTransition
InvalidTransitionError = base.InvalidTransitionError
WorkflowError = base.WorkflowError
# Defining and applying workflows
Workflow = base.Workflow
WorkflowEnabled = base.WorkflowEnabled
# Decorators
transition = base.transition
# Hooks
before_transition = base.before_transition
after_transition = base.after_transition
transition_check = base.transition_check
on_enter_state = base.on_enter_state
on_leave_state = base.on_leave_state
|
Python
| 0
|
@@ -79,17 +79,17 @@
= '0.4.
-0
+1
'
__auth
|
7b1fb0eb7f063c00c89b57ceca64a01881a7d4d9
|
add const_thrust helper
|
crazyflie_demo/scripts/const_thrust.py
|
crazyflie_demo/scripts/const_thrust.py
|
Python
| 0.000001
|
@@ -0,0 +1,432 @@
+#!/usr/bin/env python

import rospy
from geometry_msgs.msg import Twist

if __name__ == '__main__':
    rospy.init_node('crazyflie_demo_const_thrust', anonymous=True)
    p = rospy.Publisher('cmd_vel', Twist)
    twist = Twist()
    r = rospy.Rate(50)
    #for i in range(0, 100):
    #    p.publish(twist)
    #    r.sleep()

    twist.linear.z = 12000
    while not rospy.is_shutdown():
        p.publish(twist)
        r.sleep()
|
|
50331d662d67d9f625f9f9198988522b38b2d1f0
|
add task for midnight search index update, https://www.pivotaltracker.com/story/show/13730025
|
apps/search/tasks.py
|
apps/search/tasks.py
|
Python
| 0.000011
|
@@ -0,0 +1,337 @@
+from utils.celery_search_index import LogEntry
from utils.celery_utils import task
from celery.schedules import crontab
from celery.decorators import periodic_task
from django.core.management import call_command

@periodic_task(run_every=crontab(minute=0, hour=0))
def update_search_index():
    call_command('update_index', verbosity=2)
|
|
3c2438049da743b53cb7a536ddc2db1a05302a33
|
Add grab.tools.logs to configure logging module
|
grab/tools/logs.py
|
grab/tools/logs.py
|
Python
| 0.000001
|
@@ -0,0 +1,389 @@
+import logging

def default_logging(grab_log='/tmp/grab.log'):
    """
    Customize logging output to display all log messages
    except grab network logs.

    Redirect grab network logs into file.
    """

    logging.basicConfig(level=logging.DEBUG)
    glog = logging.getLogger('grab')
    glog.propagate = False
    hdl = logging.FileHandler(grab_log, 'w')
    glog.addHandler(hdl)
|
|
4662b430087404dbf011cf9ad97ee1e3188bfb9d
|
create wrapper for c_curve
|
wrappers/preseq/observed_complexity/wrapper.py
|
wrappers/preseq/observed_complexity/wrapper.py
|
Python
| 0.000001
|
@@ -0,0 +1,344 @@
+__author__ = "Behram Radmanesh"
__copyright__ = "Copyright 2016, Behram Radmanesh"
__email__ = "behram.radmanesh@nih.gov"
__license__ = "MIT"

# import snakemake's ability to execute shell commands
from snakemake.shell import shell

# execute preseq c_curve
shell("preseq c_curve {snakemake.params} {snakemake.input[0]} {snakemake.output[0]}")
|
|
ec1b3be5545d5ae530d3dc7dd8d90e6fe4730926
|
add unittest for heap
|
tests/testIndexedHeap.py
|
tests/testIndexedHeap.py
|
Python
| 0
|
@@ -0,0 +1,1225 @@
+'''
Created on 2017/9/29

:author: hubo
'''
import unittest
from random import randrange, sample
from vlcp.utils.indexedheap import IndexedHeap

class Test(unittest.TestCase):


    def testRandomSort(self):
        data = [(randrange(0,10000), randrange(0,10000)) for _ in range(0,1000)]
        data = list((v,k)
                    for k, v in dict((d[1], d[0]) for d in data).items())
        heap = IndexedHeap()
        for d in data:
            heap.push(d[1], d)
        # Remove min item
        minv = heap.top()
        self.assertEqual(minv, min(data)[1])
        heap.remove(minv)
        data.remove(min(data))
        # Remove last
        last = heap.heap[-1][0]
        heap.remove(last[1])
        data.remove(last)
        self.assertEqual(len(heap), len(data))
        # Random remove
        remove_sample = sample(data, 100)
        data = [d for d in data if d not in remove_sample]
        for d in remove_sample:
            heap.remove(d[1])
        result = []
        while heap:
            result.append(heap.pop())
        self.assertListEqual(result, [d[1] for d in sorted(data)])


if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testRandomSort']
    unittest.main()
|
|
fb2292b58267d7d4d3c463879832d6327246155a
|
Add reflected magic methods
|
cupyx/fallback_mode/ndarray.py
|
cupyx/fallback_mode/ndarray.py
|
"""
class ndarray is wrapper around cupy.ndarray
to support fallback of methods of type `ndarray.func()`
"""
import sys
import numpy as np
import cupy as cp
from cupyx.fallback_mode import data_transfer
try:
from cupyx.fallback_mode import fallback
except ImportError:
import sys
fallback = sys.modules[__package__ + '.fallback']
class ndarray:
"""
Wrapper around cupy.ndarray
Gets initialized with a cupy ndarray.
"""
def __init__(self, array):
self._array = array
def __getattr__(self, attr):
"""
Catches attributes corresponding to ndarray.
Args:
attr (str): Attribute of ndarray class.
Returns:
(_RecursiveAttr object, self._array.attr):
Returns_RecursiveAttr object with numpy_object, cupy_object.
Returns self._array.attr if attr is not callable.
"""
cupy_object = getattr(cp.ndarray, attr, None)
numpy_object = getattr(np.ndarray, attr)
if not callable(numpy_object):
return getattr(self._array, attr)
return fallback._RecursiveAttr(numpy_object, cupy_object, self)
def _get_array(self):
"""
Returns _array (cupy.ndarray) of ndarray object.
"""
return self._array
def _get_cupy_ndarray(args, kwargs):
return data_transfer._get_xp_args(
ndarray, ndarray._get_array, (args, kwargs))
def _get_fallback_ndarray(cupy_res):
return data_transfer._get_xp_args(cp.ndarray, ndarray, cupy_res)
# Decorator for ndarray magic methods
def make_method(name):
def method(self, *args, **kwargs):
args, kwargs = _get_cupy_ndarray(args, kwargs)
cupy_method = getattr(cp.ndarray, name)
res = cupy_method(self._array, *args, **kwargs)
return _get_fallback_ndarray(res)
return method
def _create_magic_methods():
"""
Set magic methods of cupy.ndarray as methods of utils.ndarray.
"""
_common = [
# Comparison operators:
'__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__',
# Unary operations:
'__neg__', '__pos__', '__abs__', '__invert__',
# Arithmetic:
'__add__', '__sub__', '__mul__',
'__truediv__', '__floordiv__', '__mod__', '__divmod__', '__pow__',
'__lshift__', '__rshift__', '__and__', '__or__', '__xor__',
# Arithmetic, in-place:
'__iadd__', '__isub__', '__imul__', '__itruediv__',
'__ifloordiv__', '__imod__', '__ipow__', '__ilshift__', '__irshift__',
'__iand__', '__ior__', '__ixor__',
# For standard library functions:
'__copy__', '__deepcopy__', '__reduce__',
# Container customization:
'__iter__', '__len__', '__getitem__', '__setitem__',
# Conversion:
'__int__', '__float__', '__complex__',
# String representations:
'__repr__', '__str__'
]
_py3 = ['__matmul__', '__bool__']
_py2 = [
'__div__', '__idiv__', '__nonzero__', '__long__', '__hex__', '__oct__']
_specific = _py3
if sys.version_info[0] == 2:
_specific = _py2
for method in _common + _specific:
setattr(ndarray, method, make_method(method))
_create_magic_methods()
|
Python
| 0.000001
|
@@ -2230,32 +2230,24 @@
, '__mul__',
-
'__truediv_
@@ -2261,24 +2261,32 @@
floordiv__',
+
'__mod__',
@@ -2305,32 +2305,24 @@
, '__pow__',
-
'__lshift__
@@ -2333,24 +2333,32 @@
__rshift__',
+
'__and__',
@@ -2474,38 +2474,265 @@
__',
-
 '__ifloordiv__', '__i
+ '__ifloordiv__',
        '__imod__', '__ipow__', '__ilshift__', '__irshift__',
        '__iand__', '__ior__', '__ixor__',

        # reflected-methods:
        '__radd__', '__rsub__', '__rmul__', '__rtruediv__', '__rfloordiv__',
        '__rmod__', '__rdiv
mod_
@@ -2730,33 +2730,33 @@
_rdivmod__', '__
-i
+r
pow__', '__ilshi
@@ -2742,33 +2742,33 @@
'__rpow__', '__
-i
+r
lshift__', '__ir
@@ -2757,33 +2757,33 @@
_rlshift__', '__
-i
+r
rshift__',
@@ -2780,33 +2780,33 @@
__',
        '__
-i
+r
and__', '__ior__
@@ -2792,33 +2792,33 @@
'__rand__', '__
-i
+r
or__', '__ixor__
@@ -2803,33 +2803,33 @@
, '__ror__', '__
-i
+r
xor__',

@@ -3164,16 +3164,25 @@
_py3 = [
+
'__matmu
@@ -3182,24 +3182,39 @@
__matmul__',
+ '__rmatmul__',
'__bool__']
@@ -3212,16 +3212,21 @@
_bool__'
+
]

    _
@@ -3247,24 +3247,36 @@
'__div__',
+ '__rdiv__',
'__idiv__',
@@ -3286,24 +3286,32 @@
_nonzero__',
+
'__long__',
@@ -3331,16 +3331,21 @@
__oct__'
+
]

    _
|
5db58544133c66c5cbb4122c99a95a0ca6ddfa26
|
Create RomeOculus.py
|
home/Alessandruino/RomeOculus.py
|
home/Alessandruino/RomeOculus.py
|
Python
| 0.000007
|
@@ -0,0 +1,491 @@
+i01 = Runtime.createAndStart("i01","InMoov")
i01.startHead("/dev/tty.usbmodem1411")
#i01.startLeftArm("COM5")
#leftHand = i01.startLeftHand("COM5")
#i01.leftArm.bicep.setMinMax(5,80)

#i01.leftArm.bicep.moveTo(30)

oculus = Runtime.start("oculus","OculusDIY")
oculus.arduino.connect("/dev/tty.usbmodem14541")

def onOculusData(data):

    print data.yaw
    print data.pitch

    i01.head.neck.moveTo(int(data.pitch))
    i01.head.rothead.moveTo(int(data.yaw))

oculus.addOculusDataListener(python)
|
|
6b8e24485f28e1c0408cefdb989e078a332211d6
|
Fix migration chain
|
src/ggrc/migrations/versions/20160202161748_37b2a060bdd6_remove_object_type_table.py
|
src/ggrc/migrations/versions/20160202161748_37b2a060bdd6_remove_object_type_table.py
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""remove object type table
Revision ID: 37b2a060bdd6
Revises: 262bbe790f4c
Create Date: 2016-02-02 16:17:48.928846
"""
# Disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=C0103
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
# revision identifiers, used by Alembic.
revision = '37b2a060bdd6'
down_revision = '262bbe790f4c'
def upgrade():
"""Remove object_type foreign key to notifications.
Make sure the object_type in notifications is the same string field as in all
other polymorphic tables and remove the now obsolete object_types tabele.
"""
op.execute(
"ALTER TABLE notifications "
"ADD COLUMN object_type VARCHAR(250) NOT NULL "
"AFTER object_id;"
)
op.execute(
"UPDATE notifications AS n "
"LEFT JOIN object_types AS o "
"ON n.object_type_id = o.id "
"SET n.object_type = o.name"
)
op.drop_constraint("notifications_ibfk_1", "notifications", "foreignkey")
op.drop_column("notifications", "object_type_id")
op.drop_table('object_types')
def downgrade():
"""Add object_type foreign key to notifications.
Add the old object_types table and fill it with previous data, then replace
object_type string field in notifications with object_type_id foreign key.
"""
object_types_table = op.create_table(
'object_types',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=250), nullable=False),
sa.Column('description', sa.String(length=250), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('object_types_name', 'object_types', ['name'], unique=True)
object_types_table = table(
'object_types',
column('id', sa.Integer),
column('name', sa.String),
column('description', sa.Text),
column('created_at', sa.DateTime),
column('modified_by_id', sa.Integer),
column('updated_at', sa.DateTime),
column('context_id', sa.Integer),
)
op.bulk_insert(
object_types_table,
[
{"name": "Workflow", "description": ""},
{"name": "TaskGroup", "description": ""},
{"name": "TaskGroupTask", "description": ""},
{"name": "TaskGroupObject", "description": ""},
{"name": "Cycle", "description": ""},
{"name": "CycleTaskGroup", "description": ""},
{"name": "CycleTaskGroupObject", "description": ""},
{"name": "CycleTaskGroupObjectTask", "description": ""},
]
)
op.add_column(
"notifications",
sa.Column("object_type_id", sa.Integer(), nullable=True)
)
op.execute(
"UPDATE notifications AS n "
"LEFT JOIN object_types AS o "
"ON n.object_type = o.name "
"SET n.object_type_id = o.id"
)
op.create_foreign_key("notifications_ibfk_1", "notifications",
"object_types", ["object_type_id"], ["id"])
op.drop_column("notifications", "object_type")
|
Python
| 0.000025
|
@@ -237,17 +237,17 @@
om%0A%0A%0A%22%22%22
-r
+R
emove ob
@@ -635,28 +635,27 @@
sion = '
-262bbe790f4c
+6bed0575a0b
'%0A%0A%0Adef
|
c5fba0cc8acb482a0bc1c49ae5187ebc1232dba3
|
Add tests for the different input variations.
|
tests/test_directions.py
|
tests/test_directions.py
|
Python
| 0
|
@@ -0,0 +1,1718 @@
+import unittest%0A%0Afrom shapely.geometry import LineString, Point%0A%0Afrom directions.base import _parse_points%0A%0Aclass DirectionsTest(unittest.TestCase):%0A def setUp(self):%0A self.p = %5B(1,2), (3,4), (5,6), (7,8)%5D%0A self.line = LineString(self.p)%0A%0A def test_origin_dest(self):%0A result = _parse_points(self.p%5B0%5D, self.p%5B-1%5D)%0A self.assertEqual(%5Bself.p%5B0%5D, self.p%5B-1%5D%5D, result)%0A%0A def test_origin_dest_waypoints(self):%0A result = _parse_points(self.p%5B0%5D, self.p%5B-1%5D, self.p%5B1:-1%5D)%0A self.assertEqual(self.p, result)%0A%0A def test_line(self):%0A result = _parse_points(self.line)%0A self.assertEqual(self.p, result)%0A%0A def test_points(self):%0A p0 = Point(self.line.coords%5B0%5D)%0A p1 = Point(self.line.coords%5B-1%5D)%0A result = _parse_points(p0, p1)%0A self.assertEqual(%5Bself.p%5B0%5D, self.p%5B-1%5D%5D, result)%0A%0A def test_points_array(self):%0A p0 = Point(self.p%5B0%5D)%0A p1 = Point(self.p%5B-1%5D)%0A result = _parse_points(%5Bp0, p1%5D)%0A self.assertEqual(%5Bself.p%5B0%5D, self.p%5B-1%5D%5D, result)%0A%0A def test_mixed_types(self):%0A origin = 'blah'%0A destination = Point(self.p%5B-1%5D)%0A points = self.p%5B1:-1%5D%0A expected = list(self.p) # Copy it%0A expected%5B0%5D = 'blah'%0A result = _parse_points(origin, destination, points)%0A self.assertEqual(expected, result)%0A%0A def test_no_dest_waypoints(self):%0A # Can't specify waypoints without destination%0A with self.assertRaises(ValueError):%0A _parse_points('origin', waypoints=%5B'p1'%5D)%0A%0A def test_bad_input(self):%0A # Test points not length 2%0A with self.assertRaises(ValueError):%0A _parse_points(self.p%5B0%5D, (1.0, 2.0, 3.0))%0A
|
|
635682c9d206cd9ae6ea184f9361937b0a272b90
|
Add monadic utilities MonadicDict and MonadicDictCursor.
|
wqflask/utility/monads.py
|
wqflask/utility/monads.py
|
Python
| 0
|
@@ -0,0 +1,2462 @@
+%22%22%22Monadic utilities%0A%0AThis module is a collection of monadic utilities for use in%0AGeneNetwork. It includes:%0A%0A* MonadicDict - monadic version of the built-in dictionary%0A* MonadicDictCursor - monadic version of MySQLdb.cursors.DictCursor%0A that returns a MonadicDict instead of the built-in dictionary%0A%22%22%22%0A%0Afrom collections import UserDict%0Afrom functools import partial%0A%0Afrom MySQLdb.cursors import DictCursor%0Afrom pymonad.maybe import Just, Nothing%0A%0Aclass MonadicDict(UserDict):%0A %22%22%22%0A Monadic version of the built-in dictionary.%0A%0A Keys in this dictionary can be any python object, but values must%0A be monadic values.%0A %22%22%22%0A def __init__(self, d, convert=True):%0A %22%22%22%0A Initialize monadic dictionary.%0A%0A If convert is False, values in dictionary d must be%0A monadic. If convert is True, values in dictionary d are%0A converted to monadic values.%0A %22%22%22%0A if convert:%0A super().__init__(%7Bkey:(Nothing if value is None else Just(value))%0A for key, value in d.items()%7D)%0A else:%0A super().__init__(d)%0A def __getitem__(self, key):%0A %22%22%22%0A Get key from dictionary.%0A%0A If key exists in the dictionary, return a Just value. Else,%0A return Nothing.%0A %22%22%22%0A try:%0A return Just(self.data%5Bkey%5D)%0A except KeyError:%0A return Nothing%0A def __setitem__(self, key, value):%0A %22%22%22%0A Set key in dictionary.%0A%0A value must be a monadic value---either Nothing or a Just%0A value. If value is a Just value, set it in the dictionary. If%0A value is Nothing, do nothing.%0A %22%22%22%0A value.bind(partial(super().__setitem__, key))%0A def __delitem__(self, key):%0A %22%22%22%0A Delete key from dictionary.%0A%0A If key exists in the dictionary, delete it. Else, do nothing.%0A %22%22%22%0A try:%0A super().__delitem__(key)%0A except KeyError:%0A pass%0A%0Aclass MonadicDictCursor(DictCursor):%0A %22%22%22%0A Monadic version of MySQLdb.cursors.DictCursor.%0A%0A Monadic version of MySQLdb.cursors.DictCursor that returns a%0A MonadicDict instead of the built-in dictionary.%0A %22%22%22%0A def fetchone(self):%0A return MonadicDict(super().fetchone())%0A def fetchmany(self, size=None):%0A return %5BMonadicDict(row) for row in super().fetchmany(size=size)%5D%0A def fetchall(self):%0A return %5BMonadicDict(row) for row in super().fetchall()%5D%0A
|
|
d21e0721b614423e07e81809fb60dd936494bfff
|
Add validators test
|
tests/test_validators.py
|
tests/test_validators.py
|
Python
| 0.000001
|
@@ -0,0 +1,810 @@
+import attr%0Afrom twisted.trial import unittest%0A%0Aimport txnats%0Afrom txnats.validators import is_instance_of_nats_protocol%0A%0A%0A@attr.s%0Aclass Foo(object):%0A protocol = attr.ib(default=None, %0A validator=attr.validators.optional(%0A is_instance_of_nats_protocol%0A )%0A )%0A%0A%0Aclass IsNatsProtocolTest(unittest.TestCase):%0A maxDiff = None%0A def test_foo_valid(self):%0A %22%22%22%0A Ensure Foo.protocol may be a NatsProtocol.%0A %22%22%22%0A Foo(protocol=txnats.io.NatsProtocol())%0A %0A def test_foo_none(self):%0A %22%22%22%0A Ensure Foo.protocol may be None.%0A %22%22%22%0A Foo()%0A%0A def test_foo_invalid(self):%0A %22%22%22%0A Ensure any other value is invalid for Foo.protocol.%0A %22%22%22%0A with self.assertRaises(ValueError):%0A Foo(protocol=4)%0A%0A
|
|
65e08089d6d89b25ca89a7f194ce1e4e72a9640b
|
Update cut-off-trees-for-golf-event.py
|
Python/cut-off-trees-for-golf-event.py
|
Python/cut-off-trees-for-golf-event.py
|
# Time: O(t * (logt + m * n)), t is the number of trees
# Space: O(t + m * n)
# You are asked to cut off trees in a forest for a golf event.
# The forest is represented as a non-negative 2D map, in this map:
#
# 0 represents the obstacle can't be reached.
# 0 represents an obstacle that can't be reached.
# 1 represents ground that can be walked through.
# A place with a number bigger than 1 represents a tree that can be walked through,
# and this positive number represents the tree's height.
#
# You are asked to cut off all the trees in this forest in order of tree height -
# always cut off the tree with the lowest height first. After cutting, the place
# that had the tree becomes grass (value 1).
# You will start from the point (0, 0)
# and you should output the minimum steps you need to walk to cut off all the trees.
# If you can't cut off all the trees, output -1 in that situation.
#
# You are guaranteed that no two trees have the same height and there is at least one tree that needs to be cut off.
#
# Example 1:
# Input:
# [
# [1,2,3],
# [0,0,4],
# [7,6,5]
# ]
# Output: 6
# Example 2:
# Input:
# [
# [1,2,3],
# [0,0,0],
# [7,6,5]
# ]
# Output: -1
# Example 3:
# Input:
# [
# [2,3,4],
# [0,0,5],
# [8,7,6]
# ]
# Output: 6
# Explanation: You started from the point (0,0) and you can cut off the tree
# in (0,0) directly without walking.
# Hint: size of the given matrix will not exceed 50x50.
# Solution Reference:
# 1. https://discuss.leetcode.com/topic/103532/my-python-solution-inspired-by-a-algorithm/2
# 2. https://discuss.leetcode.com/topic/103562/python-solution-based-on-wufangjie-s-hadlock-s-algorithm
# 3. https://en.wikipedia.org/wiki/A*_search_algorithm
# 4. https://cg2010studio.files.wordpress.com/2011/12/dijkstra-vs-a-star.png
import collections
import heapq
class Solution(object):
def cutOffTree(self, forest):
"""
:type forest: List[List[int]]
:rtype: int
"""
def dot(p1, p2):
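            # Dot product used to test whether a step heads toward the target (Hadlock-style heuristic)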
return p1[0]*p2[0]+p1[1]*p2[1]
def minStep(p1, p2):
min_steps = abs(p1[0]-p2[0])+abs(p1[1]-p2[1])
closer, detour = [p1], []
lookup = set()
while True:
if not closer: # cannot find a path in the closer expansion
if not detour: # no other possible path
return -1
# try other possible paths in another detour expansion with extra 2-step cost
min_steps += 2
closer, detour = detour, closer
i, j = closer.pop()
if (i, j) == p2:
return min_steps
if (i, j) not in lookup:
lookup.add((i, j))
for I, J in (i+1, j), (i-1, j), (i, j+1), (i, j-1):
if 0 <= I < m and 0 <= J < n and forest[I][J] and (I, J) not in lookup:
is_closer = dot((I-i, J-j), (p2[0]-i, p2[1]-j)) > 0
(closer if is_closer else detour).append((I, J))
return min_steps
m, n = len(forest), len(forest[0])
min_heap = []
for i in xrange(m):
for j in xrange(n):
if forest[i][j] > 1:
heapq.heappush(min_heap, (forest[i][j], (i, j)))
start = (0, 0)
result = 0
while min_heap:
tree = heapq.heappop(min_heap)
step = minStep(start, tree[1])
if step < 0:
return -1
result += step
start = tree[1]
return result
# Time: O(t * (logt + m * n)), t is the number of trees
# Space: O(t + m * n)
class Solution_TLE(object):
def cutOffTree(self, forest):
"""
:type forest: List[List[int]]
:rtype: int
"""
def minStep(p1, p2):
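            # Plain BFS shortest path, re-run from scratch for every tree (hence the TLE)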
min_steps = 0
lookup = {p1}
q = collections.deque([p1])
while q:
size = len(q)
for _ in xrange(size):
(i, j) = q.popleft()
if (i, j) == p2:
return min_steps
for i, j in (i+1, j), (i-1, j), (i, j+1), (i, j-1):
if not (0 <= i < m and 0 <= j < n and forest[i][j] and (i, j) not in lookup):
continue
q.append((i, j))
lookup.add((i, j))
min_steps += 1
return -1
m, n = len(forest), len(forest[0])
min_heap = []
for i in xrange(m):
for j in xrange(n):
if forest[i][j] > 1:
heapq.heappush(min_heap, (forest[i][j], (i, j)))
start = (0, 0)
result = 0
while min_heap:
tree = heapq.heappop(min_heap)
step = minStep(start, tree[1])
if step < 0:
return -1
result += step
start = tree[1]
return result
|
Python
| 0
|
@@ -2189,16 +2189,17 @@
xpansion
+s
%0A
@@ -2340,16 +2340,8 @@
in
-another
deto
@@ -2352,16 +2352,17 @@
xpansion
+s
with ex
|
45f26b56d177798efca4825f063372b505df6a76
|
Add gameman test
|
tests/gameman_test.py
|
tests/gameman_test.py
|
Python
| 0
|
@@ -0,0 +1,1024 @@
+import pytest%0Afrom savman import gameman%0A%0A@pytest.fixture%0Adef dir1(tmpdir):%0A return tmpdir.mkdir('dir1')%0A%0A@pytest.fixture%0Adef dir2(tmpdir):%0A return tmpdir.mkdir('dir2')%0A%0A@pytest.fixture%0Adef customfile(tmpdir, dir1, dir2):%0A file = tmpdir.join('custom.txt')%0A custom = '''%0A---%0Aname: My Game%0Adirectory: %7B%7D%0Ainclude:%0A- folder1/* # Include all files from folder%0Aexclude:%0A- '*.png'%0A---%0Aname: My Game 2%0Adirectory: %7B%7D%0A'''.format(str(dir1), str(dir2))%0A file.write(custom)%0A return file%0A%0A%0A%0Adef test_load_custom(customfile, dir1, dir2):%0A gman = gameman.GameMan('DUMMY')%0A gman.load_custom(str(customfile))%0A assert 'MyGame' in gman.games%0A assert 'MyGame2' in gman.games%0A game1 = gman.games%5B'MyGame'%5D%0A game2 = gman.games%5B'MyGame2'%5D%0A assert game1.name == 'My Game'%0A assert game2.name == 'My Game 2'%0A assert game1.locations%5B0%5D.path == str(dir1)%0A assert game2.locations%5B0%5D.path == str(dir2)%0A assert 'folder1/*' in game1.locations%5B0%5D.include%0A assert '*.png' in game1.locations%5B0%5D.exclude
|
|
1e3781bc3527f72053fdc4aad4f4887c567c457c
|
Add unicode test.
|
tests/unicode/unicode.py
|
tests/unicode/unicode.py
|
Python
| 0.000002
|
@@ -0,0 +1,627 @@
+# Test a UTF-8 encoded literal%0As = %22asdf%C2%A9qwer%22%0Afor i in range(len(s)):%0A print(%22s%5B%25d%5D: %25s %25X%22%25(i, s%5Bi%5D, ord(s%5Bi%5D)))%0A%0A# Test all three forms of Unicode escape, and%0A# all blocks of UTF-8 byte patterns%0As = %22a%5CxA9%5CxFF%5Cu0123%5Cu0800%5CuFFEE%5CU0001F44C%22%0Afor i in range(-len(s), len(s)):%0A print(%22s%5B%25d%5D: %25s %25X%22%25(i, s%5Bi%5D, ord(s%5Bi%5D)))%0A print(%22s%5B:%25d%5D: %25d chars, '%25s'%22%25(i, len(s%5B:i%5D), s%5B:i%5D))%0A for j in range(i, len(s)):%0A print(%22s%5B%25d:%25d%5D: %25d chars, '%25s'%22%25(i, j, len(s%5Bi:j%5D), s%5Bi:j%5D))%0A print(%22s%5B%25d:%5D: %25d chars, '%25s'%22%25(i, len(s%5Bi:%5D), s%5Bi:%5D))%0A%0A# Test UTF-8 encode and decode%0Aenc = s.encode()%0Aprint(enc, enc.decode() == s)%0A
|
|
40154d7de207df9689ac220cc8966735cb3ed5af
|
Test asyncio in python 3.6
|
tests/test_asyncio.py
|
tests/test_asyncio.py
|
Python
| 0.000003
|
@@ -0,0 +1,411 @@
+import asyncio%0A%0Aasync def routine0(s,n):%0A print('CRT:',s,':',n)%0A%0Aasync def routine(id, n):%0A print('TEST%5B%25s%5D %25d'%25(id,n))%0A if not n:%0A return%0A n -= 1%0A await routine(id, n)%0A await routine0(id, n)%0A%0Aloop = asyncio.get_event_loop()%0Atasks = %5B%0A asyncio.ensure_future(routine('a',5)),%0A asyncio.ensure_future(routine('b',8))%5D%0Aprint('muf')%0Aloop.run_until_complete(asyncio.wait(tasks))%0Aprint('puf')%0Aloop.close()%0A%0A
|
|
c63651a5fba9dd67b345bfb95adef5d6206f5da3
|
Add file lock
|
tagcache/lock.py
|
tagcache/lock.py
|
Python
| 0.000001
|
@@ -0,0 +1,1072 @@
+# -*- encoding: utf-8 -*-%0A%0Aimport os%0Aimport fcntl%0A%0A%0Aclass FileLock(object):%0A%0A def __init__(self, path):%0A%0A self.path = path%0A%0A self.fd = None%0A%0A def acquire(self, write=False, block=True):%0A%0A if self.fd is not None:%0A%0A self.release()%0A%0A try:%0A # open or create the file%0A open_flags = os.O_RDWR if write else os.O_RDONLY%0A%0A open_flags %7C= os.O_CREAT%0A%0A self.fd = os.open(self.path, open_flags)%0A%0A # try to lock the file%0A lock_flags = fcntl.LOCK_EX if write else fcntl.LOCK_SH%0A%0A if not block:%0A%0A lock_flags %7C= fcntl.LOCK_NB%0A%0A fcntl.flock(self.fd, lock_flags)%0A%0A return self.fd%0A%0A #except (OSError, IOError):%0A except:%0A%0A # open file failed or lock failed%0A if self.fd is not None:%0A%0A os.close(self.fd)%0A%0A self.fd = None%0A%0A return None%0A%0A def release(self):%0A%0A if self.fd is None:%0A%0A return%0A%0A os.close(self.fd)%0A%0A self.fd = None%0A%0A%0A%0A
|
|
3625646c34fed4c5081e73c175e257ee426a4c37
|
Fix reproduce_state
|
homeassistant/helpers/state.py
|
homeassistant/helpers/state.py
|
"""
homeassistant.helpers.state
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Helpers that help with state related things.
"""
import logging
from homeassistant.core import State
import homeassistant.util.dt as dt_util
from homeassistant.const import (
STATE_ON, STATE_OFF, SERVICE_TURN_ON, SERVICE_TURN_OFF,
SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PAUSE,
STATE_PLAYING, STATE_PAUSED, ATTR_ENTITY_ID)
_LOGGER = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods, attribute-defined-outside-init
class TrackStates(object):
"""
Records the time when the with-block is entered. Will add all states
that have changed since the start time to the return list when with-block
is exited.
"""
def __init__(self, hass):
self.hass = hass
self.states = []
def __enter__(self):
self.now = dt_util.utcnow()
return self.states
def __exit__(self, exc_type, exc_value, traceback):
self.states.extend(get_changed_since(self.hass.states.all(), self.now))
def get_changed_since(states, utc_point_in_time):
"""
Returns all states that have been changed since utc_point_in_time.
"""
point_in_time = dt_util.strip_microseconds(utc_point_in_time)
return [state for state in states if state.last_updated >= point_in_time]
def reproduce_state(hass, states, blocking=False):
""" Takes in a state and will try to have the entity reproduce it. """
if isinstance(states, State):
states = [states]
for state in states:
current_state = hass.states.get(state.entity_id)
if current_state is None:
_LOGGER.warning('reproduce_state: Unable to find entity %s',
state.entity_id)
continue
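        # Map the desired state to the service call that can reproduce it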
if state.domain == 'media_player' and state == STATE_PAUSED:
service = SERVICE_MEDIA_PAUSE
elif state.domain == 'media_player' and state == STATE_PLAYING:
service = SERVICE_MEDIA_PLAY
elif state.state == STATE_ON:
service = SERVICE_TURN_ON
elif state.state == STATE_OFF:
service = SERVICE_TURN_OFF
else:
_LOGGER.warning("reproduce_state: Unable to reproduce state %s",
state)
continue
service_data = dict(state.attributes)
service_data[ATTR_ENTITY_ID] = state.entity_id
hass.services.call(state.domain, service, service_data, blocking)
|
Python
| 0.000009
|
@@ -1786,32 +1786,38 @@
layer' and state
+.state
== STATE_PAUSED
@@ -1909,24 +1909,30 @@
r' and state
+.state
== STATE_PL
|
58d5d6b078abe524c51e6db7963cc5091b0e0835
|
Remove failing test
|
tests/test_cmdline.py
|
tests/test_cmdline.py
|
# vim:fileencoding=utf-8:noet
'''Tests for shell.py parser'''
from powerline.shell import get_argparser, finish_args
from tests import TestCase
from tests.lib import replace_attr
import sys
if sys.version_info < (3,):
from io import BytesIO as StrIO
else:
from io import StringIO as StrIO # NOQA
class TestParser(TestCase):
def test_main_err(self):
parser = get_argparser()
out = StrIO()
err = StrIO()
def flush():
out.truncate(0)
err.truncate(0)
with replace_attr(sys, 'stdout', out, 'stderr', err):
for raising_args, raising_reg in [
([], 'too few arguments|the following arguments are required: ext'),
(['-r'], 'expected one argument'),
(['shell', '-r'], 'expected one argument'),
(['shell', '-w'], 'expected one argument'),
(['shell', '-c'], 'expected one argument'),
(['shell', '-t'], 'expected one argument'),
(['shell', '-p'], 'expected one argument'),
(['shell', '-R'], 'expected one argument'),
(['shell', '--renderer_module'], 'expected one argument'),
(['shell', '--width'], 'expected one argument'),
(['shell', '--last_exit_code'], 'expected one argument'),
(['shell', '--last_pipe_status'], 'expected one argument'),
(['shell', '--config'], 'expected one argument'),
(['shell', '--theme_option'], 'expected one argument'),
(['shell', '--config_path'], 'expected one argument'),
(['shell', '--renderer_arg'], 'expected one argument'),
(['shell', '--jobnum'], 'expected one argument'),
(['-r', 'zsh_prompt'], 'too few arguments|the following arguments are required: ext'),
(['shell', '--last_exit_code', 'i'], 'invalid int value'),
(['shell', '--last_pipe_status', '1 i'], 'invalid <lambda> value'),
(['shell', '-R', 'abc'], 'invalid <lambda> value'),
]:
self.assertRaises(SystemExit, parser.parse_args, raising_args)
self.assertFalse(out.getvalue())
self.assertRegexpMatches(err.getvalue(), raising_reg)
flush()
def test_main_normal(self):
parser = get_argparser()
out = StrIO()
err = StrIO()
with replace_attr(sys, 'stdout', out, 'stderr', err):
for argv, expargs in [
(['shell'], {'ext': ['shell']}),
(['shell', '-r', 'zsh_prompt'], {'ext': ['shell'], 'renderer_module': 'zsh_prompt'}),
([
'shell',
'left',
'-r', 'zsh_prompt',
'--last_exit_code', '10',
'--last_pipe_status', '10 20 30',
'--jobnum=10',
'-w', '100',
'-c', 'common.term_truecolor=true',
'-c', 'common.spaces=4',
'-t', 'default.segment_data.hostname.before=H:',
'-p', '.',
'-R', 'smth={"abc":"def"}'
], {
'ext': ['shell'],
'side': 'left',
'renderer_module': 'zsh_prompt',
'last_exit_code': 10,
'last_pipe_status': [10, 20, 30],
'jobnum': 10,
'width': 100,
'config': {'common': {'term_truecolor': True, 'spaces': 4}},
'theme_option': {
'default': {
'segment_data': {
'hostname': {
'before': 'H:'
}
}
}
},
'config_path': '.',
'renderer_arg': {'smth': {'abc': 'def'}},
}),
(['shell', '-R', 'arg=true'], {'ext': ['shell'], 'renderer_arg': {'arg': True}}),
(['shell', '-t', 'default.segment_info={"hostname": {}}'], {
'ext': ['shell'],
'theme_option': {
'default': {
'segment_info': {
'hostname': {}
}
}
},
}),
(['shell', '-c', 'common={ }'], {'ext': ['shell'], 'config': {'common': {}}}),
]:
args = parser.parse_args(argv)
finish_args(args)
for key, val in expargs.items():
self.assertEqual(getattr(args, key), val)
for key, val in args.__dict__.items():
if key not in expargs:
self.assertFalse(val, msg='key {0} is {1} while it should be something false'.format(key, val))
self.assertFalse(err.getvalue() + out.getvalue(), msg='unexpected output: {0!r} {1!r}'.format(
err.getvalue(),
out.getvalue(),
))
if __name__ == '__main__':
from tests import main
main()
|
Python
| 0.000005
|
@@ -2054,80 +2054,8 @@
'),%0A
-%09%09%09%09(%5B'shell', '-R', 'abc'%5D, 'invalid %3Clambda%3E value'),%0A
%09%09%09%5D
|
1fe84191c0f67af445e0b140efe67e90ae1e4c6f
|
Use set instead of ordered dict.
|
blues/slack.py
|
blues/slack.py
|
"""
Slack Blueprint
===============
**Fabric environment:**
.. code-block:: yaml
settings:
slack:
# Single config:
endpoint: https://hooks.slack.com/... # (Required)
channels: # (Required)
- "#deploy"
username: deploybot
icon_emoji: ":rocket"
# Multiple configs:
# - endpoint: https://hooks.slack.com/... # (Required)
# channels: # (Required)
# - "#deploy"
# username: deploybot
# icon_emoji: ":rocket"
# - ...
"""
from fabric.utils import warn
from refabric.contrib import blueprints
from collections import OrderedDict
import urllib2
import json
blueprint = blueprints.get(__name__)
def notify(msg, quiet=False):
slack_config = blueprint.get('')
if isinstance(slack_config, dict):
slack_config = [slack_config]
for config in slack_config:
notify_with_config(msg, config, quiet)
def notify_with_config(msg, config, quiet):
channels = config.get('channels', [])
channel = config.get('channel', None)
# If channel is specified, add it to channels, and then run it through an
# OrderedDict, removing any duplicates.
if channel:
channels.append(channel)
channels = list(OrderedDict.fromkeys(channels))
if not channels:
warn('Empty slack channel list, skipping notification')
return False
username = config.get('username', 'deploybot')
icon_emoji = config.get('icon_emoji', ':rocket:')
endpoint = config.get('endpoint')
if not endpoint:
warn('No slack API endpoint found, skipping notification')
return False
for channel in set(channels):
send_request(endpoint=endpoint, channel=channel, username=username,
msg=msg, icon_emoji=icon_emoji, quiet=quiet)
def send_request(endpoint, channel, username, msg, icon_emoji, quiet=False):
data = json.dumps({
"channel": channel,
"username": username,
"text": msg,
"icon_emoji": icon_emoji,
})
req = urllib2.Request(endpoint, data, {'Content-Type': 'application/json'})
try:
urllib2.urlopen(req).close()
except urllib2.HTTPError as e:
if quiet:
warn(e)
else:
raise
|
Python
| 0
|
@@ -671,44 +671,8 @@
nts%0A
-from collections import OrderedDict%0A
impo
@@ -1264,64 +1264,8 @@
nel)
-%0A channels = list(OrderedDict.fromkeys(channels))
%0A%0A
|
7963e426e2d1f58105d8712c0379114d93d32b07
|
Add example with sklearn pipeline
|
examples/plot_feature_extraction_classification.py
|
examples/plot_feature_extraction_classification.py
|
Python
| 0
|
@@ -0,0 +1,2496 @@
+%22%22%22%0AUMAP as a Feature Extraction Technique for Classification%0A---------------------------------------------------------%0A%0AThe following script shows how UMAP can be used as a feature extraction%0Atechnique to improve the accuracy on a classification task. It also shows%0Ahow UMAP can be integrated in standard scikit-learn pipelines.%0A%0AThe first step is to create a dataset for a classification task, which is%0Aperformed with the function %60%60sklearn.datasets.make_classification%60%60. The%0Adataset is then split into a training set and a test set using the%0A%60%60sklearn.model_selection.train_test_split%60%60 function.%0A%0ASecond, a linear SVM is fitted on the training set. To choose the best%0Ahyperparameters automatically, a gridsearch is performed on the training set.%0AThe performance of the model is then evaluated on the test set with the%0Aaccuracy metric.%0A%0A Third, the previous step is repeated with a slight modification: UMAP is%0A used as a feature extraction technique. This small change results in a%0A substantial improvement compared to the model where raw data is used.%0A%22%22%22%0Afrom sklearn.datasets import make_classification%0Afrom sklearn.model_selection import train_test_split, GridSearchCV%0Afrom sklearn.pipeline import Pipeline%0Afrom sklearn.svm import LinearSVC%0Afrom umap import UMAP%0A%0A%0A# Make a toy dataset%0AX, y = make_classification(n_samples=1000, n_features=300, n_informative=250,%0A n_redundant=0, n_repeated=0, n_classes=2,%0A random_state=1212)%0A%0A# Split the dataset into a training set and a test set%0AX_train, X_test, y_train, y_test = train_test_split(%0A X, y, test_size=0.2, random_state=42)%0A%0A# Classification with a linear SVM%0Asvc = LinearSVC(dual=False, random_state=123)%0Aparams_grid = %7B%22C%22: %5B10**k for k in range(-3, 4)%5D%7D%0Aclf = GridSearchCV(svc, params_grid)%0Aclf.fit(X_train, y_train)%0Aprint(%22Accuracy on the test set with raw data: %7B:.3f%7D%22.format(%0A clf.score(X_test, y_test)))%0A%0A# Transformation with UMAP followed by classification with a linear SVM%0Aumap = UMAP(random_state=456)%0Apipeline = Pipeline(%5B(%22umap%22, umap),%0A (%22svc%22, svc)%5D)%0Aparams_grid_pipeline = %7B%22umap__n_neighbors%22: %5B5, 20%5D,%0A %22umap__n_components%22: %5B15, 25, 50%5D,%0A %22svc__C%22: %5B10**k for k in range(-3, 4)%5D%7D%0A%0Aclf_pipeline = GridSearchCV(pipeline, params_grid_pipeline)%0Aclf_pipeline.fit(X_train, y_train)%0Aprint(%22Accuracy on the test set with UMAP transformation: %7B:.3f%7D%22.format(%0A clf_pipeline.score(X_test, y_test)))%0A
|
|
a6bbcdd9a28b4ad3ebc5319ab849bd9116b2f0c6
|
Create 7kyu_how_many_points.py
|
Solutions/7kyu/7kyu_how_many_points.py
|
Solutions/7kyu/7kyu_how_many_points.py
|
Python
| 0.012299
|
@@ -0,0 +1,154 @@
+def get_los_angeles_points(results):%0A return sum(int(j.split(':')%5B0%5D) for i,j in results if __import__('re').fullmatch('Los%5CsAngeles%5Cs%5Ba-zA-Z%5D+$', i))%0A
|
|
824623c9f836c1591d89f7292fc1f406a1af189a
|
add a stub test for job manipulation
|
test/jobstest.py
|
test/jobstest.py
|
Python
| 0.000001
|
@@ -0,0 +1,611 @@
+#!/usr/bin/python2.4%0A#%0A# Copyright (c) 2004-2005 rpath, Inc.%0A#%0A%0Aimport testsuite%0Atestsuite.setup()%0A%0Aimport rephelp%0A%0Aclass ReleaseTest(rephelp.RepositoryHelper):%0A def testBasicAttributes(self):%0A client = self.getMintClient(%22testuser%22, %22testpass%22)%0A projectId = client.newProject(%22Foo%22, %22foo%22, %22rpath.org%22)%0A%0A release = client.newRelease(projectId, %22Test Release%22)%0A%0A job = client.startImageJob(release.getId())%0A jobs = list(client.iterJobs(releaseId = release.getId()))%0A assert(jobs%5B0%5D.getReleaseId() == release.getId())%0A%0Aif __name__ == %22__main__%22:%0A testsuite.main()%0A
|
|
af205246543fbb874ebf20b530fac04a3ba9808c
|
Add some notes to graph script
|
graph.py
|
graph.py
|
Python
| 0
|
@@ -0,0 +1,2304 @@
+from __future__ import unicode_literals%0A%0A%0Aclass Graph(object):%0A %22%22%22A class for a simple graph data structure.%22%22%22%0A def __init__(self):%0A self.graph = %7B%7D%0A%0A def __repr__(self): # Consider how we want to repr this.%0A return repr(self.graph)%0A%0A def __len__(self):%0A return len(self.graph)%0A%0A def __iter__(self):%0A return iter(self.graph)%0A%0A def __getitem__(self, index):%0A return self.graph%5Bindex%5D%0A%0A def __setitem__(self, index, value):%0A self.graph%5Bindex%5D = value%0A%0A def __delitem__(self, index): # Add cleanup%0A del self.graph%5Bindex%5D%0A%0A def add_node(self, n):%0A %22%22%22Add a new node to the graph.%22%22%22%0A if not self.has_node(n):%0A raise KeyError('Node already in graph.')%0A self%5Bn%5D = set()%0A%0A def add_edge(self, n1, n2):%0A %22%22%22Add a new edge connecting n1 to n2.%22%22%22%0A if not self.has_node(n2):%0A self.add_node(n2)%0A try:%0A self%5Bn1%5D.add(n2)%0A except KeyError:%0A self.add_node(n1)%0A self%5Bn1%5D.add(n2)%0A%0A def del_node(self, n):%0A %22%22%22Delete a node from the graph.%22%22%22%0A del self%5Bn%5D%0A for edgeset in self.graph.values(): # Move cleanup to __delitem__%0A edgeset.discard(n)%0A%0A def del_edge(self, n1, n2):%0A %22%22%22Delete the edge connecting two nodes from graph.%22%22%22%0A self%5Bn1%5D.remove(n2)%0A%0A def has_node(self, n):%0A %22%22%22Check if a given node is in the graph.%22%22%22%0A return n in self%0A%0A def nodes(self):%0A %22%22%22Return a list of all nodes in the graph.%22%22%22%0A return %5Bnode for node in self%5D%0A%0A def iter_edges(self):%0A for node in self:%0A for edge in self%5Bnode%5D:%0A yield (node, edge)%0A%0A def edges(self):%0A return list(self.iter_edges())%0A%0A def iter_neighbors(self, n):%0A for node in self:%0A if n in self%5Bnode%5D:%0A yield node%0A%0A def neighbors(self, n):%0A return self%5Bn%5D%0A%0A def adjacent(self, n1, n2):%0A %22%22%22Check if there is an edge connecting 'n1' and 'n2'.%22%22%22%0A return n2 in self%5Bn1%5D or n1 in self%5Bn2%5D%0A%0A%0A# helper start conditions for testing%0Adef helper():%0A g = Graph()%0A g.add_node(5)%0A g.add_node(10)%0A g.add_node(20)%0A g.add_edge(10, 5)%0A g.add_edge(10, 20)%0A g.add_edge(5, 10)%0A return g%0A%0A
|
|
5d554573031f2f7b60d963c587aa650a025f6c45
|
Create tutorial3.py
|
tutorial3.py
|
tutorial3.py
|
Python
| 0
|
@@ -0,0 +1,1701 @@
+%22%22%22%0Atutorial3.py%0Aby E. Dennison%0A%22%22%22%0Afrom ggame import App, RectangleAsset, ImageAsset, SoundAsset, Sprite, Sound%0Afrom ggame import LineStyle, Color%0A%0ASCREEN_WIDTH = 640%0ASCREEN_HEIGHT = 480%0A%0Agreen = Color(0x00ff00, 1)%0Ablack = Color(0, 1)%0Anoline = LineStyle(0, black)%0Abg_asset = RectangleAsset(SCREEN_WIDTH, SCREEN_HEIGHT, noline, green)%0Abg = Sprite(bg_asset, (0,0))%0A%0A# Sounds%0Apew1_asset = SoundAsset(%22sounds/pew1.mp3%22)%0Apew1 = Sound(pew1_asset)%0Apop_asset = SoundAsset(%22sounds/reappear.mp3%22)%0Apop = Sound(pop_asset)%0A# A ball! This is already in the ggame-tutorials repository%0Aball_asset = ImageAsset(%22images/orb-150545_640.png%22)%0Aball = Sprite(ball_asset, (0, 0))%0A# Original image is too big. Scale it to 1/10 its original size%0Aball.scale = 0.1%0Aball.y = 200%0A# custom attributes%0Aball.dir = 1%0Aball.go = True%0A# Sounds%0Apew1_asset = SoundAsset(%22sounds/pew1.mp3%22)%0Apew1 = Sound(pew1_asset)%0Apop_asset = SoundAsset(%22sounds/reappear.mp3%22)%0Apop = Sound(pop_asset)%0A%0A%0Adef reverse(b):%0A b.dir *= -1%0A pop.play()%0A%0A# Set up function for handling screen refresh%0Adef step():%0A if ball.go:%0A ball.x += ball.dir%0A if ball.x + ball.width %3E SCREEN_WIDTH or ball.x %3C 0:%0A ball.x -= ball.dir%0A reverse(ball)%0A%0A# Handle the space key%0Adef spaceKey(event):%0A ball.go = not ball.go%0A%0A# Handle the %22reverse%22 key%0Adef reverseKey(event):%0A reverse(ball)%0A%0A# Handle the mouse click%0Adef mouseClick(event):%0A ball.x = event.x%0A ball.y = event.y%0A pew1.play()%0A%0Amyapp = App(SCREEN_WIDTH, SCREEN_HEIGHT)%0A# Set up event handlers for the app%0Amyapp.listenKeyEvent('keydown', 'space', spaceKey)%0Amyapp.listenKeyEvent('keydown', 'r', reverseKey)%0Amyapp.listenMouseEvent('click', mouseClick)%0A%0Amyapp.run(step)%0A
|
|
bcaa60ce73134e80e11e7df709e7ba7dbc07d349
|
Add tests for systemd module
|
tests/test_systemd.py
|
tests/test_systemd.py
|
Python
| 0
|
@@ -0,0 +1,2581 @@
+#!/usr/bin/env python3%0Aimport unittest%0Afrom unittest import mock%0Afrom unittest.mock import MagicMock, patch%0A%0Afrom portinus import systemd%0A%0Aclass testSystemd(unittest.TestCase):%0A%0A def setUp(self):%0A systemd.subprocess.check_output = MagicMock(return_value=True)%0A self.unit = systemd.Unit('foo')%0A%0A%0A def testBasicCalls(self):%0A self.unit.reload()%0A self.unit.restart()%0A self.unit.stop()%0A self.unit.enable()%0A self.unit.disable()%0A%0A%0A def testRemove(self):%0A with patch('os.path.exists', MagicMock(return_value=True)) as fake_path_exists, %5C%0A patch.object(systemd.Unit, 'stop') as fake_stop, %5C%0A patch.object(systemd.Unit, 'disable') as fake_disable, %5C%0A patch('os.remove') as fake_os_remove, %5C%0A patch.object(systemd.Unit, 'reload') as fake_reload:%0A self.unit.remove()%0A fake_path_exists.assert_called_with(self.unit.service_file_path)%0A self.assertTrue(fake_stop.called)%0A self.assertTrue(fake_disable.called)%0A fake_os_remove.assert_called_with(self.unit.service_file_path)%0A self.assertTrue(fake_reload.called)%0A%0A with patch('os.path.exists', MagicMock(return_value=False)) as fake_path_exists, %5C%0A patch.object(systemd.Unit, 'stop') as fake_stop, %5C%0A patch.object(systemd.Unit, 'disable') as fake_disable, %5C%0A patch('os.remove') as fake_os_remove, %5C%0A patch.object(systemd.Unit, 'reload') as fake_reload:%0A self.unit.remove()%0A fake_path_exists.assert_called_with(self.unit.service_file_path)%0A self.assertFalse(fake_stop.called)%0A self.assertFalse(fake_disable.called)%0A fake_os_remove.assert_called_with(self.unit.service_file_path)%0A self.assertTrue(fake_reload.called)%0A%0A%0A def testCreateServiceFile(self):%0A with patch('builtins.open', mock.mock_open(), create=True) as fake_open:%0A self.unit.create_service_file()%0A fake_open.assert_called_once_with(self.unit.service_file_path, 'w')%0A%0A%0A @patch.object(systemd.Unit, 'set_content')%0A @patch.object(systemd.Unit, 'create_service_file')%0A def testEnsure(self, fake_create_service_file, fake_set_content):%0A test_content = 'qweasdzxc'%0A self.unit.ensure()%0A self.assertFalse(fake_set_content.called)%0A self.assertTrue(fake_create_service_file.called)%0A%0A self.unit.ensure(content='qwe')%0A self.assertTrue(fake_set_content.called)%0A self.assertTrue(fake_create_service_file.called)%0A
|
|
0c3107739671398de1a206cfbb7673c25c543e60
|
Update driver value in Seat model.
|
driver27/migrations/0009_populate_driver_in_seats.py
|
driver27/migrations/0009_populate_driver_in_seats.py
|
Python
| 0
|
@@ -0,0 +1,555 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Adef populate_driver_in_seats(apps, schema_editor):%0A Seat = apps.get_model(%22driver27%22, %22Seat%22)%0A for seat in Seat.objects.all():%0A driver = seat.contender.driver%0A seat.driver = driver%0A seat.save()%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('driver27', '0008_auto_20170529_2220'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(%0A populate_driver_in_seats,%0A ),%0A %5D
|
|
3b4c811f0b45f5739ce7c0d64f31eb2c2c9a7f4b
|
add battery
|
ai/machine-learning/battery/battery.py
|
ai/machine-learning/battery/battery.py
|
Python
| 0.000003
|
@@ -0,0 +1,784 @@
+from numpy import *%0A%0Adef loadData():%0A xArr, yArr = %5B%5D, %5B%5D%0A for i in open('trainingdata.txt'):%0A line = map(float, i.split(','))%0A if line%5B0%5D %3C 4:%0A xArr.append(line%5B:-1%5D)%0A yArr.append(line%5B-1%5D)%0A return xArr, yArr%0A%0Adef lineReg(xArr, yArr):%0A xMat = mat(xArr); yMat = mat(yArr).T%0A xTx = xMat.T * xMat%0A if linalg.det(xTx) == 0.0:%0A print %22this matrix is singular, cannot do inverse%22%0A return%0A w = xTx.I * (xMat.T * yMat)%0A return w%0A%0Adef main():%0A xArr, yArr = loadData()%0A w = lineReg(xArr, yArr)%0A x = float(raw_input())%0A if x %3C 4:%0A xxArr = %5Bx%5D%0A yHat = (mat(xxArr) * w)%5B0%5D%5B0%5D%0A print '%25.2f' %25 yHat%0A else:%0A print 8.00%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
023e587e28e148be3a21d4cb34a702a68ef02a0b
|
test script to list root dirs of image files
|
testdir.py
|
testdir.py
|
Python
| 0
|
@@ -0,0 +1,956 @@
+#!/usr/local/bin/python%0A%0Aimport sys%0A%0Aimport disklib.mediageom%0Aimport disklib.validity%0Aimport msfat.dir%0Aimport msfat.volume%0A%0A%0Adef main():%0A%09prog_errs = %5B %5D%0A%0A%09for path in sys.argv%5B1:%5D:%0A%09%09print path%0A%0A%09%09try:%0A%09%09%09validity = disklib.validity.read_validity_for_file(path)%0A%09%09%09with open(path, %22rb%22) as stream:%0A%09%09%09%09geometry = disklib.mediageom.DiskGeometry.from_image_size(validity.domain)%0A%09%09%09%09volume = msfat.volume.FATVolume(stream, geometry)%0A%0A%09%09%09%09for k, v in volume.get_info()._asdict().iteritems():%0A%09%09%09%09%09if isinstance(v, (int, long)):%0A%09%09%09%09%09%09sv = %220x%7B0:08X%7D%22.format(v)%0A%09%09%09%09%09else:%0A%09%09%09%09%09%09sv = repr(v)%0A%09%09%09%09%09print %22%7B0:24%7D %7B1%7D%22.format(k, sv)%0A%0A%09%09%09%09for entry in msfat.dir.read_dir(volume._open_root_dir()):%0A%09%09%09%09%09print str(entry)%0A%0A%09%09except Exception as e:%0A%09%09%09prog_errs.append((path, e))%0A%0A%09%09print %22%22%0A%0A%09if prog_errs:%0A%09%09print %22Program errors (%7B0%7D):%22.format(len(prog_errs))%0A%09%09for path, e in prog_errs:%0A%09%09%09print u%22%7B0%7D: %7B1!s%7D%22.format(path, e)%0A%0A%0Aif __name__ == '__main__':%0A%09main()%0A
|
|
338f8d95df785b49eb0c00209535bfde675b6ce9
|
Create release_jobs_from_hold.py
|
release_jobs_from_hold.py
|
release_jobs_from_hold.py
|
Python
| 0
|
@@ -0,0 +1,642 @@
+#!/usr/bin/env perl%0Ause strict;%0Ause Getopt::Long;%0A%0A%0Amy $usage = %22%5Ct%5Ct --usage $0 job_id=%3CPBS JOB ID%3E min=%3CStart of array%3E max=%3CEnd of array%3E%5Cn%22;%0Amy $command = %22releasehold -a%22;%0Amy $job_id;%0Amy $min;%0Amy $max;%0A$%5C= %22%5Cn%22;%0A%0A%0A%0Aif (! scalar(@ARGV) ) %7B%0A die $usage . scalar @ARGV;%0A%7D%0A%0AGetOptions (%22job_id=i%22 =%3E %5C$job_id,%0A %22min=i%22 =%3E %5C$min,%0A %22max=i%22 =%3E %5C$max) ;%0A%0Aif(! $job_id %7C%7C ! $min %7C%7C ! $max)%7B%0A die $usage;%0A%7D%0A%0A%0Aprint %22job=$job_id min=$min max=$max%5Cn%22;%0A%0A%0Afor(my $i = $min; $i %3C $max; $i++)%7B%0A my $call = %22$command %5C%22$job_id%22 . '%5C%5B' . $i . '%5C%5D%22';%0A print $call;%0A system($call);%0A%7D%0A
|
|
01b105376dc3ddcfe528f3bf22d89a3e82d5f4c1
|
add some negative tests for security group:
|
tempest/api/network/test_security_groups_negative.py
|
tempest/api/network/test_security_groups_negative.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base_security_groups as base
from tempest import exceptions
from tempest.test import attr
import uuid
class NegativeSecGroupTest(base.BaseSecGroupTest):
_interface = 'json'
@attr(type=['negative', 'gate'])
def test_show_non_existent_security_group(self):
non_exist_id = str(uuid.uuid4())
self.assertRaises(exceptions.NotFound, self.client.show_security_group,
non_exist_id)
@attr(type=['negative', 'gate'])
def test_show_non_existent_security_group_rule(self):
non_exist_id = str(uuid.uuid4())
self.assertRaises(exceptions.NotFound,
self.client.show_security_group_rule,
non_exist_id)
@attr(type=['negative', 'gate'])
def test_delete_non_existent_security_group(self):
non_exist_id = str(uuid.uuid4())
self.assertRaises(exceptions.NotFound,
self.client.delete_security_group,
non_exist_id
)
@attr(type=['negative', 'gate'])
def test_create_security_group_rule_with_bad_protocol(self):
group_create_body, _ = self._create_security_group()
#Create rule with bad protocol name
pname = 'bad_protocol_name'
self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
group_create_body['security_group']['id'],
protocol=pname)
@attr(type=['negative', 'gate'])
def test_create_security_group_rule_with_invalid_ports(self):
group_create_body, _ = self._create_security_group()
#Create rule with invalid ports
states = [(-16, 80, 'Invalid value for port -16'),
(80, 79, 'port_range_min must be <= port_range_max'),
(80, 65536, 'Invalid value for port 65536'),
(-16, 65536, 'Invalid value for port')]
for pmin, pmax, msg in states:
ex = self.assertRaises(exceptions.BadRequest,
self.client.create_security_group_rule,
group_create_body['security_group']['id'],
protocol='tcp',
port_range_min=pmin,
port_range_max=pmax)
self.assertIn(msg, str(ex))
class NegativeSecGroupTestXML(NegativeSecGroupTest):
_interface = 'xml'
|
Python
| 0.9995
|
@@ -674,16 +674,29 @@
cense.%0A%0A
+import uuid%0A%0A
from tem
@@ -809,28 +809,16 @@
rt attr%0A
-import uuid%0A
%0A%0Aclass
@@ -3118,16 +3118,748 @@
r(ex))%0A%0A
+ @attr(type=%5B'negative', 'smoke'%5D)%0A def test_create_additional_default_security_group_fails(self):%0A # Create security group named 'default', it should be failed.%0A name = 'default'%0A self.assertRaises(exceptions.Conflict,%0A self.client.create_security_group,%0A name)%0A%0A @attr(type=%5B'negative', 'smoke'%5D)%0A def test_create_security_group_rule_with_non_existent_security_group(self):%0A # Create security group rules with not existing security group.%0A non_existent_sg = str(uuid.uuid4())%0A self.assertRaises(exceptions.NotFound,%0A self.client.create_security_group_rule,%0A non_existent_sg)%0A%0A
%0Aclass N
|
fb03cd60646e56a789e61471f5bb6772f7035d6e
|
add test for io
|
tests/test_io.py
|
tests/test_io.py
|
Python
| 0.000001
|
@@ -0,0 +1,2200 @@
+# -*- coding: utf-8 -*-%0D%0A%0D%0Aimport os%0Aimport re%0Aimport unittest%0D%0A%0Afrom kisell.core import Origin, Pipe%0Afrom kisell import io%0D%0A%0D%0A%0D%0A_license_file_path = os.path.join(%0D%0A os.path.dirname(os.path.dirname(__file__)), 'LICENSE'%0D%0A)%0D%0A_license_file_content = None%0D%0Awith open(_license_file_path, 'r') as f:%0D%0A _license_file_content = f.read()%0D%0A%0A%0Aclass _AddLineNumber(Pipe):%0A def __init__(self):%0A super(_AddLineNumber, self).__init__()%0A%0A def _initialize(self):%0A cnt = 0%0A for x in self.upstream:%0A yield str(cnt) + x%0A cnt += 1%0A%0A%0Aclass ReadStreamTester(unittest.TestCase):%0A def setUp(self):%0A pass%0A%0A def tearDown(self):%0A pass%0A%0A def test__init__(self):%0A fin = open(_license_file_path, 'r')%0A test = io.ReadStream(fin, 100)%0A l = list(test)%0A self.assertEqual(len(l%5B0%5D), 100)%0A%0A%0Aclass FileReadStreamTester(unittest.TestCase):%0D%0A def setUp(self):%0D%0A pass%0D%0A%0D%0A def tearDown(self):%0D%0A pass%0D%0A%0A def test__init__(self):%0A test = io.FileReadStream(_license_file_path)%0A self.assertEqual(test.name, _license_file_path)%0A self.assertEqual(test.encoding, 'utf-8')%0A self.assertEqual(test.mode, 'r')%0A content = test.read()%0A self.assertEqual(content, _license_file_content)%0A%0A%0Aclass WriteStreamTester(unittest.TestCase):%0A%0A tmp_dir_path = os.path.join(os.path.dirname(__file__), 'tmp')%0A%0A def setUp(self):%0A if not os.path.exists(WriteStreamTester.tmp_dir_path):%0A os.makedirs(WriteStreamTester.tmp_dir_path)%0A%0A def tearDown(self):%0A os.removedirs(WriteStreamTester.tmp_dir_path)%0A%0A def test__init__(self):%0A tmp_name = os.path.join(WriteStreamTester.tmp_dir_path, 'init')%0A tmp_file = open(tmp_name, 'w')%0A test = io.WriteStream(tmp_file)%0A orig = Origin(re.split('%5Cs', _license_file_content))%0A (orig + test).run()%0A tmp_file.close()%0A with open(tmp_name, 'r') as tmp_fin:%0A self.assertListEqual(%5Bx.rstrip('%5Cn') for x in tmp_fin%5D,%0A re.split('%5Cs', _license_file_content))%0A os.remove(tmp_name)%0A%0A%0Aclass FileWriteStreamTester(unittest.TestCase):%0A pass%0A
|
|
64fca89a9bb3bc0cd7725f4ad2ef0924c5c97859
|
remove very large target coverage
|
lib/GATK4/fixCollectHsMetrics.py
|
lib/GATK4/fixCollectHsMetrics.py
|
Python
| 0
|
@@ -0,0 +1,1665 @@
+import argparse%0D%0Aimport sys%0D%0Aimport logging%0D%0Aimport os%0D%0Aimport random%0D%0A%0D%0ADEBUG=False%0D%0ANotDEBUG=not DEBUG%0D%0A%0D%0Aparser = argparse.ArgumentParser(description=%22fixCollectHsMetrics%22,%0D%0A formatter_class=argparse.ArgumentDefaultsHelpFormatter)%0D%0A%0D%0Aparser.add_argument('-i', '--input', action='store', nargs='?', help='Input CollectHsMetrics file', required=NotDEBUG)%0D%0Aparser.add_argument('-o', '--output', action='store', nargs='?', help=%22Output file%22, required=NotDEBUG)%0D%0A%0D%0Aargs = parser.parse_args()%0D%0A%0D%0Aif DEBUG:%0D%0A args.input = %22/scratch/cqs/PCA_scRNAseq/Exoseq/20220214_7538_CH/bwa_g4_refine_target_coverage/result/WD82458_NL_hs_metrics.txt%22%0D%0A args.output = %22/scratch/cqs/PCA_scRNAseq/Exoseq/20220214_7538_CH/bwa_g4_refine_target_coverage/result/WD82458_NL_hs_metrics.txt.fixed%22%0D%0A%0D%0Alogger = logging.getLogger('fixCollectHsMetrics')%0D%0Alogging.basicConfig(level=logging.INFO, format='%25(asctime)s - %25(name)s - %25(levelname)-8s - %25(message)s')%0D%0A%0D%0Alogger.info(%22reading %22 + args.input + %22 ...%22)%0D%0A%0D%0Awith open(args.output, %22wt%22) as fout:%0D%0A with open(args.input, %22rt%22) as fin:%0D%0A for line in fin:%0D%0A if %22PCT_TARGET_BASES_100000X%22 not in line:%0D%0A fout.write(line)%0D%0A else:%0D%0A break%0D%0A %0D%0A parts = line.split('%5Ct')%0D%0A PCT_TARGET_BASES_500X = parts.index('PCT_TARGET_BASES_500X')%0D%0A AT_DROPOUT = parts.index('AT_DROPOUT')%0D%0A del parts%5BPCT_TARGET_BASES_500X:AT_DROPOUT%5D%0D%0A fout.write(%22%5Ct%22.join(parts))%0D%0A%0D%0A line = fin.readline()%0D%0A parts = line.split('%5Ct')%0D%0A del parts%5BPCT_TARGET_BASES_500X:AT_DROPOUT%5D%0D%0A fout.write(%22%5Ct%22.join(parts))%0D%0A%0D%0A for line in fin:%0D%0A fout.write(line)%0D%0A %0D%0Alogger.info(%22done.%22)%0D%0A
|
|
ee928a52805ea8179277487e667947746985a2db
|
Create __init__.py
|
tools/dwx_zeromq_connector/v2.0.1/EXAMPLES/TEMPLATE/STRATEGIES/__init__.py
|
tools/dwx_zeromq_connector/v2.0.1/EXAMPLES/TEMPLATE/STRATEGIES/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1 @@
+%0A
|
|
8fe57fbbc5764d3e13c3513afcdb2c49d04b117e
|
Add a migration for php5-fpm pools to php7
|
src/yunohost/data_migrations/0003_php5_to_php7_pools.py
|
src/yunohost/data_migrations/0003_php5_to_php7_pools.py
|
Python
| 0
|
@@ -0,0 +1,2164 @@
+import os%0Aimport glob%0Afrom shutil import copy2%0A%0Afrom moulinette.utils.log import getActionLogger%0A%0Afrom yunohost.tools import Migration%0Afrom yunohost.service import _run_service_command%0A%0Alogger = getActionLogger('yunohost.migration')%0A%0APHP5_POOLS = %22/etc/php5/fpm/pool.d%22%0APHP7_POOLS = %22/etc/php/7.0/fpm/pool.d%22%0A%0APHP5_SOCKETS_PREFIX = %22/var/run/php5-fpm%22%0APHP7_SOCKETS_PREFIX = %22/run/php/php7.0-fpm%22%0A%0AMIGRATION_COMMENT = %22; YunoHost note : this file was automatically moved from %7B%7D%22.format(PHP5_POOLS)%0A%0A%0Aclass MyMigration(Migration):%0A %22Migrate php5-fpm 'pool' conf files to php7 stuff%22%0A%0A def migrate(self):%0A%0A # Get list of php5 pool files%0A php5_pool_files = glob.glob(%22%7B%7D/*.conf%22.format(PHP5_POOLS))%0A%0A # Keep only basenames%0A php5_pool_files = %5Bos.path.basename(f) for f in php5_pool_files%5D%0A%0A # Ignore the %22www.conf%22 (default stuff, probably don't want to touch it ?)%0A php5_pool_files = %5Bf for f in php5_pool_files if f != %22www.conf%22%5D%0A%0A for f in php5_pool_files:%0A%0A # Copy the files to the php7 pool%0A src = %22%7B%7D/%7B%7D%22.format(PHP5_POOLS, f)%0A dest = %22%7B%7D/%7B%7D%22.format(PHP7_POOLS, f)%0A copy2(src, dest)%0A%0A # Replace the socket prefix if it's found%0A c = %22sed -i -e 's@%7B%7D@%7B%7D@g' %7B%7D%22.format(PHP5_SOCKETS_PREFIX, PHP7_SOCKETS_PREFIX, dest)%0A os.system(c)%0A%0A # Also add a comment that it was automatically moved from php5%0A # (for human traceability and backward migration)%0A c = %22sed -i '1i %7B%7D' %7B%7D%22.format(MIGRATION_COMMENT, dest)%0A os.system(c)%0A%0A # Reload/restart the php pools%0A _run_service_command(%22restart%22, %22php-fpm%22)%0A%0A def backward(self):%0A%0A # Get list of php7 pool files%0A php7_pool_files = glob.glob(%22%7B%7D/*.conf%22.format(PHP7_POOLS))%0A%0A # Keep only files which have the migration comment%0A php7_pool_files = %5Bf for f in php7_pool_files if open(f).readline().strip() == MIGRATION_COMMENT%5D%0A%0A # Delete those files%0A for f in php7_pool_files:%0A os.remove(f)%0A%0A # Reload/restart the php pools%0A _run_service_command(%22restart%22, %22php-fpm%22)%0A
|
|
935c77777d9d15269d2579f001c3abd97f8635e7
|
add - module for communicating with redis.
|
flickipedia/redisio.py
|
flickipedia/redisio.py
|
Python
| 0
|
@@ -0,0 +1,1825 @@
+%22%22%22%0AModule for handling redis IO%0A%22%22%22%0A%0Aimport redis%0Afrom flickipedia.config import log%0A%0A__author__ = 'Ryan Faulkner'%0A__date__ = %222014-04-01%22%0A%0A%0Aclass DataIORedis(object):%0A %22%22%22 Class implementing data IO for Redis. %22%22%22%0A%0A DEFAULT_HOST = 'localhost'%0A DEFAULT_PORT = 6379%0A DEFAULT_DB = 0%0A%0A def __init__(self, **kwargs):%0A super(DataIORedis, self).__init__(**kwargs)%0A%0A self.conn = None%0A%0A self.host = kwargs%5B'host'%5D if kwargs.has_key('host') else %5C%0A self.DEFAULT_HOST%0A self.port = kwargs%5B'port'%5D if kwargs.has_key('port') else %5C%0A self.DEFAULT_PORT%0A self.db = kwargs%5B'db'%5D if kwargs.has_key('db') else self.DEFAULT_DB%0A%0A def connect(self, **kwargs):%0A self.conn = redis.Redis(host=self.host, port=self.port, db=self.db)%0A%0A def write(self, **kwargs):%0A if self.conn:%0A try:%0A return self.conn.set(kwargs%5B'key'%5D, kwargs%5B'value'%5D)%0A except KeyError as e:%0A log.error('Missing param -%3E %7B0%7D'.format(e.message))%0A return False%0A else:%0A log.error('No redis connection.')%0A return False%0A%0A def read(self, **kwargs):%0A if self.conn:%0A try:%0A return self.conn.get(kwargs%5B'key'%5D)%0A except KeyError as e:%0A log.error('Missing param -%3E %7B0%7D'.format(e.message))%0A return False%0A else:%0A log.error('No redis connection.')%0A return False%0A%0A def _del(self, **kwargs):%0A if self.conn:%0A try:%0A return self.conn.delete(kwargs%5B'key'%5D)%0A except KeyError as e:%0A log.error('Missing param -%3E %7B0%7D'.format(e.message))%0A return False%0A else:%0A log.error('No redis connection.')%0A return False%0A
|
|
637165eef82d40abc240b1dc40edddabecbb6af3
|
Create new package. (#6503)
|
var/spack/repos/builtin/packages/r-biocstyle/package.py
|
var/spack/repos/builtin/packages/r-biocstyle/package.py
|
Python
| 0
|
@@ -0,0 +1,1949 @@
+##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass RBiocstyle(RPackage):%0A %22%22%22Provides standard formatting styles for Bioconductor PDF and HTML%0A documents. Package vignettes illustrate use and functionality.%22%22%22%0A%0A homepage = %22https://www.bioconductor.org/packages/BiocStyle/%22%0A url = %22https://git.bioconductor.org/packages/BiocStyle%22%0A%0A version('2.4.1', git='https://git.bioconductor.org/packages/BiocStyle', commit='ef10764b68ac23a3a7a8ec3b6a6436187309c138')%0A%0A depends_on('r-bookdown', type=('build', 'run'))%0A depends_on('r-knitr@1.12:', type=('build', 'run'))%0A depends_on('r-rmarkdown@1.2:', type=('build', 'run'))%0A depends_on('r-yaml', type=('build', 'run'))%0A depends_on('r@3.4.0:3.4.9', when='@2.4.1')%0A
|
|
b577b1b54cf8ba2f1b9184bda270e0bcd9613ef5
|
Create wordcount-mapper.py
|
wordcount-mapper.py
|
wordcount-mapper.py
|
Python
| 0.000004
|
@@ -0,0 +1,383 @@
+#!/usr/bin/env python%0A%0Aimport sys%0A%0Afor line in sys.stdin: # Input is read from STDIN and the output of this file is written into STDOUT%0A line = line.strip() # remove leading and trailing whitespace%0A words = line.split() # split the line into words%0A %0A for word in words: %0A print '%25s%5Ct%25s' %25 (word, 1) #Print all words (key) individually with the value 1%0A
|
|
86304649c06c15003df956c894560c8cc3b8e542
|
fix stream name
|
src/streamlink/plugins/douyutv.py
|
src/streamlink/plugins/douyutv.py
|
import re
import time
import hashlib
from requests.adapters import HTTPAdapter
from streamlink.plugin import Plugin
from streamlink.plugin.api import http, validate, useragents
from streamlink.stream import HTTPStream, HLSStream, RTMPStream
API_URL = "https://capi.douyucdn.cn/api/v1/{0}&auth={1}"
VAPI_URL = "https://vmobile.douyu.com/video/getInfo?vid={0}"
API_SECRET = "Y237pxTx2In5ayGz"
SHOW_STATUS_ONLINE = 1
SHOW_STATUS_OFFLINE = 2
STREAM_WEIGHTS = {
"middle": 720,
"middle2": 720,
"source": 1080
}
_url_re = re.compile(r"""
http(s)?://
(?:
(?P<subdomain>.+)
\.
)?
douyu.com/
(?:
show/(?P<vid>[^/&?]+)|
(?P<channel>[^/&?]+)
)
""", re.VERBOSE)
_room_id_re = re.compile(r'"room_id\\*"\s*:\s*(\d+),')
_room_id_alt_re = re.compile(r'data-onlineid=(\d+)')
_room_id_schema = validate.Schema(
validate.all(
validate.transform(_room_id_re.search),
validate.any(
None,
validate.all(
validate.get(1),
validate.transform(int)
)
)
)
)
_room_id_alt_schema = validate.Schema(
validate.all(
validate.transform(_room_id_alt_re.search),
validate.any(
None,
validate.all(
validate.get(1),
validate.transform(int)
)
)
)
)
_room_schema = validate.Schema(
{
"data": validate.any(None, {
"show_status": validate.all(
validate.text,
validate.transform(int)
),
"rtmp_url": validate.text,
"rtmp_live": validate.text,
"hls_url": validate.text,
"rtmp_multi_bitrate": validate.all(
validate.any([], {
validate.text: validate.text
}),
validate.transform(dict)
)
})
},
validate.get("data")
)
_vapi_schema = validate.Schema(
{
"data": validate.any(None, {
"video_url": validate.text
})
},
validate.get("data")
)
class Douyutv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
@classmethod
def stream_weight(cls, stream):
if stream in STREAM_WEIGHTS:
return STREAM_WEIGHTS[stream], "douyutv"
return Plugin.stream_weight(stream)
def _get_streams(self):
match = _url_re.match(self.url)
subdomain = match.group("subdomain")
http.verify = False
http.mount('https://', HTTPAdapter(max_retries=99))
if subdomain == 'v':
vid = match.group("vid")
headers = {
"User-Agent": useragents.ANDROID,
"X-Requested-With": "XMLHttpRequest"
}
res = http.get(VAPI_URL.format(vid), headers=headers)
room = http.json(res, schema=_vapi_schema)
yield "source", HLSStream(self.session, room["video_url"])
return
channel = match.group("channel")
try:
channel = int(channel)
except ValueError:
channel = http.get(self.url, schema=_room_id_schema)
if channel is None:
channel = http.get(self.url, schema=_room_id_alt_schema)
http.headers.update({'User-Agent': useragents.ANDROID})
cdns = ["ws", "tct", "ws2", "dl"]
ts = int(time.time())
suffix = "room/{0}?aid=androidhd1&cdn={1}&client_sys=android&time={2}".format(channel, cdns[0], ts)
sign = hashlib.md5((suffix + API_SECRET).encode()).hexdigest()
res = http.get(API_URL.format(suffix, sign))
room = http.json(res, schema=_room_schema)
if not room:
self.logger.info("Not a valid room url.")
return
if room["show_status"] != SHOW_STATUS_ONLINE:
self.logger.info("Stream currently unavailable.")
return
url = room["hls_url"]
yield "source", HLSStream(self.session, url)
url = "{room[rtmp_url]}/{room[rtmp_live]}".format(room=room)
if 'rtmp:' in url:
stream = RTMPStream(self.session, {
"rtmp": url,
"live": True
})
yield "source", stream
else:
yield "source", HTTPStream(self.session, url)
for name, url in room["rtmp_multi_bitrate"].items():
url = "{room[rtmp_url]}/{url}".format(room=room, url=url)
if 'rtmp:' in url:
stream = RTMPStream(self.session, {
"rtmp": url,
"live": True
})
yield name, stream
else:
yield name, HTTPStream(self.session, url)
__plugin__ = Douyutv
|
Python
| 0.000056
|
@@ -463,33 +463,13 @@
%22m
-iddle%22: 720,%0A %22middle2
+edium
%22: 7
@@ -4500,24 +4500,89 @@
m, url=url)%0A
+ if 'middle' in name:%0A name = %22medium%22%0A
|
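For reference, the MD5 request-signing scheme that _get_streams above relies on, restated as a standalone sketch (URL template and secret are copied from the plugin; the helper name is illustrative):

import hashlib
import time

API_SECRET = "Y237pxTx2In5ayGz"  # shared secret, as defined in the plugin above

def signed_room_url(channel, cdn="ws"):
    # Hash the query suffix together with the secret; the hex digest
    # becomes the auth parameter of the final API URL.
    ts = int(time.time())
    suffix = "room/{0}?aid=androidhd1&cdn={1}&client_sys=android&time={2}".format(channel, cdn, ts)
    sign = hashlib.md5((suffix + API_SECRET).encode()).hexdigest()
    return "https://capi.douyucdn.cn/api/v1/{0}&auth={1}".format(suffix, sign)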
79a0302ed3cebc0b93775824f6bfa5ce17bdb371
|
Create j34all_possible.py
|
j34all_possible.py
|
j34all_possible.py
|
Python
| 0.00001
|
@@ -0,0 +1,2574 @@
+import itertools%0A%0Aclass ListOps():%0A %0A def __init__(self, length=9, total=100):%0A self.length = length%0A self.total = total%0A temp_list = %5B%5D%0A temp_value = %5B0, 1, -1%5D%0A temp_value = tuple(temp_value)%0A temp_list.append(temp_value)%0A for i in range(2, self.length):%0A temp_value = %5B0, i, -i, (10 * (i - 1)) + i, - ((10 * (i - 1)) + i)%5D%0A temp_value = tuple(temp_value)%0A temp_list.append(temp_value)%0A temp_value = %5Bself.length, - (self.length),%0A (10 * (self.length - 1)) + self.length, -%0A ((10 * (self.length - 1)) + self.length)%5D%0A temp_list.append(temp_value)%0A self.poss_values = tuple(temp_list)%0A self.valid_list = %5B%5D%0A self.final_list = %5B%5D%0A %0A def _make_full_list(self):%0A self.full_list = list(itertools.product(*self.poss_values))%0A%0A def _is_valid(self, num_list):%0A for i in range(1, len(num_list)):%0A if abs(num_list%5B-(i)%5D) %3E 10 and abs(num_list%5B-(i) -1%5D) %3E 0:%0A return False%0A elif abs(num_list%5B-(i)%5D) %3C 10 and num_list%5B-(i) -1%5D == 0:%0A return False%0A return True%0A %0A def _make_valid_list(self):%0A for i in range(0, len(self.full_list)):%0A if self._is_valid(self.full_list%5Bi%5D):%0A self.valid_list.append(self.full_list%5Bi%5D)%0A%0A def _equals_total(self, num_list):%0A total = 0%0A for i in range(0, len(num_list)):%0A total += num_list%5Bi%5D%0A if total == self.total:%0A return True%0A else:%0A return False %0A %0A def _make_final_list(self):%0A for i in range(0, len(self.valid_list)):%0A if self._equals_total(self.valid_list%5Bi%5D):%0A self.final_list.append(self.valid_list%5Bi%5D)%0A%0A def _print_final_list(self):%0A for i in range(len(self.final_list)):%0A for j in range(len(self.final_list%5Bi%5D)):%0A if self.final_list%5Bi%5D%5Bj%5D == 0:%0A print('', end = '')%0A elif self.final_list%5Bi%5D%5Bj%5D %3E 0:%0A print('+', self.final_list%5Bi%5D%5Bj%5D, ' ', end = '')%0A elif self.final_list%5Bi%5D%5Bj%5D %3C 0:%0A print('-', abs(self.final_list%5Bi%5D%5Bj%5D), ' ', end = '')%0A print('')%0A%0A def run(self):%0A self._make_full_list()%0A self._make_valid_list()%0A self._make_final_list()%0A self._print_final_list()%0A%0Adef main():%0A list_obj = ListOps()%0A list_obj.run() %0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
be1b50a9780bc8d2114b8660687fd72bb9472949
|
Implement new Lin similarity based query expansion in query_processing/wordnet_expansion.py
|
src/query_processing/wordnet_expansion.py
|
src/query_processing/wordnet_expansion.py
|
Python
| 0.000052
|
@@ -0,0 +1,1684 @@
+from nltk.corpus import wordnet as wn%0Afrom nltk.corpus import wordnet_ic%0Asemcor_ic = wordnet_ic.ic('ic-semcor.dat')%0Afrom nltk.corpus import lin_thesaurus as thes%0Aimport heapq%0A%0A# specifies number of top-scoring synonyms to use%0ANUMBER_SYNONYMS = 3%0A%0A%0Adef expand_query(query):%0A%09%0A%09# add weight of 1 for each original term%0A%09weighted_terms = %7B%7D%0A%09for term in query:%0A%09%09weighted_terms%5Bterm%5D = 1%0A%0A%09# should do some stopword removal here%0A%0A%09for word in query:%0A%09%09syns = thes.scored_synonyms(word)%0A%09%09if (thes) %3E NUMBER_SYNONYMS:%0A%09%09%09all_syns = %5B%5D%0A%09%09%09# syn list is in the form ((POS, %5Bsyn, syn, syn%5D), (POS, %5Bsyn, syn, syn%5D) ...)%0A%09%09%09# concatenate all synonyms from the various lists %0A%09%09%09for element in syns:%0A%09%09%09%09all_syns.extend(element%5B1%5D)%0A%09%09%09# get n-best synonyms according to Lin similarity%0A%09%09%09top = heapq.nlargest(NUMBER_SYNONYMS, all_syns, key = lambda k: k%5B1%5D)%0A%09%09else:%0A%09%09%09top = syns%0A%09%09# add top synonyms to weighted term dict%0A%09%09for element in top:%0A%09%09%09weighted_terms%5Belement%5B0%5D%5D = element%5B1%5D%0A%0A%09return weighted_terms%0A%0A%0A# functions below are old - probably won't want to use them%0A%0Adef get_synset_similarity(t1, t2):%0A%09%22%22%22%0A%09takes a pair of query terms and returns their synset similarity: the %0A%09overlap between the glosses of all the synsets of both terms%0A%09%22%22%22%0A%0A%09overlap = (get_synset_glosses(t1).intersection(get_synset_glosses(t2)))%0A%09print overlap%0A%09return len(overlap)%0A%0Adef get_synset_glosses(t):%0A%09%22%22%22%0A%09returns a set of the glosses for each of a term's synsets%0A%09%22%22%22%0A%0A%09glosses = set()%0A%09for synset in wn.synsets(t):%0A%09%09# not sure how to get glosses.. seems like a good approximation%0A%09%09glosses.add(synset.lemma_names%5B0%5D)%0A%0A%09return glosses%0A%09%0A%0A## debug code%0A%0Aget_synset_similarity(%22dog%22, %22domestic_dog%22)%0A%0A%0A
|
|
1b810ec3fb2bdd241d831a3167d9ed8051fa29ca
|
Add to repo.
|
mapIt.py
|
mapIt.py
|
Python
| 0
|
@@ -0,0 +1,382 @@
+#!/usr/bin/python3%0A# mapIt.py - Launches a map in the browser using an address from the%0A# command line or clipboard.%0A%0Aimport webbrowser%0Aimport sys%0A%0A%0Aif len(sys.argv) %3E 1:%0A # Get address from command line.%0A address = ' '.join(sys.argv%5B1:%5D)%0Aelse:%0A # Get address from clipboard.%0A address = pyperclip.paste()%0A%0Awebbrowser.open('https://www.google.com/maps/place/' + address)%0A
|
|
3559faeceff06aee82409ca22158223aff696b07
|
Create MajorityElement_004.py
|
leetcode/169-Majority-Element/MajorityElement_004.py
|
leetcode/169-Majority-Element/MajorityElement_004.py
|
Python
| 0.000001
|
@@ -0,0 +1,403 @@
+class Solution(object):%0A def majorityElement(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: int%0A %22%22%22%0A num, cnt = nums%5B0%5D, 1%0A for i in xrange(1, len(nums)):%0A if nums%5Bi%5D == num:%0A cnt += 1%0A elif cnt == 0:%0A num = nums%5Bi%5D%0A cnt = 1%0A else:%0A cnt -= 1%0A return num%0A
|
|
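The solution above is the Boyer-Moore majority vote: keep one candidate and a counter, where matching elements reinforce the candidate, an exhausted counter installs a new one, and anything else cancels a vote. A Python 3 rendering with an illustrative sanity check:

def majority_element(nums):
    num, cnt = nums[0], 1
    for x in nums[1:]:
        if x == num:
            cnt += 1          # same as the candidate: reinforce it
        elif cnt == 0:
            num, cnt = x, 1   # counter exhausted: adopt a new candidate
        else:
            cnt -= 1          # different element: cancel one vote
    return num

assert majority_element([2, 2, 1, 1, 2, 2, 3]) == 2  # 2 holds 4 of 7 slots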
ba0d2ed9373df05eae280f8664214decddbd755c
|
add basic drawing tests for svg
|
kiva/tests/test_svg_drawing.py
|
kiva/tests/test_svg_drawing.py
|
Python
| 0
|
@@ -0,0 +1,752 @@
+import contextlib%0Aimport StringIO%0Aimport unittest%0Afrom xml.etree import ElementTree%0A%0Afrom kiva.tests.drawing_tester import DrawingTester%0Afrom kiva.svg import GraphicsContext%0A%0A%0Aclass TestSVGDrawing(DrawingTester, unittest.TestCase):%0A%0A def create_graphics_context(self, width, height):%0A return GraphicsContext((width, height))%0A%0A @contextlib.contextmanager%0A def draw_and_check(self):%0A yield%0A filename = %22%7B0%7D.svg%22.format(self.filename)%0A self.gc.save(filename)%0A tree = ElementTree.parse(filename)%0A elements = %5Belement for element in tree.iter()%5D%0A if not len(elements) in %5B4, 7%5D:%0A self.fail('The expected number of elements was not found')%0A%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
|
|
993d606bc69eef1ef821b121b96cf4076d73b4f4
|
add logging on downloader start
|
lbrynet/lbryfilemanager/EncryptedFileDownloader.py
|
lbrynet/lbryfilemanager/EncryptedFileDownloader.py
|
"""
Download LBRY Files from LBRYnet and save them to disk.
"""
import logging
from zope.interface import implements
from twisted.internet import defer
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.core.StreamDescriptor import StreamMetadata
from lbrynet.lbryfile.client.EncryptedFileDownloader import EncryptedFileSaver
from lbrynet.lbryfile.client.EncryptedFileDownloader import EncryptedFileDownloader
from lbrynet.lbryfilemanager.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.interfaces import IStreamDownloaderFactory
from lbrynet.lbryfile.StreamDescriptor import save_sd_info
log = logging.getLogger(__name__)
class ManagedEncryptedFileDownloader(EncryptedFileSaver):
STATUS_RUNNING = "running"
STATUS_STOPPED = "stopped"
STATUS_FINISHED = "finished"
def __init__(self, rowid, stream_hash, peer_finder, rate_limiter,
blob_manager, stream_info_manager, lbry_file_manager,
payment_rate_manager, wallet, download_directory,
upload_allowed, file_name=None):
EncryptedFileSaver.__init__(self, stream_hash, peer_finder,
rate_limiter, blob_manager,
stream_info_manager,
payment_rate_manager, wallet,
download_directory,
upload_allowed, file_name)
self.sd_hash = None
self.txid = None
self.nout = None
self.uri = None
self.claim_id = None
self.rowid = rowid
self.lbry_file_manager = lbry_file_manager
self._saving_status = False
@property
def saving_status(self):
return self._saving_status
@defer.inlineCallbacks
def restore(self):
sd_hash = yield self.stream_info_manager.get_sd_blob_hashes_for_stream(self.stream_hash)
if sd_hash:
self.sd_hash = sd_hash[0]
else:
raise Exception("No sd hash for stream hash %s" % self.stream_hash)
claim_metadata = yield self.wallet.get_claim_metadata_for_sd_hash(self.sd_hash)
if claim_metadata is None:
raise Exception("A claim doesn't exist for sd %s" % self.sd_hash)
self.uri, self.txid, self.nout = claim_metadata
self.claim_id = yield self.wallet.get_claimid(self.uri, self.txid, self.nout)
status = yield self.lbry_file_manager.get_lbry_file_status(self)
if status == ManagedEncryptedFileDownloader.STATUS_RUNNING:
# start returns self.finished_deferred
# which fires when we've finished downloading the file
# and we don't want to wait for the entire download
self.start()
elif status == ManagedEncryptedFileDownloader.STATUS_STOPPED:
defer.returnValue(False)
elif status == ManagedEncryptedFileDownloader.STATUS_FINISHED:
self.completed = True
defer.returnValue(True)
else:
            raise Exception("Unknown status for stream %s: %s" % (self.stream_hash, status))
@defer.inlineCallbacks
def stop(self, err=None, change_status=True):
log.debug('Stopping download for %s', self.sd_hash)
# EncryptedFileSaver deletes metadata when it's stopped. We don't want that here.
yield EncryptedFileDownloader.stop(self, err=err)
if change_status is True:
status = yield self._save_status()
defer.returnValue(status)
@defer.inlineCallbacks
def status(self):
blobs = yield self.stream_info_manager.get_blobs_for_stream(self.stream_hash)
blob_hashes = [b[0] for b in blobs if b[0] is not None]
completed_blobs = yield self.blob_manager.completed_blobs(blob_hashes)
num_blobs_completed = len(completed_blobs)
num_blobs_known = len(blob_hashes)
if self.completed:
status = "completed"
elif self.stopped:
status = "stopped"
else:
status = "running"
defer.returnValue(EncryptedFileStatusReport(self.file_name, num_blobs_completed,
num_blobs_known, status))
@defer.inlineCallbacks
def _start(self):
yield EncryptedFileSaver._start(self)
sd_hash = yield self.stream_info_manager.get_sd_blob_hashes_for_stream(self.stream_hash)
if len(sd_hash):
self.sd_hash = sd_hash[0]
maybe_metadata = yield self.wallet.get_claim_metadata_for_sd_hash(self.sd_hash)
if maybe_metadata:
name, txid, nout = maybe_metadata
self.uri = name
self.txid = txid
self.nout = nout
status = yield self._save_status()
defer.returnValue(status)
def _get_finished_deferred_callback_value(self):
if self.completed is True:
return "Download successful"
else:
return "Download stopped"
@defer.inlineCallbacks
def _save_status(self):
self._saving_status = True
if self.completed is True:
status = ManagedEncryptedFileDownloader.STATUS_FINISHED
elif self.stopped is True:
status = ManagedEncryptedFileDownloader.STATUS_STOPPED
else:
status = ManagedEncryptedFileDownloader.STATUS_RUNNING
status = yield self.lbry_file_manager.change_lbry_file_status(self, status)
self._saving_status = False
defer.returnValue(status)
def _get_progress_manager(self, download_manager):
return FullStreamProgressManager(self._finished_downloading,
self.blob_manager, download_manager)
class ManagedEncryptedFileDownloaderFactory(object):
implements(IStreamDownloaderFactory)
def __init__(self, lbry_file_manager):
self.lbry_file_manager = lbry_file_manager
def can_download(self, sd_validator):
# TODO: add a sd_validator for non live streams, use it
return True
@defer.inlineCallbacks
def make_downloader(self, metadata, options, payment_rate_manager, download_directory=None,
file_name=None):
data_rate = options[0]
upload_allowed = options[1]
stream_hash = yield save_sd_info(self.lbry_file_manager.stream_info_manager,
metadata.validator.raw_info)
if metadata.metadata_source == StreamMetadata.FROM_BLOB:
yield self.lbry_file_manager.save_sd_blob_hash_to_stream(stream_hash,
metadata.source_blob_hash)
lbry_file = yield self.lbry_file_manager.add_lbry_file(stream_hash, payment_rate_manager,
data_rate, upload_allowed,
download_directory, file_name)
defer.returnValue(lbry_file)
@staticmethod
def get_description():
return "Save the file to disk"
|
Python
| 0.000001
|
@@ -4287,32 +4287,97 @@
f _start(self):%0A
+ log.info('Starting Downloader for %25s', self.stream_hash)%0A
yield En
@@ -4872,32 +4872,113 @@
._save_status()%0A
+ log.info('Set Downloader status for %25s to %25s', self.stream_hash, status)%0A
defer.re
|
911baa4f700c34b2c2c3de8239a0fee60c12f1e9
|
Create db.py
|
www/transwarp/db.py
|
www/transwarp/db.py
|
Python
| 0.000002
|
@@ -0,0 +1 @@
+%0A
|
|
c51d6fd5d12fd22391e792f8bb792a48b5bcda04
|
Create yt-search-filter.py
|
yt-search-filter.py
|
yt-search-filter.py
|
Python
| 0.000002
|
@@ -0,0 +1,2058 @@
+%22%22%22This program is designed to facilitate rapidly%0Afinding a video and its link on YouTube. Instructions for use:%0AInstall elementtree and gdata 2.0 APIs.%0ARun program through command prompt of choice and enter a query%0Ato be searched on YouTube. Specific queries work best.%0AThen enter a second query to filter the results through.%0AThis is because YouTube delivers results with the query not%0Anecessarily in the title. One word queries on this part work best.%0AIf an output.txt file is in the same directory as the program,%0Athe results satisfying the paramaters will be added to the file.%22%22%22%0A%0Aimport elementtree, httplib, urllib, gdata.youtube, gdata.youtube.service%0A%0ASortedYTFeed = %7B%7D%0Aresult = %7B%7D%0A%0A# Functions take YouTube query and process them into a dictionary%0A# ----------------------------------------------------------------%0Adef UpdateSortedFeed(entry):%0A SortedYTFeed.update(%7Bentry.media.title.text: entry.GetSwfUrl()%7D)%0A%09%09%0Adef LoopThroughEntries(feed):%0A for entry in feed.entry:%0A UpdateSortedFeed(entry)%0A%0Adef YouTubeSearch(search_terms):%0A yt_service = gdata.youtube.service.YouTubeService()%0A query = gdata.youtube.service.YouTubeVideoQuery()%0A query.vq = search_terms%0A query.orderby = 'relevance'%0A query.max_results = 50%0A feed = yt_service.YouTubeQuery(query)%0A LoopThroughEntries(feed)%0A# ----------------------------------------------------------------%0A%0A# Basic search function accepting dictionaries%0Adef search(Websites, query):%0A for i in Websites:%0A for j in range(len(i)):%0A if i%5Bj:j + len(query)%5D.lower() == query.lower():%0A%09result.update(%7Bi: Websites%5Bi%5D%7D)%0A %0AYouTubeSearch(raw_input(%22Enter query for YouTube search:%5Cn%3E %22))%0A%0ASortedYTFeed = %7Bkey: value for key, value in SortedYTFeed.items() if key != %22https://youtube.com/devicesupport%22%7D%0A%0Asearch(SortedYTFeed, raw_input(%22Enter query for search refinement:%5Cn%3E %22))%0A%0A# Writes to file to make large results more managable%0Awith open(%22output.txt%22, %22r+%22) as output:%0A # OVERWRITES OUTPUT FILE ON EACH RUN%0A output.truncate()%0A for i in result:%0A output.write(i + %22 - %22 + result%5Bi%5D + %22%5Cn%5Cn%22)%0A
|
|
fd1759b05c35d45bb6bf289f5267415e8c2a447e
|
Add missing superclass
|
rbuild/internal/rbuilder/rbuildercommand.py
|
rbuild/internal/rbuilder/rbuildercommand.py
|
Python
| 0.999946
|
@@ -0,0 +1,1891 @@
+#%0A# Copyright (c) 2008 rPath, Inc.%0A#%0A# This program is distributed under the terms of the Common Public License,%0A# version 1.0. A copy of this license should have been distributed with this%0A# source file in a file called LICENSE. If it is not present, the license%0A# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# without any warranty; without even the implied warranty of merchantability%0A# or fitness for a particular purpose. See the Common Public License for%0A# full details.%0A#%0A%0Aimport sys%0Aimport urllib%0Aimport urlparse%0Afrom rbuild.facade import rbuilderfacade%0Afrom rbuild.pluginapi import command%0A%0A%0Aclass RbuilderCommand(command.BaseCommand):%0A %22%22%22%0A Base class for rBuilder commands.%0A%0A Handles backwards-compatibility for some command-line options.%0A %22%22%22%0A%0A requireConfig = False%0A%0A def processLocalConfigOptions(self, rbuildConfig, argSet):%0A %22%22%22%0A Tweak the serverUrl option so URLs that used to work with the%0A old client continue to work with rBuild.%0A %22%22%22%0A uri = rbuildConfig.serverUrl%0A if not uri:%0A return%0A scheme, netloc, path, query, fragment = urlparse.urlsplit(uri)%0A%0A pathParts = path.split('/')%0A if 'xmlrpc-private' in pathParts:%0A # Remove xmlrpc-private; rbuild expects it not to be there%0A # but the old client needed it.%0A del pathParts%5BpathParts.index('xmlrpc-private'):%5D%0A path = '/'.join(pathParts)%0A%0A userPart, hostPart = urllib.splituser(netloc)%0A if userPart is not None:%0A user, password = urllib.splitpasswd(userPart)%0A rbuildConfig%5B'user'%5D = (user, password)%0A%0A # Re-form URI sans user part%0A uri = urlparse.urlunsplit((scheme, hostPart, path, query, fragment))%0A rbuildConfig%5B'serverUrl'%5D = uri%0A
|
|
5e005fab08da740a027dcc23ba1b53abc3efaec4
|
add missing indices
|
ziggurat_foundations/migrations/versions/613e7c11dead_create_indices_on_resource_owners.py
|
ziggurat_foundations/migrations/versions/613e7c11dead_create_indices_on_resource_owners.py
|
Python
| 0.000032
|
@@ -0,0 +1,515 @@
+%22%22%22create indices on resource owners%0A%0ARevision ID: 613e7c11dead%0ARevises: b5e6dd3449dd%0ACreate Date: 2018-02-15 11:51:29.659352%0A%0A%22%22%22%0Afrom __future__ import unicode_literals%0A%0A# revision identifiers, used by Alembic.%0Arevision = '613e7c11dead'%0Adown_revision = 'b5e6dd3449dd'%0A%0Afrom alembic import op%0A%0Adef upgrade():%0A op.create_index(op.f('ix_resources_owner_group_id'), 'resources', %5B'owner_group_id'%5D)%0A op.create_index(op.f('ix_resources_owner_user_id'), 'resources', %5B'owner_user_id'%5D)%0A%0Adef downgrade():%0A pass%0A
|
|
1b5a8307fd816c935f197993d1ead07cffc01892
|
add simple consumer
|
samsa/rdsamsa/consumer.py
|
samsa/rdsamsa/consumer.py
|
Python
| 0
|
@@ -0,0 +1,2438 @@
+from collections import namedtuple%0Afrom copy import copy%0Aimport logging%0A%0Aimport rd_kafka%0Afrom samsa import abstract%0A%0A%0Alogger = logging.getLogger(__name__)%0A%0A%0AMessage = namedtuple(%22Message%22, %5B%22topic%22, %22payload%22, %22key%22, %22offset%22%5D)%0A# TODO %5E%5E namedtuple is just a placeholder thingy until we've fleshed out%0A# samsa.common.Message etc%0A%0A%0Aclass Consumer(abstract.Consumer):%0A%0A def __init__(self, client, topic, partitions=None):%0A if isinstance(topic, basestring):%0A topic = client%5Btopic%5D%0A self._topic = topic%0A self._partitions = partitions or self.topic.partitions # ie all%0A%0A config, topic_config = self._configure()%0A rdk_consumer = rd_kafka.Consumer(config)%0A self.rdk_topic = rdk_consumer.open_topic(self.topic.name, topic_config)%0A self.rdk_queue = rdk_consumer.new_queue()%0A for p in self.partitions:%0A if not isinstance(p, int):%0A p = p.id%0A self.rdk_queue.add_toppar(self.rdk_topic, p, start_offset=0)%0A # FIXME %5E%5E change python-librdkafka to provide default for offset%0A # (which should probably be OFFSET_STORED)%0A%0A # Note that this %5E%5E uses a new rdk_consumer handle for every instance;%0A # this avoids the confusion of not being allowed a second reader on%0A # the same toppar (a restriction python-librdkafka would impose if%0A # we'd use a common rdk_consumer). The extra overhead should be%0A # acceptable for most uses.%0A%0A def _configure(self):%0A config = copy(self.topic.cluster.config)%0A topic_config = %7B%7D # TODO where do we expose this?%0A # TODO config.update( ...stuff like group.id ...)%0A%0A return config, topic_config%0A%0A @property%0A def topic(self):%0A return self._topic%0A%0A @property%0A def partitions(self):%0A return self._partitions # TODO check if Partitions or ints are expected%0A%0A def __iter__(self):%0A raise NotImplementedError%0A # TODO implement StopIteration in python-librdkafka%0A%0A def consume(self, timeout=1):%0A msg = self.rdk_queue.consume(timeout_ms=1000 * timeout)%0A return None if msg is None else Message(self.topic.name,%0A msg.key%5B:%5D,%0A msg.payload%5B:%5D,%0A msg.offset)%0A # XXX copy key/payload to native str in python-librdkafka instead?%0A
|
|
8ae6a00e8cf851b46694cd4ab28a827d27fc8d65
|
add contextual help for new select field feature
|
ForgeTracker/forgetracker/widgets/admin_custom_fields.py
|
ForgeTracker/forgetracker/widgets/admin_custom_fields.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import ew as ew_core
import ew.jinja2_ew as ew
from allura.lib.widgets import form_fields as ffw
from allura.lib.widgets import forms as f
from pylons import tmpl_context as c
from forgetracker import model
from formencode import validators as fev
class MilestonesAdmin(ffw.SortableTable):
defaults=dict(
ffw.SortableTable.defaults,
button=ffw.AdminField(field=ew.InputField(
css_class='add', field_type='button',
value='New Milestone')),
empty_msg='No milestones have been created.',
nonempty_msg='Drag and drop the milestones to reorder.',
repetitions=0)
fields = [
ew.HiddenField(name='old_name'),
ew.Checkbox(name='complete', show_label=True, suppress_label=True),
ew.TextField(name='name',
attrs={'style':'width: 80px'}),
ffw.DateField(name='due_date',
attrs={'style':'width: 80px'}),
ffw.AutoResizeTextarea(
name='description',
attrs={'style':'height:1em; width: 150px'}),
ew.InputField(
label='Delete',
field_type='button',
attrs={'class':'delete', 'value':'Delete'}),
]
button = ew.InputField(
css_class='add', field_type='button', value='New Milestone')
def prepare_context(self, context):
response = super(MilestonesAdmin, self).prepare_context(context)
if 'value' in response:
for milestone_data in response['value']:
if 'name' in milestone_data:
milestone_data['old_name'] = milestone_data['name']
return response
def resources(self):
for r in super(MilestonesAdmin, self).resources(): yield r
yield ew.CSSScript('''div.state-field table{ width: 700px; }''')
class CustomFieldAdminDetail(ffw.StateField):
template='jinja:forgetracker:templates/tracker_widgets/custom_field_admin_detail.html'
defaults=dict(
ffw.StateField.defaults,
selector=ffw.AdminField(field=ew.SingleSelectField(
name='type',
options=[
ew.Option(py_value='string', label='Text'),
ew.Option(py_value='number', label='Number'),
ew.Option(py_value='boolean', label='Boolean'),
ew.Option(py_value='select', label='Select'),
ew.Option(py_value='milestone', label='Milestone'),
ew.Option(py_value='user', label='User'),
],
)),
states=dict(
select=ffw.FieldCluster(
fields=[
ffw.AdminField(field=ew.TextField(name='options',
label='Options (separate with spaces; prefix with * to set a default)',
)) ],
show_labels=False),
milestone=ffw.FieldCluster(
# name='milestones',
fields=[ MilestonesAdmin(name='milestones') ])
))
class CustomFieldAdmin(ew.CompoundField):
template='jinja:forgetracker:templates/tracker_widgets/custom_field_admin.html'
def resources(self):
for r in super(CustomFieldAdmin, self).resources():
yield r
yield ew.JSLink('tracker_js/custom-fields.js')
fields = [
ew.HiddenField(name='name'),
ew.TextField(name='label'),
ew.Checkbox(
name='show_in_search',
label='Show in list view',
show_label=True,
suppress_label=True),
CustomFieldAdminDetail() ]
class TrackerFieldAdmin(f.ForgeForm):
submit_text=None
class fields(ew_core.NameList):
open_status_names = ew.TextField(label='Open Statuses')
closed_status_names = ew.TextField(label='Closed Statuses')
custom_fields = ffw.SortableRepeatedField(field=CustomFieldAdmin())
class buttons(ew_core.NameList):
save = ew.SubmitButton(label='Save')
cancel = ew.SubmitButton(
label="Cancel",
css_class='cancel', attrs=dict(
onclick='window.location.reload(); return false;'))
def resources(self):
for rr in self.fields['custom_fields'].resources():
yield rr
class CustomFieldDisplay(ew.CompoundField):
template='jinja:forgetracker:templates/tracker_widgets/custom_field_display.html'
class CustomFieldsDisplay(ew.RepeatedField):
template='jinja:forgetracker:templates/tracker_widgets/custom_fields_display.html'
class TrackerFieldDisplay(f.ForgeForm):
class fields(ew_core.NameList):
milestone_names = ew.TextField()
open_status_names = ew.TextField(label='Open Statuses')
        closed_status_names = ew.TextField(label='Closed Statuses')
custom_fields = CustomFieldsDisplay()
def resources(self):
for rr in self.fields['custom_fields'].resources():
yield rr
|
Python
| 0
|
@@ -3620,16 +3620,44 @@
spaces;
+ quote if containing spaces;
prefix
|
46a652e13da604776d745d2f09e02e4f75dc3fd7
|
test commit 1
|
testfile.py
|
testfile.py
|
Python
| 0.000004
|
@@ -0,0 +1,15 @@
+print(%22Hello%22)%0A
|
|
afaed1b9c6889312cc3e6fa992a03c500470e967
|
add test for feature sequence
|
src/edge/tests/test_feature.py
|
src/edge/tests/test_feature.py
|
Python
| 0
|
@@ -0,0 +1,555 @@
+from django.test import TestCase%0A%0Afrom edge.models import Fragment%0A%0A%0Aclass FeatureTests(TestCase):%0A def setUp(self):%0A self.root_sequence = %22agttcgaggctga%22%0A self.root = Fragment.create_with_sequence(%22Foo%22, self.root_sequence)%0A%0A def test_sequence_positive_strand(self):%0A feature = self.root.annotate(3, 5, %22A1%22, %22gene%22, 1)%0A self.assertEqual(feature.sequence, %22ttc%22)%0A%0A def test_sequence_negative_strand(self):%0A feature = self.root.annotate(3, 5, %22A1%22, %22gene%22, -1)%0A self.assertEqual(feature.sequence, %22gaa%22)%0A
|
|
bf34c1dbb37865e62e97e3463645c7df16a4ca08
|
Add an interface for Markov Chains
|
markov_chain.py
|
markov_chain.py
|
Python
| 0.000001
|
@@ -0,0 +1,740 @@
+from random import choice%0A%0Aclass MarkovChain(object):%0A %22%22%22 An interface for signle-word states Markov Chains %22%22%22%0A%0A def __init__(self, text=None):%0A self._states_map = %7B%7D%0A %0A if text is not None:%0A self.add_text(text)%0A%0A def add_text(self, text, separator=%22 %22):%0A %22%22%22 Adds text to the markov chain %22%22%22%0A word_list = text.split(separator)%0A for i in range(0, len(word_list)-1):%0A self._states_map.setdefault(word_list%5Bi%5D, %5B%5D).append(word_list%5Bi+1%5D)%0A return self%0A%0A def get_word(self, key):%0A %22%22%22 Returns a word from Markov Chain associated with the key %22%22%22%0A values = self._states_map.get(key)%0A return choice(values) if values is not None else None%0A
|
|
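A hypothetical usage sketch for the MarkovChain interface above (the import path assumes the file name from the record; the sample text is made up):

from markov_chain import MarkovChain

chain = MarkovChain("the cat sat on the mat on the floor")
word = chain.get_word("the")              # one of "cat", "mat", "floor", at random
assert word in ("cat", "mat", "floor")
assert chain.get_word("missing") is None  # unseen keys yield None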
055c1b0d140e2c5659c2767fd123fc69d0f83859
|
Create clean_up_d3s.py
|
clean_up_d3s.py
|
clean_up_d3s.py
|
Python
| 0.000002
|
@@ -0,0 +1,90 @@
+from globalvalues import RPI%0Aif RPI:%0A import RPi.GPIO as GPIO%0A %0A %0AGPIO.cleanup()%0A
|
|
fb10273ee2007846fa760d36ebb6806b35407fa3
|
add script/json/ts-urllib2.py
|
script/json/ts-urllib2.py
|
script/json/ts-urllib2.py
|
Python
| 0.000001
|
@@ -0,0 +1,1226 @@
+#!/usr/bin/env python%0A#%0A# ts-urllib2.py%0A#%0A# Author: Zex %3Ctop_zlynch@yahoo.com%3E%0A#%0Aimport urllib2%0Aimport json%0Afrom os import path, mkdir%0Afrom basic import *%0A%0Aif not path.isdir(RESPONSE_DIR):%0A mkdir(RESPONSE_DIR)%0A%0Adef case():%0A headers = %7B%0A #'Content-Type' : 'application/json'%0A #'Content-Type' : 'text/html',%0A %7D%0A%0A data = %7B%0A %7D%0A%0A# url = 'http://' + '127.0.0.1:5000'#/sos/login.html'%0A# auth = urllib2.HTTPPasswordMgrWithDefaultRealm()%0A# auth.add_password(None, url, USERNAME, PASSWORD)%0A# auth_handler = urllib2.HTTPBasicAuthHandler(auth)%0A#%0A# opener = urllib2.build_opener(auth_handler)%0A# urllib2.install_opener(opener)%0A# for k in headers.items():%0A# opener.addheaders.append(k)%0A# rsp = opener.open(url, json.dumps(data))%0A# with open(RESPONSE_DIR+'/rsp_'+url.replace('/','')+'.json', 'w') as fd:%0A# print rsp%0A#%0A# opener.close()%0A%0A url = URL# + '/accesspoint'%0A req = urllib2.Request(url, json.dumps(data), headers)%0A req.get_method = lambda:'GET'%0A rsp = urllib2.urlopen(req)%0A%0A with open(RESPONSE_DIR+'/rsp_'+url.replace('/','.')+'.json', 'w') as fd:%0A fd.write(rsp.read() + '%5Cn')%0A%0Atry:%0A case()%0Aexcept Exception as e:%0A print e%0A
|
|
65cc9fd3ada01790484469028875e580e8447c85
|
Update migrations to current state (#65)
|
aa_stripe/migrations/0021_auto_20190906_1623.py
|
aa_stripe/migrations/0021_auto_20190906_1623.py
|
Python
| 0
|
@@ -0,0 +1,3154 @@
+# Generated by Django 2.1.11 on 2019-09-06 20:23%0A%0Aimport django_extensions.db.fields.json%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('aa_stripe', '0020_stripecharge_statement_descriptor'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='stripecharge',%0A name='stripe_refund_id',%0A field=models.CharField(blank=True, db_index=True, max_length=255),%0A ),%0A migrations.AlterField(%0A model_name='stripecharge',%0A name='stripe_response',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),%0A ),%0A migrations.AlterField(%0A model_name='stripecoupon',%0A name='metadata',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict, help_text='Set of key/value pairs that you can attach to an object. It can be useful for storing additional information about the object in a structured format.'),%0A ),%0A migrations.AlterField(%0A model_name='stripecoupon',%0A name='stripe_response',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),%0A ),%0A migrations.AlterField(%0A model_name='stripecustomer',%0A name='sources',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=%5B%5D),%0A ),%0A migrations.AlterField(%0A model_name='stripecustomer',%0A name='stripe_js_response',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),%0A ),%0A migrations.AlterField(%0A model_name='stripecustomer',%0A name='stripe_response',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),%0A ),%0A migrations.AlterField(%0A model_name='stripesubscription',%0A name='metadata',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict, help_text='https://stripe.com/docs/api/python#create_subscription-metadata'),%0A ),%0A migrations.AlterField(%0A model_name='stripesubscription',%0A name='stripe_response',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),%0A ),%0A migrations.AlterField(%0A model_name='stripesubscriptionplan',%0A name='metadata',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict, help_text='A set of key/value pairs that you can attach to a plan object. It can be useful for storing additional information about the plan in a structured format.'),%0A ),%0A migrations.AlterField(%0A model_name='stripesubscriptionplan',%0A name='stripe_response',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),%0A ),%0A migrations.AlterField(%0A model_name='stripewebhook',%0A name='raw_data',%0A field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),%0A ),%0A %5D%0A
|
|
6f415dccb547cae95aeb9946e503cadd99f63bd6
|
add backtest_files.py
|
scripts/backtest_files.py
|
scripts/backtest_files.py
|
Python
| 0.000003
|
@@ -0,0 +1,944 @@
+#!/usr/bin/python3%0A%0Aimport json%0Aimport sys%0Afrom pprint import pprint%0Aimport requests%0Aimport re%0Aimport os%0Aimport shutil%0A%0Alocation = os.path.dirname(os.path.realpath(__file__))%0Adata_dir = os.path.join(location, %22..%22, %22data%22)%0A%0Aconfig_name = sys.argv%5B1%5D%0Aitems = sys.argv%5B2:%5D%0Aconfig_file = os.path.join(data_dir, config_name + %22.json%22)%0Aprint(%22looking for %22, config_file)%0A%0Awith open(config_file) as data_file:%0A%09data = json.load(data_file)%0A%0Afor i in items:%0A%09base_name = os.path.basename(i)%0A%09data_file = os.path.join(data_dir, base_name)%0A%09if os.path.exists(data_file):%0A%09%09print(i, %22 already exists in data dir%22, data_dir)%0A%09else:%0A%09%09print(%22copying %22, i, %22 to %22, data_dir)%09%09 %0A%09%09shutil.copy(i, data_file)%0A%09data%5B'tickersource'%5D = base_name%0A%09r = requests.post(%22http://localhost:5000/backtest%22,%0A%09%09%09 data)%0A%09k = base_name.rsplit(%22.%22, 1)%0A%09outfile = k%5B0%5D + %22.html%22%0A%09print(%22writing to %22, outfile)%0A%09with open(outfile, 'w') as outf:%0A%09%09print(r.text, file=outf)%0A
|
|
63ff0b3d0be5f34b8daea811b1eac33736a4c393
|
args list can be simpler
|
hack/prow/run_tests.py
|
hack/prow/run_tests.py
|
#!/usr/bin/env python
# Copyright 2019 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script will execute a set of named minikube tests,
gather the results, logs, and artifacts into a named GCS
bucket for presentation in k8s testgrid:
https://k8s-testgrid.appspot.com
"""
import os, sys, json, re, argparse, calendar, time, subprocess, shlex
def write_results(outdir, started, finished, test_results):
""" write current results into artifacts/junit_runner.xml
format:
<testsuite failures="XXX" tests="YYY" time="ZZZ">
<testcase classname="SUITENAME" name="TESTNAME" time="ZZZ1" />
<testcase classname="SUITENAME" name="TESTNAME" time="ZZZ1" />
...
</testsuite>
write the started.json and finish.json files
format:
started.json: {"timestamp":STARTTIMEINSECONDSINCEEPOCH}
finished.json: {"timestamp":FINISHTIMEINSECONDSINCEEPOCH,
"passed":FINALRESULT,
"result":SUCCESS|FAIL,
"metadata":{}
}
Args:
outdir: a string containing the results storage directory
started: a dict containing the starting data
finished: a dict containing the finished data
tests_results: a list of dicts containing test results
"""
started_json = open(os.path.join(outdir, "started.json"), 'w')
finished_json = open(os.path.join(outdir, "finished.json"), 'w')
junit_xml = open(os.path.join(outdir, "artifacts", "junit_runner.xml"), 'w')
failures = 0
testxml = ""
for test in test_results:
testxml += '<testcase classname="%s" name="%s" time="%s">' % (test['classname'], test['name'], test['time'])
if test['status'] == 'FAIL':
failures += 1
testxml += '<failure message="Test Failed" />'
testxml += '</testcase>\n'
junit_xml.write('<testsuite failures="%s" tests="%s">\n' % (failures, len(test_results)))
junit_xml.write(testxml)
junit_xml.write('</testsuite>')
junit_xml.close()
started_json.write(json.dumps(started))
started_json.close()
finished_json.write(json.dumps(finished))
finished_json.close()
return
def upload_results(outdir, test, buildnum, bucket):
""" push the contents of gcs_out/* into bucket/test/logs/buildnum
Args:
outdir: a string containing the results storage directory
test: a string containing path to the test script
buildnum: a string containing the buildnum
bucket: a string containing the bucket to upload results to
"""
classname = os.path.basename(test).split('.')[0]
args = shlex.split("gsutil cp -R gcs_out/ gs://%s/logs/%s/%s" % (bucket, classname, buildnum))
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
for line in p.stdout:
print line
def run_tests(test, build_log, exit_status, started, finished, test_results):
""" execute the test, grab the start time, finish time, build logs and exit status
Pull test results and important information out of the build log
test results format should be:
=== RUN TestFunctional/Mounting
--- PASS: TestFunctional (42.87s)
--- PASS: TestFunctional/Status (2.33s)
--- FAIL: SOMETESTSUITE/TESTNAME (seconds)
Args:
test: a string containing path to the test script
build_log: a string containing path to the build_log
exit_status: a string that will contain the test script's exit_status
started: a dict containing the starting data
finished: a dict containing the finished data
tests_results: a list of dicts containing test results
"""
classname = os.path.basename(test).split('.')[0]
build_log_file = open(build_log, 'w')
args = shlex.split('bash -x %s' % test)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
for line in p.stdout:
build_log_file.write(line)
print line.rstrip()
if '--- PASS' in line:
match = re.match('.*--- PASS: ([^ ]+) \(([0-9.]+)s\)', line)
(name, seconds) = match.group(1, 2)
test_results.append({"name":name, "classname":classname, "time":seconds, "status":"PASS"})
if '--- FAIL' in line:
match = re.match('.*--- FAIL: ([^ ]+) \(([0-9.]+)s\)', line)
(name, seconds) = match.group(1, 2)
test_results.append({"name":name, "classname":classname, "time":seconds, "status":"FAIL"})
    build_log_file.close()
    return p.wait()
def main(argv):
parser = argparse.ArgumentParser(description='Run tests and upload results to GCS bucket', usage='./run_tests.py --test path/to/test.sh')
parser.add_argument('--test', required=True, help='full path to test script you want to run')
parser.add_argument('--build-num', dest="buildnum", required=True, help='buildnumber for uploading to GCS')
parser.add_argument('--bucket', default="k8s-minikube-prow", help='Name of the GCS bucket to upload to. Default: k8s-minkube-prow')
parser.add_argument('--outdir', default="gcs_out", help='Path of the directory to store all results, artifacts, and logs')
args = parser.parse_args()
if not os.path.exists(args.outdir):
os.makedirs(os.path.join(args.outdir, "artifacts"))
build_log = os.path.join(args.outdir, "build_log.txt")
exit_status = ""
started = {"timestamp":calendar.timegm(time.gmtime())}
finished = {}
test_results = []
    exit_status = run_tests(args.test, build_log, exit_status, started, finished, test_results)
finished['timestamp'] = calendar.timegm(time.gmtime())
#if the test script in run_tests exits with a non-zero status then mark the test run as FAILED
    if exit_status != 0:
finished['passed'] = "false"
finished['result'] = "FAIL"
else:
finished['passed'] = "true"
finished['result'] = "SUCCESS"
write_results(args.outdir, started, finished, test_results)
upload_results(args.outdir, args.test, args.buildnum, args.bucket)
if __name__ == '__main__':
main(sys.argv)
|
Python
| 0.999983
|
@@ -4276,50 +4276,8 @@
w')%0A
- args = shlex.split('bash -x %25s' %25 test)%0A
p
@@ -4287,36 +4287,50 @@
ubprocess.Popen(
-args
+%5B'bash','-x',test%5D
, stdout=subproc
|
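For orientation, a sketch of the three files write_results() above emits, with made-up values (the XML shape follows what the code actually writes, including explicit </testcase> closers, and finished.json carries only the keys main() sets):

# started.json:
#   {"timestamp": 1561939200}
# finished.json:
#   {"timestamp": 1561940100, "passed": "true", "result": "SUCCESS"}
# artifacts/junit_runner.xml:
#   <testsuite failures="1" tests="2">
#   <testcase classname="functional" name="TestFunctional/Status" time="2.33"></testcase>
#   <testcase classname="functional" name="TestFunctional/Mounting" time="42.87"><failure message="Test Failed" /></testcase>
#   </testsuite>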
7e1058821f165e60d76eee1b07a7b411f3439408
|
Create uber.py
|
modules/uber.py
|
modules/uber.py
|
Python
| 0.000036
|
@@ -0,0 +1,40 @@
+def uber(self):%0A self.send_chan(%22Moi%22)%0A
|
|
98591c4385ae3f8e8093f6f97b57ef2c9a7284b4
|
add label to expr_to_config sub-dicts
|
hyperopt/pyll_utils.py
|
hyperopt/pyll_utils.py
|
from functools import partial
from base import DuplicateLabel
from pyll.base import Apply
from pyll import scope
from pyll import as_apply
#
# Hyperparameter Types
#
@scope.define
def hyperopt_param(label, obj):
""" A graph node primarily for annotating - VectorizeHelper looks out
for these guys, and optimizes subgraphs of the form:
hyperopt_param(<stochastic_expression>(...))
"""
return obj
def hp_pchoice(label, p_options):
"""
label: string
p_options: list of (probability, option) pairs
"""
if not isinstance(label, basestring):
raise TypeError('require string label')
p, options = zip(*p_options)
n_options = len(options)
ch = scope.hyperopt_param(label,
scope.categorical(
p,
upper=n_options))
return scope.switch(ch, *options)
def hp_choice(label, options):
if not isinstance(label, basestring):
raise TypeError('require string label')
ch = scope.hyperopt_param(label,
scope.randint(len(options)))
return scope.switch(ch, *options)
def hp_randint(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.hyperopt_param(label,
scope.randint(*args, **kwargs))
def hp_uniform(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.uniform(*args, **kwargs)))
def hp_quniform(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.quniform(*args, **kwargs)))
def hp_loguniform(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.loguniform(*args, **kwargs)))
def hp_qloguniform(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.qloguniform(*args, **kwargs)))
def hp_normal(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.normal(*args, **kwargs)))
def hp_qnormal(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.qnormal(*args, **kwargs)))
def hp_lognormal(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.lognormal(*args, **kwargs)))
def hp_qlognormal(label, *args, **kwargs):
if not isinstance(label, basestring):
raise TypeError('require string label')
return scope.float(
scope.hyperopt_param(label,
scope.qlognormal(*args, **kwargs)))
#
# Tools for extracting a search space from a Pyll graph
#
class Cond(object):
def __init__(self, name, val, op):
self.op = op
self.name = name
self.val = val
def __str__(self):
return 'Cond{%s %s %s}' % (self.name, self.op, self.val)
def __eq__(self, other):
return self.op == other.op and self.name == other.name and self.val == other.val
def __hash__(self):
return hash((self.op, self.name, self.val))
def __repr__(self):
return str(self)
EQ = partial(Cond, op='=')
def expr_to_config(expr, conditions, hps):
"""
Populate dictionary `hps` with the hyperparameters in pyll graph `expr`
and conditions for participation in the evaluation of `expr`.
Arguments:
expr - a pyll expression root.
conditions - a tuple of conditions (`Cond`) that must be True for
`expr` to be evaluated.
hps - dictionary to populate
Creates `hps` dictionary:
label -> { 'node': apply node of hyperparameter distribution,
'conditions': `conditions` + tuple,
'label': label
}
"""
expr = as_apply(expr)
if conditions is None:
conditions = ()
assert isinstance(expr, Apply)
if expr.name == 'switch':
idx = expr.inputs()[0]
options = expr.inputs()[1:]
assert idx.name == 'hyperopt_param'
assert idx.arg['obj'].name in (
'randint', # -- in case of hp.choice
'categorical', # -- in case of hp.pchoice
)
expr_to_config(idx, conditions, hps)
for ii, opt in enumerate(options):
expr_to_config(opt,
conditions + (EQ(idx.arg['label'].obj, ii),),
hps)
elif expr.name == 'hyperopt_param':
label = expr.arg['label'].obj
if label in hps:
if hps[label]['node'] != expr.arg['obj']:
raise DuplicateLabel(label)
hps[label]['conditions'].add(conditions)
else:
hps[label] = {'node': expr.arg['obj'],
'conditions': set((conditions,))}
else:
for ii in expr.inputs():
expr_to_config(ii, conditions, hps)
|
Python
| 0.000001
|
@@ -5545,16 +5545,86 @@
tions,))
+,%0A 'label': label,%0A
%7D%0A el
|
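To make the patch concrete, a sketch of what expr_to_config populates for a small conditional space (assumes the helpers above are importable; node reprs are abbreviated in the comments):

space = hp_choice('kind', [hp_uniform('x', 0, 1), hp_normal('y', 0, 1)])
hps = {}
expr_to_config(space, None, hps)
# hps['kind'] -> {'node': <randint>, 'conditions': {()},               'label': 'kind'}
# hps['x']    -> {'node': <uniform>, 'conditions': {(EQ('kind', 0),)}, 'label': 'x'}
# hps['y']    -> {'node': <normal>,  'conditions': {(EQ('kind', 1),)}, 'label': 'y'}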
b8795def87635aa8192f5f8cf64afe1a22ec30f1
|
Add findCsetsIntersection.py script to find intersecting changesets in our knownBrokenRanges.
|
autobisect-js/findCsetsIntersection.py
|
autobisect-js/findCsetsIntersection.py
|
Python
| 0
|
@@ -0,0 +1,1784 @@
+#!/usr/bin/env python%0A#%0A# This Source Code Form is subject to the terms of the Mozilla Public%0A# License, v. 2.0. If a copy of the MPL was not distributed with this file,%0A# You can obtain one at http://mozilla.org/MPL/2.0/.%0A%0Aimport os%0Aimport sys%0Afrom optparse import OptionParser%0A%0Afrom ignoreAndEarliestWorkingLists import knownBrokenRanges%0Apath0 = os.path.dirname(os.path.abspath(__file__))%0Apath1 = os.path.abspath(os.path.join(path0, os.pardir, 'util'))%0Asys.path.append(path1)%0Afrom subprocesses import captureStdout, normExpUserPath%0A%0Adef parseOptions():%0A parser = OptionParser()%0A parser.add_option('-R', '--repo', dest='rDir',%0A help='Sets the repository to analyze..')%0A options, args = parser.parse_args()%0A assert options.rDir is not None%0A assert os.path.isdir(normExpUserPath(options.rDir))%0A return options.rDir%0A%0Adef countCsets(revset, rdir):%0A listCmd = %5B'hg', 'log', '-r', revset, '--template=1'%5D%0A rangeIntersectionOnes = captureStdout(listCmd, currWorkingDir=rdir)%0A assert rangeIntersectionOnes%5B1%5D == 0%0A return len(rangeIntersectionOnes%5B0%5D)%0A%0Adef main():%0A repoDir = parseOptions()%0A brokenRanges = knownBrokenRanges()%0A%0A cnt = 0%0A for i in range(0, len(brokenRanges)):%0A print 'Analyzing revset: ' + brokenRanges%5Bi%5D + %5C%0A ' which matches ' + str(countCsets(brokenRanges%5Bi%5D, repoDir)) + ' changesets'%0A for j in range(i + 1, len(brokenRanges)):%0A cnt += 1%0A print 'Number ' + str(cnt) + ': Compared against revset: ' + brokenRanges%5Bj%5D%0A overlap = countCsets(brokenRanges%5Bi%5D + ' and ' + brokenRanges%5Bj%5D, repoDir)%0A if overlap %3E 0:%0A print('Number of overlapping changesets: ' + str(overlap))%0A cnt = 0%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
edd808ae5ef9b69dddaf522850ef1f55a295990e
|
Add tests for generating predictions
|
cf_predict/test/test_cf_predict.py
|
cf_predict/test/test_cf_predict.py
|
"""Unit test cf_predict"""
import pytest
from cf_predict import __version__
from cf_predict.resources import get_db
@pytest.mark.usefixtures("client_class")
class TestCf_predict:
def test_catalogue(self):
rv = self.client.get("/")
assert rv.status_code == 200
assert rv.json == {
"predict_url": "http://localhost/predict",
"model_version_url": "http://localhost/model",
"api_version": __version__
}
def test_get_db(self):
r = get_db()
r.set("test", 5)
assert int(r.get("test")) == 5
def test_no_model_in_db(self, monkeypatch, caplog):
monkeypatch.setattr("cf_predict.resources.get_db", lambda: {})
self.client.get("/model")
assert "No model" in caplog.text()
def test_get_version(self):
rv = self.client.get("/model")
assert rv.status_code == 200
assert rv.json == {
"model_version": "1.2.0"
}
def test_put_version_valid_latest(self):
rv = self.client.put("/model?version=latest")
assert rv.status_code == 200
assert rv.json == {
"model_version": "1.2.0"
}
def test_put_version_valid_specific(self):
rv = self.client.put("/model?version=1.1.0")
assert rv.status_code == 200
assert rv.json == {
"model_version": "1.1.0"
}
def test_put_version_invalid(self):
rv = self.client.put("/model?version=lol")
assert rv.status_code == 404
assert rv.json == {
"message": "Model version lol not found"
}
def test_get_prediction_valid_input(self):
pass
def test_get_prediction_invalid_id(self):
pass
def test_get_prediction_invalid_features(self):
pass
def test_get_prediction_valid_json_output(self):
pass
|
Python
| 0.000001
|
@@ -27,22 +27,130 @@
%0Aimport
-pytest
+json%0Aimport pickle%0A%0Aimport numpy as np%0Aimport pytest%0A%0Afrom mockredis import MockRedis%0Afrom .conftest import models
%0Afrom cf
@@ -428,76 +428,13 @@
%22
-predict_url%22: %22http://localhost/predict%22,%0A %22model_version
+model
_url
@@ -744,18 +744,17 @@
b%22,
-lambda: %7B%7D
+MockRedis
)%0A
@@ -1663,34 +1663,35 @@
%7D%0A%0A def test_
-ge
+pos
t_prediction_val
@@ -1718,192 +1718,831 @@
-pass%0A%0A def test_get_pred
+features = %7B%22features%22: %5B1, 2, 3, 4, 5%5D%7D%0A model = pickle.loads(models()%5B%221.2.0%22%5D)%0A rv = self.client.post(%22/model%22,%0A data=json.dumps(features),%0A content_type=%22appl
ic
+a
tion
-_invalid_id(self):%0A pass%0A%0A def test_get_prediction_invalid_features(self):%0A pass%0A%0A def test_get_pred
+/json%22)%0A assert rv.status_code == 200%0A assert rv.json == %7B%0A %22prediction%22: model.predict(np.array(features%5B%22features%22%5D.reshape(1, -1)))%0A %7D%0A%0A def test_get_prediction_invalid_features(self):%0A features = %7B%22features%22: %5B1, 2, %22lol%22, 4, 5%5D%7D%0A rv = self.client.post(%22/model%22,%0A data=json.dumps(features),%0A content_type=%22appl
ic
+a
tion
-_valid_json_output(self):
+/json%22)%0A assert rv.status_code == 400%0A assert rv.json == %7B%0A %22message%22: %22Features %5B1, 2, 'lol', 4, 5%5D do not match expected input%22
%0A
@@ -2550,9 +2550,6 @@
-pass
+%7D
%0A
|
e81f823a1542bf24caa081b299352e593e1a10c9
|
Add a utility function
|
cohydra/util.py
|
cohydra/util.py
|
Python
| 0.00008
|
@@ -0,0 +1,971 @@
+import os%0A%0A%0Adef recursive_scandir(top_dir, dir_first=True):%0A %22%22%22Recursively scan a path.%0A%0A Args:%0A top_dir: The path to scan.%0A dir_first: If true, yield a directory before its contents.%0A Otherwise, yield a directory's contents before the%0A directory itself.%0A%0A Returns:%0A A generator of tuples of the path of a directory relative to%0A the top path, and an os.DirEntry object of an entry in that%0A directory. The top_dir itself is not included.%0A %22%22%22%0A%0A def f(relpath, dir_entry):%0A if dir_first and dir_entry is not None:%0A yield relpath, dir_entry%0A%0A path = os.path.join(top_dir, relpath)%0A%0A for entry in os.scandir(path):%0A entry_relpath = os.path.join(relpath, entry.name)%0A%0A if entry.is_dir():%0A for item in f(entry_relpath, entry):%0A yield item%0A else:%0A yield entry_relpath, entry%0A%0A if not dir_first and dir_entry is not None:%0A yield relpath, dir_entry%0A%0A return f('', None)%0A
|
|
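A hypothetical walk over the generator defined in the diff above (directory path and import are illustrative):

from cohydra.util import recursive_scandir

# Yields (relative path, os.DirEntry) pairs; with dir_first=True each
# directory appears before its contents, with False it appears after them.
for relpath, entry in recursive_scandir('/tmp/demo', dir_first=True):
    print('dir ' if entry.is_dir() else 'file', relpath)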
e29de047d770de70f3745ae410b62d0ddad4b0b4
|
Add one test case for IOTOS-358
|
lib/oeqa/runtime/misc/appFW.py
|
lib/oeqa/runtime/misc/appFW.py
|
Python
| 0
|
@@ -0,0 +1,310 @@
+from oeqa.oetest import oeRuntimeTest%0A%0Aclass AppFwTest(oeRuntimeTest):%0A%0A %22%22%22 App Framework testing %22%22%22%0A%0A def test_sqlite_integration(self):%0A%0A %22%22%22 test sqlite is integrated in image %22%22%22%0A%0A (status,output) = self.target.run(%22rpm -qa %7C grep sqlite%22)%0A self.assertEqual(status, 0, output)%0A
|
|
891a10c6dec653d36e7416698369e5e5566338d5
|
Remove unused imports
|
pip/commands/configuration.py
|
pip/commands/configuration.py
|
import logging
import subprocess
from pip import cmdoptions
from pip.basecommand import Command
from pip.configuration import Configuration
from pip.exceptions import ConfigurationError
from pip.status_codes import SUCCESS, ERROR
logger = logging.getLogger(__name__)
class ConfigurationCommand(Command):
"""Manage local and global configuration."""
name = 'config'
usage = """
%prog [<file-option>] --list
%prog [<file-option>] --edit --editor
%prog [<file-option>] --get name
%prog [<file-option>] --set name=value
%prog [<file-option>] --unset name"""
summary = 'Manage local and global configuration.'
def __init__(self, *args, **kwargs):
super(ConfigurationCommand, self).__init__(*args, **kwargs)
self.configuration = None
self.cmd_opts.add_option(
'-l', '--list',
dest='list',
action='store_true',
default=False,
help='List the active configuration (or from the file specified)'
)
self.cmd_opts.add_option(
'-e', '--edit',
dest='edit',
action='store_true',
default=False,
help='Edit the configuration file'
)
self.cmd_opts.add_option(
'--get',
dest='get_name',
action='store',
metavar='name',
default=None,
help='Get the value associated with name in the configuration file'
)
self.cmd_opts.add_option(
'--set',
dest='set_name_value',
action='store',
metavar='name=value',
type="string", # this is validated elsewhere
default=None,
help='Set name=value in the configuration file'
)
self.cmd_opts.add_option(
'--unset',
dest='unset_name',
action='store',
metavar='name',
default=None,
help=(
'Unset the value associated with name in the configuration '
'file'
)
)
self.cmd_opts.add_option(
'--global',
dest='global_file',
action='store_true',
default=False,
help='Use the system-wide configuration file only'
)
self.cmd_opts.add_option(
'--user',
dest='user_file',
action='store_true',
default=False,
help='Use the user configuration file only'
)
self.cmd_opts.add_option(
'--venv',
dest='venv_file',
action='store_true',
default=False,
help='Use the virtualenv configuration file only'
)
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
# Determine what action is to be taken
action_options = {
"list": options.list,
"edit": options.edit,
"get": options.get_name,
"set": options.set_name_value,
"unset": options.unset_name,
}
action = None
for k, v in action_options.items():
if v:
if action is not None:
# this works because of the conditional immediately after
# the loop.
action = None
break
action = k
if action is None:
logger.error(
"Need exactly one action (--list, --edit, "
"--get, --set, --unset) to perform."
)
return ERROR
# Determine which configuration files are to be loaded
        if sum([options.user_file, options.global_file, options.venv_file]) > 1:
logger.error(
"Need at-most one configuration file to use - pass "
"only one of --global, --user, --venv."
)
return ERROR
kwargs = {}
if options.user_file:
kwargs["load_only"] = "user"
elif options.global_file:
kwargs["load_only"] = "site-wide"
elif options.venv_file:
kwargs["load_only"] = "venv"
elif action in ["set", "unset", "edit"]:
logger.error(
"Need one configuration file to modify - pass one of "
"--global, --user, --venv."
)
return ERROR
# Load a new configuration
self.configuration = Configuration(
isolated=options.isolated_mode, **kwargs
)
self.configuration.load()
# Call the handler for the action with the options
handlers = {
"list": self.list_values,
"edit": self.open_in_editor,
"get": self.get_name,
"set": self.set_name_value,
"unset": self.unset_name
}
return handlers[action](options)
def list_values(self, options):
for key, value in sorted(self.configuration.items()):
logger.info("%s=%r", key, value)
return SUCCESS
def open_in_editor(self, options):
if options.editor is None:
logger.error(
"--edit requires an editor to be passed, either using "
"--editor or by setting it in a configuration file."
)
return ERROR
file = self.configuration.get_file()
try:
subprocess.check_call([options.editor, file])
except subprocess.CalledProcessError as e:
logger.error(
"Subprocess exited with exit code %d", e.returncode
)
return ERROR
else:
return SUCCESS
def get_name(self, options):
try:
value = self.configuration.get_value(options.get_name)
except KeyError:
logger.error("No key %r in configuration", options.get_name)
return ERROR
logger.info("%s", value)
return SUCCESS
def set_name_value(self, options):
key, value = options.set_name_value.split("=", 1)
try:
self.configuration.set_value(key, value)
except ConfigurationError:
logger.error("Could not set value in configuration")
else:
return self._save_configuration()
def unset_name(self, options):
key = options.unset_name
try:
self.configuration.unset_value(key)
except ConfigurationError:
logger.error("Could not unset value in configuration")
else:
return self._save_configuration()
def _save_configuration(self):
# We successfully ran a modifying command. Need to save the
# configuration.
try:
self.configuration.save()
except Exception:
logger.error(
"Unable to save configuration. Please report this as a bug.",
exc_info=1
)
return ERROR
else:
return SUCCESS
|
Python
| 0.000001
|
@@ -31,35 +31,8 @@
ss%0A%0A
-from pip import cmdoptions%0A
from
|
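Aside: the action-selection loop in the configuration command above implements an "exactly one of N flags" check. A standalone sketch of the same idiom (illustrative names, not pip's code):

def pick_action(flags):
    """Return the single requested action name, or None if zero or many were set."""
    chosen = [name for name, value in flags.items() if value]
    return chosen[0] if len(chosen) == 1 else None

assert pick_action({"list": True, "edit": False}) == "list"
assert pick_action({"list": False, "edit": False}) is None
assert pick_action({"list": True, "edit": True}) is None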
1051ec35f33c3e7a3946af3cf8a11a86dc9265a0
|
Create utility module
|
app_v2/utils.py
|
app_v2/utils.py
|
Python
| 0
|
@@ -0,0 +1,175 @@
+def map_range(x, in_min, in_max, out_min, out_max):%0A out_delta = out_max - out_min%0A in_delta = in_max - in_min%0A%0A return (x - in_min) * out_delta / in_delta + out_min%0A
|
|
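For context, map_range above is a plain linear rescale between two intervals. A quick usage sketch (Python 3 assumed; the values are illustrative):

def map_range(x, in_min, in_max, out_min, out_max):
    out_delta = out_max - out_min
    in_delta = in_max - in_min
    return (x - in_min) * out_delta / in_delta + out_min

# rescale a 10-bit ADC reading (0..1023) onto a 0..100 percentage scale
print(map_range(512, 0, 1023, 0, 100))  # ~50.05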
351d243ccf33b0dd979d84a67e6288621dd84227
|
Fix --debug-shell
|
src/ostbuild/pyostbuild/vcs.py
|
src/ostbuild/pyostbuild/vcs.py
|
# Copyright (C) 2011 Colin Walters <walters@verbum.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import re
import urlparse
import shutil
from .subprocess_helpers import run_sync_get_output, run_sync
def get_mirrordir(mirrordir, keytype, uri, prefix=''):
assert keytype == 'git'
parsed = urlparse.urlsplit(uri)
return os.path.join(mirrordir, prefix, keytype, parsed.scheme, parsed.netloc, parsed.path[1:])
def _fixup_submodule_references(mirrordir, cwd):
submodules_status_text = run_sync_get_output(['git', 'submodule', 'status'], cwd=cwd)
submodule_status_lines = submodules_status_text.split('\n')
have_submodules = False
for line in submodule_status_lines:
if line == '': continue
have_submodules = True
line = line[1:]
(sub_checksum, sub_name) = line.split(' ', 1)
sub_url = run_sync_get_output(['git', 'config', '-f', '.gitmodules',
'submodule.%s.url' % (sub_name, )], cwd=cwd)
        # use a distinct name so the outer mirrordir base is not clobbered between submodules
        sub_mirrordir = get_mirrordir(mirrordir, 'git', sub_url)
        run_sync(['git', 'config', 'submodule.%s.url' % (sub_name, ), 'file://' + sub_mirrordir], cwd=cwd)
return have_submodules
def get_vcs_checkout(mirrordir, keytype, uri, dest, branch, overwrite=True):
module_mirror = get_mirrordir(mirrordir, keytype, uri)
assert keytype == 'git'
    checkoutdir_parent = os.path.dirname(dest)
if not os.path.isdir(checkoutdir_parent):
os.makedirs(checkoutdir_parent)
tmp_dest = dest + '.tmp'
if os.path.isdir(tmp_dest):
shutil.rmtree(tmp_dest)
if os.path.isdir(dest) and overwrite:
shutil.rmtree(dest)
if not os.path.isdir(tmp_dest):
run_sync(['git', 'clone', '-q',
'--no-checkout', module_mirror, tmp_dest])
run_sync(['git', 'checkout', '-q', branch], cwd=tmp_dest)
run_sync(['git', 'submodule', 'init'], cwd=tmp_dest)
have_submodules = _fixup_submodule_references(mirrordir, tmp_dest)
if have_submodules:
run_sync(['linux-user-chroot',
'--unshare-net', '--chdir', tmp_dest, '/',
'/usr/bin/git', 'submodule', 'update'])
os.rename(tmp_dest, dest)
return dest
|
Python
| 0
|
@@ -2288,20 +2288,28 @@
ir(dest)
- and
+:%0A if
overwri
@@ -2312,32 +2312,36 @@
rwrite:%0A
+
shutil.rmtree(de
@@ -2336,32 +2336,70 @@
il.rmtree(dest)%0A
+ else:%0A return dest%0A
if not os.pa
|
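Reassembling the hunks above, the patched overwrite branch of get_vcs_checkout reads (reconstructed from the diff, not the verbatim file):

    if os.path.isdir(dest):
        if overwrite:
            shutil.rmtree(dest)
        else:
            return dest  # reuse the existing checkout when overwrite is False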
c6d505ef6610ebb383e4f0a7a3d1b746f7fd5f75
|
add group
|
conjur/group.py
|
conjur/group.py
|
Python
| 0.000001
|
@@ -0,0 +1,1429 @@
+#%0A# Copyright (C) 2014 Conjur Inc%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a copy of%0A# this software and associated documentation files (the %22Software%22), to deal in%0A# the Software without restriction, including without limitation the rights to%0A# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of%0A# the Software, and to permit persons to whom the Software is furnished to do so,%0A# subject to the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included in all%0A# copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS%0A# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR%0A# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER%0A# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN%0A# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.%0Afrom conjur.role import Role%0A%0Aclass Group(object):%0A def __init__(self, api, id):%0A self.api = api%0A self.id = id%0A self.role = Role(api, 'group', id)%0A%0A def add_member(self, member, admin=False):%0A self.role.grant_to(member, admin)%0A%0A def remove_member(self, member):%0A self.role.revoke_from(member)%0A%0A%0A%0A
|
|
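A minimal usage sketch for the Group class above; the client constructor and the member identifier format are assumptions, not taken from the commit:

import conjur

# assumption: new_from_key is the client constructor in this library version
api = conjur.new_from_key('alice', 'api-key-123')
group = Group(api, 'developers')
group.add_member('user:bob', admin=True)   # grants the group role via Role.grant_to
group.remove_member('user:bob')            # revokes it via Role.revoke_from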
0fd4a7e33f67a2405362c2a57ba72f5f6c034651
|
should be called daily
|
bin/generate_tsas.py
|
bin/generate_tsas.py
|
Python
| 0.998713
|
@@ -0,0 +1,2326 @@
+#!/usr/bin/python%0Afrom __future__ import print_function%0Aimport cProfile%0Aimport copy%0Aimport sys%0Aimport gc%0Aimport logging%0Alogging.basicConfig(level=logging.INFO)%0A# own modules%0Afrom datalogger import DataLoggerWeb as DataLoggerWeb%0Afrom commons import *%0A%0Adef main(project, tablename, datestring, datalogger):%0A #caches = datalogger.get_caches(datestring)%0A caches = datalogger.get_caches(project, tablename, datestring)%0A suffix = %22%25s/%25s/%25s%5Ct%22 %25 (datestring, project, tablename)%0A if caches%5B%22tsa%22%5D%5B%22raw%22%5D is None:%0A print(suffix, %22Nothing could be done without RAW data%22)%0A else:%0A #print(%22RAW filename : %25s%22 %25 caches%5B%22tsa%22%5D%5B%22raw%22%5D)%0A if len(caches%5B%22tsa%22%5D%5B%22keys%22%5D) == 0:%0A print(suffix, %22TSA Archive missing, calling get_tsa and get_tsastats%22)%0A #datalogger.get_tsa(project, tablename, datestring)%0A datalogger.get_tsastats(project, tablename, datestring)%0A else:%0A #print(%22TSA filename : %25s%22 %25 caches%5B%22tsa%22%5D%5B%22keys%22%5D)%0A if len(caches%5B%22tsastat%22%5D%5B%22keys%22%5D) == 0:%0A print(suffix, %22TSASTAT Archive missing, calling get_tsastats%22)%0A datalogger.get_tsastats(project, tablename, datestring)%0A else:%0A #print(%22TSASTAT filename : %25s%22 %25 caches%5B%22tsastat%22%5D%5B%22keys%22%5D)%0A if len(caches%5B%22ts%22%5D%5B%22keys%22%5D) == 0:%0A print(suffix, %22there are no ts archives, something went wrong, or tsa is completely empty, calling get_tsastats%22)%0A datalogger.get_tsastats(project, tablename, datestring)%0A else:%0A #print(%22TS filename : %25s%22 %25 len(caches%5B%22ts%22%5D%5B%22keys%22%5D))%0A #print(%22TSSTAT filename : %25s%22 %25 len(caches%5B%22tsstat%22%5D%5B%22keys%22%5D))%0A print(suffix, %22All fine%22)%0A%0Aif __name__ == %22__main__%22:%0A datalogger = DataLoggerWeb()%0A #for datestring in DataLogger.datewalker(%222015-09-01%22, datalogger.get_last_business_day_datestring()):%0A for datestring in datalogger.get_datewalk(%222015-11-01%22, datalogger.get_last_business_day_datestring()):%0A for project in datalogger.get_projects():%0A for tablename in datalogger.get_tablenames(project):%0A #datalogger = DataLogger(BASEDIR, project, tablename)%0A main(project, tablename, datestring, datalogger)%0A #cProfile.run(%22main()%22)%0A
|
|
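The cache-check cascade in main() above boils down to "regenerate statistics if any layer is missing". In outline (a paraphrase of the control flow, not the project's API):

def ensure_caches(caches, regenerate):
    # regenerate() recomputes tsastats, which repopulates the downstream caches
    if caches["tsa"]["raw"] is None:
        return "nothing to do without RAW data"
    if not (caches["tsa"]["keys"] and caches["tsastat"]["keys"] and caches["ts"]["keys"]):
        regenerate()
        return "regenerated"
    return "all fine"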
d14c0127a2489c03a50f49751b8b735202873e29
|
Add RFID Reader example.
|
black_rfid_reader.py
|
black_rfid_reader.py
|
Python
| 0
|
@@ -0,0 +1,665 @@
+from keyboard_alike import reader%0A%0A%0Aclass RFIDReader(reader.Reader):%0A %22%22%22%0A This class supports common black RFID Readers for 125 kHz read only tokens%0A http://www.dx.com/p/intelligent-id-card-usb-reader-174455%0A %22%22%22%0A def extract_meaningful_data_from_chunk(self, raw_data):%0A # every good chunk is followed by blank chunk%0A chunks = super(RFIDReader, self).extract_meaningful_data_from_chunk(raw_data)%0A for index, chunk in enumerate(chunks):%0A if index %25 2 == 0:%0A yield chunk%0A%0A%0Aif __name__ == %22__main__%22:%0A reader = RFIDReader(0x08ff, 0x0009, 84, 8)%0A reader.initialize()%0A print(reader.read().strip())%0A
|
|
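The index filter in extract_meaningful_data_from_chunk above simply drops every second chunk. An equivalent standalone sketch:

def keep_even_indexed(chunks):
    # every good chunk is followed by a blank chunk, so keep indices 0, 2, 4, ...
    for index, chunk in enumerate(chunks):
        if index % 2 == 0:
            yield chunk

assert list(keep_even_indexed(['a', '', 'b', ''])) == ['a', 'b']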
a14b5aad9f6f15cfd9bee32c689e023a5dc94f19
|
Add very basic visualization tool in python until I find a go lib which can do the same
|
visualize.py
|
visualize.py
|
Python
| 0
|
@@ -0,0 +1,1138 @@
+'''%0ACreate a visual representation of the various DAGs defined%0A'''%0A%0Aimport requests%0Aimport networkx as nx%0Aimport matplotlib.pyplot as plt%0A%0A%0Aif __name__ == '__main__':%0A g = nx.DiGraph()%0A labels = %7B%0A 'edges': %7B%7D,%0A 'nodes': %7B%7D,%0A %7D%0A%0A nodes = %7B%7D%0A%0A for routeKey, routeMap in requests.get('http://localhost:12345').json().iteritems():%0A for i, node in enumerate(routeMap%5B'Path'%5D):%0A g.add_node(node%5B'Name'%5D)%0A labels%5B'nodes'%5D%5Bnode%5B'Name'%5D%5D = node%5B'Name'%5D%0A if i - 1 %3E= 0:%0A g.add_edge(routeMap%5B'Path'%5D%5Bi-1%5D%5B'Name'%5D, routeMap%5B'Path'%5D%5Bi%5D%5B'Name'%5D)%0A labels%5B'edges'%5D%5B(routeMap%5B'Path'%5D%5Bi-1%5D%5B'Name'%5D, routeMap%5B'Path'%5D%5Bi%5D%5B'Name'%5D)%5D = (routeMap%5B'Path'%5D%5Bi-1%5D%5B'Name'%5D, routeMap%5B'Path'%5D%5Bi%5D%5B'Name'%5D)%0A%0A%0A #pos = nx.spring_layout(g)%0A #nx.draw(g, pos=pos)%0A nx.draw_networkx(g, with_labels=True)%0A%0A # add labels%0A #nx.draw_networkx_labels(g, pos, labels%5B'nodes'%5D)%0A #nx.draw_networkx_edge_labels(g, pos, labels%5B'edges'%5D)%0A%0A # write out the graph%0A plt.savefig('dag.png')%0A plt.show() # in case people have the required libraries to make it happen%0A
|
|
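Note that .iteritems() in the script above is Python 2 only; a Python 3 port of that loop (assuming the same JSON shape) would be:

import requests

data = requests.get('http://localhost:12345').json()
for route_key, route_map in data.items():  # .iteritems() -> .items() on Python 3
    print(route_key, len(route_map['Path']))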
78c64d00df97edbec5f07213dc87ff30a7bb4ca9
|
Create moveToObject.py
|
af_scripts/misc/moveToObject.py
|
af_scripts/misc/moveToObject.py
|
Python
| 0.000001
|
@@ -0,0 +1,624 @@
+# move to object%0A%0Aimport pymel.core as pm%0A%0Adef move_to_object():%0A%09get_sel = pm.ls(sl=1,fl=1)%0A%09if len(get_sel) == 2:%0A%09%09src = get_sel%5B1%5D%0A%09%09target = get_sel%5B0%5D%0A%09%09src_oc_x = pm.objectCenter(src,x=1)%0A%09%09src_oc_y = pm.objectCenter(src,y=1)%0A%09%09src_oc_z = pm.objectCenter(src,z=1)%0A%09%09target_oc_x = pm.objectCenter(target,x=1)%0A%09%09target_oc_y = pm.objectCenter(target,y=1)%0A%09%09target_oc_z = pm.objectCenter(target,z=1)%0A%09%09src_oc = %5Bsrc_oc_x,src_oc_y,src_oc_z%5D%0A%09%09target_oc = %5Btarget_oc_x,target_oc_y,target_oc_z%5D%0A%09%09vector = (src_oc_x-target_oc_x),(src_oc_y-target_oc_y),(src_oc_z-target_oc_z)%0A%09%09pm.xform(target,t=vector,r=1)%0Amove_to_object()%0A
|