commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
bce910100fe0c3970b82d4f5544f11ce3392bc3c | Remove NoQueueMinCycleTime nonsense from sync worker | sync_worker.py | sync_worker.py | from datetime import datetime, timedelta
import os
print("Sync worker %s booting at %s" % (os.getpid(), datetime.now()))
from tapiriik.requests_lib import patch_requests_with_default_timeout, patch_requests_source_address
from tapiriik import settings
from tapiriik.database import db, close_connections
import time
import signal
import sys
import subprocess
import socket
Run = True
RecycleInterval = 2 # Time spent rebooting workers < time spent wrangling Python memory management.
oldCwd = os.getcwd()
WorkerVersion = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, cwd=os.path.dirname(__file__)).communicate()[0].strip()
os.chdir(oldCwd)
def sync_interrupt(signal, frame):
    # SIGINT/SIGUSR2 handler: ask the main loop to stop after the current
    # sync cycle.  NOTE(review): the `signal` parameter shadows the imported
    # `signal` module (harmless here since the module is not used inside).
    global Run
    Run = False
signal.signal(signal.SIGINT, sync_interrupt)
signal.signal(signal.SIGUSR2, sync_interrupt)
def sync_heartbeat(state, user=None):
    """Refresh this worker's sync_workers document with its current state.

    The record is keyed by (pid, hostname); the heartbeat timestamp, state
    string and optional user are overwritten in place.
    """
    selector = {"Process": os.getpid(), "Host": socket.gethostname()}
    changes = {"$set": {"Heartbeat": datetime.utcnow(), "State": state, "User": user}}
    db.sync_workers.update(selector, changes)
print("Sync worker " + str(os.getpid()) + " initialized at " + str(datetime.now()))
db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"Process": os.getpid(), "Heartbeat": datetime.utcnow(), "Startup": datetime.utcnow(), "Version": WorkerVersion, "Host": socket.gethostname(), "Index": settings.WORKER_INDEX, "State": "startup"}, upsert=True)
sys.stdout.flush()
patch_requests_with_default_timeout(timeout=60)
if isinstance(settings.HTTP_SOURCE_ADDR, list):
settings.HTTP_SOURCE_ADDR = settings.HTTP_SOURCE_ADDR[settings.WORKER_INDEX % len(settings.HTTP_SOURCE_ADDR)]
patch_requests_source_address((settings.HTTP_SOURCE_ADDR, 0))
print(" -> Index %s\n -> Interface %s" % (settings.WORKER_INDEX, settings.HTTP_SOURCE_ADDR))
# We defer including the main body of the application till here so the settings aren't captured before we've set them up.
# The better way would be to defer initializing services until they're requested, but it's 10:30 and this will work just as well.
from tapiriik.sync import Sync
Sync.InitializeWorkerBindings()
sync_heartbeat("ready")
while Run:
cycleStart = datetime.utcnow() # Avoid having synchronization fall down during DST setback
processed_user_count = Sync.PerformGlobalSync(heartbeat_callback=sync_heartbeat, version=WorkerVersion)
RecycleInterval -= processed_user_count
if RecycleInterval <= 0:
break
sync_heartbeat("idle")
print("Sync worker shutting down cleanly")
db.sync_workers.remove({"Process": os.getpid(), "Host": socket.gethostname()})
print("Closing database connections")
close_connections()
sys.stdout.flush()
| from datetime import datetime, timedelta
import os
print("Sync worker %s booting at %s" % (os.getpid(), datetime.now()))
from tapiriik.requests_lib import patch_requests_with_default_timeout, patch_requests_source_address
from tapiriik import settings
from tapiriik.database import db, close_connections
import time
import signal
import sys
import subprocess
import socket
Run = True
RecycleInterval = 2 # Time spent rebooting workers < time spent wrangling Python memory management.
NoQueueMinCycleTime = timedelta(seconds=30) # No need to hammer the database given the number of sync workers I have
oldCwd = os.getcwd()
WorkerVersion = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, cwd=os.path.dirname(__file__)).communicate()[0].strip()
os.chdir(oldCwd)
def sync_interrupt(signal, frame):
    # SIGINT/SIGUSR2 handler: request a clean stop of the main sync loop.
    # NOTE(review): the `signal` parameter shadows the imported module.
    global Run
    Run = False
signal.signal(signal.SIGINT, sync_interrupt)
signal.signal(signal.SIGUSR2, sync_interrupt)
def sync_heartbeat(state, user=None):
    # Refresh this worker's heartbeat/state/user document in sync_workers,
    # keyed by (pid, hostname).
    db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"$set": {"Heartbeat": datetime.utcnow(), "State": state, "User": user}})
print("Sync worker " + str(os.getpid()) + " initialized at " + str(datetime.now()))
db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"Process": os.getpid(), "Heartbeat": datetime.utcnow(), "Startup": datetime.utcnow(), "Version": WorkerVersion, "Host": socket.gethostname(), "Index": settings.WORKER_INDEX, "State": "startup"}, upsert=True)
sys.stdout.flush()
patch_requests_with_default_timeout(timeout=60)
if isinstance(settings.HTTP_SOURCE_ADDR, list):
settings.HTTP_SOURCE_ADDR = settings.HTTP_SOURCE_ADDR[settings.WORKER_INDEX % len(settings.HTTP_SOURCE_ADDR)]
patch_requests_source_address((settings.HTTP_SOURCE_ADDR, 0))
print(" -> Index %s\n -> Interface %s" % (settings.WORKER_INDEX, settings.HTTP_SOURCE_ADDR))
# We defer including the main body of the application till here so the settings aren't captured before we've set them up.
# The better way would be to defer initializing services until they're requested, but it's 10:30 and this will work just as well.
from tapiriik.sync import Sync
Sync.InitializeWorkerBindings()
while Run:
cycleStart = datetime.utcnow() # Avoid having synchronization fall down during DST setback
processed_user_count = Sync.PerformGlobalSync(heartbeat_callback=sync_heartbeat, version=WorkerVersion)
RecycleInterval -= processed_user_count
# When there's no queue, all the workers sit sending 1000s of the queries to the database server
if processed_user_count == 0:
# Put this before the recycle shutdown, otherwise it'll quit and get rebooted ASAP
remaining_cycle_time = NoQueueMinCycleTime - (datetime.utcnow() - cycleStart)
if remaining_cycle_time > timedelta(0):
print("Pausing for %ss" % remaining_cycle_time.total_seconds())
sync_heartbeat("idle-spin")
time.sleep(remaining_cycle_time.total_seconds())
if RecycleInterval <= 0:
break
sync_heartbeat("idle")
print("Sync worker shutting down cleanly")
db.sync_workers.remove({"Process": os.getpid(), "Host": socket.gethostname()})
print("Closing database connections")
close_connections()
sys.stdout.flush()
| Python | 0.000001 |
6d134c2a870150477ecc41edbab272e75462bbcd | Add benchmark script | tests/bench.py | tests/bench.py |
import os
import re
import time
root = os.path.dirname(__file__)
known = []
def listdir(folder):
    """Return the names of the '.text' benchmark documents under *folder*.

    *folder* is resolved relative to this file's directory.  The result is
    materialised as a list (the original returned a lazy, single-use
    ``filter`` object on Python 3) so callers can iterate it repeatedly.
    """
    folder = os.path.join(root, folder)
    return [name for name in os.listdir(folder) if name.endswith('.text')]
def mistune_runner(content):
    # Render *content* (markdown text) to HTML with mistune's default setup.
    import mistune
    return mistune.markdown(content)
def misaka_runner(content):
    # Render *content* with misaka, enabling extensions (tables, fenced code,
    # autolink, strikethrough, ...) so the benchmark is roughly comparable
    # with the mistune runner.
    import misaka
    extensions = (
        misaka.EXT_NO_INTRA_EMPHASIS | misaka.EXT_TABLES |
        misaka.EXT_FENCED_CODE | misaka.EXT_AUTOLINK |
        misaka.EXT_STRIKETHROUGH
    )
    md = misaka.Markdown(misaka.HtmlRenderer(), extensions=extensions)
    return md.render(content)
def bench(runner=None):
    """Render every document in cases/ and extra/ 100 times with *runner*.

    :param runner: callable taking markdown text; defaults to mistune_runner
    :return: elapsed wall-clock time in seconds
    """
    if runner is None:
        runner = mistune_runner
    cases = []
    for subdir in ('cases', 'extra'):
        for name in listdir(subdir):
            # Use a context manager so handles are closed promptly (the
            # original leaked one open file per benchmark document).
            with open(os.path.join(root, subdir, name), 'r') as f:
                cases.append(f.read())
    begin = time.time()
    # Fixed workload: 100 passes over the whole corpus.
    for _ in range(100):
        for text in cases:
            runner(text)
    return time.time() - begin
print('misaka', bench(misaka_runner))
print('mistune', bench())
| Python | 0.000001 | |
f5970d1488d28f27c5f20dd11619187d0c13c960 | Add simple windows registry read/write functions | os/win_registry.py | os/win_registry.py | import _winreg
keyName = "myKey"
def write_to_registry():
    """Create HKCU\\Software\\<keyName> and write the REG_SZ value 'myVal'.

    Errors are printed rather than raised (demo-style error handling).
    """
    try:
        key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName)
        try:
            _winreg.SetValueEx(key, "myVal", 0, _winreg.REG_SZ, "This is a value.")
            print("value created")
        finally:
            # The original never closed the handle returned by CreateKey.
            _winreg.CloseKey(key)
    except Exception as e:
        print(e)
def read_from_registry():
    """Open HKCU\\Software\\<keyName> read-only and print its 'myVal' value."""
    sub_key = "Software\\" + keyName
    try:
        with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, sub_key, 0, _winreg.KEY_READ) as key:
            if key:
                value = _winreg.QueryValueEx(key, "myVal")
                print("Read from registry: ", value)
    except Exception as e:
        print(e)
if __name__ == '__main__':
write_to_registry()
read_from_registry()
| Python | 0.000001 | |
a37007e03747395c12cc4bc34c761aa3253f7599 | Add tests folder | tests/__init__.py | tests/__init__.py | # -*- coding: utf-8 -*-
| Python | 0.000001 | |
d046bc3be27c39ca70a45d92939a2aa2444f3195 | test examples | test/examples/test_examples.py | test/examples/test_examples.py | """
Runs all example scripts. Only tests whether examples can be executed.
"""
import pytest
import os
import subprocess
import glob
import sys
# set environment flag
# can be used in examples to reduce cpu cost
os.environ['THETIS_REGRESSION_TEST'] = "1"
exclude_files = [
'baroclinic_eddies/diagnostics.py',
'baroclinic_eddies/submitRuns.py',
'bottomFriction/plot_results.py',
'columbia_plume/atm_forcing.py',
'columbia_plume/bathymetry.py',
'columbia_plume/cre-plume.py',
'columbia_plume/diagnostics.py',
'columbia_plume/plot_elevation_ts.py',
'columbia_plume/roms_forcing.py',
'columbia_plume/test_bathy_smoothing.py',
'columbia_plume/tidal_forcing.py',
'columbia_plume/timeseries_forcing.py',
'dome/diagnostics.py',
'dome/dome_setup.py',
'dome/plot_histogram.py',
'katophillips/plot_results.py',
'lockExchange/diagnostics.py',
'lockExchange/plotting.py',
'lockExchange/submitRuns.py',
'tidalfarm/tidalfarm.py',
]
cwd = os.path.abspath(os.path.dirname(__file__))
examples_dir = os.path.abspath(os.path.join(cwd, '..', '..', 'examples'))
exclude_files = [os.path.join(examples_dir, f) for f in exclude_files]
all_examples = glob.glob(os.path.join(examples_dir, '*/*.py'))
all_examples = [f for f in all_examples if f not in exclude_files]
@pytest.fixture(params=all_examples,
                ids=lambda x: os.path.basename(x))
def example_file(request):
    # Parametrized fixture: yields the absolute path of each example script,
    # one test per script (test id = the script's file name).
    return os.path.abspath(request.param)
def test_examples(example_file, tmpdir, monkeypatch):
    """Smoke test: each example script must run to completion without error."""
    assert os.path.isfile(example_file), 'File not found {:}'.format(example_file)
    # change workdir to temporary dir so any output files land in tmpdir
    monkeypatch.chdir(tmpdir)
    # check_call raises CalledProcessError (failing the test) on non-zero exit
    subprocess.check_call([sys.executable, example_file])
| Python | 0 | |
b5f3a96c9da9cb957fab1849c658c6982bcf0678 | Create MockEnv helper for unit tests | tests/mock_env.py | tests/mock_env.py | """
Copyright (c) 2020 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals, absolute_import
from flexmock import flexmock
from atomic_reactor.constants import PLUGIN_BUILD_ORCHESTRATE_KEY
from atomic_reactor.plugin import (PreBuildPluginsRunner,
BuildStepPluginsRunner,
PostBuildPluginsRunner,
PrePublishPluginsRunner,
ExitPluginsRunner)
from atomic_reactor.inner import DockerBuildWorkflow
from atomic_reactor.plugins.pre_reactor_config import (ReactorConfig,
ReactorConfigPlugin,
WORKSPACE_CONF_KEY)
from tests.constants import TEST_IMAGE, MOCK_SOURCE
from tests.stubs import StubSource, StubInsideBuilder
class MockEnv(object):
    """
    Mock environment for unit tests.

    Provides methods for setting up workflow (DockerBuildWorkflow) and runner (PluginsRunner)
    for a specific test scenario.

    Example usage:
    >>> runner = (MockEnv()
    >>>           .for_plugin('prebuild', 'my_plugin')
    >>>           .set_scratch(True)
    >>>           .make_orchestrator()
    >>>           .create_runner(docker_tasker))  # docker_tasker is a fixture
    >>> runner.run()
    """

    # All recognized plugin phases, in build order.
    _plugin_phases = ('prebuild', 'buildstep', 'postbuild', 'prepublish', 'exit')

    # Maps phase name -> PluginsRunner subclass used to run that phase.
    _runner_for_phase = {
        'prebuild': PreBuildPluginsRunner,
        'buildstep': BuildStepPluginsRunner,
        'postbuild': PostBuildPluginsRunner,
        'prepublish': PrePublishPluginsRunner,
        'exit': ExitPluginsRunner,
    }

    # Maps phase name -> workflow attribute that stores that phase's results.
    _results_for_phase = {
        'prebuild': 'prebuild_results',
        'buildstep': 'buildstep_result',
        'postbuild': 'postbuild_results',
        'prepublish': 'prepub_results',
        'exit': 'exit_results',
    }

    # Maps phase name -> workflow attribute holding that phase's plugin conf.
    _plugins_for_phase = {phase: phase + '_plugins_conf' for phase in _plugin_phases}

    def __init__(self):
        self.workflow = DockerBuildWorkflow(TEST_IMAGE, source=MOCK_SOURCE)
        self.workflow.source = StubSource()
        self.workflow.builder = StubInsideBuilder().for_workflow(self.workflow)
        self.workflow.builder.tasker = flexmock()
        # Phase and key of the plugin configured via for_plugin(); None until set.
        self._phase = None
        self._plugin_key = None

    def create_runner(self, docker_tasker):
        """
        Create runner for current plugin (configured using for_plugin())

        :param docker_tasker: docker_tasker fixture from conftest
        :return: PluginsRunner instance (instance of appropriate subclass based on plugin phase)
        """
        if self._phase is None:
            raise ValueError('No plugin configured (use for_plugin() to configure one)')
        runner_cls = self._runner_for_phase[self._phase]
        plugins_conf = getattr(self.workflow, self._plugins_for_phase[self._phase])
        return runner_cls(docker_tasker, self.workflow, plugins_conf)

    def for_plugin(self, phase, plugin_key, args=None):
        """
        Set up environment for the specified plugin

        :param phase: str, plugin phase (prebuild, buildstep, postbuild, prepublish, exit)
        :param plugin_key: str, plugin key
        :param args: dict, optional plugin arguments
        """
        # Only one plugin-under-test is supported per MockEnv instance.
        if self._phase is not None:
            msg = 'Plugin already configured: {} ({} phase)'.format(self._plugin_key, self._phase)
            raise ValueError(msg)
        self._validate_phase(phase)
        self._phase = phase
        self._plugin_key = plugin_key
        plugins_conf = [self._make_plugin_conf(plugin_key, args)]
        setattr(self.workflow, self._plugins_for_phase[phase], plugins_conf)
        return self

    def set_scratch(self, scratch):
        """
        Set "scratch" user param to specified value
        """
        self.workflow.user_params['scratch'] = scratch
        return self

    def set_isolated(self, isolated):
        """
        Set "isolated" user param to specified value
        """
        self.workflow.user_params['isolated'] = isolated
        return self

    def make_orchestrator(self, orchestrator_args=None):
        """
        Make plugin think it is running in orchestrator

        :param orchestrator_args: dict, optional orchestrate_build plugin arguments
        """
        # Orchestrator status is signalled by the presence of the
        # orchestrate_build plugin in the buildstep phase.
        if self.workflow.buildstep_plugins_conf:
            raise ValueError("Buildstep plugin already configured, cannot make orchestrator")
        self.workflow.buildstep_plugins_conf = [
            self._make_plugin_conf(PLUGIN_BUILD_ORCHESTRATE_KEY, orchestrator_args)
        ]
        return self

    def set_plugin_result(self, phase, plugin_key, result):
        """
        Set result of the specified plugin (stored in workflow)

        :param phase: str, plugin phase
        :param plugin_key: str, plugin key
        :param result: any, value to set as plugin result
        """
        self._validate_phase(phase)
        results = getattr(self.workflow, self._results_for_phase[phase])
        results[plugin_key] = result
        return self

    def set_plugin_args(self, args, phase=None, plugin_key=None):
        """
        Set plugin arguments (stored in plugins configuration in workflow).

        By default, sets args for the current plugin (configured using for_plugin()).
        Phase and plugin key can be specified to set args for a different plugin.

        If overriding phase and plugin key, the specified plugin must already be present
        in the plugins configuration. Typically, only the current plugin and the
        orchestrate_build plugin (after make_orchestrator()) will be present.

        :param args: dict, arguments for plugin
        :param phase: str, optional plugin phase
        :param plugin_key: str, optional plugin key
        """
        phase = phase or self._phase
        plugin_key = plugin_key or self._plugin_key
        plugin = self._get_plugin(phase, plugin_key)
        plugin['args'] = args
        return self

    def set_reactor_config(self, config):
        """
        Set reactor config map (in the ReactorConfigPlugin's workspace).

        :param config: dict or ReactorConfig, if dict, will be converted to ReactorConfig
        """
        if not isinstance(config, ReactorConfig):
            config = ReactorConfig(config)
        workspace = self._get_reactor_config_workspace()
        workspace[WORKSPACE_CONF_KEY] = config
        return self

    @property
    def reactor_config(self):
        """
        Get reactor config map (from the ReactorConfigPlugin's workspace)

        If config does not exist, it will be created, i.e. you can do:
        >>> env = MockEnv()
        >>> env.reactor_config.conf['sources_command'] = 'fedpkg sources'

        :return: ReactorConfig instance
        """
        workspace = self._get_reactor_config_workspace()
        return workspace.setdefault(WORKSPACE_CONF_KEY, ReactorConfig())

    def _validate_phase(self, phase):
        # Reject typo'd/unknown phase names early with a helpful message.
        if phase not in self._plugin_phases:
            phases = ', '.join(self._plugin_phases)
            raise ValueError('Invalid plugin phase: {} (valid: {})'.format(phase, phases))

    def _make_plugin_conf(self, name, args):
        # Build the {'name': ..., 'args': ...} dict expected by PluginsRunner;
        # 'args' is omitted entirely when no arguments were given.
        plugin = {'name': name}
        if args:
            plugin['args'] = args
        return plugin

    def _get_plugin(self, phase, plugin_key):
        # Find an already-configured plugin's conf dict, or fail loudly.
        self._validate_phase(phase)
        plugins = getattr(self.workflow, self._plugins_for_phase[phase]) or []
        for plugin in plugins:
            if plugin['name'] == plugin_key:
                return plugin
        raise ValueError('No such plugin: {} (for {} phase)'.format(plugin_key, phase))

    def _get_reactor_config_workspace(self):
        # Lazily create the ReactorConfigPlugin workspace dict on the workflow.
        return self.workflow.plugin_workspace.setdefault(ReactorConfigPlugin.key, {})
| Python | 0 | |
b872aaa2837e7cd72c36f2b3fd7679106fda57b4 | Add test cli | tests/test_cli.py | tests/test_cli.py | import unittest
import sys, os
import cli
from io import StringIO
io = StringIO()
class TestBuildInCommands(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_normal(self):
    # Running the CLI with no arguments should print the banner; stdout is
    # temporarily redirected into the module-level StringIO to capture it.
    sys.stdout = io
    # $ iroha-ya-cli
    cli.main.main(['iroha-ya-cli'])
    sys.stdout = sys.__stdout__
    self.assertTrue('Iroha-mizuki-cli' in io.getvalue())
def test_config(self):
sys.stdout = io
# $ iroha-ya-cli config
cli.main.main(['iroha-ya-cli','config'])
sys.stdout = sys.__stdout__
self.assertTrue('Iroha-mizuki-cli' in io.getvalue()) | Python | 0 | |
89cab892b1cb6b93521a9e4cb5321b90699c2943 | Create main.py | main.py | main.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import psycopg2
import sys
from xlrd import open_workbook
con = None
program_list =[]
measure_list = []
##This is a bit of code that will parse an excel file and read records to a postgreSQL db. For this example the data consists
##of clinical quality measures and the government programs asscociated with them.
##Here the function opens the excel workbook .xlsx and the specific spreadsheet then reads the list of program names at the
##top of the sheet. It iterates over the correct cells using a for loop over a range of the correct cells and returns a list
##of the desired cell values.
def readProgs():
    """Read the program columns (name, description, link) from the workbook.

    Programs live in columns 5..ncols-1 of sheet index 1, rows 0-2.
    Appends (name, description, link) tuples to the module-level
    program_list and returns it.
    """
    # The original passed 'r' as open_workbook()'s second positional argument,
    # which is xlrd's *logfile* parameter, not a file mode -- drop it.
    book = open_workbook('aug_3_measures.xlsx')
    sheet = book.sheet_by_index(1)
    for col_index in range(5, sheet.ncols):
        pName = sheet.cell(0, col_index).value
        pDes = sheet.cell(1, col_index).value
        pLink = sheet.cell(2, col_index).value
        program_list.append((pName, pDes, pLink))
    return program_list
##Here a connection with a db is opened using the psycopg2 module. Then a create query for a table called programs is written here.
##The readProgs() function is then called to return the correct cell values. Then the function disects the the list and puts
##the values into an insert statement using a for loop. Lastly the .commit() writes the statement.
def writeProgs():
    """Create the programs table and insert one row per program.

    Uses parameterized queries: the original built INSERT statements by
    string concatenation, which breaks on any apostrophe in the spreadsheet
    text and is SQL-injectable.
    """
    con = psycopg2.connect(database='chriscalhoun', user='chriscalhoun')
    cur = con.cursor()
    cur.execute("CREATE TABLE programs(\
                id serial PRIMARY KEY,\
                program_name VARCHAR(200), \
                program_description TEXT, \
                program_link VARCHAR(200))")
    for program_name, program_description, program_link in readProgs():
        cur.execute("INSERT INTO programs(program_name, program_description, program_link)"
                    " VALUES (%s, %s, %s)",
                    (program_name, program_description, program_link))
    con.commit()
##Here we are grabbing a different section of data from the excel spreadsheet using the same method as the readProgs() and returning
##a list with the desired values.
def readMeasures():
    """Read the per-measure rows (row 3 onward) from sheet index 1.

    Appends (description, care_setting, nqf_id, pqrs_id, cms_id) tuples to
    the module-level measure_list and returns it.  Text is coerced to ASCII
    with non-ASCII characters dropped (e.g. the (R) registered-mark symbol).
    """
    # Fixes vs original: drop the stray 'r' passed as open_workbook's logfile
    # argument, and remove the no-op u'\xae'.encode('utf-8') statement whose
    # result was discarded every iteration.
    book = open_workbook('aug_3_measures.xlsx')
    sheet = book.sheet_by_index(1)
    for row_index in range(3, sheet.nrows):
        measure_description = sheet.cell(row_index, 3).value
        care_setting = sheet.cell(row_index, 4).value
        cms_id = sheet.cell(row_index, 0).value
        nqf_id = sheet.cell(row_index, 1).value
        pqrs_id = sheet.cell(row_index, 2).value
        measure = (measure_description.encode('ascii', 'ignore'),
                   care_setting.encode('ascii', 'ignore'),
                   str(nqf_id),
                   str(pqrs_id),
                   str(cms_id))
        measure_list.append(measure)
    return measure_list
##Here we are opening a db and creating a table again called measures this time. Then calling the readMeasures() function
##and using a for loop to itereate over the returned list and writing multiple insert statements before commiting the records.
def writeMeasures():
    """Create the measures table and insert one row per measure.

    Uses parameterized queries instead of the original string-concatenated
    SQL, which broke on apostrophes in measure text and was injectable.
    """
    con = psycopg2.connect(database='chriscalhoun', user='chriscalhoun')
    cur = con.cursor()
    cur.execute("CREATE TABLE measures(\
                measure_id serial PRIMARY KEY NOT NULL,\
                measure_description TEXT NOT NULL,\
                care_setting TEXT,\
                nqf_id varchar(20), \
                pqrs_id varchar(30),\
                cms_id varchar(60))")
    for measure_description, care_setting, nqf_id, pqrs_id, cms_id in readMeasures():
        cur.execute("INSERT INTO measures(measure_description,"
                    " care_setting, nqf_id, pqrs_id, cms_id)"
                    " VALUES (%s, %s, %s, %s, %s)",
                    (measure_description, care_setting, nqf_id, pqrs_id, cms_id))
    con.commit()
## Here we are grabbing the data from the excel sheet and checking the cell for certain values. In this example either a cell with
##data or a blank cell. Be aware that excel and python data types can vary so it is usually the case that data in your excel file
##will need to be converted to text format.
##The function goes through the correct cells in the specified range of rows and cols. Then checks if the cell has anything in it.
##If the cell has data in it the data is appended to a list along with the measure name and the program for that measure.
##Then the list is returned.
def measure_program_check():
    """Scan the measure/program grid (sheet index 0) for filled-in cells.

    A non-empty cell at (row, col) means the measure in that row belongs to
    the program heading that column.  Returns a list of
    (True, program_name, measure_description) tuples, ASCII-coerced.
    """
    # Fixes vs original: drop the stray 'r' logfile argument to open_workbook
    # and delete the commented-out False-branch dead code.
    checkProgram_list = []
    book = open_workbook('aug_3_measures.xlsx')
    sheet = book.sheet_by_index(0)
    for row_index in range(3, sheet.nrows):
        for col_index in range(5, 26):
            if sheet.cell(row_index, col_index).value != '':
                pName = sheet.cell(0, col_index).value
                mDes = sheet.cell(row_index, 3).value
                checkProgram_list.append(
                    (True, pName.encode('ascii', 'ignore'), mDes.encode('ascii', 'ignore')))
    return checkProgram_list
##Here we create the table for measure_programs and also create the relationships between the measures and programs tables.
##This table uses to foreign keys as the primary key.
def measure_program_CreateInsert():
    """Create the measure_program join table and populate it.

    Resolves each (program, measure) pairing found in the spreadsheet to
    its database ids and bulk-inserts the (measure_id, program_id) rows.
    """
    con = psycopg2.connect(database='chriscalhoun', user='chriscalhoun')
    cur = con.cursor()
    cur.execute("CREATE TABLE measure_program(measure_id integer REFERENCES measures (measure_id) ON UPDATE RESTRICT NOT NULL,\
                program_id integer REFERENCES programs (id) ON UPDATE RESTRICT,\
                value BOOLEAN,\
                PRIMARY KEY (measure_id, program_id))")
    checkProgram_list = measure_program_check()
    # Build O(1) lookup tables instead of rescanning both result sets for
    # every spreadsheet entry.
    cur.execute("SELECT id, program_name FROM programs")
    prog_ids = {name: prog_id for prog_id, name in cur.fetchall()}
    cur.execute("SELECT measure_id, measure_description FROM measures")
    measure_ids = {desc: m_id for m_id, desc in cur.fetchall()}
    measure_program = []
    for _, prog_name, measure_desc in checkProgram_list:
        # BUG FIX: the original reused m_id/p_id left over from earlier
        # iterations whenever a lookup found no match, silently pairing
        # measures with the wrong programs.  Skip unmatched entries instead.
        m_id = measure_ids.get(measure_desc)
        p_id = prog_ids.get(prog_name)
        if m_id is not None and p_id is not None:
            measure_program.append((m_id, p_id))
    query = "INSERT INTO measure_program (measure_id, program_id) VALUES(%s, %s)"
    cur.executemany(query, measure_program)
    con.commit()
def main():
    """Run the full import: programs, measures, then their join table."""
    # The original also called measure_program_check() here and discarded
    # its return value; measure_program_CreateInsert() performs that scan
    # itself, so the extra workbook pass was pure wasted work.
    writeProgs()
    writeMeasures()
    measure_program_CreateInsert()
try:
main()
except psycopg2.DatabaseError, e:
print('Error %s' % e)
sys.exit(1)
finally:
if con:
con.close()
| Python | 0.000001 | |
3d7bb0dfcbfda9c99ee2372394959667c76bb83f | Add first .py file to project | main.py | main.py |
print("Hello!") | Python | 0 | |
59a57a25ff925bd1ce6d467d316ec478847b58ad | Create combinations.py | combinations.py | combinations.py | #!/usr/bin/env python
from string import uppercase, lowercase, maketrans
import math, sys
class combinations():
    """Helper exposing the binomial coefficient n-choose-k."""

    def combs(self, total, choice):
        """Return total! / (choice! * (total - choice)!)."""
        numerator = math.factorial(total)
        denominator = math.factorial(choice) * math.factorial(total - choice)
        return numerator / denominator
if __name__ == '__main__':
try:
total = sys.argv[1]
choice = sys.argv[2]
total = int(total, 0)
choice = int(choice, 0)
ops = combinations()
result = ops.combs(total, choice)
print result
except IndexError:
print('Usage: combinations.py <int of total> <int to choice>')
| Python | 0 | |
2c63d77428b84c7d1be1c861079d39d641d51fcf | add script to scrap stock data and save them locally | stock_scraping/stock_price_scraping_to_local.py | stock_scraping/stock_price_scraping_to_local.py | '''
This script helps you scrap stock data avaliable on Bloomberg Finance
and store them locally.
Please obey applicable local and federal laws and applicable API term of use
when using this scripts. I, the creater of this script, will not be responsible
for any legal issues resulting from the use of this script.
@author Gan Tu
@version python 2 or python 3
[HOW TO CHANGE PYTHON VERSION]
This script by default should be run by Python 2.
To use this in Python 3, change the followings:
1) change ALL occurrences of "urllib" to "urllib.request".
'''
import urllib
import re
import json
import os
# Stock Symbols Initialization
# Feel free to modify the file source to contain stock symbols you plan to scrap fro
stocks = open("nasdaq_symbols.txt", "r").read().split("\n")
# URL Initialization
urlPrefix = "http://www.bloomberg.com/markets/api/bulk-time-series/price/"
urlAffix = "%3AUS?timeFrame="
# Only four of these are valid options for now
# 1_Day will scrap minute by minute data for one day, while others will be daily close price
# Feel free to modify them for your own need
options = ["1_DAY", "1_MONTH", "1_YEAR", "5_YEAR"]
def setup():
try:
os.mkdir("data")
except Exception as e:
pass
for option in options:
try:
os.mkdir("data/" + option + "/")
except Exception as e:
pass
def scrap():
i = 0
while i < len(stocks):
for option in options:
file = open("data/" + option + "/" + stocks[i] + ".txt", "w")
file.close()
htmltext = urllib.urlopen(urlPrefix + stocks[i] + urlAffix + option)
try:
data = json.load(htmltext)[0]["price"]
key = "date"
if option == "1_DAY":
key = "dateTime"
file = open("data/" + option + "/" + stocks[i] + ".txt", "a")
for price in data:
file.write(stocks[i] + "," + price[key] + "," + str(price["value"]) + "\n")
file.close()
except Exception as e:
pass
i += 1
if __name__ == "__main__":
setup()
scrap()
| Python | 0 | |
72fe45ca6e6cd13b0b5fbb250ce769f5ec883e90 | Add awful pixiv command. | joku/cogs/pixiv.py | joku/cogs/pixiv.py | """
Cog for interacting with the Pixiv API.
"""
import random
import shutil
import requests
from discord.ext import commands
from io import BytesIO
from pixivpy3 import AppPixivAPI
from asyncio_extras import threadpool
from pixivpy3 import PixivAPI
from pixivpy3 import PixivError
from joku.bot import Context
class EncodingAwarePixivAPI(PixivAPI):
    """
    A custom encoding-aware Pixiv API.
    """

    def requests_call(self, method, url, headers=None, params=None, data=None, stream=False):
        """ requests http/https call for Pixiv API """
        if headers is None:
            headers = {}
        try:
            if method == 'GET':
                r = requests.get(url, params=params, headers=headers, stream=stream, **self.requests_kwargs)
            elif method == 'POST':
                r = requests.post(url, params=params, data=data, headers=headers, stream=stream,
                                  **self.requests_kwargs)
            elif method == 'DELETE':
                r = requests.delete(url, params=params, data=data, headers=headers, stream=stream,
                                    **self.requests_kwargs)
            else:
                raise PixivError('Unknown method: %s' % method)
            # Force UTF-8 decoding of the response body -- the single change
            # this subclass exists to make over the stock PixivAPI.
            r.encoding = "utf-8"
            return r
        except Exception as e:
            # Wrap any transport-level failure in PixivError, preserving the
            # original exception as the cause.
            raise PixivError('requests %s %s error: %s' % (method, url, e)) from e
class Pixiv(object):
    # Discord cog exposing Pixiv search commands.

    def __init__(self, bot):
        self.bot = bot
        # Authenticated API client; auth happens lazily on first search.
        self._pixiv_api = EncodingAwarePixivAPI()

    @commands.group(pass_context=True)
    async def pixiv(self, ctx: Context):
        """
        Commands for interacting with the Pixiv API.
        """

    @pixiv.command(pass_context=True)
    async def search(self, ctx: Context, *, tag: str):
        """
        Searches Pixiv using the specified tag.
        """
        await ctx.bot.type()
        # Blocking pixivpy calls run in a thread pool to keep the event loop free.
        async with threadpool():
            if not self._pixiv_api.access_token:
                # Lazy auth using credentials from the bot's "pixiv" config section.
                self._pixiv_api.auth(**ctx.bot.config.get("pixiv", {}))
            data = self._pixiv_api.search_works(tag, per_page=100)
        if data.get("status") == "failure":
            await ctx.bot.say(":x: Failed to download from pixiv.")
            return
        # 'response' is the actual data key
        illusts = data["response"]
        if not illusts:
            await ctx.bot.say(":x: No results found.")
            return
        # Sort the illusts by score, keep the top 30, then pick one at random.
        illusts = sorted(illusts, key=lambda x: x["stats"]["score"], reverse=True)[:30]
        item = random.SystemRandom().choice(illusts)
        # Get some useful attributes out.
        obb = {
            "id": item["id"],
            "title": item["title"],
            "image": item["image_urls"]["large"],
            "username": item["user"]["name"],
            "url": "http://www.pixiv.net/member_illust.php?mode=medium&illust_id={}".format(item["id"]),
            "total_bookmarks": item["stats"]["favorited_count"]["public"],
            "views": item["stats"]["views_count"],
            "score": item["stats"]["score"]
        }
        async with threadpool():
            # Download the image (Referer header is required by Pixiv's CDN).
            r = self._pixiv_api.requests_call('GET', obb["image"],
                                              headers={'Referer': "https://app-api.pixiv.net/"},
                                              stream=True)
            # Copy it into BytesIO, which will be uploaded to Discord.
            fobj = BytesIO()
            shutil.copyfileobj(r.raw, fobj)
        # Seek back, so that it actually uploads a file.
        fobj.seek(0)
        await ctx.bot.say("`{title}`, by **{username}** (Illustration ID `{id}`):\n"
                          "\n**{score}** score, **{total_bookmarks}** bookmarks, **{views}** views"
                          "\n<{url}>".format(**obb))
        await ctx.bot.type()
        await ctx.bot.upload(fobj, filename=obb["image"].split("/")[-1])
def setup(bot):
    # discord.py extension entry point: register the Pixiv cog on the bot.
    bot.add_cog(Pixiv(bot))
| Python | 0.000003 | |
ba06683866ce8e4e3bccd4acebd6ec2278acfeaa | Add Litecoin testnet, and Litecoin BIP32 prefixes. | pycoin/networks.py | pycoin/networks.py | from collections import namedtuple
from .serialize import h2b
NetworkValues = namedtuple('NetworkValues',
('network_name', 'subnet_name', 'code', 'wif', 'address',
'pay_to_script', 'prv32', 'pub32'))
NETWORKS = (
NetworkValues("Bitcoin", "mainnet", "BTC", b'\x80', b'\0', b'\5', h2b("0488ADE4"), h2b("0488B21E")),
NetworkValues("Bitcoin", "testnet3", "XTN", b'\xef', b'\x6f', b'\xc4',
h2b("04358394"), h2b("043587CF")),
NetworkValues("Litecoin", "mainnet", "LTC", b'\xb0', b'\x30', None, h2b('019d9cfe'), h2b('019da462')),
NetworkValues("Litecoin", "testnet", "XLT", b'\xb1', b'\x31', None, h2b('0436ef7d'), h2b('0436f6e1')),
NetworkValues("Dogecoin", "mainnet", "DOGE", b'\x9e', b'\x1e', b'\x16',
h2b("02fda4e8"), h2b("02fda923")),
# BlackCoin: unsure about bip32 prefixes; assuming will use Bitcoin's
NetworkValues("Blackcoin", "mainnet", "BLK", b'\x99', b'\x19', None, h2b("0488ADE4"), h2b("0488B21E")),
)
# Map from short code to details about that network.
NETWORK_NAME_LOOKUP = dict((i.code, i) for i in NETWORKS)
# All network names, return in same order as list above: for UI purposes.
NETWORK_NAMES = [i.code for i in NETWORKS]
DEFAULT_NETCODES = NETWORK_NAMES
def _lookup(netcode, property):
    # Fetch one field of the NetworkValues tuple registered for *netcode*,
    # or None when the netcode is unknown.
    network = NETWORK_NAME_LOOKUP.get(netcode)
    if network is None:
        return None
    return getattr(network, property)
def network_name_for_netcode(netcode):
return _lookup(netcode, "network_name")
def subnet_name_for_netcode(netcode):
return _lookup(netcode, "subnet_name")
def full_network_name_for_netcode(netcode):
    """Return "<network> <subnet>" for *netcode*, or None if unknown.

    The original indexed NETWORK_NAME_LOOKUP directly and raised KeyError
    for unknown netcodes, unlike every sibling helper (which returns None
    via _lookup); use .get for consistent behavior.
    """
    network = NETWORK_NAME_LOOKUP.get(netcode)
    if network:
        return "%s %s" % (network.network_name, network.subnet_name)
def wif_prefix_for_netcode(netcode):
return _lookup(netcode, "wif")
def address_prefix_for_netcode(netcode):
return _lookup(netcode, "address")
def pay_to_script_prefix_for_netcode(netcode):
return _lookup(netcode, "pay_to_script")
def prv32_prefix_for_netcode(netcode):
return _lookup(netcode, "prv32")
def pub32_prefix_for_netcode(netcode):
return _lookup(netcode, "pub32")
| from collections import namedtuple
from .serialize import h2b
NetworkValues = namedtuple('NetworkValues',
('network_name', 'subnet_name', 'code', 'wif', 'address',
'pay_to_script', 'prv32', 'pub32'))
NETWORKS = (
NetworkValues("Bitcoin", "mainnet", "BTC", b'\x80', b'\0', b'\5', h2b("0488ADE4"), h2b("0488B21E")),
NetworkValues("Bitcoin", "testnet3", "XTN", b'\xef', b'\x6f', b'\xc4',
h2b("04358394"), h2b("043587CF")),
NetworkValues("Litecoin", "mainnet", "LTC", b'\xb0', b'\x30', None, None, None),
NetworkValues("Dogecoin", "mainnet", "DOGE", b'\x9e', b'\x1e', b'\x16',
h2b("02fda4e8"), h2b("02fda923")),
# BlackCoin: unsure about bip32 prefixes; assuming will use Bitcoin's
NetworkValues("Blackcoin", "mainnet", "BLK", b'\x99', b'\x19', None, h2b("0488ADE4"), h2b("0488B21E")),
)
# Map from short code to details about that network.
NETWORK_NAME_LOOKUP = dict((i.code, i) for i in NETWORKS)
# All network names, return in same order as list above: for UI purposes.
NETWORK_NAMES = [i.code for i in NETWORKS]
DEFAULT_NETCODES = NETWORK_NAMES
def _lookup(netcode, property):
# Lookup a specific value needed for a specific network
network = NETWORK_NAME_LOOKUP.get(netcode)
if network:
return getattr(network, property)
return None
def network_name_for_netcode(netcode):
return _lookup(netcode, "network_name")
def subnet_name_for_netcode(netcode):
return _lookup(netcode, "subnet_name")
def full_network_name_for_netcode(netcode):
network = NETWORK_NAME_LOOKUP[netcode]
if network:
return "%s %s" % (network.network_name, network.subnet_name)
def wif_prefix_for_netcode(netcode):
return _lookup(netcode, "wif")
def address_prefix_for_netcode(netcode):
return _lookup(netcode, "address")
def pay_to_script_prefix_for_netcode(netcode):
return _lookup(netcode, "pay_to_script")
def prv32_prefix_for_netcode(netcode):
return _lookup(netcode, "prv32")
def pub32_prefix_for_netcode(netcode):
return _lookup(netcode, "pub32")
| Python | 0 |
1de668219f618a0632fac80fd892a0a229b8fa05 | Solve Code Fights addition without carrying problem | CodeFights/additionWithoutCarrying.py | CodeFights/additionWithoutCarrying.py | #!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem
def additionWithoutCarrying(param1, param2):
s1, s2 = str(param1), str(param2)
shorter = s1 if len(s1) < len(s2) else s2
longer = s2 if shorter == s1 else s1
if len(shorter) < len(longer):
shorter = shorter.zfill(len(longer))
return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
zip(shorter, longer)]))
def main():
tests = [
[456, 1734, 1180],
[99999, 0, 99999],
[999, 999, 888],
[0, 0, 0],
[54321, 54321, 8642]
]
for t in tests:
res = additionWithoutCarrying(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: additionWithoutCarrying({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: additionWithoutCarrying({}, {}) returned {},"
"answer: {}".format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
| Python | 0.000002 | |
ed5c27623711a7f3b798aed9c0f7cdbdcebc0dcd | test python interpreter | test/test_interpreter_layer.py | test/test_interpreter_layer.py | # This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
import cocos
from cocos.director import director
import pyglet
if __name__ == "__main__":
director.init()
interpreter_layer = cocos.layer.InterpreterLayer()
main_scene = cocos.scene.Scene(interpreter_layer)
director.run(main_scene)
| Python | 0.00001 | |
7f4bd900d1e647fe017ce4c01e279dd41a71a349 | Add management command to set SoftwareSecure verification status. | lms/djangoapps/verify_student/management/commands/set_software_secure_status.py | lms/djangoapps/verify_student/management/commands/set_software_secure_status.py | """
Manually set Software Secure verification status.
"""
import sys
from django.core.management.base import BaseCommand
from verify_student.models import (
SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)
class Command(BaseCommand):
"""
Command to trigger the actions that would normally follow Software Secure
returning with the results of a photo verification.
"""
args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"
def handle(self, *args, **kwargs): # pylint: disable=unused-argument
from verify_student.views import _set_user_requirement_status
status_to_set = args[0]
receipt_id = args[1]
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
self.stderr.write(
'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
)
sys.exit(1)
if status_to_set == 'approved':
self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
attempt.approve()
_set_user_requirement_status(attempt, 'reverification', 'satisfied')
elif status_to_set == 'denied':
self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
if len(args) >= 3:
reason_for_denial = args[2]
else:
reason_for_denial = 'Denied via management command.'
attempt.deny(reason_for_denial)
_set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)
else:
self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
id=receipt_id, status=status_to_set
))
sys.exit(1)
checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
VerificationStatus.add_status_from_checkpoints(
checkpoints=checkpoints,
user=attempt.user,
status=status_to_set
)
| Python | 0 | |
4dd66150c922e1c700fad74727955ef72c045f37 | Add Find Command MCEdit filter | minecraft/FindCommand.py | minecraft/FindCommand.py | # MCEdit filter
from albow import alert
displayName = "Find Command"
inputs = (
("Command:", ("string", "value=")),
)
def perform(level, box, options):
command = options["Command:"]
n = 0
result = ""
for (chunk, slices, point) in level.getChunkSlices(box):
for e in chunk.TileEntities:
x = e["x"].value
y = e["y"].value
z = e["z"].value
if (x, y, z) in box:
t = e["id"].value
if t == "Control":
c = e["Command"].value
if c.find(command) >= 0:
n += 1
result += "(%d, %d, %d) %s\n" % (x, y, z, c)
result += "(%d)" % n
alert(result)
| Python | 0 | |
5e4ef4737c78b6154596ab8c76c4e60bd840453c | Add component.navbar | src/penn_chime_dash/app/components/navbar.py | src/penn_chime_dash/app/components/navbar.py | # components/navbar.py
import dash_bootstrap_components as dbc
import dash_html_components as html
import dash_core_components as dcc
from ..config import Config
cfg = Config()
navbar = dbc.NavbarSimple(
brand='Penn Med CHIME', # Browser window title
brand_href='/', # index page
children=[
html.Link(
key='penn-med-header',
rel="stylesheet",
href=cfg.PENN_HEADER,
),
dbc.NavItem(
dcc.Link(
'Model',
href='/CHIME',
className='nav-link'
)
),
dbc.NavItem(
dcc.Link(
'Contribute',
href='https://codeforphilly.github.io/chime/',
className='nav-link'
)
),
dbc.NavItem(
dcc.Link(
'Resources',
href='/resources',
className='nav-link'
)
),
dbc.NavItem(
dcc.Link(
'Contact',
href=cfg.PENN_MED_URL,
className='nav-link'
)
),
],
sticky='top',
color='primary',
light=True,
dark=False
)
| Python | 0 | |
eea33e6207da7446e1713eb4d78b76d37ae5eaf2 | Add sample of scheduler using celery | with_celery.py | with_celery.py | from celery import Celery
# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'
app = Celery('pages_celery', broker=HOST)
@app.task
def work(msg):
print msg
# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
| Python | 0 | |
7ca1f6c5d51f5e2fc582603012c3ca5a053ee4eb | Add BLT package (#19410) | var/spack/repos/builtin/packages/blt/package.py | var/spack/repos/builtin/packages/blt/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Blt(Package):
"""BLT is a streamlined CMake-based foundation for Building, Linking and
Testing large-scale high performance computing (HPC) applications."""
homepage = "https://github.com/LLNL/blt"
url = "https://github.com/LLNL/blt/archive/v0.3.6.tar.gz"
git = "https://github.com/LLNL/blt.git"
maintainers = ['davidbeckingsale']
version('develop', branch='develop')
version('main', branch='main')
version('0.3.6', sha256='6276317c29e7ff8524fbea47d9288ddb40ac06e9f9da5e878bf9011e2c99bf71')
version('0.3.5', sha256='68a1c224bb9203461ae6f5ab0ff3c50b4a58dcce6c2d2799489a1811f425fb84')
version('0.3.0', sha256='bb917a67cb7335d6721c997ba9c5dca70506006d7bba5e0e50033dd0836481a5')
version('0.2.5', sha256='3a000f60194e47b3e5623cc528cbcaf88f7fea4d9620b3c7446ff6658dc582a5')
version('0.2.0', sha256='c0cadf1269c2feb189e398a356e3c49170bc832df95e5564e32bdbb1eb0fa1b3')
depends_on('cmake', type='run')
def install(self, spec, prefix):
install_tree('.', prefix)
| Python | 0 | |
4537ab84bb87eeae6b6865b7b9140d5324384e4a | add test cases for address operations | test/test-rpc/TestCase/Account/test_address.py | test/test-rpc/TestCase/Account/test_address.py | import random
from TestCase.MVSTestCase import *
class TestAccount(MVSTestCaseBase):
roles = (Alice,)
need_mine = False
def test_0_new_address(self):
#password error
ec, message = mvs_rpc.new_address(Alice.name, Alice.password+'1')
self.assertEqual(ec, 1000, message)
#check address_count
ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 0)
self.assertEqual(ec, 4004, message)
ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 0x00100000)
self.assertEqual(ec, 4004, message)
ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 11)
self.assertEqual(ec, 0, message)
def test_1_list_addresses(self):
# password error
ec, message = mvs_rpc.list_addresses(Alice.name, Alice.password + '1')
self.assertEqual(ec, 1000, message)
ec, addresses = mvs_rpc.list_addresses(Alice.name, Alice.password)
self.assertEqual(ec, 0, addresses)
addresses.sort()
alice_addresses = Alice.addresslist[:]
alice_addresses.sort()
self.assertEqual(addresses, alice_addresses)
def test_2_check_address(self):
for address in Alice.addresslist:
ec, message = mvs_rpc.check_address(address)
self.assertEqual(ec, 0, message) | Python | 0 | |
de40e15b661806dc75e73bd9f1fc2c37af60b0d3 | test case for geometry utils | testbeam_analysis/tests/test_geometry_utils.py | testbeam_analysis/tests/test_geometry_utils.py | ''' Script to check the correctness of the geometry utils functions (rotation, translation matrices)
'''
import os
import numpy as np
import unittest
from testbeam_analysis import geometry_utils
tests_data_folder = r'tests/test_track_analysis/'
class TestTrackAnalysis(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls): # remove created files
pass
def test_transformations(self): # Transforms from global to local system and back and checks for equality
position = np.array([0, 0, 0]) # Position in global system to transfrom
for position in (np.array([-1, -2, -3]), np.array([0, 1, 0]), np.array([3, 2, 1])):
for x in range(-3, 4, 3): # Loop over x translation values
for y in range(-3, 4, 3): # Loop over y translation values
for z in range(-3, 4, 3): # Loop over z translation values
for alpha in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop x rotation values
for beta in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop y rotation values
for gamma in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop z rotation values
position_g = np.array([position[0], position[1], position[2], 1]) # Extend global position dimension
transformation_matrix_to_local = geometry_utils.global_to_local_transformation_matrix(x, y, z, alpha, beta, gamma)
transformation_matrix_to_global = geometry_utils.local_to_global_transformation_matrix(x, y, z, alpha, beta, gamma)
position_l = np.dot(transformation_matrix_to_local, position_g) # Transform to local coordinate system
position_g_result = np.dot(transformation_matrix_to_global, position_l) # Transform back to global coordinate system
self.assertTrue(np.allclose(position, np.array(position_g_result[:-1]))) # Finite precision needs equality check with finite precision
def test_rotation_matrices(self):
# Check that the rotation matrices in x, y, z have the features of a rotation matrix (det = 1, inverse = transposed matrix)
for alpha in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop x rotation values
rotation_matrix_x = geometry_utils.rotation_matrix_x(alpha)
self.assertAlmostEqual(np.linalg.det(rotation_matrix_x), 1)
self.assertTrue(np.allclose(rotation_matrix_x.T, np.linalg.inv(rotation_matrix_x)))
for beta in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop y rotation values
rotation_matrix_y = geometry_utils.rotation_matrix_y(beta)
self.assertAlmostEqual(np.linalg.det(rotation_matrix_y), 1)
self.assertTrue(np.allclose(rotation_matrix_y.T, np.linalg.inv(rotation_matrix_y)))
for gamma in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop z rotation values
rotation_matrix_z = geometry_utils.rotation_matrix_z(gamma)
self.assertAlmostEqual(np.linalg.det(rotation_matrix_z), 1)
self.assertTrue(np.allclose(rotation_matrix_z.T, np.linalg.inv(rotation_matrix_z)))
# Check that the rotation matrix build from x, y, z rotation matrices has the features of rotation matrix (det = 1, inverse = transposed matrix)
for alpha in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop x rotation values
for beta in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop y rotation values
for gamma in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]: # Loop z rotation values
rotation_matrix = geometry_utils.rotation_matrix(alpha, beta, gamma)
self.assertAlmostEqual(np.linalg.det(rotation_matrix), 1)
self.assertTrue(np.allclose(rotation_matrix.T, np.linalg.inv(rotation_matrix)))
if __name__ == '__main__':
tests_data_folder = r'test_track_analysis/'
suite = unittest.TestLoader().loadTestsFromTestCase(TestTrackAnalysis)
unittest.TextTestRunner(verbosity=2).run(suite)
| Python | 0 | |
83cdd840979dc452f444914a0c40d077e6917c38 | Add DB connector class. | DBConnection.py | DBConnection.py | __author__ = 'David'
| Python | 0 | |
e54d1f73bc09ca29a4f760f5846652c689cc45b8 | Create pmf.py | pmf.py | pmf.py | # coding = utf-8
import numpy as np
import random
import math
parameter = {
"path":"./ml-100k.csv.clean",
"split":" ",
"learnrate":0.005,
"trainratio":0.8,
"D":10,
"epoch_num": 10,
"topn":5,
"lambda_u":0.9,
"lambda_v": 0.9,
}
class PMF():
def __init__(self, parameter):
self.initzlize(parameter)
self.train()
self.test()
def initzlize(self, parameter):
self.trainratio = parameter["trainratio"]
self.path = parameter["path"]
self.D = parameter["D"]
self.topn = parameter["topn"]
self.leatnrate = parameter["learnrate"]
self.lambda_u = parameter["lambda_u"]
self.lambda_v = parameter["lambda_v"]
self.epoch_num = parameter["epoch_num"]
self.load_data(self.path)
self.uservec = np.random.random((self.usernum + self.beginid, self.D))
self.itemvec = np.random.random((self.itemnum + self.beginid, self.D))
self.indicator = np.zeros((self.usernum + self.beginid, self.itemnum + self.beginid))
for uid in self.rating.keys():
for vid in self.rating[uid]:
self.indicator[uid][vid] = 1
def train(self):
self.iteration = 0
self.epoch = 0
print("Start training...")
while (1):
if self.epoch > self.epoch_num:
break
u, v = self.sampleob()
r = int(self.traindata[u][v])
self.uservec[u] -= self.leatnrate * (
self.indicator[u][v] * (-r * self.itemvec[v] + np.linalg.norm(self.itemvec[v]) * self.uservec[
u]) + self.lambda_u * np.linalg.norm(self.itemvec[v]) * self.uservec[u])
self.itemvec[v] -= self.leatnrate * (
self.indicator[u][v] * (-r * self.uservec[u] + np.linalg.norm(self.uservec[u]) * self.itemvec[
v]) + self.lambda_v * np.linalg.norm(self.uservec[u]) * self.itemvec[v])
self.iteration += 1
if self.iteration % (self.triplenum/2) == 0:
self.epoch += 1
print("epoch: %d, rmse: %.6f" % (self.epoch, self.rmse()))
print("Training completed.")
def test(self):
print("rmse:%f"%(self.rmse(kind="test")))
def append2json(self,line,dic):
if int(line[0]) in dic.keys():
dic[int(line[0])][int(line[1])] = float(line[2])
else:
dic[int(line[0])] = {int(line[1]): float(line[2])}
def load_data(self,path):
print("Loading data form " + self.path + "...",end=" ")
f = open(path,"r")
users = []
items = []
lines = f.readlines()
self.triplenum = lines.__len__()
for line in lines:
if line =="" or line ==None:break
line = line.split()
if int(line[0]) not in users:
users.append(int(line[0]))
if int(line[1]) not in items:
items.append(int(line[1]))
self.usernum = users.__len__()
self.itemnum = items.__len__()
self.beginid = min(users)
print("completed.\n %d users, %d items, %d triples."%(self.usernum,self.itemnum,self.triplenum))
self.rating = {}
self.traindata = {}
self.testdata = {}
"""
self.rating = {
userid_i:
{itemid_j:rate_ij,
itemid_k:rate_ik,
}
,
useid_l:[
{itemid_m:rate_lm},
],
}
"""
self.traindatanum = 0
self.testdatanum = 0
for line in lines:
line = line.split()
if line =="" or line ==None:break
coin = random.random()
self.append2json(line,self.rating)
self.triplenum +=1
if coin<=self.trainratio:
self.append2json(line,self.traindata)
self.traindatanum += 1
else:
self.append2json(line,self.testdata)
self.testdatanum += 1
print("Split training set : %d, testing set : %d\n"%(self.traindatanum,self.testdatanum))
def sampleob(self):
uid = vid =-1
while(1):
uid = int(random.random()*self.usernum)
if self.beginid == 1:
uid += 1
if uid in self.traindata.keys(): break
while(1):
vid = int(random.random()*self.itemnum)
if self.beginid == 1:
vid += 1
if vid in self.traindata[uid].keys():
break
return uid,vid
def predict(self,uid,vid):
return np.dot(self.uservec[uid],self.itemvec[vid])
def rmse(self,kind = "train"):
r = 0.0
count = 0
if kind == "test":
for uid in self.testdata.keys():
if uid not in self.traindata.keys():
continue
for vid in self.testdata[uid]:
count += 1
r += (self.predict(uid, vid)- self.testdata[uid][vid])*(self.predict(uid, vid) - self.testdata[uid][vid])
r = r/count
elif kind == "train":
for uid in self.traindata.keys():
for vid in self.traindata[uid]:
count += 1
r += (self.predict(uid, vid)- self.traindata[uid][vid])*(self.predict(uid, vid) - self.traindata[uid][vid])
r = r/count
return math.sqrt(r)
def run():
pmf = PMF(parameter)
if __name__ == '__main__':
run()
| Python | 0 | |
4ff0e6a4d190d8c1f60903d18dcdaac1edeace8a | Create test.py | test.py | test.py | import unittest
from mock import patch
import RedDefineBot
class TestBot(unittest.TestCase):
def test_auth_called(self,mock):
self.assertTrue(mock.called)
def test_auth_notcalled(self,mock):
self.assertFalse(mock.called)
if __name__ == '__main__':
unittest.main()
| Python | 0.000005 | |
00b04f773b9e2018b08776c5d53ff3dad7ed00d1 | Create test.py | test.py | test.py | """test.py
"""
print "Hello world"
| Python | 0.000005 | |
b4b2b80cb1d0c0729e8e98085c2cfc3bc55ddda3 | Solve the Longest Lines challenge using Python3 | LongestLines.py | LongestLines.py | # Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
number_of_lines_to_print = int(test_cases.readline().rstrip())
lines = []
for case in test_cases:
lines.append(case.rstrip())
lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
print(l)
| Python | 0.999972 | |
37e674f05547c7b6b93f447477443644865975d1 | Bring back the Root URL config | urls.py | urls.py | __author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
) | Python | 0 | |
b733f433d797b302c46cb71cf0230b986f630d26 | Create w3_1.py | w3_1.py | w3_1.py | print("你教得真好")
| Python | 0.000482 | |
d23461fb7b81f70c919fb028eb22009deaae13da | Generate posterior information | main.py | main.py | import math
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
def generate_sample(mean, cov_matrix):
'''generate_sample: Generate sample function output from a mean and covariance matrix.'''
cholesky_decomp = tf.cholesky(cov_matrix)
cov_shape = tf.shape(cov_matrix)
result_shape = [cov_shape[0], 1]
uniform_gaussian_distribution = tf.random_normal(result_shape, mean=0.0, stddev=1.0, \
dtype=tf.float64)
return mean + tf.matmul(cholesky_decomp, uniform_gaussian_distribution)
def solve_posterior(x_data, y_data, cov_matrix, sigma, test_data):
'''solve_posterior: Generate the mean, variance and log marginal likelihood from
sample data.'''
cholesky_decomp = tf.cholesky(cov_matrix + math.pow(sigma, 2)*tf.eye(tf.shape(cov_matrix)[0], dtype=tf.float64))
alpha = tf.cholesky_solve(cholesky_decomp, y_data)
star_X_rows, star_X_cols = tf.meshgrid(x_data, test_data)
K_star_X = tf.exp(tf.scalar_mul(-0.5,
tf.squared_difference(star_X_cols, star_X_rows)/length_scale))
mean = tf.matmul(K_star_X, alpha)
star_rows, star_cols = tf.meshgrid(test_data, test_data)
K_star_star = tf.exp(tf.scalar_mul(-0.5,
tf.squared_difference(star_cols, star_rows)/length_scale))
X_star_rows, X_star_cols = tf.meshgrid(test_data, x_data)
K_X_star = tf.exp(tf.scalar_mul(-0.5,
tf.squared_difference(X_star_cols, X_star_rows)/length_scale))
variance = K_star_star - tf.matmul(K_star_X, tf.cholesky_solve(cholesky_decomp, K_X_star))
log_marg_likelihood = -0.5*tf.transpose(y_data)*alpha \
- tf.reduce_sum(tf.log(tf.diag_part(cholesky_decomp))) \
- (x_data.size / 2) * math.log(math.pi)
return mean, variance, log_marg_likelihood
if __name__ == "__main__":
# generate sample data
x_data = np.linspace(-math.pi, math.pi, 10)
y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)
mean_est = 0.0
length_scale = 1.5
# Use squared exponential covariance matrix
# Covariance defined as $exp(-0.5*(x_i-x_j)^2/l^2)$ where l is the length-scale
x_rows, x_cols = tf.meshgrid(x_data, x_data)
covariance_est = tf.exp(tf.scalar_mul(-0.5, \
tf.squared_difference(x_cols, x_rows)/length_scale))
sess = tf.Session()
# print prior samples
num_samples = 0
while num_samples < 5:
prior_sample = sess.run(generate_sample(mean_est, covariance_est))
plt.plot(x_data, prior_sample)
plt.title('Prior Samples')
num_samples = num_samples + 1
plt.show()
x_test = np.linspace(-math.pi, math.pi, 100)
mean, variance, log_marg_likelihood = sess.run(solve_posterior(x_data,
tf.reshape(y_data, [y_data.size, 1]), covariance_est, 0.1, x_test))
mean = mean.flatten()
variance_diag = np.diagonal(variance)
mean_plus_variance = mean + variance_diag
mean_minus_variance = mean - variance_diag
plt.plot(x_data, y_data, 'o')
plt.plot(x_test, mean)
plt.fill_between(x_test, mean_minus_variance, mean_plus_variance)
plt.show()
| import math
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
def generate_sample(mean, cov_matrix):
'''generate_sample: Generate sample function output from a mean and covariance matrix.'''
cholesky_decomp = tf.cholesky(cov_matrix)
cov_shape = tf.shape(cov_matrix)
result_shape = [cov_shape[0], 1]
uniform_gaussian_distribution = tf.random_normal(result_shape, mean=0.0, stddev=1.0, \
dtype=tf.float64)
return mean + tf.matmul(cholesky_decomp, uniform_gaussian_distribution)
if __name__ == "__main__":
# generate sample data
x_data = np.linspace(-math.pi, math.pi, 10)
y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
mean_est = 0.0
length_scale = 1.5
# Use squared exponential covariance matrix
x_rows, x_cols = tf.meshgrid(x_data, x_data)
# Covariance defined as $exp(-0.5*(x_i-x_j)^2/l^2)$ where l is the length-scale
covariance_est = tf.exp(tf.scalar_mul(-0.5, \
tf.squared_difference(x_cols, x_rows)/length_scale))
sess = tf.Session()
# print prior samples
num_samples = 0
while num_samples < 5:
prior_sample = sess.run(generate_sample(mean_est, covariance_est))
plt.plot(x_data, prior_sample)
plt.title('Prior Samples')
num_samples = num_samples + 1
plt.show()
| Python | 1 |
73afce309f0e73b441c0ade49849397cba0fb0c2 | update spec runner to work with invoke's boolean flags to run specs untranslated | tasks/specs.py | tasks/specs.py | from invoke import task, run as run_
from .base import BaseTest
class Rubyspecs(BaseTest):
def __init__(self, files, options, untranslated=False):
super(Rubyspecs, self).__init__()
self.exe = "`pwd`/bin/%s" % ("topaz_untranslated.py" if untranslated else "topaz")
self.files = files
self.options = options
self.download_mspec()
self.download_rubyspec()
def mspec(self, args):
run_("../mspec/bin/mspec %s -t %s --config=topaz.mspec %s" % (args, self.exe, self.files))
def run(self):
self.mspec("run -G fails %s" % self.options)
def tag(self):
self.mspec("tag --add fails -G fails -f spec %s" % self.options)
def untag(self):
self.mspec("tag --del fails -g fails -f spec %s" % self.options)
def generate_spectask(taskname):
def spectask(files="", options="", untranslated=False):
runner = Rubyspecs(files, options, untranslated=untranslated)
getattr(runner, taskname)()
spectask.__name__ = taskname
return task(spectask)
run = generate_spectask("run")
tag = generate_spectask("tag")
untag = generate_spectask("untag")
| from invoke import task, run as run_
from .base import BaseTest
class Rubyspecs(BaseTest):
def __init__(self, files, options, translated=True):
super(Rubyspecs, self).__init__()
self.exe = "`pwd`/bin/%s" % ("topaz" if translated else "topaz_untranslated.py")
self.files = files
self.options = options
self.download_mspec()
self.download_rubyspec()
def mspec(self, args):
run_("../mspec/bin/mspec %s -t %s --config=topaz.mspec %s" % (args, self.exe, self.files))
def run(self):
self.mspec("run -G fails %s" % self.options)
def tag(self):
self.mspec("tag --add fails -G fails -f spec %s" % self.options)
def untag(self):
self.mspec("tag --del fails -g fails -f spec %s" % self.options)
def generate_spectask(taskname):
def spectask(files="", options="", translated=True):
runner = Rubyspecs(files, options, translated=(translated != "False"))
getattr(runner, taskname)()
spectask.__name__ = taskname
return task(spectask)
run = generate_spectask("run")
tag = generate_spectask("tag")
untag = generate_spectask("untag")
| Python | 0 |
90399f50a3f50d9193ae1e6b2042215fb388230f | Create Video Stream program for webcam | VideoStream.py | VideoStream.py | import cv2
import numpy as np
cap = cv2.VideoCapture(0)
print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())
while True:
ret, frame = cap.read()
gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', gray_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
| Python | 0 | |
1437bb868844731d3fdb13c6dd52dfd706df6f63 | Add a new script to clean up a habitica user given user email | bin/ext_service/clean_habitica_user.py | bin/ext_service/clean_habitica_user.py | import argparse
import sys
import logging
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("user_email",
help="the email address of the user whose habitica account you want to clean up")
args = parser.parse_args()
del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
logging.debug("Found uuid %s" % del_uuid)
del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
logging.debug("del_habitica_creds = %s" % del_habitica_creds)
del_result = proxy.habiticaProxy(del_uuid, "DELETE",
"/api/v3/user",
{'password': del_habitica_creds['habitica_password']})
logging.debug("delete result = %s" % del_result)
| Python | 0 | |
b9820246c62733e9e47103d41a07a9a4253be15a | Create weather-script.py | weather-script.py | weather-script.py | #!/usr/bin/python2
#Rory Crispin -- rozzles.com -- 2015
from xml.dom import minidom
import datetime
import codecs
import pywapi
result = pywapi.get_weather_from_yahoo('UKXX3856', 'metric')
iconCodes = ["056", "073", "073", "01e", "01e", "064", "01c", "064", "01c", "01c", "015", "019", "019", "064", "064",
"064", "064", "015", "0b6", "063", "014", "014", "062", "050", "050", "076", "013", "013", "013", "002",
"002", "02e", "00d", "02e", "00d", "017", "072", "01e", "01e", "01e", "01a", "064", "064", "064", "013",
"01d", "064", "01d", "00d"]
day_one = datetime.datetime.now()
highs = [None] * 4
lows = [None] * 4
icons = [None] * 4
for i in range(0, 4):
icons[i] = str(iconCodes[int(result['forecasts'][i]['code'])])
highs[i] = str(result['forecasts'][i]['high'])
lows[i] = str(result['forecasts'][i]['low'])
sunsetTime = result['astronomy']['sunset']
#
# Preprocess SVG
#
# Open SVG to process
output = codecs.open('weather-script-preprocess.svg', 'r', encoding='utf-8').read()
# Insert icons and temperatures
output = output.replace('ICON_ONE', icons[0])
output = output.replace('ICON_TWO', icons[1])
output = output.replace('ICON_THREE', icons[2])
output = output.replace('ICON_FOUR', icons[3])
output = output.replace('HIGH_ONE', str(highs[0])).replace('HIGH_TWO', str(highs[1])).replace('HIGH_THREE',
str(highs[2])).replace(
'HIGH_FOUR', str(highs[3]))
output = output.replace('LOW_ONE', str(lows[0])).replace('LOW_TWO', str(lows[1])).replace('LOW_THREE',
str(lows[2])).replace(
'LOW_FOUR', str(lows[3]))
# # Insert days of week
one_day = datetime.timedelta(days=1)
days_of_week = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
output = output.replace('DAY_THREE', days_of_week[(day_one + 2 * one_day).weekday()]).replace('DAY_FOUR', days_of_week[
(day_one + 3 * one_day).weekday()])
output = output.replace('SUNSET', sunsetTime)
# Write output
codecs.open('weather-script-output.svg', 'w', encoding='utf-8').write(output)
| Python | 0.000001 | |
a9dd25c825bacd03ae358cc153c94ce3960ec0cf | Add serializers | chipy_org/apps/meetings/serializers.py | chipy_org/apps/meetings/serializers.py | from rest_framework import serializers
from .models import Meeting, Topic, Presentor
class PresentorSerializer(serializers.ModelSerializer):
class Meta:
model = Presentor
fields = ('name', 'release')
class TopicSerializer(serializers.ModelSerializer):
presentor = PresentorSerializer()
class Meta:
model = Topic
fields = (
'title',
'presentor',
'length',
'description',
'embed_video',
'slides_link',
'start_time',
'approved'
)
depth = 1
class MeetingSerializer(serializers.ModelSerializer):
topics = TopicSerializer(many=True)
class Meta:
model = Meeting
fields = ('when', 'where', 'live_stream', 'topics')
depth = 2
| Python | 0.000005 | |
55185a7a7402c9d0ce2677b00a329aa4197556c3 | add mediator | Mediator.py | Mediator.py | # -*- coding: utf-8 -*-
"""
Mediator pattern
"""
class AbstractColleague(object):
"""
AbstractColleague
"""
def __init__(self, mediator):
self.mediator = mediator
class ConcreteColleague(AbstractColleague):
"""
ConcreteColleague
"""
def __init__(self, name, mediator):
self.name = name
AbstractColleague.__init__(self, mediator)
def send(self, message, receiver=None):
self.mediator.send(message, self, receiver)
@staticmethod
def notify(name, message, sender):
print u'From: {} To: {} -- {}'.format(name, sender.name, message)
class AbstractMediator(object):
"""
AbstractMediator
"""
def send(self, message, colleague):
pass
class ConcreteMediator(AbstractMediator):
def __init__(self, name):
self.name = name
self.colleagues = []
def register(self, colleague):
self.colleagues.append(colleague)
def send(self, message, colleague, receiver=None):
if receiver:
receiver.notify(colleague.name, message, receiver)
else:
for _ in self.colleagues:
if _ != colleague:
_.notify(colleague.name, message, _)
if __name__ == '__main__':
mediator = ConcreteMediator(u'UN')
USA = ConcreteColleague(u'USA', mediator)
mediator.register(USA)
Japan = ConcreteColleague(u'Japan', mediator)
mediator.register(Japan)
Iraq = ConcreteColleague(u'Iraq', mediator)
mediator.register(Iraq)
UK = ConcreteColleague(u'UK', mediator)
mediator.register(UK)
USA.send(u"I'm the boss, bitch!")
Japan.send(u'Emm...', receiver=USA)
Iraq.send(u'A ha!', receiver=USA)
UK.send(u"Reversed?")
UK.send(u"My litter brother send that, boss...Trust me!", receiver=USA)
| Python | 0.001741 | |
290f990e31a5f732fb054846caea9346946778df | enable import as module | __init__.py | __init__.py | """
.. module:: lmtscripts
:platform: Unix
:synopsis: useful scripts for EHT observations at LMT
.. moduleauthor:: Lindy Blackburn <lindylam@gmail.com>
.. moduleauthor:: Katie Bouman <klbouman@gmail.com>
"""
| Python | 0.000001 | |
19a4c4364d1629cd6bfd7ca27ae4e6441f13747e | Make mygmm a module | __init__.py | __init__.py | from .mygmm.mygmm import * | Python | 0.000018 | |
a7f4d96becfd1a58794a4dbedb9e9c8f6ac8c1a6 | Create acceptor.py | acceptor.py | acceptor.py | #! /usr/bin/env python
import message
import logging
class Acceptor(message.MessageListener):
def __init__(self, config, network):
message.MessageListener.__init__(self,
name = 'AcceptorListenser',
mapping = {
message.MSG_PROPOSAL_REQ : self.on_proposal_request,
message.MSG_ACCEPT_REQ : self.on_accept_request
})
self.network = network
self.config = config
self.promised_id = 0
self.accepted_id = 0
self.accepted_values= []
def on_proposal_request(self, pkg, msg):
logging.debug('process proposal request')
return False
def on_accept_request(self, pkg, msg):
logging.debug('process accept request')
return False
| Python | 0.000001 | |
1265e6ce2e6f8423e13f5fb5d54328369cfaa3ec | add geojsonloader tester | tests/geojson/geojsonloader.py | tests/geojson/geojsonloader.py | # -*- coding: utf-8 -*-
import sys
import os
import unittest
import uuid
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
class GeoJSONLoaderTester(unittest.TestCase):
def setUp(self):
pass
def test__init__(self):
from cftt.geojson.geojsonloader import GeoJSONLoader
test = GeoJSONLoader()
s = test()
self.assertEqual(len(s), 0)
self.assertEqual(s.attributes, {})
test.attr('note', 'あ')
s = test()
self.assertEqual(s.attributes, {u'note': 'あ'.decode('utf-8')})
test = GeoJSONLoader(one=1, two=2)
s = test({'features': [
{'type': 'feature',
'properties': {},
'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}}],
'three': 3})
self.assertEqual(s.attributes,
{u'one': 1, u'two': 2, u'three': 3})
self.assertEqual(len(s), 1)
def test__call__(self):
from cftt.geojson.geojsonloader import GeoJSONLoader
id = uuid.uuid4()
test = GeoJSONLoader(id=id)
s = test()
self.assertEqual(s.attributes['id'], id)
self.assertEqual(len(s), 0)
self.assertEqual(len(s.attributes), 1)
s = test({'features': [
{'type': 'feature',
'properties': {},
'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}}],
'three': 3})
self.assertEqual(s.attributes['id'], id)
self.assertEqual(s.attributes,
{u'three': 3, u'id': id})
self.assertEqual(len(s), 1)
test.attr('two', 2)
t = test(s)
self.assertEqual(s.attributes['id'], id)
self.assertEqual(s.attributes,
{u'three': 3, u'id': id})
self.assertEqual(len(s), 1)
self.assertEqual(t.attributes,
{u'three': 3, u'id': id, u'two': 2})
test_data_dir = os.path.join(PROJECT_ROOT, 'test_data')
gj = os.path.join(test_data_dir, 'geojson', 'japan.geojson')
s = test(gj)
self.assertEqual(len(s), 47)
t = test._load_from_json_file(gj)
self.assertEqual(s.attributes, t.attributes)
self.assertEqual(len(s), len(t))
zip = os.path.join(test_data_dir, 'geojson', 'japan.zip')
s = test(zip)
self.assertEqual(len(s), 47)
t = test._load_from_zip_file(zip)
self.assertEqual(s.attributes, t.attributes)
self.assertEqual(len(s), len(t))
from cftt.common.asyncfileserver import AsyncFileServer
with AsyncFileServer(port=8001):
url = 'http://localhost:8001/test_data/geojson/japan.zip'
s = test(url)
self.assertEqual(len(s), 47)
t = test._load_from_url(url)
self.assertEqual(s.attributes, t.attributes)
self.assertEqual(len(s), len(t))
if __name__ == '__main__':
unittest.main()
| Python | 0 | |
13b94129947cbfab4b7870e130a2efbbf41bfbb7 | Add missing file | rehash/__init__.py | rehash/__init__.py | from __future__ import absolute_import, division, print_function, unicode_literals
import os, sys, hashlib
from ctypes import cast, c_void_p, POINTER, Structure, c_int, c_ulong, c_char, c_size_t, c_ssize_t, py_object, memmove
from ssl import OPENSSL_VERSION
PyObject_HEAD = [
('ob_refcnt', c_size_t),
('ob_type', c_void_p)
]
# OpenSSL 1.0.2 and earlier:
# https://github.com/openssl/openssl/blob/OpenSSL_1_0_2-stable/crypto/evp/evp.h#L159-L181
# OpenSSL 1.1.0 and later:
# https://github.com/openssl/openssl/blob/master/crypto/include/internal/evp_int.h#L99-L113
class EVP_MD(Structure):
_fields_ = [
('type', c_int),
('pkey_type', c_int),
('md_size', c_int),
('flags', c_ulong),
('init', c_void_p),
('update', c_void_p),
('final', c_void_p),
('copy', c_void_p),
('cleanup', c_void_p),
]
if OPENSSL_VERSION < "OpenSSL 1.1.0":
_fields_ += [
('sign', c_void_p),
('verify', c_void_p),
('required_pkey_type', c_int * 5),
]
_fields_ += [
('block_size', c_int),
('ctx_size', c_int),
]
# https://github.com/openssl/openssl/blob/master/crypto/evp/evp_locl.h#L12-L22
class EVP_MD_CTX(Structure):
_fields_ = [
('digest', POINTER(EVP_MD)),
('engine', c_void_p),
('flags', c_ulong),
('md_data', POINTER(c_char)),
]
class EVPWrapper(Structure):
_fields_ = PyObject_HEAD + [
("name", POINTER(py_object)),
("ctx", POINTER(EVP_MD_CTX))
]
class ResumableHasher(object):
name = None
def __init__(self, name=None, data=None):
if self.name is not None:
data = name
else:
self.name = name
hasher_args = [] if data is None else [data]
self._hasher = self._get_hashlib_hasher(self.name)(*hasher_args)
def _get_hashlib_hasher(self, name):
if name.startswith("blake2"):
raise Exception("blake2 algorithms are not OpenSSL-based and not supported by rehash")
if name.startswith("sha3"):
raise Exception("sha3 algorithms are not supported by rehash")
if name.startswith("shake"):
raise Exception("shake algorithms are not supported by rehash")
if name in hashlib.algorithms_guaranteed:
return getattr(hashlib, name)
else:
return hashlib.new(name)
def _get_evp_md_ctx(self):
c_evp_obj = cast(c_void_p(id(self._hasher)), POINTER(EVPWrapper))
return c_evp_obj.contents.ctx.contents
def __getstate__(self):
ctx = self._get_evp_md_ctx()
ctx_size = ctx.digest.contents.ctx_size
hasher_state = ctx.md_data[:ctx_size]
return dict(name=self.name, md_data=hasher_state)
def __setstate__(self, state):
self.name = state["name"]
self._hasher = self._get_hashlib_hasher(self.name)()
ctx = self._get_evp_md_ctx()
ctx_size = ctx.digest.contents.ctx_size
memmove(ctx.md_data, state["md_data"], ctx_size)
def __getattr__(self, a):
return getattr(self._hasher, a)
def _initialize():
module = sys.modules[__name__]
for name in hashlib.algorithms_guaranteed:
if name.startswith("blake2"):
continue
setattr(module, name, type(name, (ResumableHasher,), dict(name=name)))
_initialize()
| Python | 0.000006 | |
72d19081bea1dba061c7bf1f57c305f427be1e28 | Implement the SQUIT server command | txircd/modules/server/squit.py | txircd/modules/server/squit.py | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class ServerQuit(ModuleData):
implements(IModuleData)
name = "ServerQuit"
core = True
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [ ("serverquit", 1, self.sendSQuit),
("commandpermission-SQUIT", 1, self.restrictSQuit) ]
def userCommands(self):
return [ ("SQUIT", 1, UserSQuit(self.ircd)) ]
def serverCommands(self):
return [ ("SQUIT", 1, ServerSQuit(self.ircd)),
("RSQUIT", 1, RemoteSQuit(self.ircd)) ]
def sendSQuit(self, server, reason):
closestHop = server
while closestHop.nextClosest != self.ircd.serverID:
closestHop = self.ircd.servers[closestHop.nextClosest]
for otherServer in self.ircd.servers.itervalues():
if closestHop == otherServer:
continue
otherServer.sendMessage("SQUIT", server.serverID, ":{}".format(reason), prefix=server.nextClosest)
def restrictSQuit(self, user, command, data):
if not self.ircd.runActionUntilValue("userhasoperpermission", user, "command-squit"):
return False
return None
class UserSQuit(Command):
implements(ICommand)
def __init__(self, ircd):
self.ircd = ircd
def parseParams(self, user, params, prefix, tags):
if len(params) < 2:
user.sendSingleError("SQuitParams", irc.ERR_NEEDMOREPARAMS, "SQUIT", ":Not enough parameters")
return None
source = self.ircd.serverID
if params[0] not in self.ircd.serverNames:
if ircLower(params[0]) == ircLower(self.ircd.name):
user.sendSingleError("SQuitTarget", irc.ERR_NOSUCHSERVER, self.ircd.name, ":You can't unlink this server from itself")
return None
user.sendSingleError("SQuitTarget", irc.ERR_NOSUCHSERVER, params[0], ":No such server")
return None
return {
"source": source,
"target": self.ircd.servers[self.ircd.serverNames[params[0]]],
"reason": params[1]
}
def execute(self, user, data):
targetServer = data["target"]
reason = data["reason"]
if targetServer.nextClosest == self.ircd.serverID:
targetServer.disconnect(reason)
user.sendMessage("NOTICE", ":*** Disconnected {}".format(targetServer.name))
else:
targetServer.sendMessage("RSQUIT", targetServer.serverID, ":{}".format(reason), prefix=self.ircd.serverID)
user.sendMessage("NOTICE", ":*** Sent remote SQUIT for {}".format(targetServer.name))
return True
class ServerSQuit(Command):
implements(ICommand)
def __init__(self, ircd):
self.ircd = ircd
def parseParams(self, server, params, prefix, tags):
if len(params) != 2:
return None
if params[0] not in self.ircd.servers:
return None
return {
"target": self.ircd.servers[params[0]],
"reason": params[1]
}
def execute(self, server, data):
data["target"].disconnect(data["reason"])
return True
class RemoteSQuit(Command):
implements(ICommand)
def __init__(self, ircd):
self.ircd = ircd
def parseParams(self, server, params, prefix, tags):
if len(params) != 2:
return None
if params[0] not in self.ircd.servers:
return None
return {
"target": self.ircd.servers[params[0]],
"reason": params[1]
}
def execute(self, server, data):
targetServer = data["target"]
if targetServer.nextClosest == self.ircd.serverID:
targetServer.disconnect(data["reason"])
return True
targetServer.sendMessage("RSQUIT", targetServer.serverID, ":{}".format(data["reason"]), prefix=targetServer.nextClosest)
return True
squit = ServerQuit() | Python | 0.000013 | |
5bb387947ac13bcd3949c6b17839033231c05e2d | Add unittests for cupy.testing.array | tests/cupy_tests/testing_tests/test_array.py | tests/cupy_tests/testing_tests/test_array.py | import copy
import unittest
import numpy
import six
import cupy
from cupy import testing
@testing.parameterize(
*testing.product({
'assertion': ['assert_allclose', 'assert_array_almost_equal',
'assert_array_almost_equal_nulp',
'assert_array_max_ulp', 'assert_array_equal'],
'array_module_x': [numpy, cupy],
'array_module_y': [numpy, cupy]
})
)
@testing.gpu
class TestEqualityAssertion(unittest.TestCase):
def setUp(self):
self.assertion = getattr(testing, self.assertion)
val = numpy.random.uniform(-1, 1, (2, 3))
self.x = self.array_module_x.array(val, val.dtype, copy=True)
self.y = self.array_module_y.array(val, val.dtype, copy=True)
def test_equality(self):
self.assertion(self.x, self.y)
def test_inequality(self):
self.y += 1
with self.assertRaises(AssertionError):
self.assertion(self.x, self.y)
def _convert_array(xs, array_module):
if array_module == 'all_numpy':
return xs
elif array_module == 'all_cupy':
return cupy.asarray(xs)
else:
return [cupy.asarray(x) if numpy.random.random_integers(0, 1)
else x for x in xs]
@testing.parameterize(
*testing.product({
'array_module_x': ['all_numpy', 'all_cupy', 'random'],
'array_module_y': ['all_numpy', 'all_cupy', 'random']
})
)
@testing.gpu
class TestListEqualityAssertion(unittest.TestCase):
def setUp(self):
xs = [numpy.random.uniform(-1, 1, (2, 3)) for _ in six.moves.range(10)]
ys = copy.deepcopy(xs)
self.xs = _convert_array(xs, self.array_module_x)
self.ys = _convert_array(ys, self.array_module_y)
def test_equality_numpy(self):
testing.assert_array_list_equal(self.xs, self.ys)
def test_inequality_numpy(self):
self.xs[0] += 1
with self.assertRaises(AssertionError):
testing.assert_array_list_equal(self.xs, self.ys)
@testing.parameterize(
*testing.product({
'array_module_x': [numpy, cupy],
'array_module_y': [numpy, cupy]
})
)
@testing.gpu
class TestLessAssertion(unittest.TestCase):
def setUp(self):
val = numpy.random.uniform(-1, 1, (2, 3))
self.x = self.array_module_x.array(val, val.dtype, copy=True)
self.y = self.array_module_y.array(val + 1, val.dtype, copy=True)
def test_equality_numpy(self):
testing.assert_array_less(self.x, self.y)
def test_inequality_numpy(self):
self.x[0] += 100
with self.assertRaises(AssertionError):
testing.assert_array_less(self.x, self.y)
| Python | 0.000001 | |
9e862d1ae83a75c9b7ea6fef37ff8d30e920511c | Add tests for cache generation IDs. | ci/tsqa/tests/test_cache_generation.py | ci/tsqa/tests/test_cache_generation.py | '''
Test the cache generation configuration
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import logging
import requests
import random
import uuid
import time
import helpers
import tsqa.test_cases
import tsqa.utils
log = logging.getLogger(__name__)
class TestCacheGeneration(helpers.EnvironmentCase):
'''
Test the cache object generation ID.
'''
def _fetch(self, path):
url = 'http://127.0.0.1:{}/{}'.format(
self.configs['records.config']['CONFIG']['proxy.config.http.server_ports'],
path
)
log.debug('get {}'.format(url))
return requests.get(url, headers={'x-debug': 'x-cache,x-cache-key,via,x-cache-generation'})
def _dump(self, response):
log.info('HTTP response {}'.format(response.status_code))
for k, v in response.headers.items():
log.info(' {}: {}'.format(k, v))
def _ctl(self, *args):
cmd = [os.path.join(self.environment.layout.bindir, 'traffic_ctl')] + list(args)
out, _ = tsqa.utils.run_sync_command(cmd,
env=self.environment.shell_env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
return out
@classmethod
def setUpEnv(cls, env):
cls.configs['plugin.config'].add_line('xdebug.so')
cls.configs['remap.config'].add_line(
'map /default/ http://127.0.0.1/ @plugin=generator.so'
)
cls.configs['remap.config'].add_line(
'map /generation1/ http://127.0.0.1/' +
' @plugin=conf_remap.so @pparam=proxy.config.http.cache.generation=1' +
' @plugin=generator.so'
)
cls.configs['remap.config'].add_line(
'map /generation2/ http://127.0.0.1/' +
' @plugin=conf_remap.so @pparam=proxy.config.http.cache.generation=2' +
' @plugin=generator.so'
)
# Start with cache generation turned off
cls.configs['records.config']['CONFIG']['proxy.config.http.cache.generation'] = -1
# Wait for the cache so we don't race client requests against it.
cls.configs['records.config']['CONFIG']['proxy.config.http.wait_for_cache'] = 1
cls.configs['records.config']['CONFIG']['proxy.config.config_update_interval_ms'] = 1
def test_generations_are_disjoint(self):
"""Test that the same URL path in different cache generations creates disjoint objects"""
objectid = uuid.uuid4()
# First touch is a MISS.
ret = self._fetch('default/cache/10/{}'.format(objectid))
self.assertEqual(ret.status_code, 200)
self.assertEqual(ret.headers['x-cache'], 'miss', msg=ret)
self.assertEqual(ret.headers['x-cache-generation'], '-1')
# Same URL in generation 1 is a MISS.
ret = self._fetch('generation1/cache/10/{}'.format(objectid))
self.assertEqual(ret.status_code, 200)
self.assertEqual(ret.headers['x-cache'], 'miss')
self.assertEqual(ret.headers['x-cache-generation'], '1')
# Same URL in generation 2 is still a MISS.
ret = self._fetch('generation2/cache/10/{}'.format(objectid))
self.assertEqual(ret.status_code, 200)
self.assertEqual(ret.headers['x-cache'], 'miss')
self.assertEqual(ret.headers['x-cache-generation'], '2')
# Second touch is a HIT.
ret = self._fetch('default/cache/10/{}'.format(objectid))
self.assertEqual(ret.status_code, 200)
self.assertEqual(ret.headers['x-cache'], 'hit-fresh', msg=ret.headers['x-cache'])
self.assertEqual(ret.headers['x-cache-generation'], '-1')
def test_online_cache_clear(self):
"""Test that incrementing the cache generation acts like a cache clear"""
objectid = uuid.uuid4()
# First touch is a MISS.
ret = self._fetch('default/cache/10/{}'.format(objectid))
self.assertEqual(ret.status_code, 200)
self.assertEqual(ret.headers['x-cache'], 'miss')
# Second touch is a HIT.
ret = self._fetch('default/cache/10/{}'.format(objectid))
self.assertEqual(ret.status_code, 200)
self.assertEqual(ret.headers['x-cache'], 'hit-fresh')
# Now update the generation number.
timeout = float(self._ctl('config', 'get', 'proxy.config.config_update_interval_ms').split(' ')[-1])
generation = random.randrange(65000)
gencount = 0
self._ctl('config', 'set', 'proxy.config.http.cache.generation', str(generation))
self._ctl('config', 'reload')
for _ in xrange(5):
if gencount == 0:
log.debug('waiting {} secs for the config to update'.format(timeout / 1000))
time.sleep(timeout / 1000)
ret = self._fetch('default/cache/10/{}'.format(objectid))
self.assertEqual(ret.status_code, 200)
if ret.headers['x-cache-generation'] == str(generation):
if gencount == 0:
# First time we see the new generation, it should be a miss.
self.assertEqual(ret.headers['x-cache'], 'miss')
else:
# Now the previous hits should become misses.
self.assertEqual(ret.headers['x-cache'], 'hit-fresh')
else:
# Config has not updated, so it should be a hit.
self.assertEqual(ret.headers['x-cache'], 'hit-fresh')
self.assertEqual(ret.headers['x-cache-generation'], '-1')
gencount = gencount + 1
self.assertNotEqual(gencount, 0, msg='proxy.config.http.cache.generation never updated')
| Python | 0 | |
6891c9e635cbe9ba663ac7f72bdff653bb8c8220 | make sure we can call commit | netforce_general/netforce_general/controllers/root.py | netforce_general/netforce_general/controllers/root.py | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.controller import Controller
from netforce import config
from netforce.database import get_connection
from netforce import access
from netforce.model import get_model
class Root(Controller):
_path="/"
def get(self):
url=None
db=get_connection()
try:
if db:
res=db.get("SELECT root_url FROM settings WHERE id=1")
url=res.root_url
if url:
self.redirect(url)
return
user_id=access.get_active_user()
action=None
if user_id:
user=get_model("base.user").browse(user_id)
profile=user.profile_id
action=profile.home_action
if action:
self.redirect("/ui#name=%s"%action)
return
self.redirect("/ui#name=login")
finally:
if db:
db.commit()
Root.register()
| # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.controller import Controller
from netforce import config
from netforce.database import get_connection
from netforce import access
from netforce.model import get_model
class Root(Controller):
_path="/"
def get(self):
url=None
db=get_connection()
try:
if db:
res=db.get("SELECT root_url FROM settings WHERE id=1")
url=res.root_url
if url:
self.redirect(url)
return
user_id=access.get_active_user()
action=None
if user_id:
user=get_model("base.user").browse(user_id)
profile=user.profile_id
action=profile.home_action
if action:
self.redirect("/ui#name=%s"%action)
return
self.redirect("/ui#name=login")
finally:
db.commit()
Root.register()
| Python | 0 |
21d7e6f83f34e66167d7452998f2c7622a90e46c | Create test_parser.py | test_parser.py | test_parser.py | import os
import csv
import json
import collections
from collections import defaultdict
filename = "C:/Users/zeffi/Documents/Export_482016.csv"
some_dict = defaultdict(list)
def sanedate(date):
MM, DD, YYYY = date.split('/')
return '/'.join([DD, MM, YYYY])
def formatted_time(gtime):
HH, MM, SS = gtime.split(':')
return ':'.join([HH, MM])
def open_csv_test(filename):
#csvfile = open(filename, 'r', encoding='ISO-8859-15', newline='')
csvfile = open(filename, 'r', newline='')
ofile = csv.reader(csvfile, delimiter=',')
# skip the first 7 lines (OneTouch uses an odd csv format)
for i in range(6):
next(ofile)
for row in ofile:
try:
print(row)
date, gtime, gvalue = row[1:4]
date = date + '__' + sanedate(date)
gtime = formatted_time(gtime)
some_dict[date].append({'time': gtime, 'value': float(gvalue)})
except:
print("failed at")
print(row)
with open('C:/Users/zeffi/Documents/some_constructed.json', 'w') as wfile:
wfile.write(json.dumps(some_dict, sort_keys=True, indent=4))
open_csv_test(filename)
| Python | 0.000008 | |
0c49c3dcd168e01512deb72bfbeed1438430abe4 | remove duplicate error messages before displaying, issue 486 | src/robotide/context/logger.py | src/robotide/context/logger.py | # Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import wx
class Logger(object):
empty_suite_init_file_warn = re.compile("Test suite directory initialization "
"file '.*' contains no test data.")
def __init__(self):
self._messages = []
def report_parsing_errors(self):
errors = [m[0] for m in self._messages]
if errors:
# Warnings from robot.variables.Variables.set_from_variable_table
# are present multiple times, issue 486.
errors = set(errors)
dlg = ErrorMessageDialog('Parsing errors',
'\n'.join(self._format_parsing_error_line(line)
for line in errors))
dlg.ShowModal()
dlg.Destroy()
self._messages = []
def _format_parsing_error_line(self, line):
if ':' not in line:
return line
index = line.index(':') + 1
return line[:index] + '\n\t' + line[index:]
def warn(self, msg=''):
self._write(msg, 'WARN')
def error(self, msg=''):
self._write(msg, 'ERROR')
def message(self, msg):
message, level = msg.message, msg.level.upper()
if self._is_logged(level):
self._messages.append((message, level))
def _write(self, msg, level):
level = level.upper()
if self._is_logged(level) and not self._is_ignored_warning(msg):
self._show_message(msg, level)
def _is_logged(self, level):
return level.upper() in ['ERROR', 'WARN']
def _is_ignored_warning(self, msg):
return self.empty_suite_init_file_warn.search(msg)
def _show_message(self, msg, level):
try:
icon = level == 'ERROR' and wx.ICON_ERROR or wx.ICON_WARNING
wx.MessageBox(msg, level, icon)
except wx.PyNoAppError:
sys.stderr.write('%s: %s\n' % (level, msg))
class ErrorMessageDialog(wx.Dialog):
def __init__(self, title, message):
wx.Dialog.__init__(self, None, size=(700, 400), title=title,
style=wx.DEFAULT_FRAME_STYLE)
area = wx.TextCtrl(self, size=(700,400), style=wx.TE_MULTILINE|wx.TE_DONTWRAP|wx.TE_READONLY)
area.SetValue(message)
| # Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import wx
class Logger(object):
empty_suite_init_file_warn = re.compile("Test suite directory initialization "
"file '.*' contains no test data.")
def __init__(self):
self._messages = []
def report_parsing_errors(self):
errors = [m[0] for m in self._messages]
if errors:
dlg = ErrorMessageDialog('Parsing errors',
'\n'.join(self._format_parsing_error_line(line)
for line in errors))
dlg.ShowModal()
dlg.Destroy()
self._messages = []
def _format_parsing_error_line(self, line):
if ':' not in line:
return line
index = line.index(':') + 1
return line[:index] + '\n\t' + line[index:]
def warn(self, msg=''):
self._write(msg, 'WARN')
def error(self, msg=''):
self._write(msg, 'ERROR')
def message(self, msg):
message, level = msg.message, msg.level.upper()
if self._is_logged(level):
self._messages.append((message, level))
def _write(self, msg, level):
level = level.upper()
if self._is_logged(level) and not self._is_ignored_warning(msg):
self._show_message(msg, level)
def _is_logged(self, level):
return level.upper() in ['ERROR', 'WARN']
def _is_ignored_warning(self, msg):
return self.empty_suite_init_file_warn.search(msg)
def _show_message(self, msg, level):
try:
icon = level == 'ERROR' and wx.ICON_ERROR or wx.ICON_WARNING
wx.MessageBox(msg, level, icon)
except wx.PyNoAppError:
sys.stderr.write('%s: %s\n' % (level, msg))
class ErrorMessageDialog(wx.Dialog):
def __init__(self, title, message):
wx.Dialog.__init__(self, None, size=(700, 400), title=title,
style=wx.DEFAULT_FRAME_STYLE)
area = wx.TextCtrl(self, size=(700,400), style=wx.TE_MULTILINE|wx.TE_DONTWRAP|wx.TE_READONLY)
area.SetValue(message)
| Python | 0 |
fabf4e8bd93155101d459716b35c10b32a3dfd16 | add tests/utils.py | tests/utils.py | tests/utils.py | import sys
import yappi
import unittest
class YappiUnitTestCase(unittest.TestCase):
def setUp(self):
if yappi.is_running():
yappi.stop()
yappi.clear_stats()
yappi.set_clock_type('cpu') # reset to default clock type
def tearDown(self):
fstats = yappi.get_func_stats()
if not fstats._debug_check_sanity():
sys.stdout.write("ERR: Duplicates found in Func stats\r\n")
fstats.debug_print()
for fstat in fstats:
if not fstat.children._debug_check_sanity():
sys.stdout.write("ERR: Duplicates found in ChildFunc stats\r\n")
fstat.children.print_all()
tstats = yappi.get_func_stats()
if not tstats._debug_check_sanity():
sys.stdout.write("ERR: Duplicates found in Thread stats\r\n")
tstats.print_all()
def assert_raises_exception(func):
try:
_run(func)
assert 0 == 1
except:
pass
def run_with_yappi(func, *args, **kwargs):
yappi.start()
func(*args, **kwargs)
yappi.stop()
def run_and_get_func_stats(func, *args, **kwargs):
run_with_yappi(func, *args, **kwargs)
return yappi.get_func_stats()
def run_and_get_thread_stats(func, *args, **kwargs):
run_with_yappi(func, *args, **kwargs)
return yappi.get_thread_stats()
def is_py3x():
return sys.version_info > (3, 0)
def find_stat_by_name(stats, name):
for stat in stats:
if stat.name == name:
return stat
| Python | 0.000001 | |
21a9ca4487d0d3ef9f2aa2ba5909b37c735c18e6 | Fix linter errors in test_tftrt.py | tensorflow/contrib/tensorrt/test/test_tftrt.py | tensorflow/contrib/tensorrt/test/test_tftrt.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
import tensorflow.contrib.tensorrt as trt
def get_simple_graph_def():
"""Create a simple graph and return its graph_def"""
g = tf.Graph()
with g.as_default():
a = tf.placeholder(dtype=tf.float32, shape=(None, 24, 24, 2), name="input")
e = tf.constant(
[[[[1., 0.5, 4., 6., 0.5, 1.], [1., 0.5, 1., 1., 0.5, 1.]]]],
name="weights",
dtype=tf.float32)
conv = tf.nn.conv2d(
input=a, filter=e, strides=[1, 2, 2, 1], padding="SAME", name="conv")
b = tf.constant([4., 1.5, 2., 3., 5., 7.], name="bias", dtype=tf.float32)
t = tf.nn.bias_add(conv, b, name="biasAdd")
relu = tf.nn.relu(t, "relu")
idty = tf.identity(relu, "ID")
v = tf.nn.max_pool(
idty, [1, 2, 2, 1], [1, 2, 2, 1], "VALID", name="max_pool")
tf.squeeze(v, name="output")
return g.as_graph_def()
def run_graph(gdef, dumm_inp):
  """Import *gdef* into a fresh graph, feed *dumm_inp* to "input", return "output".

  Caps the per-process GPU memory fraction at 0.50 so the TF-TRT run can
  coexist with other GPU users.
  """
  gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.50)
  tf.reset_default_graph()
  g = tf.Graph()
  with g.as_default():
    inp, out = tf.import_graph_def(
        graph_def=gdef, return_elements=["input", "output"])
    # return_elements yields Operations; take their first output tensors.
    inp = inp.outputs[0]
    out = out.outputs[0]
  with tf.Session(
      config=tf.ConfigProto(gpu_options=gpu_options), graph=g) as sess:
    val = sess.run(out, {inp: dumm_inp})
  return val
# NOTE(review): this is a substring test, not the usual `__name__ == "__main__"`
# equality guard — confirm that was intentional.
if "__main__" in __name__:
  inp_dims = (100, 24, 24, 2)
  dummy_input = np.random.random_sample(inp_dims)
  gdef = get_simple_graph_def()
  # Get optimized graph
  trt_graph = trt.create_inference_graph(gdef, ["output"], inp_dims[0])
  # Run the same input through the original and the TRT-optimized graph and
  # require bit-identical results.
  o1 = run_graph(gdef, dummy_input)
  o2 = run_graph(trt_graph, dummy_input)
  assert np.array_equal(o1, o2)
  print("Pass")
| # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import tensorflow.contrib.tensorrt as trt
import numpy as np
# Pre-lint-fix version of get_simple_graph_def (camelCase names kept as-is).
def getSimpleGraphDef():
  """Create a simple graph and return its graph_def"""
  g = tf.Graph()
  with g.as_default():
    A = tf.placeholder(dtype=tf.float32, shape=(None, 24, 24, 2), name="input")
    e = tf.constant(
        [[[[1., 0.5, 4., 6., 0.5, 1.], [1., 0.5, 1., 1., 0.5, 1.]]]],
        name="weights",
        dtype=tf.float32)
    conv = tf.nn.conv2d(
        input=A, filter=e, strides=[1, 2, 2, 1], padding="SAME", name="conv")
    b = tf.constant([4., 1.5, 2., 3., 5., 7.], name="bias", dtype=tf.float32)
    t = tf.nn.bias_add(conv, b, name="biasAdd")
    relu = tf.nn.relu(t, "relu")
    idty = tf.identity(relu, "ID")
    v = tf.nn.max_pool(
        idty, [1, 2, 2, 1], [1, 2, 2, 1], "VALID", name="max_pool")
    # `out` is unused; only the op's "output" name is consumed downstream.
    out = tf.squeeze(v, name="output")
  return g.as_graph_def()
def runGraph(gdef, dumm_inp):
  """Import *gdef*, run *dumm_inp* through it, and return the "output" value."""
  # Limit GPU memory so both graphs can be evaluated in one process.
  gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.50)
  tf.reset_default_graph()
  g = tf.Graph()
  with g.as_default():
    inp, out = tf.import_graph_def(
        graph_def=gdef, return_elements=["input", "output"])
    inp = inp.outputs[0]
    out = out.outputs[0]
  with tf.Session(
      config=tf.ConfigProto(gpu_options=gpu_options), graph=g) as sess:
    val = sess.run(out, {inp: dumm_inp})
  return val
# NOTE(review): substring guard, not `__name__ == "__main__"` — confirm intent.
if "__main__" in __name__:
  inpDims = (100, 24, 24, 2)
  dummy_input = np.random.random_sample(inpDims)
  gdef = getSimpleGraphDef()
  trt_graph = trt.create_inference_graph(gdef, ["output"],
                                         inpDims[0])  # Get optimized graph
  # Original vs TRT-optimized graph must agree exactly on the same input.
  o1 = runGraph(gdef, dummy_input)
  o2 = runGraph(trt_graph, dummy_input)
  assert (np.array_equal(o1, o2))
  print("Pass")
| Python | 0.000027 |
a333a5c15ffd2b775ad4d854c7accd32b898d2fb | Add encryptor_python3.py compatible with Python 3 | encryptor_python3.py | encryptor_python3.py | from __future__ import print_function
__author__ = 'Samuel Gratzl'
if __name__ == '__main__':
    import uuid
    import hashlib
    # Prompt for a password and hash it with a random per-run salt.
    password = input('enter password: ').encode('utf-8')
    # Salt is the hex of a random UUID4; NOTE(review): the `secrets` module is
    # the usual choice for security-sensitive randomness — confirm uuid4 is ok.
    salt = uuid.uuid4().hex.encode('utf-8')
    hashed_password = hashlib.sha512(password + salt).hexdigest()
    # NOTE(review): this echoes the plaintext password to stdout — confirm
    # that is intended (presumably a debugging aid).
    print(password)
    print(salt)
    print(hashed_password)
| Python | 0.000669 | |
5a634b9b837726a595a4450c8b1f46dd24b282a0 | Add generic Python context template | scripts/Context.py | scripts/Context.py |
# Copyright (c) 2015-2019 Agalmic Ventures LLC (www.agalmicventures.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import atexit
import datetime
import inspect
try:
import ujson as json
except ImportError:
import json
import os
import subprocess
import threading
import traceback
def _updateRunEnd(context):
"""
Exit handler for clean shutdowns.
"""
endTime = datetime.datetime.now()
startTime = context.startTime()
print('Run duration: %s (%s - %s)' % (endTime - startTime, startTime, endTime))
class _Context(object):
    """Singleton payload: records start time and the running code's git version."""
    def __init__(self):
        # Wall-clock time this context (and effectively the run) started.
        self._startTime = datetime.datetime.now()
        #Check the version: run `git rev-parse HEAD` from the repo root
        #(the parent of the directory containing this file).
        currentFile = os.path.abspath(inspect.getfile(inspect.currentframe()))
        currentDir = os.path.dirname(currentFile)
        parentDir = os.path.dirname(currentDir)
        workingDirectory = os.getcwd()
        os.chdir(parentDir)
        try:
            self._gitVersion = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode('utf8').strip()
            print('Version: %s' % self._gitVersion)
        except subprocess.CalledProcessError:
            # Not a git checkout (or git failed): run without version info.
            self._gitVersion = None
            print('WARNING: Could not retrieve git version!')
        os.chdir(workingDirectory)
        #Prepare for the end of the run: print the duration at interpreter exit.
        atexit.register(_updateRunEnd, self)
    ##### Accessors #####
    def gitVersion(self):
        """
        Returns the git version of the running code.
        :return: str
        """
        return self._gitVersion
    def startTime(self):
        """
        Returns the start time of the running code.
        :return: datetime.datetime
        """
        return self._startTime
    ##### Helpers #####
    def handleException(self, exception, details=None):
        """
        Handles an exception by logging it, inserting it into the database, etc.
        :param exception: The exception
        """
        # NOTE(review): `exception` and `details` are currently unused — the
        # backtrace comes from the active exception via traceback.format_exc().
        backtrace = traceback.format_exc()
        print(' EXCEPTION THROWN '.center(60, '*'))
        print(backtrace)
#The global context is truly a singleton because the hardware it is managing can have only
#one thing using it at a time.
_context = None
_contextLock = threading.Lock()
class Context(object):
    """
    The global singleton that holds all other objects that are instantiated only
    once (e.g. config, logging, DB connections, etc.).
    """
    def __new__(cls):
        global _context
        # Double-checked locking: cheap unlocked test first, then re-test
        # under the lock so only one thread ever constructs the _Context.
        if _context is None:
            if _contextLock.acquire():
                try:
                    if _context is None:
                        _context = _Context()
                finally:
                    _contextLock.release()
        return _context
| Python | 0.000001 | |
a7b25e343623f41b0466c8cea852ecc07ffab359 | Create marsLanderLevelTwo.py | Codingame/Python/Medium/marsLanderLevelTwo.py | Codingame/Python/Medium/marsLanderLevelTwo.py | import sys
import math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
# Read the Martian surface polyline from stdin.
surface_n = int(input()) # the number of points used to draw the surface of Mars.
surface = []
for i in range(surface_n):
    # land_x: X coordinate of a surface point. (0 to 6999)
    # land_y: Y coordinate of a surface point. By linking all the points together in a sequential fashion, you form the surface of Mars.
    land_x, land_y = [int(j) for j in input().split()]
    surface.append([land_x,land_y])
# Find the flat landing segment: two consecutive vertices with equal Y.
minx = 0
maxx = 0
last = 0
for x in range(1, len(surface)):
    if surface[x][1] == surface[last][1]:
        minx = last
        maxx = x
    last = x
ly = surface[minx][1]      # altitude of the flat landing zone
minx = surface[minx][0]    # left edge (x) of the landing zone
maxx = surface[maxx][0]    # right edge (x) of the landing zone
# game loop
while 1:
    # h_speed: the horizontal speed (in m/s), can be negative.
    # v_speed: the vertical speed (in m/s), can be negative.
    # fuel: the quantity of remaining fuel in liters.
    # rotate: the rotation angle in degrees (-90 to 90).
    # power: the thrust power (0 to 4).
    x, y, h_speed, v_speed, fuel, rotate, power = [int(i) for i in input().split()]
    if h_speed < -60 or h_speed > 60:
        # Excessive sideways speed: tilt against the motion at full thrust.
        d = (-45,45)[h_speed > 60]
        p = 4
    elif x < maxx and x > minx:
        # Over the landing zone: damp horizontal speed, then descend.
        if h_speed < -20:
            d = -60
            p = 4
        elif h_speed > 20:
            d = 60
            p = 4
        else:
            if maxx - x < 200 and h_speed > 0:
                d = 15
            elif minx - x > -200 and h_speed < 0:
                d = -15
            else:
                d = 0
            # NOTE(review): sqrt(...) is never negative, so this comparison is
            # always False and p is always 3 here — confirm the intended test
            # (possibly `> 38`, i.e. predicted impact speed too high).
            p = (3,4)[math.sqrt(v_speed**2+((y-ly)*2*(4-3.711))) < -38]
    else:
        # Beside the landing zone: tilt toward it at full thrust.
        d = (30,-30)[x < minx]
        p = 4
    # Write an action using print
    # To debug: print("Debug messages...", file=sys.stderr)
    # rotate power. rotate is the desired rotation angle. power is the desired thrust power.
    # NOTE(review): the computed power `p` is ignored — a literal "4" is
    # printed instead. Confirm whether `print(d, p)` was intended.
    print(d,"4")
| Python | 0.000045 | |
ac2f517f15816277dd808ac473c4581212b8e841 | add migration for meta | Seeder/www/migrations/0004_auto_20170223_1457.py | Seeder/www/migrations/0004_auto_20170223_1457.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-23 14:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: update Meta options (ordering/verbose names) of TopicCollection."""
    dependencies = [
        ('www', '0003_auto_20170216_2204'),
    ]
    operations = [
        # Schema is untouched; only model Meta options change.
        migrations.AlterModelOptions(
            name='topiccollection',
            options={'ordering': ['id'], 'verbose_name': 'Topic collection', 'verbose_name_plural': 'Topic collections'},
        ),
    ]
| Python | 0 | |
0a8af4a4f5e9fa711e9e4b1b14cc639d5ff166a0 | Create beta_dog_recommendation_system.py | Solutions/beta/beta_dog_recommendation_system.py | Solutions/beta/beta_dog_recommendation_system.py | from itertools import takewhile
def find_similar_dogs(breed):
    """Return the set of breeds sharing the most traits with *breed*.

    Every other breed in the module-level ``dogs`` mapping is scored by how
    many of its traits also belong to *breed*; all breeds tied for the top
    score are returned.
    """
    reference = dogs[breed]
    ranked = sorted(
        ((other, sum(trait in reference for trait in dogs[other]))
         for other in dogs if other != breed),
        key=lambda pair: pair[1], reverse=True)
    best_score = ranked[0][1]
    top = takewhile(lambda pair: pair[1] == best_score, ranked)
    return {name for name, score in top}
| Python | 0.000007 | |
8a293ddc633730a6c2323392b1ac9083e5a45ad4 | Create lora_test_recv.py | device/src/test/lora_test_recv.py | device/src/test/lora_test_recv.py | # lora_test_recv.py
#Communication module: LoRa.
#Communication method with gateway via LoRa.
#Uart port drive LoRa module.
#Parse JSON between device and gateway via LoRa channel.
#LoRa module: E32-TTL-100
#Pin specification:
#Module MCU
#M0(IN) <--> GPIO(X3)(OUT) #mode setting, can not hang
#M1(IN) <--> GPIO(X4)(OUT) #mode setting, can not hang
#RXD(IN) <--> X1(TX)(OUT) #UART4
#TXD(OUT) <--> X2(RX)(IN) #UART4
#AUX(OUT) <--> GPIO/INT(IN) #module status detecting
#VCC
#GND
#Communication mode is 0, need to set M0 and M1 to 0.
#JSON data format:
#{ID:123,CMD:heartbeat,DATA:hello,SEQUENCE:123}
from pyb import Pin
from pyb import UART
from pyb import Timer
import time
#LED shining regularly(using timer) to indicate the program is running correctly
# Heartbeat LED: toggles once per second so a stalled board is visible.
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
# M0/M1 low selects the E32-TTL-100's normal communication mode (mode 0).
M0 = Pin('X3', Pin.OUT_PP)
M1 = Pin('X4', Pin.OUT_PP)
M0.low()
M1.low()
# UART4 at 9600 8N1 drives the LoRa module's RXD/TXD lines.
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
# Announce this device to the gateway once at boot.
u4.write('{ID:1,CMD:OnLine,DATA:TYPBoard1,SEQ:0}')
if __name__=='__main__':
    # Poll the UART forever and echo anything received.
    while True:
        # NOTE(review): `len` shadows the builtin of the same name here.
        len = u4.any()
        if(len > 0):
            print(u4.read())
| Python | 0.000002 | |
d19a36fda0bfc9d221d65bde1612ff6181fca66d | add proposed setup.py file | setup.py | setup.py | from distutils.core import setup
# Minimal distutils packaging metadata for the vectortween library.
setup(
    name='vectortween',
    version='0.0.1',
    packages=['vectortween'],
    url='',
    license='MIT',
    author='stefaan himpe',
    author_email='stefaan.himpe@gmail.com',
    description='some tweening for use with libraries like gizeh and moviepy'
)
| Python | 0 | |
f96686735db03abdc2470c27ff8d7a04643c7727 | Add Exercise 9.8. | Kane1985/Chapter5/Ex9.8.py | Kane1985/Chapter5/Ex9.8.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.8 from Kane 1985."""
from __future__ import division
from sympy import simplify, solve, symbols, Matrix
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import inertia, RigidBody
from sympy.physics.mechanics import cross, dot, dynamicsymbols
from util import msprint, subs, partial_velocities
from util import generalized_active_forces, potential_energy
from util import kde_matrix, vc_matrix
# Symbolic setup for Kane 1985, exercise 9.8: generalized coordinates q1..q5,
# generalized speeds u1..u5, and the system's physical parameters.
q1, q2, q3, q4, q5 = dynamicsymbols('q1:6')
q1d, q2d, q3d, q4d, q5d = dynamicsymbols('q1:6', level=1)
u1, u2, u3, u4, u5 = dynamicsymbols('u1:6')
u_prime, R, M, g, e, f, theta = symbols('u\' R, M, g, e, f, theta')
a, b, mA, mB, IA, J, K, t = symbols('a b mA mB IA J K t')
IA22, IA23, IA33 = symbols('IA22 IA23 IA33')
Q1, Q2, Q3 = symbols('Q1, Q2 Q3')
TB, TC = symbols('TB TC')
# reference frames: F fixed, P inclined by theta, A rotating by q1 about P.x
F = ReferenceFrame('F')
P = F.orientnew('P', 'axis', [-theta, F.y])
A = P.orientnew('A', 'axis', [q1, P.x])
# define frames for wheels (spin angles q4, q5 about A.z)
B = A.orientnew('B', 'axis', [q4, A.z])
C = A.orientnew('C', 'axis', [q5, A.z])
# define points
pO = Point('O')
pO.set_vel(F, 0)
pD = pO.locatenew('D', q2*P.y + q3*P.z)
pD.set_vel(A, 0)
pD.set_vel(F, pD.pos_from(pO).dt(F))
pS_star = pD.locatenew('S*', e*A.y)
pQ = pD.locatenew('Q', f*A.y - R*A.x)
for p in [pS_star, pQ]:
    p.set_vel(A, 0)
    p.v2pt_theory(pD, F, A)
# masscenters of bodies A, B, C
pA_star = pD.locatenew('A*', a*A.y)
pB_star = pD.locatenew('B*', -b*A.z)
pC_star = pD.locatenew('C*', +b*A.z)
for p in [pA_star, pB_star, pC_star]:
    p.set_vel(A, 0)
    p.v2pt_theory(pD, F, A)
# points of B, C touching the plane P
pB_hat = pB_star.locatenew('B^', -R*A.x)
pC_hat = pC_star.locatenew('C^', -R*A.x)
pB_hat.set_vel(B, 0)
pC_hat.set_vel(C, 0)
pB_hat.v2pt_theory(pB_star, F, B)
pC_hat.v2pt_theory(pC_star, F, C)
# kinematic differential equations and velocity constraints
kde = [u1 - dot(A.ang_vel_in(F), A.x),
       u2 - dot(pD.vel(F), A.y),
       u3 - q3d,
       u4 - q4d,
       u5 - q5d]
kde_map = solve(kde, [q1d, q2d, q3d, q4d, q5d])
# rolling contact + planar-motion constraints, expressed in the u's
vc = [dot(p.vel(F), A.y) for p in [pB_hat, pC_hat]] + [dot(pD.vel(F), A.z)]
vc_map = solve(subs(vc, kde_map), [u3, u4, u5])
forces = [(pS_star, -M*g*F.x), (pQ, Q1*A.x)] # no friction at point Q
torques = [(A, -TB*A.z), (A, -TC*A.z), (B, TB*A.z), (C, TC*A.z)]
# NOTE(review): zip(...)[0] subscripting works on Python 2 only; Python 3
# would need list(zip(...))[0].
partials = partial_velocities(zip(*forces + torques)[0], [u1, u2],
                              F, kde_map, vc_map, express_frame=A)
Fr, _ = generalized_active_forces(partials, forces + torques)
# Assemble Eq. 5.1.18: n coordinates, p independent speeds, m constraints.
q = [q1, q2, q3, q4, q5]
u = [u1, u2]
n = len(q)
p = len(u)
m = n - p
if vc_map is not None:
    u += sorted(vc_map.keys())
dV_dq = symbols('∂V/∂q1:{0}'.format(n + 1))
dV_eq = Matrix(Fr).T
W_sr, _ = kde_matrix(u, kde_map)
if vc_map is not None:
    A_kr, _ = vc_matrix(u, vc_map)
else:
    A_kr = Matrix.zeros(m, p)
for s in range(W_sr.shape[0]):
    dV_eq += dV_dq[s] * (W_sr[s, :p] + W_sr[s, p:]*A_kr[:, :p])
print('Eq 5.1.18:')
for elem in dV_eq:
    print(msprint(elem))
| Python | 0.000002 | |
a57d39e7f63e6c034644a158aabb5ff6e6f04ae9 | add response test to testing module | oct/testing/response.py | oct/testing/response.py | # This file is fit for containing basic response status check
# All functions have to take a response object in param
def check_response_status(resp, status):
"""
This will check is the response_code is equal to the status
:param resp: a response object
:param status: the expected status
:type status: int
:return: None
:raise: AssertionError
"""
assert(resp.code == status), "Bad Response: HTTP %s, expected %s, URL : %s" % (resp.code, status, resp.geturl()) | Python | 0 | |
f37bcfdae9bfc14bacccdcba325d2b8fb1284d32 | set keystone user name to user's email address | planetstack/observer/steps/sync_user_deployments.py | planetstack/observer/steps/sync_user_deployments.py | import os
import base64
import hashlib
from collections import defaultdict
from django.db.models import F, Q
from planetstack.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.site import SiteDeployments, Deployment
from core.models.user import User, UserDeployments
from util.logger import Logger, logging
logger = Logger(level=logging.INFO)
class SyncUserDeployments(OpenStackSyncStep):
    """Ensure each user has a keystone account at every deployment they need."""
    provides=[User, UserDeployments]
    requested_interval=0

    def fetch_pending(self):
        """Return UserDeployments records that need to be enacted.

        User deployments are not visible to users, so before selecting
        pending records we create any missing UserDeployments rows: admins
        get an account at every deployment, normal users at all deployments
        available to their site.
        """
        deployments = Deployment.objects.all()
        site_deployments = SiteDeployments.objects.all()
        site_deploy_lookup = defaultdict(list)
        for site_deployment in site_deployments:
            site_deploy_lookup[site_deployment.site].append(site_deployment.deployment)

        user_deployments = UserDeployments.objects.all()
        user_deploy_lookup = defaultdict(list)
        for user_deployment in user_deployments:
            user_deploy_lookup[user_deployment.user].append(user_deployment.deployment)

        for user in User.objects.all():
            if user.is_admin:
                # admins should have an account at all deployments
                expected_deployments = deployments
            else:
                # normal users should have an account at their site's deployments
                expected_deployments = site_deploy_lookup[user.site]
            for expected_deployment in expected_deployments:
                if expected_deployment not in user_deploy_lookup[user]:
                    # missing record: create it so it is enacted below
                    ud = UserDeployments(user=user, deployment=expected_deployment)
                    ud.save()

        # now we can return all user deployments that need to be enacted
        return UserDeployments.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))

    def sync_record(self, user_deployment):
        """Create/update the keystone user and roles for one UserDeployments row."""
        logger.info("sync'ing user %s at deployment %s" % (user_deployment.user, user_deployment.deployment.name))

        # keystone user name is the user's full email address
        user_fields = {'name': user_deployment.user.email,
                       'email': user_deployment.user.email,
                       'password': hashlib.md5(user_deployment.user.password).hexdigest()[:6],
                       'enabled': True}
        driver = self.driver.admin_driver(deployment=user_deployment.deployment.name)
        if not user_deployment.kuser_id:
            keystone_user = driver.create_user(**user_fields)
            user_deployment.kuser_id = keystone_user.id
        else:
            driver.update_user(user_deployment.kuser_id, user_fields)

        # setup user deployment site roles
        if user_deployment.user.site:
            site_deployments = SiteDeployments.objects.filter(site=user_deployment.user.site,
                                                              deployment=user_deployment.deployment)
            if site_deployments:
                # need the correct tenant id for site at the deployment
                tenant_id = site_deployments[0].tenant_id
                driver.add_user_role(user_deployment.kuser_id,
                                     tenant_id, 'user')
                if user_deployment.user.is_admin:
                    driver.add_user_role(user_deployment.kuser_id, tenant_id, 'admin')
                else:
                    # may have admin role so attempt to remove it
                    driver.delete_user_role(user_deployment.kuser_id, tenant_id, 'admin')

        if user_deployment.user.public_key:
            # bug fix: `user` was an undefined name in this scope (NameError
            # whenever a public key was present); use the record's user.
            user_driver = driver.client_driver(caller=user_deployment.user,
                                               tenant=user_deployment.user.site.login_base,
                                               deployment=user_deployment.deployment.name)
            key_fields = {'name': user_deployment.user.keyname,
                          'public_key': user_deployment.user.public_key}
            user_driver.create_keypair(**key_fields)

        user_deployment.save()
| import os
import base64
import hashlib
from collections import defaultdict
from django.db.models import F, Q
from planetstack.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.site import SiteDeployments, Deployment
from core.models.user import User, UserDeployments
from util.logger import Logger, logging
logger = Logger(level=logging.INFO)
class SyncUserDeployments(OpenStackSyncStep):
    """Ensure each user has a keystone account at every deployment they need."""
    provides=[User, UserDeployments]
    requested_interval=0
    def fetch_pending(self):
        # user deployments are not visible to users. We must ensure
        # user are deployed at all deploymets available to their sites.
        deployments = Deployment.objects.all()
        site_deployments = SiteDeployments.objects.all()
        site_deploy_lookup = defaultdict(list)
        for site_deployment in site_deployments:
            site_deploy_lookup[site_deployment.site].append(site_deployment.deployment)
        user_deployments = UserDeployments.objects.all()
        user_deploy_lookup = defaultdict(list)
        for user_deployment in user_deployments:
            user_deploy_lookup[user_deployment.user].append(user_deployment.deployment)
        for user in User.objects.all():
            if user.is_admin:
                # admins should have an account at all deployments
                expected_deployments = deployments
            else:
                # normal users should have an account at their site's deployments
                expected_deployments = site_deploy_lookup[user.site]
            for expected_deployment in expected_deployments:
                if expected_deployment not in user_deploy_lookup[user]:
                    # missing record: create it so it gets enacted below
                    ud = UserDeployments(user=user, deployment=expected_deployment)
                    ud.save()
        # now we can return all slice deployments that need to be enacted
        return UserDeployments.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
    def sync_record(self, user_deployment):
        # Create or update the keystone user and roles for this record.
        logger.info("sync'ing user %s at deployment %s" % (user_deployment.user, user_deployment.deployment.name))
        # keystone user name is the local part of the email (before the '@')
        name = user_deployment.user.email[:user_deployment.user.email.find('@')]
        user_fields = {'name': name,
                       'email': user_deployment.user.email,
                       'password': hashlib.md5(user_deployment.user.password).hexdigest()[:6],
                       'enabled': True}
        driver = self.driver.admin_driver(deployment=user_deployment.deployment.name)
        if not user_deployment.kuser_id:
            keystone_user = driver.create_user(**user_fields)
            user_deployment.kuser_id = keystone_user.id
        else:
            driver.update_user(user_deployment.kuser_id, user_fields)
        # setup user deployment site roles
        if user_deployment.user.site:
            site_deployments = SiteDeployments.objects.filter(site=user_deployment.user.site,
                                                              deployment=user_deployment.deployment)
            if site_deployments:
                # need the correct tenant id for site at the deployment
                tenant_id = site_deployments[0].tenant_id
                driver.add_user_role(user_deployment.kuser_id,
                                     tenant_id, 'user')
                if user_deployment.user.is_admin:
                    driver.add_user_role(user_deployment.kuser_id, tenant_id, 'admin')
                else:
                    # may have admin role so attempt to remove it
                    driver.delete_user_role(user_deployment.kuser_id, tenant_id, 'admin')
        if user_deployment.user.public_key:
            # NOTE(review): `user` is not defined in this scope — this looks
            # like it should be `user_deployment.user` (NameError at runtime
            # whenever a public key is set); confirm and fix.
            user_driver = driver.client_driver(caller=user, tenant=user.site.login_base,
                                               deployment=user_deployment.deployment.name)
            key_fields = {'name': user_deployment.user.keyname,
                          'public_key': user_deployment.user.public_key}
            user_driver.create_keypair(**key_fields)
        user_deployment.save()
| Python | 0.000008 |
2f1b12a6f173c01f9631d0ad5a4d3c3f411983cb | add file notification platform | homeassistant/components/notify/file.py | homeassistant/components/notify/file.py | """
homeassistant.components.notify.file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
File notification service.
Configuration:
To use the File notifier you will need to add something like the following
to your config/configuration.yaml
notify:
platform: file
path: PATH_TO_FILE
filename: FILENAME
timestamp: 1 or 0
Variables:
path
*Required
Path to the directory that contains your file. You need to have write
permission for that directory. The directory will be created if it doesn't
exist.
filename
*Required
Name of the file to use. The file will be created if it doesn't exist.
date
*Required
Add a timestamp to the entry, valid entries are 1 or 0.
"""
import logging
from pathlib import (Path, PurePath)
import homeassistant.util.dt as dt_util
from homeassistant.helpers import validate_config
from homeassistant.components.notify import (
DOMAIN, ATTR_TITLE, BaseNotificationService)
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config):
    """ Get the file notification service.

    Validates the required config keys, creates the target directory and
    log file on first use, and returns a FileNotificationService, or None
    if the config is invalid or the location is not writable.
    """
    if not validate_config(config,
                           {DOMAIN: ['path',
                                     'filename',
                                     'timestamp']},
                           _LOGGER):
        return None

    path = config[DOMAIN]['path']
    filename = config[DOMAIN]['filename']
    filepath = Path(path, filename)

    # pylint: disable=no-member
    if not filepath.parent.exists():
        try:
            filepath.parent.mkdir(parents=True)
            filepath.touch(mode=0o644, exist_ok=True)
        except OSError:
            # Narrowed from a bare `except:` so only filesystem errors
            # (permissions, read-only mounts, ...) are swallowed here.
            _LOGGER.exception("No write permission to given location.")
            return None

    return FileNotificationService(filepath, config[DOMAIN]['timestamp'])
# pylint: disable=too-few-public-methods
class FileNotificationService(BaseNotificationService):
    """ Implements notification service for the File service. """

    # pylint: disable=no-member
    def __init__(self, filepath, add_timestamp):
        # Normalized string path of the log file messages are appended to.
        self._filepath = str(PurePath(filepath))
        # When 1, each message line is prefixed with a UTC timestamp.
        self._add_timestamp = add_timestamp

    def send_message(self, message="", **kwargs):
        """ Send a message to a file.

        Writes a one-time title header while the file is still empty, then
        appends the message (optionally timestamped).
        """
        # `with` closes the handle even if a write fails; the previous
        # open()/close() pair leaked the file descriptor on error.
        with open(self._filepath, 'a') as file:
            if not Path(self._filepath).stat().st_size:
                title = '{} notifications (Log started: {})\n{}\n'.format(
                    kwargs.get(ATTR_TITLE),
                    dt_util.strip_microseconds(dt_util.utcnow()),
                    '-'*80)
                file.write(title)

            if self._add_timestamp == 1:
                text = '{} {}\n'.format(dt_util.utcnow(), message)
            else:
                text = '{}\n'.format(message)
            file.write(text)
| Python | 0 | |
a9893fc562c9131fdaebaa842f587f415b7fdfda | Add second test. | oommfmif/test_basics.py | oommfmif/test_basics.py | import oommfmif as o
def test_get_oommf_version_return_type():
    """The reported OOMMF version must be a plain string."""
    version = o.get_version()
    assert isinstance(version, str)
def test_get_oommf_version():
    """The OOMMF version string must begin with the 1.2 series prefix."""
    version = o.get_version()
    assert version[:4] == "1.2."
| import oommfmif as o
def test_get_oommf_version():
    """get_version() must return a string."""
    result = o.get_version()
    assert isinstance(result, str)
| Python | 0.000007 |
99389c1f863592c8c56c8dca415155536abbd0fd | Create new.py | simple_mqtt/new.py | simple_mqtt/new.py | Python | 0.000001 | ||
0ec30eb8bcf0e7688182f827bea24fd0ceb33501 | add models | models.py | models.py | from peewee import *
from config import db
class BaseModel(Model):
    """Peewee base model: binds all subclasses to the shared `db` connection."""
    class Meta:
        database = db
class HistoricalTrainPosition(BaseModel):
    """One recorded train-position observation."""
    cars = IntegerField()           # number of cars in the train
    line_code = CharField()         # line identifier
    next_station = CharField()      # station the train is approaching
    dest_station = CharField()      # train's final destination station
    time = IntegerField()           # arrival countdown; units not shown here — confirm
    timestamp = DateTimeField()     # when the observation was recorded
| Python | 0 | |
5c02d902753327b3413e994d6edc089b8ca72749 | Add create_flipper step | dbaas/workflow/steps/create_flipper.py | dbaas/workflow/steps/create_flipper.py | # -*- coding: utf-8 -*-
import logging
from base import BaseStep
from dbaas_flipper.provider import FlipperProvider
LOG = logging.getLogger(__name__)
class CreateFlipper(BaseStep):
def __unicode__(self):
return "Creating Flipper"
def do(self, workflow_dict):
try:
if workflow_dict['qt']==1:
return True
flipper = FlipperProvider()
LOG.info("Creating Flipper...")
flipper.create_flipper_dependencies(
masterpairname=workflow_dict['names']['infra'],
hostname1=workflow_dict[
'hosts'][0].address,
writeip=workflow_dict[
'databaseinfraattr'][0].ip,
readip=workflow_dict[
'databaseinfraattr'][1].ip,
hostname2=workflow_dict[
'hosts'][1].address,
environment=workflow_dict['environment'])
return True
except Exception, e:
print e
return False
def undo(self, workflow_dict):
try:
if workflow_dict['qt']==1:
return True
LOG.info("Destroying Flipper...")
FlipperProvider(
).destroy_flipper_dependencies(masterpairname=workflow_dict['databaseinfra'].name,
environment=workflow_dict['environment'])
return True
except Exception, e:
print e
return False
| Python | 0.000001 | |
cf99929c923cb31782a192f108c735bfcc9cde2f | Add render module, this will be the interface to manage rendering state files into high state data | salt/render.py | salt/render.py | '''
Render is a module used to parse the render files into high salt state data
structures.
The render system uses render modules which are plugable interfaces under the
render directory.
'''
# Import salt modules
import salt.loader
class Render(object):
    '''
    Render state files.

    Placeholder: the render-module loading logic is not implemented yet.
    '''
    def __init__(self, opts):
        # `opts` (master/minion options) is accepted but not yet stored.
        pass
| Python | 0 | |
773efcb6aec427034263d550c600da0654031fa4 | Add simpleTestCondition.py script to test condition notification framework w/o using full Django unit test infrastucture | apps/basaltApp/scripts/simpleTestCondition.py | apps/basaltApp/scripts/simpleTestCondition.py | #! /usr/bin/env python
#__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
import sys
import requests
import datetime
import json
import pytz
HTTP_PREFIX = 'https'
URL_PREFIX = 'localhost'
def test_set_condition():
    """POST a 'Started' condition to the xGDS endpoint; return the JSON reply."""
    url = "%s://%s%s" % (HTTP_PREFIX, URL_PREFIX, '/xgds_core/condition/set/')
    nowtime = datetime.datetime.now(pytz.utc)
    isonow = nowtime.isoformat()
    # Payload mimics what Playbook would send when a condition starts.
    nested_data_dict = {'start_time': isonow,
                        'status': 'Started',
                        'timezone': 'US/Hawaii',
                        'name': 'test_set_condition',
                        'extra': 'Start time should be set',
                        'assignment': 'EV2',
                        'group_name': '20170426B',
                        'xgds_id': 'HIL13_A_WAY0_0_PXO'
                        }
    data = {'time': isonow,
            'source': 'playbook',
            'id': 'PB1',
            'data': json.dumps(nested_data_dict)
            }
    # verify=False: test server uses a self-signed certificate.
    response = requests.post(url, data=data, verify=False)
    json_response = response.json()
    return json_response
def test_update_condition():
    """POST an 'in_progress' update for the same condition; return the JSON reply."""
    url = "%s://%s%s" % (HTTP_PREFIX, URL_PREFIX, '/xgds_core/condition/set/')
    nowtime = datetime.datetime.now(pytz.utc)
    isonow = nowtime.isoformat()
    nested_data_dict = {'status': 'in_progress',
                        'extra': 'In progress for this',
                        'assignment': 'EV2',
                        'group_name': '20170426B',
                        'xgds_id': 'HIL13_A_WAY0_0_PXO'
                        }
    data = {'time': isonow,
            'source': 'playbook',
            'id': 'PB1',
            'data': json.dumps(nested_data_dict)
            }
    # verify=False: test server uses a self-signed certificate.
    response = requests.post(url, data=data, verify=False)
    json_response = response.json()
    return json_response
def test_end_condition():
    """POST a 'completed' condition update; return the decoded JSON reply."""
    url = "%s://%s%s" % (HTTP_PREFIX, URL_PREFIX, '/xgds_core/condition/set/')
    nowtime = datetime.datetime.now(pytz.utc)
    isonow = nowtime.isoformat()
    nested_data_dict = {'end_time': isonow,
                        'status': 'completed',
                        'extra': 'Done done done',
                        'assignment': 'EV2',
                        'group_name': '20170426B',
                        'xgds_id': 'HIL13_A_WAY0_0_PXO'
                        }
    data = {'time': isonow,
            'source': 'playbook',
            'id': 'PB1',
            'data': json.dumps(nested_data_dict)
            }

    response = requests.post(url, data=data, verify=False)
    json_response = response.json()
    # bug fix: unlike its sibling test functions this one never returned the
    # response, so the caller printed None for the 'end' mode.
    return json_response
# Dispatch on the requested mode: set | update | end.
# NOTE(review): this runs at import time (no `if __name__ == '__main__':`
# guard), and an unrecognized mode leaves `resp` unbound, so the final
# print raises NameError — confirm both are acceptable for this test script.
mode = sys.argv[1]
print "Running %s condition check..." % mode
if mode == 'set':
    resp = test_set_condition()
if mode == 'update':
    resp = test_update_condition()
if mode == 'end':
    resp = test_end_condition()
print "response:", resp
| Python | 0 | |
c735935c983cc7ccd72b2c71733e6f785a8a3ae3 | Create urls.py | Assessment/urls.py | Assessment/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^getAssignmentByCode', views.getAssignmentByCode, name='getAssignmentByCode'),
url(r'^retrieveAssignments', views.retrieveAssignments, name='retrieveAssignments'),
]
| Python | 0.000017 | |
1bfbd397e3b3c805aa29f407915e1d10ca7eb179 | Create rbgLib.py | rbgLib.py | rbgLib.py | from __future__ import division
import time
import RPi.GPIO as GPIO
# noinspection PyPep8Naming
class rgbColour(object):
    """An RGB colour expressed as per-channel duty cycles (0-100)."""

    # Class-level defaults keep the attributes defined even before __init__.
    red = 0
    green = 0
    blue = 0

    def __init__(self, red, green, blue):
        """Store the three channel intensities on this instance."""
        self.red, self.green, self.blue = red, green, blue
def hexToColour(r, g, b):
    """Convert 8-bit (0-255) RGB channel values into a duty-cycle rgbColour.

    Each channel is scaled by 100/255 so 255 maps to a 100% duty cycle.
    """
    # Exact scale factor; the previous literal 0.3921568627 was a truncated
    # decimal approximation of 100/255.
    hex_constant = 100.0 / 255.0
    return rgbColour(hex_constant*r, hex_constant*g, hex_constant*b)
# Predefined palette (duty-cycle colours, 0-100 per channel).
red = rgbColour(100,0,0)
green = rgbColour(0,100,0)
blue = rgbColour(0,0,100)
orange = hexToColour(255, 127, 0)
yellow = hexToColour(255,255,0)
indigo = hexToColour(75,0,130)
purple = rgbColour(100,0,100)
aqua = rgbColour(0,100,100)
turquoise = rgbColour(0,100,30)
# noinspection PyPep8Naming
class rbgLed(object):
    """An RGB LED driven by three PWM LED channels (red, green, blue)."""

    def __init__(self, RED, GREEN, BLUE):
        # The three LED channel drivers.
        self.R_led = RED
        self.G_led = GREEN
        self.B_led = BLUE

    def set_colour(self, colour):
        """Apply an rgbColour's duty cycles to the three channels."""
        self.R_led.set_ds(colour.red)
        self.G_led.set_ds(colour.green)
        self.B_led.set_ds(colour.blue)

    def verbose_get_colour(self):
        """Print and return the current colour."""
        print (
            "COLOUR : " + str(self.R_led.duty_cycle) + " " + str(self.G_led.duty_cycle) + " " + str(
                self.B_led.duty_cycle))
        return rgbColour(self.R_led.duty_cycle, self.G_led.duty_cycle, self.B_led.duty_cycle)

    def blink(self, colour, hold_time):
        """Show *colour* for *hold_time* seconds, then switch the LED off."""
        self.set_colour(colour)
        time.sleep(hold_time)
        self.turn_off()

    def get_colour(self):
        """Return the current channel duty cycles as an rgbColour."""
        return rgbColour(self.R_led.duty_cycle, self.G_led.duty_cycle, self.B_led.duty_cycle)

    def turn_off(self):
        """Set all three channels to a 0% duty cycle."""
        self.R_led.turn_off()
        self.G_led.turn_off()
        self.B_led.turn_off()

    def fade_to(self, destColour, length):
        """Fade linearly from the current colour to *destColour* in *length* steps."""
        startColour = self.get_colour()
        redDelta = find_delta(startColour.red, destColour.red, length)
        print("RedDelta = " + str(redDelta))
        greenDelta = find_delta(startColour.green, destColour.green, length)
        blueDelta = find_delta(startColour.blue, destColour.blue, length)
        for i in range(0, length + 1, 1):
            time.sleep(0.004)
            to = rgbColour(startColour.red + (i * redDelta),
                           startColour.green + (i * greenDelta),
                           startColour.blue + (i * blueDelta))
            self.set_colour(to)
            # bug fix: `print to.red` was a Python 2 print statement, which is
            # a SyntaxError in the otherwise Python 3-compatible module.
            print(to.red)
class LED(object):
    """A single LED on one GPIO pin, optionally driven by software PWM.

    NOTE(review): when bool_pwm is False, self.pwm is never created, so a
    later set_ds()/turn_off() call would raise AttributeError -- confirm
    non-PWM instances are never dimmed.
    """
    def __init__(self, pin, bool_pwm, freq):
        # pin: BCM pin number (init() below selects GPIO.setmode(GPIO.BCM)).
        self.pin = pin
        GPIO.setup(self.pin, GPIO.OUT)
        self.duty_cycle = 0
        if bool_pwm:
            # Start PWM at 0% duty cycle, i.e. the LED begins switched off.
            self.pwm = GPIO.PWM(pin, freq)
            self.pwm.start(self.duty_cycle)

    def set_ds(self, duty_cycle):
        # Remember the value so rbgLed.get_colour() can read it back.
        self.duty_cycle = duty_cycle
        self.pwm.ChangeDutyCycle(duty_cycle)

    def turn_off(self):
        self.set_ds(0)
def find_delta(start_val, finish_val, steps):
    """Return the per-step increment that moves start_val to finish_val
    in `steps` equal steps.

    Fix: steps == 0 now returns 0 instead of raising ZeroDivisionError,
    so a zero-length fade degenerates to "no change per step".
    """
    if steps == 0:
        return 0
    return (finish_val - start_val) / steps
def ledAlert(destColour, rgbLEDy, length):
    """Flash an alert: fade to destColour, hold 0.5s, fade back.

    rgbLEDy is an rbgLed instance; length is the step count per fade.
    """
    startColour = rgbLEDy.get_colour()
    rgbLEDy.fade_to(destColour, length)
    time.sleep(0.5)
    rgbLEDy.fade_to(startColour, length)
def init():
    """Prepare the GPIO library: BCM pin numbering, warnings silenced."""
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(False)
def exit(LED):
    """Shut down: switch off the given LED and release all claimed GPIO pins.

    NOTE(review): the function name shadows the builtin ``exit`` and the
    parameter name shadows the ``LED`` class defined above; renaming would
    break keyword callers, so it is only flagged here.
    """
    LED.turn_off()
    GPIO.cleanup()
| Python | 0.000003 | |
83d4ac6c3565044727c9b3fcbada9966d529a80e | Add forgotten font leader lib | lib/font_loader.py | lib/font_loader.py | import os
import sys
import logging
# Font files the application requires; each must ship in FONT_DIRECTORY.
FONT_FILE_NAME_LIST = (
    "fontawesome-webfont.ttf",
)
# Project-local directory that bundles the font files.
FONT_DIRECTORY = "share"
# Standard system-wide and per-user font locations.
# NOTE(review): os.environ['HOME'] raises KeyError when HOME is unset
# (e.g. some daemon environments) -- confirm that is acceptable here.
FONT_DIRECTORY_SYSTEM = "/usr/share/fonts"
FONT_DIRECTORY_USER = os.path.join(os.environ['HOME'], ".local/share/fonts")
class FontLoader:
    """Makes the project's bundled fonts visible to the current user.

    Fonts already installed system-wide or per-user are left untouched;
    missing ones are symlinked from FONT_DIRECTORY into FONT_DIRECTORY_USER.
    unload() removes exactly the links load() created.
    """

    def __init__(self):
        self.fonts_loaded = []  # symlink paths created by load()
        self.logger = logging.getLogger('FontLoader')

    def load(self):
        """Ensure every bundled font is available, symlinking it if needed.

        Raises IOError when a required font is missing from the project.
        """
        for font_file_name in FONT_FILE_NAME_LIST:
            # the font must ship with the project
            font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)
            if not os.path.isfile(font_source_path):
                raise IOError("Font '{}' not found in project directories".format(
                    font_file_name
                    ))

            # check if the font is installed at system level
            if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):
                self.logger.debug("Font '{}' found in system directory".format(
                    font_file_name
                    ))

                continue

            # check if the font is installed at user level
            if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):
                self.logger.debug("Font '{}' found in user directory".format(
                    font_file_name
                    ))

                continue

            # Fix: the per-user font directory may not exist yet; symlinking
            # into a missing directory raises OSError.
            if not os.path.isdir(FONT_DIRECTORY_USER):
                os.makedirs(FONT_DIRECTORY_USER)

            font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)

            # Fix: a stale *broken* symlink fails the isfile() check above but
            # still makes os.symlink() raise; remove it before re-linking.
            if os.path.islink(font_target_path):
                os.unlink(font_target_path)

            os.symlink(
                    os.path.join(os.getcwd(), font_source_path),
                    font_target_path
                    )

            self.fonts_loaded.append(font_target_path)
            self.logger.debug("Font '{}' loaded in user directory: '{}'".format(
                font_file_name,
                font_target_path
                ))

    def unload(self):
        """Remove every symlink created by load()."""
        for font_path in self.fonts_loaded:
            os.unlink(font_path)
            self.logger.debug("Font '{}' unloaded".format(
                font_path
                ))

        self.fonts_loaded = []
| Python | 0 | |
d8f7cb58e7f760ccbb839aafeda4dbf7204d7d82 | Add r_latestagecapitalism | channels/r_latestagecapitalism/app.py | channels/r_latestagecapitalism/app.py | #encoding:utf-8
# Subreddit to mirror and the Telegram channel that receives its posts.
subreddit = 'latestagecapitalism'
t_channel = '@r_latestagecapitalism'


def send_post(submission, r2t):
    """Forward one reddit submission to the channel.

    `r2t` is presumably the framework's reddit-to-telegram sender; this
    channel uses its default formatting (send_simple) unchanged.
    """
    return r2t.send_simple(submission)
| Python | 0.999492 | |
151e8fc71e5ef2e31db13730bff57bc8fd915c30 | Add test case for list invoice | paystackapi/tests/test_invoice.py | paystackapi/tests/test_invoice.py | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.invoice import Invoice
class TestInvoice(BaseTestCase):

    @httpretty.activate
    def test_create_invoice(self):
        """Invoice.create POSTs /paymentrequest and reports success."""
        # Stub the Paystack endpoint so no real HTTP traffic happens.
        httpretty.register_uri(
            httpretty.POST,
            self.endpoint_url("/paymentrequest"),
            status=201,
            content_type='text/json',
            body='{"status": true, "message": "Invoice created"}',
        )

        resp = Invoice.create(
            customer="CUS_je02lbimlqixzax",
            amount=42000,
            due_date="2019-05-08T00:00:00.000Z"
        )
        self.assertTrue(resp['status'])

    @httpretty.activate
    def test_list_invoice(self):
        """Invoice.list GETs /paymentrequest and reports success."""
        httpretty.register_uri(
            httpretty.GET,
            self.endpoint_url("/paymentrequest"),
            status=201,
            content_type='text/json',
            body='{"status": true, "message": "Invoice retrieved"}',
        )

        resp = Invoice.list(
            customer="CUS_je02lbimlqixzax",
            status="pending",
            currency="NGN",
            paid="false",
            include_archive="true"
        )
        self.assertTrue(resp['status'])
| Python | 0.000001 | |
dcd1d962feec4f3cd914677545f74924ad9e6351 | Add test for file creation of low level library | testing/test_direct_wrapper.py | testing/test_direct_wrapper.py | import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
    """fits_create_file must succeed (status 0) and produce the file on disk."""
    target = str(tmpdir.join('test.fits'))
    # Out-parameters for the C API: a fitsfile handle and a status code.
    fptr = ffi.new('fitsfile **')
    status_code = ffi.new('int *')
    lib.fits_create_file(fptr, target, status_code)
    assert status_code[0] == 0
    assert os.path.isfile(target)
| Python | 0 | |
2ccd94f9fb6f4a64976124ca82ac4c5ef585d64b | add serializer field | djbitcoin/serializers.py | djbitcoin/serializers.py | from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from .utils import is_bitcoin_address_valid
class BitcoinAddressField(serializers.CharField):
    """CharField that additionally validates its value as a bitcoin address."""

    default_error_messages = {
        'invalid': _('Invalid bitcoin address.')
    }

    def to_internal_value(self, data):
        # Run CharField's normal coercion/validation first.
        value = super().to_internal_value(data)
        # Then reject anything that is not a well-formed bitcoin address.
        if not is_bitcoin_address_valid(value):
            self.fail('invalid')
        return value
| Python | 0 | |
dddc76173a5150939535b2c506aa967fe17ee000 | Fix #12 : env implementation | elevator/env.py | elevator/env.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from ConfigParser import ConfigParser
from utils.patterns import Singleton
from utils.decorators import lru_cache
from utils.snippets import items_to_dict
class Environment(object):
    """
    Unix-shell-like environment class. Implements add, get, load,
    append, flush and reload. Handles lists of values too.
    Basically acts like a basic key/value store.
    """
    __metaclass__ = Singleton

    # Character that splits a raw string value into a sequence.
    SEQ_DELIMITER = ','

    def __init__(self, env_file=''):
        self.attributes = set()  # Stores manually added attributes

        if env_file:
            self.load(env_file=env_file)  # Has to be called last!

    def add(self, name, value):
        """Adds a key/value to env"""
        setattr(self, name, value)
        self.attributes.add(name)

    @lru_cache(maxsize=1024)
    def get(self, name):
        """Cached env key fetch.

        NOTE: results are memoized, so values appended to `name` after its
        first fetch will not be visible through get().
        """
        var = getattr(self, name)

        if ',' in var:
            return var.split(',')
        return var

    def append(self, var, value):
        """
        `value` can either be a (name, value) tuple/list pair,
        or a value string. If a pair is given, the method
        will consider that the var to append to is a dict
        and will try to add the name/value to it.
        If it is a String, it will try to automatically transform
        the pointed var to a sequence and add the value to it.
        """
        env_var = getattr(self, var)
        env_var_type = type(env_var)

        if ((isinstance(value, tuple) or isinstance(value, list)) and \
            len(value) == 2):
            key, value = value
            env_var.update({key: value})
        elif isinstance(value, str):
            # Promote a scalar value to a list before appending.
            if env_var_type != list:
                env_var = [env_var]
            env_var.append(value)
            setattr(self, var, env_var)
        else:
            err_msg = "Env value has to wether be iterable sequence or str"
            raise TypeError(err_msg)

        self.attributes.add(var)

    def load(self, env_file):
        """Loads an ini file containing the env description : key/value"""
        config = ConfigParser()
        config.read(env_file)

        for section in config.sections():
            setattr(self, section, items_to_dict(config.items(section)))
            self.attributes.add(section)

            for k, v in getattr(self, section).iteritems():
                # Bug fix: this read self.CONFIG_SEQ_DELIMITER, which does
                # not exist (the class attribute is SEQ_DELIMITER), so any
                # load() of a non-empty file raised AttributeError.
                if self.SEQ_DELIMITER in v:
                    splitted = [e for e in v.split(self.SEQ_DELIMITER) if e]
                    getattr(self, section)[k] = splitted

    def reload(self, env_file=''):
        """Flush every loaded attribute, then load `env_file` afresh."""
        # Bug fix: flush() takes no arguments; passing env_file raised
        # TypeError on every reload.
        self.flush()
        self.load(env_file)

    def flush(self):
        """
        Flushes the environment from its manually
        set attributes.
        """
        for attr in self.attributes:
            delattr(self, attr)
        # Bug fix: forget the names that were just removed, otherwise a
        # second flush() would delattr() attributes that no longer exist.
        self.attributes.clear()
| Python | 0.000001 | |
01b42c531f7ab0ca81768b6e9833062f9e31ba95 | Update train_tagger script | examples/training/train_tagger.py | examples/training/train_tagger.py | """A quick example for training a part-of-speech tagger, without worrying
about the tokenization, or other language-specific customizations."""
from __future__ import unicode_literals
from __future__ import print_function
import plac
from pathlib import Path
from spacy.vocab import Vocab
from spacy.tagger import Tagger
from spacy.tokens import Doc
import random
# You need to define a mapping from your data's part-of-speech tag names to the
# Universal Part-of-Speech tag set, as spaCy includes an enum of these tags.
# See here for the Universal Tag Set:
# http://universaldependencies.github.io/docs/u/pos/index.html
# You may also specify morphological features for your tags, from the universal
# scheme.
TAG_MAP = {
    'N': {"pos": "NOUN"},
    'V': {"pos": "VERB"},
    'J': {"pos": "ADJ"}
}

# Usually you'll read this in, of course. Data formats vary.
# Ensure your strings are unicode.
# Each example is a (words, tags) pair; every tag must be a TAG_MAP key.
DATA = [
    (
        ["I", "like", "green", "eggs"],
        ["N", "V", "J", "N"]
    ),
    (
        ["Eat", "blue", "ham"],
        ["V", "J", "N"]
    )
]
def ensure_dir(path):
    """Create the directory `path` (a pathlib.Path) if it does not exist.

    Generalized: missing parent directories are now created too, so nested
    layouts such as <output_dir>/pos work even when output_dir is new.
    """
    if not path.exists():
        path.mkdir(parents=True)
def main(output_dir=None):
    """Train a toy POS tagger on DATA and optionally save it to output_dir."""
    if output_dir is not None:
        output_dir = Path(output_dir)
        ensure_dir(output_dir)
        ensure_dir(output_dir / "pos")
        ensure_dir(output_dir / "vocab")
    vocab = Vocab(tag_map=TAG_MAP)
    # The default_templates argument is where features are specified. See
    # spacy/tagger.pyx for the defaults.
    tagger = Tagger.blank(vocab, Tagger.default_templates())
    # 5 epochs over the tiny corpus, reshuffling the examples each epoch.
    for i in range(5):
        for words, tags in DATA:
            doc = Doc(vocab, orths_and_spaces=zip(words, [True] * len(words)))
            tagger.update(doc, tags)
        random.shuffle(DATA)
    tagger.model.end_training()
    # Sanity-check run on an unseen sentence, printing word/tag/POS.
    doc = Doc(vocab, orths_and_spaces=zip(["I", "like", "blue", "eggs"], [True]*4))
    tagger(doc)
    for word in doc:
        print(word.text, word.tag_, word.pos_)
    if output_dir is not None:
        tagger.model.dump(str(output_dir / 'pos' / 'model'))
        with (output_dir / 'vocab' / 'strings.json').open('wb') as file_:
            tagger.vocab.strings.dump(file_)
if __name__ == '__main__':
    # plac turns main's signature into a command-line interface.
    plac.call(main)

# Sample console output captured from the demo run above:
# I V VERB
# like V VERB
# blue N NOUN
# eggs N NOUN
| Python | 0 | |
8fc4fdc96c07432f87b49676b4ba9ca92a0f3385 | Add tool.parser module | grab/tools/parser.py | grab/tools/parser.py | def parse_int(val):
if val is None:
return None
else:
return int(val)
| Python | 0.000001 | |
e426afbe9ccbc72a1aa0d00032144e8b9b2b8cdc | Implement utility for colored, tabular output using fabric's color controls. | gusset/colortable.py | gusset/colortable.py | """
Pretty table generation.
"""
from itertools import cycle
from string import capwords
from fabric.colors import red, green, blue, magenta, white, yellow
class ColorRow(dict):
    """
    Ordered collection of column values.

    Keys follow table.columns; missing kwargs default to None.
    """
    def __init__(self, table, **kwargs):
        # Bug fix: the dict base was initialised with `self` as positional
        # argument (i.e. "fill this dict from itself"); call it with no
        # arguments instead.
        super(ColorRow, self).__init__()
        self.table = table
        for column in self.table.columns:
            self[column] = kwargs.get(column)

    def __str__(self):
        """
        Generate a formatted and colored string for this row.
        """
        def format_cell(color, item):
            column, value = item
            # Pad each cell to its column width (plus one leading space).
            return color(" {}".format(value).ljust(1 + self.table.column_widths[column]))

        # get items in column order
        items = [(column, self[column]) for column in self.table.columns]
        # format cells with color and length, cycling through the palette
        cells = [format_cell(color, item) for color, item in zip(cycle(self.table.colors), items)]
        return " ".join(cells)
class ColorTable(object):
    """
    Simple row/column table with colored, width-aligned output.
    """
    def __init__(self, *columns, **kwargs):
        """
        Create a table with fixed columns.

        :param columns: *args style list of column names
        :param kwargs: additional options, including `sort_key` and `colors`
        """
        self.columns = columns
        self.sort_key = kwargs.get("sort_key")
        self.colors = kwargs.get("colors", [red, green, blue, magenta, white, yellow])
        # Header row uses capitalized column names.
        self.header = ColorRow(self, **dict([(column, capwords(column)) for column in self.columns]))
        # initialize column widths based on header
        self.column_widths = dict([(column, len(self.header[column])) for column in self.columns])
        self.rows = []

    @property
    def separator(self):
        """
        Generate a separator row using current column widths.
        """
        cells = dict([(column, "-" * self.column_widths[column]) for column in self.columns])
        return ColorRow(self, **cells)

    def add(self, **kwargs):
        """Append a row and grow column widths to fit its values."""
        row = ColorRow(self, **kwargs)
        # Fix: widths are computed on str(value) so non-string (or missing ->
        # None) cell values no longer raise TypeError; this matches how
        # ColorRow.__str__ renders cells with "{}".format(value).
        for column in self.columns:
            self.column_widths[column] = max(self.column_widths[column], len(str(row[column])))
        self.rows.append(row)

    def __str__(self):
        """
        Generate a colored table (header, separator, then sorted rows).
        """
        rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows
        return "\n".join(map(str, [self.header, self.separator] + rows))
if __name__ == '__main__':
    # Demo: a small table sorted by (last, first), printed with colors.
    table = ColorTable("first", "last", sort_key=lambda row: (row["last"], row["first"]))
    table.add(first="George", last="Washington")
    table.add(first="John", last="Adams")
    table.add(first="Thomas", last="Jefferson")
    print table  # Python 2 print statement
| Python | 0 | |
af61c9a44871b1da8a939470492c18a45ab373e1 | Create lineValueDisp.py | lineValueDisp.py | lineValueDisp.py |
import TCP
import Motor
import Steering
import Status
import time
import Cameras
import Lights
import Modes
import os
# Build the drive train: the trip meter feeds the motors, and the line
# follower steers them (start_speed=0 keeps the robot stationary at first).
# These are constructed *before* the try block so the cleanup code below can
# never reference unbound names (the old version put them inside try and its
# bare `except:` would itself crash if construction failed).
trip_meter = Motor.TripMeter()
motors = Motor.Motor(trip_meter)
follow_line = Steering.FollowLine(motors, start_speed=0)
try:
    # Idle forever; the follower works in the background until interrupted.
    while True:
        time.sleep(10)
finally:
    # Always release the hardware, whether we exit via Ctrl-C or an error.
    # Unlike the old bare `except:`, this no longer silently swallows the
    # exception, so real faults remain visible after cleanup.
    motors.turn_off()
    follow_line.stop()
| Python | 0.000009 | |
3660c183ba1ddec8033ceae21b1b06fd0ab9a8b7 | Add Signal class | phasortoolbox/signal.py | phasortoolbox/signal.py | class Signal(object):
run = False | Python | 0 | |
913bb348938c2b54ab7a76c7e16ce9b3fb999dbe | Copy fail. | judge/management/commands/render_pdf.py | judge/management/commands/render_pdf.py | import os
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from django.utils import translation
from judge.models import Problem, ProblemTranslation
from judge.pdf_problems import WebKitPdfMaker
class Command(BaseCommand):
    help = 'renders a PDF file of a problem'

    def add_arguments(self, parser):
        """Register the positional problem code, optional temp directory
        and the -l/--language option."""
        parser.add_argument('code', help='code of problem to render')
        parser.add_argument('directory', nargs='?', help='directory to store temporaries')
        parser.add_argument('-l', '--language', default=settings.LANGUAGE_CODE,
                            help='language to render PDF in')

    def handle(self, *args, **options):
        """Render the problem statement to PDF in the requested language."""
        try:
            problem = Problem.objects.get(code=options['code'])
        except Problem.DoesNotExist:
            print 'Bad problem code'  # Python 2 print statement
            return

        # Fall back to the untranslated statement when no translation exists.
        try:
            trans = problem.translations.get(language=options['language'])
        except ProblemTranslation.DoesNotExist:
            trans = None

        directory = options['directory']
        # Only clean up temporaries when we created them ourselves.
        with WebKitPdfMaker(directory, clean_up=directory is None) as maker, \
                translation.override(options['language']):
            # Rewrite protocol-relative URLs (//host/...) to http:// so the
            # offline PDF renderer can fetch them.
            maker.html = get_template('problem/raw.jade').render(Context({
                'problem': problem,
                'problem_name': problem.name if trans is None else trans.name,
                'description': problem.description if trans is None else trans.description,
            })).replace('"//', '"http://').replace("'//", "'http://")
            for file in ('style.css', 'pygment-github.css'):
                maker.load(file, os.path.join(settings.DMOJ_RESOURCES, file))
            maker.make(debug=True)
            if not maker.success:
                print>>sys.stderr, maker.log  # Python 2 print redirection
            elif directory is None:
                os.rename(maker.pdffile, problem.code + '.pdf')
| import os
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from django.utils import translation
from judge.models import Problem, ProblemTranslation
from judge.pdf_problems import WebKitPdfMaker
class Command(BaseCommand):
help = 'renders a PDF file of a problem'
def add_arguments(self, parser):
parser.add_argument('code', help='code of problem to render')
parser.add_argument('directory', nargs='?', help='directory to store temporaries')
parser.add_argument('-l', '--language', default=settings.LANGUAGE_CODE,
help='language to render PDF in')
def handle(self, *args, **options):
try:
problem = Problem.objects.get(code=options['code'])
except Problem.DoesNotExist:
print 'Bad problem code'
return
problem = self.get_object()
try:
trans = problem.translations.get(language=options['language'])
except ProblemTranslation.DoesNotExist:
trans = None
directory = options['directory']
with WebKitPdfMaker(directory, clean_up=directory is None) as maker, \
translation.override(options['language']):
maker.html = get_template('problem/raw.jade').render(Context({
'problem': problem,
'problem_name': problem.name if trans is None else trans.name,
'description': problem.description if trans is None else trans.description,
})).replace('"//', '"http://').replace("'//", "'http://")
for file in ('style.css', 'pygment-github.css'):
maker.load(file, os.path.join(settings.DMOJ_RESOURCES, file))
maker.make(debug=True)
if not maker.success:
print>>sys.stderr, maker.log
elif directory is None:
os.rename(maker.pdffile, problem.code + '.pdf')
| Python | 0 |
b31e15d12dbff8eaab71ec523ec16d5f1afe908b | add sharpen pic tool | sharpen_pic.py | sharpen_pic.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#function: 锐化图像
import os
import os.path
import sys, getopt, argparse
from PIL import Image, ImageEnhance
def sharpenPic(filein, fileout, factor=2.0):
    """Sharpen the image at `filein` and save the result to `fileout`.

    Generalized: `factor` is the ImageEnhance sharpness factor (1.0 keeps
    the original, values > 1 sharpen, < 1 blur); it defaults to the
    previously hard-coded 2.0, so existing callers are unaffected.
    """
    im02 = Image.open(filein)
    im_30 = ImageEnhance.Sharpness(im02).enhance(factor)
    im_30.save(fileout)
def main():
    """Parse -f/--fromdir and -d/--outdir, then sharpen every image found.

    Files that fail to process are reported and skipped (best effort).
    """
    parser = argparse.ArgumentParser(description="Tool for sharp the image")
    parser.add_argument('-f', '--fromdir', required=True, help='the directory path of the input file')
    parser.add_argument('-d', '--outdir', required=True, help='the directory of the output file')
    args = parser.parse_args()
    fromdir = args.fromdir
    outdir = args.outdir

    # (Removed the unused argc/cmdargs locals that shadowed no purpose.)
    for name in os.listdir(fromdir):
        # Skip the Windows Explorer metadata file.
        if name == "desktop.ini":
            continue
        filein = os.path.join(fromdir, name)
        fileout = os.path.join(outdir, name)
        try:
            sharpenPic(filein, fileout)
        except Exception as e:
            # Best effort: report the failure and keep processing the rest.
            print(e)
            continue
# Script entry point.
if __name__ == '__main__':
    main()
ca53fcbba66dd4999f68f3523367c20a6b5e1e47 | Create script.py | script.py | script.py | import math
def getMatrix():
    """Interactively read a 3x3 matrix, one term at a time (Python 2).

    NOTE(review): Python 2 input() evaluates what the user types -- fine
    for a local maths script, unsafe for untrusted input.
    """
    L = [None] * 3
    for j in range(3):
        print "Ligne "+str(j)+ "\n"
        L[j] = [None] * 3
        for i in range(3):
            L[j][i] = input("Terme "+str(i)+"\n")
    return L

def getPoint():
    """Interactively read a 2D point as homogeneous coordinates [x, y, 1]."""
    L = [None] * 3
    for j in range(2):
        L[j] = input("Terme "+str(j)+"\n")
    L[2] = 1
    return L

def PrintPoint(L):
    """Print a homogeneous point as [x,y,w]."""
    s ="["+str(L[0])+","+str(L[1])+","+str(L[2])+"]"
    print s

def PrintMatrix(L):
    """Print a 3x3 matrix, one bracketed row per line."""
    s ="["+str(L[0][0])+","+str(L[0][1])+","+str(L[0][2])+"]"
    print s
    s ="["+str(L[1][0])+","+str(L[1][1])+","+str(L[1][2])+"]"
    print s
    s ="["+str(L[2][0])+","+str(L[2][1])+","+str(L[2][2])+"]"
    print s
def MatrixProduct(L, M):
    """Return the matrix product L x M as a new list-of-rows matrix.

    Generalized: the result is sized from the operands (len(L) x len(M[0]))
    instead of the previous hard-coded 3x3, so any compatible shapes work;
    3x3 inputs behave exactly as before.
    """
    result = [[0] * len(M[0]) for _ in range(len(L))]
    for i in range(len(L)):
        for j in range(len(M[0])):
            for k in range(len(M)):
                result[i][j] += L[i][k] * M[k][j]
    return result
def MatrixPointProduct(L, P):
    """Return the matrix-vector product L x P as a flat list.

    Generalized: the result length follows len(L) instead of the previous
    hard-coded 3, so non-3x3 matrices work; 3x3 inputs are unchanged.
    """
    result = [0] * len(L)
    for i in range(len(L)):
        for j in range(len(P)):
            result[i] += L[i][j] * P[j]
    return result
def GenRotationMatrix(teta):
    """Homogeneous 2D rotation matrix for angle `teta` (radians)."""
    c = math.cos(teta)
    s = math.sin(teta)
    return [[c, -s, 0],
            [s, c, 0],
            [0, 0, 1]]
def GenHomothetieMatrix(k):
    """Homogeneous 2D scaling (homothety) matrix with factor `k`."""
    return [[k, 0, 0],
            [0, k, 0],
            [0, 0, 1]]
def GenTranslationMatrix(a, b):
    """Homogeneous 2D translation matrix by the vector (a, b)."""
    return [[1, 0, a],
            [0, 1, b],
            [0, 0, 1]]
def GenCentredRotationMatrix(teta,a,b):
    """Rotation by `teta` composed with a translation by (a, b)."""
    return MatrixProduct(GenRotationMatrix(teta),GenTranslationMatrix(a,b))

def GenCentredHomothetieMatrix(k,a,b):
    """Scaling by `k` composed with a translation by (a, b)."""
    return MatrixProduct(GenHomothetieMatrix(k),GenTranslationMatrix(a,b))

def GetPolyon():
    """Interactively read a polygon as a list of homogeneous points.

    NOTE(review): the name is presumably a typo for "GetPolygon" but is kept
    for compatibility; Python 2 input() evaluates what the user types.
    """
    nb = input("Combien de point voulez vous ?")
    L = [None] * nb
    for i in range(nb):
        L[i] = getPoint()
    return L

# Script entry point: immediately prompt the user for a polygon.
GetPolyon()
| Python | 0.000002 | |
338a23f907a82d821844639128d070385138af80 | Simple https server. requires key and cert files | server.py | server.py | import SimpleHTTPServer
import SocketServer
import BaseHTTPServer, SimpleHTTPServer
import ssl
httpd = BaseHTTPServer.HTTPServer(('localhost', 4443), SimpleHTTPServer.SimpleHTTPRequestHandler)
httpd.socket = ssl.wrap_socket (httpd.socket, certfile='/Users/jtep/Code/own/audioviz/cert.pem', keyfile='/Users/jtep/Code/own/audioviz/key.pem',server_side=True)
httpd.serve_forever()
| Python | 0.999993 | |
98e086696ea36d6050de9f23d2380b704fee305d | Create chatbot3.py | ai/chatbot3.py | ai/chatbot3.py | # ทำ Chat Bot ง่าย ๆ ในภาษา Python
# เขียนโดย นาย วรรณพงษ์ ภัททิยไพบูลย์
# https://python3.wannaphong.com/2015/07/ทำ-chat-bot-ง่าย-ๆ-ในภาษา-python.html
from tinydb import TinyDB, where # import the tinydb module
import random
db = TinyDB('db.json') # open the database backed by the file db.json
def addword():
    """Interactively teach the bot a new sentence.

    Asks whether to learn the unknown input; on confirmation, stores a
    question/answer pair in the database.
    """
    # Prompt (Thai): "This sentence is not in the system. Do you want to teach it?"
    print("ไม่พบประโยคนี้ในระบบ คุณต้องการสอนไหม")
    # input() already returns str; the redundant str() wrapper was dropped.
    # Generalized: accept 'y' and tolerate stray whitespace, not only 'Y'.
    addif = input("Y or N : ").strip().upper()
    if addif == "Y":
        q = input("คำถาม : ")    # "Question : "
        ans = input("คำตอบ : ")  # "Answer : "
        db.insert({q: ans})  # persist the new pair in the database
    else:
        print("Ok")
# Main chat loop: read a sentence, answer from the DB, or learn it if unknown.
while True:
    text = input("> ")
    a = db.search(where(text)) # look the sentence up in the database
    if a == []:
        addword()
    else:
        # Query again to collect every stored answer, then pick one at random.
        # NOTE(review): reusing `a` from above would avoid the duplicate query.
        a = random.choice([a for a in db.search(where(text))]) # gather all answers (possibly added separately) and pick one at random
        print(a[text])
| Python | 0.00215 | |
0882c8885b88618ea55b97ace256cdf833a1547d | Add tests for pylama isort | tests/test_pylama_isort.py | tests/test_pylama_isort.py | import os
from isort.pylama_isort import Linter
class TestLinter:
    # A single shared Linter is enough: these tests keep no per-test state.
    instance = Linter()

    def test_allow(self):
        # Only *.py files should be linted; other extensions are skipped.
        assert not self.instance.allow("test_case.pyc")
        assert not self.instance.allow("test_case.c")
        assert self.instance.allow("test_case.py")

    def test_run(self, src_dir, tmpdir):
        # A correctly import-sorted file yields no complaints...
        assert not self.instance.run(os.path.join(src_dir, "isort.py"))

        # ...while out-of-order imports ("b" before "a") must be reported.
        incorrect = tmpdir.join("incorrect.py")
        incorrect.write("import b\nimport a\n")
        assert self.instance.run(str(incorrect))
| Python | 0 | |
8a7ea0e8d29d443676c8893790625cbeb9d973ad | Test addByUniqueID Survey model | tests/test_survey_model.py | tests/test_survey_model.py | # Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
import unittest
from app import create_app
from app.models import Survey
class SurveyModelTestCase(unittest.TestCase):
    # Fixed fake user identifier (MD5-style hex string) used across the tests.
    uniqueID = 'd4c74594d841139328695756648b6bd6'

    def setUp(self):
        # Build the app in TESTING config and push its context so the
        # models can reach the application-bound database.
        self.app = create_app('TESTING')
        self.app_context = self.app.app_context()
        self.app_context.push()

    def tearDown(self):
        self.app_context.pop()

    def test_addByUniqueID(self):
        # Adding an (empty) survey payload for a user should report success.
        u = Survey()
        self.assertTrue(u.addByUniqueID(self.uniqueID, {}))
| Python | 0 | |
280e72331d99a8c49783196951287627a933a659 | Add py solution for 459. Repeated Substring Pattern | py/repeated-substring-pattern.py | py/repeated-substring-pattern.py | class Solution(object):
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
for i in xrange(1, len(s) / 2 + 1):
if len(s) % i == 0 and len(set(s[j:j+i] for j in xrange(0, len(s), i))) == 1:
return True
return False
| Python | 0.00038 | |
ce0834943d28f8f1b69992efd12d5e62743fb670 | add an UDP multihoming example | python_examples/udp_multihome.py | python_examples/udp_multihome.py | #!/usr/bin/env python3
# -*- coding:UTF-8 -*-
# Copyright (c) 2014 Nicolas Iooss
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""This program show how it is possible to run an UDP server with multihoming.
The main issue is to reply to incoming requests with the right source address,
when several ones are available. This is done by using recvmsg/sendmsg
functions instead of recvfrom/sendto which only control the remote address.
This use-case is called "multihoming".
This program has been insipred by OpenVPN source code (src/openvpn/socket.c)
@author: Nicolas Iooss
@license: MIT
"""
import argparse
import logging
import os
import socket
import struct
import sys
logger = logging.getLogger(__name__)
# Define some system-specific constants.
# Older Python builds may lack these socket option names, so patch them onto
# the socket module with their numeric values.  NOTE(review): the numbers
# presumably come from the Linux/Windows system headers -- verify against
# <linux/in.h>, <linux/in6.h> and ws2ipdef.h if targets change.
if sys.platform.startswith('linux'):
    if not hasattr(socket, 'IP_PKTINFO'):
        socket.IP_PKTINFO = 8
    if not hasattr(socket, 'IPV6_RECVPKTINFO'):
        socket.IPV6_RECVPKTINFO = 49
    if not hasattr(socket, 'IPV6_PKTINFO'):
        socket.IPV6_PKTINFO = 50
    if not hasattr(socket, 'SO_BINDTODEVICE'):
        socket.SO_BINDTODEVICE = 25
elif os.name == 'nt':
    if not hasattr(socket, 'IP_RECVDSTADDR'):
        socket.IP_RECVDSTADDR = 25
    if not hasattr(socket, 'IPV6_RECVDSTADDR'):
        socket.IPV6_RECVDSTADDR = 25
else:
    raise Exception("Unsupported system")
def main(argv=None):
    """Run the multihomed UDP demo: bind a server socket, receive one
    datagram with per-packet destination info (PKTINFO ancillary data),
    and reply from the same source address.  Returns 0 on success.
    """
    parser = argparse.ArgumentParser(description="Simple multihomed UDP server")
    parser.add_argument('-p', '--port', type=int, default=4242,
                        help="UDP port to be used (default: 4242)")
    parser.add_argument('-w', '--wait', action='store_true',
                        help="wait for connections instead of creating one")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-4', '--ipv4', action='store_true',
                       help="create an IPv4-only socket")
    group.add_argument('-6', '--ipv6', action='store_true',
                       help="create an IPv6-only socket")
    args = parser.parse_args(argv)

    # Compute local variables
    af = socket.AF_INET if args.ipv4 else socket.AF_INET6
    localaddr = '127.0.0.1' if args.ipv4 else '::1'
    anyaddr = '0.0.0.0' if args.ipv4 else '::'
    port = args.port if args.port > 0 else 4242

    # Create and configure socket for multihoming: request the per-packet
    # destination-address ancillary data for each address family in use.
    skserver = socket.socket(af, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    if not args.ipv6:
        if hasattr(socket, 'IP_PKTINFO'):
            skserver.setsockopt(socket.SOL_IP, socket.IP_PKTINFO, 1)
        elif hasattr(socket, 'IP_RECVDSTADDR'):
            skserver.setsockopt(socket.IPPROTO_IP, socket.IP_RECVDSTADDR, 1)
    if not args.ipv4:
        if hasattr(socket, 'IPV6_RECVPKTINFO'):
            skserver.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_RECVPKTINFO, 1)
        elif hasattr(socket, 'IPV6_RECVDSTADDR'):
            skserver.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_RECVDSTADDR, 1)
    if not args.ipv4:
        # Dual-stack unless -6 was given (V6ONLY only when IPv6-only).
        skserver.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, args.ipv6)

    # Listen
    if args.wait:
        listenaddr = anyaddr
    elif args.ipv6 or args.ipv4:
        listenaddr = localaddr
    else:
        # To protect dual-stack listen, bind the socket to the loopback interface
        listenaddr = anyaddr
        try:
            skserver.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, b'lo\0')
        except PermissionError as exc:
            # NOTE(review): logger.warn is a deprecated alias of logger.warning.
            logger.warn("Unable to bind to loopback interface: {}".format(exc))
    ainfos = socket.getaddrinfo(listenaddr, port, af, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    skserver.bind(ainfos[0][4])

    if args.wait:
        logger.info("Waiting for a connection on UDP port {}.".format(port))
    else:
        # Create a client socket, which uses IPv4-in-IPv6 if enabled
        clientaf = socket.AF_INET if not args.ipv6 else socket.AF_INET6
        clientdstaddr = '127.0.0.1' if not args.ipv6 else '::1'
        skclient = socket.socket(clientaf, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        skclient.sendto(b'Hello, world!', (clientdstaddr, port))

    # Receive an incoming packet (with room for the ancillary control data)
    (msg, ancdata, _, clientaddrport) = skserver.recvmsg(1024, socket.CMSG_SPACE(100))
    assert args.wait or msg == b'Hello, world!' # Check the socket channel
    dst_addr = None
    ifindex = None
    # Decode the PKTINFO control messages to learn the packet's destination.
    for cmsg_level, cmsg_type, cmsg_data in ancdata:
        if cmsg_level == socket.SOL_IP and hasattr(socket, 'IP_PKTINFO') and cmsg_type == socket.IP_PKTINFO:
            # struct in_pktinfo { int ipi_ifindex; struct in_addr ipi_spec_dst, ipi_addr; };
            assert len(cmsg_data) == 12
            dst_addr = socket.inet_ntop(socket.AF_INET, cmsg_data[4:8])
            ifindex = struct.unpack('I', cmsg_data[:4])[0]
        elif cmsg_level == socket.IPPROTO_IPV6 and hasattr(socket, 'IPV6_PKTINFO') and cmsg_type == socket.IPV6_PKTINFO:
            # struct in6_pktinfo { struct in6_addr ipi6_addr; int ipi_ifindex; };
            assert len(cmsg_data) == 20
            dst_addr = socket.inet_ntop(socket.AF_INET6, cmsg_data[:16])
            ifindex = struct.unpack('I', cmsg_data[16:20])[0]
        else:
            logger.warning("Unknown ancilliary data: {}, {}, {}".format(cmsg_level, cmsg_type, cmsg_data))
            # TODO: decode IP_RECVDSTADDR/IPV6_RECVDSTADDR

    text = "Received UDP packet from {0[0]} port {0[1]}".format(clientaddrport)
    if dst_addr is not None:
        text += " to {} port {} interface {}".format(dst_addr, port, ifindex)
    logger.info(text)

    # Send back a reply with the same ancillary data, so the reply leaves
    # from the exact address the request was sent to (the multihoming trick).
    skserver.sendmsg([b'Bye!\n'], ancdata, 0, clientaddrport)
    skserver.close()
    if not args.wait:
        skclient.close()
    return 0


if __name__ == '__main__':
    logging.basicConfig(format='[%(levelname)s] %(message)s', level=logging.DEBUG)
    sys.exit(main())
| Python | 0.00001 | |
75dc32ef71fd32c7728269b01a74faf840690473 | Add a slow bot to test timeout feature | examples/too_slow_bot.py | examples/too_slow_bot.py | import random
import asyncio
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
from proxy_rax import ProxyRaxBot
class SlowBot(ProxyRaxBot):
    """ProxyRaxBot variant that sleeps up to 1s each step, deliberately
    exceeding the game's per-step time budget to exercise the timeout path."""
    async def on_step(self, state, iteration):
        # Random delay (0-1s) before the real logic, simulating a slow bot.
        await asyncio.sleep(random.random())
        await super().on_step(state, iteration)
def main():
    """Run SlowBot vs. a medium Protoss AI with a 0.2s per-step time limit."""
    sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
        Bot(Race.Terran, SlowBot()),
        Computer(Race.Protoss, Difficulty.Medium)
    ], realtime=False, step_time_limit=0.2)

if __name__ == '__main__':
    main()
| Python | 0 | |
7c865c63d5debcf7463ad1b81470d2f044ec4738 | Add lab result models | radar/patients/lab_results/models.py | radar/patients/lab_results/models.py | from sqlalchemy import Column, Integer, String, ForeignKey, Numeric, Date, Boolean
from sqlalchemy.orm import relationship
from radar.database import db
from radar.models import PatientMixin, UnitMixin, CreatedModifiedMixin, DataSource
class LabOrderDefinition(db.Model):
    """Catalogue entry describing a type of lab order."""
    __tablename__ = 'lab_order_definitions'

    id = Column(Integer, primary_key=True)
    code = Column(String, nullable=False)
    description = Column(String, nullable=False)
    # Whether orders of this type carry a pre/post qualifier
    # (presumably pre/post-dialysis -- confirm with domain owners).
    pre_post = Column(Boolean, nullable=False)
class LabResultDefinition(db.Model):
    """Catalogue entry describing one result type within a lab order type."""
    __tablename__ = 'lab_result_definitions'

    id = Column(Integer, primary_key=True)
    # Parent order definition this result type belongs to.
    lab_order_definition_id = Column(Integer, ForeignKey('lab_order_definitions.id'), nullable=False)
    code = Column(String, nullable=False)
    description = Column(String, nullable=False)
class LabOrder(DataSource, PatientMixin, UnitMixin, CreatedModifiedMixin):
    """A lab order placed for a patient; polymorphic child of DataSource."""
    __tablename__ = 'lab_orders'

    # NOTE(review): a joined-table child's id should reference the *parent*
    # (DataSource) table; 'lab_orders.id' points the column at itself, which
    # looks like a copy-paste slip -- the parent's tablename is not visible
    # here, so confirm before changing it.
    id = Column(Integer, ForeignKey('lab_orders.id'), primary_key=True)

    lab_order_definition_id = Column(Integer, ForeignKey('lab_order_definitions.id'), nullable=False)
    lab_order_definition = relationship('LabOrderDefinition')

    date = Column(Date, nullable=False)
    pre_post = Column(String)

    # Results are owned by the order and deleted with it.
    lab_results = relationship('LabResult', cascade='all, delete-orphan')

    __mapper_args__ = {
        # Fixed: was 'hospitalisations', clearly copied from another model;
        # the polymorphic identity must name this mapper.
        'polymorphic_identity': 'lab_orders',
    }
class LabResult(db.Model):
    """A single measured value belonging to a lab order."""

    __tablename__ = 'lab_results'

    id = Column(Integer, primary_key=True)

    # Owning order; rows follow the order on delete/update at the DB level.
    lab_order_id = Column(Integer, ForeignKey('lab_orders.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=False)
    lab_order = relationship('LabOrder')

    # Which result type (from the order's definition) this value is for.
    lab_result_definition_id = Column(Integer, ForeignKey('lab_result_definitions.id'), nullable=False)
    lab_result_definition = relationship('LabResultDefinition')

    value = Column(Numeric, nullable=False)  # the measured value itself
36d8a0e091ec1dd4ff451031810c75cd0431ac44 | add admins.py file | aligot/admin.py | aligot/admin.py | # coding: utf-8
from django.contrib import admin
from .models import Note, NoteBook, NoteRevision, User
# Expose every aligot model in the Django admin with default options.
for model in (User, NoteBook, Note, NoteRevision):
    admin.site.register(model)
| Python | 0.000001 | |
4e4b23ebae9274511fa3fad438b198c19b38c98d | Add a breakpad / content_shell integration test | content/shell/tools/breakpad_integration_test.py | content/shell/tools/breakpad_integration_test.py | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Integration test for breakpad in content shell.
This test checks that content shell and breakpad are correctly hooked up, as
well as that the tools can symbolize a stack trace."""
import glob
import optparse
import os
import shutil
import subprocess
import sys
import tempfile
CONCURRENT_TASKS=4
def main():
parser = optparse.OptionParser()
parser.add_option('', '--build-dir', default='',
help='The build output directory.')
parser.add_option('', '--binary', default='',
help='The path of the binary to generate symbols for.')
parser.add_option('', '--no-symbols', default=False, action='store_true',
help='Symbols are not expected to work.')
parser.add_option('-j', '--jobs', default=CONCURRENT_TASKS, action='store',
type='int', help='Number of parallel tasks to run.')
parser.add_option('-v', '--verbose', action='store_true',
help='Print verbose status output.')
(options, _) = parser.parse_args()
if not options.build_dir:
print "Required option --build-dir missing."
return 1
if not options.binary:
print "Required option --binary missing."
return 1
if not os.access(options.binary, os.X_OK):
print "Cannot find %s." % options.binary
return 1
failure = ''
# Create a temporary directory to store the crash dumps and symbols in.
crash_dir = tempfile.mkdtemp()
try:
print "# Generate symbols."
breakpad_tools_dir = os.path.join(
os.path.dirname(__file__), '..', '..', '..',
'components', 'breakpad', 'tools')
generate_symbols = os.path.join(
breakpad_tools_dir, 'generate_breakpad_symbols.py')
symbols_dir = os.path.join(crash_dir, 'symbols')
cmd = [generate_symbols,
'--build-dir=%s' % options.build_dir,
'--binary=%s' % options.binary,
'--symbols-dir=%s' % symbols_dir,
'--jobs=%d' % options.jobs]
if options.verbose:
cmd.append('--verbose')
print ' '.join(cmd)
failure = 'Failed to run generate_breakpad_symbols.py.'
subprocess.check_call(cmd)
print "# Run content_shell and make it crash."
cmd = [options.binary,
'--dump-render-tree',
'chrome://crash',
'--enable-crash-reporter',
'--crash-dumps-dir=%s' % crash_dir]
if options.verbose:
print ' '.join(cmd)
failure = 'Failed to run content_shell.'
if options.verbose:
subprocess.check_call(cmd)
else:
with open(os.devnull, 'w') as devnull:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
print "# Retrieve crash dump."
dmp_files = glob.glob(os.path.join(crash_dir, '*.dmp'))
failure = 'Expected 1 crash dump, found %d.' % len(dmp_files)
if len(dmp_files) != 1:
raise Exception(failure)
dmp_file = dmp_files[0]
minidump = os.path.join(crash_dir, 'minidump')
dmp_to_minidump = os.path.join(breakpad_tools_dir, 'dmp2minidump.py')
cmd = [dmp_to_minidump, dmp_file, minidump]
if options.verbose:
print ' '.join(cmd)
failure = 'Failed to run dmp_to_minidump.'
subprocess.check_call(cmd)
print "# Symbolize crash dump."
minidump_stackwalk = os.path.join(options.build_dir, 'minidump_stackwalk')
cmd = [minidump_stackwalk, minidump, symbols_dir]
if options.verbose:
print ' '.join(cmd)
failure = 'Failed to run minidump_stackwalk.'
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stack = proc.communicate()[0]
# Check whether the stack contains a CrashIntentionally symbol.
found_symbol = 'CrashIntentionally' in stack
if options.no_symbols:
if found_symbol:
if options.verbose:
print stack
failure = 'Found unexpected reference to CrashIntentionally in stack'
raise Exception(failure)
else:
if not found_symbol:
if options.verbose:
print stack
failure = 'Could not find reference to CrashIntentionally in stack.'
raise Exception(failure)
except:
print "FAIL: %s" % failure
return 1
else:
print "PASS: Breakpad integration test ran successfully."
return 0
finally:
try:
shutil.rmtree(crash_dir)
except:
print 'Failed to delete temp directory "%s".' % crash_dir
if '__main__' == __name__:
    # Propagate main()'s status code to the shell.
    sys.exit(main())
| Python | 0.999999 | |
f85a5954d337eca9b577664b1ba04e580fdf9b5c | Add slice01.py | trypython/basic/slice01.py | trypython/basic/slice01.py | # coding: utf-8
"""
slice 関数についてのサンプルです。
"""
import itertools
from trypython.common.commoncls import SampleBase
class Sample(SampleBase):
    """Demonstrates the built-in slice() function."""

    def exec(self):
        #############################################################
        # The slice function
        # - https://docs.python.jp/3/library/functions.html#slice
        #
        # slice() builds an object representing a set of indices.
        # The returned slice object can be used as a sequence subscript.
        #############################################################
        l = list(range(10))

        # Take the elements from index 2 up to (but not including) index 4.
        print(l[2:4])

        # Obtain a slice object.
        # It can be used as the subscript of any sequence, and unlike
        # sequence[x:y:z] it can be given a descriptive name.
        from_two_to_four = slice(2, 4)
        print(type(from_two_to_four))
        print(l[from_two_to_four])
        print('helloworld'[from_two_to_four])

        # slice() takes its arguments as start, stop, step, in that order.
        step_two = slice(None, None, 2)
        print('helloworld'[step_two])

        # For iterators, use itertools.islice instead.
        it = iter(l)
        iter_from_two_to_end_step_two = itertools.islice(it, 0, None, 2)
        print([x for x in iter_from_two_to_end_step_two])
def go():
    """Create a Sample and run its demo."""
    Sample().exec()
# Run the demo when executed as a script.
if __name__ == '__main__':
    go()
| Python | 0.00177 | |
cf93f84dd794b63dd373cf59d802000799e32232 | Create main.py | example/main.py | example/main.py | Python | 0.000001 | ||
44afa72717f181fedefe692ea2f2b8d47924395f | add 'fw08.py' which defines 'BaseAction' class | framework_python/fw08.py | framework_python/fw08.py | # -*- coding: utf-8 -*-
##
## 前処理と後処理をサポート、JSON をサポート
##
import os
import json
class Request(object):
    """Thin wrapper exposing the pieces of the WSGI environ the framework uses."""

    def __init__(self, environ):
        self.environ = environ
        self.path = environ['PATH_INFO']            # URL path, e.g. '/hello'
        self.method = environ['REQUEST_METHOD']     # e.g. 'GET', 'POST'
class Response(object):
    """Mutable holder for the HTTP status line and response headers."""

    def __init__(self):
        self.status = "200 OK"
        self.headers = {'Content-Type': "text/html;charset=utf-8"}

    def header_list(self):
        """Return headers as the list of (name, value) pairs WSGI expects."""
        return list(self.headers.items())

    @property
    def content_type(self):
        """Convenience accessor for the Content-Type header."""
        return self.headers['Content-Type']

    @content_type.setter
    def content_type(self, value):
        self.headers['Content-Type'] = value
class BaseAction(object):
    """Base class providing the before/main/after action lifecycle."""

    def __init__(self, req, resp):
        self.req = req
        self.resp = resp

    def before_action(self):
        """Pre-processing hook; subclasses may override."""
        pass

    def after_action(self, ex):
        """Post-processing hook; ``ex`` is the raised exception, or None."""
        pass

    def invoke_action(self, func):
        """Run the main action ``func`` (an unbound handler) and return its content."""
        content = func(self)
        return content

    def handle_action(self, func):
        """Template method: before_action, then the action, then after_action.

        after_action always runs — even when before_action or the action
        raises — and receives the exception; the exception is then re-raised.
        (A stale commented-out ``self.before_action()`` call was removed.)
        """
        ex = None
        try:
            self.before_action()
            return self.invoke_action(func)
        except Exception as ex_:
            ex = ex_
            raise
        finally:
            self.after_action(ex)
class Action(BaseAction):
    """Action that adds JSON serialization and per-HTTP-method dispatch."""

    def invoke_action(self, func):
        content = super(Action, self).invoke_action(func)
        # Dict content is served as JSON with the matching Content-Type
        # (alternatively application/json;charset=utf-8).
        if isinstance(content, dict):
            content = json.dumps(content, ensure_ascii=False)
            self.resp.content_type = "application/json"
        return content

    def _http_405(self):
        """Respond with 405 for HTTP methods a subclass does not implement."""
        self.resp.status = "405 Method Not Allowed"
        return "<h2>405 Method Not Allowed</h2>"

    def GET(self):
        return self._http_405()

    def POST(self):
        return self._http_405()

    def PUT(self):
        return self._http_405()

    def DELETE(self):
        return self._http_405()

    def PATCH(self):
        return self._http_405()

    def OPTIONS(self):
        return self._http_405()

    def TRACE(self):
        return self._http_405()

    def HEAD(self):
        # HEAD mirrors GET; the application layer drops the body.
        return self.GET()
class HelloAction(Action):
    """Serve a greeting; the dict is serialized to JSON by Action."""

    def GET(self):
        payload = {"message": "Hello, World!"}
        return payload
class EnvironAction(Action):
    """Dump the WSGI environ (minus inherited OS env vars) as plain text."""

    def GET(self):
        environ = self.req.environ
        lines = []
        for key in sorted(environ):
            # Skip entries merely inherited from the process environment.
            if key in os.environ:
                continue
            val = environ[key]
            typ = "(%s)" % type(val).__name__
            lines.append("%-25s %-7s %r\n" % (key, typ, val))
        self.resp.content_type = "text/plain;charset=utf-8"
        return "".join(lines)
class FormAction(Action):
    """Demonstrate GET/POST handling with a minimal HTML form."""

    def GET(self):
        """Show the request method and a form that POSTs back to /form."""
        req_meth = self.req.method
        html = ('<p>REQUEST_METHOD: %r</p>\n'
                '<form method="POST" action="/form">\n'
                '<input type="submit">\n'
                '</form>\n')
        return html % req_meth

    def POST(self):
        """Acknowledge the POST and link back to the form."""
        req_meth = self.req.method
        # Fixed: the anchor tag was never closed ('back</p>').
        html = ('<p>REQUEST_METHOD: %r</p>\n'
                '<p><a href="/form">back</a></p>\n')
        return html % req_meth
class WSGIApplication(object):
    """WSGI entry point: routes requests by path and dispatches by HTTP method."""

    def __call__(self, environ, start_response):
        req = Request(environ)
        resp = Response()

        # Path-based routing table.
        routing = {
            '/hello': HelloAction,
            '/environ': EnvironAction,
            '/form': FormAction,
        }
        klass = routing.get(req.path)
        req_meth = req.method

        if klass is None:
            status = "404 Not Found"
            content = "<h2>%s</h2>" % status
        elif not hasattr(klass, req_meth):
            status = "405 Method Not Allowed"
            content = "<h2>%s</h2>" % status
        else:
            handler = getattr(klass, req_meth)
            action = klass(req, resp)
            content = action.handle_action(handler)
            status = resp.status
            if req_meth == 'HEAD':
                content = ""  # HEAD responses carry headers only

        start_response(status, resp.header_list())
        return [content.encode('utf-8')]
# Module-level application object, as expected by WSGI servers.
wsgi_app = WSGIApplication()

if __name__ == "__main__":
    # Development convenience: serve on localhost:7000.
    from wsgiref.simple_server import make_server
    wsgi_server = make_server('localhost', 7000, wsgi_app)
    wsgi_server.serve_forever()
| Python | 0.000012 | |
4efc45499d1736933691b9de39090b86526ea4e1 | Create 217_contain_duplicates.py | 217_contain_duplicates.py | 217_contain_duplicates.py | """
https://leetcode.com/problems/contains-duplicate/description/
Given an array of integers, find if the array contains any duplicates.
Your function should return true if any value appears at least twice in the array, and it should return false if every element
is distinct.
"""
class Solution(object):
    def containsDuplicate(self, nums):
        """Return True if any value appears at least twice in ``nums``.

        :type nums: List[int]
        :rtype: bool

        The original version carried unused bookkeeping (``duplicates``,
        ``is_duplicate``) and unreachable statements after ``return True``.
        A set drops duplicates, so a length difference means some value
        occurred more than once. O(n) time, O(n) extra space.
        """
        return len(set(nums)) != len(nums)
| Python | 0.000607 | |
7ab2298f22de79cd14fae9f3add1417a76bcbcd0 | Add package file. | app/__init__.py | app/__init__.py | #!/usr/bin/env python
from pkgutil import extend_path
# Allow this package's __path__ to span multiple directories
# (pkgutil-style namespace package).
__path__ = extend_path(__path__, __name__)

# Public submodules exported by ``from app import *``.
__all__ = [
    'data_visualization',
    'knn_prediction',
    'load_dataset',
    'model_visualization',
    'select_model',
    'svm_prediction',
    'validate_dataset',
]
| Python | 0 | |
f622255dc2c6695b785213c8d69cb57ae5d8a5e9 | Add pebble sdk version for detecting sdk features | waftools/pebble_sdk_version.py | waftools/pebble_sdk_version.py | from waflib.Configure import conf
@conf
def compare_sdk_version(ctx, platform, version):
    """Compare the configured SDK version for *platform* against *version*.

    ``version`` is a (major, minor) pair. Returns 0 when equal, a positive
    value when the configured SDK is newer, and a negative value when older.
    """
    # Use the platform-specific env when one exists, else the default env.
    target_env = ctx.all_envs[platform] if platform in ctx.all_envs else ctx.env
    # Versions are packed as major * 0xff + minor; the 0x5 / 0x19 fallbacks
    # presumably track the current SDK version — TODO confirm.
    # NOTE(review): multiplying by 0xff (255) can collide once minor >= 0xff;
    # 0x100 would be collision-free — confirm whether minor is bounded.
    target_version = (int(target_env.SDK_VERSION_MAJOR or 0x5) * 0xff +
                      int(target_env.SDK_VERSION_MINOR or 0x19))
    other_version = int(version[0]) * 0xff + int(version[1])
    diff_version = target_version - other_version
    # NOTE(review): on Python 3 the ``/`` here yields ±1.0 (float), not ±1.
    return 0 if diff_version == 0 else diff_version / abs(diff_version)
@conf
def supports_bitmap_resource(ctx):
    """True when the aplite SDK is at least version 0x5.0x48."""
    return ctx.compare_sdk_version('aplite', [0x5, 0x48]) >= 0
| Python | 0 | |
79fdfaceee84321bb802f9f99ee500f400f38780 | Add admin to credentials | dbaas/integrations/credentials/admin/__init__.py | dbaas/integrations/credentials/admin/__init__.py | # -*- coding:utf-8 -*-
from django.contrib import admin
from .. import models
admin.site.register(models.IntegrationType, )
admin.site.register(models.IntegrationCredential, )
| Python | 0 | |
acf0ab67db2856c71440093d0b686650e70e58e1 | Create network_class.py | network_class.py | network_class.py | """
Author: Joel Rieger
October 29, 2016
Description: Classes and functions to perform basic network abstraction and plotting.
"""
from numpy import pi as pi
class network(object):
    """One-dimensional network (e.g. a matching network) of elements."""

    def __init__(self, *args):
        # Per-instance element list. The original declared
        # ``element_array = []`` at class level, so every network instance
        # shared — and mutated — the same list.
        self.element_array = []

    def compute_node_impedances(self, Zp2, freq):
        """Calculate impedances at each node walking back from the output impedance, Zp2."""
        # TODO: not yet implemented.
        pass

    def move_element(self, n_a, n_b):
        """Move the element at index ``n_a`` to index ``n_b``, shifting others.

        Simplifies the drag-drop action of components in a UI.
        """
        self.element_array.insert(n_b, self.element_array.pop(n_a))
class element(object):
    """A single impedance/admittance element (capacitor, inductor, etc.).

    ``Zfunc``/``Yfunc`` are class attributes holding functions of
    ``(self, freq)``; attribute access binds them like methods, so a
    subclass overrides behaviour simply by assigning new lambdas.
    """
    name=''
    icon=''
    orientation=0
    # Default element values; subclasses reuse/replace keys (e.g. 'L', 'Q').
    # NOTE(review): ``val`` is a mutable class attribute — writing
    # ``obj.val['C'] = ...`` mutates it for every instance; confirm intended.
    val={'C':1e-12}
    Zfunc=lambda self,x: 1e-14 #function to define series impedance (placeholder)
    Yfunc=lambda self,x: 1e14 #function to define admittance (placeholder)

    def __init__(self,*args):
        pass

    def Z(self,freq):
        """Series impedance at ``freq`` (Hz); dispatches to ``Zfunc``."""
        return self.Zfunc(freq)

    def Y(self,freq):
        """Admittance at ``freq`` (Hz); dispatches to ``Yfunc``."""
        return self.Yfunc(freq)
class cap(element):
    """Ideal capacitor: Z = 1/(jwC) = -j/(2*pi*f*C), Y = jwC.

    Fixes the original, whose Z used +j/(wC) (an inductive sign) and whose
    Y was purely real, missing the j factor of an ideal capacitor's
    admittance.
    """
    Zfunc=lambda self,freq: -1.0j/(2*pi*freq*self.val['C']) #function to define series impedance
    Yfunc=lambda self,freq: 2j*pi*freq*self.val['C'] #function to define admittance
class ind(element):
    """Ideal inductor: Z = jwL, Y = 1/(jwL) = -j/(2*pi*f*L).

    Fixes the original admittance, which used +j/(wL); the reciprocal of
    jwL carries a negative imaginary sign.
    """
    Zfunc=lambda self,freq: 2*pi*freq*self.val['L']*1j #function to define series impedance
    Yfunc=lambda self,freq: -1.0j/(2*pi*freq*self.val['L']) #function to define admittance (1/Z)
class indQ(element):
    """Inductor with a fixed quality factor Q: Z = wL/Q + jwL, Y = 1/Z."""
    Zfunc=lambda self,freq: 2*pi*freq*self.val['L']/self.val['Q']+2*pi*freq*self.val['L']*1j #function to define series impedance
    # Original was ``1.0j/self.Zfunc(self, x)``: ``x`` is undefined and the
    # bound call already supplies self, so any Y() call raised NameError.
    Yfunc=lambda self,freq: 1.0/self.Zfunc(freq) #function to define admittance (1/Z)
class capQ(element):
    """Capacitor with a fixed quality factor Q. Y = 1/Z.

    NOTE(review): Zfunc returning the constant 1e-12 looks like a stub —
    confirm the intended finite-Q capacitor impedance before relying on it.
    """
    Zfunc=lambda self,freq: 1e-12 #function to define series impedance (placeholder)
    # Original was ``1.0/self.Zfunc(self, x)``: ``x`` is undefined and the
    # bound call already supplies self, so any Y() call raised NameError.
    Yfunc=lambda self,freq: 1.0/self.Zfunc(freq) #function to define admittance (1/Z)
if __name__=='__main__':
    # Quick manual check (Python 2): compare an ideal and a finite-Q
    # inductor at 2 GHz.
    net=network()
    L1=ind()
    # NOTE: this writes into the class-level ``val`` dict shared by all
    # element instances (see ``element.val``).
    L1.val['L']=1.9e-9
    print L1.Z(2.0e9)
    L2=indQ()
    # Rebinding ``val`` on the instance avoids mutating the shared dict.
    L2.val={'L':1.9e-9,'Q':30.0}
    print L2.Z(2.0e9)
| Python | 0.000004 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.