commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
4761b92bc058c49b696757580212a599e4a22bed | update 26 ott 2016 | ernitron/uPython-esp8266-httpserver | realmain.py | realmain.py | # Micropython Http Server
# Erni Tron ernitron@gmail.com
# Copyright (c) 2016
import time
import network
import gc
import machine
from config import read_config, get_config, set_config, save_config
development = True
def do_connect(ssid, pwd):
sta_if = network.WLAN(network.STA_IF)
if not sta_if.isconnected():
sta_if.active(True)
sta_if.connect(ssid, pwd)
while not sta_if.isconnected():
time.sleep_ms(200)
print('STA config: ', sta_if.ifconfig())
return sta_if
def do_accesspoint(ssid, pwd):
ap_if = network.WLAN(network.AP_IF)
ap.config(essid=ssid, password=pwd)
ap_if.active(True)
time.sleep_ms(200)
#print('AP config: ', ap_if.ifconfig())
return ap_if
#----------------------------------------------------------------
# MAIN PROGRAM STARTS HERE
def main():
# Enable automatic garbage collector
gc.enable()
# Start reading conf
config = read_config()
# Some defaults
ssid = get_config('ssid')
pwd = get_config('pwd')
# Connect to Network and save if
sta_if = do_connect(ssid, pwd)
# Set here special parameters of this application so they can be modified
if 'accesspoint' in config and 'appwd' in config:
pwd = get_config('appwd')
chipid = get_config('chipid')
ssid = 'YoT-'+chipid
do_accesspoint(ssid, pwd)
else:
ap_if = network.WLAN(network.AP_IF)
ap_if.active(False)
# Set Time RTC
from ntptime import settime
settime()
(y, m, d, h, mm, s, c, u) = time.localtime()
set_config('starttime', '%d-%d-%d %d:%d:%d UTC' % (y, m, d, h, mm, s))
# Update config with new values
# Get Network Parameters
(address, mask, gateway, dns) = sta_if.ifconfig()
from ubinascii import hexlify
set_config('address', address)
set_config('mask', mask)
set_config('gateway', gateway)
set_config('dns', dns)
set_config('mac', hexlify(sta_if.config('mac'), ':'))
set_config('chipid', hexlify(machine.unique_id()))
# Important config init values to be set
if 'sensor' not in config :
set_config('sensor', 'temp-generic')
if 'develpment' not in config:
development = False
# Ok save it!
save_config()
# Free some memory
ssid = pwd = None
config = None
# Registering
register_url = get_config('register')
authorization = get_config('authorization')
if register_url != 'none' and authorization != 'none':
from register import register
tim = machine.Timer(-1)
tim.init(period=300000, mode=machine.Timer.PERIODIC, callback=lambda t:register(register_url, authorization))
gc.collect()
# Launch Server
from httpserver import Server
s = Server(8805) # construct server object
try:
s.activate_server() # activate and run
except KeyboardInterrupt:
raise
except Exception:
if development == False:
machine.reset()
| mit | Python | |
69df0819456c45bcac8ace28cfd3f294795ae726 | Create ATM.py | RicoAntonioFelix/ATM-SIMULATOR | src/ATM.py | src/ATM.py | # Copyright 2014 Rico Antonio Felix
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from DatabaseScript import DatabaseScript
from RecordsLoader import RecordsLoader
class ATM:
"""Primitive model of an ATM machine
* Performs pin validation
* Contains a menu and keypad for user interaction
"""
def __init__(self):
"""Initializes the object by loading its memory with the bank account records from the database.
:return: ATM object with its memory initialized with the database records
"""
DatabaseScript.load_database("accounts.db")
self.__memory__ = RecordsLoader.load_records("accounts.db")
def validate_pin(self, pin):
"""Checks if the pin is valid
Checks that the argument passed in is of type 'str' else an error message is displayed and no operation is
performed.
:param pin: Pin to perform validation
:return: True if the pin is valid else returns False
"""
if self.param_is_good(pin):
return pin in self.__memory__
@staticmethod
def menu():
"""Provides a menu for the interface."""
print("Press 1 for withdraw")
print("Press 2 for deposit")
print("Press 3 for account balance")
def get_input(self, message):
"""Get input from the user.
Checks that the argument passed in is of type 'str' else an error message is displayed and no operation is
performed.
:param message: Prompt message for the user to follow
:return: User input as a 'str'
"""
if self.param_is_good(message):
return input(message)
def load_account(self, pin):
"""Get bank account record associated with the pin.
Checks that the argument passed in is of type 'str' else an error message is displayed and no operation is
performed.
:param pin: Pin to locate the associated bank account record
:return: Bank account record associated with the pin
"""
if self.param_is_good(pin):
return self.__memory__.get(pin)
@classmethod
def param_is_good(cls, param):
"""Performs argument type validation.
:param param: Argument for checking type information
:return: True if argument is of the appropriate type else returns False
"""
if type(param) != str:
print("Argument should be of type <class 'str'> found {}".format(type(param)))
return False
return True
| apache-2.0 | Python | |
6616d17a534ff2cf0dbdeb9f138d4ef09bf8d03d | update app.config | sahlinet/httptest,sahlinet/httptest,sahlinet/httptest,sahlinet/httptest | run_all.py | run_all.py | def func(self):
import sys, os
import py_compile
if os.environ['EXECUTOR'] != "docker":
sys.path.append("/home/philipsahli/workspace/httptest/app")
py_compile.compile("/home/philipsahli/workspace/httptest/app/run_all.py", doraise=True)
import run_all
import utils
reload(run_all)
reload(utils)
else:
from app import run_all
return run_all.func(self) | mit | Python | |
de4359d228b47a4da87c0f79603373b916ff9464 | Add migration from other branch | tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web | temba/flows/migrations/0083_populate_recent_msgs.py | temba/flows/migrations/0083_populate_recent_msgs.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-11 09:46
from __future__ import unicode_literals
from django.db import migrations
def old_recent_messages_lookup(FlowStep, step_uuid, rule_uuid, next_uuid):
"""
The old way of looking up recent messages from flow step table
"""
recent_messages = []
if rule_uuid:
recent_steps = FlowStep.objects.filter(step_uuid=step_uuid, rule_uuid=rule_uuid, next_uuid=next_uuid)
msg_direction_filter = 'I'
else:
recent_steps = FlowStep.objects.filter(step_uuid=step_uuid, rule_uuid=None, next_uuid=next_uuid)
msg_direction_filter = 'O'
recent_steps = recent_steps.prefetch_related('messages', 'contact', 'run').order_by('-left_on')[:20]
flow_id = None
for step in recent_steps:
if not step.contact.is_test:
for msg in step.messages.all():
if msg.visibility == 'V' and msg.direction == msg_direction_filter:
recent_messages.append(msg)
if not flow_id:
flow_id = step.run.flow_id
elif flow_id != step.run.flow_id:
raise ValueError("Flow steps with same node UUIDs for different flows!")
return flow_id, set(recent_messages[:5])
def do_populate(FlowStep, FlowPathRecentMessage):
# fetch all of the node->node path segments
segments = list(FlowStep.objects.values_list('step_uuid', 'rule_uuid', 'next_uuid').distinct())
if not segments:
return
print("Fetched %d unique flow path segments" % len(segments))
num_segments = 0
num_messages = 0
for segment in segments:
step_uuid, rule_uuid, next_uuid = segment
flow_id, messages = old_recent_messages_lookup(FlowStep, step_uuid, rule_uuid, next_uuid)
from_uuid = rule_uuid or step_uuid
to_uuid = next_uuid
for msg in messages:
# some might already have been created by new model code
FlowPathRecentMessage.objects.get_or_create(flow_id=flow_id, from_uuid=from_uuid, to_uuid=to_uuid, message=msg)
num_segments += 1
num_messages += len(messages)
if num_segments % 500 == 0:
print(" > Created %d recent message records for %d of %d segments" % (num_messages, num_segments, len(segments)))
def apply_as_migration(apps, schema_editor):
FlowStep = apps.get_model('flows', 'FlowStep')
FlowPathRecentMessage = apps.get_model('flows', 'FlowPathRecentMessage')
do_populate(FlowStep, FlowPathRecentMessage)
def apply_manual():
from temba.flows.models import FlowStep, FlowPathRecentMessage
do_populate(FlowStep, FlowPathRecentMessage)
class Migration(migrations.Migration):
dependencies = [
('flows', '0082_auto_20170111_0943'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
| agpl-3.0 | Python | |
a32b4216b98fb2bb23d8a0a700d05bd417df1906 | Implement main function | Hethurin/UApp | useless_passport_validator/umain.py | useless_passport_validator/umain.py | #!/usr/bin/python3.4
import ulibrary
import sys
from uwindow import UWindow
from gi.repository import Gtk
class Usage(Exception):
def __init__(self, msg):
self.msg = msg
def main():
ulibrary.init()
uwindow = UWindow()
uwindow.connect("delete-event", Gtk.main.quit)
uwindow.show_all()
Gtk.main()
if __name__ == "__main__":
sys.exit(main())
| mit | Python | |
845a74ca5475aa8121733ab92d4e1141e62d39a1 | rename error to result error. | tobyqin/testcube,tobyqin/testcube,tobyqin/testcube,tobyqin/testcube | testcube/core/migrations/0006_auto_20170610_0526.py | testcube/core/migrations/0006_auto_20170610_0526.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-10 05:26
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0005_auto_20170610_0506'),
]
operations = [
migrations.RenameModel(
old_name='TestFailure',
new_name='ResultError',
),
]
| mit | Python | |
a9388f7d4c4747e6710d20d618b57f19360cb69c | Add remove vlan compliance test | lindycoder/netman,internaphosting/netman,internap/netman | tests/adapters/compliance_tests/remove_vlan_test.py | tests/adapters/compliance_tests/remove_vlan_test.py | # Copyright 2019 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from hamcrest import assert_that, equal_to
from netman.core.objects.exceptions import UnknownVlan
from tests.adapters.compliance_test_case import ComplianceTestCase
class RemoveVlanTest(ComplianceTestCase):
_dev_sample = "brocade"
def setUp(self):
super(RemoveVlanTest, self).setUp()
self.client.add_vlan(1000)
def tearDown(self):
self.janitor.remove_vlan(1000)
super(RemoveVlanTest, self).tearDown()
def test_removes_vlan_from_get_vlan(self):
self.client.remove_vlan(1000)
with self.assertRaises(UnknownVlan):
self.client.get_vlan(1000)
def test_removes_vlan_raise_when_vlan_is_already_removed(self):
self.client.remove_vlan(1000)
with self.assertRaises(UnknownVlan):
self.client.remove_vlan(1000)
def test_removes_vlan_is_removed_from_list(self):
vlan_count = len(self.client.get_vlans())
self.client.remove_vlan(1000)
assert_that(len(self.client.get_vlans()), equal_to(vlan_count - 1))
| apache-2.0 | Python | |
b9ba0f23b4e9efdc2c6158ce2047f5e4cc73f9b5 | Add conftest.py to ignore top-level stuff | wmayner/pyemd,wmayner/pyemd | conftest.py | conftest.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
collect_ignore = ["setup.py", ".pythonrc.py"]
| mit | Python | |
63326149ba56677f2fbcfc6e875819d0b2ddf5de | Create sqlsync.py | GaryBrittain/DB2S3,GaryBrittain/DB2S3 | sqlsync.py | sqlsync.py | import MySQLdb as db
HOST = ""
PORT =
USER = ""
PASSWORD = ""
DB = ""
def post_file (path, filename):
try:
conn = db.Connection(host=HOST, port=PORT, user=USER, passwd=PASSWORD, db=DB)
dbhandler = conn.cursor(db.cursors.DictCursor)
sql = ("INSERT INTO db2s3 (PATH, FILENAME, DROPBOX_DOWNLOADED) VALUES('%s','%s', NOW())")%(path, filename)
dbhandler.execute(sql)
conn.commit()
conn.close()
except Exception as e:
with open("errors.txt", "a") as err:
err.write('Failed to upload meta to database for: '+str(path)+"\n")
err.close()
def next_file_to_process():
try:
conn = db.Connection(host=HOST, port=PORT, user=USER, passwd=PASSWORD, db=DB)
dbhandler = conn.cursor(db.cursors.DictCursor)
sql = ("SELECT PATH, FILENAME FROM db2s3 WHERE S3_UPLOADED IS NULL")
dbhandler.execute(sql)
return dbhandler.fetchall()
conn.close()
except:
return 'failure'
def s3_uploaded_confirm(path):
try:
conn = db.Connection(host=HOST, port=PORT, user=USER, passwd=PASSWORD, db=DB)
dbhandler = conn.cursor(db.cursors.DictCursor)
sql = ("UPDATE db2s3 SET S3_UPLOADED = NOW() WHERE PATH = '%s'")%(path)
dbhandler.execute(sql)
conn.commit()
conn.close()
except Exception as e:
with open("errors.txt", "a") as err:
err.write('Failed to update S3 date in db for: '+str(path)+"\n")
err.close()
def check_lock():
try:
conn = db.Connection(host=HOST, port=PORT, user=USER, passwd=PASSWORD, db=DB)
dbhandler = conn.cursor(db.cursors.DictCursor)
sql = ("SELECT LOCKED FROM PROCESS_LOCK")
dbhandler.execute(sql)
lock = dbhandler.fetchone()
return lock["LOCKED"]
conn.close()
except:
return 1
| mit | Python | |
03e1a4d988650196f698623da8d84070f9ace47d | Create ModisLADSFTP.py | Tartomas/PyModisAtm | ModisLADSFTP.py | ModisLADSFTP.py | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 4 13:02:35 2017
@author: Tomás Acuña y Italo Moletto
mail: tomasacuna@ug.uchile.cl
Modified from : http://www.science-emergence.com/Codes/Download-a-file-of-MODIS-MYD08-M3-from-ladsftp-using-python/
"""
from ftplib import FTP
import numpy as np
import os
import calendar
import glob
# Day of year
doy = ['001']
# Years of data
year = [2010,2011]
# Product version
version=['6']
# version =['51','6']
# Adquisition time of MODIS --> Review on Reverb or directly from the FTP
# example MOD07_L2.A2011001.2215.006.2015047180723.hdf
# Adquire at 2215 UTC
time='2215'
time='.'+time+'.'
product=['MOD07_L2','MOD04_L2','MOD05_L2']
MODIS_FOLD='E:\MODIS'
MODIS_FOLD='C:\MODIS'
# Create a local folder call MODIS
if not os.path.exists(MODIS_FOLD):
os.makedirs(MODIS_FOLD)
os.chdir(MODIS_FOLD)
####### Atention ############
# You must be login https://ladsweb.modaps.eosdis.nasa.gov/profile/login/ in order to connected to the FTP
####### Atention ############
for mm in range(0,len(product)):
prod_path=MODIS_FOLD + '\\' +product[mm]
# Create sub folder for each product
if not os.path.exists(prod_path):
os.makedirs(prod_path)
if not os.path.exists(prod_path+"\hdf"):
os.makedirs(prod_path+"\hdf")
os.chdir(prod_path+"\hdf")
else:
os.chdir(prod_path+"\hdf")
#----------------------------------------------------------------------------------------#
# download data from ladsftp
for yy in range(0,len(year)):
directory= 'allData/' + version[0] +'/' + product[mm] + '/' + str(year[yy]) + '/' + doy[0]
ftp = FTP('ladsftp.nascom.nasa.gov')
ftp.login('anonymous','')
ftp.cwd(directory)
yyy = []
ftp.retrlines('NLST', yyy.append)
# Search for specific time zone in the list of scenes
inx = [i for i, s in enumerate(yyy) if time in s]
ooo=["NA"]
ooo[0]=yyy[inx[0]]
if len(ooo) == 1:
file_name = ooo[0]
print('Download file: ', file_name)
e = 1
while e == 1 :
try:
ftp.retrbinary('RETR ' + file_name, open(file_name, 'wb').write)
e = 0
except:
print("Error while downloading")
#----------------------------------------------------------------------------------------#
# Closing FTP connection
print('Closing FTP connection')
ftp.close()
| mit | Python | |
d0c3e0901cbb1c89477ec081a5dce0d3a12f8df8 | add test runner script for local development | adnelson/nixfromnpm,adnelson/nixfromnpm | runtests.py | runtests.py | #!/usr/bin/env nix-shell
#! nix-shell -i python3 -p python3Packages.pyyaml
"""Run the tests in the circleci config, for local dev."""
from os.path import dirname, join
from subprocess import call
import yaml
with open(join(dirname(__file__), ".circleci", "config.yml")) as f:
circle_cfg = yaml.load(f)
results = {}
failures = 0
for step in circle_cfg["jobs"]["build"]["steps"]:
try:
name, command = step["run"]["name"], step["run"]["command"]
except (TypeError, KeyError):
continue
print("Running", name)
if call(command, shell=True) == 0:
results[name] = "PASSED"
else:
results[name] = "FAILED"
failures += 1
print("\nRESULTS:\n")
for name, passed in results.items():
print("*", name, "PASSED" if passed else "FAILED")
exit(failures)
| mit | Python | |
3361cf94d06bdc3a207caa490c7ce2e43cc5795f | Add 136-single-number.py | mvj3/leetcode | 136-single-number.py | 136-single-number.py | """
Question:
Single Number
Given an array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
Performance:
1. Total Accepted: 92009 Total Submissions: 199999 Difficulty: Medium
2. Your runtime beats 55.22% of python submissions.
"""
class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
Google it, and suddenly understand that `xor` let two same number convert all bits to zero, so the single one lefted.
"""
result = 0
for num in nums:
result ^= num
return result
assert Solution().singleNumber([1, 0, 1]) == 0
assert Solution().singleNumber([1, 3, 1]) == 3
| mit | Python | |
c8c026cb1f0d19a928671dd80e7a9664d13507c6 | add leetcode Binary Tree Preorder Traversal | Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code | leetcode/BinaryTreePreorderTraversal/solution.py | leetcode/BinaryTreePreorderTraversal/solution.py | # -*- coding:utf-8 -*-
# Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param root, a tree node
# @return a list of integers
def preorderTraversal(self, root):
ret = []
if root:
self.traversal(root, ret)
return ret
def traversal(self, root, ret):
if root is None:
return
ret.append(root.val)
self.traversal(root.left, ret)
self.traversal(root.right, ret)
| mit | Python | |
3c4cc20a2b9708f01569abd7985573e26f0a19ad | add tic_tac_toe.py part 1 | Nethermaker/school-projects | intro/tic_tac_toe.py | intro/tic_tac_toe.py | import random
import copy
#We will represent the board as a string with
# TEN THINGS in it. The first thing will be blank (always),
# and the next nine will be 'X', 'O', or ' '.
board = ' XO XO XOO'
def draw_board(board):
print ' | | '
print ' {} | {} | {} '.format(board[7], board[8], board[9])
print ' | | '
print '-----------' #11 dashes
print ' | | '
print ' {} | {} | {} '.format(board[4], board[5], board[6])
print ' | | '
print '-----------' #11 dashes
print ' | | '
print ' {} | {} | {} '.format(board[1], board[2], board[3])
print ' | | '
def input_player_letter():
#Lets the player type which letter they want to be.
#Returns a list with the player's letter first, and the
# computer letter second
letter = ''
while letter not in ['X', 'O']:
letter = raw_input('Do you want to be X or O? ').upper()
if letter == 'X':
return ['X', 'O']
else:
return ['O', 'X']
def who_goes_first():
#Randomly returns either 'computer' or 'player'
return random.choice(['player', 'computer'])
def play_again():
return raw_input('Do you want to play again (yes/no)? ').lower().startswith('y')
def is_winner(bo, le):
return ((bo[7] == le and bo[8] == le and bo[9] == le) or
(bo[4] == le and bo[5] == le and bo[6] == le) or
(bo[1] == le and bo[2] == le and bo[3] == le) or
(bo[1] == le and bo[4] == le and bo[7] == le) or
(bo[2] == le and bo[5] == le and bo[8] == le) or
(bo[3] == le and bo[6] == le and bo[9] == le) or
(bo[1] == le and bo[5] == le and bo[9] == le) or
(bo[7] == le and bo[5] == le and bo[3] == le))
def get_player_move(board):
move = ' '
while move not in '1 2 3 4 5 6 7 8 9'.split() or board[int(move)] != ' ':
move = raw_input('What is your next move (1-9)? ')
return int(move)
| mit | Python | |
19f58aca5dbfbdd6e47cca5927df51dc8904371c | Add mocked up settings | SEL-Columbia/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq | settings.py | settings.py | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
SECRET_KEY = 'this is not a secret key'
INSTALLED_APPS = (
'couchdbkit.ext.django',
'couchforms',
'coverage'
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'couchforms',
}
}
####### Couch Config ######
COUCH_HTTPS = False # recommended production value is True if enabling https
COUCH_SERVER_ROOT = '127.0.0.1:5984' #6984 for https couch
COUCH_USERNAME = ''
COUCH_PASSWORD = ''
COUCH_DATABASE_NAME = 'couchform'
COUCHDB_DATABASES = [
('couchforms', 'http://127.0.0.1:5984/couchforms'),
('couch', 'http://127.0.0.1:5984/couchforms'), # Why?
]
# Hardcode the test database?
XFORMS_POST_URL = "http://127.0.0.1:5984/couchforms_test/_design/couchforms/_update/xform/"
TEST_RUNNER = 'couchdbkit.ext.django.testrunner.CouchDbKitTestSuiteRunner'
####### # Email setup ########
# Print emails to console so there is no danger of spamming, but you can still get registration URLs
EMAIL_BACKEND='django.core.mail.backends.console.EmailBackend'
EMAIL_LOGIN = "nobody@example.com"
EMAIL_PASSWORD = "******"
EMAIL_SMTP_HOST = "smtp.example.com"
EMAIL_SMTP_PORT = 587
COVERAGE_REPORT_HTML_OUTPUT_DIR='coverage-html'
COVERAGE_MODULE_EXCLUDES= ['tests$', 'settings$', 'urls$', 'locale$',
'common.views.test', '^django', 'management', 'migrations',
'^south', '^djcelery', '^debug_toolbar', '^rosetta']
| bsd-3-clause | Python | |
158e4df1738ab1b1e104ee50a93cb829ed868374 | Create tutorial2.py | eddennison/empty-app | tutorial2.py | tutorial2.py | from ggame import App, Color, LineStyle, Sprite
from ggame import CircleAsset
red = Color(0xff0000, 1.0)
green = Color(0x00ff00, 1.0)
blue = Color(0x0000ff, 1.0)
black = Color(0x000000, 1.0)
thinline = LineStyle(1, black)
mycircle = CircleAsset(5, thinline, blue)
xcoordinates = range(100, 600, 10)
# Generate a list of sprites that form a line!
sprites = [Sprite(mycircle, (x, x*0.5 + 100)) for x in xcoordinates]
myapp = App()
myapp.run()
| mit | Python | |
d0789d6b47a7186077927d8b8b0f812c597e26bc | Add check_keys.py | Celeo/GETIN-Auth,Celeo/GETIN-Auth,Celeo/GETIN-Auth,Celeo/GETIN-Auth | check_keys.py | check_keys.py | from hr.app import *
with open('results.txt', 'w') as f:
for key in APIKey.query.all():
try:
auth = xmlapi.auth(keyID=key.key, vCode=key.code)
result = auth.account.APIKeyInfo()
if not result.key.accessMask == app.config['API_KEY_MASK']:
f.write('Invalid key #{}, mask is {} but needs {}\n'.format(
key.id,
result.key.accessMask,
app.config['API_KEY_MASK']
))
except Exception as e:
f.write('Invalid key #{}, exception: {}'.format(
key.id,
str(e)
))
| mit | Python | |
301e6b9b997c53aec8f63184153c949e8d6a1983 | Create __init__.py | Quantmatic/iq2mongo | iq2mongo/__init__.py | iq2mongo/__init__.py | from iq2mongo import Iqfeedhistoricdata
| apache-2.0 | Python | |
22be0d463a7e4f8795c9b14e761bb2f84ef3ce50 | add UDPCollector | Nihn/Diamond-1,Netuitive/Diamond,bmhatfield/Diamond,signalfx/Diamond,Slach/Diamond,datafiniti/Diamond,CYBERBUGJR/Diamond,rtoma/Diamond,jriguera/Diamond,Ssawa/Diamond,skbkontur/Diamond,actmd/Diamond,EzyInsights/Diamond,disqus/Diamond,timchenxiaoyu/Diamond,hvnsweeting/Diamond,saucelabs/Diamond,Clever/Diamond,Slach/Diamond,Ormod/Diamond,actmd/Diamond,Ensighten/Diamond,Nihn/Diamond-1,h00dy/Diamond,joel-airspring/Diamond,Netuitive/Diamond,MichaelDoyle/Diamond,Basis/Diamond,h00dy/Diamond,Precis/Diamond,tuenti/Diamond,dcsquared13/Diamond,jumping/Diamond,thardie/Diamond,TAKEALOT/Diamond,CYBERBUGJR/Diamond,Netuitive/netuitive-diamond,Ssawa/Diamond,Ensighten/Diamond,eMerzh/Diamond-1,Clever/Diamond,mfriedenhagen/Diamond,zoidbergwill/Diamond,timchenxiaoyu/Diamond,anandbhoraskar/Diamond,hamelg/Diamond,mfriedenhagen/Diamond,rtoma/Diamond,Nihn/Diamond-1,python-diamond/Diamond,metamx/Diamond,zoidbergwill/Diamond,gg7/diamond,ramjothikumar/Diamond,TinLe/Diamond,Slach/Diamond,sebbrandt87/Diamond,ramjothikumar/Diamond,Ssawa/Diamond,metamx/Diamond,hvnsweeting/Diamond,works-mobile/Diamond,jriguera/Diamond,datafiniti/Diamond,Netuitive/Diamond,russss/Diamond,jumping/Diamond,Basis/Diamond,zoidbergwill/Diamond,tusharmakkar08/Diamond,sebbrandt87/Diamond,sebbrandt87/Diamond,disqus/Diamond,mzupan/Diamond,krbaker/Diamond,EzyInsights/Diamond,stuartbfox/Diamond,works-mobile/Diamond,timchenxiaoyu/Diamond,gg7/diamond,sebbrandt87/Diamond,signalfx/Diamond,socialwareinc/Diamond,h00dy/Diamond,cannium/Diamond,acquia/Diamond,timchenxiaoyu/Diamond,joel-airspring/Diamond,TinLe/Diamond,zoidbergwill/Diamond,szibis/Diamond,Basis/Diamond,acquia/Diamond,russss/Diamond,anandbhoraskar/Diamond,Ssawa/Diamond,jaingaurav/Diamond,cannium/Diamond,jriguera/Diamond,dcsquared13/Diamond,codepython/Diamond,tellapart/Diamond,ramjothikumar/Diamond,tuenti/Diamond,actmd/Diamond,eMerzh/Diamond-1,CYBERBUGJR/Diamond,eMerzh/Diamond-1,szibis/Diamond,Netuitive/netuitive-dia
mond,saucelabs/Diamond,jaingaurav/Diamond,szibis/Diamond,codepython/Diamond,gg7/diamond,MichaelDoyle/Diamond,ceph/Diamond,joel-airspring/Diamond,janisz/Diamond-1,TAKEALOT/Diamond,MediaMath/Diamond,socialwareinc/Diamond,ceph/Diamond,cannium/Diamond,Ormod/Diamond,TinLe/Diamond,TinLe/Diamond,Ormod/Diamond,MediaMath/Diamond,bmhatfield/Diamond,hamelg/Diamond,krbaker/Diamond,jaingaurav/Diamond,MediaMath/Diamond,janisz/Diamond-1,dcsquared13/Diamond,hvnsweeting/Diamond,ceph/Diamond,saucelabs/Diamond,Precis/Diamond,rtoma/Diamond,Clever/Diamond,eMerzh/Diamond-1,datafiniti/Diamond,python-diamond/Diamond,janisz/Diamond-1,Nihn/Diamond-1,tusharmakkar08/Diamond,thardie/Diamond,szibis/Diamond,signalfx/Diamond,Netuitive/netuitive-diamond,russss/Diamond,krbaker/Diamond,MichaelDoyle/Diamond,Netuitive/netuitive-diamond,jumping/Diamond,socialwareinc/Diamond,russss/Diamond,jriguera/Diamond,stuartbfox/Diamond,mzupan/Diamond,TAKEALOT/Diamond,mfriedenhagen/Diamond,saucelabs/Diamond,tellapart/Diamond,acquia/Diamond,hamelg/Diamond,gg7/diamond,Precis/Diamond,jaingaurav/Diamond,anandbhoraskar/Diamond,EzyInsights/Diamond,rtoma/Diamond,Precis/Diamond,tusharmakkar08/Diamond,socialwareinc/Diamond,MediaMath/Diamond,tuenti/Diamond,Slach/Diamond,skbkontur/Diamond,cannium/Diamond,janisz/Diamond-1,tellapart/Diamond,disqus/Diamond,dcsquared13/Diamond,metamx/Diamond,krbaker/Diamond,tellapart/Diamond,skbkontur/Diamond,CYBERBUGJR/Diamond,h00dy/Diamond,Clever/Diamond,anandbhoraskar/Diamond,works-mobile/Diamond,Ormod/Diamond,jumping/Diamond,Basis/Diamond,Ensighten/Diamond,datafiniti/Diamond,skbkontur/Diamond,TAKEALOT/Diamond,MichaelDoyle/Diamond,stuartbfox/Diamond,Netuitive/Diamond,tuenti/Diamond,mzupan/Diamond,codepython/Diamond,codepython/Diamond,stuartbfox/Diamond,tusharmakkar08/Diamond,ceph/Diamond,EzyInsights/Diamond,mfriedenhagen/Diamond,acquia/Diamond,thardie/Diamond,bmhatfield/Diamond,joel-airspring/Diamond,signalfx/Diamond,python-diamond/Diamond,actmd/Diamond,thardie/Diamond,bmhatfield/Diamond,ramjot
hikumar/Diamond,Ensighten/Diamond,hamelg/Diamond,hvnsweeting/Diamond,mzupan/Diamond,works-mobile/Diamond | src/collectors/udp/udp.py | src/collectors/udp/udp.py | # coding=utf-8
"""
The UDPCollector class collects metrics on UDP stats (surprise!)
It's a copy of the TCPCollector with minor adjustments
#### Dependencies
* /proc/net/snmp
"""
import diamond.collector
import os
class UDPCollector(diamond.collector.Collector):
PROC = '/proc/net/snmp'
def get_default_config_help(self):
config_help = super(UDPCollector, self).get_default_config_help()
config_help.update({
'allowed_names': 'list of entries to collect',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(UDPCollector, self).get_default_config()
config.update({
'path': 'udp',
'allowed_names': 'InDatagrams, NoPorts, '
+ 'InErrors, OutDatagrams, RcvbufErrors, SndbufErrors'
})
return config
def collect(self):
if not os.access(self.PROC, os.R_OK):
return None
lines = []
file = open(self.PROC)
for line in file:
if line.startswith("Udp:"):
lines.append(line[5:].split())
file.close()
if len(lines) != 2:
return
# There are two lines in lines: names and values, space-separated.
names, values = lines
allowed_names = self.config['allowed_names']
for key, value in zip(names, values):
if key in allowed_names:
value = self.derivative(key, long(value))
if value < 0:
continue
self.publish(key, value, 0)
| mit | Python | |
f54a014fa420699f3c161f90b6392075a0134807 | move .ready() to the right place | cdsteinkuehler/MachineKit,araisrobo/machinekit,unseenlaser/machinekit,ArcEye/machinekit-testing,strahlex/machinekit,ArcEye/machinekit-testing,strahlex/machinekit,EqAfrica/machinekit,ArcEye/machinekit-testing,yishinli/emc2,araisrobo/machinekit,unseenlaser/machinekit,strahlex/machinekit,RunningLight/machinekit,ianmcmahon/linuxcnc-mirror,bobvanderlinden/machinekit,araisrobo/machinekit,bobvanderlinden/machinekit,aschiffler/linuxcnc,unseenlaser/linuxcnc,ikcalB/linuxcnc-mirror,mhaberler/machinekit,cdsteinkuehler/linuxcnc,kinsamanka/machinekit,araisrobo/machinekit,aschiffler/linuxcnc,unseenlaser/linuxcnc,Cid427/machinekit,mhaberler/machinekit,jaguarcat79/ILC-with-LinuxCNC,ianmcmahon/linuxcnc-mirror,Cid427/machinekit,cnc-club/linuxcnc,ArcEye/machinekit-testing,ArcEye/MK-Qt5,unseenlaser/linuxcnc,kinsamanka/machinekit,araisrobo/linuxcnc,cnc-club/linuxcnc,araisrobo/machinekit,ArcEye/MK-Qt5,RunningLight/machinekit,unseenlaser/machinekit,ArcEye/MK-Qt5,bmwiedemann/linuxcnc-mirror,bmwiedemann/linuxcnc-mirror,kinsamanka/machinekit,narogon/linuxcnc,bmwiedemann/linuxcnc-mirror,unseenlaser/linuxcnc,Cid427/machinekit,kinsamanka/machinekit,mhaberler/machinekit,cnc-club/linuxcnc,ikcalB/linuxcnc-mirror,cdsteinkuehler/MachineKit,yishinli/emc2,RunningLight/machinekit,ArcEye/machinekit-testing,bobvanderlinden/machinekit,jaguarcat79/ILC-with-LinuxCNC,mhaberler/machinekit,araisrobo/linuxcnc,jaguarcat79/ILC-with-LinuxCNC,jaguarcat79/ILC-with-LinuxCNC,ArcEye/machinekit-testing,ianmcmahon/linuxcnc-mirror,ikcalB/linuxcnc-mirror,cdsteinkuehler/linuxcnc,bmwiedemann/linuxcnc-mirror,bobvanderlinden/machinekit,bobvanderlinden/machinekit,ArcEye/machinekit-testing,ianmcmahon/linuxcnc-mirror,araisrobo/linuxcnc,EqAfrica/machinekit,mhaberler/machinekit,kinsamanka/machinekit,RunningLight/machinekit,mhaberler/machinekit,RunningLight/machinekit,ikcalB/linuxcnc-mirror,cdsteinkuehler/MachineKit,narogon/linuxcnc,cdsteinkuehler/linuxcnc,
ArcEye/MK-Qt5,ianmcmahon/linuxcnc-mirror,narogon/linuxcnc,Cid427/machinekit,bobvanderlinden/machinekit,cdsteinkuehler/linuxcnc,araisrobo/linuxcnc,ArcEye/MK-Qt5,araisrobo/machinekit,narogon/linuxcnc,kinsamanka/machinekit,ArcEye/MK-Qt5,bobvanderlinden/machinekit,cnc-club/linuxcnc,ikcalB/linuxcnc-mirror,unseenlaser/machinekit,ikcalB/linuxcnc-mirror,EqAfrica/machinekit,ArcEye/MK-Qt5,cdsteinkuehler/MachineKit,EqAfrica/machinekit,cdsteinkuehler/MachineKit,aschiffler/linuxcnc,ArcEye/machinekit-testing,strahlex/machinekit,strahlex/machinekit,Cid427/machinekit,bmwiedemann/linuxcnc-mirror,cdsteinkuehler/MachineKit,unseenlaser/machinekit,strahlex/machinekit,mhaberler/machinekit,unseenlaser/machinekit,ianmcmahon/linuxcnc-mirror,cdsteinkuehler/linuxcnc,EqAfrica/machinekit,EqAfrica/machinekit,kinsamanka/machinekit,bmwiedemann/linuxcnc-mirror,bobvanderlinden/machinekit,araisrobo/linuxcnc,araisrobo/machinekit,kinsamanka/machinekit,araisrobo/machinekit,ikcalB/linuxcnc-mirror,narogon/linuxcnc,RunningLight/machinekit,EqAfrica/machinekit,aschiffler/linuxcnc,cnc-club/linuxcnc,yishinli/emc2,unseenlaser/linuxcnc,aschiffler/linuxcnc,ArcEye/MK-Qt5,cnc-club/linuxcnc,Cid427/machinekit,bmwiedemann/linuxcnc-mirror,strahlex/machinekit,EqAfrica/machinekit,unseenlaser/machinekit,araisrobo/machinekit,jaguarcat79/ILC-with-LinuxCNC,mhaberler/machinekit,cnc-club/linuxcnc,ianmcmahon/linuxcnc-mirror,RunningLight/machinekit,RunningLight/machinekit,Cid427/machinekit,Cid427/machinekit,cdsteinkuehler/linuxcnc,yishinli/emc2,unseenlaser/machinekit | src/hal/user_comps/pyvcp.py | src/hal/user_comps/pyvcp.py | #!/usr/bin/env python
# This is a component of emc
# Copyright 2007 Anders Wallin <anders.wallin@helsinki.fi>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os
BASE = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))
sys.path.insert(0, os.path.join(BASE, "lib", "python"))
import vcpparse
import hal
from Tkinter import Tk
def main():
    """Entry point: build a PyVCP panel from the XML file on the command line.

    Parses the XML panel description into a HAL component, marks the
    component ready, then hands control to the Tk event loop.
    """
    try:
        filename=sys.argv[1]
    except:
        print "Error: No XML file specified!"
        sys.exit()
    # Root Tk window that hosts the generated panel widgets.
    pyvcp0 = Tk()
    vcpparse.filename=filename
    pycomp=vcpparse.create_vcp(pyvcp0)
    # Mark the HAL component ready before the (blocking) GUI loop starts.
    pycomp.ready()
    pyvcp0.mainloop()
main()
| #!/usr/bin/env python
# This is a component of emc
# Copyright 2007 Anders Wallin <anders.wallin@helsinki.fi>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os
BASE = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))
sys.path.insert(0, os.path.join(BASE, "lib", "python"))
import vcpparse
import hal
from Tkinter import Tk
def main():
    """Entry point: build a PyVCP panel from the XML file on the command line.

    Parses the XML panel description into a HAL component, marks the
    component ready, then hands control to the Tk event loop.
    """
    try:
        filename=sys.argv[1]
    # Only a missing argv[1] is expected here; a bare except would also
    # swallow KeyboardInterrupt and genuine bugs.
    except IndexError:
        print ("Error: No XML file specified!")
        sys.exit()
    # Root Tk window that hosts the generated panel widgets.
    pyvcp0 = Tk()
    vcpparse.filename=filename
    pycomp=vcpparse.create_vcp(pyvcp0)
    # BUGFIX: ready() must run *before* the blocking Tk main loop.  The
    # original order only marked the HAL component ready after the GUI
    # exited, so HAL never saw the component as ready while running.
    pycomp.ready()
    pyvcp0.mainloop()
main()
| lgpl-2.1 | Python |
365be4a69571f5eadc8e3446e200594e8497b9b5 | add python password generator | bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile | python/python/misc/pwgen.py | python/python/misc/pwgen.py | #!/usr/bin/env python
'''
Copyright (C) 2011 Bryan Maupin <bmaupincode@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
'''
Generates a password of a specified length given specific character classes
'''
# enables true division; not avaliable until Python 3.0
from __future__ import division
import bisect
import random
import sys
# change these as necessary:
# minimum password length
min_password_length = 12
# maximum password length
max_password_length = 15
# minimum number of character classes
min_classes = 3
# character classes
# l, I, 1, O, 0 removed for those who still haven't figured out how to copy
# and paste
char_classes = {'lower': 'abcdefghijkmnopqrstuvwxyz',
'upper': 'ABCDEFGHJKLMNPQRSTUVWXYZ',
'numbers': '23456789',
'symbols': '@#$%',
}
# probabilities that a given character class will be used
class_probs = {'lower': 0.75,
'upper': 0.05,
'numbers': 0.1,
'symbols': 0.1,
}
def main():
    """Generate and print one random password using the module settings.

    Validates that the configured minimum number of character classes is
    achievable, picks a random length between the configured min and max,
    and builds the password with a weighted random class generator.
    """
    if min_classes > len(char_classes):
        sys.exit('Error: minimum classes cannot be greater than number of '
                 'available classes')
    if min_classes > max_password_length:
        sys.exit('Error: minimum classes cannot be greater than maximum '
                 'password length')
    classes = []
    probabilities = []
    # split the class_probs dict into parallel lists for easier processing
    for class_name in class_probs:
        classes.append(class_name)
        probabilities.append(class_probs[class_name])
    wrg = WeightedRandomGenerator(classes, probabilities)
    # use a random password length for more randomness
    password_length = min_password_length + \
        random.choice(range(max_password_length - min_password_length + 1))
    password = build_password(wrg, password_length)
    print password
def build_password(wrg, password_length):
    """Build a password of *password_length* characters.

    Characters are drawn from the module-level ``char_classes``, with the
    class of each character chosen by the weighted generator *wrg*.  The
    last ``min_classes`` positions are used to guarantee that at least
    ``min_classes`` distinct classes appear; the result is shuffled so the
    guaranteed characters are not clustered at the end.
    """
    password_chars = []
    used_classes = []
    # get all of the characters, leaving a few to make sure we get the minimum
    # number of classes
    for n in range(password_length - min_classes):
        char_class = wrg.next()
        if char_class not in used_classes:
            used_classes.append(char_class)
        password_chars.append(random.choice(char_classes[char_class]))
    # go through the last few characters
    for n in range(min_classes):
        # if we haven't used the number of minimum classes
        if len(used_classes) < min_classes:
            # go through all the classes
            for char_class in char_classes:
                # find one we haven't used
                if char_class not in used_classes:
                    used_classes.append(char_class)
                    break
        else:
            char_class = wrg.next()
        password_chars.append(random.choice(char_classes[char_class]))
    # now shuffle it all up for good measure
    random.shuffle(password_chars)
    # and put it all together in a string
    password = ''.join(password_chars)
    return password
# based on code from
# http://eli.thegreenplace.net/2010/01/22/weighted-random-generation-in-python
class WeightedRandomGenerator(object):
    """Draw names at random with probability proportional to their weights.

    Based on the technique described at
    http://eli.thegreenplace.net/2010/01/22/weighted-random-generation-in-python
    """

    def __init__(self, names, weights):
        self.names = names
        # Running cumulative totals; the final entry is the weight sum.
        self.totals = []
        cumulative = 0
        for weight in weights:
            cumulative += weight
            self.totals.append(cumulative)

    def next(self):
        """Return one randomly chosen name, biased by the weights."""
        # Pick a uniform point in [0, total) and find the bucket it lands in.
        point = random.random() * self.totals[-1]
        return self.names[bisect.bisect_right(self.totals, point)]
# calls the main() function
if __name__=='__main__':
main()
| mit | Python | |
525f8afcb132c2bb0eb853207685f291b292c41f | Add strip_openflow utility component | pthien92/sdn,MurphyMc/pox,PrincetonUniversity/pox,adusia/pox,pthien92/sdn,noxrepo/pox,MurphyMc/pox,adusia/pox,chenyuntc/pox,denovogroup/pox,carlye566/IoT-POX,MurphyMc/pox,xAKLx/pox,carlye566/IoT-POX,VamsikrishnaNallabothu/pox,VamsikrishnaNallabothu/pox,chenyuntc/pox,diogommartins/pox,noxrepo/pox,waltznetworks/pox,kpengboy/pox-exercise,adusia/pox,waltznetworks/pox,jacobq/csci5221-viro-project,kavitshah8/SDNDeveloper,PrincetonUniversity/pox,andiwundsam/_of_normalize,denovogroup/pox,andiwundsam/_of_normalize,denovogroup/pox,MurphyMc/pox,pthien92/sdn,kavitshah8/SDNDeveloper,kavitshah8/SDNDeveloper,noxrepo/pox,carlye566/IoT-POX,diogommartins/pox,andiwundsam/_of_normalize,VamsikrishnaNallabothu/pox,VamsikrishnaNallabothu/pox,kulawczukmarcin/mypox,carlye566/IoT-POX,waltznetworks/pox,diogommartins/pox,waltznetworks/pox,kulawczukmarcin/mypox,pthien92/sdn,kpengboy/pox-exercise,PrincetonUniversity/pox,xAKLx/pox,VamsikrishnaNallabothu/pox,kulawczukmarcin/mypox,pthien92/sdn,carlye566/IoT-POX,kulawczukmarcin/mypox,waltznetworks/pox,PrincetonUniversity/pox,kpengboy/pox-exercise,denovogroup/pox,PrincetonUniversity/pox,jacobq/csci5221-viro-project,kpengboy/pox-exercise,xAKLx/pox,diogommartins/pox,adusia/pox,jacobq/csci5221-viro-project,chenyuntc/pox,xAKLx/pox,MurphyMc/pox,chenyuntc/pox,kulawczukmarcin/mypox,andiwundsam/_of_normalize,jacobq/csci5221-viro-project,diogommartins/pox,jacobq/csci5221-viro-project,xAKLx/pox,kavitshah8/SDNDeveloper,chenyuntc/pox,denovogroup/pox,noxrepo/pox,adusia/pox,kpengboy/pox-exercise | pox/lib/pxpcap/strip_openflow.py | pox/lib/pxpcap/strip_openflow.py | # Copyright 2012,2013 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
A hacky tool to grab packet in/out data from OpenFlow traffic.
Assumes packets are 1:1 with OF messages (as if captured using the
openflow.debug component).
--infile=<filename> Input file
--outfile=<filename> Output file
--out-only Don't include packet_ins
--in-only Don't include packet_outs
--openflow-port=<num> Specify OpenFlow TCP port
"""
#TODO: Clean this up, follow multiple control traffic streams, decode
# TCP, etc.
from pox.core import core
import pox.openflow.libopenflow_01 as of
import pox.lib.packet as pkt
from pox.lib.util import dpidToStr
import pox.lib.pxpcap.parser as pxparse
import pox.lib.pxpcap.writer as pxwriter
log = core.getLogger()
from pox.lib.pxpcap.writer import PCapRawWriter
_writer = None
_of_port = 6633
_in_only = False
_out_only = False
_pis = 0
_pos = 0
def pi_cb (data, parser):
  """pcap callback: pull the payloads out of OpenFlow packet_in/packet_out
  messages found in *data* and append them to the global output writer.

  Assumes each captured frame carries exactly one complete OpenFlow 1.0
  message (version byte 0x01), as produced by the openflow.debug component.
  """
  global _pis, _pos
  packet = pkt.ethernet(data)
  if packet.find('tcp'):
    # Only consider traffic to/from the configured OpenFlow control port.
    if packet.find('tcp').dstport == _of_port or \
       packet.find('tcp').srcport == _of_port:
      p = packet.find('tcp').payload
      # OpenFlow 1.0 header: version (0x01), type, 16-bit big-endian length.
      assert p[0] == '\x01'
      t = ord(p[1])
      packet_length = ord(p[2]) << 8 | ord(p[3])
      if packet_length != len(p):
        log.error("%s != %s" % (packet_length, len(p)))
      if t == of.OFPT_PACKET_IN:
        if _out_only: return
        l,p = of.ofp_packet_in.unpack_new(p)
        _pis += 1
      elif t == of.OFPT_PACKET_OUT:
        if _in_only: return
        l,p = of.ofp_packet_out.unpack_new(p)
        _pos += 1
      else:
        return
      assert l == len(p)
      # Write only the encapsulated packet data, keeping capture metadata.
      _writer.write(p.data, time=parser._time, wire_size=parser._wire_size)
def launch (infile, outfile, in_only=False, out_only = False):
  """
  For stripping PI/PO data

  Reads pcap file *infile*, extracts payloads of OpenFlow packet_in and
  packet_out messages via pi_cb(), and writes them to pcap file *outfile*.
  *in_only*/*out_only* restrict which message type is kept.  Quits POX
  when finished.
  """
  global _writer, _of_port, _in_only, _out_only
  _in_only = in_only
  _out_only = out_only
  data = open(infile, "r").read()
  p = pxparse.PCapParser(callback=pi_cb)
  _writer = pxwriter.PCapRawWriter(open(outfile, "w"))
  p.feed(data)
  log.info("%i packet_ins, %i packet_outs", _pis, _pos)
  core.quit()
| apache-2.0 | Python | |
148068d99477b5f61a91362a22d56dc3e7ca2dcd | Change where uploads go | comandrei/meh,comandrei/meh | meh/vermin/migrations/0002_auto_20150505_0439.py | meh/vermin/migrations/0002_auto_20150505_0439.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter ``Post.image`` to upload into the 'posts' directory."""

    # Must apply on top of the app's initial schema.
    dependencies = [
        ('vermin', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='image',
            # Optional image; files are stored under MEDIA_ROOT/posts.
            field=models.ImageField(null=True, upload_to=b'posts', blank=True),
        ),
    ]
| mit | Python | |
2ddaadff2033517def793af5bfc0bae122789e6a | add users.py, which plugs a hole in the dev authentication process | fcrepo4-archive/RDFDatabank,fcrepo4-archive/RDFDatabank,fcrepo4-archive/RDFDatabank,fcrepo4-archive/RDFDatabank | rdfdatabank/config/users.py | rdfdatabank/config/users.py | _USERS = {'admin': {'owner': '*', 'first_name': 'Databank', 'last_name': 'Admin', 'role': 'admin'}, 'sandbox_user': {'owner': ['sandbox'], 'role': 'user', 'name': 'Sandbox user', 'description': 'User for silo Sandbox'}}
| mit | Python | |
a1be18c495802ab07147b30c2455445817a42fa6 | Test for __unicode__ behavior | amir-qayyum-khan/lore,amir-qayyum-khan/lore,amir-qayyum-khan/lore,amir-qayyum-khan/lore,mitodl/lore,mitodl/lore,amir-qayyum-khan/lore,mitodl/lore,mitodl/lore,mitodl/lore | learningresources/tests/test_models.py | learningresources/tests/test_models.py | """
Tests for learningresources models
"""
from __future__ import unicode_literals
from django.test.testcases import TestCase
from learningresources.models import LearningResourceType
class TestModels(TestCase):
    """Tests for learningresources models"""
    def test_unicode(self):
        """Test for __unicode__ on LearningResourceType"""
        # Create a row so the model's unicode representation can be checked.
        first = LearningResourceType.objects.create(
            name="first"
        )
        # NOTE(review): `unicode` and assertEquals are Python-2-era; under
        # Python 3 this would be str()/assertEqual.
        self.assertEquals("first", unicode(first))
| agpl-3.0 | Python | |
24357efc8b44d916d9d43e262bb7b323cc650e81 | Create get_ip_send_email.py | oska874/raspberry | get_ip_send_email.py | get_ip_send_email.py | #!/usr/bin/python
#-*-coding:utf8-*-
__author__ = 'laixintao'
import socket
import fcntl
import time
import struct
import smtplib
import urllib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
import re
import urllib2
# the e-mail config
# this is just a simple format,this e-mail doesn't exist.
smtpserver = "smtp.sina.com"
username = "testxil1@sina.com"
password = "123456a"
sender = "testxil1@sina.com"
receiver = ["testxil1@sina.com"]
subject = "[RPI]IP CHANGED"
# file_path config
file_path = "/home/zlr/lastip.txt"
def sendEmail(msghtml):
    """Send *msghtml* as an HTML e-mail using the module-level SMTP settings.

    Connects to ``smtpserver``, authenticates with ``username``/``password``
    and sends from ``sender`` to every address in ``receiver``.
    """
    msgRoot = MIMEMultipart('related')
    msgRoot["To"] = ','.join(receiver)
    msgRoot["From"] = sender
    msgRoot['Subject'] = subject
    msgText = MIMEText(msghtml,'html','utf-8')
    msgRoot.attach(msgText)
    smtp = smtplib.SMTP()
    smtp.connect(smtpserver)
    smtp.login(username, password)
    smtp.sendmail(sender, receiver, msgRoot.as_string())
    smtp.quit()
def check_network():
    """Wait until the network is available, then return True.

    NOTE(review): the try body performs no actual network operation — it
    only prints and breaks — so nothing here can raise and the function
    always returns on the first pass.  An exception-raising connectivity
    probe (e.g. a socket/HTTP call) was presumably intended; also the
    message says "Sleep 5s" while the code sleeps 10s.
    """
    while True:
        try:
            print "Network is Ready!"
            break
        except Exception , e:
            print e
            print "Network is not ready,Sleep 5s...."
            time.sleep(10)
    return True
def get_lan_ip():
    """Return this host's LAN-facing IP address.

    Uses the standard UDP-connect trick: connecting a datagram socket does
    not send any packets, but it makes the OS pick the local interface that
    would route to the target, which getsockname() then reports.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.connect(("oska874.github.io",80))
    ipaddr=s.getsockname()[0]
    s.close()
    return ipaddr
class Getmyip:
    """Look up this host's public IP address via the http://ip.gs service."""

    def getip(self):
        """Return the public IP as a dotted-quad string."""
        myip = self.visit("http://ip.gs")
        return myip

    def visit(self,url):
        """Fetch *url* and extract the first dotted-quad IP from the body.

        NOTE(review): returns None implicitly if the request was redirected
        (geturl() differs from *url*); local name ``str`` shadows the
        builtin.
        """
        opener = urllib2.urlopen(url,timeout=20)
        if url == opener.geturl():
            str = opener.read()
            print "IP information from",url
            return re.search('\d+\.\d+\.\d+\.\d+',str).group(0)
def get_network_ip():
    """Return this host's public IP address as reported by http://ip.gs."""
    return Getmyip().getip()
if __name__ == '__main__':
    # Wait (nominally) for connectivity, then gather public and LAN IPs.
    check_network()
    ipaddr=get_network_ip()
    lanip=get_lan_ip()
    # Combined "public lan" string used for change detection in file_path.
    emailip=str(ipaddr)+" "+str(lanip)
    ip_file = open(file_path)
    last_ip = ip_file.read()
    ip_file.close()
    if last_ip == emailip:
        print "IP not change."
    else:
        # Persist the new pair and notify by e-mail.
        # NOTE(review): only the public IP is mailed although both values
        # are stored — confirm that is intended.
        print "IP changed."
        ip_file = open(file_path,"w")
        ip_file.write(str(emailip))
        ip_file.close()
        sendEmail(ipaddr)
        print "Successfully send the e-mail."
| mit | Python | |
5a38a989591202fae42da75fde73f486634376e3 | Create 7kyu_simple_fun_new_numeral_system.py | Orange9000/Codewars,Orange9000/Codewars | Solutions/7kyu/7kyu_simple_fun_new_numeral_system.py | Solutions/7kyu/7kyu_simple_fun_new_numeral_system.py | from string import ascii_uppercase as AU
def new_numeral_system(number):
    """Return every unordered pair of letters whose values sum to *number*.

    Letters A..Z represent the values 0..25.  Each pair is rendered as
    "X + Y", listed in increasing order of the first letter, and each
    unordered pair appears exactly once.
    """
    value = AU.index(number)
    pairs = []
    for low in range(value // 2 + 1):
        pairs.append("{} + {}".format(AU[low], AU[value - low]))
    return pairs
| mit | Python | |
6174804ab17b621ad010c6ddd3b07f7728d24120 | add script | adamewing/bamsurgeon,MischaLundberg/bamsurgeon,adamewing/bamsurgeon,MischaLundberg/bamsurgeon | etc/bsrg.py | etc/bsrg.py | #!/usr/bin/env python
''' Adds a bogus read group to a readgroup-less BAM file '''
import os
import sys
import pysam
from uuid import uuid4
def modhead(header, rgid, fn):
    """Add one bogus read group (ID *rgid*, sample name *fn*) to *header*.

    Exits with the usage text if the header already contains an RG entry.
    The header dict is modified in place and also returned.
    """
    if 'RG' in header:
        sys.stderr.write("RG found in header, this script is not what you want!\n")
        sys.exit(usage())
    read_group = {
        'SM': fn,
        'LB': 'bamsurgeon',
        'CN': 'BS',
        'PU': str(uuid4()),   # random platform unit so reruns differ
        'ID': rgid,
        'PL': 'ILLUMINA',
    }
    header['RG'] = [read_group]
    return header
def getRGID(header):
    """Return the ID of the first (and only) read group in *header*."""
    first_group = header['RG'][0]
    return first_group['ID']
def usage():
    """Return the one-line usage string for this script."""
    return "usage: {0} <BAM with no readgroups>".format(sys.argv[0])
if len(sys.argv) == 2:
    assert sys.argv[1].endswith('.bam'), usage()
    # Output name: foo.bam -> foo.BSRG.bam
    outbamfn = sys.argv[1].replace('.bam', '.BSRG.bam')
    # One random read-group ID shared by the header and every read.
    rgid = str(uuid4())
    inbam = pysam.Samfile(sys.argv[1], 'rb')
    outbam = pysam.Samfile(outbamfn, 'wb', header=modhead(inbam.header, rgid, os.path.basename(outbamfn)))
    # Tag every read (including unmapped, via until_eof) with the new RG.
    for read in inbam.fetch(until_eof=True):
        read.tags = read.tags + [('RG', rgid)]
        outbam.write(read)
    inbam.close()
    outbam.close()
else:
    sys.exit(usage())
| mit | Python | |
936042aad2e599c864cf31164d2399f6170e38c1 | add updated data, new 05 homework | bluegrapes/DAT8Coursework | homework/05_pandas_homework_imdb.py | homework/05_pandas_homework_imdb.py | '''
Pandas Homework with IMDb data
'''
'''
BASIC LEVEL
'''
import pandas as pd
import matplotlib.pyplot as plt
# read in 'imdb_1000.csv' and store it in a DataFrame named movies
movies = pd.read_csv("data/imdb_1000.csv")
# check the number of rows and columns
movies.shape
# check the data type of each column
movies.dtypes
# calculate the average movie duration
movies.columns
movies.duration.mean()
movies.describe()
# sort the DataFrame by duration to find the shortest and longest movies
# NOTE(review): DataFrame.sort was removed in modern pandas; use sort_values.
movies.sort('duration').head(1)
movies.sort('duration').tail(1)
# create a histogram of duration, choosing an "appropriate" number of bins
movies.duration.plot(kind="hist", bins=30)
# use a box plot to display that same data
movies.boxplot(column="duration", by="genre")
movies.boxplot(column="duration", by="content_rating")
'''
INTERMEDIATE LEVEL
'''
# count how many movies have each of the content ratings
movies.content_rating.value_counts()
### check total counts agree once the missing values are accounted for
movies.groupby('content_rating').size().sum() # 976 count
movies.content_rating.isnull().sum() # missing 3 counts
movies.describe() # 979 counts
# use a visualization to display that same data, including a title and x and y labels
movies.content_rating.value_counts().plot(kind="bar")
plt.title("Number of Movies Per Content Rating")
plt.ylabel('Number of Movies')
plt.xlabel('Content Rating')
# convert the following content ratings to "UNRATED": NOT RATED, APPROVED, PASSED, GP
movies.content_rating.replace(['NOT RATED','APPROVED', 'PASSED', 'GP'], 'UNRATED', inplace=True)
movies.content_rating.value_counts()
# convert the following content ratings to "NC-17": X, TV-MA
movies.content_rating.replace(['X','TV-MA'], 'NC-17', inplace=True)
movies.content_rating.value_counts()
# count the number of missing values in each column
movies.isnull().sum()
# if there are missing values: examine them, then fill them in with "reasonable" values
movies[movies.content_rating.isnull()]
### fill using forward fill (copy the previous row's rating)
movies.content_rating.fillna(method='ffill', inplace=True)
# calculate the average star rating for movies 2 hours or longer,
# and compare that with the average star rating for movies shorter than 2 hours
movies[movies.duration >= 120].star_rating.mean()
movies[movies.duration < 120].star_rating.mean()
# use a visualization to detect whether there is a relationship between duration and star rating
movies.plot(kind="scatter", x="star_rating", y="duration", alpha=0.3)
movies.boxplot(column="duration", by="star_rating")
### The majority of movies with 7.4 to 8.7 star rating have a duration range between 100 to 150.
### Movies that are of higher star rating appears to have longer duration
# calculate the average duration for each genre
movies.groupby('genre').duration.mean()
'''
ADVANCED LEVEL
'''
# visualize the relationship between content rating and duration
movies.content_rating.value_counts()
# encode each rating as an integer code so it can be plotted numerically
movies["content_rating_num"] = movies.content_rating.factorize()[0]
movies.plot(kind="scatter", x="content_rating_num", y="duration", alpha=0.3)
movies.hist(column="duration", by="content_rating_num", sharex=True, sharey=True)
### There are a lot more PG rated (0) movies that have longer duration (2 hours or more)
# determine the top rated movie (by star rating) for each genre
high_ratings = movies.groupby('genre').star_rating.max()
# NOTE(review): Python-2 print statement and Series.iteritems (renamed to
# items in recent pandas).
for index, value in high_ratings.iteritems():
    print movies[(movies.genre == index) & (movies.star_rating == value)]
# check if there are multiple movies with the same title, and if so, determine if they are actually duplicates
movies.duplicated("title").sum()
movies[movies.duplicated("title")]
# calculate the average star rating for each genre, but only include genres with at least 10 movies
'''
BONUS
'''
# Figure out something "interesting" using the actors data!
| apache-2.0 | Python | |
9ffe40aaf5ece521020258c4b31fbdb514e02b69 | Add utility function for get latest Build | colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager | manager/utilities.py | manager/utilities.py | from typing import Optional
from manager.models import Package, Build
def get_latest_build(package: Package) -> Optional[Build]:
try:
return Build.objects.filter(package=package, status=Build.SUCCESS).order_by('-id')[0]
except IndexError:
return None
| mit | Python | |
0b55d97573fcd196a318b3c901f6dcac1b0a4eef | Create a placeholder for pyauto test scripts. | ropik/chromium,adobe/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,ropik/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,ropik/chromium,yitian134/chromium,ropik/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,adobe/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,yitian134/chromium | chrome/test/functional/test_basic.py | chrome/test/functional/test_basic.py | #!/usr/bin/python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from pyauto import PyUITest
class SimpleTest(PyUITest):
  """Smoke test: verify the browser can navigate to an external site."""

  def testCanOpenGoogle(self):
    # Navigation raises/fails through the pyauto harness if it cannot load.
    self.NavigateToURL("http://www.google.com")
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python | |
ffdfc05affc88226fda08c4d1924a9c509f4b53e | add GBM detector and datatype attrs | dpshelio/sunpy,dpshelio/sunpy,dpshelio/sunpy | sunpy/net/dataretriever/attrs/gbm.py | sunpy/net/dataretriever/attrs/gbm.py | from sunpy.net.attr import SimpleAttr
# Both attrs defined in this module are public; 'Datatype' was missing.
__all__ = ['Detector', 'Datatype']
class Detector(SimpleAttr):
    """
    Detector number for FERMI GBM.

    Search attribute selecting data from a specific GBM detector.
    """
class Datatype(SimpleAttr):
    """
    Data type of GBM - either CSPEC or CTIME.
""" | bsd-2-clause | Python | |
6906f574fad033f0b68fe7a5a35f4fcef1207ee0 | Load the source from a highlighted URL into a new window | RichardHyde/SublimeText.Packages | Get_Webpage_Source.py | Get_Webpage_Source.py | import sublime
import sublime_plugin
import urllib
class GetWebpageSourceCommand(sublime_plugin.TextCommand):
    """Sublime Text command: fetch the URL highlighted in the current view
    and load the page source into a new window.
    """

    def run(self, edit):
        # BUGFIX: the top-level ``import urllib`` does not make the
        # ``urllib.request`` submodule available on Python 3; import it
        # explicitly so urlopen() can be resolved.
        import urllib.request

        # The first selection region is taken as the URL to fetch.
        url = self.view.substr(self.view.sel()[0])
        if not url:
            return

        # BUGFIX: close the HTTP response deterministically instead of
        # leaking the connection.
        with urllib.request.urlopen(url) as response:
            output = str(response.read(), encoding='utf8')

        newView = sublime.active_window().new_file()
        newView.insert(edit, 0, output)
| mit | Python | |
430ad720ef43de501f3fb06901fbf5971cf920c6 | Create nowplaying.py | TingPing/plugins,TingPing/plugins | HexChat/nowplaying.py | HexChat/nowplaying.py | import dbus
import hexchat
__module_name__ = "NowPlaying"
__module_author__ = "TingPing"
__module_version__ = "0"
__module_description__ = "Announce Pithos or Audacious Songs"
# This is just me duct-taping two of my scripts together
# Don't expect adding players to this list.
np_help_msg = 'NP: Valid commands are (see /help for specifics):\n \
np [option] [value]\n \
audacious [option]\n \
audacious [option]\n \
pithos [option]\n \
(without args sends to default player)'
aud_help_msg = 'AUD: Valid commands are:\n \
info (prints song)\n \
(without args sends to chan)'
pithos_help_msg = 'PITHOS: Valid commands are:\n \
info (prints song)\n \
next (skips song)\n \
love\n \
hate\n \
(without args sends to chan)'
session_bus = dbus.SessionBus()
def get_player(name):
    """Return the D-Bus proxy object for *name* ('audacious' or 'pithos').

    Returns None (after printing a message) if the player is not running.
    NOTE(review): an unrecognized *name* leaves ``bus_object`` unbound and
    raises UnboundLocalError inside the try, which the except clause does
    not catch — callers must only pass the two supported names.
    """
    if name == 'audacious':
        bus_object = ['org.mpris.audacious', '/Player']
    elif name == 'pithos':
        bus_object = ['net.kevinmehall.Pithos', '/net/kevinmehall/Pithos']
    try:
        player = session_bus.get_object(bus_object[0], bus_object[1])
        return player
    except (dbus.exceptions.DBusException, TypeError):
        print('NP: Could not find player.')
        return None
def print_song(title, artist, album, echo=False):
    """Announce the current song through HexChat.

    With *echo* the song is only printed locally; otherwise it is sent to
    the channel as either a /say or a /me action, depending on the user's
    'np_say' plugin preference.
    """
    # TODO: customization
    if echo:
        cmd = 'echo NP: %s by %s on %s.'%(title, artist, album)
    elif hexchat.get_pluginpref('np_say'):
        cmd = 'say Now playing %s by %s on %s.'%(title, artist, album)
    else:
        cmd = 'me is now playing %s by %s on %s.'%(title, artist, album)
    hexchat.command(cmd)
def audacious_cb(word, word_eol, userdata):
    """Handle /audacious and /aud: announce or locally print the song.

    '/audacious info' prints locally; with no argument the song is sent to
    the channel.  Always eats the command so HexChat does not process it.
    """
    player = get_player('audacious')
    if not player:
        return hexchat.EAT_ALL
    song = player.GetMetadata()
    if len(word) > 1:
        if word[1].lower() == 'info':
            # Metadata keys may be absent if nothing is playing.
            try:
                print_song(song['title'], song['artist'], song['album'], echo=True)
            except KeyError:
                print('NP: Failed to get song information')
        else:
            print('Audacious: Valid commands are: info, or without args to announce')
    else:
        try:
            print_song(song['title'], song['artist'], song['album'])
        except KeyError:
            print('Audacious: Failed to get song information')
    return hexchat.EAT_ALL
def pithos_cb(word, word_eol, userdata):
    """Handle /pithos: announce, print, skip, love or ban the current song.

    NOTE(review): unlike audacious_cb, the metadata lookups here are not
    guarded against KeyError — confirm Pithos always supplies these keys.
    """
    player = get_player('pithos')
    if not player:
        return hexchat.EAT_ALL
    song = player.GetCurrentSong()
    if len(word) > 1:
        if word[1].lower() == 'info':
            print_song(song['title'], song['artist'], song['album'], echo=True)
        elif word[1].lower() == 'next':
            player.SkipSong()
        elif word[1].lower() == 'love':
            player.LoveCurrentSong()
        elif word[1].lower() == 'hate':
            player.BanCurrentSong()
        else:
            print('Pithos: Valid commands are: info, next, love, hate, or without args to announce')
    else:
        print_song(song['title'], song['artist'], song['album'])
    return hexchat.EAT_ALL
def np_cb(word, word_eol, userdata):
    """Handle /np: configure the plugin or announce via the default player.

    '/np default <player>' and '/np say <0|1>' store plugin preferences;
    a bare '/np' dispatches to the configured default player's callback.
    """
    if len(word) > 1:
        if len(word) > 2:
            if word[1].lower() == 'default':
                if hexchat.set_pluginpref('np_default', word[2]):
                    print('NP: Default set to %s' %word[2])
            elif word[1].lower() == 'say':
                # Accept only 0/1; anything else raises ValueError below.
                try:
                    if hexchat.set_pluginpref('np_say', bool(int(word[2]))):
                        print('NP: Say set to %r' %bool(int(word[2])))
                except ValueError:
                    print('NP: Setting must be a 1 or 0')
    else:
        default = hexchat.get_pluginpref('np_default').lower()
        if default == 'pithos':
            pithos_cb(word, word_eol, userdata)
        elif default == 'audacious':
            audacious_cb(word, word_eol, userdata)
        else:
            print('NP: No valid default set, use /np default <player> to set one')
    return hexchat.EAT_ALL
def unload_cb(userdata):
    """Print a notice when HexChat unloads this plugin."""
    print('{0} version {1} unloaded.'.format(__module_name__, __module_version__))
hexchat.hook_command("pithos", pithos_cb, help=pithos_help_msg)
hexchat.hook_command("audacious", audacious_cb, help=aud_help_msg)
hexchat.hook_command("aud", audacious_cb, help=aud_help_msg)
hexchat.hook_command("np", np_cb, help=np_help_msg)
hexchat.hook_unload(unload_cb)
hexchat.prnt(__module_name__ + ' version ' + __module_version__ + ' loaded.')
| mit | Python | |
a6edfaeb6fc8036c60d9c20bf871795cba198337 | Add unittests for new bridgedb.translations module. | mmaker/bridgedb,pagea/bridgedb,pagea/bridgedb,mmaker/bridgedb | lib/bridgedb/test/test_translations.py | lib/bridgedb/test/test_translations.py | # -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# :copyright: (c) 2014, Isis Lovecruft
# (c) 2014, The Tor Project, Inc.
# :license: 3-Clause BSD, see LICENSE for licensing information
from twisted.trial import unittest
from bridgedb import translations
from bridgedb.test.test_HTTPServer import DummyRequest
REALISH_HEADERS = {
b'Accept-Encoding': [b'gzip, deflate'],
b'User-Agent': [
b'Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'],
b'Accept': [
b'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'],
}
# Add this to the above REALISH_HEADERS to use it:
ACCEPT_LANGUAGE_HEADER = {
b'Accept-Language': [b'de-de,en-gb;q=0.8,en;q=0.5,en-us;q=0.3'],
}
class TranslationsMiscTests(unittest.TestCase):
    """Tests for module-level code in ``bridgedb.translations`` module."""

    def test_getLocaleFromHTTPRequest_withLangParam(self):
        """This request uses a '?lang=ar' param, without an 'Accept-Language'
        header.

        The request result should be: ['ar', 'en', 'en-US'].
        """
        request = DummyRequest([b"bridges"])
        request.headers.update(REALISH_HEADERS)
        request.args.update({
            b'transport': [b'obfs3',],
            b'lang': [b'ar',],
        })
        parsed = translations.getLocaleFromHTTPRequest(request)
        # The requested locale comes first, followed by the fallbacks.
        self.assertEqual(parsed[0], 'ar')
        self.assertEqual(parsed[1], 'en')
        self.assertEqual(parsed[2], 'en_US')
        self.assertEqual(len(parsed), 3)

    def test_getLocaleFromHTTPRequest_withLangParam_AcceptLanguage(self):
        """This request uses a '?lang=ar' param, with an 'Accept-Language'
        header which includes: ['de-de', 'en-gb', 'en', 'en-us'].

        The request result should be: ['fa', 'de-de', 'en-gb', 'en', 'en-us'].
        """
        request = DummyRequest([b"options"])
        request.headers.update(ACCEPT_LANGUAGE_HEADER)
        request.args.update({b'lang': [b'fa']})
        parsed = translations.getLocaleFromHTTPRequest(request)
        self.assertEqual(parsed[0], 'fa')
        self.assertEqual(parsed[1], 'en')
        self.assertEqual(parsed[2], 'en_US')
        # NOTE(review): the docstring expects Accept-Language locales such
        # as 'en-gb' in the result, but only three entries are asserted and
        # the 'en-gb' check is disabled below — confirm intended behavior.
        #self.assertEqual(parsed[3], 'en-gb')
        self.assertEqual(len(parsed), 3)

    def test_getLocaleFromPlusAddr(self):
        # A plain address carries no locale hint, so English is assumed.
        emailAddr = 'bridges@torproject.org'
        replyLocale = translations.getLocaleFromPlusAddr(emailAddr)
        self.assertEqual('en', replyLocale)

    def test_getLocaleFromPlusAddr_ar(self):
        # The '+ar' plus-address suffix selects the Arabic locale.
        emailAddr = 'bridges+ar@torproject.org'
        replyLocale = translations.getLocaleFromPlusAddr(emailAddr)
        self.assertEqual('ar', replyLocale)
| bsd-3-clause | Python | |
93d80604003e1b3b21498df01f7647e7cea69a5f | Add basic WinEvent object tests. | CybOXProject/python-cybox | cybox/test/objects/win_event_test.py | cybox/test/objects/win_event_test.py | # Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from cybox.objects.win_event_object import WinEvent
from cybox.test.objects import ObjectTestCase
class TestWinThread(ObjectTestCase, unittest.TestCase):
    # NOTE(review): the class name says "WinThread" but everything below
    # exercises WinEvent — presumably a copy-paste from the thread test;
    # consider renaming to TestWinEvent.
    object_type = "WindowsEventObjectType"
    klass = WinEvent

    # Round-trip fixture consumed by ObjectTestCase: a WinEvent with a
    # nested Windows handle.
    _full_dict = {
        'name': u"Object Open",
        'handle': {
            'name': u"Event Handle",
            'type': u"Event",
            'xsi:type': "WindowsHandleObjectType",
        },
        'type': u"Success",
        'xsi:type': "WindowsEventObjectType",
    }
| bsd-3-clause | Python | |
6c36729ec7d9f3345f0adb0c5deba13bd62262c6 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/6e84074d5cbda4900ea3f6cf29333e9f0140a661. | paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,karllessard/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_on
ce,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "6e84074d5cbda4900ea3f6cf29333e9f0140a661"
TFRT_SHA256 = "6764720c5901c26c6b92b5b16a1274a3184a7c169726ec3cbaa442242e432181"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "d5b707f0e4997a401fd14b4a94e4f670e03e502e"
TFRT_SHA256 = "24a0e9963489cb9f5443e9e4a670b0c9217fa032eac090275b2c353570d638df"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
05afa4481112f4cc890cfe3d6372044397b774c7 | Create set-timeZone-AsiaJakarta.py | agusmakmun/Some-Examples-of-Simple-Python-Script,agusmakmun/Some-Examples-of-Simple-Python-Script | timezone/set-timeZone-AsiaJakarta.py | timezone/set-timeZone-AsiaJakarta.py | >>> import os, time
>>> os.environ['TZ'] = 'Asia/Jakarta'
>>> time.tzset()
>>> time.strftime('%X %x %Z')
'12:26:07 10/15/15 WIB'
>>> print str(datetime.datetime.now()).split()[0]
2015-10-15
>>>
| agpl-3.0 | Python | |
4c71d31fc4adaec4a21f766530f459c911e03492 | add 0001 | Show-Me-the-Code/python,Show-Me-the-Code/python,Show-Me-the-Code/python,Show-Me-the-Code/python,Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Show-Me-the-Code/python,Yrthgze/prueba-sourcetree2,Show-Me-the-Code/python | Jaccorot/0001/0001.py | Jaccorot/0001/0001.py | #!/usr/local/bin/python
#coding=utf-8
#第 0001 题:做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
#使用 Python 如何生成 200 个激活码(或者优惠券)?
import uuid
def create_code(num, length):
#生成”num“个激活码,每个激活码含有”length“位
result = []
while True:
uuid_id = uuid.uuid1()
temp = str(uuid_id).replace('-', '')[:length]
if not temp in result:
result.append(temp)
if len(result) == num:
break
return result
print create_code(200, 20)
| mit | Python | |
ea64c4f44d5c386f9d9b43a8f122987b374b10d3 | Update the bot to use our new Yo emote | mileswwatkins/moxie,mileswwatkins/moxie,loandy/moxie,loandy/moxie,paultag/moxie,rshorey/moxie,rshorey/moxie,paultag/moxie,mileswwatkins/moxie,rshorey/moxie,loandy/moxie,paultag/moxie | moxie/butterfield.py | moxie/butterfield.py | import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import EventService
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
class LogService(EventService):
"""
Provide basic text logging using print()
"""
identifier = "moxie.cores.log.LogService"
def __init__(self, bot, *args, **kwargs):
self.bot = bot
super(LogService, self).__init__(*args, **kwargs)
@asyncio.coroutine
def log(self, component, message):
yield from self.send({
"component": component,
"message": message,
})
@asyncio.coroutine
def handle(self, message):
yield from self.bot.post(
"#cron", "[{component}]: {message}".format(**message))
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = EventService.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
return
elif text.strip().lower() in ("yo", ":yo:"):
yield from bot.post(
message['channel'], "<@{}>: :yo:".format(message['user']))
return
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
| import os
import json
import asyncio
from butterfield.utils import at_bot
from aiodocker import Docker
from aiocore import EventService
WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888")
class LogService(EventService):
"""
Provide basic text logging using print()
"""
identifier = "moxie.cores.log.LogService"
def __init__(self, bot, *args, **kwargs):
self.bot = bot
super(LogService, self).__init__(*args, **kwargs)
@asyncio.coroutine
def log(self, component, message):
yield from self.send({
"component": component,
"message": message,
})
@asyncio.coroutine
def handle(self, message):
yield from self.bot.post(
"#cron", "[{component}]: {message}".format(**message))
@asyncio.coroutine
def events(bot):
docker = Docker()
events = docker.events
events.saferun()
stream = events.listen()
while True:
el = yield from stream.get()
yield from bot.post("#cron", "`{}`".format(str(el)))
@asyncio.coroutine
@at_bot
def run(bot, message: "message"):
runner = EventService.resolve("moxie.cores.run.RunService")
text = message.get("text", "")
if text == "":
yield from bot.post(message['channel'], "Invalid request")
return
elif text.strip().lower() == "yo":
yield from bot.post(
message['channel'], "Yo <@{}>".format(message['user']))
return
cmd, arg = text.split(" ", 1)
if cmd == "run":
job = arg
yield from bot.post(
message['channel'], "Doing bringup of {}".format(job))
try:
yield from runner.run(job)
except ValueError as e:
yield from bot.post(
message['channel'],
"Gah, {job} failed - {e}".format(e=e, job=job)
)
return
yield from bot.post(message['channel'],
"Job {job} online - {webroot}/container/{job}/".format(
webroot=WEB_ROOT, job=job))
| mit | Python |
89d4c1420805a6f2e491f1ab250722cdcf950bd8 | Work towards integration NDTable <-> Minivect | seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core | ndtable/engine/mv.py | ndtable/engine/mv.py | import sys
import time
import numpy as np
from minivect import miniast
from minivect import specializers
from minivect import minitypes
from minivect.ctypes_conversion import get_data_pointer, \
get_pointer, convert_to_ctypes
from ndtable.datashape.coretypes import var_generator
from ndtable.expr.visitor import MroTransformer
context_debug = 0
class LazyLLVMContext(miniast.LLVMContext):
debug = context_debug
def stridesvar(self, variable):
return miniast.StridePointer(self.pos, minitypes.NPyIntp.pointer(),
variable)
#------------------------------------------------------------------------
# Blaze Pipeline
#------------------------------------------------------------------------
def setup(debug=0):
context = LazyLLVMContext()
builder = context.astbuilder
ccontext = miniast.CContext()
ccontext.debug = debug
return context, ccontext, builder
#------------------------------------------------------------------------
# Utils
#------------------------------------------------------------------------
def get_blaze_pointer(numpy_array, array_type):
dtype_pointer = array_type.dtype.pointer()
return numpy_array.ctypes.data_as(convert_to_ctypes(dtype_pointer))
def specialize(specializer_cls, ast, context):
specializers = [specializer_cls]
result = iter(context.run(ast, specializers)).next()
_, specialized_ast, _, code_output = result
return specialized_ast, code_output
#------------------------------------------------------------------------
# Mapper
#------------------------------------------------------------------------
class Minivect(MroTransformer):
def __init__(self, context):
self.builder = context.astbuilder
self.variables = []
def ArrayNode(self, node):
return node
def App(self, node):
lhs, rhs = self.visit(node.children)[0]
op = node.operator.op
return self.builder.binop(lhs.type, op, lhs, rhs)
def BinaryOp(self, node):
lhs, rhs = map(self.visit, node.children)
if isinstance(lhs, list):
lhs = self.visit(lhs)
if isinstance(rhs, list):
rhs = self.visit(rhs)
return lhs, rhs
def Literal(self, node):
minidtype = node.datashape.to_minitype()
variable = self.builder.variable(minidtype, str(id(node)))
variable.value = node.val
self.variables.append(variable)
return variable
| bsd-2-clause | Python | |
1146ab654c8b0d6f982f19bafed91f18edb877f3 | Add tests for the `dirty_unzip` rule | gogobebe2/thefuck,bigplus/thefuck,Aeron/thefuck,manashmndl/thefuck,subajat1/thefuck,PLNech/thefuck,princeofdarkness76/thefuck,mcarton/thefuck,lawrencebenson/thefuck,thinkerchan/thefuck,Clpsplug/thefuck,vanita5/thefuck,levythu/thefuck,BertieJim/thefuck,hxddh/thefuck,Clpsplug/thefuck,zhangzhishan/thefuck,barneyElDinosaurio/thefuck,lawrencebenson/thefuck,beni55/thefuck,subajat1/thefuck,mlk/thefuck,redreamality/thefuck,nvbn/thefuck,mlk/thefuck,roth1002/thefuck,PLNech/thefuck,beni55/thefuck,hxddh/thefuck,vanita5/thefuck,SimenB/thefuck,bigplus/thefuck,SimenB/thefuck,thesoulkiller/thefuck,mcarton/thefuck,bugaevc/thefuck,mbbill/thefuck,BertieJim/thefuck,AntonChankin/thefuck,sekaiamber/thefuck,nvbn/thefuck,MJerty/thefuck,thesoulkiller/thefuck,redreamality/thefuck,roth1002/thefuck,LawrenceHan/thefuck,manashmndl/thefuck,MJerty/thefuck,princeofdarkness76/thefuck,scorphus/thefuck,ostree/thefuck,artiya4u/thefuck,barneyElDinosaurio/thefuck,qingying5810/thefuck,scorphus/thefuck,thinkerchan/thefuck,ostree/thefuck,AntonChankin/thefuck,LawrenceHan/thefuck,NguyenHoaiNam/thefuck,levythu/thefuck | tests/rules/test_dirty_unzip.py | tests/rules/test_dirty_unzip.py | import os
import pytest
import zipfile
from thefuck.rules.dirty_unzip import match, get_new_command, side_effect
from tests.utils import Command
@pytest.fixture
def zip_error(tmpdir):
path = os.path.join(str(tmpdir), 'foo.zip')
def reset(path):
with zipfile.ZipFile(path, 'w') as archive:
archive.writestr('a', '1')
archive.writestr('b', '2')
archive.writestr('c', '3')
archive.extractall()
os.chdir(str(tmpdir))
reset(path)
assert(set(os.listdir('.')) == {'foo.zip', 'a', 'b', 'c'})
@pytest.mark.parametrize('script', [
'unzip foo',
'unzip foo.zip'])
def test_match(zip_error, script):
assert match(Command(script=script), None)
@pytest.mark.parametrize('script', [
'unzip foo',
'unzip foo.zip'])
def test_side_effect(zip_error, script):
side_effect(Command(script=script), None)
assert(os.listdir('.') == ['foo.zip'])
@pytest.mark.parametrize('script,fixed', [
('unzip foo', 'unzip foo -d foo'),
('unzip foo.zip', 'unzip foo.zip -d foo')])
def test_get_new_command(zip_error, script, fixed):
assert get_new_command(Command(script=script), None) == fixed
| mit | Python | |
fce66429cb361e9f113b3ec69031502128d192fb | Create OGR_PointIteration.py | leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing | OGR_PointIteration.py | OGR_PointIteration.py | from osgeo import ogr
import os
shapefile = "//mnt//hgfs//Biondo//GINF//Florestas_Parana//pr_300_f22.shp"
driver = ogr.GetDriverByName("ESRI Shapefile")
dataSource = driver.Open(shapefile, 0)
layer = dataSource.GetLayer()
for feature in layer:
geom = feature.GetGeometryRef()
pt = geom.Centroid()
pto = (pt.GetX(),pt.GetY())
print pto
| mit | Python | |
d3922992a4048dfff356ed91196c9c4d4d62bb08 | Create testcase.py | SAYONG/hairy-octo-nemesis,SAYONG/hairy-octo-nemesis | testcase.py | testcase.py | import unittest
from cal import Calculator
class TddInPythonExample(unittest.TestCase):
def test_calculator_add_method_returns_correct_result(self):
calc = Calculator()
result = calc.add(2,2)
self.assertEqual(4, result)
def test_calculator_returns_error_message_if_both_args_not_numbers(self):
calc = Calculator()
self.assertRaises(ValueError, calc.add, '2', '3')
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python | |
f83250d04fc4f98e97abcdc705046e5b412fc62e | add solution for Set Matrix Zeroes | zhyu/leetcode,zhyu/leetcode | src/setMatrixZeroes.py | src/setMatrixZeroes.py | class Solution:
# @param matrix, a list of lists of integers
# RETURN NOTHING, MODIFY matrix IN PLACE.
def setZeroes(self, matrix):
if not matrix:
return
n = len(matrix)
if n == 0:
return
m = len(matrix[0])
pos = None
for i in xrange(n):
for j in xrange(m):
if matrix[i][j] == 0:
if pos:
matrix[i][pos[1]] = 0
matrix[pos[0]][j] = 0
else:
pos = (i, j)
if not pos:
return
for i in xrange(n):
if i == pos[0]:
continue
for j in xrange(m):
if j == pos[1]:
continue
if matrix[pos[0]][j] == 0 or matrix[i][pos[1]] == 0:
matrix[i][j] = 0
for i in xrange(n):
matrix[i][pos[1]] = 0
for j in xrange(m):
matrix[pos[0]][j] = 0
| mit | Python | |
3926420273dce58076a8ba148e87f5ef268a2846 | add ie6 specific Focus.focus function | jaredly/pyjamas,jaredly/pyjamas,jaredly/pyjamas,jaredly/pyjamas | library/__ie6__/pyjamas/ui/Focus.py | library/__ie6__/pyjamas/ui/Focus.py | # emulate behaviour of other browsers
def focus(elem):
JS("""
try {
elem.focus();
} catch (e) {
// Only trap the exception if the attempt was mostly legit
if (!elem || !elem.focus) {
// Rethrow the probable NPE or invalid type
throw e;
}
}
""")
| apache-2.0 | Python | |
b219f44c754aafb959217ecf4199c8dc23a41ab1 | Create __init__.py | andrewchenshx/vnpy,bigdig/vnpy,bigdig/vnpy,andrewchenshx/vnpy,msincenselee/vnpy,bigdig/vnpy,vnpy/vnpy,vnpy/vnpy,msincenselee/vnpy,bigdig/vnpy,andrewchenshx/vnpy,andrewchenshx/vnpy,msincenselee/vnpy,andrewchenshx/vnpy,msincenselee/vnpy | vnpy/app/risk_manager/ui/__init__.py | vnpy/app/risk_manager/ui/__init__.py | from .widget import RiskManager
| mit | Python | |
b92f204174e8d14fe4912631f5bc1305d7cd9550 | add choose.py; hardly anything in it | ieuan1630-cmis/ieuan1630-cmis-cs2 | choose.py | choose.py | def main(): #get input and if input is bingo return true
x = raw_input("Type a word: ")
return bool(x == "bingo" or x == "Bingo")
print main()
| cc0-1.0 | Python | |
8188a8327b6269cdb2ae84fd97b19eac50914731 | add time module | anlutro/allib.py | allib/time.py | allib/time.py | import math
def format_timedelta(timedelta, short=True):
"""
Format a timedelta into a human-readable string.
"""
seconds = abs(timedelta.total_seconds())
days = max(0, math.floor(seconds / (3600 * 24)))
seconds -= 3600 * 24 * days
hours = max(0, math.floor(seconds / 3600))
seconds -= 3600 * hours
minutes = max(0, math.floor(seconds / 60))
seconds -= 60 * minutes
parts = []
if days > 0:
parts.append('%d%s' % (days, 'd' if short else ' days'))
if hours > 0:
parts.append('%d%s' % (hours, 'h' if short else ' hours'))
if days == 0 and minutes > 0:
parts.append('%d%s' % (minutes, 'm' if short else ' minutes'))
if days == 0 and hours == 0 and seconds > 0:
parts.append('%d%s' % (seconds, 's' if short else ' seconds'))
return ' '.join(parts) if parts else ''
| mit | Python | |
178d4f3c13fa4baba4a0cb3bcadfa983ee1246ae | Add zenodo harvester | CenterForOpenScience/scrapi,alexgarciac/scrapi,fabianvf/scrapi,jeffreyliu3230/scrapi,fabianvf/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,erinspace/scrapi,icereval/scrapi,mehanig/scrapi,felliott/scrapi,ostwald/scrapi,mehanig/scrapi | scrapi/harvesters/zenodo.py | scrapi/harvesters/zenodo.py | '''
Harvester for the ASU Digital Repository for the SHARE project
Example API call: https://zenodo.org/oai2d?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ZenodoHarvester(OAIHarvester):
short_name = 'zenodo'
long_name = 'Zenodo'
url = 'https://zenodo.org/oai2d'
base_url = 'https://zenodo.org/oai2d'
property_list = ['language', 'rights', 'source', 'relation', 'date', 'identifier', 'type']
timezone_granularity = True
| apache-2.0 | Python | |
0749111442c638569b6e42a11adee70e71e50813 | Add an helper class to write pexpect-based test cases Over time, we should improve this class and port all pexpect based testing over to using this | llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb | test/lldbpexpect.py | test/lldbpexpect.py | import lldb
from lldbtest import *
import lldbutil
import os
import unittest2
import sys
import pexpect
class PExpectTest(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
def doTest(self):
# put your commands here
pass
def launchArgs(self):
return ""
def launch(self):
self.timeout = 5
self.child = pexpect.spawn('%s %s' % self.lldbHere, self.launchArgs())
def expect(self, patterns=None, timeout=None):
if patterns is None: patterns = '.*'
return self.child.expect(patterns, timeout=timeout)
def sendimpl(self, sender, command, patterns=None, timeout=None):
if timeout is None: timeout = self.timeout
sender(command)
if patterns is not None: return self.expect(patterns=patterns, timeout=timeout)
return None
def send(self, command, patterns=None, timeout=None):
self.sendimpl(self.child.send, command, patterns, timeout)
def sendline(self, command, patterns=None, timeout=None):
self.sendimpl(self.child.sendline, command, patterns, timeout)
def quit(self, gracefully=None):
if gracefully is None: gracefully = True
self.child.sendeof()
self.child.close(force=not gracefully)
self.child = None
| apache-2.0 | Python | |
305c3e0ce2705dd23e00ec801f5588ec1dbcc3a8 | Add py solution for 167. Two Sum II - Input array is sorted | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | py/two-sum-ii-input-array-is-sorted.py | py/two-sum-ii-input-array-is-sorted.py | class Solution(object):
def twoSum(self, numbers, target):
"""
:type numbers: List[int]
:type target: int
:rtype: List[int]
"""
head, tail = 0, len(numbers) - 1
while head < tail:
s = numbers[head] + numbers[tail]
if s == target:
return [head + 1, tail + 1]
elif s < target:
head += 1
elif s > target:
tail -= 1
| apache-2.0 | Python | |
b41fc9f56fbeef66612647605b8cbc144e93e66a | add example on how to run parallel func | chiahaoliu/pdf_lib | pdf_lib/lib_build.py | pdf_lib/lib_build.py | # script to execute database building
from pdf_lib.parallel_func import (save_data, learninglib_build,
map_learninglib)
from ipyparallel import Client
rc = Client()
dview = rc[:]
def run_build(cif_dir):
fn_list = sorted([f for f in os.listdir(cif_dir) if f.endswith('.cif')])
full_fn_list = list(map(lambda x: os.path.join(cif_dir, x), fn_list))
rv = dview.apply_async(learninglib_build, full_fn_list)
save_data(rv)
| mit | Python | |
a7342cf25b850d310b0607f46a86037b26a8ecc4 | Create to_html.py | pactekviz/awesome-datascience-colleges,ryanswanstrom/awesome-datascience-colleges,Deerluluolivia/awesome-datascience-colleges,nitishmadhukar/awesome-datascience-colleges,JeffJetton/awesome-datascience-colleges,strasser/awesome-datascience-colleges | to_html.py | to_html.py | import pandas as pd
in_file = 'https://raw.githubusercontent.com/ryanswanstrom/awesome-datascience-colleges/master/data_science_colleges.csv'
## Read in the file
df = pd.read_csv(in_file)
# get a full degree name
df['DEGREE_FULL'] = df[['DEGREE']].apply(lambda x:
('Bachelors' if x[0]=='B' else 'Masters' if x[0]=='M' else 'Certificate' if x[0]=='C' else 'Doctorate' if x[0]=='D' else 'UNK'), axis=1)
df['html'] = df[['SCHOOL','PROGRAM','DEGREE_FULL','ONLINE','ONCAMPUS','URL']].apply(lambda x:
'<tr><td><b>{0}</b>[<a title="{0}-{1}" href="{5}" target="_blank">{1}</a>] </td><td>{2} </td><td>{3}/{4} </td></tr>'.format(x[0],x[1],x[2],x[3],x[4],x[5]), axis=1)
# write out the output
f = open('html.dat','w')
for row in df['html']:
f.write(row)
f.close()
| mit | Python | |
a26303847f8133ea037c7629c4949d061457f7d2 | Add a new module to import configuration data (hosts/services) from a MySQL database | lets-software/shinken,kaji-project/shinken,xorpaul/shinken,xorpaul/shinken,rednach/krill,Simage/shinken,peeyush-tm/shinken,baloo/shinken,staute/shinken_deb,savoirfairelinux/shinken,dfranco/shinken,staute/shinken_package,rednach/krill,mohierf/shinken,titilambert/alignak,fpeyre/shinken,tal-nino/shinken,naparuba/shinken,staute/shinken_deb,gst/alignak,staute/shinken_deb,fpeyre/shinken,dfranco/shinken,kaji-project/shinken,rednach/krill,savoirfairelinux/shinken,geektophe/shinken,claneys/shinken,naparuba/shinken,KerkhoffTechnologies/shinken,tal-nino/shinken,Alignak-monitoring/alignak,titilambert/alignak,xorpaul/shinken,lets-software/shinken,naparuba/shinken,geektophe/shinken,ddurieux/alignak,geektophe/shinken,naparuba/shinken,peeyush-tm/shinken,dfranco/shinken,h4wkmoon/shinken,fpeyre/shinken,rednach/krill,staute/shinken_deb,baloo/shinken,lets-software/shinken,geektophe/shinken,tal-nino/shinken,KerkhoffTechnologies/shinken,claneys/shinken,h4wkmoon/shinken,Simage/shinken,claneys/shinken,KerkhoffTechnologies/shinken,KerkhoffTechnologies/shinken,ddurieux/alignak,Aimage/shinken,tal-nino/shinken,savoirfairelinux/shinken,gst/alignak,peeyush-tm/shinken,ddurieux/alignak,staute/shinken_deb,dfranco/shinken,mohierf/shinken,rledisez/shinken,kaji-project/shinken,geektophe/shinken,xorpaul/shinken,savoirfairelinux/shinken,tal-nino/shinken,claneys/shinken,claneys/shinken,savoirfairelinux/shinken,baloo/shinken,KerkhoffTechnologies/shinken,gst/alignak,gst/alignak,staute/shinken_deb,rledisez/shinken,kaji-project/shinken,xorpaul/shinken,rledisez/shinken,Simage/shinken,Aimage/shinken,fpeyre/shinken,Simage/shinken,mohierf/shinken,dfranco/shinken,tal-nino/shinken,rednach/krill,rledisez/shinken,naparuba/shinken,claneys/shinken,Simage/shinken,naparuba/shinken,baloo/shinken,titilambert/alignak,lets-software/shinken,staute/shinken_package,baloo/shinken,ddurieux/alignak,sta
ute/shinken_package,Aimage/shinken,xorpaul/shinken,ddurieux/alignak,ddurieux/alignak,Aimage/shinken,rednach/krill,Simage/shinken,h4wkmoon/shinken,fpeyre/shinken,staute/shinken_package,titilambert/alignak,mohierf/shinken,Aimage/shinken,lets-software/shinken,h4wkmoon/shinken,staute/shinken_package,xorpaul/shinken,mohierf/shinken,kaji-project/shinken,Alignak-monitoring/alignak,dfranco/shinken,lets-software/shinken,baloo/shinken,geektophe/shinken,KerkhoffTechnologies/shinken,rledisez/shinken,kaji-project/shinken,h4wkmoon/shinken,h4wkmoon/shinken,h4wkmoon/shinken,kaji-project/shinken,peeyush-tm/shinken,staute/shinken_package,xorpaul/shinken,peeyush-tm/shinken,Aimage/shinken,peeyush-tm/shinken,savoirfairelinux/shinken,mohierf/shinken,h4wkmoon/shinken,fpeyre/shinken,rledisez/shinken | shinken/modules/mysql_import_arbiter.py | shinken/modules/mysql_import_arbiter.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#Copyright (C) 2009 Gabes Jean, naparuba@gmail.com
#
#This file is part of Shinken.
#
#Shinken is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#Shinken is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU Affero General Public License for more details.
#
#You should have received a copy of the GNU Affero General Public License
#along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#This module imports hosts and services configuration from a MySQL Database
#Queries for getting hosts and services are pulled from shinken-specific.cfg configuration file.
import MySQLdb
from shinken.basemodule import BaseModule
#This text is print at the import
print "Detected module : MySQL importer for Arbiter"
properties = {
'daemons' : ['arbiter'],
'type' : 'mysql_import',
'external' : False,
'phases' : ['configuration'],
}
#called by the plugin manager to get a broker
def get_instance(plugin):
print "[MySQL Importer Module] : Get MySQL importer instance for plugin %s" % plugin.get_name()
host = plugin.host
login = plugin.login
password = plugin.password
database = plugin.database
reqhosts = plugin.reqhosts
reqservices = plugin.reqservices
instance = MySQL_importer_arbiter(plugin, host, login, password, database,reqhosts,reqservices)
return instance
#Retrieve hosts from a MySQL database
class MySQL_importer_arbiter(BaseModule):
def __init__(self, mod_conf, host, login, password, database, reqhosts,reqservices):
BaseModule.__init__(self, mod_conf)
self.host = host
self.login = login
self.password = password
self.database = database
self.reqhosts = reqhosts
self.reqservices = reqservices
#Called by Arbiter to say 'let's prepare yourself guy'
def init(self):
print "[MySQL Importer Module] : Try to open a MySQL connection to %s" % self.host
try:
self.conn = MySQLdb.connect (host = self.host,
user = self.login,
passwd = self.password,
db = self.database)
except MySQLdb.Error, e:
print "MySQL Module : Error %d: %s" % (e.args[0], e.args[1])
raise
print "[MySQL Importer Module] : Connection opened"
#Main function that is called in the CONFIGURATION phase
def get_objects(self):
if not hasattr(self, 'conn'):
print "[MySQL Importer Module] : Problem during init phase"
return {}
r = {'hosts' : []}
result_set = {}
cursor = self.conn.cursor (MySQLdb.cursors.DictCursor)
print "[MySQL Importer Module] : getting hosts configuration from database"
try:
cursor.execute (self.reqhosts)
result_set = cursor.fetchall ()
except MySQLdb.Error, e:
print "MySQL Module : Error %d: %s" % (e.args[0], e.args[1])
for row in result_set:
h = {}
for column in row:
if row[column]:
h[column]= row[column]
r['hosts'].append(h)
print "[MySQL Importer Module] : getting services configuration from database"
r['services'] = []
try:
cursor.execute (self.reqservices)
result_set = cursor.fetchall ()
except MySQLdb.Error, e:
print "MySQL Module : Error %d: %s" % (e.args[0], e.args[1])
for row in result_set:
h = {}
for column in row:
if row[column]:
h[column]= row[column]
r['services'].append(h)
cursor.close ()
self.conn.close ()
del self.conn
print "[MySQL Importer Module] : Returning to Arbiter the object:", r
return r
| agpl-3.0 | Python | |
3bf0a1c0752be428d3259a6639be205f019751d5 | Add lc0056_merge_intervals.py | bowen0701/algorithms_data_structures | lc0056_merge_intervals.py | lc0056_merge_intervals.py | """Leetcode 56. Merge Intervals
Medium
URL: https://leetcode.com/problems/merge-intervals/
Given a collection of intervals, merge all overlapping intervals.
Example 1:
Input: [[1,3],[2,6],[8,10],[15,18]]
Output: [[1,6],[8,10],[15,18]]
Explanation: Since intervals [1,3] and [2,6] overlaps, merge them into [1,6].
Example 2:
Input: [[1,4],[4,5]]
Output: [[1,5]]
Explanation: Intervals [1,4] and [4,5] are considered overlapping.
"""
class Solution(object):
def merge(self, intervals):
"""
:type intervals: List[List[int]]
:rtype: List[List[int]]
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python | |
f8e24bf955eb70535b989aad6ab8666ddd013da1 | Add in first py.test tests. | clalancette/pycdlib,clalancette/pyiso | tests/test_basic.py | tests/test_basic.py | import pytest
import subprocess
import os
import sys
prefix = '.'
for i in range(0,3):
if os.path.exists(os.path.join(prefix, 'pyiso.py')):
sys.path.insert(0, prefix)
break
else:
prefix = '../' + prefix
import pyiso
def test_nofiles(tmpdir):
# First set things up, and generate the ISO with genisoimage
outfile = tmpdir.join("no-file-test.iso")
indir = tmpdir.mkdir("nofile")
subprocess.call(["genisoimage", "-v", "-v", "-iso-level", "1", "-no-pad",
"-o", str(outfile), str(indir)])
iso = pyiso.PyIso()
iso.open(open(str(outfile), 'rb'))
# With no files, the ISO should be exactly 24 extents long
assert(iso.pvd.space_size == 24)
assert(iso.pvd.log_block_size == 2048)
assert(iso.pvd.path_tbl_size == 10)
| lgpl-2.1 | Python | |
8a2891aa7b4aaf68655654f19998fe3dbc25db38 | Create tree_de.py | praveendareddy21/my-repo,praveendareddy21/my-repo | tree_de.py | tree_de.py | class nod(object):
def __init__(self ,elem):
self.elem=elem
self.le=None
self.ri=None
def setle(self ,le):
self.le=le
def getle(self):
return self.le
def setri(self ,ri):
self.ri=ri
def getri(self):
return self.ri
class dll(object):
def __init__(self,elem):
self.elem=elem
self.le=None
self.ri=None
def setle(self ,le):
self.le=le
def getle(self):
return self.le
def setri(self ,ri):
self.ri=ri
def getri(self):
return self.ri
s=nod(12)
b=nod(7)
c=nod(14)
s.setle(b)
s.setri(c)
h=nod(9)
b.setri(h)
l=nod(13)
m=nod(16)
c.setle(l)
c.setri(m)
#print s.getle().elem
print s.__dict__
def trav(obj ,d):
if obj.le is not None :
trav(obj.getle(),d)
print obj.elem
if obj.ri is not None :
trav(obj.getri(),d)
else :
t=d
while t.ri is not None:
t=t.getri()
t.ri=obj
obj.ri=None
obj.le=t
d=dll('root')
trav(s ,d)
t=d
while t.ri is not None:
print t.elem
t=t.getri()
print t.elem
while t.le is not None :
print t.elem
t=t.getle()
| mit | Python | |
9c6f0cf829f4f0c7ff71ad65bed36269425dae13 | Add test for zoom backend | python-social-auth/social-core,python-social-auth/social-core | social_core/tests/backends/test_zoom.py | social_core/tests/backends/test_zoom.py | import json
from .oauth import OAuth2Test
class ZoomOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.zoom.ZoomOAuth2'
user_data_url = 'https://api.zoom.us/v2/users/me'
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar-token',
'token_type': 'bearer',
'refresh_token': 'foobar-refresh-token',
'expires_in': 3599,
'scope': 'identity'
})
user_data_body = json.dumps({
'id': 'foobar',
'first_name': 'Foo',
'last_name': 'Bar',
'email': 'foobar@email.com',
'type': 2,
'role_name': 'Foobar',
'pmi': 1234567890,
'use_pmi': False,
'vanity_url': 'https://foobar.zoom.us/my/foobar',
'personal_meeting_url': 'https://foobar.zoom.us/j/1234567890',
'timezone': 'America/Denver',
'verified': 1,
'dept': '',
'created_at': '2019-04-05T15:24:32Z',
'last_login_time': '2019-12-16T18:02:48Z',
'last_client_version': 'version',
'pic_url': 'https://foobar.zoom.us/p/123456789',
'host_key': '123456',
'jid': 'foobar@xmpp.zoom.us',
'group_ids': [],
'im_group_ids': [
'foobar-group-id'
],
'account_id': 'foobar-account-id',
'language': 'en-US',
'phone_country': 'US',
'phone_number': '+1 1234567891',
'status': 'active'
})
refresh_token_body = json.dumps({
'access_token': 'foobar-new-token',
'token_type': 'bearer',
'refresh_token': 'foobar-new-refresh-token',
'expires_in': 3599,
'scope': 'identity'
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
def test_refresh_token(self):
user, social = self.do_refresh_token()
self.assertEqual(social.extra_data['access_token'], 'foobar-new-token')
| bsd-3-clause | Python | |
6824c741c455339eaaff5481f6e84c42fe1e26cf | Copy of main.py from fergal, reworked | barentsen/dave,barentsen/dave,barentsen/dave,barentsen/dave | susanplay/mainSusan.py | susanplay/mainSusan.py |
"""
This is a template top level script.
Please don't edit this file. Instead, copy it to
youname_main.py, then run and edit that file.
"""
import dave.pipeline.pipeline as dpp
import dave.pipeline.clipboard as clipboard
def main():
"""A bare bones main program"""
cfg = loadMyConfiguration()
epicList = [206103150]
for epic in epicList:
runOne(epic, cfg)
def loadMyConfiguration():
    """Load the default pipeline configuration and adjust as necessary
    """
    config = dpp.loadDefaultConfig()

    # Edit the default configuration to your taste.
    # Change anything else you don't like about the default config here.
    config['debug'] = True

    # Ordered list of pipeline tasks; split() turns the block of names
    # into a list of strings.
    config['taskList'] = """dpp.checkDirExistTask dpp.serveTask dpp.extractLightcurveTask
dpp.computeCentroidsTask dpp.rollPhaseTask dpp.cotrendDataTask
dpp.detrendDataTask dpp.blsTask dpp.trapezoidFitTask dpp.lppMetricTask
dpp.modshiftTask dpp.measureDiffImgCentroidsTask dpp.dispositionTask
dpp.plotTask dpp.saveOnError""".split()
    return config
def runOne(k2id, config):
    """Run the pipeline on a single target.

    Inputs:
    ------------
    k2id
        (int) Epic id of target to run on.

    config
        (dict) Dictionary of configuration parameters as created by, e.g
        loadMyConfiguration()

    Returns:
    ---------
    A clipboard containing the results.

    Notes:
    ---------
    Don't edit this function. The pipeline can recover gracefully from
    errors in any individual task, but an error in this function will crash
    the pipeline
    """
    task_names = config['taskList']

    clip = clipboard.Clipboard()
    clip['config'] = config
    clip['value'] = k2id

    # Fail fast: make sure every task name resolves before running any.
    for name in task_names:
        eval(name)

    # Run the tasks in order, threading the clipboard through each one.
    for name in task_names:
        clip = eval(name)(clip)
    return clip
| mit | Python | |
a4a956899008102b993d2268fbf6ae92d191ee6a | Test suite for Wikipedia triggers | ch3nkula/ifttt,ch3nkula/ifttt,ch3nkula/ifttt | ifttt/ifttt-tests.py | ifttt/ifttt-tests.py | # -*- coding: utf-8 -*-
"""
Wikipedia channel for IFTTT
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Copyright 2015 Ori Livneh <ori@wikimedia.org>
Stephen LaPorte <stephen.laporte@gmail.com>
Alangi Derick <alangiderick@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from triggers import *
def test_aotd_trigger():
    """Test suite for Article of the Day trigger"""
    # TODO: placeholder -- no assertions have been written yet.
    pass
4a8c3043962efa7e2a443a10a0ad13d025699730 | Add script to extract useful lsf job information | Rfam/rfam-production,Rfam/rfam-production,Rfam/rfam-production | support/get_lsf_job_info.py | support/get_lsf_job_info.py | import os
import sys
from subprocess import Popen, PIPE
# --------------------------------------------------------------------------------------------------
def get_job_run_time(lsf_output_file, time='s'):
    """Parse the run time out of an LSF job output file.

    Args:
        lsf_output_file: path to the LSF stdout file (e.g. auto_rfsearch.out)
        time: 's' to return seconds (default), 'm' for whole minutes

    Returns:
        int: the job's run time, or 0 if no "Run time" line is found.
    """
    run_time = 0
    # Parse the file directly instead of shelling out to grep; the original
    # also opened a file handle it never closed -- `with` fixes the leak.
    with open(lsf_output_file, 'r') as fp:
        for line in fp:
            # LSF reports e.g. "    Run time :          125 sec."
            if 'Run time' in line:
                run_time = int(line.split(" ")[-2])
                break
    if time == 'm':
        # Floor division matches the original Python 2 `/` on ints.
        run_time = run_time // 60
    return run_time
# --------------------------------------------------------------------------------------------------
def get_job_max_used_memory(lsf_output_file):
    """Parse the maximum memory usage out of an LSF job output file.

    Args:
        lsf_output_file: path to the LSF stdout file

    Returns:
        int: the "Max Memory" value (MB as reported by LSF), or 0 if the
        line is not present.
    """
    max_memory = 0
    # Pure-Python parse; the original leaked an open file handle and
    # spawned a grep subprocess for a simple substring search.
    with open(lsf_output_file, 'r') as fp:
        for line in fp:
            # LSF reports e.g. "    Max Memory :          300 MB"
            if 'Max Memory' in line:
                max_memory = int(line.split(" ")[-2])
                break
    return max_memory
# --------------------------------------------------------------------------------------------------
if __name__ == '__main__':
    # Usage: python get_lsf_job_info.py <source_dir>
    # Each sub-directory of source_dir holds one family's LSF output.
    source_dir = sys.argv[1]
    families = [x for x in os.listdir(source_dir) if os.path.isdir(os.path.join(source_dir, x))]
    for family in families:
        family_dir = (os.path.join(source_dir, family))
        lsf_output_file = os.path.join(family_dir, "auto_rfsearch.out")
        run_time = get_job_run_time(lsf_output_file, time='m')
        memory = get_job_max_used_memory(lsf_output_file)
        # family <TAB> run time (minutes) <TAB> max memory (MB)
        print "%s\t%s\t%s" % (family, run_time, memory)
| apache-2.0 | Python | |
52d947daa8ea6642472660d0c16c2b05e34bea41 | Add migration file for the model of CoC record | pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016 | src/users/migrations/0010_cocrecord.py | src/users/migrations/0010_cocrecord.py | # Generated by Django 3.0.2 on 2020-02-23 11:12
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Applies on top of the latest existing users migration.
    dependencies = [
        ('users', '0009_auto_20160227_1656'),
    ]

    operations = [
        migrations.CreateModel(
            name='CocRecord',
            # One CoC record per user: the user FK doubles as primary key.
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL, verbose_name='user')),
                ('coc_version', models.CharField(max_length=15, validators=[django.core.validators.RegexValidator('^202[\\d].[\\d]+$', 'Not a valid CoC version')], verbose_name='latest agreed CoC version')),
            ],
        ),
    ]
| mit | Python | |
bf55611be1b4c3799ab3e14bdcc1b46e96cffe24 | Add try_f test | codeforamerica/Change-By-Us,localprojects/Change-By-Us,localprojects/Change-By-Us,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,localprojects/Change-By-Us,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,codeforamerica/Change-By-Us,codeforamerica/Change-By-Us,localprojects/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,codeforeurope/Change-By-Us | tests/util-tests.py | tests/util-tests.py | import unittest, sys, os
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
import framework.util as util
class UtilTests (unittest.TestCase):
    def test_try_f(self):
        """try_f applies a callable and falls back to the default on error."""
        self.assertEqual(util.try_f(int, "10", -1), 10, "Did not convert string to int.")
        self.assertEqual(util.try_f(int, "foo", -1), -1, "Did not get given default value as expected.")
        # With no default supplied the fallback is None.
        self.assertEqual(util.try_f(int, "foo"), None, "Did not get None as expected.")
        self.assertEqual(util.try_f(str, 10, "ten"), "10", "Did not convert int to string.")
if __name__ == "__main__":
    # Allow running this file directly: python util-tests.py
    unittest.main()
b7bc68872a45396358ce20a215e3a3a2c3734b8a | Add pretend ram module to try out the progress bar | guiniol/py3status,guiniol/py3status | py3status/modules/pretend_ram.py | py3status/modules/pretend_ram.py | # -*- coding: utf-8 -*-
from __future__ import division
import random
class Py3status:
    """
    Fake RAM gauge: sweeps a value between 0 and 100 so the py3status
    progress bar can be exercised without reading real memory stats.
    """
    # Module options read by py3status.
    format = "{bar}"
    thresholds = [(0, "good"), (40, "degraded"), (75, "bad")]
    cache_timeout = 1
    middle_char = '|'
    middle_color = None
    left_char = '|'
    left_color = None
    right_char = '|'
    right_color = None
    length = 10

    def post_config_hook(self):
        # Start the sweep at zero, moving upwards.
        self.increasing = True
        self.value = 0

    def testBars(self):
        """Render one frame of the sweeping progress bar."""
        step = random.randint(1, 10)
        if self.increasing:
            self.value += step
            if self.value > 99:
                self.value = 100
                self.increasing = False
        else:
            self.value -= step
            if self.value < 1:
                self.value = 0
                self.increasing = True
        bar = self.py3.progress_bar(
            self.value, length=self.length,
            middle_char=self.middle_char, middle_color=self.middle_color,
            left_char=self.left_char, left_color=self.left_color,
            right_char=self.right_char, right_color=self.right_color
        )
        return {
            'cached_until': self.py3.time_in(self.cache_timeout),
            'full_text': self.py3.safe_format(self.format, {'bar': bar}),
        }
if __name__ == "__main__":
    """
    Run module in test mode.
    """
    # Drive the module standalone through py3status's test harness.
    from py3status.module_test import module_test
    module_test(Py3status)
| bsd-3-clause | Python | |
a90eb2777c7a6e80c8a57fec7f5d5b2665339d50 | Add fixed_rank.py module for fixed rank matrices | tingelst/pymanopt,nkoep/pymanopt,pymanopt/pymanopt,pymanopt/pymanopt,j-towns/pymanopt,nkoep/pymanopt,nkoep/pymanopt | pymanopt/manifolds/fixed_rank.py | pymanopt/manifolds/fixed_rank.py | """
Module containing manifolds of fixed rank matrices.
"""
from pymanopt.manifolds.manifold import Manifold
class FixedRankEmbedded(Manifold):
"""
Manifold struct to optimize fixed-rank matrices w/ an embedded geometry.
FixedRankEmbedded(m, n, k)
Manifold of m-by-n real matrices of fixed rank k. This follows the
embedded geometry described in Bart Vandereycken's 2013 paper:
"Low-rank matrix completion by Riemannian optimization".
Paper link: http://arxiv.org/pdf/1209.3834.pdf
A point X on the manifold is represented as a structure with three
fields: U, S and V. The matrices U (mxk) and V (nxk) are orthonormal,
while the matrix S (kxk) is any diagonal, full rank matrix.
Following the SVD formalism, X = U*S*V'. Note that the diagonal entries
of S are not constrained to be nonnegative.
Tangent vectors are represented as a structure with three fields: Up, M
and Vp. The matrices Up (mxk) and Vp (nxk) obey Up'*U = 0 and Vp'*V = 0.
The matrix M (kxk) is arbitrary. Such a structure corresponds to the
following tangent vector in the ambient space of mxn matrices:
Z = U*M*V' + Up*V' + U*Vp'
where (U, S, V) is the current point and (Up, M, Vp) is the tangent
vector at that point.
Vectors in the ambient space are best represented as mxn matrices. If
these are low-rank, they may also be represented as structures with
U, S, V fields, such that Z = U*S*V'. There are no restrictions on what
U, S and V are, as long as their product as indicated yields a real, mxn
matrix.
The chosen geometry yields a Riemannian submanifold of the embedding
space R^(mxn) equipped with the usual trace (Frobenius) inner product.
Please cite the Manopt paper as well as the research paper:
@Article{vandereycken2013lowrank,
Title = {Low-rank matrix completion by {Riemannian} optimization},
Author = {Vandereycken, B.},
Journal = {SIAM Journal on Optimization},
Year = {2013},
Number = {2},
Pages = {1214--1236},
Volume = {23},
Doi = {10.1137/110845768}
}
See also: fixedrankfactory_2factors fixedrankfactory_3factors
This file is based on fixedrankembeddedfactory from Manopt: www.manopt.org.
Ported by: Jamie Townsend, Sebastian Weichwald
Original author: Nicolas Boumal, Dec. 30, 2012.
Contributors:
Change log:
Feb. 20, 2014 (NB):
Added function tangent to work with checkgradient.
June 24, 2014 (NB):
A couple modifications following
Bart Vandereycken's feedback:
- The checksum (hash) was replaced for a faster alternative: it's a
bit less "safe" in that collisions could arise with higher
probability, but they're still very unlikely.
- The vector transport was changed.
The typical distance was also modified, hopefully giving the
trustregions method a better initial guess for the trust region
radius, but that should be tested for different cost functions too.
July 11, 2014 (NB):
Added ehess2rhess and tangent2ambient, supplied by Bart.
July 14, 2014 (NB):
Added vec, mat and vecmatareisometries so that hessianspectrum now
works with this geometry. Implemented the tangent function.
Made it clearer in the code and in the documentation in what format
ambient vectors may be supplied, and generalized some functions so
that they should now work with both accepted formats.
It is now clearly stated that for a point X represented as a
triplet (U, S, V), the matrix S needs to be diagonal.
"""
def __init__(self, m, n, k):
    """Create the manifold of m-by-n real matrices of fixed rank k."""
    self._m = m  # number of rows
    self._n = n  # number of columns
    self._k = k  # fixed rank
    self._name = ("Manifold of {m}-by-{n} matrices with rank {k} and "
                  "embedded geometry".format(m=m, n=n, k=k))
def __str__(self):
    """Human-readable name of the manifold."""
    return self._name
@property
def dim(self):
    """Dimension of the manifold: k * (m + n - k)."""
    m, n, k = self._m, self._n, self._k
    return k * (m + n - k)
@property
def typicaldist(self):
    """A rough distance scale on the manifold, proportional to the rank."""
    return self._k * 10
def dist(self, X, Y):
    """Geodesic distance between X and Y; not implemented for this
    embedded geometry."""
    raise NotImplementedError()
def inner(self, X, G, H):
| bsd-3-clause | Python | |
10b0b790ca503bcabf203e6cdf8cd3ebc0bcfa5a | Create plot_log.py | prlz77/ResNeXt.pytorch | plot_log.py | plot_log.py | import re
import matplotlib.pyplot as plt
if __name__=='__main__':
    # Parse the training log and plot test accuracy / train loss per epoch.
    accuracy = []
    epochs = []
    loss = []
    # `with` guarantees the handle is closed even if parsing raises.
    with open('./logs/log.txt', 'r') as log_file:
        for line in log_file:
            test_accuracy = re.search('"test_accuracy": ([0]\.[0-9]+)*', line)
            # The trailing * makes the capture group optional, so guard
            # against group(1) being None before converting.
            if test_accuracy and test_accuracy.group(1):
                # Store floats (not strings) so matplotlib scales the axes.
                accuracy.append(float(test_accuracy.group(1)))
            epoch = re.search('"epoch": ([0-9]+)*', line)
            if epoch and epoch.group(1):
                epochs.append(int(epoch.group(1)))
            train_loss = re.search('"train_loss": ([0-9]\.[0-9]+)*', line)
            if train_loss and train_loss.group(1):
                loss.append(float(train_loss.group(1)))

    plt.figure('test_accuracy vs epochs')
    plt.xlabel('epoch')
    plt.ylabel('test_accuracy')
    plt.plot(epochs, accuracy, 'b*')
    plt.plot(epochs, accuracy, 'r')
    plt.grid(True)

    plt.figure('train_loss vs epochs')
    plt.xlabel('epoch')
    plt.ylabel('train_loss')
    plt.plot(epochs, loss, 'b*')
    plt.plot(epochs, loss, 'y')
    plt.grid(True)
    plt.show()
| mit | Python | |
abc155280052ab2f216342acd7933db3e090d94e | Add some basic tests for flow_exports | mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,mitmproxy/mitmproxy,xaxa89/mitmproxy,zlorb/mitmproxy,dwfreed/mitmproxy,dwfreed/mitmproxy,ddworken/mitmproxy,jvillacorta/mitmproxy,ddworken/mitmproxy,mhils/mitmproxy,mhils/mitmproxy,mitmproxy/mitmproxy,gzzhanghao/mitmproxy,laurmurclar/mitmproxy,ddworken/mitmproxy,mosajjal/mitmproxy,StevenVanAcker/mitmproxy,tdickers/mitmproxy,xaxa89/mitmproxy,dufferzafar/mitmproxy,mhils/mitmproxy,mosajjal/mitmproxy,tdickers/mitmproxy,laurmurclar/mitmproxy,ParthGanatra/mitmproxy,dufferzafar/mitmproxy,fimad/mitmproxy,mitmproxy/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,jvillacorta/mitmproxy,Kriechi/mitmproxy,StevenVanAcker/mitmproxy,xaxa89/mitmproxy,ddworken/mitmproxy,cortesi/mitmproxy,ikoz/mitmproxy,dufferzafar/mitmproxy,cortesi/mitmproxy,fimad/mitmproxy,tdickers/mitmproxy,jvillacorta/mitmproxy,gzzhanghao/mitmproxy,ujjwal96/mitmproxy,mosajjal/mitmproxy,ParthGanatra/mitmproxy,zlorb/mitmproxy,ikoz/mitmproxy,fimad/mitmproxy,dwfreed/mitmproxy,MatthewShao/mitmproxy,mhils/mitmproxy,Kriechi/mitmproxy,ujjwal96/mitmproxy,ParthGanatra/mitmproxy,fimad/mitmproxy,gzzhanghao/mitmproxy,ParthGanatra/mitmproxy,laurmurclar/mitmproxy,mosajjal/mitmproxy,mitmproxy/mitmproxy,laurmurclar/mitmproxy,MatthewShao/mitmproxy,vhaupert/mitmproxy,MatthewShao/mitmproxy,xaxa89/mitmproxy,Kriechi/mitmproxy,vhaupert/mitmproxy,MatthewShao/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,tdickers/mitmproxy,gzzhanghao/mitmproxy,StevenVanAcker/mitmproxy,dwfreed/mitmproxy,vhaupert/mitmproxy,dufferzafar/mitmproxy,vhaupert/mitmproxy,cortesi/mitmproxy,jvillacorta/mitmproxy,cortesi/mitmproxy,ikoz/mitmproxy,ikoz/mitmproxy,Kriechi/mitmproxy,mhils/mitmproxy | test/test_export_flow.py | test/test_export_flow.py | import netlib.tutils
from libmproxy import flow_export
from . import tutils
# Minimal GET request fixture (no headers, no body).
req_get = netlib.tutils.treq(
    method='GET',
    headers=None,
    content=None,
)

# Minimal POST request fixture (keeps treq's default body).
req_post = netlib.tutils.treq(
    method='POST',
    headers=None,
)
def test_request_simple():
    """curl_command produces non-empty output for GET and POST flows."""
    for request in (req_get, req_post):
        flow = tutils.tflow(req=request)
        assert flow_export.curl_command(flow)
| mit | Python | |
027b1e35a1808b010b30c596b9e98ede72b8c4e4 | add yamladd | benformosa/Toolbox,benformosa/Toolbox | yamladd.py | yamladd.py | #!/usr/bin/env python2
import argparse
import yaml
import sys
parser = argparse.ArgumentParser(
    description='Insert a key-value pair into each mapping '
                'in a YAML sequence of mappings')
parser.add_argument('key', help='Add this key')
parser.add_argument('value', help='with this value')
args = parser.parse_args()

# Load YAML from standard in
data = yaml.safe_load(sys.stdin)

# isinstance instead of `type(...) is list`: accepts list subclasses and
# is the idiomatic type check.
if isinstance(data, list):
    # NOTE(review): assumes every sequence item is a mapping; a scalar
    # item would raise TypeError -- confirm inputs are always mappings.
    for mapping in data:
        mapping[args.key] = args.value
    print(yaml.safe_dump(data, default_flow_style=False))
else:
    print("Error: data is not a YAML sequence")
    sys.exit(1)
| unlicense | Python | |
5c0ef34788202abefbc36f80899f9b9b54ba17be | Add a fabric file to test the generated output | spyder-ide/docrepr,spyder-ide/docrepr,techtonik/docrepr,spyder-ide/docrepr,techtonik/docrepr,techtonik/docrepr | fabfile.py | fabfile.py | # -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.sphinxify as oi
def test_basic():
    """Test with an empty context"""
    docstring = 'A test'
    # Render the docstring to HTML with a default (empty) Sphinx context.
    content = oi.sphinxify(docstring, oi.generate_context())
    page_name = '/tmp/test_basic.html'
    with open(page_name, 'w') as f:
        f.write(content)
    # Open the page so the rendered output can be inspected visually.
    webbrowser.open_new_tab(page_name)
def run_all():
    """Run all tests"""
    # Only one test exists so far.
    test_basic()
| bsd-3-clause | Python | |
6cfd5b46b9208d4e47482739732266c598e514df | add test script | luoyetx/mxnet,larroy/mxnet,jiajiechen/mxnet,CodingCat/mxnet,luoyetx/mxnet,TuSimple/mxnet,dmlc/mxnet,zhreshold/mxnet,reminisce/mxnet,precedenceguo/mxnet,jamesliu/mxnet,LinkHS/incubator-mxnet,navrasio/mxnet,vikingMei/mxnet,vikingMei/mxnet,LinkHS/incubator-mxnet,jiajiechen/mxnet,Northrend/mxnet,luoyetx/mxnet,TuSimple/mxnet,larroy/mxnet,jamesliu/mxnet,reminisce/mxnet,dmlc/mxnet,TuSimple/mxnet,rahul003/mxnet,rahul003/mxnet,vikingMei/mxnet,mbaijal/incubator-mxnet,szha/mxnet,zhreshold/mxnet,indhub/mxnet,tornadomeet/mxnet,navrasio/mxnet,fullfanta/mxnet,reminisce/mxnet,zhreshold/mxnet,apache/incubator-mxnet,Northrend/mxnet,eric-haibin-lin/mxnet,jiajiechen/mxnet,tornadomeet/mxnet,eric-haibin-lin/mxnet,zhreshold/mxnet,jamesliu/mxnet,fullfanta/mxnet,CodingCat/mxnet,szha/mxnet,vikingMei/mxnet,Northrend/mxnet,leezu/mxnet,rahul003/mxnet,TuSimple/mxnet,luoyetx/mxnet,szha/mxnet,indhub/mxnet,navrasio/mxnet,larroy/mxnet,tornadomeet/mxnet,yajiedesign/mxnet,sxjscience/mxnet,sxjscience/mxnet,leezu/mxnet,fullfanta/mxnet,DickJC123/mxnet,larroy/mxnet,ptrendx/mxnet,fullfanta/mxnet,precedenceguo/mxnet,ptrendx/mxnet,apache/incubator-mxnet,reminisce/mxnet,rahul003/mxnet,zhreshold/mxnet,zhreshold/mxnet,ForkedReposBak/mxnet,Northrend/mxnet,luoyetx/mxnet,jiajiechen/mxnet,tornadomeet/mxnet,ptrendx/mxnet,eric-haibin-lin/mxnet,dmlc/mxnet,jiajiechen/mxnet,fullfanta/mxnet,leezu/mxnet,zhreshold/mxnet,CodingCat/mxnet,precedenceguo/mxnet,tornadomeet/mxnet,dmlc/mxnet,ptrendx/mxnet,leezu/mxnet,DickJC123/mxnet,tornadomeet/mxnet,ptrendx/mxnet,leezu/mxnet,leezu/mxnet,dmlc/mxnet,CodingCat/mxnet,navrasio/mxnet,szha/mxnet,mbaijal/incubator-mxnet,yajiedesign/mxnet,Northrend/mxnet,larroy/mxnet,eric-haibin-lin/mxnet,jiajiechen/mxnet,LinkHS/incubator-mxnet,dmlc/mxnet,fullfanta/mxnet,yajiedesign/mxnet,jamesliu/mxnet,dmlc/mxnet,larroy/mxnet,Northrend/mxnet,indhub/mxnet,CodingCat/mxnet,reminisce/mxnet,tlby/mxnet,navrasio/mxnet,zhreshold/mxnet,szha/mxnet,sxjsci
ence/mxnet,navrasio/mxnet,CodingCat/mxnet,jamesliu/mxnet,tornadomeet/mxnet,ptrendx/mxnet,vikingMei/mxnet,luoyetx/mxnet,fullfanta/mxnet,navrasio/mxnet,fullfanta/mxnet,mbaijal/incubator-mxnet,mbaijal/incubator-mxnet,reminisce/mxnet,ForkedReposBak/mxnet,rahul003/mxnet,reminisce/mxnet,tornadomeet/mxnet,sxjscience/mxnet,DickJC123/mxnet,ptrendx/mxnet,apache/incubator-mxnet,luoyetx/mxnet,yajiedesign/mxnet,indhub/mxnet,vikingMei/mxnet,tlby/mxnet,LinkHS/incubator-mxnet,ptrendx/mxnet,jiajiechen/mxnet,CodingCat/mxnet,mbaijal/incubator-mxnet,vikingMei/mxnet,sxjscience/mxnet,mbaijal/incubator-mxnet,leezu/mxnet,yajiedesign/mxnet,larroy/mxnet,indhub/mxnet,ForkedReposBak/mxnet,Northrend/mxnet,TuSimple/mxnet,CodingCat/mxnet,indhub/mxnet,sxjscience/mxnet,indhub/mxnet,reminisce/mxnet,tlby/mxnet,tlby/mxnet,tlby/mxnet,ForkedReposBak/mxnet,yajiedesign/mxnet,ptrendx/mxnet,sxjscience/mxnet,LinkHS/incubator-mxnet,LinkHS/incubator-mxnet,TuSimple/mxnet,zhreshold/mxnet,mbaijal/incubator-mxnet,luoyetx/mxnet,eric-haibin-lin/mxnet,rahul003/mxnet,navrasio/mxnet,precedenceguo/mxnet,tlby/mxnet,precedenceguo/mxnet,yajiedesign/mxnet,precedenceguo/mxnet,LinkHS/incubator-mxnet,TuSimple/mxnet,fullfanta/mxnet,eric-haibin-lin/mxnet,tlby/mxnet,szha/mxnet,leezu/mxnet,vikingMei/mxnet,jamesliu/mxnet,dmlc/mxnet,jamesliu/mxnet,CodingCat/mxnet,precedenceguo/mxnet,eric-haibin-lin/mxnet,jamesliu/mxnet,rahul003/mxnet,LinkHS/incubator-mxnet,yajiedesign/mxnet,TuSimple/mxnet,ForkedReposBak/mxnet,reminisce/mxnet,apache/incubator-mxnet,yajiedesign/mxnet,larroy/mxnet,precedenceguo/mxnet,larroy/mxnet,Northrend/mxnet,szha/mxnet,sxjscience/mxnet,ptrendx/mxnet,jiajiechen/mxnet,TuSimple/mxnet,ForkedReposBak/mxnet,navrasio/mxnet,ForkedReposBak/mxnet,jiajiechen/mxnet,precedenceguo/mxnet,szha/mxnet,rahul003/mxnet,tornadomeet/mxnet,luoyetx/mxnet,DickJC123/mxnet,vikingMei/mxnet,leezu/mxnet,eric-haibin-lin/mxnet,LinkHS/incubator-mxnet,sxjscience/mxnet,indhub/mxnet,tlby/mxnet,mbaijal/incubator-mxnet,szha/mxnet,TuSimple/mxnet,reminisc
e/mxnet,rahul003/mxnet,Northrend/mxnet,indhub/mxnet,ForkedReposBak/mxnet,eric-haibin-lin/mxnet,mbaijal/incubator-mxnet,ForkedReposBak/mxnet,tlby/mxnet,tlby/mxnet,jamesliu/mxnet,dmlc/mxnet | test_new_image_loader.py | test_new_image_loader.py | import os
os.environ['MXNET_CPU_WORKER_NTHREADS'] = '1'
os.environ['OMP_NUM_THREADS'] = '1'
import time
import numpy as np
import multiprocessing as mp
import mxnet as mx
from mxnet import gluon as gl
from mxnet.gluon.data.vision import transforms
if __name__ == '__main__':
    # Throughput benchmark for the Gluon DataLoader image pipeline.
    M = 24    # number of DataLoader worker processes
    BS = 100  # batch size
    # Caltech-256 layout: one sub-directory per class.
    dataset = gl.data.vision.ImageFolderDataset('../256_ObjectCategories')
    transform = transforms.Compose([transforms.ToTensor(),
                                    transforms.RandomBrightness(1.0),
                                    transforms.RandomContrast(1.0),
                                    transforms.RandomSaturation(1.0),
                                    transforms.Normalize([0, 0, 0], [1, 1, 1])])
    # Center-crop each image to 224x224 before applying the transforms.
    dataset = dataset.transform_first(lambda x: transform(mx.image.center_crop(x, (224, 224))[0]))
    data_loader = gl.data.DataLoader(dataset, BS, shuffle=True, num_workers=M)
    N = len(dataset)
    iterator = iter(data_loader)
    tic = time.time()
    for data, label in iterator:
        # Block until the batch is materialised so the timing is honest.
        data.wait_to_read()
        print(data.shape)
    # Throughput in images per second over the whole epoch.
    print(N/(time.time() - tic))
| apache-2.0 | Python | |
8d017a1f54bfff98f8f571c69d1e19ddbe8abdde | Add processify decorator | dseuss/pythonlibs | tools/processify.py | tools/processify.py | #!/usr/bin/env python
# encoding: utf-8
"""Taken from https://gist.github.com/schlamar/2311116"""
from __future__ import division, print_function
import sys
import traceback
from functools import wraps
from multiprocessing import Process, Queue
def processify(func):
    """Decorator to run a function as a process.

    The decorated function executes in a child process; its return value
    (or exception) is shipped back over a queue. Every argument and the
    return value must therefore be *pickable*. The created process is
    joined, so the code does not run in parallel with the caller.
    """
    def process_func(q, *args, **kwargs):
        # Runs in the child: capture either the result or the exception
        # info and push it through the queue.
        try:
            ret = func(*args, **kwargs)
        except Exception:
            ex_type, ex_value, tb = sys.exc_info()
            error = ex_type, ex_value, ''.join(traceback.format_tb(tb))
            ret = None
        else:
            error = None

        q.put((ret, error))

    # register original function with different name
    # in sys.modules so it is pickable
    process_func.__name__ = func.__name__ + 'processify_func'
    setattr(sys.modules[__name__], process_func.__name__, process_func)

    @wraps(func)
    def wrapper(*args, **kwargs):
        q = Queue()
        p = Process(target=process_func, args=[q] + list(args), kwargs=kwargs)
        p.start()
        # BUG FIX: read the queue *before* joining. Joining first can
        # deadlock when the child blocks in q.put() on a large payload.
        ret, error = q.get()
        p.join()

        if error:
            ex_type, ex_value, tb_str = error
            # BUG FIX: Python 3 exceptions have no `.message` attribute;
            # str(ex_value) works on both Python 2 and 3.
            message = '%s (in subprocess)\n%s' % (str(ex_value), tb_str)
            raise ex_type(message)

        return ret
    return wrapper
| unlicense | Python | |
c5da3ee962a05c05d55fd98149c1095a57f03e36 | Add tests for task types for shot route | cgwire/zou | test/shots/test_task_types_for_shot.py | test/shots/test_task_types_for_shot.py | from test.base import ApiDBTestCase
class ShotTaskTypesTestCase(ApiDBTestCase):
    """API tests for the /data/shots/<shot_id>/task-types route."""

    def setUp(self):
        """Create the chain of fixtures a shot task depends on."""
        super(ShotTaskTypesTestCase, self).setUp()
        self.generate_fixture_project_status()
        self.generate_fixture_project()
        self.generate_fixture_entity_type()
        self.generate_fixture_sequence()
        self.generate_fixture_shot()
        self.generate_fixture_entity()
        self.generate_fixture_person()
        self.generate_fixture_assigner()
        self.generate_fixture_task_status()
        self.generate_fixture_department()
        self.generate_fixture_task_type()
        self.generate_fixture_shot_task()

    def test_get_task_types_for_shot(self):
        """The route returns exactly the task types of the shot's tasks."""
        task_types = self.get("/data/shots/%s/task-types" % self.shot.id)
        # assertEqual: `assertEquals` is a deprecated alias (removed in
        # Python 3.12).
        self.assertEqual(len(task_types), 1)
        self.assertDictEqual(
            task_types[0],
            self.task_type_animation.serialize()
        )
| agpl-3.0 | Python | |
554243dd90d6a96e5782f2b3e7ed5a72254fdf5b | Add tests. | Scifabric/pybossa,PyBossa/pybossa,PyBossa/pybossa,Scifabric/pybossa | test/test_jobs/test_export_userdata.py | test/test_jobs/test_export_userdata.py | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2018 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
import json
from default import Test, with_context, FakeResponse
from factories import UserFactory, ProjectFactory, TaskRunFactory
from pybossa.jobs import export_userdata, send_mail
from pybossa.core import user_repo
from pybossa.exporter.json_export import JsonExporter
from mock import patch, MagicMock
from flask import current_app, render_template, url_for
from flask.ext.mail import Message
#@patch('pybossa.jobs.uploader')
class TestExportAccount(Test):
    @with_context
    @patch('pybossa.jobs.Message')
    @patch('pybossa.jobs.send_mail')
    @patch('pybossa.jobs.JsonExporter')
    def test_export(self, json_mock, m1, m2):
        """Check email is sent to user."""
        # patch decorators apply bottom-up, so mocks arrive innermost
        # first: json_mock = JsonExporter, m1 = send_mail, m2 = Message.
        user = UserFactory.create()
        project = ProjectFactory.create(owner=user)
        taskrun = TaskRunFactory.create(user_id=user.id)
        export_userdata(user.id)
        upload_method = 'uploads.uploaded_file'
        # Download links the job is expected to embed in the email.
        personal_data_link = url_for(upload_method,
                                     filename="user_%s/personal_data.zip" % user.id)
        personal_projects_link = url_for(upload_method,
                                         filename="user_%s/user_projects.zip" % user.id)
        personal_contributions_link = url_for(upload_method,
                                              filename="user_%s/user_contributions.zip" % user.id)
        # Render the same templates the job renders, then verify
        # send_mail was invoked with exactly that payload.
        body = render_template('/account/email/exportdata.md',
                               user=user.dictize(),
                               personal_data_link=personal_data_link,
                               personal_projects_link=personal_projects_link,
                               personal_contributions_link=personal_contributions_link,
                               config=current_app.config)
        html = render_template('/account/email/exportdata.html',
                               user=user.dictize(),
                               personal_data_link=personal_data_link,
                               personal_projects_link=personal_projects_link,
                               personal_contributions_link=personal_contributions_link,
                               config=current_app.config)
        subject = 'Your personal data'
        mail_dict = dict(recipients=[user.email_addr],
                         subject=subject,
                         body=body,
                         html=html)
        m1.assert_called_with(mail_dict)
| agpl-3.0 | Python | |
566b2e9450c0602ea622d5c9b0ea90512336aef5 | Add exceptions from ceilometerclient | rdo-management/python-tuskarclient,tuskar/python-tuskarclient | tuskarclient/exc.py | tuskarclient/exc.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
class BaseException(Exception):
    """An error occurred."""
    # NOTE: intentionally shadows the builtin BaseException; the name is
    # part of this module's public API and kept for compatibility.

    def __init__(self, message=None):
        self.message = message

    def __str__(self):
        # Fall back to the class docstring when no message was given.
        if self.message:
            return self.message
        return self.__class__.__doc__
# Client-side (non-HTTP) error conditions.
class CommandError(BaseException):
    """Invalid usage of CLI."""


class InvalidEndpoint(BaseException):
    """The provided endpoint is invalid."""


class CommunicationError(BaseException):
    """Unable to communicate with server."""
class ClientException(Exception):
    """DEPRECATED."""


class HTTPException(ClientException):
    """Base exception for all HTTP-derived exceptions."""
    # Subclasses override `code` with their numeric HTTP status.
    code = 'N/A'

    def __init__(self, details=None):
        self.details = details

    def __str__(self):
        class_name = self.__class__.__name__
        return "%s (HTTP %s)" % (class_name, self.code)
class HTTPMultipleChoices(HTTPException):
    code = 300

    def __str__(self):
        # NOTE(review): mutating self.details inside __str__ is a side
        # effect, and the wording mentions the Images API -- this looks
        # carried over from glanceclient; confirm it is intended here.
        self.details = ("Requested version of OpenStack Images API is not"
                        "available.")
        return "%s (HTTP %s) %s" % (self.__class__.__name__, self.code,
                                    self.details)
# Concrete HTTP error classes. The bare-named variants (BadRequest,
# Unauthorized, ...) are deprecated aliases kept for backward
# compatibility; the HTTP*-prefixed subclasses are the ones collected
# into _code_map below.
class BadRequest(HTTPException):
    """DEPRECATED."""
    code = 400


class HTTPBadRequest(BadRequest):
    pass


class Unauthorized(HTTPException):
    """DEPRECATED."""
    code = 401


class HTTPUnauthorized(Unauthorized):
    pass


class Forbidden(HTTPException):
    """DEPRECATED."""
    code = 403


class HTTPForbidden(Forbidden):
    pass


class NotFound(HTTPException):
    """DEPRECATED."""
    code = 404


class HTTPNotFound(NotFound):
    pass


class HTTPMethodNotAllowed(HTTPException):
    code = 405


class Conflict(HTTPException):
    """DEPRECATED."""
    code = 409


class HTTPConflict(Conflict):
    pass


class OverLimit(HTTPException):
    """DEPRECATED."""
    code = 413


class HTTPOverLimit(OverLimit):
    pass


class HTTPInternalServerError(HTTPException):
    code = 500


class HTTPNotImplemented(HTTPException):
    code = 501


class HTTPBadGateway(HTTPException):
    code = 502


class ServiceUnavailable(HTTPException):
    """DEPRECATED."""
    code = 503


class HTTPServiceUnavailable(ServiceUnavailable):
    pass
#NOTE(bcwaldon): Build a mapping of HTTP codes to corresponding exception
# classes by scanning this module for every HTTP*-named class above.
_this_module = sys.modules[__name__]
_code_map = {}
for _name in dir(_this_module):
    if _name.startswith('HTTP'):
        _klass = getattr(_this_module, _name)
        _code_map[_klass.code] = _klass
def from_response(response):
    """Return an instance of an HTTPException based on httplib response."""
    # Unknown status codes fall back to the generic HTTPException.
    exc_class = _code_map.get(response.status, HTTPException)
    return exc_class()
# Legacy exceptions retained only for backward compatibility.
class NoTokenLookupException(Exception):
    """DEPRECATED."""
    pass


class EndpointNotFound(Exception):
    """DEPRECATED."""
    pass
| apache-2.0 | Python | |
ff68543ba0ebe15b5847dfbc0f2d45221b1f3634 | add inflexible fast tests | dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy | tests/fast_test_suite.py | tests/fast_test_suite.py | import unittest
from test_evaluation_metrics import EvaluationMetricsTestCase
from test_appliance_trace import ApplianceTraceTestCase
from test_appliance_instance import ApplianceInstanceTestCase
from test_appliance_set import ApplianceSetTestCase
from test_appliance_type import ApplianceTypeTestCase
from test_utils import UtilsTestCase
# import test_pecanstreet_dataset_adapter as test_psda
def all_tests():
    """Aggregate every fast test case into a single unittest.TestSuite.

    The PecanStreet adapter tests are included only when their module can
    actually be imported (its import is commented out at the top of this
    file).
    """
    loader = unittest.TestLoader()
    suites = [
        loader.loadTestsFromTestCase(EvaluationMetricsTestCase),
        loader.loadTestsFromTestCase(ApplianceTraceTestCase),
        # BUG FIX: this previously read `ApplianceIstanceTestCase` (typo),
        # which raised NameError; the imported name is
        # ApplianceInstanceTestCase.
        loader.loadTestsFromTestCase(ApplianceInstanceTestCase),
        loader.loadTestsFromTestCase(ApplianceSetTestCase),
        loader.loadTestsFromTestCase(ApplianceTypeTestCase),
        loader.loadTestsFromTestCase(UtilsTestCase),
    ]
    try:
        # The module-level import is commented out; the adapter tests need
        # external database access and may be unavailable.
        import test_pecanstreet_dataset_adapter as test_psda
    except ImportError:
        pass
    else:
        psda_tests = [
            'test_get_table_names',
            'test_get_month_traces',
        ]
        suites.append(unittest.TestSuite(
            map(test_psda.PecanStreetDatasetAdapterTestCase, psda_tests)))
    return unittest.TestSuite(suites)
if __name__ == "__main__":
    # Announce the run; execution of the suite is currently disabled
    # (the lines below are commented out while imports are broken).
    print 'running fast tests'
    #suite = all_tests()
    #unittest.TextTestRunner().run(suite)
| mit | Python | |
fc74bf1ee86b667221d25c296c305d2d2e478e23 | add inflexible fast tests | dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy | tests/fast_test_suite.py | tests/fast_test_suite.py | import unittest
from test_evaluation_metrics import EvaluationMetricsTestCase
from test_appliance_trace import ApplianceTraceTestCase
from test_appliance_instance import ApplianceInstanceTestCase
from test_appliance_set import ApplianceSetTestCase
from test_appliance_type import ApplianceTypeTestCase
from test_utils import UtilsTestCase
# import test_pecanstreet_dataset_adapter as test_psda
def all_tests():
    """Aggregate every fast test case into a single unittest.TestSuite.

    The PecanStreet adapter tests are included only when their module can
    actually be imported (its import is commented out at the top of this
    file).
    """
    loader = unittest.TestLoader()
    suites = [
        loader.loadTestsFromTestCase(EvaluationMetricsTestCase),
        loader.loadTestsFromTestCase(ApplianceTraceTestCase),
        # BUG FIX: this previously read `ApplianceIstanceTestCase` (typo),
        # which raised NameError; the imported name is
        # ApplianceInstanceTestCase.
        loader.loadTestsFromTestCase(ApplianceInstanceTestCase),
        loader.loadTestsFromTestCase(ApplianceSetTestCase),
        loader.loadTestsFromTestCase(ApplianceTypeTestCase),
        loader.loadTestsFromTestCase(UtilsTestCase),
    ]
    try:
        # The module-level import is commented out; the adapter tests need
        # external database access and may be unavailable.
        import test_pecanstreet_dataset_adapter as test_psda
    except ImportError:
        pass
    else:
        psda_tests = [
            'test_get_table_names',
            'test_get_month_traces',
        ]
        suites.append(unittest.TestSuite(
            map(test_psda.PecanStreetDatasetAdapterTestCase, psda_tests)))
    return unittest.TestSuite(suites)
if __name__ == "__main__":
    # Announce the run; execution of the suite is currently disabled
    # (the lines below are commented out while imports are broken).
    print 'running fast tests'
    #suite = all_tests()
    #unittest.TextTestRunner().run(suite)
| mit | Python | |
de15489a27c28c6cb5bf35c325ebdd6e4dad5d2c | Create log.py | ArtiomL/tls-scan,ArtiomL/tls-scan | lib/log.py | lib/log.py | test
| mit | Python | |
21077f751e6cacae2e2383942f9b572a5ede86c9 | Add some example function tests that use gaeftest | MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging | tests/test_functional.py | tests/test_functional.py | #!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__authors__ = [
'"Matthew Wilkes" <matthew@matthewwilkes.co.uk>',
]
from gaeftest.test import FunctionalTestCase
from zope.testbrowser import browser
import os.path
class MelangeFunctionalTestCase(FunctionalTestCase):
"""A base class for all functional tests in Melange.
Tests MUST NOT be defined here, but the superclass requires a path
attribute that points to the app.yaml. Utility functions MAY be
declared here to be shared by all functional tests, but any
overridden unittest methods MUST call the superclass version.
"""
path = os.path.abspath(__file__+"/../../app/app.yaml")
class TestBranding(MelangeFunctionalTestCase):
"""Tests that ensure Melange properly displays attribution.
Other notices, as required by the project and/or law, are tested
here as well.
"""
def test_attribution(self):
"""Ensure that the front page asserts that it is a Melange app.
"""
tb = browser.Browser()
tb.open("http://127.0.0.1:8080/site/show/site")
self.assertTrue("Powered by Melange" in tb.contents)
class TestLogin(MelangeFunctionalTestCase):
"""Tests that check the login system is functioning correctly.
Also tests that users go through the correct registration workflow.
"""
def test_firstLogin(self):
"""Ensure that new users are prompted to create a profile.
Also test that only new users are prompted.
"""
tb = browser.Browser()
tb.open("http://127.0.0.1:8080")
tb.getLink("Sign in").click()
self.assertTrue("login" in tb.url)
# fill in dev_appserver login form
tb.getForm().getControl("Email").value = "newuser@example.com"
tb.getForm().getControl("Login").click()
self.assertTrue(tb.url.endswith("/show/site"))
self.assertTrue('Please create <a href="/user/create_profile">'
'User Profile</a> in order to view this page' in tb.contents)
tb.getLink("User Profile").click()
# fill in the user profile
cp = tb.getForm(action="create_profile")
cp.getControl(name="link_id").value = "exampleuser"
cp.getControl(name="name").value = "Example user"
cp.getControl("Save").click()
# if all is well, we go to the edit page
self.assertTrue("edit_profile" in tb.url)
tb.open("http://127.0.0.1:8080")
# call to action no longer on front page
self.assertFalse('Please create <a href="/user/create_profile">'
'User Profile</a> in order to view this page' in tb.contents)
| apache-2.0 | Python | |
21b8c5317af4ff129f72fa949e32a57eae0df7d4 | Create categoryManager.py | MichaelCurrin/twitterverse,MichaelCurrin/twitterverse | app/utils/categoryManager.py | app/utils/categoryManager.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Category manager utility.
Manage values in the Category table and manage links between Category
and Profiles.
"""
import argparse
import os
import sys
# Allow imports to be done when executing this file directly.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir)))
from lib import database as db
from lib.tweets import assignProfileCategory
from lib.query.tweets.categories import printAvailableCategories,\
printCategoriesAndProfiles
def main():
"""
Handle command-line arguments to print or edit data.
"""
parser = argparse.ArgumentParser(description="Category manager utility.")
parser.add_argument('-a', '--available',
action='store_true',
help="Output available Categories in db, with Profile counts for each.")
parser.add_argument('-s', '--summary',
action='store_true',
help="Output summary of Categories and Profiles then exit")
parser.add_argument('-u', '--unassigned',
action='store_true',
help="Output list of Profiles which do yet have a Category assigned to them.")
parser.add_argument('-c', '--category',
help="Create category if it does not yet exist. If --names argument is used, assign this Category (name or the --available index) to screen names.")
parser.add_argument('-n', '--names',
metavar='SCREEN_NAME',
nargs='+',
help="One or more screen names (without leading @). Assign Category to these screen names.")
args = parser.parse_args()
if args.available:
printAvailableCategories()
elif args.summary:
printCategoriesAndProfiles()
elif args.unassigned:
for p in db.Profile.select(orderBy='screen_name'):
if not p.categories.count():
print u"@{screenName} | {name} | {followers:,d} followers"\
.format(screenName=p.screenName, name=p.name,
followers=p.followersCount)
print p.description
print
elif args.category:
# Create category alone, or assign Profiles if they are provided.
if not args.names:
assignProfileCategory(cat=args.category, screenNames=None)
else:
# Encode list of str command-line arguments as unicode.
screenNames = [s.decode('utf-8') for s in args.names]
if args.category.isdigit():
# Get one item but decrease index by 1 since the available list
# starts at 1.
cat = db.Category.select()[int(args.category) - 1].name
else:
cat = args.category
print "Category: {0}".format(cat)
newCnt, existingCnt = assignProfileCategory(cat,
screenNames=screenNames)
print " - new links: {0:,d}".format(newCnt)
print " - existing links found: {0:,d}".format(existingCnt)
else:
raise AssertionError("Invalid arguments. See --help.")
if __name__ == '__main__':
main()
| mit | Python | |
a9976160ad297953ecb91ff38fe904a993eba784 | Remove _clean | jorgebastida/django-dajax,jayfk/django-dajax,jorgebastida/django-dajax,jorgebastida/django-dajax,jayfk/django-dajax,Leonime/django-dajax,Leonime/django-dajax | dajax/core.py | dajax/core.py | from django.utils import simplejson as json
class Dajax(object):
def __init__(self):
self.calls = []
def json(self):
return json.dumps(self.calls)
def alert(self, message):
self.calls.append({'cmd': 'alert', 'val': message})
def assign(self, id, attribute, value):
self.calls.append({'cmd': 'as', 'id': id, 'prop': attribute, 'val': value})
def add_css_class(self, id, value):
if not hasattr(value, '__iter__'):
value = [value]
self.calls.append({'cmd': 'addcc', 'id': id, 'val': value})
def remove_css_class(self, id, value):
if not hasattr(value, '__iter__'):
value = [value]
self.calls.append({'cmd': 'remcc', 'id': id, 'val': value})
def append(self, id, attribute, value):
self.calls.append({'cmd': 'ap', 'id': id, 'prop': attribute, 'val': value})
def prepend(self, id, attribute, value):
self.calls.append({'cmd': 'pp', 'id': id, 'prop': attribute, 'val': value})
def clear(self, id, attribute):
self.calls.append({'cmd': 'clr', 'id': id, 'prop': attribute})
def redirect(self, url, delay=0):
self.calls.append({'cmd': 'red', 'url': url, 'delay': delay})
def script(self, code): # OK
self.calls.append({'cmd': 'js', 'val': code})
def remove(self, id):
self.calls.append({'cmd': 'rm', 'id': id})
def add_data(self, data, function):
self.calls.append({'cmd': 'data', 'val': data, 'fun': function})
| from django.utils import simplejson as json
class Dajax(object):
def __init__(self):
self.calls = []
def json(self):
return json.dumps(self.calls)
def alert(self, message):
self.calls.append({'cmd': 'alert', 'val': self._clean(message)})
def assign(self, id, attribute, value):
self.calls.append({'cmd': 'as', 'id': id, 'prop': attribute, 'val': self._clean(value)})
def add_css_class(self, id, value):
if not hasattr(value, '__iter__'):
value = [value]
self.calls.append({'cmd': 'addcc', 'id': id, 'val': self._clean(value)})
def remove_css_class(self, id, value):
if not hasattr(value, '__iter__'):
value = [value]
self.calls.append({'cmd': 'remcc', 'id': id, 'val': self._clean(value)})
def append(self, id, attribute, value):
self.calls.append({'cmd': 'ap', 'id': id, 'prop': attribute, 'val': self._clean(value)})
def prepend(self, id, attribute, value):
self.calls.append({'cmd': 'pp', 'id': id, 'prop': attribute, 'val': self._clean(value)})
def clear(self, id, attribute):
self.calls.append({'cmd': 'clr', 'id': id, 'prop': attribute})
def redirect(self, url, delay=0):
self.calls.append({'cmd': 'red', 'url': url, 'delay': delay})
def script(self, code): # OK
self.calls.append({'cmd': 'js', 'val': code})
def remove(self, id):
self.calls.append({'cmd': 'rm', 'id': id})
def add_data(self, data, function):
self.calls.append({'cmd': 'data', 'val': data, 'fun': function})
def _clean(self, data):
if hasattr(data, '__iter__'):
return map(self._clean, data)
else:
return unicode(data).replace('\n', '').replace('\r', '')
| bsd-3-clause | Python |
5db1ac3c73ebba56f478c8ee75d725032c5ab34e | Create gui.py | gappleto97/Senior-Project | client/gui.py | client/gui.py | mit | Python | ||
9c6da7e968f6659016fd34fb32a71476bd63c96b | Create emoji.py | PrestigeDox/Watashi-SelfBot | cogs/emoji.py | cogs/emoji.py | import discord
from discord.ext import commands
class Emoji:
def __init__(self, bot):
self.bot = bot
@commands.group(invoke_without_command=True)
async def emoji(self, ctx):
await ctx.message.delete()
emb = discord.Embed(colour=self.bot.gold)
emb.add_field(name='Usage', value='{***REMOVED***emoji <emojiname>'.format(self.bot.configs['prefix']))
await ctx.send(embed=emb)
@emoji.command()
async def shrug(self, ctx):
await ctx.message.delete()
await ctx.send("¯\_(ツ)_/¯")
@emoji.command()
async def face(self, ctx):
await ctx.message.delete()
await ctx.send("( ͡° ͜ʖ ͡°)")
@emoji.command()
async def badman(self, ctx):
await ctx.message.delete()
await ctx.send("̿̿ ̿̿ ̿̿ ̿'̿'\̵͇̿̿\з= ( ▀ ͜͞ʖ▀) =ε/̵͇̿̿/’̿’̿ ̿ ̿̿ ̿̿ ̿̿")
@emoji.command(aliases=['guns'])
async def gun(self, ctx):
await ctx.message.delete()
await ctx.send("▄︻̷̿┻̿═━一")
@emoji.command()
async def ameno(self, ctx):
await ctx.message.delete()
await ctx.send("༼ つ ◕_◕ ༽つ")
@emoji.command()
async def sunglasses(self, ctx):
await ctx.message.delete()
await ctx.send("(▀̿Ĺ̯▀̿ ̿)")
@emoji.command()
async def eyesworried(self, ctx):
await ctx.message.delete()
await ctx.send("ಠ_ಠ")
@emoji.command(aliases=['5dollars', '5dollar', 'money'])
async def fivedollar(self, ctx):
await ctx.message.delete()
await ctx.send("[̲̅$̲̅(̲̅5̲̅)̲̅$̲̅]")
@emoji.command()
async def hiding(self, ctx):
await ctx.message.delete()
await ctx.send("┬┴┬┴┤ ͜ʖ ͡°) ├┬┴┬┴")
@emoji.command()
async def boxing(self, ctx):
await ctx.message.delete()
await ctx.send("(ง'̀-'́)ง")
@emoji.command()
async def tableflip(self, ctx):
await ctx.message.delete()
await ctx.send("(╯°□°)╯︵ ┻━┻")
@emoji.command(aliases=['0.o', '0.0', 'o.o'])
async def wierdeyes(self, ctx):
await ctx.message.delete()
await ctx.send("◉_◉")
@emoji.command()
async def tableflip2(self, ctx):
await ctx.message.delete()
await ctx.send("┻━┻ ︵ヽ(`Д´)ノ︵ ┻━┻")
@emoji.command(aliases=['cuteface'])
async def cute(self, ctx):
await ctx.message.delete()
await ctx.send("(。◕‿◕。)")
@emoji.command()
async def unflip(self, ctx):
await ctx.message.delete()
await ctx.send("┬─┬ ノ( ゜-゜ノ)")
def setup(bot):
bot.add_cog(Emoji(bot))
| mit | Python | |
26b0c71b43c19984121c3f474b0836981950c468 | Add 0014 file | Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Show-Me-the-Code/python,Show-Me-the-Code/python,Show-Me-the-Code/python,Show-Me-the-Code/python,Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Show-Me-the-Code/python,Show-Me-the-Code/python | Drake-Z/0014/0014.py | Drake-Z/0014/0014.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''第 0014 题: 纯文本文件 student.txt为学生信息, 里面的内容(包括花括号)如下所示:
{
"1":["张三",150,120,100],
"2":["李四",90,99,95],
"3":["王五",60,66,68]
}
请将上述内容写到 student.xls 文件中。'''
__author__ = 'Drake-Z'
import os
import re
from collections import OrderedDict
import xlwt
def read_data(data):
c = OrderedDict([])
re_xuhao = re.compile(r'"([\d]+)":')
re_yuansu = re.compile(r'\[(.*?)\]')
a = re_xuhao.findall(data) #得到序号
b = re_yuansu.findall(data) #得到具体数据
for m, n in zip(a, b): #将数据转为Dict
n = re.split(r',', n)
n[0] = n[0][1:-1] #去除引号
c[m] = n
writeFlie(c)
def writeFlie(dictdata):
workbook = xlwt.Workbook(encoding = 'utf-8') #创建工作薄
worksheet = workbook.add_sheet('My Worksheet') #创建表
num = list(dictdata.keys()) #得到序号
for i in range(0, 3):
worksheet.write(i, 0, label = num[i])
for m in range(0, 4):
worksheet.write(i, m+1, label = dictdata[num[i]][m])
workbook.save('0014/student.xls')
if __name__ == '__main__':
file = open('0014/student.txt', 'r', encoding='utf-8')
read_data(file.read()) | mit | Python | |
3f6b36d96e7e718ff7fb9d98a8211448f6b0d7e9 | Add upgrade script | TribeMedia/synapse,howethomas/synapse,rzr/synapse,illicitonion/synapse,howethomas/synapse,TribeMedia/synapse,matrix-org/synapse,iot-factory/synapse,matrix-org/synapse,iot-factory/synapse,rzr/synapse,iot-factory/synapse,matrix-org/synapse,rzr/synapse,rzr/synapse,matrix-org/synapse,illicitonion/synapse,iot-factory/synapse,rzr/synapse,matrix-org/synapse,illicitonion/synapse,illicitonion/synapse,iot-factory/synapse,illicitonion/synapse,howethomas/synapse,TribeMedia/synapse,howethomas/synapse,howethomas/synapse,matrix-org/synapse,TribeMedia/synapse,TribeMedia/synapse | scripts/upgrade_appservice_db.py | scripts/upgrade_appservice_db.py | import argparse
import json
import sqlite3
def main(dbname):
con = sqlite3.connect(dbname)
cur = con.cursor()
cur.execute("SELECT id, regex FROM application_services_regex")
for row in cur.fetchall():
try:
print "checking %s..." % row[0]
json.loads(row[1])
print "Already in new format"
except ValueError:
# row isn't in json, make it so.
string_regex = row[1]
new_regex = json.dumps({
"regex": string_regex,
"exclusive": True
})
cur.execute(
"UPDATE application_services_regex SET regex=? WHERE id=?",
(new_regex, row[0])
)
cur.close()
con.commit()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("database")
args = parser.parse_args()
main(args.database)
| apache-2.0 | Python | |
fe757d0f17c57f6748ca28e678bb22a9f36271de | Add cd magic test | Calysto/metakernel | metakernel/magics/tests/test_cd_magic.py | metakernel/magics/tests/test_cd_magic.py |
import os
from metakernel.tests.utils import get_kernel
def test_cd_magic():
kernel = get_kernel()
kernel.do_execute("%cd ~")
assert os.getcwd() == os.path.expanduser('~'), os.getcwd()
| bsd-3-clause | Python | |
45e6903bdb6d75f68dd09e156e85960acd885638 | Add main program. | Uberi/The-Mippits | mippits.py | mippits.py | #!/usr/bin/env python3
code = "00001020afc2fffcafc5fff8afdffff4000028140000000c03c5f02200001814ffffffff104300260000a014000000300282a0200000a814ffff000caeb400000042102000421020002220208c8200040000a014000000410282a0200000a814ffff000caeb400000000f8140000000403e00009006028208c8200080000a014000000410282a0200000a814ffff000caeb400000000f8140000000403e000090060202000a018200064102a1040000100801820000010140000000100621820000028140000000c03c5f0208fdffff48fc5fff88fc2fffc03e000080000a014000000410000a814ffff000caeb40000"
PC = 0
HI, LO = 0, 0
registers = [0] * 32
from collections import defaultdict
MEM = defaultdict(int)
def normalize(value):
return value & 0xFFFFFFFF
def signed(value):
return value - 0x100000000 if value & 0x80000000 else value
# loader
assert len(code) % 8 == 0
for i in range(0, len(code) // 8):
MEM[i] = int(code[i * 8:i * 8 + 8], 16)
registers[30] = 0xFFFFFFFC
registers[31] = 0xFFFFFFFC
def decode_execute(instruction):
global PC
registers[0] = 0
d, s, t = (instruction >> 11) & 0b11111, (instruction >> 21) & 0b11111, (instruction >> 16) & 0b11111
i = instruction & 0b1111111111111111
if i & 0x8000: i -= 0x10000
if instruction >> 26 == 0b000000 and instruction & 0b11111111111 == 0b00000100000: # add (add)
registers[d] = normalize(registers[s] + registers[t])
elif instruction >> 26 == 0b000000 and instruction & 0b11111111111 == 0b00000100010: # subtract (sub)
registers[d] = normalize(registers[s] - registers[t])
elif instruction >> 26 == 0b000000 and instruction & 0b1111111111111111 == 0b0000000000011000: # multiply (mult)
result = signed(registers[s]) * signed(registers[t])
HI, LO = result >> 32, result & 0xFFFFFFFF
elif instruction >> 26 == 0b000000 and instruction & 0b1111111111111111 == 0b0000000000011001: # multiply unsigned (multu)
result = registers[s] * registers[t]
HI, LO = result >> 32, result & 0xFFFFFFFF
elif instruction >> 26 == 0b000000 and instruction & 0b1111111111111111 == 0b0000000000011010: # divide (div)
HI, LO = signed(registers[s]) % signed(registers[t]), signed(registers[s]) / signed(registers[t])
elif instruction >> 26 == 0b000000 and instruction & 0b1111111111111111 == 0b0000000000011011: # divide unsigned (divu)
HI, LO = registers[s] / registers[t], registers[s] / registers[t]
elif instruction >> 16 == 0b0000000000000000 and instruction & 0b11111111111 == 0b00000010000: # move from high/remainder (mfhi)
registers[d] = HI
elif instruction >> 16 == 0b0000000000000000 and instruction & 0b11111111111 == 0b00000010010: # move from low/quotient (mflo)
registers[d] = LO
elif instruction >> 16 == 0b0000000000000000 and instruction & 0b11111111111 == 0b00000010100: # load immediate and skip (lis)
assert PC % 4 == 0
registers[d] = MEM[PC // 4]
PC += 4
elif instruction >> 26 == 0b100011: # load word (lw)
address = registers[s] + i
assert address % 4 == 0
#wip: read from stdin when loading from 0xFFFF0004
print("a", address // 4, "a")
registers[t] = MEM[address // 4]
elif instruction >> 26 == 0b101011: # store word (sw)
address = registers[s] + i
assert address % 4 == 0
if address == 0xFFFF000C: print(chr(registers[t] & 0xFF), end="")
MEM[address // 4] = registers[t]
elif instruction >> 26 == 0b000000 and instruction & 0b11111111111 == 0b00000101010: # set less than (slt)
registers[d] = 1 if signed(registers[s]) < signed(registers[t]) else 0
elif instruction >> 26 == 0b000000 and instruction & 0b11111111111 == 0b00000101011: # set less than unsigned (sltu)
registers[d] = 1 if registers[s] < registers[t] else 0
elif instruction >> 26 == 0b000100: # branch on equal (beq)
if registers[s] == registers[t]: PC += i * 4
elif instruction >> 26 == 0b000101: # branch on not equal (bne)
if registers[s] != registers[t]: PC += i * 4
elif instruction >> 26 == 0b000000 and instruction & 0b111111111111111111111 == 0b000000000000000001000: # jump register (jr)
PC = registers[s]
elif instruction >> 26 == 0b000000 and instruction & 0b111111111111111111111 == 0b000000000000000001001: # jump and link register (jalr)
temp = registers[s]
registers[31] = PC
PC = temp
else: raise Exception("Unknown instruction: " + instruction)
while True:
assert PC % 4 == 0
if PC == 0xFFFFFFFC: break # jumped to the end location, terminate program
instruction = MEM[PC // 4]
PC += 4
try:
decode_execute(instruction)
except: break
print(registers)
| mit | Python | |
daef27939bed715d8a7d1f27fbfb4a193fb2de73 | add multiprocessing for cpu-load | Justontheway/HelloWorld,Justontheway/HelloWorld | src/python/multiprocessing/predict.py | src/python/multiprocessing/predict.py | # -*- coding:utf-8 -*-
import os
import sys
from math import ceil
import time
import signal
os.environ['OMP_NUM_THREADS'] = '1'
import numpy as np
import multiprocessing as mp
MATRIX_SIZE = (20000, 20000)
#MATRIX_SIZE = (2000, 2000)
pool = None
EXIT_FLAG = False
def worker(*args, **kwargs):
mat1 = np.random.random(MATRIX_SIZE)
mat2 = np.random.random(MATRIX_SIZE)
return mat1.dot(mat2).sum()
def forever(worker, *args, **kwargs):
while True:
worker(*args, *kwargs)
def sigint(signum, stack):
global EXIT_FLAG
EXIT_FLAG = True
def main(size=None):
global pool, EXIT_FLAG
signal.signal(signal.SIGINT, sigint)
ts = time.perf_counter()
worker()
te = time.perf_counter()
t = te - ts
pool = mp.Pool(size)
import tracemalloc
tracemalloc.start()
while True and not EXIT_FLAG:
ss1 = tracemalloc.take_snapshot()
pool.apply_async(worker)
ss2 = tracemalloc.take_snapshot()
print(ss2.compare_to(ss1, 'lineno'))
#pool.apply(worker)
#pool.map_async(worker, range(int(ceil(t))))
#pool.map(worker, range(size))
#time.sleep(t)
pool.terminate()
pool.join()
def main1(size=None):
ps = [mp.Process(target=forever, args=(worker, )) for i in range(size)]
signal.signal(signal.SIGINT, sigint)
for p in ps:
p.start()
while not EXIT_FLAG:
time.sleep(1)
for p in ps:
p.terminate()
for p in ps:
p.join()
if __name__ == "__main__":
size = max(1, int(ceil(mp.cpu_count() * int(sys.argv[1]) / 100.0)))
print(size)
main(size)
#main1(size)
| apache-2.0 | Python | |
067668ee5879ba80cd29c9ae0ddee2a9be15fc31 | Create weather.py | krishnaaswani29/DenverCrimeVsPopulation | weather.py | weather.py |
from bs4 import BeautifulSoup
# In[27]:
import urllib2
# In[28]:
f = open('weather_1.txt', 'w')
f.write("Date" + '\t' + "Temp" +'\t' + "precp" +'\n')
# In[29]:
for y in range(2013, 2014):
for m in range(1, 13):
for d in range(1, 32):
# Check if leap year
if y%400 == 0:
leap = True
elif y%100 == 0:
leap = False
elif y%4 == 0:
leap = True
else:
leap = False
# Check if already gone through month
if (m == 2 and leap and d > 29):
continue
elif (m == 2 and d > 28):
continue
elif (m in [4, 6, 9, 10] and d > 30):
continue
# Open wunderground.com url
url = "http://www.wunderground.com/history/airport/KBUF/"+str(y)+ "/" + str(m) + "/" + str(d) + "/DailyHistory.html"
page = urllib2.urlopen(url)
# Get temperature from page
soup = BeautifulSoup(page)
# dayTemp = soup.body.nobr.b.string
dayTemp = soup.findAll(attrs={"class":"wx-value"})[1].string
precp = soup.findAll(attrs={"class":"wx-value"})[12].string
print dayTemp
print precp
# Format month for timestamp
if len(str(m)) < 2:
mStamp = '0' +str(m)
else:
mStamp = str(m)
# Format day for timestamp
if len(str(d)) < 2:
dStamp = '0' + str(d)
else:
dStamp = str(d)
# Build timestamp
timestamp = str(y) + '/' + mStamp + '/' +dStamp
print timestamp
print timestamp + '\t' + dayTemp +'\t' + precp
print url
# Write timestamp and temperature to file
f.write(timestamp + '\t' + dayTemp +'\t' + precp +'\n')
# Done getting data! Close file.
f.close()
| mit | Python | |
b7975ad7b733eda545e3647d40bc46abbe4df250 | add elseclause01.py | devlights/try-python | trypython/basic/elseclause01.py | trypython/basic/elseclause01.py | # coding: utf-8
"""
各処理の後に付与できる オプション else節 についてのサンプルです。
"""
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
class Sample(SampleBase):
def exec(self):
#
# Pythonの for, while, try には else をつけることが出来る
# どれも、正常に処理が通った場合に else に入るようになっている。
#
# for: ループが break されずに終了した場合
# while: ループが break されずに終了した場合
# try: 例外が発生しなかった場合
#
# 何かをサーチするような処理の場合、見つけた時点で break することが
# 多いため、このようなときに else を入れておくと見つからなかった場合の
# 処理を簡単に書くことが出来る。
#
# tryの場合は、例外が発生せずに正常に処理が通ったときを判定できる
#
for x in range(5):
pr('for-loop', x)
else:
pr('for-else', 'passed')
pr('-----------------------------------', '')
for x in range(5):
if x == 3:
break
pr('for-loop', x)
else:
pr('for-else', 'passed')
pr('-----------------------------------', '')
count = 5
while count >= 0:
pr('while-loop', count)
count -= 1
else:
pr('while-else', 'passed')
pr('-----------------------------------', '')
count = 5
while count >= 0:
if count == 2:
break
pr('while-loop', count)
count -= 1
else:
pr('while-else', 'passed')
pr('-----------------------------------', '')
try:
pr('try', sum((10, 20,)))
except MyException as e:
pr('except', e)
else:
pr('try-else', 'passed')
pr('-----------------------------------', '')
# noinspection PyUnreachableCode
try:
pr('try', sum((10, 20,)))
raise MyException('this is test exception.')
except MyException as e:
pr('except', e)
else:
pr('try-else', 'passed')
class MyException(Exception):
def __init__(self, message: str, *args, **kwargs):
super().__init__(*args, **kwargs)
self._message = message
def __str__(self, *args, **kwargs):
return f'{self._message}'
def __repr__(self, *args, **kwargs):
return self.__str__(*args, **kwargs)
def go():
obj = Sample()
obj.exec()
if __name__ == '__main__':
go()
| mit | Python | |
5a0afa8eead558adf8e24aa1a6a32055c0296b08 | Add docker settings | dkarchmer/django-aws-template,dkarchmer/django-aws-template,dkarchmer/django-aws-template,dkarchmer/django-aws-template | server/config/settings/docker.py | server/config/settings/docker.py | from .base import * # NOQA
import sys
import logging.config
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATES[0]['OPTIONS'].update({'debug': True})
STATIC_URL = '/static/'
STATIC_ROOT = '/www/static/'
STATICFILES_DIRS = (
('dist', os.path.join(STATIC_ROOT, 'dist')),
)
# Turn off debug while imported by Celery with a workaround
# See http://stackoverflow.com/a/4806384
if "celery" in sys.argv[0]:
DEBUG = False
# Show emails to console in DEBUG mode
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Show thumbnail generation errors
THUMBNAIL_DEBUG = True
# Log everything to the logs directory at the top
LOGFILE_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'logs')
# Reset logging
# (see http://www.caktusgroup.com/blog/2015/01/27/Django-Logging-Configuration-logging_config-default-settings-logger/)
LOGGING_CONFIG = None
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': "[%(asctime)s] %(levelname)s [%(pathname)s:%(lineno)s] %(message)s",
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'django_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(LOGFILE_ROOT, 'django.log'),
'formatter': 'verbose'
},
'proj_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(LOGFILE_ROOT, 'project.log'),
'formatter': 'verbose'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
}
},
'loggers': {
'django': {
'handlers': ['django_log_file'],
'propagate': True,
'level': 'DEBUG',
},
'project': {
'handlers': ['proj_log_file'],
'level': 'DEBUG',
},
}
}
logging.config.dictConfig(LOGGING)
# Debug Toolbar (http://django-debug-toolbar.readthedocs.org/)
INSTALLED_APPS += ('debug_toolbar',)
DEBUG_TOOLBAR_PATCH_SETTINGS = False
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', '192.168.99.100',)
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
# 'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
| mit | Python | |
bdc9a6525e75014c727eeaf0aeb8b21a5b2290ca | Create wildchr.py | praveendareddy21/my-repo,praveendareddy21/my-repo | wildchr.py | wildchr.py | def match(str1,str2):
print '_'
if str1=='*' and str2=='' :
return True
if str1=='' and str2=='' :
return True
if str1=='' or str2=='' :
return False
if str1[0] ==str2[0] :
return match(str1[1:],str2[1:])
if str1[0]=='*' and str2[0] != '':
return match(str1[1:],str2) or match(str1,str2[1:])
if str1[0]=='?' and str2[0] !='' :
return match(str1[1:],str2[1:])
return False
print match("ge*ek*s*s*a","geeksfassa")
| mit | Python | |
14199e0590cf07f791c7422ee0b670d93ff0c5b0 | Create Transformations2D.py (#2310) | TheAlgorithms/Python | linear_algebra/src/transformations_2d.py | linear_algebra/src/transformations_2d.py | """
2D Transformations are regularly used in Linear Algebra.
I have added the codes for reflection, projection, scaling and rotation 2D matrices.
scaling(5) = [[5.0, 0.0], [0.0, 5.0]]
rotation(45) = [[0.5253219888177297, -0.8509035245341184],
[0.8509035245341184, 0.5253219888177297]]
projection(45) = [[0.27596319193541496, 0.446998331800279],
[0.446998331800279, 0.7240368080645851]]
reflection(45) = [[0.05064397763545947, 0.893996663600558],
[0.893996663600558, 0.7018070490682369]]
"""
from math import cos, sin
from typing import List
def scaling(scaling_factor: float) -> List[List[float]]:
"""
>>> scaling(5)
[[5.0, 0.0], [0.0, 5.0]]
"""
scaling_factor = float(scaling_factor)
return [[scaling_factor * int(x == y) for x in range(2)] for y in range(2)]
def rotation(angle: float) -> List[List[float]]:
"""
>>> rotation(45) # doctest: +NORMALIZE_WHITESPACE
[[0.5253219888177297, -0.8509035245341184],
[0.8509035245341184, 0.5253219888177297]]
"""
c, s = cos(angle), sin(angle)
return [[c, -s], [s, c]]
def projection(angle: float) -> List[List[float]]:
"""
>>> projection(45) # doctest: +NORMALIZE_WHITESPACE
[[0.27596319193541496, 0.446998331800279],
[0.446998331800279, 0.7240368080645851]]
"""
c, s = cos(angle), sin(angle)
cs = c * s
return [[c * c, cs], [cs, s * s]]
def reflection(angle: float) -> List[List[float]]:
"""
>>> reflection(45) # doctest: +NORMALIZE_WHITESPACE
[[0.05064397763545947, 0.893996663600558],
[0.893996663600558, 0.7018070490682369]]
"""
c, s = cos(angle), sin(angle)
cs = c * s
return [[2 * c - 1, 2 * cs], [2 * cs, 2 * s - 1]]
print(f" {scaling(5) = }")
print(f" {rotation(45) = }")
print(f"{projection(45) = }")
print(f"{reflection(45) = }")
| mit | Python | |
0942d2ccf68b88db2616f9839c1ca1ebfacb8ad9 | Migrate in domain model changes | johnjohndoe/spendb,CivicVision/datahub,USStateDept/FPA_Core,USStateDept/FPA_Core,spendb/spendb,openspending/spendb,pudo/spendb,johnjohndoe/spendb,nathanhilbert/FPA_Core,spendb/spendb,nathanhilbert/FPA_Core,CivicVision/datahub,spendb/spendb,nathanhilbert/FPA_Core,CivicVision/datahub,openspending/spendb,openspending/spendb,pudo/spendb,pudo/spendb,USStateDept/FPA_Core,johnjohndoe/spendb | migration/versions/013_dataset_serp.py | migration/versions/013_dataset_serp.py | from sqlalchemy import *
from migrate import *
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
dataset = Table('dataset', meta, autoload=True)
serp_title = Column('serp_title', Unicode())
serp_title.create(dataset)
serp_teaser = Column('serp_teaser', Unicode())
serp_teaser.create(dataset)
| agpl-3.0 | Python | |
c3743e6ba9f38388c6705f59b7991d4ed75d849e | Add helper module | minhvvu/writing_ass | helper.py | helper.py | from itertools import tee
def pairwise(itr):
a, b = tee(itr) # two version of itr
next(b, None) # b goes ahead one step
return zip(a, b) # return iterator
| mit | Python | |
53f4477776e922455c4a9180ac587ccfe5a3f589 | add Matplotlib1DWidget | michaellaier/pymor,michaellaier/pymor,michaellaier/pymor,michaellaier/pymor | src/pymor/gui/matplotlib.py | src/pymor/gui/matplotlib.py | # This file is part of the pyMor project (http://www.pymor.org).
# Copyright Holders: Felix Albrecht, Rene Milk, Stephan Rave
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
import math as m
import numpy as np
from PySide.QtOpenGL import QGLWidget
from PySide.QtGui import QSizePolicy, QPainter, QFontMetrics
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from pymor.la.numpyvectorarray import NumpyVectorArray
from pymor.grids.referenceelements import line
class Matplotlib1DWidget(FigureCanvas):
    """Qt canvas widget that plots one-dimensional data as a matplotlib line.

    The x-axis is fixed at construction time to the centers of *grid*;
    ``set`` swaps in new y-values and redraws.
    """
    def __init__(self, parent, grid, vmin=None, vmax=None, codim=1, dpi=100):
        """Create the canvas.

        parent -- Qt parent widget.
        grid   -- one-dimensional pyMor grid; its reference element must be `line`.
        vmin, vmax -- fixed y-axis limits (left to matplotlib if None).
        codim  -- codimension of the plotted data; 0 or 1 (which entities the
                  values live on -- TODO confirm against pymor conventions).
        dpi    -- resolution of the rendered figure.
        """
        assert grid.reference_element is line
        assert codim in (0, 1)
        # The Figure must exist before FigureCanvas.__init__ is called below.
        self.figure = Figure(dpi=dpi)
        self.axes = self.figure.gca()
        self.axes.hold(False)  # replace the previous line on redraw instead of overplotting
        # Plot zeros initially; set() later replaces the y-data for the same x-grid.
        self.line, = self.axes.plot(grid.centers(codim), np.zeros_like(grid.centers(codim)), 'b')
        self.axes.set_ylim(vmin, vmax)
        super(Matplotlib1DWidget, self).__init__(self.figure)
        self.setParent(parent)
        self.setMinimumSize(300, 300)
        self.setSizePolicy(QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding))
    def set(self, U):
        """Display the value vector *U* (one value per grid center) and redraw."""
        self.line.set_ydata(U)
        self.draw()
| bsd-2-clause | Python | |
bbb9c4df3b4bdafb1e3b4c5bc05b19dd56aff407 | Add interactive console to add test cases. | google/budou | add_testcase.py | add_testcase.py | # coding: utf-8
from six.moves import input
import budou
import json
TESTCASES_PATH = 'test/cases.ndjson'
def colorize(text, color='green'):
    """Return *text* wrapped in ANSI terminal escape codes for *color*.

    Supported colors: red, green (default), yellow, blue. An unknown
    color name raises KeyError.
    """
    RESET = '\033[0m'
    palette = {
        'red': '\033[91m',
        'green': '\033[92m',
        'yellow': '\033[93m',
        'blue': '\033[94m',
    }
    return palette[color] + text + RESET
def main():
    """Interactive console: read sentences, show Budou's chunking, record cases.

    Loops forever (Ctrl-C to exit): parses each entered sentence with Budou,
    displays the resulting chunks, asks the user to confirm or correct them,
    and stores the result as a test case.
    """
    print('Hello, this is an interactive console to add test cases for Budou.')
    print('By following this instruction, new test case will be added and '
          'validated in the future updates.')
    print('Press Ctrl-C to exit.')
    parser = budou.authenticate()
    prompt = colorize('Input a source sentence to process: ')
    while True:
        sentence = input(prompt).decode('utf-8')
        if not sentence:
            print(colorize('No test case was provided. Try again.', 'red'))
            continue
        sentence = sentence.strip()
        print('Your input: %s' % (sentence))
        result = parser.parse(sentence, use_cache=False, use_entity=False)
        chunks = result['chunks']
        print(colorize('Retrived chunks from current implementation:', 'blue'))
        for chunk in chunks:
            print('pos: %s\tword: "%s"' % (chunk['pos'], chunk['word']))
        if ask_if_correct():
            # User accepted Budou's own segmentation as the expectation.
            words = [chunk['word'] for chunk in chunks]
        else:
            words = ask_expectation(sentence)
        add_test_case(sentence, words, result['tokens'], result['language'])
def ask_if_correct():
    """Prompt until the user answers yes/no; return True for yes, False for no."""
    prompt = colorize(
        'Is this result expected? Please enter `yes` if it is. (yes/no): ')
    while True:
        answer = input(prompt)
        if answer in ('y', 'yes'):
            print('Thanks. the test case will be added as is.\n\n')
            return True
        if answer in ('n', 'no'):
            return False
        print('Please enter yes or no. (yes/no)')
def ask_expectation(source):
    """Ask the user to type the expected segmentation of *source*.

    The user enters the sentence with slashes between chunks; the input must
    reproduce *source* exactly once the slashes are removed. Returns the list
    of chunk words after the user confirms them.
    """
    print('Uh-oh. Please input the expected result by separating the sentence '
          'with slashes.')
    print('e.g. 今日の/ランチは/カツ丼です。')
    while True:
        reply = input(colorize('Input expected result: ')).decode('utf-8')
        if not reply:
            print(colorize('No input was provided. Try again.', 'red'))
            continue
        segments = reply.split('/')
        # Dropping the slashes must give back the original sentence verbatim.
        if ''.join(segments) != source:
            print(colorize(
                'The input has different words from source input. Please verify if '
                'your input matches with the source.', 'red'))
            continue
        print(colorize('Expected chunks:', 'blue'))
        for segment in segments:
            print('word: %s' % (segment))
        print('Please enter `yes` if this is correct. Press enter `no` if you want '
              'to edit again.')
        while True:
            confirmation = input(colorize('Correct?: '))
            if confirmation in ('y', 'yes'):
                return segments
            if confirmation in ('n', 'no'):
                break  # back to the outer loop to re-enter the segmentation
            print('Please enter `yes` or `no`.')
def add_test_case(source, words, tokens, language):
    """Persist a test case into TESTCASES_PATH (newline-delimited JSON).

    If a case with the same sentence already exists, asks the user whether to
    update it; on confirmation the existing entry is replaced in place (the
    original code appended a duplicate row instead). Returns False when the
    user cancels an update, None otherwise.

    Fixes vs. the original:
    - records are terminated with '\\n' -- the original wrote no newline, so
      a second append fused two JSON documents onto one line and broke the
      ndjson reader above;
    - confirming an update now rewrites the matching entry instead of
      appending a duplicate;
    - typo 'cencel' -> 'cancel' in the prompt.
    """
    with open(TESTCASES_PATH) as f:
        cases = [json.loads(row) for row in f.readlines() if row]
    needs_update = False
    for case in cases:
        if case['sentence'] == source:
            print('The test case "%s" is already included.' % (source))
            print('Do you want to update the test case with the new configuration? '
                  'Enter `yes` to update or `no` to cancel. (y/n)')
            while True:
                response = input(colorize('Update?: '))
                if response in {'y', 'yes'}:
                    needs_update = True
                    break
                elif response in {'n', 'no'}:
                    return False
                else:
                    print('Please enter `yes` or `no`.')
            break
    new_case = {
        'sentence': source,
        'language': language,
        'tokens': tokens,
        'expected': words,
    }
    def serialize(case):
        # ndjson requires exactly one JSON document per line.
        return json.dumps(
            case, ensure_ascii=False, sort_keys=True).encode('utf-8') + b'\n'
    if needs_update:
        # Replace the existing entry in place rather than appending a duplicate.
        with open(TESTCASES_PATH, 'w') as f:
            for case in cases:
                f.write(serialize(new_case if case['sentence'] == source else case))
    else:
        with open(TESTCASES_PATH, 'a') as f:
            f.write(serialize(new_case))
    print('Thank you for submission. Your test case "%s" is added.\n\n' % (
        source))
# Run the interactive console only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
9961234594b2581707978c20a51b838ba4b70627 | Add lan_ping.py | catroll/clipboard,catroll/clipboard,catroll/clipboard,catroll/clipboard | lan_ping.py | lan_ping.py | #!/usr/bin/env python
# -*- coding:utf8 -*-
import Queue
import threading
import subprocess
import re
import sys
lock = threading.Lock()
DEFAULT_THREAD_NUM = 100
def get_ips(ip):
    """Expand an address range like '192.168.1.1-254' into a list of IPs.

    The argument must be a dotted prefix followed by 'low-high' for the last
    octet; anything else raises Exception('IP range error'). Both endpoints
    are inclusive.
    """
    match = re.match(r'(.*\d+)\.(\d+)-(\d+)', ip)
    if match is None:
        raise Exception('IP range error')
    prefix = match.group(1)
    low = int(match.group(2))
    high = int(match.group(3))
    return [prefix + "." + str(octet) for octet in range(low, high + 1)]
def ping(queue):
while True:
if queue.empty():
sys.exit()
ip = queue.get()
ret = subprocess.call("ping -c 1 %s" % ip, shell=True,
stdout=open('/dev/null', 'w'),
stderr=subprocess.STDOUT)
lock.acquire()
if ret == 0:
print ip
lock.release()
queue.task_done()
def main():
args, arg_num = sys.argv, len(sys.argv)
if arg_num < 2 or arg_num > 3:
print "Usage: %s IP段(如:192.168.1.1-254) 线程数(默认:100)" % args[0]
exit()
ip_range = get_ips(args[1])
thread_num = int(args[2]) if arg_num == 3 and args[2].isdigit() else DEFAULT_THREAD_NUM
queue = Queue.Queue()
for i in ip_range:
queue.put(i)
for q in range(thread_num):
worker = threading.Thread(target=ping, args=(queue,))
worker.setDaemon(True)
worker.start()
queue.join()
# Script entry point: only run the scanner when executed directly.
if __name__ == "__main__":
    main()
| mit | Python | |
b768281bf3b0e069312b64e738c12064cf4bd185 | add missing model migration | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | features/tags/migrations/0002_auto_20170116_2047.py | features/tags/migrations/0002_auto_20170116_2047.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-16 19:47
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Django migration: give the Tag model default alphabetical ordering."""
    dependencies = [
        # Must run after the initial tags migration that created the model.
        ('tags', '0001_initial'),
    ]
    operations = [
        # Updates the Tag model's Meta options so querysets are ordered
        # by 'name' by default.
        migrations.AlterModelOptions(
            name='tag',
            options={'ordering': ('name',)},
        ),
    ]
| agpl-3.0 | Python | |
b38ddb6c279da63d98e64476b4edc6b4c02f02d6 | add numeric sample | yusabana-sandbox/python-practice | numeric.py | numeric.py | # -*- coding: utf-8 -*-
print(0b11)       # binary literal -> prints 3
print(0o17)       # octal literal -> prints 15
print(0xb)        # hexadecimal literal -> prints 11
# int('2.1')  # raises ValueError: int() rejects strings containing a decimal point
print(type(0xb))  # all the literals above are plain int objects
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.